text stringlengths 8 6.05M |
|---|
import sys
import maya.OpenMaya as OpenMaya
import maya.OpenMayaMPx as OpenMayaMPx
import maya.OpenMayaRender as OpenMayaRender
# Type name under which the node is registered with Maya.
nodeTypeName = "RG_Part"
# Give an ID (identifier) to our type of node.
# NOTE(review): 0x00000004 lies in the range meant for local testing only —
# obtain a unique registered ID before distributing this plug-in.
nodeTypeId = OpenMaya.MTypeId(0x00000004)
# A pointer to hardware render class
glRenderer = OpenMayaRender.MHardwareRenderer.theRenderer()
# Pointer to open GL instructions
glFT = glRenderer.glFunctionTable()
# Placeholder MObject; not used elsewhere in this chunk.
aBase = OpenMaya.MObject()
class RigNode(OpenMayaMPx.MPxLocatorNode):
    """Custom locator node ("RG_Part") drawn in the viewport."""

    # Attribute handle for the node's "size" distance attribute
    # (expected to be created during node initialization).
    size = OpenMaya.MObject()

    def __init__(self):
        OpenMayaMPx.MPxLocatorNode.__init__(self)

    def compute(self, plug, dataBlock):
        # This locator computes no outputs; defer to Maya's default handling.
        return OpenMaya.kUnknownParameter

    # Define how the rigNode will draw.
    def draw(self, view, path, style, status):
        """Viewport draw callback; reads the `size` plug as a scale factor."""
        thisNode = self.thisMObject()
        plug = OpenMaya.MPlug(thisNode, self.size)
        sizeVal = plug.asMDistance()
        multiplier = sizeVal.asCentimeters()
        view.beginGL()
        # BUG FIX: beginGL() was never paired with endGL(), leaving the GL
        # state unbalanced for subsequent draws. (No geometry is emitted yet.)
        view.endGL()
def nodeCreator():
    """Factory Maya calls to instantiate the node; returns an MPx pointer."""
    node = RigNode()
    return OpenMayaMPx.asMPxPtr(node)
def nodeInitializer():
    """Create and attach the node's attributes.

    NOTE(review): no attributes are created here, yet RigNode.draw() reads
    self.size — presumably a distance attribute should be built with
    MFnUnitAttribute and addAttribute(); confirm against the original
    plug-in. Also verify OpenMaya.MStatus is available in this API version.
    """
    return OpenMaya.MStatus.kSuccess
def initializePlugin(obj):
    """Entry point Maya invokes on plug-in load: register the locator node."""
    plugin = OpenMayaMPx.MFnPlugin(obj)
    try:
        plugin.registerNode(nodeTypeName, nodeTypeId, nodeCreator, nodeInitializer, OpenMayaMPx.MPxNode.kLocatorNode)
    except Exception:
        # FIX: was a bare `except:` (which also swallows SystemExit /
        # KeyboardInterrupt) and silently hid the failure from Maya.
        sys.stderr.write("Failed to register node: %s\n" % nodeTypeName)
        raise
def uninitializePlugin(obj):
    """Entry point Maya invokes on plug-in unload: deregister the node."""
    plugin = OpenMayaMPx.MFnPlugin(obj)
    try:
        plugin.deregisterNode(nodeTypeId)
    except Exception:
        # FIX: was a bare `except:`; narrow it, add the missing newline,
        # and let Maya see the failure.
        sys.stderr.write("Failed to deregister node: %s\n" % nodeTypeName)
        raise
|
from random import randint
from datetime import datetime, timedelta
from Utility import Utility
import uuid
from decorators import run_time_decorator
# Camera ids grouped by the kind of data each device produces.
device_list = {'billing': ['ICam007', 'ICam008', 'ICam009', 'ICam012'], 'sha': ['ICam001', 'ICam002', 'ICam003'],
               'footfall': ['ICam004', 'ICam005', 'ICam006', 'ICam011'], 'queue': ['ICam010']}
# Outlet name -> camera ids installed there.
device_mapping = {'Karachi Bakery': ['ICam007', 'ICam004'], 'Tiffin Express': ['ICam008', 'ICam005'],
                  'KFC': ['ICam009', 'ICam006'], 'Shopper Stop': ['ICam011', 'ICam012']}
# Boolean pair indexed by a random number when deciding if a group image exists.
flag = [True, False]
gender_list = ["Male", "Female"]
# Possible emotion labels attached to a detected face.
emotion_list = ["HAPPY", "ANGRY", "SAD", "SURPRISED", "CONFUSED", "DISGUSTED", "CALM"]
@run_time_decorator
def get_device_list():
"""
:return: list of devices registered for data generation
"""
all_device_list = []
for device_type in device_list:
print device_type
devices = device_list[device_type]
for device in devices:
print device
all_device_list.append(device)
return all_device_list
@run_time_decorator
def random_number(inrange):
    """Return a random integer in the half-open range [inrange[0], inrange[1])."""
    low, high = inrange
    return randint(low, high - 1)
@run_time_decorator
def create_image_name(date, time, device_id, image_type="None", psngr_pnr="None", psngr_flight="None"):
    """Build an image key such as
    "F1ICam012_Grp_N65E68_D45K89_2017-06-03_19:01:41.632899.jpg".

    :param date: date portion of the key.
    :param time: time portion; ".jpg" is appended to it.
    :param device_id: camera id that captured the image.
    """
    parts = ["F1", device_id, image_type, psngr_pnr, psngr_flight, date, time + ".jpg"]
    return "_".join(parts)
@run_time_decorator
def location_finder(device_id):
    """Return the outlet whose device list contains device_id, else "NA"."""
    for location, devices in device_mapping.items():
        if device_id in devices:
            return location
    return "NA"
@run_time_decorator
# get the device mapper initialized
def create_device_mapper():
    """Print every row of the DeviceMapper table.

    :return: None (rows are only printed; no mapping is built or returned,
             despite what the original docstring suggested).
    """
    utility = Utility(log_file="data_gen_process_log_file.txt", debug=1)
    sql = "SELECT DeviceName, DeviceType, DeviceLocation FROM DeviceMapper"
    mapped_device_list, device_count = utility.query_database(sql)
    if device_count == 0:
        print("no device registered in the database")
        return None
    for mapped_device in mapped_device_list:
        print mapped_device
    return None
@run_time_decorator
def flight_details(flight_info=None):
    """Create a pseudo PNR: first character of flight_info plus five random
    uppercase characters taken from a UUID.

    :param flight_info: airline/destination string; required.
    :return: the generated PNR, or None when flight_info is missing.
    """
    if flight_info is None:
        print("flight/destination required in args")
        # BUG FIX: previously fell through and crashed on flight_info[0]
        # with a TypeError when no argument was supplied.
        return None
    randnum = str(uuid.uuid4())
    create_pnr = flight_info[0] + randnum[-5:].upper()
    return create_pnr
@run_time_decorator
def get_emotions():
    """Return meta information of emotions for a face in an image,
    formatted as "EMO:score EMO:score EMO:score"."""
    score_ranges = ([100, 150], [50, 100], [10, 50])
    pieces = []
    for score_range in score_ranges:
        label = emotion_list[random_number([0, len(emotion_list)])]
        score = random_number(score_range) / 4.0
        pieces.append("{}:{}".format(label, score))
    return " ".join(pieces)
@run_time_decorator
def get_psngr_flight_info(airline, airport):
    """Resolve a flight number for (airline, airport) via three DB lookups.

    :param airline: airline description, e.g. "Indigo".
    :param airport: destination city name.
    :return: flight number string, or None when any lookup finds nothing.
    """
    # NOTE(review): queries are built with str.format — acceptable for this
    # synthetic generator, but parameterize if inputs ever become external.
    sql_get_airport_code = "SELECT Airport_3LC FROM Airports_Info where City_Desc = '{}'".format(airport)
    utility = Utility(log_file="data_gen_process_log_file.txt", debug=1)
    airport_code, airport_count = utility.query_database(sql_get_airport_code)
    if airport_count == 0:
        print("'{}': airport not served".format(airport))
        return None
    sql_get_airline_code = "SELECT Airline2LC FROM Airlines_Info where Description = '{}'".format(airline)
    airline_code, airline_count = utility.query_database(sql_get_airline_code)
    if airline_count == 0:
        # BUG FIX: message previously read "'{};:" — stray semicolon and
        # unbalanced quoting.
        print("'{}': airline service not available".format(airline))
        return None
    sql_get_flight_code = "SELECT FLNO3 FROM Flights_Info where Destination='{}' and FLNO3 LIKE '{}%'".format(
        airport_code[0][0], airline_code[0][0])
    psngr_flight, flight_count = utility.query_database(sql_get_flight_code)
    if flight_count == 0:
        print("{} not served to {}".format(airline, airport))
        return None
    return psngr_flight[0][0]
@run_time_decorator
def create_data(date, process_start_time, end_time, airline, destination):
    """Generate and dump synthetic passenger / group-image rows.

    :param date: calendar date the data is generated for.
    :param process_start_time: datetime the simulated window opens.
    :param end_time: datetime the simulated window closes.
    :param airline: airline name used to resolve the flight.
    :param destination: destination city used to resolve the flight.
    :return: None (rows are written to the database as a side effect).
    """
    # device name list
    utility = Utility(log_file="data_gen_process_log_file.txt", debug=1)
    print(airline, destination, process_start_time, end_time)
    # prepare flight details
    psngr_flight, start_time = get_psngr_flight_info(airline, destination), process_start_time
    if psngr_flight is None:
        return None
    print("{} is ready to take off for {}, on boarding passengers..........".format(airline, destination))
    reg_device_list = get_device_list()
    sha_devices = device_list['sha']
    # round robin selection of sha
    def get_device():
        for device in sha_devices:
            yield device
    device_gen_obj = get_device()
    # One simulated passenger per 5-minute step until end_time is reached.
    while 4 > 3:
        if start_time >= end_time:
            break
        # create passenger at sha's using round robin
        try:
            device_id = device_gen_obj.next()
        except StopIteration:
            print("generation exception catched")
            # Generator exhausted: restart the round robin from the first device.
            device_gen_obj = get_device()
            device_id = device_gen_obj.next()
        device_type = "SHA"
        date, time = date, start_time.time()
        print device_id, date, time, device_type
        image_type, face_id, psngr_pnr, status = "Ind", "", flight_details(destination), 4
        image_key = create_image_name(str(date), str(time), device_id, image_type, psngr_pnr, psngr_flight)
        # Image name is the key without its leading 3-character prefix.
        image_name = image_key[3:]
        print image_key, image_name
        # dump data in AP_ImageData(add passenger in the database)
        ap_image_info_row = [image_name, image_key, str(start_time), image_type, device_id, device_type, status]
        print ap_image_info_row
        # NOTE(review): SQL built by string formatting — fine for synthetic
        # data, but injectable if any input ever becomes external.
        sql = "INSERT INTO AP_ImageData (ImageName, ImageKey, LogDate, Type, DevId, status, device_type) VALUES ('{}', " \
              "'{}', '{}', '{}', '{}', '{}', '{}')".format(image_name, image_key, str(start_time),
                                                           image_type, device_id, status, device_type)
        # print sql
        utility.update_database(sql)
        # characteristics info
        age_low = random_number([15, 30])
        age_high = random_number([30, 70])
        emotions = get_emotions()
        gender, time_grp = gender_list[random_number([0, len(gender_list)])], str(time)[:2]
        print age_low, age_high, gender, emotions, time_grp
        # dump meta info in AP_ImageData_Info
        sql = "INSERT INTO AP_ImageData_Info (ImageKey, gender, age_High, age_Low, emotions, type, " \
              "psngr_pnr, psngr_flight, LogDate, location, device_type) VALUES ('{}', '{}', '{}', '{}', '{}', '{}'" \
              ", '{}', '{}', '{}', '{}', '{}' )".format(image_key, gender, age_high,
                                                        age_low, emotions, image_type,
                                                        psngr_pnr, psngr_flight,
                                                        str(start_time), device_id,
                                                        device_type)
        # print sql
        utility.update_database(sql)
        grp_matched_time = start_time
        # start_time += timedelta(minutes=5)
        print reg_device_list
        # find group images for the passenger
        for reg_device in reg_device_list:
            # minutes = random_number([1, 10])
            # grp_matched_time += timedelta(minutes=minutes)
            # generate matches for individual passengers.
            # create grp image against different locations:-
            # NOTE(review): random_number([0, 1]) is randint(0, 0) == 0, so
            # flag[0] (True) is always chosen and this `continue` never fires
            # — presumably [0, 2] was intended; confirm.
            if not flag[random_number([0, 1])]:  # flag true means grp image is captured
                continue
            print reg_device
            matched_device_id, matched_device_type = "", ""
            reg_device_ind = False
            for device_type in device_list:
                if device_type == "sha":
                    # check if reg_device is of sha as well in that case please ignore the reg_device
                    if reg_device in device_list[device_type]:
                        reg_device_ind = True
                        break
                if reg_device in device_list[device_type]:
                    matched_device_id = reg_device
                    matched_device_type = device_type
                    break
            if reg_device_ind:
                continue
            minutes = random_number([3, 15])
            if matched_device_type == "queue":
                # Queue cameras see the passenger before the boarding window.
                old_grp_matched_time = grp_matched_time
                grp_matched_time = process_start_time - timedelta(minutes=minutes)
            else:
                grp_matched_time += timedelta(minutes=minutes)
            matched_image_type, matched_date, matched_time = "Grp", date, grp_matched_time.time()
            matched_image_key = create_image_name(str(date), str(grp_matched_time), matched_device_id, matched_image_type)
            matched_image_name = matched_image_key[3:]
            # add group in ap_image_data
            sql_grp_img = "INSERT INTO AP_ImageData (ImageName, ImageKey, LogDate, Type, DevId, status, device_type) " \
                          "VALUES ('{}', " \
                          "'{}', '{}', '{}', '{}', '{}', '{}')".format(matched_image_name, matched_image_key,
                                                                       str(grp_matched_time),
                                                                       matched_image_type,
                                                                       matched_device_id,
                                                                       status, matched_device_type)
            # print sql_grp_img
            utility.update_database(sql_grp_img)
            # add meta info of group image in ap_image_data
            # dump meta info in AP_ImageData_Info
            grp_image_emotions = get_emotions()
            sql_grp_img_meta = "INSERT INTO AP_ImageData_Info (ImageKey, gender, age_High, age_Low, emotions, " \
                               "type, psngr_pnr, psngr_flight, LogDate, location, device_type) " \
                               "VALUES ('{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}' )".format\
                               (matched_image_key, gender, age_high, age_low, grp_image_emotions,
                                matched_image_type, "None", "None", str(grp_matched_time), matched_device_id,matched_device_type)
            print sql_grp_img_meta
            # NOTE(review): the meta insert is commented out below, so group
            # image meta rows are only printed, never stored — confirm intent.
            # utility.update_database(sql_grp_img_meta)
            # add matches in CL_ImageMap
            similarity = str(random_number([75, 100]))
            matched_device_location = ""
            for location in device_mapping:
                if reg_device in device_mapping[location]:
                    matched_device_location = location
            sql_grp_img_match = "INSERT INTO CL_ImageMap (IndKey, GrpKey, Similarity, GrpLogDate, IndLogDate, " \
                                "MatchDevId, device_type, DeviceLocation) VALUES ('{}', '{}', '{}', '{}', '{}', " \
                                "'{}', '{}', '{}')".format(image_key, matched_image_key, similarity, grp_matched_time,
                                                           start_time, matched_device_id, matched_device_type,
                                                           matched_device_location)
            # print sql_grp_img_match
            # NOTE(review): this INSERT uses query_database, unlike the other
            # writes which use update_database — verify it is intentional.
            utility.query_database(sql_grp_img_match)
            if matched_device_type == "queue":
                grp_matched_time = old_grp_matched_time
        start_time += timedelta(minutes=5)
# start making data
# create_device_mapper()
# Module-level kick-off: simulate the last two hours up to "now".
create_data(datetime.now().date(), datetime.now()-timedelta(hours=2), datetime.now(), "Indigo", "Ahmedabad")
# print get_psngr_flight_info("Emirates", "Chandigarh")
# print get_device_list()
from os import environ
# if you set a property in SESSION_CONFIG_DEFAULTS, it will be inherited by all configs
# in SESSION_CONFIGS, except those that explicitly override it.
# the session config can be accessed from methods in your apps as self.session.config,
# e.g. self.session.config['participation_fee']
SESSION_CONFIG_DEFAULTS = {
    'real_world_currency_per_point': 0.04,
    'initial_points': 50,
    'participation_fee': 1.,
    # Minimum number of participants before the games can start.
    'min_players_start': 4,
    'num_interactions': 8,
    # presumably seconds before a waiting participant may skip — confirm
    "wait_to_skip": 120,
    "timeout": 20,
    "timeout_mins": 20,
    'treatment': 1,
    'compensation_units': 25,
    # $7/hour expressed per second of waiting.
    'pay_for_waiting': 7/3600,
    'max_pay_for_waiting': 5.,
    'REAL_WORLD_CURRENCY_DECIMAL_PLACES': 2,
    'quiz_bonus': 0.,
    'base_points': 0,
    'doc': "",
    # Settings used when publishing the study as an MTurk HIT.
    "mturk_hit_settings": {
        'keywords': ['bonus', 'study', 'experiment'],
        'title': 'Multiperson decision making experiment with large bonus',
        'description': 'Earn a large bonus, in this 20-50 minute experiment. Total hourly wage has been between $11-$20/hour in previous sessions.',
        'frame_height': 700,
        'template': 'global/mturk_template.html',
        'minutes_allotted_per_assignment': 100,
        'expiration_hours': 2,
        'grant_qualification_id': '3EZ90CUA4RK0VSYFM17NZ8UO4BLIWL',
        # Exclude workers who already hold the study's qualification
        # (i.e. repeat participants).
        'qualification_requirements': [
            {
                'QualificationTypeId': '3EZ90CUA4RK0VSYFM17NZ8UO4BLIWL',
                'Comparator': 'DoesNotExist',
            },
            # {
            #     'QualificationTypeId': "000000000000000000L0",
            #     'Comparator': "GreaterThan",
            #     'IntegerValues': [95]
            # },
            # {
            #     'QualificationTypeId': "00000000000000000071",
            #     'Comparator': "EqualTo",
            #     'LocaleValues': [{
            #         'Country': "US",
            #     }]
            # }
        ]
    }
}
SESSION_CONFIGS = [
    # Full pipeline: lobby -> waiting -> pd apps.
    {
        'name': 'full',
        'display_name': "Full experiment",
        'num_demo_participants': 6,
        'app_sequence': ['lobby', 'waiting', 'pd'],
    },
    # Same flow without the lobby app.
    {
        'name': 'only_games',
        'display_name': "Only Games",
        'num_demo_participants': 6,
        'min_players_start': 4,
        'app_sequence': ['waiting', 'pd'],
    },
    # Browser-bot configuration with short timeouts for automated runs.
    {
        'name': 'bots',
        'display_name': "Bots",
        'user_browser_bots': True,
        'num_demo_participants': 4,
        'treatment': 2,
        "timeout": 4,
        "timeout_mins": 0.1,
        'min_players_start': 4,
        'app_sequence': ['lobby', 'waiting', 'pd'],
    }
]
# ISO-639 code
# for example: de, fr, ja, ko, zh-hans
LANGUAGE_CODE = 'en'
# e.g. EUR, GBP, CNY, JPY
REAL_WORLD_CURRENCY_CODE = 'USD'
USE_POINTS = True
ROOMS = []
ADMIN_USERNAME = 'admin'
# for security, best to set admin password in an environment variable
ADMIN_PASSWORD = environ.get('OTREE_ADMIN_PASSWORD')
AUTH_LEVEL = environ.get('OTREE_AUTH_LEVEL')
DEMO_PAGE_INTRO_HTML = """ """
# SECURITY(review): this secret key is committed to source control — it
# should be rotated and loaded from an environment variable instead.
SECRET_KEY = '!8=wtrajrj+gu-=pg6wd^!f-^rk$mj%$dob)yvl+0s+b#80vm_'
# Debug mode is on unless OTREE_PRODUCTION is set to a non-empty, non-'0' value.
DEBUG = (environ.get('OTREE_PRODUCTION') in {None, '', '0'})
# if an app is included in SESSION_CONFIGS, you don't need to list it here
INSTALLED_APPS = ['otree']
|
from typing import Optional
from pydantic import BaseModel
from enum import Enum
from datetime import datetime
class Sex(str, Enum):
    """Sender's gender enumeration."""
    male = "male"
    female = "female"
    unknown = "unknown"
class PostType(str, Enum):
    """Event type enumeration."""
    message = "message"
    notice = "notice"
    request = "request"
    meta_event = "meta_event"
class MessageType(str, Enum):
    """Type of the message within a message event."""
    private = "private"
    group = "group"
class MessageSubType(str, Enum):
    """Message subtype."""
    # Private-chat message subtypes
    friend = "friend"
    group = "group"
    other = "other"
    # Group-chat message subtypes
    normal = "normal"
    anonymous = "anonymous"
    notice = "notice"
class Anonymous(BaseModel):
    """Anonymous user information."""
    id: int
    name: str
    flag: str
class Sender(BaseModel):
    """Sender information (no individual field is guaranteed to be present)."""
    user_id: Optional[int] = None
    nickname: Optional[str] = None
    card: Optional[str] = None
    sex: Optional[Sex] = None
    age: Optional[int] = None
    area: Optional[str] = None
    level: Optional[str] = None
    role: Optional[str] = None
    title: Optional[str] = None
class Event(BaseModel):
    """Event data."""
    # Fields present on every event
    time: datetime
    self_id: int
    post_type: PostType
    # Private-message fields
    message_type: Optional[MessageType] = None
    sub_type: Optional[MessageSubType] = None
    message_id: Optional[int] = None
    user_id: Optional[int] = None
    message: Optional[str] = None
    raw_message: Optional[str] = None
    font: Optional[int] = None
    sender: Optional[Sender] = None
    # Group-message fields
    group_id: Optional[int] = None
    anonymous: Optional[Anonymous] = None
class QuickReply(BaseModel):
    """Quick-reply information."""
    reply: str
    auto_escape: bool
    at_sender: Optional[bool] = None
    delete: Optional[bool] = None
    kick: Optional[bool] = None
    ban: Optional[bool] = None
    ban_duration: Optional[int] = None
import time
import sys
import numpy as np
import random
from random import choice
import multiprocessing
import time
# Set Global Variables:
# numberrr = 0
start = 0           # wall-clock time when main() begins
run_time = 0        # total runtime of main()
termination = 0     # (reserved) termination time from argv
random_seed = 0     # (reserved) RNG seed from argv
vertices = 0        # number of vertices in the graph
depot = 0           # depot vertex id
required_edges = 0  # task number!!!
# non_required_edges = 0
# vehicles = 0
capacity = 0        # vehicle capacity limit
total_cost = 0  # parsed from the header; not really used
actual_total_cost = 0  # FINAL RESULT COST !!!!! important!!!
# A list storing every actual_total_cost (1000 runs) in order.
actual_total_cost_list = [0, ]
cost_matrix = []    # 2-D edge costs; MAX_VALUE marks "no edge"
demand_matrix = []  # 2-D edge demands
# Shortest distance between every pair of vertices.
min_dist = []
# Edges that carry a task: set of tuples (v1, v2), both orientations stored.
free_set = set()
# A list to store all routes of the current pass
all_routes = []
# A list storing the all_routes of every pass (1000 runs)
all_routes_list = ['', ]
MAX_VALUE = 99999   # "infinity" placeholder
# Set global variables
def set_globals(file_content_list):
    """Parse the CARP instance header and edges, populating all module globals.

    :param file_content_list: raw lines of the instance file (with newlines).
    """
    global vertices
    vertices_line = file_content_list[1].rstrip('\n')
    vertices = extract_digit(vertices_line)
    global depot
    depot_line = file_content_list[2].rstrip('\n')
    depot = extract_digit(depot_line)
    global required_edges
    required_edges_line = file_content_list[3].rstrip('\n')
    required_edges = extract_digit(required_edges_line)
    global non_required_edges
    non_required_edges_line = file_content_list[4].rstrip('\n')
    non_required_edges = extract_digit(non_required_edges_line)
    global vehicles
    vehicles_line = file_content_list[5].rstrip('\n')
    vehicles = extract_digit(vehicles_line)
    global capacity
    capacity_line = file_content_list[6].rstrip('\n')
    capacity = extract_digit(capacity_line)
    global total_cost
    total_cost_line = file_content_list[7].rstrip('\n')
    total_cost = extract_digit(total_cost_line)
    # After getting the vertex count, build cost_matrix / demand_matrix (2-D).
    global cost_matrix
    global demand_matrix
    # BUG FIX: np.int was removed in NumPy 1.20+; the builtin int is the
    # exact equivalent dtype.
    cost_matrix = np.zeros((vertices + 1, vertices + 1), dtype=int)
    demand_matrix = np.zeros((vertices + 1, vertices + 1), dtype=int)
    set_cost_n_demand_matrix(file_content_list)
    # Shortest distance between every pair of vertices.
    global min_dist
    min_dist = np.zeros((vertices + 1, vertices + 1), dtype=int)
    set_min_dist()
    global free_set
    set_free_set(file_content_list)
# Extract digits from string
def extract_digit(stri):
    """Concatenate every digit character in *stri* and return it as an int."""
    digits = [ch for ch in stri if ch.isdigit()]
    return int("".join(digits))
# Set cost matrix and demand matrix
def set_cost_n_demand_matrix(file_content_list):
    """Fill cost_matrix and demand_matrix from the instance's edge lines.

    Edge records start at line 9 and run to the second-to-last line.
    """
    global cost_matrix
    global demand_matrix
    # Initialise all off-diagonal costs to "infinity"; slot [0][0] too,
    # since vertex ids are 1-based.
    for i in range(len(cost_matrix)):
        for j in range(len(cost_matrix[i])):
            if i != j:
                cost_matrix[i][j] = MAX_VALUE
    cost_matrix[0][0] = MAX_VALUE
    length = len(file_content_list)
    for i in range(9, length - 1):
        # PERF/readability fix: split each record once instead of five times.
        fields = file_content_list[i].rstrip('\n').split()
        x, y = int(fields[0]), int(fields[1])
        cost = int(fields[2])
        demand = int(fields[3])
        cost_matrix[x][y] = cost
        cost_matrix[y][x] = cost
        demand_matrix[x][y] = demand
        demand_matrix[y][x] = demand
# Set free set
def set_free_set(file_content_list):
    """Collect every edge with non-zero demand (a task) into free_set,
    storing both orientations of the edge."""
    global free_set
    length = len(file_content_list)
    for i in range(9, length - 1):
        # PERF/readability fix: split each record once instead of three times.
        fields = file_content_list[i].rstrip('\n').split()
        if int(fields[3]) != 0:
            x, y = int(fields[0]), int(fields[1])
            free_set.add((x, y))
            free_set.add((y, x))
# Set min_dist
def set_min_dist():
    """Seed min_dist from cost_matrix, then run all-pairs shortest paths."""
    global min_dist
    global cost_matrix
    rows = len(cost_matrix)
    for a in range(rows):
        for b in range(len(cost_matrix[a])):
            min_dist[a][b] = cost_matrix[a][b]
    find_v_2_all_sp2()
def find_v_2_all_sp2():
    """Floyd–Warshall: relax min_dist through every intermediate vertex."""
    global min_dist
    global vertices
    n = vertices
    for mid in range(1, n + 1):
        for a in range(1, n + 1):
            for b in range(1, n + 1):
                through_mid = min_dist[a][mid] + min_dist[mid][b]
                if min_dist[a][b] > through_mid:
                    min_dist[a][b] = through_mid
# Dijkstra Algorithm, start from v
def find_v_2_all_sp(v):
    """Single-source shortest paths from v into min_dist.

    Currently unused (set_min_dist calls find_v_2_all_sp2 instead), kept
    for reference.
    """
    global min_dist
    # Vertices not yet finalised.
    not_visit = set(range(1, vertices + 1))
    # initialization:
    not_visit.remove(v)
    min_dist[v][v] = 0
    for other_v in range(vertices + 1):
        if cost_matrix[v][other_v] != 0:
            min_dist[v][other_v] = cost_matrix[v][other_v]
            min_dist[other_v][v] = cost_matrix[v][other_v]
    while len(not_visit) != 0:
        # Pick the unvisited vertex with the smallest tentative distance.
        best_dist = MAX_VALUE
        other_v_min = 0
        for other_v in not_visit:
            if min_dist[v][other_v] < best_dist:
                # BUG FIX: the original never updated the running minimum
                # (and shadowed the builtin `min`), so it picked the *last*
                # vertex below MAX_VALUE rather than the closest one.
                best_dist = min_dist[v][other_v]
                other_v_min = other_v
        not_visit.remove(other_v_min)
        # Relax every edge leaving the chosen vertex.
        for i in range(1, vertices + 1):
            if cost_matrix[other_v_min][i] != 0:
                candidate = min_dist[v][other_v_min] + cost_matrix[other_v_min][i]
                if min_dist[v][i] > candidate:
                    min_dist[v][i] = candidate
                    min_dist[i][v] = candidate
def path_scanning(i):
    """One randomized path-scanning pass: build routes covering every task.

    Works on a copy of free_set; greedily extends the current route with the
    nearest feasible task (random tie-breaking) until capacity is exhausted,
    then starts a new route. Results go into the globals all_routes and
    actual_total_cost.

    :param i: pass index (only useful for debug output).
    """
    global cost_matrix
    global demand_matrix
    global min_dist
    global free_set
    global all_routes
    global capacity
    global depot
    global actual_total_cost
    # global numberrr
    # print('----------',i,'---------')
    # print(free_set)
    # Private copy so free_set survives for the next pass.
    free_set_copy = set()
    for s in free_set:
        free_set_copy.add(s)
    actual_total_cost = 0
    all_routes = []
    while len(free_set_copy) != 0:
        one_route = []
        start_node = depot
        load = 0
        cost = 0
        task_v_to = depot
        while load < capacity:
            # Update start_node, load and cost by serving one more task.
            # consider_set holds the tasks nearest to start_node.
            consider_set = set()
            min_dist_value = MAX_VALUE
            # Build consider_set: all tasks tied for minimum deadhead distance.
            for task in free_set_copy:
                v_from = int(task[0])
                # v_to = int(task[1])
                # if demand_matrix[v_from][v_to] <= (capacity - load):  # capacity check (done below)
                if min_dist[start_node][v_from] == min_dist_value:
                    # Ties with the current nearest task stay candidates.
                    consider_set.add(task)
                elif min_dist[start_node][v_from] < min_dist_value:
                    # Strictly closer task found: reset candidates and best distance.
                    consider_set = set()
                    consider_set.add(task)
                    min_dist_value = min_dist[start_node][v_from]
            # l1 = len(consider_set)
            consider_set_copy = set()
            for s in consider_set:
                consider_set_copy.add(s)
            # Drop candidates that would exceed the remaining capacity.
            for task in consider_set_copy:
                v_from = int(task[0])
                v_to = int(task[1])
                if demand_matrix[v_from][v_to] > (capacity - load):
                    consider_set.remove(task)
            # l2 = len(consider_set)
            #
            # if l1 > l2:
            #     print(l1-l2)
            if len(consider_set) == 0:
                break
            # Choose one task to serve.
            task_choice = (-1, -1)
            # max_dist_value = -1
            # min_dist_value = MAX_VALUE
            if len(consider_set) == 1:
                for t in consider_set:
                    task_choice = t
            # Several equally-near tasks: break the tie at random.
            elif len(consider_set) > 1:
                task_choice = choice(list(consider_set))
            if task_choice == (-1, -1):
                break
            # else:
            #     numberrr += 1
            # print('task choice:',task_choice)
            task_v_from = int(task_choice[0])
            task_v_to = int(task_choice[1])
            free_tuple1 = (task_v_from, task_v_to)
            free_tuple2 = (task_v_to, task_v_from)
            # Remove both orientations of the served edge.
            free_set_copy.remove(free_tuple1)
            free_set_copy.remove(free_tuple2)
            load = load + demand_matrix[task_v_from][task_v_to]
            cost = cost + min_dist[start_node][task_v_from] + cost_matrix[task_v_from][task_v_to]
            start_node = task_v_to
            one_route.append(task_choice)
        # print(one_route)
        all_routes.append(one_route)
        # Add the deadhead cost of returning to the depot.
        cost = cost + min_dist[task_v_to][depot]
        actual_total_cost += cost
def main():
    """Run 1000 randomized path-scanning passes and print the best solution
    in the 's ...' / 'q cost' output format."""
    global start
    global run_time
    global termination
    global random_seed
    global cost_matrix
    global demand_matrix
    global all_routes
    global actual_total_cost
    start = time.time()
    # CARP_instance_file = '/Users/wangyutong/Repository/store/CS/大三上/人工智能/carp/Proj2_Carp/CARP_samples/egl-e1-A.dat'
    # CARP_instance_file = sys.argv[1]
    #
    # termination = sys.argv[3]
    #
    # random_seed = sys.argv[5]
    # random.seed(random_seed)
    CARP_instance_file = 'val7A.dat'
    # readlines() returns the file as a list of raw lines.
    file_content_list = open(CARP_instance_file).readlines()
    # Populate every global: header scalars, cost_matrix, demand_matrix,
    # min_dist (all 2-D arrays) and free_set.
    set_globals(file_content_list)
    # for i in demand_matrix:
    #     print(i)
    # print(min_dist)
    # count = 0
    # for c_l in cost_matrix:
    #     for c in c_l:
    #         if c != 0:
    #             count+=1
    # # print(count)
    # print(min_dist)
    start_time = time.time()
    for i in range(1000):
        path_scanning(i)
        # print('all_routes:',all_routes)
        # print('---------------------------')
        all_routes_list.append(all_routes)
        actual_total_cost_list.append(actual_total_cost)
    end_time = time.time()
    print("totaltime",end_time-start_time)
    # Select the cheapest of the 1000 passes (slot 0 of each list is a dummy).
    min_cost = MAX_VALUE
    min_idx = 0
    for i in range(1, 1001):
        print(actual_total_cost_list[i])
        if actual_total_cost_list[i] < min_cost:
            min_cost = actual_total_cost_list[i]
            min_idx = i
    all_routes = all_routes_list[min_idx]
    actual_total_cost = actual_total_cost_list[min_idx]
    # From here on we use: all_routes & actual_total_cost.
    res_line1 = ''
    res_line1 += 's '
    for i in range(len(all_routes)):
        # Emit route i as 0,(v1,v2),...,0
        res_line1 += '0,'
        for task in all_routes[i]:
            res_line1 += '(' + str(task[0]) + ',' + str(task[1]) + ')' + ','
        if i == len(all_routes) - 1:
            res_line1 += '0'
        else:
            res_line1 += '0,'
    print(res_line1)
    res_line2 = 'q ' + str(actual_total_cost)
    print(res_line2)
    # print(required_edges)
    # print(numberrr/500)
    # print(all_routes)
    # print('depot:',depot)
    # t_cost = 0
    #
    # for each_r in all_routes:
    #     print(each_r)
    #     start = depot
    #     each_demand = 0
    #     for each_t in each_r:
    #         # print(each_t)
    #         # print('end')
    #         each_demand += demand_matrix[int(each_t[0])][int(each_t[1])]
    #         t_cost += min_dist[start][int(each_t[0])]
    #         t_cost += cost_matrix[int(each_t[0])][int(each_t[1])]
    #         start = int(each_t[1])
    #     print(each_demand)
    #     t_cost += min_dist[start][depot]
    #
    #
    # res_line2 = 'q ' + str(t_cost)
    # print(res_line2)
    run_time = time.time() - start
    # print(run_time)
# Script entry point.
if __name__ == '__main__':
    main()
|
# Generated by Django 3.0.6 on 2020-05-20 22:56
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the PerfilCust model from the bookflixapp app."""

    dependencies = [
        ('bookflixapp', '0007_auto_20200520_1930'),
    ]

    operations = [
        migrations.DeleteModel(
            name='PerfilCust',
        ),
    ]
|
# convert to R - doing
# do analysis -> t.test or anova? ANOVA
from csep.loglikelihood import calcLogLikelihood as loglikelihood
import gaModel.etasGaModelNP as etasGaModelNP
import models.model as model
import models.modelEtasGa as etasGa
"""
This file needs to be undestood -> what does it do?
"""
def loadModelSC(type, year, region, depth, i):
    """Load a saved model for (type, region, depth, year, run i) and
    hybridise it with a fixed ETAS simulation catalogue.

    NOTE(review): `type` shadows the builtin; consider renaming.
    NOTE(review): the `elif region == 'EastJapan'` / `else` below chain off
    the *third* `if` only. So e.g. type == 'gaModel' with a non-EastJapan
    region still falls into the final `else`, and the gaModel load is
    overwritten by the listgamodel one — confirm this is intended.
    """
    if type == 'gaModel':
        filename = '../Zona3/scModel/gamodel' + region + '_' + \
            str(depth) + '_' + str(year) + str(i) + '.txt'
        modelo = etasGa.loadModelFromFile(filename)
    if type == 'sc_hybrid_gaModel':
        filename = '../Zona2/sc_hybrid_gaModel/hybrid_gaModel' + region + \
            '_' + str(depth) + '_' + str(year) + '_' + str(i) + '.txt'
        modelo = etasGa.loadModelFromFile(filename)
    if type == 'sc_hybrid_ListaGA_New':
        filename = '../Zona2/sc_hybrid_ListaGA_New/hybrid_ListaGA_New' + \
            region + '_' + str(depth) + '_' + str(year) + '_' + str(i) + '.txt'
        modelo = etasGa.loadModelFromFile(filename)
    elif region == 'EastJapan':
        filename = '../Zona3/scModel/eastgamodel' + region + \
            '_' + str(depth) + '_' + str(year) + str(i) + '.txt'
        modelo = etasGa.loadModelFromFile(filename)
    else:
        filename = '../Zona3/scModel/listgamodel' + region + \
            '_' + str(depth) + '_' + str(year) + str(i) + '.txt'
        modelo = etasGa.loadModelFromFile(filename)
    # Hybridise the loaded model with the shared ETAS simulation files.
    fileEtasim = "../Zona/paper_exp/etasim1.txt"
    modelo = etasGa.simpleHibrid(
        modelo,
        fileEtasim,
        "../Zona/paper_exp/testeModelCatalog.txt")
    return modelo
def saveModelSC(modelL, type, year, region, depth, i, minMag, maxMag):
    """Persist a magnitude-filtered model to the path matching its type.

    NOTE(review): only the first `if` and the final `else` actually save;
    the two sc_hybrid_* branches build `filename` and then discard it.
    The final `else` pairs with the *third* `if`, so for type == 'gaModel'
    the model is saved twice (gamodelSC and listgamodel) — confirm intent.
    `file` is computed but never used; `type` shadows the builtin.
    """
    file = type.split('_')
    if type == 'gaModel':
        etasGa.saveModelToFile(
            modelL,
            '../Zona2/magStudy/gamodelSC/' +
            region +
            '_' +
            str(depth) +
            '_' +
            str(year) +
            str(i) +
            '_' +
            str(minMag) +
            ".txt")
    if type == 'sc_hybrid_gaModel':
        filename = "../Zona2/dataForR/magStudy/sc_hybrid_gaModel/" + region + \
            "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
    if type == 'sc_hybrid_ListaGA_New':
        filename = "../Zona2/dataForR/magStudy/sc_hybrid_ListaGA_New/" + region + \
            "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
    else:
        etasGa.saveModelToFile(
            modelL,
            '../Zona2/magStudy/listgamodel/' +
            region +
            '_' +
            str(depth) +
            '_' +
            str(year) +
            str(i) +
            '_' +
            str(minMag) +
            ".txt")
def calcLogLikelihoodMagInterval(region, year, year_end, modelL):
    """Score modelL against the real catalogue for (region, year); store the
    score on modelL.loglikelihood and return modelL. (year_end is unused.)"""
    observed_path = '../Zona2/realData/' + region + 'real' + "_" + str(year) + '.txt'
    modelO = model.loadModelFromFile(observed_path)
    modelL.loglikelihood = loglikelihood(modelL, modelO)
    return modelL
def filterMag(modelo, minMag, maxMag):
    """Return a fresh model whose bins count, per bin, the positive
    magnitudes strictly inside the open interval (minMag, maxMag)."""
    filtered = etasGa.newModel(modelo.definitions, 1)
    for magValues, index in zip(modelo.magnitudeValues, range(len(modelo.bins))):
        for mag in magValues:
            if mag > 0 and minMag < mag < maxMag:
                filtered.bins[index] += 1
    return filtered
def converter2leastBest(type, region, depth, minMag,
                        maxMag, year_begin, year_end):
    """Gather the 10 per-run loglikelihood values for each year into one
    file consumable by R.

    NOTE(review): the output-path `else` at the bottom pairs with the
    `sc_hybrid_ListaGA_New` `if` only, so for type == 'gaModel' the
    gamodelSC path is set and then overwritten by the listgamodel path
    before writing — confirm which destination is intended.
    NOTE(review): only data[0:10] is written even though each of the 10
    input files may contribute several lines to `data`.
    """
    file = type.split('_')
    year = year_begin
    while(year <= year_end):
        data = list()
        for i in range(10):
            # Resolve the input loglikelihood file for run i; later branches
            # can override earlier choices (EastJapan, clustered, hybrid...).
            if type == 'gaModel':
                filename = '../Zona2/magStudy/gamodelSC/' + region + '_' + \
                    str(depth) + '_' + str(year) + str(i) + '_' + \
                    str(minMag) + ".txtloglikelihood.txt"
            else:
                filename = '../Zona2/magStudy/listgamodel/' + region + '_' + \
                    str(depth) + '_' + str(year) + str(i) + '_' + \
                    str(minMag) + ".txtloglikelihood.txt"
            if region == 'EastJapan':
                filename = '../Zona2/magStudy/eastgamodel/'+region+'_'+str(depth)+'_'+str(year)+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
            if (file[0] == 'clustered' or file[0] == 'clusteredII'):
                if len(file) == 4:  # new
                    filename = "../Zona2/magStudy/"+type+'/'+file[1]+'_'+file[2]+'_'+file[3]+region+"_"+str(depth)+"_"+str(year)+'_'+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
                elif file[1] == 'hybrid':
                    filename = "../Zona2/magStudy/"+type+'/'+file[1]+'_'+file[2]+'_'+region+"_"+str(depth)+"_"+str(year)+'_'+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
                else:
                    filename = "../Zona2/magStudy/"+type+'/'+region+"_"+str(depth)+"_"+str(year)+'_'+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
            elif (file[0] == 'hybrid'):
                filename = "../Zona2/magStudy/"+type+'/'+type+region+"_"+str(depth)+"_"+str(year)+'_'+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
            else:
                filename = "../Zona2/magStudy/"+type+'/'+region+"_"+str(depth)+"_"+str(year)+str(i)+'_'+str(minMag)+".txtloglikelihood.txt"
            f = open(filename, "r")
            for line in f:
                info = line.split()
                data.append(float(info[0]))
            f.close()
        # Resolve the output path for this (type, region, year, minMag).
        if type == 'gaModel':
            filename = "../Zona2/dataForR/magStudy/gamodelSC/" + region + \
                "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
        if type == 'sc_hybrid_gaModel':
            filename = "../Zona2/dataForR/magStudy/sc_hybrid_gaModel/" + region + \
                "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
        if type == 'sc_hybrid_ListaGA_New':
            filename = "../Zona2/dataForR/magStudy/sc_hybrid_ListaGA_New/" + region + \
                "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
        else:
            filename = "../Zona2/dataForR/magStudy/listgamodel/" + region + \
                "_" + str(depth) + "_" + str(year) + '_' + str(minMag) + ".txt"
        # Write only the first 10 collected values, one per line.
        with open(filename, 'w') as f:
            for i in range(10):
                f.write(str(data[i]))
                f.write("\n")
        year += 1
def main():
    """Filter saved models by unit-wide magnitude bands, score them against
    real catalogues, save them, then convert the per-run loglikelihoods
    into R-ready files."""
    print('filtering')
    depth = 100
    types = (
        'sc_hybrid_gaModel',
        'sc_hybrid_ListaGA_New',
        'gaModel',
        'listaGA_new')
    regions = ('EastJapan', 'Kansai', 'Kanto', 'Tohoku')
    year_end = 2010
    for region in regions:
        for t in types:
            year = 2005
            while(year <= year_end):
                minMag = 3.0
                # Slide a one-magnitude-wide window upward from 3.0.
                while minMag <= 9.0:
                    maxMag = minMag + 1.0
                    for numExec in range(10):
                        modelo = loadModelSC(t, year, region, depth=depth, i=numExec)
                        modelo = filterMag(modelo, minMag=minMag, maxMag=maxMag)
                        modelo = calcLogLikelihoodMagInterval(region, year=year, year_end=year_end, modelL=modelo)
                        saveModelSC(modelo, t, year, region, depth, numExec, minMag, maxMag)
                    minMag = minMag + 1.0
                year += 1
    print('converting')
    types = (
        'sc_hybrid_gaModel',
        'sc_hybrid_ListaGA_New',
        'gaModel',
        'listaGA_new')
    for region in regions:
        for t in types:
            minMag = 3.0
            while minMag <= 9.0:
                maxMag = minMag + 1.0
                converter2leastBest(
                    t,
                    region,
                    depth,
                    minMag,
                    maxMag,
                    year_begin=2005,
                    year_end=2010)
                minMag = minMag + 1.0
# Script entry point.
if __name__ == "__main__":
    main()
|
from smtplib import SMTP_SSL
from email.message import EmailMessage
from getpass import getpass
from collections import defaultdict
def get_user_input(message, category=str):
    """Prompt until the input parses as *category*; return the converted value.

    :param message: prompt shown to the user.
    :param category: type/callable used to convert the raw input (e.g. int).
    """
    while True:
        try:
            return category(input(message))
        except ValueError:
            # BUG FIX: f"{category}" rendered as "<class 'int'>"; use the
            # type's name for a readable message.
            print(f"Please input a {category.__name__}.")
class ShoppingList():
    """A multiset of products that can be emailed as plain text."""

    def __init__(self):
        # product name -> accumulated quantity (missing keys default to 0)
        self.items = defaultdict(int)

    def __str__(self):
        # One "product x quantity" line per distinct product.
        return ("\n".join(f"{product} x {quantity}"
                          for product, quantity in self.items.items()))

    def add_item(self, product, quantity):
        """Add *quantity* units of *product*; quantities accumulate across calls."""
        self.items[product] += quantity

    def email_to(self, from_email, password, *recipients):
        """Send the list via Gmail SMTP over SSL (port 465).

        :param from_email: sender address, also used as the SMTP login user.
        :param password: account (or app) password for the login.
        :param recipients: one or more destination addresses.
        """
        email = EmailMessage()
        email['Subject'] = "Shopping List"
        email['From'] = from_email
        email['To'] = recipients
        message = str(self)
        email.set_content(message)
        s = SMTP_SSL('smtp.gmail.com', 465)
        s.ehlo()
        s.login(from_email, password)
        s.send_message(email)
        s.quit()
        print("\nThe email has been sent.")
def main():
    """Interactively build a shopping list, then email it to one recipient."""
    name = input("Input your name: ")
    print(f"Hi, {name}!")
    shopping_list = ShoppingList()
    # Keep collecting products until the sentinel word "stop" is entered.
    while (product := get_user_input(
            "Input the product name (input \"stop\" when you are done): ")) != "stop":
        quantity = get_user_input("Input the product quantity: ", int)
        shopping_list.add_item(product, quantity)
    print(f"\nThese products have been added to the shopping list:\n{shopping_list}")
    email = input("\nEmail: ")
    password = getpass("Password: ")
    recipient = input("Recipient's email: ")
    shopping_list.email_to(email, password, recipient)
    print("\nHave a nice day!")


if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
# See github page to report issues or to contribute:
# https://github.com/hssm/advanced-browser
class InternalFields:
    """Advanced Browser add-on columns exposing Anki's internal note/card fields.

    Columns are created in onAdvBrowserLoad and offered to the browser's
    context menu in onBuildContextMenu.
    """

    # (type, name, onData, onSort) specs for the note-level columns.
    # onData receives (card, note, type); onSort returns the SQL sort key.
    NOTE_COLUMN_SPECS = [
        ("nid", "Note ID", lambda c, n, t: n.id, lambda: "n.id"),
        ("nguid", "Note Guid", lambda c, n, t: n.guid, lambda: "n.guid"),
        ("nmid", "Model ID", lambda c, n, t: n.mid, lambda: "n.mid"),
        ("nusn", "Note USN", lambda c, n, t: n.usn, lambda: "n.usn"),
        ("nfields", "Note Fields",
         lambda c, n, t: u"\u25A0".join(n.fields), lambda: "n.flds"),
        ("nflags", "Note Flags", lambda c, n, t: n.flags, lambda: "n.flags"),
        ("ndata", "Note Data", lambda c, n, t: n.data, lambda: "n.data"),
    ]

    # (type, name, onData, onSort) specs for the card-level columns.
    CARD_COLUMN_SPECS = [
        ("cid", "Card ID", lambda c, n, t: c.id, lambda: "c.id"),
        ("cdid", "Deck ID", lambda c, n, t: c.did, lambda: "c.did"),
        ("codid", "Original Deck ID", lambda c, n, t: c.odid, lambda: "c.odid"),
        ("cord", "Card Ordinal", lambda c, n, t: c.ord, lambda: "c.ord"),
        ("cusn", "Card USN", lambda c, n, t: c.usn, lambda: "c.usn"),
        ("ctype", "Card Type", lambda c, n, t: c.type, lambda: "c.type"),
        ("cqueue", "Card Queue", lambda c, n, t: c.queue, lambda: "c.queue"),
        ("cleft", "Card Left", lambda c, n, t: c.left, lambda: "c.left"),
        ("codue", "Card Original Due",  # I think?
         lambda c, n, t: c.odue, lambda: "c.odue"),
        ("cflags", "Card Flags", lambda c, n, t: c.flags, lambda: "c.flags"),
    ]

    def __init__(self):
        self.noteColumns = []
        self.cardColumns = []

    def onBuildContextMenu(self, contextMenu):
        """Offer every created column under a Note / Card submenu."""
        nGroup = contextMenu.newSubMenu("- Note (internal) -")
        cGroup = contextMenu.newSubMenu("- Card (internal) -")
        for cc in self.noteColumns:
            nGroup.addItem(cc)
        for cc in self.cardColumns:
            cGroup.addItem(cc)

    def onAdvBrowserLoad(self, advBrowser):
        """Called when the Advanced Browser add-on has finished
        loading. Create and add all custom columns owned by this
        add-on here.
        """
        # Clear existing state
        self.noteColumns = []
        self.cardColumns = []
        # Register each column from its spec instead of repeating the
        # newCustomColumn boilerplate 17 times.
        for type_, name, onData, onSort in self.NOTE_COLUMN_SPECS:
            self.noteColumns.append(advBrowser.newCustomColumn(
                type=type_, name=name, onData=onData, onSort=onSort))
        for type_, name, onData, onSort in self.CARD_COLUMN_SPECS:
            self.cardColumns.append(advBrowser.newCustomColumn(
                type=type_, name=name, onData=onData, onSort=onSort))
# Module-level singleton instance of this add-on's column provider.
iff = InternalFields()
|
import re
import error as e
# Regexes accepted for literal values of each declared type.
# NOTE(review): the 'float' pattern has no trailing anchor, so strings with
# trailing characters (e.g. "1.2abc") would also match -- confirm intent.
data_type = {'int': r'^[0-9]+$',
             'float': r'^[0-9].?[0-9]*',
             'string': r'^(\").*(\")$'}
def type_check(words, values):
    """Semantic pass: report declarations whose literal value does not match
    the declared type's regex, then track value/type pairs.

    The parameters appear to be token containers produced by an earlier
    lexing stage; their exact structure is not visible from this file.
    """
    # NOTE(review): if `words` is a dict this collects its values; if it is a
    # list, `words[i]` indexes by *element* -- confirm the intended container.
    word_list = []
    for i in words:
        word_list.append(words[i])
    for i in range(len(word_list)):
        for type in data_type:  # NOTE(review): shadows the builtin `type`
            if word_list[i] == type:
                try:
                    # Expected token pattern: <type> <name> = <value>
                    if word_list[i + 2] == '=':
                        if not re.match(data_type[type], word_list[i + 3]):
                            print(e.bred, e.fblue, 'Type Error : Value Of', type, 'is incorrect')
                except Exception:
                    # NOTE(review): an IndexError near the end of the token
                    # stream lands here; the message has a typo ("eception")
                    # and prints the Exception class, not the caught instance.
                    print('semantic eception', Exception)
    # Second pass over `values`.
    # NOTE(review): this block looks inconsistent -- `value_type` is keyed both
    # by values[key] and by key[1], and value_type[values[key]] stores key[0]
    # yet is compared against values[key]; verify the intended semantics.
    value_type = {}
    for key in values:
        if values[key] in value_type:
            if value_type[values[key]] != values[key]:
                print('ValueError:', values[key], 'is not', key[0])
        value_type[key[1]] = key[0]
        value_type[values[key]] = key[0]
|
# WSGI entry point: select the Django settings module for the current
# deployment environment before anything from Django is imported.
import os
from environment import environment
# Must run before get_wsgi_application() so Django reads the right settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.%s" % environment)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
Python module to read from the PurpleCrane GrowApp database, and write to CROP database.
There is not quite a 1:1 mapping between the CROP tables/columns and the GrowApp tables/columns.
The philosophy in this module is to perform all the necessary logic to do the transformation in the
get_xyz functions, that then return DataFrames containing exactly the data that we want to put into
the CROP tables.
We can then generalize the functions for writing tables into the CROP database, including the
logic for not adding data that is already there (we use the 'growapp_id' column for this).
"""
import logging
from urllib import parse
import pandas as pd
from sqlalchemy import and_
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.dialects.postgresql import insert
from .db import connect_db, session_open, session_close
from .structure import (
LocationClass,
CropTypeClass,
BatchClass,
BatchEventClass,
HarvestClass,
EventType,
)
from .growapp_structure import (
LocationClass as GrowAppLocationClass,
ZoneClass as GrowAppZoneClass,
AisleClass as GrowAppAisleClass,
StackClass as GrowAppStackClass,
ShelfClass as GrowAppShelfClass,
BenchClass as GrowAppBenchClass,
CropClass as GrowAppCropClass,
BatchClass as GrowAppBatchClass,
BatchEventClass as GrowAppBatchEventClass,
)
from .utils import get_crop_db_session, query_result_to_array, log_upload_event
from .constants import (
GROWAPP_IP,
GROWAPP_DB,
GROWAPP_USER,
GROWAPP_PASSWORD,
GROWAPP_SCHEMA,
SQL_CONNECTION_STRING,
SQL_DBNAME,
SQL_ENGINE,
)
# Map GrowApp's integer batch-event codes onto our EventType enum.
BATCH_EVENT_TYPE_MAPPING = {
    0: EventType.none,
    10: EventType.weigh,
    20: EventType.propagate,
    30: EventType.transfer,
    40: EventType.harvest,
    99: EventType.edit,
}
def get_growapp_db_session(return_engine=False):
    """
    Open an SQLAlchemy session on the GrowApp database.

    Parameters
    ==========
    return_engine: bool, if True return the sqlalchemy engine as well as session

    Returns
    =======
    session: SQLAlchemy session object (or None if the connection failed)
    engine (optional): SQLAlchemy engine
    """
    # Password is URL-quoted in case it contains reserved characters.
    connection_string = "%s://%s:%s@%s" % (
        SQL_ENGINE,
        GROWAPP_USER,
        parse.quote(GROWAPP_PASSWORD),
        GROWAPP_IP,
    )
    success, log, engine = connect_db(connection_string, GROWAPP_DB)
    if not success:
        logging.info(log)
        return None
    session = session_open(engine)
    return (session, engine) if return_engine else session
def convert_growapp_foreign_key(growapp_df, growapp_column_name, CropDbClass):
    """Convert a foreign key column of data from a GrowApp table, and convert its values
    to the corresponding foreign key ids in the given Crop db table.

    Foreign key relations from GrowApp tables refer to the IDs in other GrowApp tables.
    This function takes a dataframe with such a column, and converts its values to the
    corresponding IDs in a Crop db table. The pairing is done by looking at the table
    CropDbClass, and matching its `id` column with its `growapp_id` column.

    Parameters
    ==========
    growapp_df: Dataframe of data from a GrowApp table, a foreign key column of which
        needs to be converted to Crop IDs
    growapp_column_name: The name of the column that needs converting
    CropDbClass: The Crop db class from which we should get the new values for the
        foreign key.

    Returns
    =======
    A copy of growapp_df, with values in growapp_column_name replaced with values from
    CropDbClass's id column
    """
    crop_session = get_crop_db_session()
    query = crop_session.query(CropDbClass.id, CropDbClass.growapp_id)
    crop_id_pairs = crop_session.execute(query).fetchall()
    session_close(crop_session)
    # Index by growapp_id so a join on the FK column maps old IDs -> Crop IDs.
    crop_id_pairs = pd.DataFrame(query_result_to_array(crop_id_pairs)).set_index(
        "growapp_id"
    )
    # The join brings in the Crop `id` column; drop the old FK column and
    # rename `id` to take its place.
    growapp_df = (
        growapp_df.join(crop_id_pairs, on=growapp_column_name)
        .drop(columns=[growapp_column_name])
        .rename(columns={"id": growapp_column_name})
    )
    return growapp_df
def add_new_location(zone, aisle, stack, shelf):
    """
    Add a new location to the CROP database, and return its primary key.

    Parameters
    ==========
    zone: str
    aisle: str
    stack: int (aka column)
    shelf: int

    Returns
    =======
    location_id: int, PK of the newly created location in the CROP DB.
    """
    session = get_crop_db_session()
    location = LocationClass(zone=zone, aisle=aisle, column=stack, shelf=shelf)
    session.add(location)
    # Commit so the autogenerated primary key is populated on the instance.
    session.commit()
    location_id = location.id  # renamed from `id`, which shadowed the builtin
    session_close(session)
    logging.info(f"Returning new location with id {location_id}")
    return location_id
def get_location_id(growapp_batch_id):
    """
    Follow the chain of foreign keys in the growapp database, to get an
    aisle/column/shelf, which we can then use to query the Location table in our
    database, and get a location_id.

    Parameters
    ==========
    growapp_batch_id: uuid, foreign key corresponding to the Batch in the Growapp DB

    Raises
    ======
    RuntimeError: If the chain of location information is not found in the GrowApp DB.
    ValueError: If the location is not found in the CROP DB.

    Returns
    =======
    A location ID, i.e. a primary key for the CROP location table.
    """
    grow_session = get_growapp_db_session()
    # Batch -> its current bench
    batch_query = grow_session.query(GrowAppBatchClass.current_bench_id).filter(
        GrowAppBatchClass.id == growapp_batch_id
    )
    results = grow_session.execute(batch_query).fetchall()
    results_array = query_result_to_array(results)
    if len(results_array) != 1:
        raise RuntimeError("Couldn't find batch_id {}".format(growapp_batch_id))
    growapp_bench_id = results_array[0]["current_bench_id"]
    # now query the Bench table with that ID
    bench_query = grow_session.query(GrowAppBenchClass.location_id).filter(
        GrowAppBenchClass.id == growapp_bench_id
    )
    results = grow_session.execute(bench_query).fetchall()
    results_array = query_result_to_array(results)
    if len(results_array) != 1:
        raise RuntimeError("Couldn't find bench_id {}".format(growapp_bench_id))
    growapp_location_id = results_array[0]["location_id"]
    # now query the Location table with that ID
    location_query = grow_session.query(
        GrowAppLocationClass.zone_id,
        GrowAppLocationClass.aisle_id,
        GrowAppLocationClass.stack_id,
        GrowAppLocationClass.shelf_id,
    ).filter(GrowAppLocationClass.id == growapp_location_id)
    results = grow_session.execute(location_query).fetchall()
    results_array = query_result_to_array(results)
    if len(results_array) != 1:
        raise RuntimeError("Couldn't find location_id {}".format(growapp_location_id))
    grow_zone_id = results_array[0]["zone_id"]
    grow_aisle_id = results_array[0]["aisle_id"]
    grow_stack_id = results_array[0]["stack_id"]
    grow_shelf_id = results_array[0]["shelf_id"]
    # Resolve the four component IDs to their human-readable names.
    # NOTE(review): this assumes each ID matches exactly one row in its table;
    # confirm uniqueness constraints in the GrowApp schema.
    locname_query = grow_session.query(
        GrowAppZoneClass.name.label("zone"),
        GrowAppAisleClass.name.label("aisle"),
        GrowAppStackClass.name.label("stack"),
        GrowAppShelfClass.name.label("shelf"),
    ).filter(
        and_(
            GrowAppZoneClass.id == grow_zone_id,
            GrowAppAisleClass.id == grow_aisle_id,
            GrowAppStackClass.id == grow_stack_id,
            GrowAppShelfClass.id == grow_shelf_id,
        )
    )
    results = grow_session.execute(locname_query).fetchall()
    results_array = query_result_to_array(results)
    if len(results_array) > 1:
        raise ValueError("Got multiple locations")
    elif len(results_array) < 1:
        raise ValueError(
            "Couldn't find the GrowApp location "
            f"{grow_zone_id}, {grow_aisle_id}, {grow_stack_id}, {grow_shelf_id}."
        )
    growapp_location = results_array[0]
    session_close(grow_session)
    # fill in missing "zone" by hand
    logging.info(f"Found location {growapp_location}")
    if not growapp_location["zone"]:
        try:
            aisle = growapp_location["aisle"]
        except KeyError:
            aisle = None
        # Hard-coded aisle -> tunnel mapping used when the zone is absent.
        if aisle == "A" or aisle == "B":
            growapp_location["zone"] = "Tunnel3"
        elif aisle == "C":
            growapp_location["zone"] = "Tunnel5"
        elif aisle == "D":
            growapp_location["zone"] = "Tunnel6"
        elif aisle == "E":
            growapp_location["zone"] = "Tunnel4"
        else:
            growapp_location["zone"] = "N/A"
    # Query the Crop Location table to find the corresponding location ID.
    crop_session = get_crop_db_session()
    query = crop_session.query(LocationClass.id).filter(
        and_(
            LocationClass.zone == growapp_location["zone"],
            LocationClass.aisle == growapp_location["aisle"],
            LocationClass.column == int(growapp_location["stack"]),
            LocationClass.shelf == int(growapp_location["shelf"]),
        )
    )
    results = crop_session.execute(query).fetchall()
    session_close(crop_session)
    results_array = query_result_to_array(results)
    if len(results_array) == 0:
        # Unknown location: create it on the fly and return the new PK.
        logging.info(
            f"Location {growapp_location} not found in the CROP DB. Will create it"
        )
        return add_new_location(
            growapp_location["zone"],
            growapp_location["aisle"],
            int(growapp_location["stack"]),
            int(growapp_location["shelf"]),
        )
    else:
        return results_array[0]["id"]
def get_croptype_data():
    """
    Read from the table of crops in the GrowApp database.

    Returns
    =======
    crop_df: pandas DataFrame of results, with the GrowApp `id` renamed
        to `growapp_id`
    """
    session = get_growapp_db_session()
    columns = (
        GrowAppCropClass.id,
        GrowAppCropClass.name,
        GrowAppCropClass.seed_density,
        GrowAppCropClass.propagation_period,
        GrowAppCropClass.grow_period,
        GrowAppCropClass.is_pre_harvest,
    )
    query = session.query(*columns)
    # Convert to an array of dicts before building the DataFrame, to avoid
    # https://github.com/pandas-dev/pandas/issues/40682
    rows = query_result_to_array(session.execute(query).fetchall())
    session_close(session)
    crop_df = pd.DataFrame(rows).rename(columns={"id": "growapp_id"})
    return crop_df
def get_batch_data(dt_from=None, dt_to=None):
    """
    Read from the 'Batch' table in the GrowApp database, and transform
    into the format expected by the corresponding table in the CROP db.

    Parameters
    ==========
    dt_from, dt_to: datetime, time bounds for the query

    Returns
    =======
    batch_df: pandas DataFrame
    """
    grow_session = get_growapp_db_session()
    query = grow_session.query(
        GrowAppBatchClass.id,
        GrowAppBatchClass.tray_size,
        GrowAppBatchClass.number_of_trays,
        GrowAppBatchClass.crop_id,
    )
    # Optional time window on the batch status date.
    if dt_from:
        query = query.filter(GrowAppBatchClass.status_date > dt_from)
    if dt_to:
        query = query.filter(GrowAppBatchClass.status_date < dt_to)
    rows = grow_session.execute(query).fetchall()
    session_close(grow_session)
    batch_df = pd.DataFrame(query_result_to_array(rows))
    if len(batch_df) == 0:
        return batch_df
    batch_df = batch_df.rename(columns={"id": "growapp_id"})
    # Map the GrowApp crop foreign key onto our own CropType primary keys.
    batch_df = convert_growapp_foreign_key(batch_df, "crop_id", CropTypeClass)
    return batch_df.rename(columns={"crop_id": "crop_type_id"})
def get_batchevent_data(dt_from=None, dt_to=None):
    """
    Read from the 'BatchEvent' table in the GrowApp database, and transform
    into the format expected by the corresponding table in the CROP db.

    Parameters
    ==========
    dt_from, dt_to: datetime, time bounds for the query

    Returns
    =======
    batchevent_df: pandas DataFrame
    """
    grow_session = get_growapp_db_session()
    query = grow_session.query(
        GrowAppBatchEventClass.id,
        GrowAppBatchEventClass.type_,
        GrowAppBatchEventClass.was_manual,
        GrowAppBatchEventClass.batch_id,
        GrowAppBatchEventClass.event_happened,
        GrowAppBatchEventClass.description,
        GrowAppBatchEventClass.next_action_days,
        GrowAppBatchEventClass.next_action,
    )
    # Optional time window on the event timestamp.
    if dt_from:
        query = query.filter(GrowAppBatchEventClass.event_happened > dt_from)
    if dt_to:
        query = query.filter(GrowAppBatchEventClass.event_happened < dt_to)
    results = grow_session.execute(query).fetchall()
    session_close(grow_session)
    results_array = query_result_to_array(results)
    batchevents_df = pd.DataFrame(results_array)
    if len(batchevents_df) == 0:
        return batchevents_df
    # convert some columns to datetime (unparseable values become NaT)
    batchevents_df["next_action"] = pd.to_datetime(
        batchevents_df["next_action"], errors="coerce"
    )
    # convert NaT to None, so the DB insert receives NULL rather than NaT
    batchevents_df["next_action"] = (
        batchevents_df["next_action"]
        .astype(object)
        .where(batchevents_df.next_action.notnull(), None)
    )
    batchevents_df["event_happened"] = pd.to_datetime(
        batchevents_df["event_happened"], errors="coerce"
    )
    # look up event type in our scheme
    batchevents_df["type_"] = batchevents_df["type_"].apply(
        lambda x: BATCH_EVENT_TYPE_MAPPING[x]
    )
    batchevents_df.rename(
        columns={
            "id": "growapp_id",
            "next_action": "next_action_time",
            "event_happened": "event_time",
            "type_": "event_type",
        },
        inplace=True,
    )
    # Only transfer events carry a location; resolve it per batch via the
    # GrowApp foreign-key chain (one DB round-trip per transfer event).
    batchevents_df.loc[:, "location_id"] = None
    transfer_events = batchevents_df.loc[:, "event_type"] == EventType.transfer
    batchevents_df.loc[transfer_events, "location_id"] = batchevents_df.loc[
        transfer_events, "batch_id"
    ].apply(get_location_id)
    # we need to get the batch_id from our batch table
    batchevents_df = convert_growapp_foreign_key(batchevents_df, "batch_id", BatchClass)
    # drop some unused columns
    batchevents_df.drop(
        columns=["next_action_days", "was_manual", "description"], inplace=True
    )
    return batchevents_df
def get_harvest_data(dt_from=None, dt_to=None):
    """
    Combine info from the growapp Batch and BatchEvent tables to
    fill a dataframe ready to go into the Harvest table in the CROP db.

    Parameters
    ==========
    dt_from, dt_to: datetime, time bounds for the query

    Returns
    =======
    harvest_df: pandas DataFrame containing all columns needed for the Harvest table.
    """
    grow_session = get_growapp_db_session()
    grow_query = grow_session.query(
        GrowAppBatchClass.id,
        GrowAppBatchClass.harvested_event_id,
        GrowAppBatchClass.yield_,
        GrowAppBatchClass.waste_disease,
        GrowAppBatchClass.waste_defect,
        GrowAppBatchClass.overproduction,
    )
    # Optional time window on the batch status date.
    if dt_from:
        grow_query = grow_query.filter(GrowAppBatchClass.status_date > dt_from)
    if dt_to:
        grow_query = grow_query.filter(GrowAppBatchClass.status_date < dt_to)
    results = grow_session.execute(grow_query).fetchall()
    results_array = query_result_to_array(results)
    session_close(grow_session)
    df = pd.DataFrame(results_array)
    if len(df) == 0:
        return df
    # Keep only batches that have actually been harvested.
    # .copy() so the in-place rename below operates on a real frame rather
    # than a view of the filtered result (avoids SettingWithCopyWarning).
    df = df[df.harvested_event_id.notnull()].copy()
    df.rename(
        columns={
            "id": "growapp_id",
            "harvested_event_id": "batch_event_id",
            "yield_": "crop_yield",
            "overproduction": "over_production",
        },
        inplace=True,
    )
    # get the batchevent_id from our batchevent table
    df = convert_growapp_foreign_key(df, "batch_event_id", BatchEventClass)
    # One location lookup per harvested batch.
    df.loc[:, "location_id"] = df.loc[:, "growapp_id"].apply(get_location_id)
    return df
def write_new_data(data_df, DbClass):
    """
    Write rows from the input dataframe to the DbClass table in the CROP database.
    Relies on there being a column 'growapp_id' in the table, in order to
    identify existing rows.

    Parameters
    ==========
    data_df: pandas DataFrame containing data from the GrowApp db.
    DbClass: SQLAlchemy ORM class, corresponding to target table, as defined in structure.py

    Returns
    =======
    success: bool
    """
    if len(data_df) == 0:
        logging.info(f"==> No new data to write for {DbClass.__tablename__}")
        return True
    session, engine = get_crop_db_session(return_engine=True)
    try:
        DbClass.__table__.create(bind=engine)
    except ProgrammingError:
        # The table already exists.
        pass
    logging.info(f"==> Will write {len(data_df)} rows to {DbClass.__tablename__}")
    # loop over all rows in the dataframe
    for _, row in data_df.iterrows():
        # Postgres upsert: silently skip rows whose growapp_id already exists.
        insert_stmt = insert(DbClass).values(**(row.to_dict()))
        do_nothing_stmt = insert_stmt.on_conflict_do_nothing(
            index_elements=["growapp_id"]
        )
        session.execute(do_nothing_stmt)
    logging.info(f"Finished writing to {DbClass.__tablename__}")
    session.commit()
    session_close(session)
    return True
def import_growapp_data(dt_from=None, dt_to=None):
    """
    For initial creation and filling of the CROP database tables, we need to query
    everything in the GrowApp DB. After that, can use timestamp ranges to filter
    the batch and batchevent queries.

    Parameters
    ==========
    dt_from: datetime, starting period for Batch and BatchEvent queries
    dt_to: datetime, ending period for Batch and BatchEvent queries

    Returns
    =======
    success: bool
    """

    def _write_and_log(df, DbClass, label):
        # Write one dataframe and log the outcome of *this* write.
        # (Previously the cumulative success flag was logged, so a failure in
        # an earlier table mis-reported every later table as failed too.)
        ok = write_new_data(df, DbClass)
        if ok:
            logging.info("Successfully wrote to %s table", label)
        else:
            logging.info("Problem writing to %s table", label)
        return ok

    success = True
    # always query the whole crop type table - it will be small
    logging.info("Querying Growapp crop table")
    success &= _write_and_log(get_croptype_data(), CropTypeClass, "CropType")
    logging.info("Querying Growapp batch table")
    success &= _write_and_log(get_batch_data(dt_from, dt_to), BatchClass, "Batch")
    logging.info("Querying Growapp batch event table")
    success &= _write_and_log(
        get_batchevent_data(dt_from, dt_to), BatchEventClass, "BatchEvent"
    )
    logging.info("Querying Growapp batch table to get harvest data")
    success &= _write_and_log(
        get_harvest_data(dt_from, dt_to), HarvestClass, "Harvest"
    )
    return success
|
from typing import Tuple, Callable, Optional
from computegraph.types import Function
from summer2.parameters import Time, Data
from summer2.experimental.model_builder import ModelBuilder
from .parameters import TestingToDetection, Population
from autumn.core.inputs import get_population_by_agegroup
from autumn.settings import COVID_BASE_AGEGROUPS
from autumn.model_features.curve.interpolate import build_sigmoidal_multicurve, get_scale_data
from jax import numpy as jnp
def find_cdr_function_from_test_data(
    builder: ModelBuilder,
    cdr_params: TestingToDetection,
    iso3: str,
    region: Optional[str],
    year: int,
    smoothing_period=1,
) -> Callable:
    """
    Sort out case detection rate from testing numbers, sequentially calling the functions above as
    required.

    Args:
        builder: Model builder used to register the CDR computation graph
        cdr_params: The user-requests re the testing process
        iso3: The country
        region: The subregion of the country being simulated, if any
        year: The year from which the population data should come
        smoothing_period: The period in days over which testing data should be smoothed
    Return:
        The function that takes time as its input and returns the CDR
    """
    # Get the numbers of tests performed
    from autumn.core.inputs.testing.testing_data import get_testing_numbers_for_region

    test_df = get_testing_numbers_for_region(iso3, region)
    smoothed_test_df = test_df.rolling(window=smoothing_period).mean().dropna()

    # Convert to per capita testing rates
    total_pop = sum(get_population_by_agegroup(COVID_BASE_AGEGROUPS, iso3, region, year=year))
    per_capita_tests_df = smoothed_test_df / total_pop

    def cdr_from_tests_func(
        tests_per_capita, assumed_cdr_parameter, assumed_tests_parameter, floor_value
    ):
        # Solve for the single unknown parameter -b of
        # CDR = 1 - (1 - floor) * exp(-b * tests_per_capita)
        exponent_multiplier = (
            jnp.log((1.0 - assumed_cdr_parameter) / (1.0 - floor_value)) / assumed_tests_parameter
        )
        cdr = 1.0 - jnp.exp(exponent_multiplier * tests_per_capita) * (1.0 - floor_value)
        return cdr

    # Get the final CDR function
    times = per_capita_tests_df.index
    # Add the smoothed per capita testing rates
    per_capita_test_data = Data(jnp.array(per_capita_tests_df))
    # Add our function that computes CDR for these rates (and parameters)
    cdr_test_data = builder.get_mapped_func(
        cdr_from_tests_func, cdr_params, {"tests_per_capita": per_capita_test_data}
    )
    # Add an output that transforms this into scale_data for the sigmoidal curve interpolator
    cdr_ydata = Function(get_scale_data, [cdr_test_data])
    # Define a smoothed sigmoidal curve function
    cdr_smoothed_func = build_sigmoidal_multicurve(times)
    # Return the final Function object that will be used inside the model
    return Function(cdr_smoothed_func, [Time, cdr_ydata])
def get_cdr_func(
    builder: ModelBuilder,
    detect_prop: float,
    testing_params: TestingToDetection,
    pop_params: Population,
    iso3: str,
) -> Tuple[callable, callable]:
    """
    The master function that can call various approaches to calculating the proportion of cases
    detected over time.
    Currently just supporting two approaches, but would be the entry point if more approaches
    were added:
    - Testing-based case detection
    - Constant case detection fraction

    Args:
        builder: Model builder used to register the testing-based computation
        detect_prop: Back-up single value to set a constant case detection rate over time
        testing_params: Parameters to specify the relationship between CDR and testing, if requested
        pop_params: Population-related parameters
        iso3: Country code

    Returns:
        A pair: the case detection rate function of time, and its complement
        (the proportion of cases not detected).
    """
    if testing_params:
        cdr_func = find_cdr_function_from_test_data(
            builder,
            testing_params,
            iso3,
            pop_params.region,
            pop_params.year,
            testing_params.smoothing_period,
        )
    else:
        # Fall back to a constant detection proportion.
        cdr_func = detect_prop
    # NOTE(review): in the testing branch cdr_func is a graph Function, so this
    # subtraction presumably relies on operator overloading to build the
    # complement node; in the constant branch it is plain float arithmetic --
    # confirm both paths behave as intended.
    non_detect_func = 1.0 - cdr_func
    return cdr_func, non_detect_func
|
# Implements the program map we designed earlier:
# computation of gross pay, tax deduction and net pay.
# First we need input data from the user (prompts are in Norwegian).
timelonn = float(input('Hva er din timelønn? '))
antall_timer = float(input('Hvor mange timer har du arbeidet? '))
# Compute gross pay (hourly wage * hours worked).
bruttolonn = timelonn * antall_timer
# Print intermediate results along the way to debug.
# Pick the right tax rate, then compute tax and net pay.
if bruttolonn < 20000:
    skattesats = 28
else:
    skattesats = 35
skattetrekk_kr = (bruttolonn * skattesats) / 100
nettolonn = bruttolonn - skattetrekk_kr
# Print gross pay, tax rate, tax in kroner, and net pay.
print('Bruttolønn:', format(bruttolonn, '.2f'))
print('Skatteprosenten er :', format(skattesats, '.2f'), '%, og skattetrekket i kroner er', format(skattetrekk_kr, '.2f'))
print('Du får utbetalt', format(nettolonn, '.2f'))
|
/home/miaojian/miniconda3/lib/python3.7/_dummy_thread.py |
from django.test import TestCase
from django.urls import reverse
from .models import Post
from bs4 import BeautifulSoup
import requests
class ParserTests(TestCase):
    """Smoke-test that front-page story links can be scraped from the site."""

    url = "https://news.ycombinator.com/"

    def test_fetch_posts(self):
        response = requests.get(self.url)
        self.assertEqual(response.status_code, 200)
        soup = BeautifulSoup(response.text, "html.parser")
        # NOTE(review): relies on anchors carrying class "storylink" --
        # confirm the site's markup still uses that class.
        data = []
        for anchor in soup.find_all("a", attrs={"class": "storylink"}):
            data.append({"url": anchor["href"], "title": anchor.get_text()})
        self.assertTrue(data)
        self.assertTrue(data[0]["url"])
        self.assertTrue(data[0]["title"])
class PostsViewTests(TestCase):
    """Exercise the posts list endpoint: defaults, ordering, offset and limit."""

    url = reverse("posts:list")

    def setUp(self):
        # 49 posts: title1 .. title49
        for index in range(1, 50):
            Post.objects.create(
                title="title%s" % index,
                url="https://testing%s.com" % index,
            )

    def test_view_posts_default(self):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        payload = response.json()
        self.assertEqual(len(payload), 5)
        first = payload[0]
        self.assertEqual(first["id"], 1)
        self.assertEqual(first["title"], "title1")
        self.assertURLEqual(first["url"], "https://testing1.com")
        self.assertIn("created", first)

    def test_view_posts_order(self):
        response = self.client.get(self.url + "?order=not_existed_field")
        self.assertDictEqual(
            response.json(),
            {'error': 'Ordering attribute not_existed_field does not exist'})
        self.assertEqual(
            self.client.get(self.url + "?order=title").json()[0]["title"],
            "title1")
        # Lexicographic sort: "title9" is the largest title string.
        self.assertEqual(
            self.client.get(self.url + "?order=-title").json()[0]["title"],
            "title9")

    def test_view_posts_offset(self):
        for bad in (-1, "bla"):
            result = self.client.get(self.url + "?offset=%s" % bad).json()
            self.assertDictEqual(result, {"error": "Offset %s is not valid" % bad})
        ids = [item["id"] for item in self.client.get(self.url + "?offset=25").json()]
        self.assertListEqual(ids, [26, 27, 28, 29, 30])
        self.assertListEqual(self.client.get(self.url + "?offset=100").json(), [])

    def test_view_posts_limit(self):
        for bad in (-1, "bla"):
            result = self.client.get(self.url + "?limit=%s" % bad).json()
            self.assertDictEqual(result, {"error": "Limit %s is not valid" % bad})
        self.assertEqual(len(self.client.get(self.url + "?limit=25").json()), 25)
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#pylint: disable=
"""
File : WorkflowManager.py
Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
Description: Workflow management tools
"""
from __future__ import print_function
# system modules
import os
import re
import json
import httplib
# ReqMgr modules
from ReqMgr.tools.reqMgrClient import WorkflowManager
# DBS modules
import dbs3Client as dbs3
from dbs.apis.dbsClient import DbsApi
# DBS3 helper functions
# Global DBSReader endpoint used by the DBS helper functions below.
DBS3 = r'https://cmsweb.cern.ch/dbs/prod/global/DBSReader'
def getDatasets(dataset_pattern):
    "Return list of dataset for given dataset pattern"
    # '*' access type includes datasets in every validity state.
    api = DbsApi(url=DBS3, verifypeer=False)
    return api.listDatasets(dataset=dataset_pattern, dataset_access_type='*')
def getDatasetStatus(dataset):
    "Return dataset status"
    api = DbsApi(url=DBS3, verifypeer=False)
    details = api.listDatasets(dataset=dataset, dataset_access_type='*', detail=True)
    # detail=True returns full records; the status lives in dataset_access_type.
    return details[0]['dataset_access_type']
def getWorkload(url, workflow):
    "Return workload description for a workflow as a list of lines from ReqMgr"
    # Python 2 module (httplib); the grid user proxy serves as both cert and key.
    conn = httplib.HTTPSConnection(url,
        cert_file=os.getenv('X509_USER_PROXY'),
        key_file=os.getenv('X509_USER_PROXY'))
    # HTTPSConnection.request returns None, so its result is not bound
    # (the original code uselessly assigned it to r1).
    conn.request("GET", '/reqmgr/view/showWorkload?requestName=' + workflow)
    response = conn.getresponse()
    workload = response.read()
    return workload.split('\n')
class WorkflowDataOpsMgr(WorkflowManager):
def __init__(self, workflow, **kwds):
    """
    Extend WorkflowManager and assign data-ops attributes for
    given workflow. The order of calls does matter !!!
    """
    self.kwds = kwds
    self.url = self.get('url', 'cmsweb.cern.ch')
    WorkflowManager.__init__(self, workflow, self.url)
    # Fetch the full workload description from ReqMgr for this workflow.
    self.workload = getWorkload(self.url, workflow)
    self.cacheID = self.winfo.get('StepOneConfigCacheID', '')
    # NOTE(review): getConfig is not defined in this module's visible scope --
    # confirm it is imported or defined elsewhere.
    self.config = getConfig(self.url, self.cacheID)
    self.pileup_dataset = self._pileup_dataset()
    self.priority = self._priority()
    # Defaults below can all be overridden through **kwds (see self.get()).
    self.era = self.get('era', 'Summer12')
    self.lfn = self.get('lfn', '/store/mc')
    self.special_name = self.get('specialName', '')
    self.max_rss = self.get('maxRSS', 2300000)
    self.max_vsize = self.get('maxVSize', 4100000000)
    self.input_dataset = ''
    self.pileup_scenario = ''
    self.global_tag = self.get('globalTag', '')
    self.campaign = self.get('campaign', '')
    self.max_merge_events = self.get('maxMergeEvents', 50000)
    self.activity = self.get('activity', 'reprocessing')
    self.restrict = self.get('restrict', 'None')
    self.site_use = self.get('site', None)
    self.site_cust = self.get('site_cust', None)
    self.xrootd = self.get('xrootd', 0)
    self.ext_tag = self.get('ext', '')
    self.team = self.get('team', '')
    # perform various initialization
    self._init()
    # custom settings
    # Construct processed dataset version
    if self.pileup_scenario:
        self.pileup_scenario = self.pileup_scenario+'_'
    specialprocstring = kwds.get('specialName', '')
    if specialprocstring:
        self.special_name = specialprocstring + '_'
    # ProcessingString: explicit override wins, otherwise composed from parts.
    inprocstring = kwds.get('procstring', '')
    if inprocstring:
        self.procstring = inprocstring
    else:
        self.procstring = self.special_name + self.pileup_scenario +\
            self.global_tag + self.ext_tag
    # ProcessingVersion: explicit override wins, otherwise derived from DBS.
    inprocversion = kwds.get('procversion', '')
    if inprocversion:
        self.procversion = inprocversion
    else:
        self.procversion = self.dataset_version(self.era, self.procstring)
def dataset_version(self, era, partialProcVersion):
versionNum = 1
outputs = self.output_datasets
for output in outputs:
bits = output.split('/')
outputCheck = '/'+bits[1]+'/'+era+'-'+partialProcVersion+'*/'+bits[len(bits)-1]
datasets = getDatasets(outputCheck)
for dataset in datasets:
datasetName = dataset['dataset']
matchObj = re.match(r".*-v(\d+)/.*", datasetName)
if matchObj:
currentVersionNum = int(matchObj.group(1))
if versionNum <= currentVersionNum:
versionNum=versionNum+1
return versionNum
### private methods
def _init(self):
"Perform initialization and cross-checks"
self.input_dataset = self._input_dataset()
self.global_tag = self._global_tag()
self.ext_tag = self._ext_tag()
self.campaign = self._campaign()
self.era, self.lfn, self.special_name = self._era_lfn_name()
self.pileup_scenario = self._pileup_scenario()
self.max_rss = self._max_rss()
self.max_merge_events = self._max_merge_events()
self.team = self._team()
self.site_use, self.site_cust = self._sites()
# Checks attributes
checklist = [(self.era, ''), (self.lfn, ''), (self.pileup_scenario, 'Unknown')]
for att, val in checklist:
if att == val:
raise Exception('ERROR: %s == "%s"' % (att, val))
# Check status of input dataset
inputDatasetStatus = getDatasetStatus(self.input_dataset)
if inputDatasetStatus != 'VALID' and inputDatasetStatus != 'PRODUCTION':
raise Exception('ERROR: Input dataset is not PRODUCTION or VALID, status=%s' % inputDatasetStatus)
def get(self, key, default=''):
"Get extension tag"
val = self.kwds.get(key)
if not val:
val = default
return val
def _ext_tag(self):
"Get extension tag"
if self.ext_tag:
ext_tag = '_ext' + self.ext_tag
else:
ext_tag = ''
return ext_tag
def _global_tag(self):
"Extract required part of global tag from workflow info"
return self.winfo.get('GlobalTag', '').split('::')[0]
def _campaign(self):
"Return campaign from workflow info"
return self.winfo.get('Campaign', '')
def _max_rss(self):
"Return maxRSS"
max_rss = self.max_rss
if ('HiFall11' in self.workflow or 'HiFall13DR53X' in self.workflow) and \
'IN2P3' in self.site_use:
max_rss = 4000000
return max_rss
def _max_merge_events(self):
"Return max number of merge events"
if 'DR61SLHCx' in self.workflow:
return 5000
return self.max_merge_events
def _input_dataset(self):
"Return input dataset of workflow"
dataset = self.winfo.get('InputDataset', '')
if not dataset:
raise Exception("Error: no input dataset found for %s" % self.workflow)
return dataset
def _era_lfn_name(self):
"""
Return era/lfn/name for given workflow, so far we have hard-coded cases,
later it should be stored persistently and we should have APIs: get/put
to fetch/store/update this info in DB.
"""
workflow = self.workflow
campaign = self.campaign
era = 'Summer12'
lfn = '/store/mc'
specialName = ''
# Set era, lfn and campaign-dependent part of name if necessary
if 'Summer12_DR51X' in workflow:
era = 'Summer12'
lfn = '/store/mc'
if 'Summer12_DR52X' in workflow:
era = 'Summer12'
lfn = '/store/mc'
if 'Summer12_DR53X' in workflow or ('Summer12' in workflow and 'DR53X' in workflow):
era = 'Summer12_DR53X'
lfn = '/store/mc'
#this is incorrect for HiFall11 workflows, but is changed further down
if 'Fall11_R' in workflow or 'Fall11R' in workflow:
era = 'Fall11'
lfn = '/store/mc'
if 'Summer13dr53X' in workflow:
era = 'Summer13dr53X'
lfn = '/store/mc'
if 'Summer11dr53X' in workflow:
era = 'Summer11dr53X'
lfn = '/store/mc'
if 'Fall11_HLTMuonia' in workflow:
era = 'Fall11'
lfn = '/store/mc'
specialName = 'HLTMuonia_'
if 'Summer11_R' in workflow:
era = 'Summer11'
lfn = '/store/mc'
if 'LowPU2010_DR42' in workflow or 'LowPU2010DR42' in workflow:
era = 'Summer12'
lfn = '/store/mc'
specialName = 'LowPU2010_DR42_'
if 'UpgradeL1TDR_DR6X' in workflow:
era = 'Summer12'
lfn = '/store/mc'
if 'HiWinter13' in self.input_dataset:
era = 'HiWinter13'
lfn = '/store/himc'
if 'Spring14dr' in workflow:
era = 'Spring14dr'
lfn = '/store/mc'
if '_castor_' in workflow:
specialName = 'castor_'
if 'Winter13' in workflow and 'DR53X' in workflow:
era = 'HiWinter13'
lfn = '/store/himc'
if 'Summer11LegDR' in campaign:
era = 'Summer11LegDR'
lfn = '/store/mc'
if 'UpgradePhase1Age' in campaign:
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase2LB4PS_2013_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase2BE_2013_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase2LB6PS_2013_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age0DES_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age0START_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age3H_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age5H_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age1K_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
if campaign == 'UpgradePhase1Age3K_DR61SLHCx':
era = 'Summer13'
lfn = '/store/mc'
specialName = campaign + '_'
#change back to old campaign names for UpgradePhase1
if 'UpgradePhase1Age' in campaign and 'dr61SLHCx' in specialName:
specialName = specialName.replace("dr61SLHCx","_DR61SLHCx")
if 'dr61SLHCx' in specialName:
print('WARNING: using new campaign name format')
if campaign == 'HiFall11_DR44X' or campaign == 'HiFall11DR44':
era = 'HiFall11'
lfn = '/store/himc'
specialName = 'HiFall11_DR44X' + '_'
if campaign == 'HiFall13DR53X':
era = 'HiFall13DR53X'
lfn = '/store/himc'
if campaign == 'UpgFall13d':
era = campaign
lfn = '/store/mc'
if campaign == 'Fall13dr':
era = campaign
lfn = '/store/mc'
if '_castor_tsg_' in workflow:
specialName = 'castor_tsg_'
elif '_castor_' in workflow:
specialName = 'castor_'
elif '_tsg_' in workflow:
specialName = 'tsg_'
elif '__' in workflow:
specialName = ''
else:
print('ERROR: unexpected special name string in workflow name')
sys.exit(0)
# Handle NewG4Phys
if campaign == 'Summer12DR53X' and 'NewG4Phys' in workflow:
specialName = 'NewG4Phys_'
# Handle Ext30
if campaign == 'Summer12DR53X' and 'Ext30' in workflow:
specialName = 'Ext30_'
# Handle BS2011
if campaign == 'LowPU2010DR42' and 'BS2011' in workflow:
specialName = 'LowPU2010_DR42_BS2011_'
return era, lfn, specialName
def _pileup_scenario(self):
"""
Return pileup scenario name based on given workflow
Code should be replaced with persistent store.
"""
workflow = self.workflow
campaign = self.campaign
pileupDataset = self._pileup_dataset()
if pileupDataset != 'None':
[subscribedOurSite, subscribedOtherSite] = checkAcceptedSubscriptionRequest(self.url, pileupDataset, siteSE)
if not subscribedOurSite:
print('ERROR: pileup dataset not subscribed/approved to required Disk endpoint')
sys.exit(0)
# Determine pileup scenario
# - Fall11_R2 & Fall11_R4 don't add pileup so extract pileup scenario from input
pileupScenario = ''
pileupScenario = getPileupScenario(self.winfo, self.config)
if campaign == 'Summer12_DR53X_RD':
pileupScenario = 'PU_RD1'
if pileupScenario == 'Unknown' and 'MinBias' in pileupDataset and 'LowPU2010DR42' not in workflow:
print('ERROR: unable to determine pileup scenario')
sys.exit(0)
elif 'Fall11_R2' in workflow or 'Fall11_R4' in workflow or 'Fall11R2' in workflow or 'Fall11R4' in workflow:
matchObj = re.match(r".*Fall11-(.*)_START.*", inputDataset)
if matchObj:
pileupScenario = matchObj.group(1)
else:
pileupScenario == 'Unknown'
elif pileupScenario == 'Unknown' and 'MinBias' not in pileupDataset:
pileupScenario = 'NoPileUp'
if pileupScenario == 'Unknown':
pileupScenario = ''
if 'LowPU2010_DR42' in workflow or 'LowPU2010DR42' in workflow:
pileupScenario = 'PU_S0'
if 'HiWinter13' in workflow and 'DR53X' in workflow:
pileupScenario = ''
if 'pAWinter13' in workflow and 'DR53X' in workflow:
pileupScenario = 'pa' # not actually the pileup scenario of course
if 'ppWinter13' in workflow and 'DR53X' in workflow:
pileupScenario = 'pp' # not actually the pileup scenario of course
return pileupScenario
def _pileup_dataset(self):
pileupDataset = 'None'
for line in self.workload:
if 'request.schema.MCPileup' in line:
pileupDataset = line[line.find("'")+1:line.find("'",line.find("'")+1)]
return pileupDataset
def _priority(self):
priority = -1
for line in self.workload:
if 'request.schema.RequestPriority' in line:
priority = line[line.find("=")+1:line.find("<br/")]
priority = priority.strip()
priority = re.sub(r'\'', '', priority)
return int(priority)
def _team(self):
"Return appropriate team"
priority = self._priority()
if self.site_use == 'HLT':
team = 'hlt'
elif priority < 100000:
team = 'reproc_lowprio'
else:
team = 'reproc_highprio'
return team
def _sites(self):
"Find appropriate site to use"
workflow = self.workflow
siteUse = ''
siteCust = self.site_cust
# Valid Tier-1 sites
sites = ['T1_DE_KIT', 'T1_FR_CCIN2P3', 'T1_IT_CNAF', 'T1_ES_PIC',
'T1_TW_ASGC', 'T1_UK_RAL', 'T1_US_FNAL', 'T2_CH_CERN', 'HLT']
if self.site_use == 'T2_US':
siteUse = ['T2_US_Caltech', 'T2_US_Florida', 'T2_US_MIT',
'T2_US_Nebraska', 'T3_US_Omaha', 'T2_US_Purdue',
'T2_US_UCSD', 'T2_US_Vanderbilt', 'T2_US_Wisconsin']
elif self.site_use == 'HLT':
siteUse = ['T2_CH_CERN_AI', 'T2_CH_CERN_HLT', 'T2_CH_CERN']
self.team = 'hlt'
else:
# Determine site where workflow should be run
count=0
for site in sites:
if site in workflow:
count=count+1
siteUse = site
# Find custodial location of input dataset if workflow name contains no T1 site or multiple T1 sites
if count==0 or count>1:
siteUse = findCustodialLocation(self.url, self.input_dataset)
if siteUse == 'None':
raise Exception('ERROR: No custodial site found for dataset=%s' % self.input_dataset)
siteUse = siteUse[:-4]
# Set the custodial location if necessary
if not self.site_use or self.site_use != 'T2_US':
if not self.site_cust:
siteCust = siteUse
else:
siteCust = self.site_cust
# Check if input dataset subscribed to disk endpoint
if 'T2_CH_CERN' in siteUse:
siteSE = 'T2_CH_CERN'
else:
siteSE = siteUse + '_Disk'
subscribedOurSite, subscribedOtherSite = \
checkAcceptedSubscriptionRequest(self.url, self.input_dataset, siteSE)
if not subscribedOurSite and not self.xrootd and 'Fall11R2' not in workflow:
raise Exception('ERROR: input dataset not subscribed/approved to required Disk endpoint')
if self.xrootd and not subscribedOtherSite:
raise Exception('ERROR: input dataset not subscribed/approved to any Disk endpoint')
if siteUse not in sites and options.site != 'T2_US' and \
siteUse != ['T2_CH_CERN_AI', 'T2_CH_CERN_HLT', 'T2_CH_CERN']:
raise Exception('ERROR: invalid site=%s' % siteUse)
if not siteCust:
raise Exception('ERROR: A custodial site must be specified')
return siteUse, siteCust
def getScenario(ps):
    """Map a MixingModule configuration module name to its pileup-scenario label.

    Returns 'Unknown' for any module name not in the table.
    """
    scenario_by_module = {
        'SimGeneral.MixingModule.mix_E8TeV_AVE_16_BX_25ns_cfi': 'PU140Bx25',
        'SimGeneral.MixingModule.mix_2012_Summer_50ns_PoissonOOTPU_cfi': 'PU_S10',
        'SimGeneral.MixingModule.mix_E7TeV_Fall2011_Reprocess_50ns_PoissonOOTPU_cfi': 'PU_S6',
        'SimGeneral.MixingModule.mix_E8TeV_AVE_10_BX_25ns_300ns_spread_cfi': 'PU10bx25',
        'SimGeneral.MixingModule.mix_E8TeV_AVE_10_BX_50ns_300ns_spread_cfi': 'PU10bx50',
        'SimGeneral.MixingModule.mix_2011_FinalDist_OOTPU_cfi': 'PU_S13',
        'SimGeneral.MixingModule.mix_fromDB_cfi': 'PU_RD1',
        'SimGeneral.MixingModule.mix_2012C_Profile_PoissonOOTPU_cfi': 'PU2012CExt',
        'SimGeneral.MixingModule.mixNoPU_cfi': 'NoPileUp',
        'SimGeneral.MixingModule.mix_POISSON_average_cfi': 'PU',
        'SimGeneral.MixingModule.mix_CSA14_50ns_PoissonOOTPU_cfi': 'PU_S14',
    }
    return scenario_by_module.get(ps, 'Unknown')
def getPileupScenario(winfo, config):
    """Derive the pileup scenario label for a workflow dict and its config dump."""
    workflow = winfo['RequestName']
    mixer, meanPU, spacing, cmdOpts = getPileup(config)
    label = getScenario(mixer)
    if label == 'PU140Bx25' and meanPU != 'Unknown':
        # Substitute the average pileup actually found in the config.
        label = 'PU' + meanPU + 'bx25'
    if label == 'PU140bx25' and 'Upgrade' in workflow:
        # Upgrade campaigns keep the canonical capitalised form.
        label = 'PU140Bx25'
    if label == 'PU':
        label = 'PU' + meanPU + 'bx' + spacing
        if meanPU == 'None' or spacing == 'None':
            print('ERROR: unexpected pileup settings in config')
            sys.exit(0)
    if label == 'PU_RD1' and cmdOpts != 'None':
        # The RD2 variant is distinguished only by its run/weight list.
        if '--runsAndWeightsForMC [(190482,0.924) , (194270,4.811), (200466,7.21), (207214,7.631)]' in cmdOpts:
            label = 'PU_RD2'
    return label
def getPileup(config):
    """Parse a CMSSW config dump for its pileup settings.

    :param config: full configuration text (one statement per line)
    :return: tuple (mixing_module, mean_pileup, bunch_spacing, cmd_line_options)
             where missing values stay 'Unknown' / 'None'
    """
    pu = 'Unknown'
    vmeanpu = 'None'
    bx = 'None'
    cmdLineOptions = 'None'
    lines = config.split('\n')
    for line in lines:
        # BUG FIX: the original condition was
        #   if 'process.load' and 'MixingModule' in line:
        # where the first operand is a constant truthy string, so ANY line
        # mentioning MixingModule matched.  Require both substrings.
        if 'process.load' in line and 'MixingModule' in line:
            # module name is the first single-quoted token
            pu = line[line.find("'")+1:line.find("'", line.find("'")+1)]
        if 'process.mix.input.nbPileupEvents.averageNumber' in line:
            # keep only the integer part of e.g. cms.double(10.0)
            meanpu = line[line.find("(")+1:line.find(")")].split('.', 1)
            vmeanpu = meanpu[0]
        if 'process.mix.bunchspace' in line:
            bx = line[line.find("(")+1:line.find(")")]
        if 'with command line options' in line:
            cmdLineOptions = line
    return pu, vmeanpu, bx, cmdLineOptions
def getConfig(url, cacheID):
    """Fetch the raw configFile document for *cacheID* from the ReqMgr couch DB.

    Authenticates with the grid proxy pointed to by X509_USER_PROXY.
    """
    proxy = os.getenv('X509_USER_PROXY')
    connection = httplib.HTTPSConnection(url, cert_file=proxy, key_file=proxy)
    connection.request("GET", '/couchdb/reqmgr_config_cache/'+cacheID+'/configFile')
    return connection.getresponse().read()
def findCustodialLocation(url, dataset):
    """Return the custodial PhEDEx node of *dataset*, excluding T0 MSS.

    Returns "No Site" when the dataset has no block replicas at all and
    "None" when no non-T0 custodial replica exists.
    """
    proxy = os.getenv('X509_USER_PROXY')
    connection = httplib.HTTPSConnection(url, cert_file=proxy, key_file=proxy)
    connection.request("GET", '/phedex/datasvc/json/prod/blockreplicas?dataset='+dataset)
    payload = json.loads(connection.getresponse().read())
    phedex_info = payload['phedex']
    blocks = phedex_info.get('block')
    if not blocks:
        return "No Site"
    # Only the first block's replicas are inspected (as in the original).
    for replica in blocks[0]['replica']:
        if replica['custodial'] == "y" and replica['node'] != "T0_CH_CERN_MSS":
            return replica['node']
    return "None"
def checkAcceptedSubscriptionRequest(url, dataset, site):
    """Inspect PhEDEx transfer requests for *dataset*.

    :return: (ourNode, otherNode) booleans — ourNode is True when an
             approved subscription exists at *site*; otherNode when one
             exists at some other Disk endpoint.
    """
    conn = httplib.HTTPSConnection(url,
                                   cert_file=os.getenv('X509_USER_PROXY'),
                                   key_file=os.getenv('X509_USER_PROXY'))
    conn.request("GET", '/phedex/datasvc/json/prod/requestlist?dataset='+dataset+'&type=xfer')
    resp = conn.getresponse()
    result = json.load(resp)
    requests = result['phedex']
    if 'request' not in requests:
        # CONSISTENCY FIX: return a tuple like the success path below
        # (the original returned a list here; unpacking callers accept both,
        # but a single return shape is clearer).
        return False, False
    ourNode = False
    otherNode = False
    # CONSISTENCY FIX: iterate via the `requests` alias already extracted
    # above instead of re-indexing result['phedex'].
    for request in requests['request']:
        for node in request['node']:
            if node['name'] == site and node['decision'] == 'approved':
                ourNode = True
            elif 'Disk' in node['name'] and node['decision'] == 'approved':
                otherNode = True
    return ourNode, otherNode
|
import os
import numpy as np
import tensorflow as tf
import tensorlayer as tl
from config import FLAGS_CMNIST, FLAGS_CIFAR
from train import args
# Select the hyper-parameter set matching the dataset chosen on the command line.
flags = FLAGS_CMNIST()
if args.dataset == 'CMNIST':
    flags = FLAGS_CMNIST()
elif args.dataset == 'CIFAR_10':
    # BUG FIX: the imported class is FLAGS_CIFAR; FLAGS_CIFAR_10 was an
    # undefined name and this branch always raised NameError.
    flags = FLAGS_CIFAR()
else:
    print('dataset error')
def get_CMNIST_train():
    """Build the CMNIST training tf.data pipeline.

    Reads image paths ``0.jpg`` .. ``len_dataset-1.jpg`` from the hard-coded
    dataset directory plus their integer labels from ``label.txt``.

    :return: (dataset, images_path) where dataset yields batches of
             (image in [-1, 1], scalar int label)
    """
    images_path = []
    for i in range(flags.len_dataset):
        images_path.append('/home/asus/Workspace/dataset/cmnist_label/' + str(i) + ".jpg")
    targets = []
    with open('/home/asus/Workspace/dataset/label.txt', 'r') as f:
        for line in f:
            line = line.strip()
            if line:  # skip blank lines (e.g. a trailing newline)
                # BUG FIX: labels were appended as raw strings ("3\n"), which
                # cannot be converted to the tf.int32 output type declared
                # for the generator below; convert to int here.
                targets.append(int(line))
    if len(images_path) != len(targets):
        raise AssertionError("The length of inputs and targets should be equal")

    def generator_train():
        # Yield (encoded path, label) pairs for from_generator.
        for image_path, target in zip(images_path, targets):
            yield image_path.encode('utf-8'), target

    def _map_fn(image_path, target):
        # Decode JPEG to float32 RGB in [0, 1], then rescale to [-1, 1].
        image = tf.io.read_file(image_path)
        image = tf.image.decode_jpeg(image, channels=3)
        image = tf.image.convert_image_dtype(image, dtype=tf.float32)
        image = image * 2 - 1
        target = tf.reshape(target, ())
        return image, target

    train_ds = tf.data.Dataset.from_generator(generator_train, output_types=(tf.string, tf.int32))
    ds = train_ds.shuffle(buffer_size=4096)
    # Repeat just enough epochs to cover flags.step_num optimization steps.
    n_step_epoch = int(flags.len_dataset // flags.batch_size_train)
    n_epoch = int(flags.step_num // n_step_epoch)
    ds = ds.repeat(n_epoch)
    ds = ds.map(_map_fn, num_parallel_calls=4)
    ds = ds.batch(flags.batch_size_train)
    ds = ds.prefetch(buffer_size=2)
    return ds, images_path
def get_dataset_eval():
    """Build the evaluation pipeline for the dataset selected in ``flags``.

    NOTE(review): if flags.dataset is neither 'MNIST' nor 'CIFAR_10' the
    locals below are never bound — presumably callers guarantee one of the
    two values; confirm.
    """
    if flags.dataset == 'MNIST':
        X_train, Y_train, _, _, X_test, _ = tl.files.load_mnist_dataset(shape=(-1, 28, 28, 1), path='../data')
        X_train = X_train * 2 - 1  # rescale to [-1, 1] (assumes loader gives [0, 1])
    if flags.dataset == 'CIFAR_10':
        X_train, Y_train, X_test, _ = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3), path='../data')
        X_train = X_train / 127.5 - 1  # rescale to [-1, 1] (assumes [0, 255] pixels)

    def sample_generator():
        yield from zip(X_train, Y_train)

    def identity_map(image, label):
        # Kept as a no-op hook mirroring the training pipeline's map stage.
        return image, label

    eval_ds = tf.data.Dataset.from_generator(sample_generator, output_types=(tf.float32, tf.int32))
    eval_ds = eval_ds.map(identity_map, num_parallel_calls=4)
    eval_ds = eval_ds.batch(flags.batch_size_eval)
    eval_ds = eval_ds.prefetch(buffer_size=4)  # For concurrency
    return eval_ds
def get_CIFAR10_train():
    """Build the CIFAR-10 training pipeline (shuffled, repeated, batched)."""
    X_train, y_train, _, _ = tl.files.load_cifar10_dataset(shape=(-1, 32, 32, 3), plotable=False)

    def generator_train():
        if len(X_train) != len(y_train):
            raise AssertionError("The length of inputs and targets should be equal")
        yield from zip(X_train, y_train)

    def _map_fn_train(img, target):
        # The usual augmentation steps (random crop/flip/brightness/contrast
        # and per-image standardization) were deliberately disabled in the
        # original; only the label is reshaped to a scalar.
        target = tf.reshape(target, ())
        return img, target

    ds = tf.data.Dataset.from_generator(
        generator_train, output_types=(tf.float32, tf.int32))
    ds = ds.shuffle(buffer_size=4096)
    # Repeat just enough epochs to cover flags.step_num optimization steps.
    steps_per_epoch = int(flags.len_dataset // flags.batch_size_train)
    epochs = int(flags.step_num // steps_per_epoch)
    ds = ds.repeat(epochs)
    ds = ds.map(_map_fn_train, num_parallel_calls=4)
    ds = ds.batch(flags.batch_size_train)
    ds = ds.prefetch(buffer_size=2)
    return ds
|
"""
Class module defining the Project class and its Qt interface class ProjectView (derived from QTreeview).
"""
import yaml
import application.lib.objectmodel as objectmodel
class Project(object):
    """
    Class implementing the concept of user project in a similar way as in other integrated development environments.
    The project is a set of code files, organised hierarchically in a tree, having parameters, and being saved in a project file with extension '.prj'.
    """

    def __init__(self):
        self._tree = objectmodel.Folder("[project]")
        self._parameters = dict()
        self._filename = None
        # Snapshot of the serialized state, used by hasUnsavedChanges().
        self._lastState = self.saveToString()

    def parameters(self):
        """Return the project's parameter dictionary."""
        return self._parameters

    def setParameters(self, parameters):
        """Replace the project's parameter dictionary."""
        self._parameters = parameters

    def tree(self):
        """Return the root Folder of the project tree."""
        return self._tree

    def setTree(self, tree):
        """Replace the project tree."""
        self._tree = tree

    def setFilename(self, filename):
        self._filename = filename

    def filename(self):
        return self._filename

    def saveToFile(self, filename):
        """Serialize the project to *filename* and record the saved state."""
        string = self.saveToString()
        # IDIOM FIX: use a context manager so the handle is closed on error.
        with open(filename, "w") as file:
            file.write(string)
        self.setFilename(filename)
        self._lastState = string

    def loadFromFile(self, filename):
        """Load project state from *filename* and record it as saved."""
        with open(filename, "r") as file:
            content = file.read()
        self.loadFromString(content)
        self.setFilename(filename)
        self._lastState = content

    def hasUnsavedChanges(self):
        """Return True when the current state differs from the last saved one."""
        return self._lastState != self.saveToString()

    def saveToString(self):
        """Serialize tree + parameters to a YAML string."""
        converter = objectmodel.Converter()
        treedump = converter.dump(self._tree)
        return yaml.dump({'tree': treedump, 'parameters': self._parameters})

    def loadFromString(self, string):
        """Restore tree + parameters from a YAML string.

        NOTE(review): yaml.load without an explicit Loader can execute
        arbitrary constructors on untrusted input; switch to yaml.safe_load
        if the dump produced by Converter contains only plain types — confirm
        before changing, custom tags would break.
        """
        params = yaml.load(string)
        converter = objectmodel.Converter()
        self._tree = converter.load(params["tree"])
        self._parameters = params["parameters"]
#############
# Qt gui #
#############
from PyQt4.QtGui import *
from PyQt4.QtCore import *
class ProjectModel(QAbstractItemModel):
    """Qt item model exposing a project's folder/file tree to a QTreeView.

    Nodes are registered in ``_nodeList``; a node's list position doubles as
    the ``internalId`` stored in each QModelIndex.
    NOTE(review): nodes are never removed from ``_nodeList``, so ids stay
    valid for the model's lifetime but deleted nodes are kept alive.
    """

    def __init__(self, root, parent=None):
        QAbstractItemModel.__init__(self, parent)
        self._root = root            # root node of the project tree
        self._nodeList = []          # node registry; index == internalId
        self._dropAction = Qt.MoveAction
        self._mimeData = None

    def setProject(self, project):
        """Swap in a new project root, resetting the whole model."""
        self.beginResetModel()
        self._root = project
        self.endResetModel()

    def project(self):
        return self._root

    def headerData(self, section, orientation, role):
        # Only section 1 gets an (empty) header; other sections implicitly
        # return None, which Qt treats as "no data".
        if section == 1:
            return QVariant(QString(u""))

    def deleteNode(self, index):
        """Remove the node at *index* from its parent (and from the view)."""
        parent = self.parent(index)
        node = self.getNode(index)
        parentNode = self.getNode(parent)
        if parentNode is None:
            parentNode = self._root
        self.beginRemoveRows(parent, index.row(), index.row())
        parentNode.removeChild(node)
        self.endRemoveRows()

    def getIndex(self, node):
        """Return the stable integer id for *node*, registering it if new."""
        if node in self._nodeList:
            return self._nodeList.index(node)
        self._nodeList.append(node)
        index = self._nodeList.index(node)
        return index

    def getNode(self, index):
        """Map a QModelIndex back to its tree node (root for invalid index)."""
        if not index.isValid():
            return self._root
        return self._nodeList[index.internalId()]

    def parent(self, index):
        """Return the QModelIndex of *index*'s parent (invalid for top level)."""
        if index == QModelIndex():
            return QModelIndex()
        node = self.getNode(index)
        if node is None:
            return QModelIndex()
        if node.parent() is None:
            return QModelIndex()
        if node.parent().parent() is None:
            # parent is a direct child of the root -> top-level index
            return QModelIndex()
        else:
            grandparent = node.parent().parent()
            row = grandparent.children().index(node.parent())
            return self.createIndex(row, 0, self.getIndex(node.parent()))

    def hasChildren(self, index):
        node = self.getNode(index)
        if node is None:
            # Unknown node: report children so the view can still expand it.
            return True
        if node.hasChildren():
            return True
        return False

    def data(self, index, role=Qt.DisplayRole):
        # Only the display role is provided: the node's name.
        node = self.getNode(index)
        if role == Qt.DisplayRole:
            return QVariant(node.name())
        return QVariant()

    def index(self, row, column, parent):
        """Create the index for child *row* of *parent* (invalid when out of range)."""
        parentNode = self.getNode(parent)
        if parentNode is None:
            if row < len(self._root.children()):
                return self.createIndex(row, column, self.getIndex(self._root.children()[row]))
        elif row < len(parentNode.children()):
            return self.createIndex(row, column, self.getIndex(parentNode.children()[row]))
        return QModelIndex()

    def columnCount(self, parent):
        # Single-column tree (names only).
        return 1

    def supportedDropActions(self):
        return Qt.MoveAction | Qt.CopyAction

    def setDropAction(self, action):
        self._dropAction = action

    def rowCount(self, parent):
        if not parent.isValid():
            return len(self._root.children())
        node = self.getNode(parent)
        return len(node.children())

    def flags(self, index):
        defaultFlags = QAbstractItemModel.flags(self, index)
        if index.isValid():
            return Qt.ItemIsDragEnabled | Qt.ItemIsDropEnabled | defaultFlags
        else:
            # Root area accepts drops (so items can be moved to top level).
            return Qt.ItemIsDropEnabled | defaultFlags

    def mimeData(self, indexes):
        # Internal drag: mark the payload and remember the dragged indexes.
        mimeData = QMimeData()
        mimeData.setData("projecttree/internalMove", "")
        self._moveIndexes = indexes
        return mimeData

    def addNode(self, node, parent=QModelIndex()):
        """Insert *node* as the first child of *parent*."""
        self.beginInsertRows(parent, 0, 0)
        parentNode = self.getNode(parent)
        parentNode.insertChild(0, node)
        self.endInsertRows()

    def dropMimeData(self, data, action, row, column, parent):
        """
        This is the function that manages the drop on the projectView QTreeView
        """
        # To do: clean this function to get the right drop behavior in any case
        # print data,action,row,column,parent,self.getNode(parent),data.formats
        if row == -1:
            row = 0
        if data is not None:
            parentNode = self.getNode(parent)
            if parentNode is None:
                return False
            if data.hasFormat("projecttree/internalMove"):
                # Internal move: re-parent the previously remembered indexes.
                if self._dropAction == Qt.MoveAction:
                    parentNode = self.getNode(parent)
                    # Walk up until the drop target is a Folder.
                    while type(parentNode) is not objectmodel.Folder:
                        if parentNode.parent() is None:
                            return False
                        parentNode = parentNode.parent()
                        parent = self.parent(parent)
                    for index in self._moveIndexes:
                        oldParent = index.parent()
                        oldParentNode = self.getNode(oldParent)
                        node = self.getNode(index)
                        rowOfChild = oldParentNode.children().index(node)
                        if oldParentNode == parentNode and rowOfChild == row:
                            return False  # dropped onto its own position
                        if node.isAncestorOf(parentNode):
                            return False  # would create a cycle
                        self.beginMoveRows(
                            oldParent, rowOfChild, rowOfChild, parent, 0)
                        oldParentNode.removeChild(node)
                        parentNode.insertChild(0, node)
                        self.endMoveRows()
            elif data.hasUrls():
                # External drop of file URLs: add File nodes to the nearest Folder.
                index = parent
                # print index,data.url()
                while type(parentNode) != objectmodel.Folder:
                    if parentNode.parent() is None:
                        return False
                    index = self.parent(index)
                    parentNode = parentNode.parent()
                for url in data.urls():
                    if url.toLocalFile() != "":
                        fileNode = objectmodel.File(url=str(url.toLocalFile()))
                        self.beginInsertRows(index, len(
                            parentNode), len(parentNode))
                        parentNode.addChild(fileNode)
                        self.endInsertRows()
        return True
class ProjectView(QTreeView):
    """Tree view over a ProjectModel with drag-and-drop and a context menu.

    Uses PyQt4 old-style signals; double-clicking a File node emits
    ``openFile(PyQt_PyObject)`` with the file's URL.
    """

    def __init__(self, parent=None):
        QTreeView.__init__(self, parent)
        self.setAcceptDrops(True)
        self.setDragEnabled(True)
        self.setDropIndicatorShown(True)
        # Route right-clicks to our own context menu builder.
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.connect(self, SIGNAL(
            "customContextMenuRequested(const QPoint &)"), self.getContextMenu)

    def dragMoveEvent(self, e):
        # Accept unconditionally; the model decides in dropMimeData.
        e.accept()

    def dragEnterEvent(self, e):
        e.acceptProposedAction()

    def getContextMenu(self, p):
        """Show an Edit/Delete menu for a single selected item at point *p*."""
        menu = QMenu()
        selectedItems = self.selectedIndexes()
        if len(selectedItems) == 1:
            renameAction = menu.addAction("Edit")
            self.connect(renameAction, SIGNAL(
                "triggered()"), self.editCurrentItem)
            deleteAction = menu.addAction("Delete")
            self.connect(deleteAction, SIGNAL(
                "triggered()"), self.deleteCurrentItem)
        menu.exec_(self.viewport().mapToGlobal(p))

    def createNewFolder(self):
        """Ask for a name and create a Folder under the nearest selected Folder."""
        selectedIndices = self.selectedIndexes()
        if len(selectedIndices) == 0:
            index = QModelIndex()
        else:
            index = selectedIndices[0]
        node = self.model().getNode(index)
        # Walk up to the closest Folder ancestor.
        while type(node) != objectmodel.Folder:
            if node.parent() is None:
                return
            node = node.parent()
            index = self.model().parent(index)
        dialog = QInputDialog()
        dialog.setWindowTitle("New Folder")
        dialog.setLabelText("Name")
        dialog.setTextValue("")
        dialog.exec_()
        if dialog.result() == QDialog.Accepted:
            node = objectmodel.Folder(str(dialog.textValue()))
            self.model().addNode(node, index)

    def editCurrentItem(self):
        """Rename the selected Folder via an input dialog (Folders only)."""
        selectedItems = self.selectedIndexes()
        if len(selectedItems) == 1:
            index = selectedItems[0]
            node = self.model().getNode(index)
            if node is None or type(node) != objectmodel.Folder:
                return
            dialog = QInputDialog()
            dialog.setWindowTitle("Edit Folder")
            dialog.setLabelText("Name")
            dialog.setTextValue(node.name())
            dialog.exec_()
            if dialog.result() == QDialog.Accepted:
                node.setName(str(dialog.textValue()))

    def deleteCurrentItem(self):
        """Delete the selected node after a Yes/No confirmation."""
        selectedItems = self.selectedIndexes()
        if len(selectedItems) == 1:
            message = QMessageBox(QMessageBox.Question, "Confirm deletion",
                                  "Are you sure that you want to delete this node?", QMessageBox.Yes | QMessageBox.No)
            message.exec_()
            if message.standardButton(message.clickedButton()) != QMessageBox.Yes:
                return
            self.model().deleteNode(selectedItems[0])

    def openFile(self, node):
        """Emit openFile for File nodes (old-style PyQt4 signal)."""
        if type(node) == objectmodel.File:
            self.emit(SIGNAL("openFile(PyQt_PyObject)"), node.url())

    def mouseDoubleClickEvent(self, event):
        # Double-click on a File emits openFile; everything else falls
        # through to the default QTreeView handling (expand/collapse).
        index = self.indexAt(event.pos())
        if index.isValid():
            node = self.model().getNode(index)
            if type(node) == objectmodel.File:
                self.emit(SIGNAL("openFile(PyQt_PyObject)"), node.url())
                event.accept()
                return
        QTreeView.mouseDoubleClickEvent(self, event)

    def selectionChanged(self, selected, deselected):
        # NOTE(review): the node lookup below is unused — presumably a stub
        # for future per-selection behavior; kept for fidelity.
        if len(selected.indexes()) == 1:
            node = self.model().getNode(selected.indexes()[0])
        else:
            pass
        QTreeView.selectionChanged(self, selected, deselected)
|
# Fill an H x W grid with N colors in boustrophedon (snake) order:
# color i+1 occupies the next A[i] cells, rows alternating direction.
height, width = map(int, input().split())
n_colors = int(input())
counts = list(map(int, input().split()))

grid = [[] for _ in range(height)]
row = 0
filled_in_row = 0
right_to_left = False
for color in range(n_colors):
    for _ in range(counts[color]):
        filled_in_row += 1
        if right_to_left:
            grid[row].insert(0, color + 1)
        else:
            grid[row].append(color + 1)
        if filled_in_row == width:
            filled_in_row = 0
            row += 1
            right_to_left = not right_to_left

for cells in grid:
    print(' '.join(map(str, cells)))
|
import re
def main():
    """Split Fayek.bib into one `.bib` file per entry, named by citation key.

    Each entry is re-flowed so that every field sits on its own line and
    runs of spaces are collapsed.
    """
    with open('Fayek.bib', 'r') as f:
        lines = f.read()
    bibs = lines.split('@')[1:]
    for bib in bibs:
        # Protect the ",\n" field separators with a placeholder, strip all
        # remaining newlines, then restore the separators.
        # CLEANUP: the original chained the same .replace(', \n', '<>') twice;
        # the duplicate call was a no-op and has been removed.
        bib = bib.replace(',\n', '<>').replace(', \n', '<>').replace('\n', '').replace('<>', ',\n')
        bib = re.sub(' +', ' ', bib)
        bib = '@' + bib.replace(',}', ',\n}') + '\n'  # new line before the closing brace
        # CLEANUP: dropped the original `bib.replace('\n ', '\n ')`, which
        # replaced a string with itself, and the `''.join(bib)` identity call.
        key = bib[bib.find('{') + 1:bib.find(',')]
        with open(key + '.bib', 'w') as out:
            out.write(bib)


if __name__ == '__main__':
    main()
import time
from playwright import sync_playwright
# The following command line starts Playwright's recorder (codegen) feature:
# python -m playwright codegen
def test_run(playwright):
    """Log in to rikunabi NEXT with dummy credentials, taking screenshots
    before and after the login form is submitted (headless Chromium)."""
    browser = playwright.chromium.launch(headless=True)
    context = browser.newContext()
    page = context.newPage()
    # Go to the landing page
    page.goto("https://next.rikunabi.com/")
    # ScreenShot 1
    page.screenshot(path="playwright1.png")
    # Click the login link (label text is Japanese for "Login")
    page.click("text=\"ログイン\"")
    # Fill input: e-mail
    page.fill("input[name=\"mainEmail\"]", "sample@foo.bar")
    # Fill input: password
    page.fill("input[name=\"passwd\"]", "passwordtest")
    # Click "agree to the above and log in"
    page.click("text=\"上記に同意してログイン\"")
    # ScreenShot 2
    page.screenshot(path="playwright2.png")
    # Other reference snippets:
    # page.check("#music")  # checkbox
    # page.selectOption("input[name=\"age\"]", "24")  # pull-down
    # assert "パスワード不正" in page.innerText("#error_message")
    time.sleep(4)
    # Close page
    page.close()
    context.close()
    browser.close()
# Entry point: run the recorded scenario inside a synchronous Playwright session.
with sync_playwright() as playwright:
    test_run(playwright)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author: cg错过
# time : 2017-12-12
class AllModuleRunAll:
    """Per-day "already executed" flags for the monitoring checks.

    Each ``intOverAll...`` flag is 0 while its check has not yet run in the
    current window (e.g. during the 9-o'clock hour) and set to 1 once it has,
    so each check runs only once per window; callers may also use the flags
    to allow a fixed number of runs.  ``initAllNum()`` resets everything for
    the next day.
    History: 2017-12-14 added PicArrivals/Pm2; 2018-04-25 added CountNum and
    GetIntegralSit.  (Comments translated from the original Chinese.)
    """

    intOverAllCheckTomcatNum = 0
    intOverAllCheckNginxNum = 0
    intOverAllCheckRedisNum = 0
    intOverAllCheckLetterNum = 0
    intOverAllCheckDiskNum = 0
    intOverAllCheckPicArrivals = 0
    intOverAllCheckPm2 = 0
    intOverAllCountNum = 0
    intOverAllGetIntegralSit = 0

    def initAllNum(self):
        """Reset every per-day flag to 0 (not yet executed).

        :return: 1 when at least one flag was set (a reset happened), else 0.
        """
        # IDIOM FIX: replaced the chained bitwise-| over nine `== 1`
        # comparisons with a simple membership test; also removed a dead
        # triple-quoted block of commented-out setters.
        flags = (
            self.intOverAllCheckTomcatNum,
            self.intOverAllCheckNginxNum,
            self.intOverAllCheckRedisNum,
            self.intOverAllCheckLetterNum,
            self.intOverAllCheckDiskNum,
            self.intOverAllCheckPicArrivals,
            self.intOverAllCheckPm2,
            self.intOverAllCountNum,
            self.intOverAllGetIntegralSit,
        )
        if 1 in flags:
            self.intOverAllCheckTomcatNum = 0
            self.intOverAllCheckNginxNum = 0
            self.intOverAllCheckRedisNum = 0
            self.intOverAllCheckLetterNum = 0
            self.intOverAllCheckDiskNum = 0
            self.intOverAllCheckPicArrivals = 0
            self.intOverAllCheckPm2 = 0
            self.intOverAllCountNum = 0
            self.intOverAllGetIntegralSit = 0
            return 1
        return 0
|
"""
distutilazy.util
----------------
utility functions
:license: MIT. For more details see LICENSE file or
https://opensource.org/licenses/MIT
"""
import os
import fnmatch
def find_files(root, pattern):
    """Recursively collect files under ``root`` whose names match the
    glob ``pattern``.

    :param root: string, directory to walk
    :param pattern: string, fnmatch-style glob
    :return: list of matching file paths, each prefixed with ``root``
    """
    found = []
    for current_dir, _subdirs, filenames in os.walk(root):
        found += [
            os.path.join(current_dir, name)
            for name in fnmatch.filter(filenames, pattern)
        ]
    return found
def find_directories(root, pattern):
    """Recursively collect directories under ``root`` whose names match
    the glob ``pattern``.

    :param root: string, directory to walk
    :param pattern: string, fnmatch-style glob
    :return: list of matching directory paths, each prefixed with ``root``
    """
    found = []
    for current_dir, subdirs, _filenames in os.walk(root):
        found += [
            os.path.join(current_dir, name)
            for name in fnmatch.filter(subdirs, pattern)
        ]
    return found
|
from db_models.models.base.abstract_uploaded_media import AbstractUploadedMedia
class UploadedPhoto(AbstractUploadedMedia):
    # Concrete uploaded-media model for photos; all behavior comes from
    # AbstractUploadedMedia, this subclass only pins the storage bucket.
    # S3 bucket that uploaded photos are stored in.
    s3_bucket = 'gymapplife-uploaded-photo'
|
from app import cmx
# SQL statement templates, filled in with the old-style % operator.
# Fix: the f"..." prefixes were useless (no {} placeholders) and
# misleading next to the %s markers, so they were dropped.
# WARNING: values are interpolated directly into the SQL text (no
# parameter binding), so user-supplied values enable SQL injection.
# Kept for compatibility with existing callers; new code should use
# parameterized queries instead.
variaveis = {
    'usuario': "INSERT INTO usuario (user, nome, senha, email, status_user) VALUES ('%s', '%s', '%s', '%s', 'false')",
    'administrador': "INSERT INTO administrador (user, nome, senha, email, chave) VALUES ('%s', '%s', '%s', '%s', '%s')",
    'seleciona_um': "SELECT %s FROM %s WHERE %s = '%s'",
    'atualiza': "UPDATE %s SET user = '%s', nome = '%s', senha = '%s', email='%s', chave='' WHERE IDadministrador = %s",
    'exclui': "DELETE FROM %s WHERE %s='%s'"
}
# Fetch one record.
def seleciona_um(id, select="*", tipo='administrador', atributo='IDadministrador'):
    """Return all rows of table `tipo` where `atributo` equals `id`;
    on any failure return the tuple (400, exception)."""
    try:
        cursor = cmx.connection.cursor()
        query = variaveis["seleciona_um"] % (select, tipo, atributo, id)
        print("selecionaum=", query)
        cursor.execute(query)
        return cursor.fetchall()
    except Exception as ax:
        return (400, ax)
# Fetch every record of a table.
def seleciona_todos(tipo):
    """Return all rows of table `tipo`."""
    cursor = cmx.connection.cursor()
    string_sql = f"SELECT * FROM {tipo}"
    print("string;", string_sql)
    cursor.execute(string_sql)
    return cursor.fetchall()
# Insert.
def adiciona(tipo, data):
    """Validate (for 'usuario'/'administrador') and insert `data` into
    table `tipo`; returns the (status_code, message) tuple.

    Bug fix: the "no validation needed" branch used to produce the bare
    int 200, so the subsequent `val[0]` lookup raised TypeError for any
    table other than usuario/administrador.
    """
    if tipo == "usuario" or tipo == "administrador":
        val = valida(tipo, data)
    else:
        val = (200, "Preenchimento Valido")  # fix: was the bare int 200
    print("testinha:", variaveis[tipo] % data)
    print("val:", val)
    if val[0] == 200:
        cursor = cmx.connection.cursor()
        cursor.execute(variaveis[tipo] % data)
        cmx.connection.commit()
    return val
# Update.
def atualiza(tipo, data, id):
    """Validate and update record `id` of table `tipo` with `data`
    (user, nome, senha, email); returns the (status_code, message) tuple.

    Bug fix: the no-validation branch used to produce the bare int 200,
    so `val[0]` raised TypeError for any other table.
    """
    if tipo == "usuario" or tipo == "administrador":
        val = valida(tipo, data)
    else:
        val = (200, "Preenchimento Valido")  # fix: was the bare int 200
    if val[0] == 200:
        query = variaveis['atualiza'] % (tipo, data[0], data[1], data[2], data[3], id)
        print("atualiza=", query)
        cursor = cmx.connection.cursor()
        cursor.execute(query)
        cmx.connection.commit()
    return val
# Delete.
def exclui(tipo, id, atributo="user"):
    """Delete the record of table `tipo` where `atributo` equals `id`.

    Bug fix: the debug print formatted variaveis["seleciona_um"] (which
    has 4 placeholders) with only 3 values, raising "not enough
    arguments for format string"; it now prints the DELETE statement
    that is actually executed.
    """
    if seleciona_um(id, "*", tipo, atributo)[0] != 400:
        query = variaveis["exclui"] % (tipo, atributo, id)
        print("exclui=", query)
        cursor = cmx.connection.cursor()
        cursor.execute(query)
        cmx.connection.commit()
        return 200, 'usuario excluido!'
    else:
        return 400, 'usuario não encontrado!'
# Validate.
def valida(tipo, data):
    """Validate `data` before insert/update: every field must be
    non-empty and data[0] must not already exist as `user` in `tipo`."""
    if any(dado == "" for dado in data):
        return (400, "Preenchimento Invalido!")
    # NOTE: queries twice on purpose, mirroring the original call pattern
    # (once for the membership test, once for the debug print).
    if seleciona_um(data[0], "*", tipo, "user") != ():
        print("vadalida:", seleciona_um(data[0], "*", tipo, "user"))
        return (403, "Id Proibido!")
    return (200, "Preenchimento Valido")
|
from django.shortcuts import render
from django.views import generic
from blog.models import Post
# Create your views here.
#
# def home(request):
# return render(request,'index.html',{})
#
class Index(generic.ListView):
    """Blog landing page: renders index.html with every post, newest first."""
    template_name = 'index.html'
    # Name of the template context variable holding the queryset.
    context_object_name = "posts"
    def get_queryset(self):
        # Descending creation time so the newest post appears first.
        return Post.objects.order_by("-created_at")
class Show(generic.DetailView):
    """Display a single Post via show.html.

    Bug fix: this view previously subclassed generic.DeleteView, which
    deletes the object on POST and looks for a *_confirm_delete
    template; a read-only "show" page is a DetailView.
    """
    model = Post
    context_object_name = "post"
    template_name = "show.html"
#!/usr/bin/env python
#################################################################
#
# Copyright (c) 2012
# Fraunhofer Institute for Manufacturing Engineering
# and Automation (IPA)
#
#################################################################
#
# Project name: care-o-bot
# ROS stack name: cob_driver
# ROS package name: cob_hwboard
#
# Author: Eduard Herkel, email: eduard.herkel@ipa.fraunhofer.de
# Supervised by: Eduard Herkel, email: eduard.herkel@ipa.fraunhofer.de
#
# Date of creation: October 2012
#
# ToDo
#
#################################################################
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer. \n
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution. \n
# - Neither the name of the Fraunhofer Institute for Manufacturing
# Engineering and Automation (IPA) nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. \n
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License LGPL as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License LGPL for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License LGPL along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
#################################################################
import sys

import roslib; roslib.load_manifest('cob_hwboard')
import rospy
from serial import *
from diagnostic_msgs.msg import DiagnosticArray
from diagnostic_msgs.msg import DiagnosticStatus
from diagnostic_msgs.msg import KeyValue
class HwBoard:
    """ROS driver for the care-o-bot hardware monitoring board.

    Polls temperature, voltage and current channels over a serial line
    and republishes the readings on the `diagnostics` topic.

    Wire protocol: every frame is preceded by a run of 0x55 preamble
    bytes.  A request is 5 bytes [channel, specifier, 0x00, 0x00, crc];
    a reply is 14 bytes [channel, specifier, status, data_hi, data_lo,
    id3, id2, id1, id0, ...] whose final byte is a CRC over the frame.
    The CRC is an 8-bit shift-register CRC, polynomial 0x31, init 0.
    """

    # Readable names per send_channel for voltage and current readings.
    VOLTAGE_NAMES = {0: "Akku Voltage", 1: "Torso Engine Voltage",
                     2: "Torso Logic Voltage", 3: "Tray Logic Voltage",
                     6: "Arm Engine Voltage", 7: "Tray Engine Voltage"}
    CURRENT_NAMES = {1: "Torso Engine Current", 2: "Torso Logic Current",
                     3: "Tray Logic Current", 6: "Arm Engine Current",
                     7: "Tray Engine Current"}

    def __init__(self):
        rospy.init_node('hwboard')
        # get parameters from parameter server (abort if any is missing)
        devicestring_param = self._require_param("devicestring")
        self.head_sensor_param = self._require_param("head_sensor")
        self.eye_sensor_param = self._require_param("eye_sensor")
        self.torso_module_sensor_param = self._require_param("torso_module_sensor")
        self.torso_sensor_param = self._require_param("torso_sensor")
        self.pc_sensor_param = self._require_param("pc_sensor")
        self.engine_sensor_param = self._require_param("engine_sensor")
        # open serial connection
        rospy.loginfo("trying to initializing serial connection")
        try:
            self.s = Serial(port=devicestring_param, baudrate=230400,
                            bytesize=EIGHTBITS, parity=PARITY_NONE,
                            stopbits=STOPBITS_ONE, timeout=3)
        # Bug fix: the original caught serial.serialutil.SerialException,
        # but `serial` is not a bound name under `from serial import *`,
        # so the handler itself raised NameError instead of exiting.
        except SerialException:
            rospy.logerr("Could not initialize serial connection on %s, aborting...", devicestring_param)
            sys.exit()
        rospy.loginfo("serial connection initialized successfully")
        self.s.open()

    @staticmethod
    def _require_param(name):
        """Return private ROS parameter `name`; abort the node if it is missing."""
        if not rospy.has_param("~" + name):
            rospy.logerr("parameter %s does not exist on ROS Parameter Server, aborting...", name)
            sys.exit()
        return rospy.get_param("~" + name)

    @staticmethod
    def _crc8(buff, length):
        """CRC-8 (polynomial 0x31, init 0, MSB first) over buff[0:length].

        This calculation appeared four times verbatim in the original
        reset()/hwboard() bodies.
        """
        crc = 0x00
        for i in range(length):
            data = buff[i]
            for _ in range(8):
                feedback_bit = ((crc ^ data) & 0x80) >> 7
                if feedback_bit == 1:
                    crc = ((crc << 1) & 0xFF) ^ 0x31
                else:
                    crc = (crc << 1) & 0xFF
                data = (data << 1) & 0xFF
        return crc

    def _transceive(self, send_buff_array):
        """Send one 5-byte request and read back one 14-byte reply.

        Fills in the request CRC, retries up to 8 times on preamble or
        CRC errors (cycling the number of preamble bytes through 2..6)
        and returns the raw 14-byte reply as a list of ints.  This
        send/receive loop was duplicated verbatim in reset() and
        hwboard().
        """
        send_buff_array[4] = self._crc8(send_buff_array, 4)
        preamble_bytes = 4
        preamble_error = 1
        crc_error = 1
        retry = 0
        read_buff_array = []
        while (preamble_error == 1 or crc_error == 1) and retry < 8:
            message = ""
            for _ in range(preamble_bytes):
                message += chr(0x55)
            for byte in send_buff_array:
                message += chr(byte)
            self.s.write(message)
            # check for first preamble byte of received message
            read_buff_array = []
            buff = self.s.read(1)
            preamble_count = 0
            for c in buff:
                read_buff_array.append(ord(c))
            if read_buff_array[0] == 0x55:
                # consume the remaining preamble bytes
                while read_buff_array[0] == 0x55 and preamble_count < 10:
                    read_buff_array = []
                    buff = self.s.read(1)
                    for c in buff:
                        read_buff_array.append(ord(c))
                    preamble_count = preamble_count + 1
                buff = self.s.read(13)
                # a valid reply carries between 2 and 6 preamble bytes
                if preamble_count > 6 or preamble_count < 2:
                    preamble_error = 1
                    preamble_bytes = preamble_bytes + 1
                    retry = retry + 1
                    if preamble_bytes == 7:
                        preamble_bytes = 2
                else:
                    # preamble ok, evaluate message
                    preamble_error = 0
                    for c in buff:
                        read_buff_array.append(ord(c))
                    # the CRC over all 14 bytes must come out as 0
                    if self._crc8(read_buff_array, 14) != 0:
                        crc_error = 1
                        preamble_bytes = preamble_bytes + 1
                        retry = retry + 1
                        if preamble_bytes == 7:
                            preamble_bytes = 2
                    else:
                        crc_error = 0
            else:
                # no preamble detected: flush the stale frame and retry.
                # Bug fix: this read used the unbound name `s` instead of
                # `self.s`, raising NameError whenever this branch ran.
                buff = self.s.read(14)
                preamble_error = 1
                preamble_bytes = preamble_bytes + 1
                retry = retry + 1
                if preamble_bytes == 7:
                    preamble_bytes = 2
        return read_buff_array

    def reset(self):
        """Send the board reset command (channel 0xFF, specifier 0x0E)."""
        self._transceive([0xFF, 0x0E, 0x00, 0x00, 0x00])

    @staticmethod
    def _rate_temperature(value):
        """Map a temperature reading to a (diagnostic level, message) pair."""
        if value == 85:
            # 85 degC is the sensor's power-on/error value
            return 1, "sensor damaged"
        if value > 50:
            return 2, "temperature critical"
        if value > 40:
            return 1, "temperature high"
        if value > 10:
            return 0, "temperature ok"
        if value > -1:
            return 1, "temperature low"
        return 2, "temperature critical"

    @staticmethod
    def _rate_voltage(value, channel):
        """Map a voltage reading to (level, message).

        Channel 0 is the battery bus (nominal ~48V window); every other
        channel is rated against the ~24V window.
        """
        if channel == 0:
            if value > 58:
                return 2, "voltage critical"
            if value > 56:
                return 1, "voltage high"
            if value > 44:
                return 0, "voltage ok"
            if value > 42:
                return 1, "voltage low"
            return 2, "voltage critical"
        if value > 27:
            return 2, "voltage critical"
        if value > 25:
            # consistency fix: this message previously read "voltage_high"
            return 1, "voltage high"
        if value > 23:
            return 0, "voltage ok"
        if value > 19:
            return 1, "voltage low"
        return 2, "voltage critical"

    @staticmethod
    def _rate_current(value):
        """Map a current reading to (level, message)."""
        if value > 15:
            return 2, "current critical"
        if value > 10:
            return 1, "current high"
        if value < 0:
            return 2, "current critical"
        return 0, "current ok"

    def hwboard(self):
        """Poll every channel once per second and publish a DiagnosticArray."""
        pub = rospy.Publisher('diagnostics', DiagnosticArray)
        while not rospy.is_shutdown():
            pub_message = DiagnosticArray()
            error_while_reading = 0
            # specifier: 0 = temperature, 3 = voltage, 6 = current
            for send_specifier in range(0, 7, 3):
                if send_specifier == 0:
                    count_range = range(6)
                elif send_specifier == 3:
                    count_range = [0, 1, 2, 3, 6, 7]
                else:
                    count_range = [1, 2, 3, 6, 7]
                for send_channel in count_range:
                    reply = self._transceive([send_channel, send_specifier, 0x00, 0x00, 0x00])
                    read_channel = int(reply[0])
                    read_specifier = int(reply[1])
                    read_status = int(reply[2])
                    read_data = 256 * int(reply[3]) + int(reply[4])
                    read_id = (((reply[5] << 8 | reply[6]) << 8 | reply[7]) << 8) | reply[8]
                    # evaluate received message and scale the raw value
                    if (read_channel == send_channel
                            and read_specifier == send_specifier
                            and read_status in (0, 8)):
                        if send_specifier == 0:
                            read_data = read_data / 10.0    # raw unit: 0.1 degC
                        else:
                            read_data = read_data / 1000.0  # raw unit: mV / mA
                        # Bug fix: this reset was misspelled
                        # `erro_while_reading`, so a single bad reading
                        # flagged every later sensor in the same cycle.
                        error_while_reading = 0
                    else:
                        read_data = 0
                        error_while_reading = 1
                    # prepare status object for publishing
                    status_object = DiagnosticStatus()
                    key_value = KeyValue()
                    if send_specifier == 0:
                        level, status_object.message = self._rate_temperature(read_data)
                        # map the sensor id (from the yaml parameters) to a
                        # readable name; hardware_id strings are normalized to
                        # one format (three inconsistent spellings before)
                        sensor_names = (
                            (self.head_sensor_param, "Head Temperature"),
                            (self.eye_sensor_param, "Eye Camera Temperature"),
                            (self.torso_module_sensor_param, "Torso Module Temperature"),
                            (self.torso_sensor_param, "Torso Temperature"),
                            (self.pc_sensor_param, "PC Temperature"),
                            (self.engine_sensor_param, "Engine Temperature"),
                        )
                        for sensor_id, sensor_name in sensor_names:
                            if read_id == sensor_id:
                                status_object.name = sensor_name
                                status_object.hardware_id = "hwboard_channel = " + str(send_channel)
                                break
                        else:
                            level = 1
                            # typo fix: message said "cannot map if from yaml file"
                            status_object.message = "cannot map id from yaml file to temperature sensor"
                    elif send_specifier == 3:
                        level, status_object.message = self._rate_voltage(read_data, send_channel)
                        status_object.name = self.VOLTAGE_NAMES[send_channel]
                        status_object.hardware_id = "hwboard_channel = " + str(send_channel)
                    else:
                        level, status_object.message = self._rate_current(read_data)
                        status_object.name = self.CURRENT_NAMES[send_channel]
                        status_object.hardware_id = "hwboard_channel = " + str(send_channel)
                    # a transmission error downgrades the reading to a warning
                    if error_while_reading == 1:
                        level = 1
                        status_object.message = "detected error while receiving answer from hardware"
                    # append status object to publishing message
                    status_object.level = level
                    key_value.value = str(read_data)
                    status_object.values.append(key_value)
                    pub_message.status.append(status_object)
            pub.publish(pub_message)
            rospy.sleep(1.0)
#######################################
# Script entry point
#######################################
if __name__ == '__main__':
    # Reset the monitoring board once, then poll sensors and publish
    # diagnostics until ROS shuts down.
    hwb = HwBoard()
    hwb.reset()
    hwb.hwboard()
|
import numpy as np
# fix the random seed for reproducibility
seed = 1337
np.random.seed(seed)
import convnet_models
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils, plot_model
from keras.models import load_model
from keras import backend as K
K.set_image_dim_ordering('th')
def _plot_history(history, modelname, key, label):
    """Plot the train/test curves for history key `key` and save the png.

    Bug fixes vs the original inline plotting code:
    - plt.savefig() now runs *before* plt.show(); show() hands the figure
      to the GUI backend and leaves a blank canvas, so the saved files
      used to be empty.
    - each metric gets its own plt.figure(), so the loss plot no longer
      inherits the accuracy curves.
    """
    plt.figure()
    plt.plot(history.history[key])
    plt.plot(history.history['val_' + key])
    plt.title(modelname + ' ' + label)
    plt.ylabel(label)
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    plt.savefig(modelname + '_' + key + '.png')
    plt.show()

def model_trainer(convnet_models, batch_size=100, modelname='random',
                  epoches=15, verbose=2, generator=False):
    """Train a convnet on the pre-saved numpy arrays and report results.

    :param convnet_models: callable(number_of_classes) returning a
        compiled Keras model
    :param batch_size: minibatch size
    :param modelname: basename for the saved model and plot files
    :param epoches: number of training epochs
    :param verbose: Keras verbosity level
    :param generator: if True, feed data through ImageDataGenerator
        pipelines (light shear augmentation on the training stream)
    """
    # read images
    print('Loading data as numpy array of arrays...')
    X_train = np.load('data_numpy/X_train_data.npy').astype(np.float32)
    X_test = np.load('data_numpy/X_test_data.npy').astype(np.float32)
    Y_train = np.load('data_numpy/Y_train_data.npy')
    Y_test = np.load('data_numpy/Y_test_data.npy')
    Y_test = np_utils.to_categorical(Y_test)
    Y_train = np_utils.to_categorical(Y_train)
    print('Done!')
    # normalize the color channels over the entire data
    # and center the data cloud
    X_train /= 255
    X_test /= 255
    X_train -= np.mean(X_train, axis=0)
    X_test -= np.mean(X_test, axis=0)
    # build the model
    number_of_classes = Y_test.shape[1]
    model = convnet_models(number_of_classes)
    if generator:
        # augment training data on the fly; all other generator options
        # were explicitly set to their library defaults, so only the
        # non-default shear_range is spelled out here
        training_datagen = ImageDataGenerator(shear_range=0.1)
        testing_datagen = ImageDataGenerator()
        # compute quantities required for featurewise normalization
        # (std, mean, and principal components if ZCA whitening is applied)
        training_datagen.fit(X_train)
        testing_datagen.fit(X_train)
        # fit the model on batches with real-time data augmentation
        history = model.fit_generator(
            training_datagen.flow(X_train, Y_train, batch_size=batch_size),
            steps_per_epoch=len(X_train) / batch_size,
            epochs=epoches,
            verbose=verbose,
            validation_data=testing_datagen.flow(X_test, Y_test, batch_size=batch_size),
            validation_steps=len(X_test) / batch_size)
    else:
        # fit the model directly on the arrays
        history = model.fit(X_train, Y_train,
                            validation_data=(X_test, Y_test),
                            epochs=epoches,
                            batch_size=batch_size,
                            verbose=verbose)
    # final evaluation of the model (hoisted out of both branches above)
    scores = model.evaluate(X_test, Y_test, verbose=0)
    print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
    # save model for later use and for visualization
    model.save('saved_models/' + modelname + '.h5')
    #plot_model(model, to_file= '/visual_models/' + modelname + '.png') #TODO: Fix graphviz package
    # visualize the model accuracy/loss vs epochs
    print(history.history.keys())
    _plot_history(history, modelname, 'acc', 'accuracy')
    _plot_history(history, modelname, 'loss', 'loss')
# Train the custom architecture for 250 epochs without augmentation,
# then reload the persisted model for downstream use.
model_trainer(convnet_models.custom, modelname = 'custom_model',
              epoches = 250, batch_size = 128, verbose = 2, generator = False)
custom_cnn_model = load_model('saved_models/custom_model.h5')
import pandas as pd
import matplotlib.pyplot as plt

# Load the two-feature binary-classification data; columns are
# [test 1 score, test 2 score, class label].
dataframe = pd.read_csv('data/problem1data.txt', header=None)
# Split the rows by class label (column 2).
# Fix: these two lines were unfinished "put your code here" placeholders
# (an assignment with no right-hand side is a syntax error).
datasetClass0 = dataframe.loc[dataframe[2] == 0]
datasetClass1 = dataframe.loc[dataframe[2] == 1]
figure = plt.figure()
axis = figure.add_subplot(111)
axis.scatter(datasetClass0[0], datasetClass0[1], marker='o', label='Class 0')
axis.scatter(datasetClass1[0], datasetClass1[1], marker='x', label='Class 1')
plt.xlabel('Microchip Test 1')
plt.ylabel('Microchip Test 2')  # fix: y label was copy-pasted as "Test 1"
plt.title('Plot of training data')
plt.legend()
plt.show()
|
import sqlite3
# Connect in autocommit mode (isolation_level=None): every execute()
# takes effect immediately, so no explicit commit is needed.
conn = sqlite3.connect('./resource/db_task.db',isolation_level=None)
cur = conn.cursor()
# Student master table: id, name, phone number, region.
cur.execute('CREATE TABLE IF NOT EXISTS student_info( \
student_id INTEGER PRIMARY KEY, \
name TEXT, \
tel TEXT, \
region TEXT)')
# Grades table: one row per student; each score is constrained to
# 0..100 by CHECK clauses, student_id references student_info.
cur.execute('CREATE TABLE IF NOT EXISTS student( \
student_id, \
os INTEGER NOT NULL CHECK (os>=0 and os<=100), \
cv INTEGER NOT NULL CHECK (cv>=0 and cv<=100), \
db INTEGER NOT NULL CHECK (db>=0 and db<=100), \
FOREIGN KEY(student_id) \
REFERENCES student_info(student_id))')
# In-memory cache of known student ids (kept in sync by the CRUD helpers).
chk_student = []
cur.execute('select * from student_info')
for row in cur.fetchall():
    chk_student.append(row[0])
# print(chk_student)
# In-memory cache of student ids that already have grades recorded.
chk_grade = []
cur.execute('select * from student')
for row in cur.fetchall():
    chk_grade.append(row[0])
# print(chk_grade)
################################# insert student info ####################################
def student_insert():
    """Prompt for a new student's details and insert them into student_info."""
    new_id = int(input('학번 입력 : '))
    new_name = input('이름 입력 : ')
    new_tel = input('전화번호 입력 : ')
    new_region = input('주소(지역) 입력 : ')
    print()
    cur.execute('INSERT INTO student_info VALUES(?,?,?,?)',
                (new_id, new_name, new_tel, new_region))
    chk_student.append(new_id)
################################# insert grades ####################################
def grade_insert():
    """Look up a student by id and record their OS/CV/DB scores.

    Loops until a non-existent id is entered.  Bug fix: the except
    clause caught the unqualified name `IntegrityError`, which is not
    defined in this module and would itself raise NameError; it now
    catches sqlite3.IntegrityError, which sqlite3 raises when a score
    violates the 0..100 CHECK constraints.
    """
    while True:
        id_chk = int(input('학번 검색 : '))
        print()
        if id_chk in chk_student:
            # echo the student's record plus grade-entry status
            cur.execute('SELECT * FROM student_info WHERE student_id = ?',(id_chk,))
            for student in cur.fetchall():
                print(student[0], end=', ')
                print(student[1], end=', ')
                print(student[2], end=', ')
                print(student[3], end=', ')
                if id_chk in chk_grade:
                    print('입력완료')
                else:
                    print('미입력')
            os = int(input('운영체제 : '))
            cv = int(input('컴퓨터비전 : '))
            db = int(input('데이터베이스 : '))
            try:
                cur.execute('INSERT INTO student VALUES(?,?,?,?)',(id_chk, os, cv, db))
                print('성적 입력 성공!!')
                chk_grade.append(id_chk)
            except sqlite3.IntegrityError:  # fix: was the unbound name IntegrityError
                print('0~100점 사이의 점수만 입력해주세요.')
                continue
            print()
        else:
            print('존재하지 않는 학번입니다..')
            break
################################# list students ####################################
def student_select():
    """Print every student plus whether their grades have been entered."""
    print('-------------------------------------------------')
    print('학번\t이름\t전화번호\t주소\t성적입력여부')
    print('-------------------------------------------------')
    cur.execute('SELECT * FROM student_info')
    rows = cur.fetchall()
    stu_cnt = len(rows)
    for student in rows:
        print(student[0], end='\t')
        print(student[1], end='\t')
        print(student[2], end='\t')
        print(student[3], end='\t')
        print('입력완료' if student[0] in chk_grade else '미입력')
    print()
    print(f'전체 학생수 : {stu_cnt}명')
################################# list grades ####################################
def grade_select():
    """Print each graded student's scores, their total and their average."""
    print('--------------------------------------------------------------')
    print('학번\t이름\t운영체제 컴퓨터비전 데이터베이스 총점\t평균')
    print('--------------------------------------------------------------')
    cur.execute('SELECT s.student_id, s.name, g.os, g.cv, g.db \
FROM student g, student_info s \
WHERE g.student_id = s.student_id')
    grade_cnt = 0
    for sid, name, os_score, cv_score, db_score in cur.fetchall():
        grade_cnt += 1
        print(sid, end='\t')
        print(name, end='\t')
        print(os_score, end='\t ')
        print(cv_score, end='\t\t')
        print(db_score, end='\t ')
        total = os_score + cv_score + db_score
        print(total, end='\t')
        print(format(total / 3, '.2f'))
    print()
    print(f'전체 학생수: {grade_cnt}명')
################################# update student info ####################################
def student_update():
    """Interactively edit the name/phone/region of an existing student."""
    sid = int(input('학번 입력 : '))
    if sid not in chk_student:
        print('학번이 존재하지 않습니다..')
        return
    while True:
        print('-----------------------------------------')
        print('1. 이름 변경')
        print('2. 전화번호 변경')
        print('3. 주소 변경')
        print('4. 수정 완료')
        print('-----------------------------------------')
        choice = int(input('> 수정할 내용 : '))
        if choice == 1:
            new_value = input('변경할 이름 입력 : ')
            cur.execute('UPDATE student_info SET name=? WHERE student_id=?',(new_value, sid,))
        elif choice == 2:
            new_value = input('변경할 전화번호 입력 : ')
            cur.execute('UPDATE student_info SET tel=? WHERE student_id=?',(new_value, sid,))
        elif choice == 3:
            new_value = input('변경할 주소 입력 : ')
            cur.execute('UPDATE student_info SET region=? WHERE student_id=?',(new_value, sid,))
        elif choice == 4:
            break
        else:
            print('1부터 4의 숫자만 입력하세요.')
################################# update grades ####################################
def grade_update():
    """Interactively edit the OS/CV/DB scores of a student with grades."""
    sid = int(input('학번 입력 : '))
    if sid not in chk_student:
        print('학번이 존재하지 않습니다..')
        return
    if sid not in chk_grade:
        print('성적 정보를 먼저 입력해주세요.')
        return
    # menu number -> grades-table column (internal map, never user input)
    subject_columns = {1: 'os', 2: 'cv', 3: 'db'}
    while True:
        print('-----------------------------------------')
        print('1. 운영체제 점수 수정')
        print('2. 컴퓨터비전 점수 수정')
        print('3. 데이터베이스 점수 수정')
        print('4. 수정 완료')
        print('-----------------------------------------')
        choice = int(input('> 수정할 과목 : '))
        if choice == 4:
            break
        if choice in subject_columns:
            new_score = int(input('변경할 점수 입력 : '))
            cur.execute('UPDATE student SET %s=? WHERE student_id=?' % subject_columns[choice],
                        (new_score, sid,))
        else:
            print('1부터 4의 숫자만 입력하세요.')
################################# delete student ####################################
def student_delete():
    """Remove a student (and their grades, if any) by id."""
    sid = int(input('학번 입력 : '))
    if sid not in chk_student:
        print('학번이 존재하지 않습니다.')
        return
    if sid in chk_grade:
        # no ON DELETE CASCADE on the foreign key, so remove grades first
        cur.execute('DELETE FROM student WHERE student_id = ?',(sid,))
        chk_grade.remove(sid)
    cur.execute('DELETE FROM student_info WHERE student_id = ?',(sid,))
    chk_student.remove(sid)
    print(f'학번이 {sid}인 학생이 삭제되었습니다.')
################################# menu screen ####################################
def menu_display():
    """Show the main menu and return the user's raw selection string."""
    menu_lines = (
        '-----------------------------------------',
        '              학사관리 시스템            ',
        '-----------------------------------------',
        '1. 학생 정보 입력',
        '2. 학생 정보 출력',
        '3. 학생 정보 수정',
        '4. 학생 정보 삭제',
        '5. 학생 성적 입력',
        '6. 학생 성적 출력',
        '7. 학생 성적 수정',
        'X. 프로그램 종료',
        '-----------------------------------------',
    )
    for line in menu_lines:
        print(line)
    return input('메뉴 선택 : ')
############################################################################
# Main menu loop: dispatch the user's selection until 'x'/'X' quits.
menu_handlers = {
    '1': student_insert,
    '2': student_select,
    '3': student_update,
    '4': student_delete,
    '5': grade_insert,
    '6': grade_select,
    '7': grade_update,
}
while True:
    menu = menu_display()
    if menu in ('x', 'X'):
        print('종료')
        break
    handler = menu_handlers.get(menu)
    if handler is not None:
        handler()
    else:
        print('메뉴를 다시 선택해주세요.')
conn.close()
|
# run in CMSSW_9_3_1
from WMCore.Configuration import Configuration
# CRAB3 configuration for private gridpack-based MC production
# (GEN-SIM-LHE step); per-gridpack fields are overwritten in __main__.
config = Configuration()
config.section_("General")
config.General.requestName = "tmp"  # placeholder; set per gridpack below
config.General.workArea = 'crab_dim6top_18Mai18'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName = 'PrivateMC'  # generate events, no input dataset
config.JobType.psetName = '../cfg/2017/GEN-SIM-LHE_LO_0j_CMSSW_9_3_1.py'
config.JobType.disableAutomaticOutputCollection = False
config.section_("Data")
config.Data.splitting = 'EventBased'
config.Data.unitsPerJob = 5000     # events per job
config.Data.totalUnits = 5000000   # total events requested
config.Data.publication = True
config.Data.publishDBS = 'phys03'  # user DBS instance
#config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
config.section_("Site")
config.Site.storageSite = 'T2_AT_Vienna'
#config.Site.whitelist = ['T2_*']
config.section_("User")
if __name__ == '__main__':
    # Submit one CRAB task per gridpack tarball.
    gridpack_dir = "/afs/hephy.at/data/llechner01/TTXPheno/gridpacks/18052018_ref/ttgamma/order2/"
    import os
    from CRABAPI.RawCommand import crabCommand
    for outputDatasetTag, gridpack in [
            ('dim6top_18Mai18_ref', 'ttgamma0j_rwgt_slc6_amd64_gcc630_CMSSW_9_3_0_tarball.tar.xz'),
            ]:
        config.Data.outputDatasetTag = outputDatasetTag
        config.JobType.inputFiles = [os.path.join(gridpack_dir, gridpack)]
        # Derive the request name from the gridpack file name.
        # Bug fix: gridpack.rstrip('.tar.xz') strips any trailing run of
        # the characters {., t, a, r, x, z} -- not the literal suffix --
        # and can eat legitimate trailing letters of the base name; strip
        # the exact '.tar.xz' suffix instead.
        if gridpack.endswith('.tar.xz'):
            base = gridpack[:-len('.tar.xz')]
        else:
            base = gridpack
        # '-' and '.' are not allowed in CRAB request names
        config.General.requestName = base.replace('-', 'm').replace('.', 'p')
        config.Data.outputPrimaryDataset = config.General.requestName  # dataset name
        config.JobType.pyCfgParams = ['gridpack=../' + gridpack]
        #crabCommand('submit', '--dryrun', config = config)
        crabCommand('submit', config = config)
|
from requests import get, post
from io import BytesIO
from json import loads, dumps
import base64
import api
from tools import get_secret_key, check_file, decode_image, mkdir, isExist, getFile, createFile
from api import detect_faces, compare_photos, extract_descriptor, get_landmarks, swap_face, swap_video
from flask import Flask, render_template, request, session, redirect, make_response, send_file, jsonify, url_for
#auth = HTTPBasicAuth()
#r = redis.StrictRedis(host='127.0.0.1', port=6379)
# Flask application setup: session secret key and upload size limit.
app = Flask(__name__)
app.secret_key = get_secret_key()
app.config["MAX_CONTENT_LENGTH"] = 1024 * 1024 * 16  # cap request bodies at 16 MiB
##@auth.get_password
# def get_password(username):
# user_info = r.get(username)
# if user_info == None:
# return None
# else:
# user_info = loads(user_info)
# return user_info[0]['password']
#@auth.error_handler
def unauthorized():
    """Build a 401 JSON response for unauthenticated requests."""
    body = jsonify({'error': 'Unauthorized access'})
    return make_response(body, 401)
@app.errorhandler(404)
def not_found(error):
    """JSON 404 handler registered on the Flask app."""
    payload = jsonify({'error': 'Not found'})
    return make_response(payload, 404)
@app.route('/api/face_detect', methods=['POST'])
#@auth.login_required
def face_detect():
    """Detect faces in the uploaded 'img' file and return bounding boxes as JSON."""
    img = request.files['img']
    if check_file(img):
        try:
            # detect_faces returns a tuple; element [1] holds the face boxes.
            faces = detect_faces(decode_image(img))[1]
            # NOTE(review): `4*faces[i].tolist()` repeats the coordinate list
            # four times rather than scaling each coordinate by 4. If detection
            # runs on a downscaled image and the intent was rescaling, this is
            # a bug — confirm against the client code.
            return jsonify({'result': True, 'faces':{i: 4*faces[i].tolist() for i in range(len(faces))}})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e)})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed'})
@app.route('/api/compare_faces', methods=['POST'])
#@auth.login_required
def compare_faces():
    """Compare two uploaded face images and return the similarity measure.

    Form fields: img1, img2 (files); optional 'alg' selects the algorithm.
    """
    img1 = request.files['img1']
    img2 = request.files['img2']
    if check_file(img1) and check_file(img2):
        try:
            # Unlike the other endpoints, the raw file objects are passed
            # without decode_image — presumably compare_photos decodes
            # internally; verify against its implementation.
            mn = compare_photos(img1, img2, request.values.get("alg"))
            return jsonify({"result": True, "mn": mn})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e)})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed'})
@app.route('/api/facetovec', methods=['POST'])
#@auth.login_required
def facetovec():
    """Extract face descriptor vectors from the uploaded 'img' and return them as JSON."""
    img = request.files['img']
    if check_file(img):
        try:
            descs = extract_descriptor(decode_image(img))
            # One descriptor list per detected face, keyed by index.
            return jsonify({"result": True, 'descs':{i: list(descs[i]) for i in range(len(descs))}})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e)})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed'})
@app.route('/api/face_landmarks', methods=['POST'])
#@auth.login_required
def face_landmarks():
    """Return facial landmark coordinates for each face in the uploaded 'img'."""
    img = request.files['img']
    if check_file(img):
        try:
            # Second argument False — meaning defined by get_landmarks; confirm
            # (likely a draw/annotate flag).
            descs = get_landmarks(decode_image(img), False)
            return jsonify({"result": True, 'descs':{i: descs[i].tolist() for i in range(len(descs))}})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e)})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed'})
@app.route('/api/face_swap', methods=['POST'])
#@auth.login_required
def face_swap():
    """Swap the faces of two uploaded images and return a download URL."""
    img1 = request.files['img1']
    img2 = request.files['img2']
    #video = request.files['video']
    if check_file(img1) and check_file(img2):
        try:
            # NOTE(review): `auth` is commented out at module level (see
            # '#auth = HTTPBasicAuth()'), so every auth.username() call here
            # raises NameError, which the except below converts into an error
            # response — this endpoint can never succeed as written. Restore
            # the auth object (and @auth.login_required) or drop these calls.
            name = swap_face(createFile(auth.username(), img1), createFile(auth.username(), img2), auth.username())
            return jsonify({"result": True, 'URL':url_for('download_image',_external=True, username=auth.username(), filename=name)})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e)})
    # elif check_file(video):
    #     try:
    #         swap_face(decode_image(img1), decode_image(img2), 1)
    #     except Exception as e:
    #         return jsonify({"result": False, "error_msg": str(e)})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed'})
@app.route('/api/nearest/<dataset>', methods=['POST'])
#@auth.login_required
def search_nearest(dataset):
    """Find the nearest known face to a base64-encoded image posted in the form.

    NOTE(review): the `dataset` path parameter is never used — presumably the
    lookup should be restricted to that dataset; confirm against api.get_user.
    """
    img = base64.b64decode(request.form['img'])
    #print(img)
    # Request correlation id, echoed back in every response.
    id_i = request.form['id']
    if img:
        try:
            # NOTE(review): `img` is raw bytes here, while other endpoints pass
            # werkzeug file objects to decode_image — verify it accepts bytes.
            descs = api.get_user(decode_image(img))
            return jsonify({"result": True, 'name':descs[0][1]['name'], 'id':id_i})
        except Exception as e:
            return jsonify({"result": False, "error_msg": str(e),'id':id_i})
    else:
        return jsonify({'result': False, 'error_msg': 'File load failed', 'id':id_i})
@app.route('/api/enable_dataset/<dataset>', methods=['POST'])
#@auth.login_required
def enable_dataset(dataset):
    """Rebuild the module-global VP-tree of face descriptors from the named dataset."""
    # Replaces api.user_tree in place; subsequent /api/nearest queries use it.
    api.user_tree = api.vptree.VPTree(api.loadDescriptors(dataset), api.cosine)
    return jsonify({'result': True})
# @app.route('/api/register', methods=['POST'])
# def register():
# try:
# username = request.values.get("username")
# if get_password(username) == None:
# password = get_secret_key()
# user_info = dumps([{'password':password, 'datasets':[]}])
# r.set(username, user_info)
# mkdir(username)
# return jsonify({"result": True, "username": username, "password":password, "datasets":[]})
# else:
# return jsonify({"result": False, "error_msg": 'This username already exists'})
# except Exception as e:
# return jsonify({"result": False, "error_msg": str(e)})
@app.route('/api/download/<username>/<filename>', methods=['GET'])
#@auth.login_required
def download_image(username, filename):
    """Serve a previously generated file belonging to the authenticated user.

    NOTE(review): `auth` is commented out at module level, so auth.username()
    raises NameError here and every request falls into the except branch.
    Restore the auth object, or remove the ownership check deliberately.
    """
    try:
        # Users may only download their own files.
        if username != auth.username():
            return make_response(jsonify({'error': 'Not allowed acces'}), 403)
        if not isExist(username, filename):
            return make_response(jsonify({'error': 'File not found'}), 404)
        return send_file(getFile(username, filename))
    except Exception as e:
        return jsonify({"result": False, "error_msg": str(e)})
if __name__ == '__main__':
    # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger to the
    # whole network — disable debug for any non-development deployment.
    app.run(host='0.0.0.0', port=5050, debug=True)
|
# create tuples
coordinates = (4, 5)
print(coordinates[1])
coordinates2 = [(4, 5), (1, 2), (5, 7)]
print(coordinates2)
# Tuples are immutable: assigning to an element raises TypeError.
# BUG FIX: the original script crashed here mid-demo; catch the exception so
# the immutability is demonstrated and the script still runs to completion.
try:
    coordinates[1] = 2
except TypeError as exc:
    print("cannot modify a tuple:", exc)
print(coordinates[1])
|
#!/usr/bin/env python3
#ensure the "mut_files" directory exists in the working directory.
import pysam
import vcf
import sys
import argparse
import pybedtools
from pybedtools import BedTool
def load_muts():
    """Read mut_files/mut_M1.txt .. mut_M8.txt into per-sample [scaffold, site] lists."""
    all_samples = []
    for sample_index in range(1, 9):
        path = "mut_files/mut_M" + str(sample_index) + ".txt"
        with open(path) as handle:
            records = []
            for line in handle:
                fields = line.split()
                records.append([fields[0], int(fields[1])])
        all_samples.append(records)
    # One list of [scaffold, site] entries per sample (list of lists of lists).
    return all_samples
def load_vcf(vcffile = "repfiltered_only_mutated.vcf.gz"):
    """Index a VCF as {chrom: {pos: record}} for O(1) per-site lookups."""
    by_chrom = dict()
    for rec in vcf.Reader(filename = vcffile):
        by_chrom.setdefault(rec.CHROM, dict())[rec.POS] = rec
    return by_chrom
def load_sam(samfile = "fixed_alignment.bam"):
    """Open a BAM file for random access ('rb' = read, binary/BAM)."""
    return pysam.AlignmentFile(samfile, "rb")
# def load_bed(bedfile):
# bed = BedTool(bedfile)
# bedranges = dict() #dictionary with bedranges[chr] = [range(x,y), range(z,a), ...]
# for interval in bed:
# l = bedranges.setdefault(interval.chrom, []) #list
# bedranges[interval.chrom] = l + [range(interval.start+1, interval.stop+1)] #switch BED to 1-based
# return bedranges
def load_repeats(repeatfile):
    """Parse a whitespace-delimited file of repeat-region sites into [scaffold, site] pairs."""
    with open(repeatfile) as handle:
        return [[parts[0], int(parts[1])]
                for parts in (line.split() for line in handle)]
def position_depth(samfile, chr, position):
    """Return the read depth (number of segments) at a single 1-based position.

    Raises ValueError if pysam returns more than one pileup column or a column
    for a different position than requested.
    """
    regionstr = str(chr) + ':' + str(position) + '-' + str(position)
    # truncate=True restricts pileup columns to exactly the requested region.
    cols = [c for c in samfile.pileup(region = regionstr, truncate = True)]
    if len(cols) == 0:
        # No reads cover this position.
        return 0
    elif len(cols) > 1:
        raise ValueError("There were too many columns returned for this position:" + regionstr)
    elif cols[0].reference_pos != int(position) - 1:
        # pysam reference_pos is 0-based; the region string is 1-based.
        raise ValueError("This pileup is for the wrong position:" + regionstr)
    else:
        return cols[0].nsegments
"""
scaffold, site, original_genotype, mutated_genotype, depth, branch_mutated, samples_mutated, mutation_recovered
"""
def generate_table_line(line, muts, vcf, sam, repeats, dng = False, norepfilter = False):
    """Build one output row for a simulated mutation, or None if no sample carries it.

    line: one row of the master mutation table (scaffold, site, from-GT, to-GT).
    muts: per-sample [scaffold, site] lists from load_muts().
    vcf:  {chrom: {pos: record}} mapping from load_vcf().
    sam:  open pysam AlignmentFile, used for depth lookups.
    repeats: [scaffold, site] pairs inside repeat regions.
    dng:  VCF samples are named 'SM/MX' (dng-style) instead of 'MXa'.
    norepfilter: samples are the replicate trios MXa/MXb/MXc; a mutation counts
    as recovered only if every mutated sample's trio contains at least one
    replicate carrying the mutated genotype.
    """
    l = line.rstrip().split()
    loc = [l[0], int(l[1])]
    gt = l[2:4]
    togt = set(l[4:6])
    depth = position_depth(sam, l[0], l[1])
    #in dng, there is SM/M1, SM/M2, etc. Otherwise, the samples are M1a, M2a, M3a ... etc
    if norepfilter:
        mutated_samples = ["M" + str(i) + j for i in range(1,9) for j in ["a","b","c"] if loc in muts[i - 1]]
    elif dng:
        mutated_samples = ["SM/M" + str(i) for i in range(1,9) if loc in muts[i-1]] #sample names are 1-based
    else:
        mutated_samples = ["M" + str(i) + "a" for i in range(1,9) if loc in muts[i-1]]
    if mutated_samples == []:
        # This site was not mutated in any sample; caller skips it.
        return None
    inrepeat = (loc in repeats)
    recovered = False
    nunknown = 0
    if loc[0] in vcf:
        chrd = vcf[loc[0]]
        if loc[1] in chrd:
            record = chrd[loc[1]]
            nunknown = record.num_unknown
            # gts = [record.genotype(s).gt_bases for s in mutated_samples]
            # if not None in gts:
            # gts = [set(g.split("/")) for g in gts]
            if norepfilter:
                #no rep filter
                vcfgts = [record.genotype(s).gt_bases for s in mutated_samples]
                # Group the flat replicate list into (a, b, c) trios per sample.
                vcfgts = list(zip(*[iter(vcfgts)]*3)) #list of tuples of len 3
                for s in vcfgts:
                    # Trio entirely uncalled: not recovered.
                    if all([x == None for x in s]):
                        break
                    # NOTE(review): a trio mixing None and called genotypes will
                    # raise AttributeError on x.split here — confirm that mixed
                    # trios cannot occur upstream.
                    if all([set(x.split("/")) != togt for x in s]):
                        break
                else: #made it thru the loop, count it as recovered
                    recovered = True
            else:
                for s in mutated_samples:
                    vcfgt = record.genotype(s).gt_bases
                    if vcfgt == None:
                        break
                    vcfg = set(vcfgt.split("/"))
                    if vcfg != togt:
                        break
                else: #if we make it through the loop, we recovered the mutation
                    recovered = True
    return loc[0], str(loc[1]), ''.join(gt), ''.join(togt), str(depth), ','.join(mutated_samples), str(recovered), str(inrepeat), str(nunknown)
def argparser():
    """Define and parse the command-line interface for this script."""
    cli = argparse.ArgumentParser(description =
        """
        Generate a table to estimate callability using a mutation table, VCF and SAM file.
        The mutation tables must be in a subdirectory called mut_files.
        The output table is printed to STDOUT.
        """)
    #parser.add_argument("-m","--mutfile")
    cli.add_argument("-v","--vcffile", help = "vcf file")
    cli.add_argument("-s","--samfile", help = "sam/bam file")
    # parser.add_argument("-b","--bedfile", help = "BED file containing repeat regions")
    cli.add_argument("-r","--repeatfile", help = "File containing mutated locations in repeat regions.")
    cli.add_argument("--dng", action = 'store_true', help = "Expect dng-style VCF, where the sample is coded as SM/MX rather than MXa")
    cli.add_argument("--norepfilter", action = 'store_true', help = "The data is not replicate filtered. If any replicate is correct, the mutation will count as detected.")
    return cli.parse_args()
def main():
    """Entry point: load inputs, then emit one table row per detected mutation to stdout."""
    args = argparser()
    vcffile = args.vcffile
    samfile = args.samfile
    bedfile = args.repeatfile
    muts = load_muts() #open all mut files and load into list
    # Fall back to the loaders' built-in default filenames when options are omitted.
    vcf = load_vcf(vcffile) if vcffile is not None else load_vcf()
    sam = load_sam(samfile) if samfile is not None else load_sam()
    # NOTE(review): load_repeats(None) fails if -r/--repeatfile is omitted —
    # the option is effectively required; confirm and mark it required=True.
    repeats = load_repeats(bedfile)
    mutfile = "mut_files/mutfile.txt" #iterate through full list and generate table
    print("\t".join(["scaffold","site","original_genotype","mutated_genotype","depth","samples_mutated", "mutation_recovered", "in_repeat_region", "num_unknown_gts"]))
    with open(mutfile) as fh:
        for l in fh:
            outline = generate_table_line(l, muts, vcf, sam, repeats, args.dng, args.norepfilter)
            if outline != None:
                print("\t".join(outline))
if __name__ == '__main__':
    main()
|
from datetime import date
import boundaries
# Register the Guelph, Ontario ward boundary set with the boundaries loader.
boundaries.register('Guelph wards',
    domain='Guelph, ON',
    last_updated=date(2012, 5, 15),
    # Ward names and IDs come from the shapefile's WARD attribute.
    name_func=lambda f: 'Ward %s' % f.get('WARD'),
    id_func=boundaries.attr('WARD'),
    authority='City of Guelph',
    encoding='iso-8859-1',
    metadata={'geographic_code': '3523008'},
)
|
import itertools
import string
import ast
import random
from beatriz import *
from mimi import *
#Parametros: [lista cromosoma, int n, arreglo 2D matrizA, arreglo 2D matrizB]
def calcularAptitud(cromosoma, n, matrizA, matrizB):
    """Compute the QAP-style fitness of a chromosome.

    Sums A[renglon][columna] * B[perm(columna)][perm(renglon)] over every cell,
    where perm maps the chromosome's letters to 0-based positions.

    cromosoma: sequence of distinct letters 'A'..; n: matrix dimension;
    matrizA / matrizB: 2-D matrices accessed through obtenerElemMatriz.
    """
    # Letter -> 1-based position in the permutation alphabet.
    alfabeto = {"A":1, "B":2, "C":3, "D":4, "E":5, "F":6, "G":7, "H":8, "I":9, "J":10, "K":11, "L":12, "M":13, "N":14, "O":15, "P":16,
                "Q":17, "R":18, "S":19, "T":20, "U":21, "V":22, "W":23, "X":24, "Y":25, "Z":26}
    resultadoAptitud = 0
    # BUG FIX: xrange() is Python-2-only; range() iterates identically under
    # both Python 2 and 3, so the module also runs on modern interpreters.
    for columna in range(0, n):
        for renglon in range(0, n):
            elementoA = obtenerElemMatriz(matrizA, renglon, columna)
            # Map chromosome letters to 0-based matrix indices.
            valorI = alfabeto[cromosoma[columna]] - 1
            valorK = alfabeto[cromosoma[renglon]] - 1
            elementoB = obtenerElemMatriz(matrizB, valorI, valorK)
            resultadoAptitud += elementoA * elementoB
    return resultadoAptitud
def calcularProbabilidadUnitaria(aptitudesGeneracion, sumaAptitudes):
    """Return each fitness divided by the total: per-chromosome selection probabilities.

    BUG FIX: the original used Python-2-only xrange(); a comprehension over the
    list itself produces identical results and also runs under Python 3.
    """
    total = float(sumaAptitudes)
    return [float(aptitud) / total for aptitud in aptitudesGeneracion]
def calcularProbabilidadAcumulada(probabilidadesUnitarias):
    """Build the cumulative-probability ladder [0, p0, p0+p1, ...] for roulette selection.

    The final entry is rounded so accumulated floating-point error cannot leave
    the ladder short of (or past) 1.0.

    BUG FIX: replaced Python-2-only xrange() with enumerate(); iteration
    behaviour is otherwise identical.
    """
    sumaAcumulada = 0
    probabilidadesAcumuladas = [0]
    ultimo = len(probabilidadesUnitarias) - 1
    for indice, probabilidad in enumerate(probabilidadesUnitarias):
        sumaAcumulada += probabilidad
        if indice == ultimo:
            sumaAcumulada = round(sumaAcumulada)
        probabilidadesAcumuladas.append(sumaAcumulada)
    return probabilidadesAcumuladas
def mutarGeneracionCruzada(nuevosCromosomas, pm, n):
    """Apply swap mutation to each crossed chromosome with probability pm.

    Two distinct gene positions are drawn for every chromosome; the swap only
    happens when a fresh random decimal (from generaAleatDec) falls below pm.
    """
    resultadoMutacion = []
    for cromosoma in nuevosCromosomas:
        genes = list(cromosoma)
        marcaA = random.randrange(n)
        marcaB = random.randrange(n)
        # Re-draw until the two marks are distinct positions.
        while marcaA == marcaB:
            marcaB = random.randrange(n)
        numDecAleat = generaAleatDec(1)
        num = numDecAleat.get(1)
        if num < pm:
            genes[marcaA], genes[marcaB] = genes[marcaB], genes[marcaA]
        resultadoMutacion.append(tuple(genes))
    return resultadoMutacion
def crearPrimeraGeneracion(n):
    """Create the initial population: n random permutations of the first n letters.

    Returns a list of n tuples, each a shuffled permutation of 'A'..chr(64+n).
    Supports n up to 26.

    BUG FIX: replaced Python-2-only xrange() with range(); iteration behaviour
    is identical under both interpreters.
    """
    # chr(64 + x) yields 'A' for x == 1 ... 'Z' for x == 26, replacing the
    # original hand-written 26-entry dictionary.
    cromosoma = [chr(64 + x) for x in range(1, n + 1)]
    generacion = []
    for _ in range(n):
        # The same working list is shuffled repeatedly; each snapshot is
        # frozen as a tuple (matching the original behaviour).
        random.shuffle(cromosoma)
        generacion.append(tuple(cromosoma))
    return generacion
import sys
# Greet the first command-line argument when given, otherwise print the
# generic greeting (note: deliberately no comma in the fallback string).
message = ("Hello, " + sys.argv[1] + "!") if len(sys.argv) > 1 else "Hello World!"
print(message)
from machine import Pin,DAC,PWM
from time import sleep
buzzer = PWM(Pin(25))  # buzzer driven by PWM on GPIO 25
i=0  # NOTE(review): unused variable; remove or use
while(1):
    # NOTE(review): deinit() inside the loop releases the PWM channel, so the
    # next buzzer.freq(10) call operates on a de-initialised PWM object — on
    # many MicroPython ports this raises or silently does nothing. Confirm the
    # intent (single beep vs. repeating tone) and either re-create the PWM each
    # pass or move deinit() after the loop.
    buzzer.freq(10)
    sleep(0.5)
    buzzer.deinit()
|
# Generate OpenCV Haar-training description files: 550 positive samples with a
# fixed 100x40 object window, and 512 background (negative) image paths.
# BUG FIX: both file handles were opened and never closed; 'with' guarantees
# they are flushed and closed. The two consecutive negative loops (0-499 and
# 500-511) are merged into one range(512) — the output bytes are identical.
with open('cars.info', 'w+') as f:
    for i in range(550):
        f.write('pos/pos-' + str(i) + '.pgm 1 0 0 100 40\n')
with open('bg.txt', 'w+') as fbg:
    for i in range(512):
        fbg.write('neg/neg-' + str(i) + '.pgm\n')
|
#!/usr/bin/env python3
##
## EPITECH PROJECT, 2020
## 107transfer_2019
## File description:
## unit_test
##
import unittest
import error
import function
class TestStringMethods(unittest.TestCase):
    """Tests for the 107transfer usage text, argument validation (module
    `error`) and polynomial evaluation (module `function`).

    Exit-code tests expect 84, the project's error-exit convention.
    """
    def setUp(self):
        # Canonical usage text; test_usage compares it verbatim.
        self.var = "USAGE\n" \
            "\t./107transfer [num den]*\n" \
            "\nDESCRIPTION\n" \
            "\tnum\tpolynomial numerator defined by its coefficients\n" \
            "\tden\tpolynomial denominator defined by its coefficients"
        pass
    def test_usage(self):
        self.assertEqual(self.var, "USAGE\n"
            "\t./107transfer [num den]*\n"
            "\nDESCRIPTION\n"
            "\tnum\tpolynomial numerator defined by its coefficients\n"
            "\tden\tpolynomial denominator defined by its coefficients")
    # --- error.is_float ---------------------------------------------------
    def test_is_float(self):
        self.assertEqual(error.is_float("1.5"), 1)
    def test_is_not_float(self):
        self.assertNotEqual(error.is_float("1.5"), 0)
    # --- error.error_nbr_arg: only an odd count >= 3 is accepted ----------
    def test_nbr_arg(self):
        with self.assertRaises(SystemExit) as cm:
            error.error_nbr_arg(1)
        self.assertEqual(cm.exception.code, 84)
    def test_nbr_arg1(self):
        with self.assertRaises(SystemExit) as cm:
            error.error_nbr_arg(2)
        self.assertEqual(cm.exception.code, 84)
    def test_nbr_arg2(self):
        with self.assertRaises(SystemExit) as cm:
            error.error_nbr_arg(4)
        self.assertEqual(cm.exception.code, 84)
    def test_nbr_arg3(self):
        self.assertEqual(error.error_nbr_arg(3), 0)
    # --- error.error_arg --------------------------------------------------
    def test_is_number(self):
        with self.assertRaises(SystemExit) as cm:
            error.error_arg(5)
        self.assertEqual(cm.exception.code, 84)
    # --- function.calc_poly: numerator coefficients [2, 4, 6] --------------
    def test_calc_poly_num(self):
        self.assertEqual(function.calc_poly([[2, 4, 6]], 0.000), [2.0])
    def test_calc_poly_num1(self):
        self.assertEqual(function.calc_poly([[2, 4, 6]], 0.001), [2.004006])
    def test_calc_poly_num2(self):
        self.assertEqual(function.calc_poly([[2, 4, 6]], 0.002), [2.008024])
    def test_calc_poly_num3(self):
        self.assertEqual(function.calc_poly([[2, 4, 6]], 0.003), [2.012054])
    def test_calc_poly_num4(self):
        self.assertEqual(function.calc_poly([[2, 4, 6]], 0.999), [11.984006])
    # --- function.calc_poly: denominator coefficients [8, 8, 8] ------------
    def test_calc_poly_den(self):
        self.assertEqual(function.calc_poly([[8, 8, 8]], 0.000), [8.0])
    def test_calc_poly_den1(self):
        self.assertEqual(function.calc_poly([[8, 8, 8]], 0.001), [8.008007999999998])
    def test_calc_poly_den2(self):
        self.assertEqual(function.calc_poly([[8, 8, 8]], 0.002), [8.016032])
    def test_calc_poly_den3(self):
        self.assertEqual(function.calc_poly([[8, 8, 8]], 0.003), [8.024071999999999])
    def test_calc_poly_den4(self):
        self.assertEqual(function.calc_poly([[8, 8, 8]], 0.999), [23.976008])
if __name__ == '__main__':
    unittest.main()
|
from math import *
def _leer(etiqueta):
    """Shared prompt helper: the six in_* readers below differ only in their label."""
    return input("------Introduzca " + etiqueta + ": ")

def in_p():
    """Prompt for the prime p (returned as a string)."""
    return _leer("p")

def in_q():
    """Prompt for the prime q (returned as a string)."""
    return _leer("q")

def in_s():
    """Prompt for the secret s (returned as a string)."""
    return _leer("s")

def in_it():
    """Prompt for the number of protocol iterations (returned as a string)."""
    return _leer("el numero de it")

def in_x():
    """Prompt for the commitment base x (returned as a string)."""
    return _leer("x")

def in_e():
    """Prompt for a challenge bit, 0 or 1 (returned as a string)."""
    return _leer("e(0|1)")
def get_n(p, q):
    """Return the public modulus n = p*q; accepts numeric strings or ints."""
    factor_p = int(p)
    factor_q = int(q)
    return factor_p * factor_q
def fiat_shamir(p, q, s, it):
    """Run `it` rounds of the Fiat-Shamir identification protocol, printing each check.

    p, q: secret primes (strings or ints); n = p*q is the public modulus.
    s: the prover's secret (string or int); v = s^2 mod n is the public key.
    it: number of rounds; one challenge bit e is read per round via in_e().

    NOTE(review): x is read only once before the loop and then squared each
    round, while the worked example in the comments below expects a fresh x
    per iteration — confirm the intended protocol flow.
    """
    n = int(p) * int(q)
    it_e = []
    it_val = []
    x = in_x()
    it_val += [x]
    # Collect all challenge bits up front.
    for i in range(0, int(it)):
        e = in_e()
        it_e += [e]
    for i in range(0, int(it)):
        # Commitment for this round: square of the previous round's value.
        a = (int(it_val[i]) ** 2) % n
        it_val += [a]
        # BUG FIX: s arrives as a string from input(); the original computed
        # s ** 2 directly, which raises TypeError. Cast to int first.
        v = (int(s) ** 2) % n
        print("N: " + str(n))
        print("V: " + str(v))
        print("X: " + str(it_val[i]))
        if int(it_e[i]) == 0:
            # Challenge 0: response y = x mod n; verifier checks y^2 == a.
            y = int(it_val[i]) % n
            print("COMPROBAMOS: " + str((y ** 2) % n) + "=" + str(a))
        else:
            # Challenge 1: response y = x*s mod n; verifier checks y^2 == a*v.
            y = (int(it_val[i]) * int(s)) % n
            print("COMPROBAMOS: " + str((y ** 2) % n) + "=" + str((a * v) % n))
#Input for the worked example:
#a. p=7, q=5
#b. s=3
#c. i=2 (number of iterations)
#d. 1st iteration: x=16, e=0
#e. 2nd iteration: x=2, e=1
import machine, oled_ssd1306
from utime import sleep_ms
from menus import *
# y pixel offset of each of the 7 visible menu lines on the SSD1306.
line_y = {1: 3, 2: 12, 3: 21, 4: 30, 5: 39, 6: 48, 7: 57}
cursor_pos = 1  # currently highlighted menu line (1-based)
# Two active-low push buttons: b1 = select, b2 = move cursor (pull-ups enabled).
b1 = machine.Pin(4, machine.Pin.IN, machine.Pin.PULL_UP)
b2 = machine.Pin(14, machine.Pin.IN, machine.Pin.PULL_UP)
options = ('1', '2', '3', '4', '5', '6', '7')
def draw_marker(x, o=1):
    """Draw (o=1) or erase (o=0) the 3x2-pixel cursor marker at screen row x."""
    # Same six pixels as before, written via two small loops instead of six
    # explicit calls: columns 3-5, rows x and x+1.
    for column in (3, 4, 5):
        for row_offset in (0, 1):
            oled_ssd1306.scr.pixel(column, x + row_offset, o)
def show_menu(menu):
    """Render a menu on the OLED and hand control to nav().

    menu is a pair: menu[0] = entry labels, menu[1] = truthy flags marking
    entries that map to handler functions rather than sub-menus (see select()).
    """
    global cursor_pos
    # Reset the cursor to the first entry for every freshly shown menu.
    cursor_pos = 1
    oled_ssd1306.active_line = 0
    oled_ssd1306.wipe()
    # Entries are offset 9px from the left so the cursor marker fits beside them.
    for s in menu[0]:
        oled_ssd1306.pt(s, x=9, noshow=1)
    draw_marker(line_y[cursor_pos])
    oled_ssd1306.scr.show()
    nav(menu)
def select(x):
    """Activate the highlighted entry: call '<label>_func' if flagged, else open the sub-menu.

    Both targets are resolved by name via globals(), so menus and their handler
    functions must live in this module's namespace.
    """
    selection = x[0][cursor_pos - 1]
    if x[1][cursor_pos -1]:
        # Function entry: 'My Entry' -> myentry_func().
        globals()[selection.replace(' ', '').lower() + '_func']()
    else:
        # Sub-menu entry: the label names another menu object in this module.
        show_menu(globals()[selection])
def nav(x):
    """Poll the two buttons forever: b1 selects, b2 moves the cursor.

    Buttons are active-low (pull-ups), so a pressed button reads 0; the 200 ms
    sleep is a crude debounce.
    NOTE(review): this loop never returns and each select() on a sub-menu nests
    another nav() call — confirm menu depth stays small on-device.
    """
    while 1:
        if not b1.value():
            sleep_ms(200)
            select(x)
            continue
        if not b2.value():
            sleep_ms(200)
            cursor_move(x)
            continue
def cursor_move(x):
    """Advance the cursor marker one line, wrapping to the top after the last entry.

    x is the current menu pair; len(x[0]) is the number of entries.
    """
    global cursor_pos
    # Erase the marker at the old position, compute the next (wrapping) line,
    # then redraw and refresh — the original duplicated this in both branches.
    draw_marker(line_y[cursor_pos], 0)
    if cursor_pos + 1 > len(x[0]):
        cursor_pos = 1
    else:
        cursor_pos += 1
    draw_marker(line_y[cursor_pos])
    oled_ssd1306.scr.show()
    # NOTE(review): in the original the debounce sleep appeared only on the
    # non-wrapping path; applying it on both paths is assumed intentional.
    sleep_ms(150)
import os
import csv
# ======================================================================================================================
# Name:
# get_dictionaries()
# Purpose:
# Returns a list of strings that represent paths
# where files were found with file names within dictionaries directory
# ======================================================================================================================
def get_dictionaries(dict_type="allowed_dictionaries"):
    """Return full paths of every file directly inside
    resources/dictionaries/<dict_type> under the current working directory.
    """
    base = os.path.join(os.getcwd(), "resources", "dictionaries", dict_type)
    # next(os.walk(...)) yields only the top level: (dir_path, dir_names, file_names).
    dir_path, _, file_names = next(os.walk(base))
    return [os.path.join(dir_path, name) for name in file_names]
# ======================================================================================================================
# Name:
# extract_dictionaries(dictionaries_paths)
# Purpose:
# Returns a dictionary of dictionary file with name of dictionary file and all words within it
# Arguments:
# A list of strings with paths to files to be opened
# ======================================================================================================================
def extract_dictionaries(dictionaries_paths):
    """Load each dictionary file into {file_name: sorted list of unique words}.

    Files that cannot be opened are reported to stdout and mapped to an empty
    list (best-effort behaviour, preserved from the original).
    """
    all_dictionaries = dict()
    for dictionary_path in dictionaries_paths:
        words = []
        name = os.path.basename(dictionary_path)  # key is the bare file name
        try:
            # 'with' closes the handle even on read errors (the original
            # relied on garbage collection).
            with open(dictionary_path) as f:
                # De-duplicate, then sort alphabetically.
                words = sorted(set(f.read().splitlines()))
        except IOError as e:
            # FIX: the original had a stray 'pass' statement before this
            # print; the message itself is preserved verbatim.
            print("Error encountered during opening a dictionary file: " + str(e))
        all_dictionaries[name] = words
    return all_dictionaries
# ======================================================================================================================
# Name:
# load_dictionaries()
# Purpose:
# function to be called to link get_dictionaries() and extract_dictionaries(dictionaries_paths)
# ======================================================================================================================
def load_dictionaries(dict_type="allowed_dictionaries"):
    """Convenience wrapper: locate the dictionary files, then load their contents."""
    return extract_dictionaries(get_dictionaries(dict_type))
# =============================================================================
# Name: generate_recursive_file_list()
# Purpose: Returns a list of strings that represent paths where files were found with file names
# =============================================================================
def generate_recursive_file_list(input_path):
    """Recursively collect the full path of every file under input_path.

    FIX: dropped the redundant function-local 'import os' (os is imported at
    module level) and replaced the append loop with a comprehension.
    """
    return [os.path.join(dir_path, file_name)
            for dir_path, _, files in os.walk(input_path)
            for file_name in files]
# ======================================================================================================================
# Name:
# file_to_lines(file_path)
# Purpose:
# Returns a list of lines loaded from a file
# Arguments:
# A strings with path to file
# ======================================================================================================================
def file_to_lines(file_path):
    """Return the lines of a text file with trailing newlines stripped.

    The file is streamed line by line (not read whole) to bound memory on
    large inputs. Unreadable files (IOError) and files that are not valid
    UTF-8 (UnicodeDecodeError) yield an empty list — deliberate best-effort
    behaviour for bulk scans, preserved from the original.
    """
    lines = []
    try:
        # Explicit utf-8 keeps decoding consistent across platforms.
        with open(file_path, encoding='utf-8') as f:
            for line in f:
                lines.append(line.rstrip("\n"))
    except (IOError, UnicodeDecodeError):
        # Errors are expected at this stage (permissions, binary files) and
        # intentionally ignored; callers treat the file as empty.
        pass
    return lines
# ======================================================================================================================
# Name:
# save_nested_list_to_csv(file_name, list)
# Purpose:
# Returns a list of lines loaded from a file
# Arguments:
# file_name - name for the .csv file to be saved under
# list_for_csv - a nested list to be saved as csv file
# ======================================================================================================================
def save_to_csv(file_name, list_for_csv):
    """Write a nested list to <file_name>.csv with every field quoted.

    FIXES: the original leaked the file handle (never closed/flushed) and
    omitted newline='', which the csv module requires to avoid
    platform-dependent extra blank lines on Windows.
    """
    with open(file_name + ".csv", 'w', encoding='utf-8', newline='') as my_file:
        wr = csv.writer(my_file, quoting=csv.QUOTE_ALL)
        wr.writerows(list_for_csv)
# ======================================================================================================================
# Name:
# save_to_file(file_name, data=None)
# Purpose:
# Creates a file with filename and extension if no data is supplied. Data is written if supplied
# Arguments:
# String containing name for the file to be created
# ======================================================================================================================
def save_to_file(file_name, extension, data=None):
    """Create <file_name>.<extension>; write one line per item of data if given.

    IOError is reported to stdout rather than raised, preserving the original
    best-effort contract. FIX: 'with' guarantees the handle is closed even if
    a write fails (the original skipped close() on any exception).
    """
    try:
        with open(file_name + "." + extension, 'w', encoding='utf-8') as f:
            if data:
                for line in data:
                    f.write(line)
                    f.write('\n')
    except IOError:
        print("there was a problem with saving to a file")
# ======================================================================================================================
# Name:
# generate_html_table(data_for_table)
# Purpose:
# Returns a list of lines corresponding to html table with data from argument
# Arguments:
# data_for_table - data to be inserted within the HTML code
# ======================================================================================================================
def generate_html_table(data_for_table):
    """Render rows as lines of a 'sortable' HTML table; row 0 supplies the headers."""
    header_cells = "".join("<th>" + str(col) + "</th>" for col in data_for_table[0])
    lines = [
        "<table class=\"sortable\">",
        " <thead>",
        " <tr>" + header_cells + "</tr>",
        " </thead>",
        "",
        " <tbody>",
    ]
    # Row 0 is the header, so the body starts at index 1.
    for row in data_for_table[1:]:
        body_cells = "".join("<td>" + str(col) + "</td>" for col in row)
        lines.append(" <tr>" + body_cells + "</tr>")
    lines.extend([" </tbody>", "</table>"])
    return lines
# ======================================================================================================================
# Name:
# html_styles()
# Purpose:
# Returns a list of lines corresponding to HTML style
# ======================================================================================================================
def html_styles():
    """Return the inline CSS block (as a list of lines) shared by generated report pages."""
    # A single list literal replaces the original's 23 append() calls; the
    # strings themselves are unchanged.
    return [
        "<style>",
        "table {",
        " font-family: arial, sans-serif;",
        " font-size: 12px;",
        " border-collapse: collapse;",
        " width: 100%;",
        "}",
        "",
        "td, th {",
        " border: 1px solid #000000;",
        " text-align: left;",
        " padding: 8px;",
        "}",
        "",
        "tr:nth-child(even) {",
        " background-color: #E9E8E2;",
        "}",
        "mark {",
        " background-color: #8D38C9;",
        " color: yellow;",
        "}",
        "</style>",
    ]
# ======================================================================================================================
# Name:
# generate_html_page(html_table)
# Purpose:
# Returns a list of lines corresponding to complete html page
# Arguments:
# data_for_table - data to be inserted within the HTML code
# ======================================================================================================================
def generate_html_page(html_table):
    """Wrap a rendered HTML table in a complete page with sorttable.js and the shared styles."""
    page = [
        "<!DOCTYPE html>",
        "<html>",
        "<head>",
        "<script src=\"sorttable.js\"></script>",
        "",
    ]
    page.extend(html_styles())  # shared CSS block
    page.extend(["", "</head>", "<body>", ""])
    page.extend(html_table)  # table with results
    page.extend(["", "</body>", "</html>"])
    return page
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 21 13:34:17 2016
@author: JO21372
"""
from setuptools import setup, find_packages
# Package metadata for pyslgr, built with setuptools.
setup(
    name="pyslgr",
    packages=['pyslgr'],
    version="0.7.2",
    description="Python tools for speech, language, and gender recognition (SLGR)",
    author='Human Language Technology Group, MIT Lincoln Laboratory',
    author_email='none',
    # Ship non-.py files declared via MANIFEST.in / package_data.
    include_package_data=True,
    # Must be installed unzipped (package data is accessed via the filesystem).
    zip_safe=False
)
|
# BUG FIX: the original looped forever ('while True' with no break), so
# archivo.close() was unreachable and 'doc.mp3' grew without bound. The loop
# is now bounded, and a context manager closes the (append-mode) file.
LIMIT = 100  # number of squares to append; the original had no stopping point
with open('doc.mp3', 'a') as archivo:
    contador = 1
    while contador < LIMIT:
        contador = contador + 1
        archivo.write(str(contador * contador))
# import dash_bootstrap_components as dbc
# import dash_html_components as html
# import dash
# import pandas as pd
# import dash_core_components as dcc
# from dash.dependencies import Input, Output
# import plotly.graph_objs as go
# import plotly.express as px
# from math import log10
import dash_bootstrap_components as dbc
import dash_html_components as html
import dash
import pandas as pd
import dash_core_components as dcc
from dash.dependencies import Input, Output
import plotly.graph_objs as go
import plotly.express as px
import numpy as np
import matplotlib.pyplot as plt
from math import log10
from plotly.tools import mpl_to_plotly
import seaborn as sns
# Load the P&L sheet: skip the 5-row header, read 18 indicator rows, columns
# D:Q (Indicator Name, JAN..DEC, Total).
df= pd.read_excel("C:/Users/Administrator/Desktop/new/P&L final.xlsx",sheet_name='data', skiprows=5,nrows=18, usecols="D:Q")
waterfall_data=df.copy()
# Flip the sign of rows 1, 3 and 5 so the waterfall shows them as decreases.
waterfall_data.iloc[[1,3,5],1:]=waterfall_data.iloc[[1,3,5],1:]*(-1)
def finval(col):
    # Net result for one column: sum of the first six (sign-adjusted) rows.
    sm=0
    for i in waterfall_data[col][0:6]:
        sm+=i
    return sm
# Append the net row as index 18.
# NOTE(review): "JAN" appears twice — the first entry fills the
# 'Indicator Name' label cell with a number; confirm this is intentional.
waterfall_data.loc[18]=[finval(x) for x in ["JAN","JAN", "FEB","MAR","APR","MAY","JUN","JUL","AUG","SEP","OCT","NOV","DEC","Total"]]
# NOTE(review): this bare expression computes a filtered frame and discards
# the result — dead code or a leftover debugging line.
df[df["Indicator Name"]=="% of Income Budget"]
months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
# Dropdown options for the month picker.
month_options = []
for month in months:
    month_options.append({'label':month, 'value': month})
# Transpose to one row per month; the indicator names become the column labels
# (prefixed with 'cat' for the month column).
db=df.drop([ "Total"], axis=True).T
db.reset_index(inplace=True)
clss=["cat"]
clss.extend(list(df["Indicator Name"]))
db.columns=clss
def pcc(month, catgg):
    """Percent change of indicator `catgg` versus the previous month.

    Returns 0.00 for JAN (no previous month); otherwise a string like "12.3 %".
    NOTE(review): falls through and returns None when `month` is not in the
    list — confirm callers only pass valid month codes.
    """
    months = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"]
    if month =="JAN":
        return 0.00
    else:
        for i in months:
            if month==i:
                # (current - previous) / previous * 100, rounded to one decimal;
                # operates on the one-row frame selected by the indicator name.
                m=round(100*(-df[df["Indicator Name"]==catgg][months[months.index(month)-1]]+df[df["Indicator Name"]==catgg][month])/(df[df["Indicator Name"]==catgg][months[months.index(month)-1]]), 1)
                # m is a one-element Series; pull out its scalar value.
                return "{} %".format(m[list(m.index)[0]])
            else:
                pass
def tcv(mnt, catgg):
    """Look up the value of indicator *catgg* for month *mnt* in ``df``."""
    row_position = list(df["Indicator Name"]).index(catgg)
    return df[mnt][row_position]
# Dash application using the Bootstrap theme.
app = dash.Dash(external_stylesheets=[dbc.themes.BOOTSTRAP])
# Month picker that drives every callback below (defaults to January).
dropdowns= html.Div([dcc.Dropdown (id= 'month_picker',
    options=month_options,
    value="JAN",
    placeholder="Select Month",
    style = dict(
        width = '175px',
        display = 'inline-block',
        verticalAlign = "middle"
    )
)])
# Card hosting the waterfall P&L statement graph (filled by the 'wfg' callback).
# Fix: corrected the visible title typo "Statemennt" -> "Statement".
wfgraph = dbc.Card([dbc.Card(
    [
        dbc.CardBody(html.Div([
            html.Div(html.A(html.P("Statement"), style={'text-align': 'center', "font-weight": "bold"})),
            dcc.Graph (config = {'displayModeBar': False},id='wfg')]),
        ),
    ],
)],style={"width": "36rem"})
# Donut card: net profit margin % vs. benchmark (filled by the 'dnc' callback).
donut_card = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Net Profit Margin %"), style={'text-align': 'center', "font-weight": "bold"})),
                html.A(dcc.Graph(config={'displayModeBar':False},id="dnc"), style={'text-align': 'center', "font-weight": "bold", "margin-left":"0px"})
            ]),
        ]
    ),
],style={"width": "18rem"})
# Donut card: % of income budget achieved ('incpiechart' callback).
incpie = dbc.Card([
    dbc.CardBody(
        [
            html.Div(html.A(html.P('% of Income Budget'), style={'text-align': 'center', "font-weight": "bold"})),
            html.A(dcc.Graph(config={'displayModeBar':False},id="incpiechart"), style={'textAlign': 'center', "font-weight": "bold"})
        ]
    ),
], style={"width": "18rem"})
# Donut card: % of expenses budget used ('expiechart' callback).
expie = dbc.Card([
    dbc.CardBody(
        [
            html.Div(html.A(html.P('% of Expenses Budget'), style={'text-align': 'center', "font-weight": "bold"})),
            html.A(dcc.Graph(config={'displayModeBar':False},id="expiechart"), style={'textAlign': 'center', "font-weight": "bold"}),
        ]
    ),
],style={"width": "18rem"})
# Stacked bar card: monthly income vs. expenses ('dash_bar_plot' callback).
dashbar = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Income and Expenses"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="dash_bar_plot")
            ]),
        ]
    ),
],style={"width": "36rem"})
# KPI cards: each shows a title, the month's total (filled by a callback),
# a sparkline graph, the month-over-month % change, and a caption.
# Income KPI ('intot' total, 'inclc' sparkline, 'inper' % change).
incomecard = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Income"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="intot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="inclc"),
                html.Div(html.A(html.P(id="inper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Expenses KPI.
expensescard = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Expenses"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="extot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="explc"),
                html.Div(html.A(html.P(id="exper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Accounts Receivable KPI.
accountsrecieveable = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Accounts Receivable"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="acrctot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="accrcv"),
                html.Div(html.A(html.P(id="acrcper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Accounts Payable KPI.
accountspayable = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Accounts Payable"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="acpytot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="accpay"),
                html.Div(html.A(html.P(id="acpyper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Net Profit KPI.
netprofitcard = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Net Profit"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="netprofittot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="nplc"),
                html.Div(html.A(html.P(id="netprofitper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Cash at end-of-month KPI.
cashatEOM = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Cash at EOM"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="caeomtot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="caeom"),
                html.Div(html.A(html.P(id="caeomper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Quick Ratio KPI.
quickratio = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Quick Ratio"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="qrtot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="qrlc"),
                html.Div(html.A(html.P(id="qrper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Current Ratio KPI.
currentratio = dbc.Card([
    dbc.CardBody(
        [
            html.Div([
                html.Div(html.A(html.P("Current Ratio"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P(id="crtot"), style={'text-align': 'center', "font-weight": "bold"})),
                dcc.Graph(config={'displayModeBar':False},id="crlc"),
                html.Div(html.A(html.P(id="crper"), style={'text-align': 'center', "font-weight": "bold"})),
                html.Div(html.A(html.P("vs. Previous month"), style={'text-align': 'center'}))
            ])
        ])
],style={"width": "18rem"})
# Card grid: three rows — charts on top, then the two banks of KPI cards.
lineone=html.Div([
    dbc.Row(
        [wfgraph, donut_card, dashbar
        ]
    ),
    dbc.Row(
        [incomecard,expensescard, incpie, accountsrecieveable,accountspayable
        ]),
    dbc.Row(
        [netprofitcard, cashatEOM, expie,quickratio, currentratio
        ]),
])
# Overall page: month-picker column next to the card grid.
cards = html.Div([dbc.Row([
    dbc.Col(dbc.Card(dropdowns, style={'height':'100%'}), width='auto'),
    dbc.Col(dbc.Card(lineone, style={'height':'100%'}),width='100%' ),
]),])
app.layout= cards
@app.callback(Output ('wfg', 'figure'), [Input('month_picker', 'value')])
def waterfall_graph(mnth):
    """Waterfall chart of the P&L statement for the selected month.

    Uses ``waterfall_data`` rows 0-5 (components, expenses sign-flipped)
    plus row 18 (the computed total appended during data prep).
    """
    fig = go.Figure(go.Waterfall(
        name = "WaterFallGraph", orientation = "v",
        # First bar is absolute ("Total"), middle bars relative, last a total.
        measure = ["Total", "relative", "relative", "relative", "relative", "relative", "total"],
        x =waterfall_data.iloc[[0,1,2,3,4,5,18]]["Indicator Name"],
        textposition = "outside",
        text = waterfall_data.iloc[[0,1,2,3,4,5,18]][mnth],
        y = waterfall_data.iloc[[0,1,2,3,4,5,18]][mnth],
        increasing = {"marker":{"color":"lightseagreen"}},
        decreasing = {"marker":{"color":"#EE5C42"}},
        totals = {"marker":{"color":"#27408B"}},
        connector = {"visible": False},),
        layout = {'xaxis': {
            'visible': True,
            'showticklabels': True},
            'yaxis': {
                'visible': False,
                'showticklabels': False},
            'height': 300,
            'width' : 520,
            'margin': {'l': 0, 'b': 0, 't': 0, 'r': 0}
        }
    )
    fig.update_layout(
        title = "Profit and loss statement",
        showlegend = False
    ).add_shape(
        # Overlay a solid blue rectangle over the first bar so the leading
        # "Total" bar renders in the totals colour.
        type="rect", fillcolor="#27408B", line=dict(color="#27408B"), opacity=1,
        x0=-0.4, x1=0.4, xref="x", y0=0.0, y1=fig.data[0].y[0], yref="y")
    fig.update_layout(
        xaxis = dict(
            tickangle=0,
            tickmode = 'array',
            # Label only the first, middle, and last bars.
            tickvals = [0, 3, 6],
            ticktext = ['Total Income', 'Total Operating Expenses', 'Net Profit']
        )
    )
    return fig
@app.callback(Output ('dnc', 'figure'), [Input('month_picker', 'value')])
def donut_chart(mnth):
    """Two-ring donut for the month's net profit margin % (df row 7):
    inner ring is the margin, outer ring a fixed 14.0 benchmark.

    Fixes: removed the unused local ``n`` and a duplicated
    ``fig.update_layout(showlegend=False)`` call.
    """
    val = round(df.iloc[7][mnth]*100,1)
    # labels[0] = margin above the 14.0 benchmark, labels[1] = the margin itself.
    labels = [str(round(val-14.0,1)), str(val)]
    data = [14.0,val]
    # Scale both rings against the next power-of-ten ceiling so the filled
    # arc length is proportional to the value.
    k = 10 ** int(log10(max(data)))
    m = k * (1 + max(data) // k)
    outer_values = [data[0], m-data[0]]
    inner_values = [data[1], m - data[1]]
    trace1 = go.Pie(
        hole=0.6,
        sort=False,
        values=inner_values,
        textinfo='text',
        direction='clockwise',
        marker={'colors': ['lightseagreen', "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    trace2 = go.Pie(
        hole=0.8,
        sort=False,
        textinfo='text',
        direction='clockwise',
        values=outer_values,
        marker={'colors': ["#EE5C42", "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    layout = go.Layout(
        xaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        yaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        height=150,
        width=250,
        margin=dict(l= 0, b= 0, t= 0, r= 0)
    )
    fig = go.FigureWidget(data=[trace1, trace2], layout=layout)
    # Single layout update with the centre annotations.
    fig.update_layout(showlegend=False,
                      annotations=[dict(text=labels[1]+"%", x=0.515, y=0.56, font_size=20, showarrow=False),
                                   dict(text=labels[0]+"%", x=0.515, y=0.44, font_size=20, showarrow=False)])
    return fig
@app.callback(Output ('incpiechart', 'figure'), [Input('month_picker', 'value')])
def inc_donut_chart(mnth):
    """Two-ring donut: % of income budget achieved (df row 15).

    Fixes: removed the unused local ``n`` and a duplicated
    ``fig.update_layout(showlegend=False)`` call.
    """
    labels = [str(round(df.iloc[15][mnth]*100)), str(100-round(df.iloc[15][mnth]*100))]
    data = [(round(df.iloc[15][mnth]*100)), (100-round(df.iloc[15][mnth]*100))]
    # Scale rings against the next power-of-ten ceiling of the larger value.
    k = 10 ** int(log10(max(data)))
    m = k * (1 + max(data) // k)
    outer_values = [data[0], m-data[0]]
    inner_values = [m-data[0], data[0]]
    trace1 = go.Pie(
        hole=0.6,
        sort=True,
        values=inner_values,
        textinfo='text',
        direction='counterclockwise',
        marker={'colors': ['black', "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    trace2 = go.Pie(
        hole=0.8,
        sort=False,
        textinfo='text',
        direction='clockwise',
        values=outer_values,
        marker={'colors': ["lightseagreen", "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    layout = go.Layout(
        xaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        yaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        height=100,
        width=180,
        margin=dict(l= 0, b= 0, t= 0, r= 0)
    )
    fig = go.FigureWidget(data=[trace1, trace2], layout=layout)
    fig.update_layout(showlegend=False,
                      annotations=[dict(text=labels[0]+"%", x=0.515, y=0.54, font_size=20, showarrow=False)])
    return fig
@app.callback(Output ('expiechart', 'figure'), [Input('month_picker', 'value')])
def exp_donut_chart(mnth):
    """Two-ring donut: % of expenses budget used (df row 17).

    Fixes: removed the unused local ``n`` and a duplicated
    ``fig.update_layout(showlegend=False)`` call.
    """
    labels = [str(round(df.iloc[17][mnth]*100)), str(100-round(df.iloc[17][mnth]*100))]
    data = [(round(df.iloc[17][mnth]*100)), (100-round(df.iloc[17][mnth]*100))]
    # Scale rings against the next power-of-ten ceiling of the larger value.
    k = 10 ** int(log10(max(data)))
    m = k * (1 + max(data) // k)
    outer_values = [data[0], m-data[0]]
    inner_values = [m-data[0], data[0]]
    trace1 = go.Pie(
        hole=0.6,
        sort=True,
        values=inner_values,
        textinfo='text',
        direction='counterclockwise',
        marker={'colors': ['black', "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    trace2 = go.Pie(
        hole=0.8,
        sort=False,
        textinfo='text',
        direction='clockwise',
        values=outer_values,
        marker={'colors': ["lightseagreen", "white"],
                'line': {'color': 'white', 'width': 1}}
    )
    layout = go.Layout(
        xaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        yaxis=dict(
            autorange=True,
            showgrid=False,
            ticks='',
            showticklabels=False
        ),
        height=100,
        width=180,
        margin=dict(l= 0, b= 0, t= 0, r= 0)
    )
    fig = go.FigureWidget(data=[trace1, trace2], layout=layout)
    fig.update_layout(showlegend=False,
                      annotations=[
                          dict(text=labels[0]+"%", x=0.515, y=0.54, font_size=20, showarrow=False)])
    return fig
# Sparkline callbacks: one per KPI card. Each plots a df row across the
# twelve months (columns 1:-1, i.e. excluding "Indicator Name" and "Total")
# with all axes hidden. The month input only triggers redraws — the figures
# themselves are month-independent.
@app.callback(
    Output(component_id='inclc', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def income_line(input_value):
    """Sparkline of df row 0 (the Income card's graph)."""
    return px.line(y=list(df.loc[0][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='explc', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def expenses_line_graph(input_value):
    """Sparkline of df row 8 (the Expenses card's graph)."""
    return px.line(y=list(df.loc[8][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='accrcv', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def acc_rec_graph(input_value):
    """Sparkline of df row 12 (the Accounts Receivable card's graph)."""
    return px.line(y=list(df.loc[12][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='accpay', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def acc_pay_graph(input_value):
    """Sparkline of df row 13 (the Accounts Payable card's graph)."""
    return px.line(y=list(df.loc[13][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='nplc', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def net_profit_graph(input_value):
    """Sparkline of df row 6 (the Net Profit card's graph)."""
    return px.line(y=list(df.loc[6][1:-1]), x=list(df.columns[1:-1]), ).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='caeom', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def cash_eom_graph(input_value):
    """Sparkline of df row 9 (the Cash at EOM card's graph)."""
    return px.line(y=list(df.loc[9][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='qrlc', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def qr_graph(input_value):
    """Sparkline of df row 10 (the Quick Ratio card's graph)."""
    return px.line(y=list(df.loc[10][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='crlc', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def cr_line(input_value):
    """Sparkline of df row 11 (the Current Ratio card's graph)."""
    return px.line(y=list(df.loc[11][1:-1]), x=list(df.columns[1:-1])).update_layout(
        showlegend=False,
        plot_bgcolor="white",
        height=50,
        width=180,
        xaxis=dict(visible=False,
                   showticklabels= False),
        yaxis=dict(visible=False,
                   showticklabels= False),
        margin=dict(t=0,l=0,b=0,r=0)
    )
@app.callback(
    Output(component_id='dash_bar_plot', component_property='figure'),
    [Input(component_id='month_picker', component_property='value')]
)
def dash_barmat_graph(input_value):
    """Stacked bar chart of monthly expenses vs. operating profit with
    annotated income, profit, and expense values.

    The figure is month-independent; *input_value* only triggers redraws.
    """
    fig=go.Figure(data=[
        # Bottom segment: Income minus Operating Profit (i.e. expenses).
        go.Bar(x=db[1:]["cat"], y=db[1:]["Income"]-db[1:]["Operating Profit (EBIT)"], marker_color='#C1CDCD'),
        # Top segment: Operating Profit.
        go.Bar(x=db[1:]["cat"], y=db[1:]["Operating Profit (EBIT)"], marker_color='#C1CDCD'),
    ],
        layout = {'xaxis': {
            'visible': False,
            'showticklabels': False},
            'yaxis': {
                'visible': False,
                'showticklabels': False},
            'height': 300,
            'width' : 550,
            'margin': {'l': 0, 'b': 0, 't': 0, 'r': 0}
        }
    )
    fig.update_layout(barmode='stack')
    # Green arrows from the axis up to the expense boundary, labelled with income.
    [fig.add_annotation(
        xref="x",
        yref="y",
        x=i-1,
        y=db[1:]["Income"][i]-db[1:]["Operating Profit (EBIT)"][i]-200,
        text=str(db[1:]["Income"][i]),
        axref="x",
        ayref="y",
        ax=i-1,
        ay=0,
        arrowhead=2,
        arrowwidth=3,
        arrowcolor="green"
    ) for i in range(1,13)]
    # Red arrows from the income level down, labelled with operating profit.
    [fig.add_annotation(
        xref="x",
        yref="y",
        x=i-1,
        y=db[1:]["Income"][i]-db[1:]["Operating Profit (EBIT)"][i]+200,
        text=str(db[1:]["Operating Profit (EBIT)"][i]),
        axref="x",
        ayref="y",
        ax=i-1,
        ay=db[1:]["Income"][i],
        arrowhead=2,
        arrowwidth=3,
        arrowcolor="red"
    ) for i in range(1,13)]
    # Plain labels at the segment boundary with the expense value.
    [fig.add_annotation(
        xref="x",
        yref="y",
        x=i-1,
        y=db[1:]["Income"][i]-db[1:]["Operating Profit (EBIT)"][i],
        text=str(db[1:]["Income"][i]-db[1:]["Operating Profit (EBIT)"][i]),
        showarrow=False,
    ) for i in range(1,13)]
    fig.update_layout(showlegend=False)
    return fig
@app.callback(
    Output(component_id='inper', component_property='children'),
    Output(component_id='exper', component_property='children'),
    Output(component_id='acrcper', component_property='children'),
    Output(component_id='acpyper', component_property='children'),
    Output(component_id='netprofitper', component_property='children'),
    Output(component_id='caeomper', component_property='children'),
    Output(component_id='qrper', component_property='children'),
    Output(component_id='crper', component_property='children'),
    [Input(component_id='month_picker', component_property='value')]
)
def percenchange(mnh):
    """Month-over-month % change labels for the eight KPI cards via pcc().

    NOTE(review): "Net Profit " carries a trailing space — presumably it
    matches the sheet's indicator name exactly; verify against the data.
    """
    return pcc(mnh, "Income"),pcc(mnh, "Expenses"),pcc(mnh, "Accounts Receivable"),pcc(mnh, "Accounts Payable"),pcc(mnh, "Net Profit "),pcc(mnh, "Cash at EOM"),pcc(mnh, "Quick Ratio"),pcc(mnh, "Current Ratio")
@app.callback(
    Output(component_id='intot', component_property='children'),
    Output(component_id='extot', component_property='children'),
    Output(component_id='acrctot', component_property='children'),
    Output(component_id='acpytot', component_property='children'),
    Output(component_id='netprofittot', component_property='children'),
    Output(component_id='caeomtot', component_property='children'),
    Output(component_id='qrtot', component_property='children'),
    Output(component_id='crtot', component_property='children'),
    [Input(component_id='month_picker', component_property='value')]
)
def percenchange(mnh):
    """Current-month totals for the eight KPI cards via tcv().

    NOTE(review): this reuses the name ``percenchange`` from the callback
    above, shadowing it at module level. Dash captured each function at
    decoration time so both callbacks still fire, but the duplicate name
    should be renamed for clarity.
    """
    return tcv(mnh, "Income"),tcv(mnh, "Expenses"),tcv(mnh, "Accounts Receivable"),tcv(mnh, "Accounts Payable"),tcv(mnh, "Net Profit "),tcv(mnh, "Cash at EOM"),tcv(mnh, "Quick Ratio"),tcv(mnh, "Current Ratio")
if __name__ == "__main__":
    app.run_server(debug=True, port=8183)
|
from django.db import models
class Coupon(models.Model):
    """A discount voucher with optional artwork and an on/off status flag."""
    coupon_id = models.AutoField(primary_key=True)
    coupon_name = models.CharField(max_length=60)
    # Optional coupon image stored under media/voucher/coupons/.
    image = models.ImageField(db_column='image', upload_to="voucher/coupons/", null=True, blank=True)
    # Discount amount; units (percent vs. flat value) not evident from this model.
    discount = models.IntegerField()
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # Inactive by default until explicitly enabled.
    status = models.BooleanField(default=False)
    def __str__(self):
        return self.coupon_name
|
from copy import deepcopy
from functools import reduce
from itertools import starmap
from typing import List, Tuple, Iterable, Set
def read_block() -> str:
    """Read a line count from stdin, then that many lines, returning them
    concatenated with any embedded newline/carriage-return characters removed."""
    count = int(input())
    joined = ''.join(input() for _ in range(count))
    return joined.replace('\n', '').replace('\r', '')
def not_so_complex_hash(text: str, offset: int = 0, base: int = 256) -> List[int]:
    """Hash *text* into 16 buckets.

    Byte i of the latin-1 encoding is added into bucket ``(i + offset) % 16``,
    with every bucket kept modulo *base*.

    Fixes: dropped the redundant ``map(int, hashed)`` pass (the buckets are
    already ints) and replaced index iteration with ``enumerate``.
    """
    hashed = [0] * 16
    for i, byte in enumerate(text.encode("iso-8859-1")):
        hashed[(i + offset) % 16] = (hashed[(i + offset) % 16] + byte) % base
    return hashed
def sum_two_hashed(hashed_a: List[int], hashed_b: List[int]) -> List[int]:
    """Element-wise sum of two hash vectors, each entry taken modulo 256."""
    return [(a + b) % 256 for a, b in zip(hashed_a, hashed_b)]
def sum_hashed(*args) -> List[int]:
    """Fold any number of hash vectors into one via ``sum_two_hashed``."""
    return reduce(sum_two_hashed, args)
def split_block(block: str) -> Tuple[str, str]:
    """Split *block* at its six-dash separator, leaving each half with a
    three-dash edge so both sides keep a '---' boundary marker."""
    separator = '-' * 6
    preamble, body = block.split(separator)
    return preamble + '---', '---' + body
def generate_solutions(preamble: str, body: str, hashed: List[int]) -> Iterable[Tuple[int, List[int]]]:
    """Yield ``(extra_offset, residual)`` pairs for the 16 candidate offsets.

    For each candidate, the body is hashed as if shifted *additional* places
    past the preamble, combined with the preamble's hash, and the per-bucket
    difference (mod 256) from the target *hashed* is returned — i.e. what a
    hidden insertion would still need to contribute.

    (Return annotation corrected: this returns an enumerate of int lists,
    not ``Tuple[int, List[str]]``.)
    """
    preamble_hashed = not_so_complex_hash(preamble)
    # Body hash at every additional offset 0..15 beyond the preamble length.
    mapping = (not_so_complex_hash(body, offset=additional + len(preamble)) for additional in range(16))
    mapping = (sum_hashed(v, preamble_hashed) for v in mapping)
    # Residual needed per bucket to reach the target hash.
    mapping = (list((b - a) % 256 for a, b in zip(v, hashed)) for v in mapping)
    return enumerate(mapping)
def generate_print_section(preamble: str, body: str, hashed: List[int]) -> str:
    """Choose, across all candidate offsets, the character decomposition
    with the fewest total characters, and decode it into the message."""
    offset = len(preamble) % 16
    characters = None
    total_characters = None
    for additional, solution in generate_solutions(preamble, body, hashed):
        new_characters = find_characters(additional, offset, solution)
        new_total_characters = sum(len(v) for v in new_characters)
        # Keep the smallest decomposition seen so far (ties prefer later candidates).
        if characters is not None and total_characters < new_total_characters:
            continue
        characters = new_characters
        total_characters = new_total_characters
    message = generate_message(characters, offset)
    return message
def generate_message(characters: List[List[int]], offset: int, base: int = 16) -> str:
    """Round-robin over the *base* bucket lists starting at *offset*,
    consuming one character code per visit until every bucket is empty.

    Note: the bucket lists in *characters* are mutated (drained) in place.
    """
    pieces = []
    cursor = offset
    while any(len(bucket) for bucket in characters):
        pieces.append(chr(characters[cursor].pop(0)))
        cursor = (cursor + 1) % base
    return ''.join(pieces)
def available_sizes(possibles: Iterable[List[List[int]]]) -> Set[int]:
    """Return the set of decomposition lengths available in *every* entry
    of *possibles* (intersection of each entry's option lengths)."""
    length_sets = [{len(option) for option in entry} for entry in possibles]
    return reduce(set.intersection, length_sets)
def generate_result(possibles: List[List[List[int]]], with_one_more) -> List[List[int]]:
    """Pick one decomposition per bucket so that all buckets have the same
    length, except those in *with_one_more* which get exactly one extra
    character. Returns an empty list when no consistent choice exists.
    """
    # Smallest length available in every "normal" bucket.
    to_check = [v for i, v in enumerate(possibles) if i not in with_one_more]
    sizes = available_sizes(to_check)
    if not sizes:
        return list()
    selected_size = min(sizes)
    # The "+1" buckets must all support selected_size + 1.
    to_check = [v for i, v in enumerate(possibles) if i in with_one_more]
    if to_check:
        sizes = available_sizes(to_check)
        if not selected_size + 1 in sizes:
            return list()
    # Take the first option of the required length from each bucket.
    result = list()
    for i, possible in enumerate(possibles):
        w = (i in with_one_more)
        for a in possible:
            if len(a) == selected_size + w:
                result.append(a)
                break
    return result
def update_possibles(solution: List[int], possibles, with_one_more) -> List[int]:
    """Return a copy of *solution* with 256 added to the most constrained
    entries, so the next decomposition round has more room.

    Entries whose option set tops out at the global minimum size are always
    bumped; entries one above that minimum are bumped only when their index
    is in *with_one_more*. (The two conditions are mutually exclusive, so a
    single pass is equivalent to the original's two sequential passes.)
    """
    sizes = [max(map(len, options)) for options in possibles]
    smallest = min(sizes)
    bumped = []
    for i, x in enumerate(solution):
        if sizes[i] == smallest:
            bumped.append(x + 256)
        elif sizes[i] == smallest + 1 and i in with_one_more:
            bumped.append(x + 256)
        else:
            bumped.append(x)
    return bumped
def find_characters(additional: int, offset: int, solution: List[int]) -> List[List[int]]:
    """Decompose each residual of *solution* into character codes, retrying
    with +256 bumps until a consistent assignment exists.

    *additional* extra characters land in the buckets starting at *offset*
    (those buckets receive one character more than the rest).
    """
    with_one_more = set((offset + i) % 16 for i in range(additional))
    # Values of 48 or less cannot be decomposed directly; lift by one 256 wrap.
    solution = list(x if 48 < x else x + 256 for x in solution)
    possibles = list(possible_decomposes(v) for v in solution)
    result = generate_result(possibles, with_one_more)
    # Keep bumping the most constrained entries until a decomposition fits.
    while not result:
        solution = update_possibles(solution, possibles, with_one_more)
        possibles = list(possible_decomposes(v) for v in solution)
        result = generate_result(possibles, with_one_more)
    # Sanity check: chosen decompositions reproduce the (lifted) residuals.
    assert solution == list(sum(v) for v in result)
    return result
def possible_decomposes(value: int) -> List[List[int]]:
    """Enumerate ways to write *value* as a sum of character codes in the
    printable range 48..122, where each extra term is a boundary code
    (48 or 122). Results are sorted by (length, smallest code); values
    below 48 are rejected.
    """
    if value < 48:
        raise Exception
    result = []
    # Base case: the value itself is a single printable code.
    if 48 <= value <= 122:
        result.append([value])
    # Peel off a 48 and recurse on the remainder.
    if value > 96:
        result.extend(rest + [48] for rest in possible_decomposes(value - 48))
    # Peel off a 122 and recurse on the remainder.
    if value > 170:
        result.extend(rest + [122] for rest in possible_decomposes(value - 122))
    result = [sorted(combo) for combo in result]
    result.sort(key=lambda combo: (len(combo), min(combo)))
    assert all(sum(combo) == value for combo in result)
    return result
def solve_case():
    """Solve one case: read the original and altered blocks from stdin,
    then recover the section hidden in the altered block by matching the
    original block's hash."""
    original = read_block()
    altered = read_block()
    original_hashed = not_so_complex_hash(original)
    # altered_hashed = not_so_complex_hash(altered)
    altered_preamble, altered_body = split_block(altered)
    print_section = generate_print_section(altered_preamble, altered_body, original_hashed)
    return print_section
def main():
    """Read the case count from stdin, solve each case in order, and print
    the results in 'Case #i: result' format."""
    case_count = int(input())
    for index in range(case_count):
        print(f'Case #{index + 1}: {solve_case()}')


if __name__ == '__main__':
    main()
|
from django.test import Client
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase, APIClient
from rest_framework.authtoken.models import Token
from authentication.models import UserProfile
from report.models import Report, Document, Folder
from report.serializers import ReportSerializer
import random
import json
import os
class ReportTests(APITestCase):
    """API tests covering report creation, visibility, retrieval and deletion."""
    # Fixture usernames and passwords (index-aligned lists).
    list_of_users = ['user1','user2','user3']
    list_of_passwords = ['password1','password2', 'password3']
    # API endpoints under test.
    register_url = '/api/v1/users/register/'
    login_url = '/api/v1/users/login/'
    reports_url = '/api/v1/reports/'
    folders_url = '/api/v1/reports/folders/'
    # NOTE(review): machine-specific absolute path — breaks on other hosts.
    base_dir = '/home/richard/secureshare/temp_keys/'
    # JSON payloads for a private and a public report.
    private_report_data = { "name": "This is a Report",
        "short_description" : "This is a short description",
        "long_description" : "This is a long description",
        "private":"True"
        }
    public_report_data = {"name": "This is a Report",
        "short_description": "This is a short description",
        "long_description":"This is a long description people can see",
        "private":"False"
        }
    serializer_class = ReportSerializer
def generate_users_receive_tokens(self):
size_of_list = len(self.list_of_users)
token_list = []
for i in range(0, size_of_list):
user = User.objects.create(
username=self.list_of_users[i],
password=self.list_of_passwords[i]
)
UserProfile.objects.create(
user=user
)
token = Token.objects.create(
user=user
)
token_list.append(token.key)
return token_list
    def test_post(self):
        """POST a public report as user one and verify the stored fields."""
        token_list = self.generate_users_receive_tokens()
        user_one_token = token_list[0]
        user_two_token = token_list[1]
        user_three_token = token_list[2]
        user_one = User.objects.get(username=self.list_of_users[0])
        user_two = User.objects.get(username=self.list_of_users[1])
        user_three = User.objects.get(username=self.list_of_users[2])
        # Authenticate as user one and create the report.
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_one_token))
        response = self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)},format='multipart')
        self.assertEqual(
            response.status_code,
            status.HTTP_201_CREATED,
            msg = "Incorrect response code when creating a valid report" + str(response.data)
        )
        file_list = response.data.pop('files')
        self.assertEqual(
            Report.objects.count(),
            1,
            msg="Report object not made after successful creation"
        )
        self.assertEqual(
            Report.objects.get().name,
            self.public_report_data['name'],
            msg="Created Report object has incorrect name"
        )
        self.assertEqual(
            Report.objects.get().short_description,
            self.public_report_data['short_description'],
            msg = "Created Report object has incorrect short description"
        )
        self.assertEqual(
            Report.objects.get().long_description,
            self.public_report_data['long_description'],
            msg = "Created Report object has incorrect long description"
        )
    def test_post_files(self):
        """POST a private report with two file attachments and verify both
        documents are linked to the created report.

        NOTE(review): the two file handles are written but never closed or
        rewound (no seek(0)) before being attached to the POST — confirm the
        test client/serializer handles the file position as expected.
        """
        token_list = self.generate_users_receive_tokens()
        user_one_token = token_list[0]
        user_two_token = token_list[1]
        user_three_token = token_list[2]
        user_one = User.objects.get(username=self.list_of_users[0])
        user_two = User.objects.get(username=self.list_of_users[1])
        user_three = User.objects.get(username=self.list_of_users[2])
        # Write two temp files under base_dir to upload.
        file_one = self.base_dir + "file_one"
        file_one_content = "This is a test file, please work"
        file_two = self.base_dir + "file_two"
        file_two_content = "kas.jdfsf asdf .has as d///*&(* bunch of characters in it....skfsld;kaf2131"
        file_one_ptr = open(file_one,'wb+')
        file_one_ptr.write(bytes(file_one_content.encode('utf-8')))
        file_two_ptr = open(file_two,'wb+')
        file_two_ptr.write(bytes(file_two_content.encode('utf-8')))
        encrypted_list = [True, True]
        self.private_report_data['encrypted'] = encrypted_list
        # NOTE(review): 'files' is unused; the request uses 'send_data' below.
        files = {"file_one":file_one_ptr, "file_two":file_two_ptr}
        send_data = {'data':json.dumps(self.private_report_data),'file':[file_one_ptr,file_two_ptr]}
        self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
        response = self.client.post(
            self.reports_url,
            send_data,
            format='multipart'
        )
        self.assertEqual(
            response.status_code,
            status.HTTP_201_CREATED,
            msg="Incorrect response code when creating report with files" + str(response.data)
        )
        report = Report.objects.get()
        doc_list = report.files.all()
        self.assertEqual(
            len(doc_list),
            2,
            msg="Incorrect number of documents associated with report" + str(response.data)
        )
        # Clean up the temp inputs and the uploaded media copies.
        os.remove(file_one)
        os.remove(file_two)
        for file in doc_list:
            os.remove("/home/richard/secureshare/secureshare/media/" + str(file.file))
    def test_get_private(self):
        """Private reports are visible only to their owner; public reports
        are visible to everyone."""
        token_list = self.generate_users_receive_tokens()
        user_one_token = token_list[0]
        user_two_token = token_list[1]
        user_three_token = token_list[2]
        user_one = User.objects.get(username=self.list_of_users[0])
        user_two = User.objects.get(username=self.list_of_users[1])
        user_three = User.objects.get(username=self.list_of_users[2])
        # User two posts a private report and can list it back.
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_two_token))
        self.client.post(self.reports_url, {'data':json.dumps(self.private_report_data)}, format='json')
        response = self.client.get(self.reports_url)
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            msg="Valid user was unable to get list of reports"
        )
        self.assertEqual(
            response.data[0]['name'],
            self.private_report_data['name'],
            msg="List of reports does not match name"
        )
        self.assertEqual(
            response.data[0]['short_description'],
            self.private_report_data['short_description'],
            msg="List of reports does not match description"
        )
        self.assertEqual(
            response.data[0]['long_description'],
            self.private_report_data['long_description'],
            msg="List of reports does not match long description"
        )
        # User three must not see user two's private report.
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_three_token))
        response = self.client.get(self.reports_url)
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            msg="A different user was denied a get request"
        )
        self.assertEqual(
            len(response.data),
            0,
            msg="A different user was able to see a private report"
        )
        # User three posts a public report: visible to user three (1 report),
        # to user two (their private + the public = 2), and to user one (1).
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_three_token))
        self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)}, format='json')
        response = self.client.get(self.reports_url)
        self.assertEqual(
            len(response.data),
            1,
            msg="User 3 get request returned incorrect number of reports"
        )
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_two_token))
        response = self.client.get(self.reports_url)
        self.assertEqual(
            len(response.data),
            2,
            msg="User 2 get request returned incorrect number of reports"
        )
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_one_token))
        response = self.client.get(self.reports_url)
        self.assertEqual(
            len(response.data),
            1,
            msg="User 1 get request returned incorrect number of reports"
        )
def test_get_site_manager(self):
token_list = self.generate_users_receive_tokens()
user_one_token = token_list[0]
user_two_token = token_list[1]
user_three_token = token_list[2]
user_one = User.objects.get(username=self.list_of_users[0])
user_two = User.objects.get(username=self.list_of_users[1])
user_three = User.objects.get(username=self.list_of_users[2])
self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_two_token))
self.client.post(self.reports_url, {'data':json.dumps(self.private_report_data)}, format='json')
profile = UserProfile.objects.get(user=user_one)
profile.site_manager = True
profile.save()
response = self.client.get(self.reports_url)
self.assertEqual(
len(response.data),
1,
msg="Site manager could not see private report"
)
    def test_get_by_id(self):
        """Retrieving a single report by pk: owner OK, bad id 400, another
        user's private report 400, site manager OK."""
        token_list = self.generate_users_receive_tokens()
        user_one_token = token_list[0]
        user_two_token = token_list[1]
        user_three_token = token_list[2]
        user_one = User.objects.get(username=self.list_of_users[0])
        user_two = User.objects.get(username=self.list_of_users[1])
        user_three = User.objects.get(username=self.list_of_users[2])
        # User one creates a public report and fetches it back by pk.
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_one_token))
        response = self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)}, format='json')
        pk = response.data['pk']
        response = self.client.get(self.reports_url + str(pk) + "/")
        response.data.pop('pk')
        response.data.pop('files')
        self.assertEqual(
            response.data['name'],
            self.public_report_data['name'],
            msg="Returned incorrect report information"
        )
        # An id with no report behind it must 400.
        response = self.client.get(self.reports_url + "2/")
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            msg="Invalid id gave incorrect response code"
        )
        # User two's private report must not be readable by user one.
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_two_token))
        response = self.client.post(self.reports_url, {'data':json.dumps(self.private_report_data)}, format='json')
        private_pk = response.data['pk']
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_one_token))
        response = self.client.get(self.reports_url + str(private_pk) + "/")
        self.assertEqual(
            response.status_code,
            status.HTTP_400_BAD_REQUEST,
            msg="Another user was able to request a private report"
        )
        # A site manager (user three) may read the private report.
        profile = UserProfile.objects.get(user=user_three)
        profile.site_manager = True
        profile.save()
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(user_three_token))
        response = self.client.get(self.reports_url + str(private_pk) + "/")
        self.assertEqual(
            response.status_code,
            status.HTTP_200_OK,
            msg="Site manager denied request to a private report"
        )
def test_delete(self):
    """Deleting a report: only the owning user may delete it."""
    # Three users with auth tokens, created by the test-case helper.
    token_list = self.generate_users_receive_tokens()
    user_one_token = token_list[0]
    user_two_token = token_list[1]
    user_three_token = token_list[2]
    user_one = User.objects.get(username=self.list_of_users[0])
    user_two = User.objects.get(username=self.list_of_users[1])
    user_three = User.objects.get(username=self.list_of_users[2])
    # User one creates a public report (pk 1 on a fresh test database).
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
    self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)},format='multipart')
    # User two attempts to delete it and must be rejected.
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_two_token))
    response = self.client.delete(self.reports_url+'1/')
    self.assertEqual(
        response.status_code,
        status.HTTP_400_BAD_REQUEST,
        msg="Incorrect status code when trying to delete another user's public report"
    )
    self.assertEqual(
        Report.objects.count(),
        1,
        msg="A user deleted another user's report"
    )
    # The owner deletes their own report successfully.
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
    response = self.client.delete(self.reports_url+'1/')
    self.assertEqual(
        response.status_code,
        status.HTTP_200_OK,
        msg="Incorrect status code when a user tried to delete their report"
    )
    self.assertEqual(
        Report.objects.count(),
        0,
        msg="A user was not able to delete their own report"
    )
def test_patch(self):
    """Patching a report: the owner can rename it; other users cannot."""
    # Three users with auth tokens, created by the test-case helper.
    token_list = self.generate_users_receive_tokens()
    user_one_token = token_list[0]
    user_two_token = token_list[1]
    user_three_token = token_list[2]
    user_one = User.objects.get(username=self.list_of_users[0])
    user_two = User.objects.get(username=self.list_of_users[1])
    user_three = User.objects.get(username=self.list_of_users[2])
    # User one creates a public report (pk 1 on a fresh test database).
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
    self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)},format='multipart')
    # Rename the report as the owner; this should succeed.
    old = self.public_report_data['name']
    new = "this is a different field"
    self.public_report_data['name'] = new
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
    response = self.client.patch(self.reports_url+'1/',{'data':json.dumps(self.public_report_data)})
    self.assertEqual(
        response.status_code,
        status.HTTP_200_OK,
        msg="Incorrect status code when a valid user tried to modify their report" + str(response.data)
    )
    self.assertEqual(
        Report.objects.get().name,
        new,
        msg="The report object name was not changed"
    )
    # Try the same patch as a different user; it must be rejected and the
    # name must keep the owner's last value.
    self.public_report_data['name'] = old
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_two_token))
    response = self.client.patch(self.reports_url+'1/',{'data':json.dumps(self.public_report_data)})
    self.assertEqual(
        response.status_code,
        status.HTTP_400_BAD_REQUEST,
        msg="Incorrect status code when a user was able to modify another user's report"
    )
    self.assertEqual(
        Report.objects.get().name,
        new,
        msg="A user was able to modify another user's report"
    )
def test_folder_post(self):
    """Creating a folder that groups existing reports by pk."""
    # Three users with auth tokens, created by the test-case helper.
    token_list = self.generate_users_receive_tokens()
    user_one_token = token_list[0]
    user_two_token = token_list[1]
    user_three_token = token_list[2]
    user_one = User.objects.get(username=self.list_of_users[0])
    user_two = User.objects.get(username=self.list_of_users[1])
    user_three = User.objects.get(username=self.list_of_users[2])
    # User one owns a public report; user two owns a private one.
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_one_token))
    self.client.post(self.reports_url, {'data':json.dumps(self.public_report_data)},format='multipart')
    self.client.credentials(HTTP_AUTHORIZATION="Token " + str(user_two_token))
    self.client.post(self.reports_url, {'data':json.dumps(self.private_report_data)},format='multipart')
    # Collect the pks of every report visible to user two.
    response = self.client.get(self.reports_url)
    report_list = response.data
    pk_list = []
    for report in report_list:
        pk_list.append(report['pk'])
    # Create a folder referencing those reports (still authenticated as user two).
    data = {"name":"folder_name", "reports":pk_list}
    response = self.client.post(self.folders_url, data, format='json')
    self.assertEqual(
        response.status_code,
        status.HTTP_201_CREATED,
        msg="Incorrect response code when a user tried creating a folder"
    )
    self.assertEqual(
        Folder.objects.count(),
        1,
        msg="Folder object was not created"
    )
    self.assertEqual(
        Folder.objects.get().owner,
        user_two,
        msg="Folder object has incorrect owner"
    )
    self.assertEqual(
        len(Folder.objects.get().reports.all()),
        len(pk_list),
        msg="Folder object has incorrect number of reports"
    )
# def test_patch(self):
# token_list = self.generate_users_receive_tokens()
# token = token_list[2]
# self.client.credentials(HTTP_AUTHORIZATION='Token ' + token)
# response = self.client.patch(self.reports_url, self.private_report_data, format='json')
# self.assertEqual(
# response.status_code,
# status.HTTP_400_BAD_REQUEST
# )
# response = self.client.patch(self.reports_url + '1/', self.private_report_data, format='json')
# self.assertEqual(
# response.status_code,
# status.HTTP_400_BAD_REQUEST
# )
# response = self.client.post(self.reports_url, self.private_report_data, format='json')
# created_pk = response.data['pk']
# self.client.credentials(HTTP_AUTHORIZATION='Token ' + token_list[1])
# response = self.client.patch(self.reports_url + str(created_pk) + '/', self.private_report_data, format='json')
# self.assertEqual(
# response.status_code,
# status.HTTP_401_UNAUTHORIZED,
# msg="Accepted unauthorized request " + str(response.data)
# )
# self.client.credentials(HTTP_AUTHORIZATION='Token ' + token)
# response = self.client.patch(self.reports_url + str(created_pk) + '/', self.public_report_data, format='json')
# self.assertEqual(
# response.status_code,
# status.HTTP_202_ACCEPTED,
# msg="Authorized request denied"
# )
# self.assertEqual(
# response.data['name'],
# self.private_report_data['name'],
# msg="Incorrect field name"
# ) |
from rest_framework import filters, viewsets
from .models import Todo, TodoList
from .serializers import TodoSerializer, TodoListSerializer
class TodoViewSet(viewsets.ModelViewSet):
    """CRUD API for Todo items, scoped to the authenticated user."""
    # Base queryset (newest first); get_queryset() narrows it per user.
    queryset = Todo.objects.all().order_by('-id')
    serializer_class = TodoSerializer
    filter_backends = (filters.SearchFilter,)
    # NOTE(review): SearchFilter does substring (icontains) matching; the
    # non-text fields here ('id', 'completed', 'priority', 'todo_list__id')
    # may not search as intended — confirm against the DRF version in use.
    search_fields = (
        'id',
        'title',
        'completed',
        'priority',
        'todo_list__id',
    )

    def get_queryset(self):
        """Return the requesting user's todos, optionally filtered by query params."""
        queryset = Todo.objects.filter(user=self.request.user)
        # ?todo_list=<id> limits results to a single list.
        todo_list = self.request.query_params.get('todo_list', None)
        if todo_list is not None:
            queryset = queryset.filter(todo_list__id=todo_list)
        # ?inbox=<anything> selects todos that belong to no list.
        # NOTE(review): only the parameter's presence is checked, so
        # ?inbox=false behaves like ?inbox=true — confirm this is intended.
        inbox = self.request.query_params.get('inbox', None)
        if inbox is not None:
            queryset = queryset.filter(todo_list__id__isnull=True)
        return queryset

    def perform_create(self, serializer):
        # Stamp new todos with the requesting user as owner.
        serializer.save(user=self.request.user)
class TodoListViewSet(viewsets.ModelViewSet):
    """CRUD API for TodoList objects, scoped to the authenticated user."""
    queryset = TodoList.objects.all().order_by('-id')
    serializer_class = TodoListSerializer

    def get_queryset(self):
        """Limit visible lists to those owned by the requesting user."""
        return TodoList.objects.all().filter(user=self.request.user)

    def perform_create(self, serializer):
        """Attach the requesting user as the owner of a newly created list."""
        serializer.save(user=self.request.user)
|
# -*- coding: utf-8 -*-
# Generalized Euclidean division.
# Returns the greatest common divisor of any two integers (intended range: within 100,000).
def GCD(a=1, b=1):
    """Return the greatest common divisor of a and b (Euclid's algorithm).

    Negative inputs are handled via their absolute values; GCD(x, 0) == |x|.
    """
    # Work with non-negative values so the remainder steps behave uniformly.
    a = abs(a)
    b = abs(b)
    # Iterative Euclid: repeatedly map (a, b) -> (b, a mod b) until b == 0.
    while b:
        a, b = b, a % b
    return a
# Interactive driver: prompt for two integers and print their GCD.
# NOTE: Python 2 syntax (raw_input / print statement).
if __name__ == '__main__':
    # Re-prompt until the first input parses as an integer.
    while True:
        try:
            a = int(raw_input('The first number is '))
            '''
            if a > 100000:
                print 'The number must be under 100,000.'
                continue
            '''
        except ValueError:
            print 'Invalid input.'
            continue
        break
    # Re-prompt until the second input parses as an integer.
    while True:
        try:
            b = int(raw_input('The second number is '))
            '''
            if b > 100000:
                print 'The number must be under 100,000.'
                continue
            '''
        except ValueError:
            print 'Invalid input.'
            continue
        break
    # The disabled blocks above once enforced the 100,000 limit from the
    # header comment; as written, any integers are accepted.
    print '(%d,%d) = %d' %(a, b, GCD(a, b))
|
import csv
import pandas as pd
from pandas import DataFrame
'''
Major-course cleanup: read the raw major-course CSV, keep course rows,
strip the hyphen from the course code, and write a trimmed copy.
'''
# Columns: [grade][major][code-type][name][credits][professor][time][room]
# (header names in the data itself are Korean; only comments are translated).
# The dataset is a comma-separated CSV; rows can be dropped later with e.g. DataFrame.drop.
with open('computer.csv', 'r', encoding='utf-8') as f_major, \
        open('major_mod.csv', 'w', encoding='utf-8', newline='') as out_major:
    rdr_m = csv.reader(f_major)
    wr_m = csv.writer(out_major)
    #wr_m.writerow(['학년']+['전공']+['교과코드-구분']+['과목명']+['학점']+['담당교수']+['시간'])
    for line in rdr_m:
        print(line)
        # Keep only course rows: grade column is '*' (any grade) or a year 1-4.
        if line[0] in ('*', '1', '2', '3', '4'):
            # Normalise the course code by removing the hyphen separator.
            line[2] = line[2].replace('-', '')
            wr_m.writerow((line[0], line[1], line[2], line[3], line[5], line[8], line[9]))
# Both files are now closed, so the output is flushed to disk before pandas
# reads it back. (The original read major_mod.csv while it was still open
# for writing, so buffered rows could be missing from the DataFrame.)
dataset = pd.read_csv('./major_mod.csv')
#print(dataset)            # full dump
#print(dataset["과목명"])   # single column
'''
Liberal-arts course cleanup: read the raw liberal-arts CSV, tag its course
rows, strip the hyphen from the course code, and write a trimmed copy with
a header row.
'''
with open('lib.csv', 'r', encoding='utf-8') as f_liberal_arts, \
        open('lib_mod.csv', 'w', encoding='utf-8', newline='') as out_liberal_arts:
    rdr_l = csv.reader(f_liberal_arts)
    wr_l = csv.writer(out_liberal_arts)
    # Header row (column names stay in Korean: downstream code selects by them).
    wr_l.writerow(['학년']+['전공']+['교과코드-구분']+['과목명']+['학점']+['담당교수']+['시간'])#+['강좌관리과'])
    for line in rdr_l:
        # Rows with empty grade/major but a course code are liberal-arts
        # entries: mark them as any-grade ('*') liberal-arts ('교양') courses.
        if not line[0] and not line[1] and line[2]:
            line[0] = '*'
            line[1] = '교양'
            line[2] = line[2].replace('-', '')
            wr_l.writerow((line[0], line[1], line[2], line[3], line[4], line[8], line[9]))
# The with-block closes out_liberal_arts (the original never closed it, so
# the tail of lib_mod.csv could be lost) and f_liberal_arts.
#dataset2 = pd.read_csv('./lib_mod.csv',sep =',')
#print(dataset2)
|
# Default arguments, variable-length arguments and scope
# In this chapter, you'll learn to write functions with default arguments so that the user doesn't always need to specify them, and variable-length arguments so they can pass an arbitrary number of arguments on to your functions. You'll also learn about the essential concept of scope.
# The keyword global
# Let's work more on your mastery of scope. In this exercise, you will use the keyword global within a function to alter the value of a variable defined in the global scope.
# Create a string: team
team = "teen titans"

# Define change_team()
def change_team():
    """Rebind the module-level name ``team`` to a new value."""
    # Declare that the assignment below targets the global name, not a local.
    global team
    team = 'justice league'

# Show the value before and after the global mutation.
print(team)
change_team()
print(team)
# Nested Functions I
# One reason to nest functions is to avoid repeating the same computation:
# define the shared step once as an inner helper and reuse it.
def three_shouts(word1, word2, word3):
    """Return a tuple of the three words, each concatenated with '!!!'."""
    def inner(word):
        """Concatenate a single word with '!!!'."""
        return word + '!!!'
    # Apply the helper to each argument and pack the results into a tuple.
    return tuple(inner(w) for w in (word1, word2, word3))

# Call three_shouts() and print
print(three_shouts('a', 'b', 'c'))
# Nested Functions II
# Great job, you've just nested a function within another function. One other pretty cool reason for nesting functions is the idea of a closure. This means that the nested or inner function remembers the state of its enclosing scope when called. Thus, anything defined locally in the enclosing scope is available to the inner function even when the outer function has finished execution.
# Let's move forward then! In this exercise, you will complete the definition of the inner function inner_echo() and then call echo() a couple of times, each with a different argument. Complete the exercise and see what the output will be!
# Define echo
# (fixed: the line above was missing its leading '#', which made the whole
# file a SyntaxError)
def echo(n):
    """Return the inner_echo function, a closure over n."""
    # Define inner_echo
    def inner_echo(word1):
        """Concatenate n copies of word1."""
        echo_word = word1 * n
        return echo_word
    # Return inner_echo
    return inner_echo
# Call echo: twice
twice = echo(2)
# Call echo: thrice
thrice = echo(3)
# Call twice() and thrice() then print
print(twice('hello'), thrice('hello'))
# The keyword nonlocal and nested functions
# Use the keyword nonlocal within a nested function to alter the value of a
# variable defined in the enclosing scope.
def echo_shout(word):
    """Demonstrate ``nonlocal``: an inner function mutates an enclosing-scope name."""
    # Doubled word, printed before and after the inner function runs.
    echo_word = word * 2
    print(echo_word)

    def shout():
        """Append '!!!' to the enclosing function's echo_word."""
        nonlocal echo_word
        echo_word += "!!!"

    shout()
    print(echo_word)

# Call function echo_shout() with argument 'hello'
echo_shout('hello')
# Functions with one default argument
# Define a function with a default argument and call it both with and
# without supplying that argument.
def shout_echo(word1, echo=1):
    """Return word1 repeated echo times (default once) with '!!!' appended."""
    return word1 * echo + '!!!'

# Call shout_echo() with "Hey": no_echo
no_echo = shout_echo('Hey')
# Call shout_echo() with "Hey" and echo=5: with_echo
with_echo = shout_echo('Hey', 5)
# Print no_echo and with_echo
print(no_echo)
print(with_echo)
# Functions with multiple default arguments
# Define a function with two default arguments and call it in various ways.
def shout_echo(word1, echo=1, intense=False):
    """Return word1 repeated echo times plus '!!!', uppercased when intense is True."""
    echo_word = word1 * echo
    # Preserve the original strict identity check: only the literal True
    # triggers uppercasing (truthy values like 1 do not).
    body = echo_word.upper() if intense is True else echo_word
    return body + '!!!'

# Call shout_echo() with "Hey", echo=5 and intense=True: with_big_echo
with_big_echo = shout_echo('Hey', 5, True)
# Call shout_echo() with "Hey" and intense=True: big_no_echo
big_no_echo = shout_echo('Hey', intense=True)
# Print values
print(with_big_echo)
print(big_no_echo)
# Functions with variable-length arguments(*args)
# gibberish() accepts any number of string arguments and returns them
# concatenated in call order (args is a tuple inside the function).
def gibberish(*args):
    """Concatenate all string arguments into a single string."""
    # str.join concatenates the whole tuple in one pass.
    return ''.join(args)

# Call gibberish() with one string: one_word
one_word = gibberish('luke')
# Call gibberish() with five strings: many_words
many_words = gibberish("luke", "leia", "han", "obi", "darth")
# Print one_word and many_words
print(one_word)
print(many_words)
# Functions with variable-length keyword arguments(**kwargs)
# report_status() accepts any keyword arguments (kwargs is a dict inside the
# function) and prints them as a framed status report.
def report_status(**kwargs):
    """Print a BEGIN/END framed status report for a movie character."""
    print("\nBEGIN: REPORT\n")
    # One "key: value" line per keyword argument, in call order.
    for field, value in kwargs.items():
        print(field + ": " + value)
    print("\nEND REPORT")

# First call to report_status()
report_status(name="luke", affiliation="jedi", status="missing")
# Second call to report_status()
report_status(name="anakin", affiliation="sith lord", status="deceased")
# Bringing it all together(1)
# Generalized Twitter language analysis: count how many rows hold each value
# of a given DataFrame column. Returns {value: count}.
def count_entries(df, col_name):
    """Return a dict mapping each value in df[col_name] to its occurrence count."""
    cols_count = {}
    # dict.get with a 0 default folds the "seen before?" branch into one line.
    for entry in df[col_name]:
        cols_count[entry] = cols_count.get(entry, 0) + 1
    return cols_count
# Call count_entries(): result1
# NOTE(review): tweets_df is assumed to be pre-loaded by the exercise
# environment (a DataFrame read from 'tweets.csv'); it is not defined in
# this file, so these lines raise NameError when run standalone.
result1 = count_entries(tweets_df, 'lang')
# Call count_entries(): result2
result2 = count_entries(tweets_df, 'source')
# Print result1 and result2
print(result1)
print(result2)
# Bringing it all together(2)
# Further generalization: accept any number of column names via *args and
# accumulate value counts across all of them into one dictionary.
def count_entries(df, *args):
    """Return a dict of occurrence counts for the values of every named column."""
    cols_count = {}
    for col_name in args:
        # Fold each column's values into the shared counter.
        for entry in df[col_name]:
            cols_count[entry] = cols_count.get(entry, 0) + 1
    return cols_count
# Call count_entries(): result1
# NOTE(review): tweets_df is assumed to be pre-loaded by the exercise
# environment (a DataFrame read from 'tweets.csv'); it is not defined in
# this file, so these lines raise NameError when run standalone.
result1 = count_entries(tweets_df, 'lang')
# Call count_entries(): result2
result2 = count_entries(tweets_df, 'lang', 'source')
# Print result1 and result2
print(result1)
print(result2)
|
"""Object to keep track of which widget class should be used for each BfObject or FbxObject
This can be subclassed for DCC implementations to add more custom widgets.
"""
import fbx
from brenfbx.core import bfCore
from brenpy.core import bpDebug
from brenfbx.qt import bfQtCore
# bf object imports
from brenfbx.fbxsdk.core import bfObject
from brenfbx.objects import bfCustomObjects
from brenfbx.objects.evaluators import bfEvaluators
from brenfbx.objects.evaluators import bfModifiers
from brenfbx.fbxsdk.scene.constraint import bfConstraint
from brenfbx.fbxsdk.scene.constraint import bfConstraintAim
# widget imports
from brenfbx.qt.object import bfQtObjectWidgets
from brenfbx.qt.object import bfCustomObjectWidgets
from brenfbx.qt.object import bfQtNodeWidgets
from brenfbx.qt.constraint import bfQtConstraintWidgets
from brenfbx.qt.object.evaluation_objects import bfQtEvaluationObjectWidgets
from brenfbx.qt.object.evaluation_objects import bfQtModifierWidgets
# Editor-widget lookup tables. Each entry is (object class, widget class);
# lookups scan top-to-bottom with isinstance, so more specific classes must
# come before more general ones, and the catch-all default goes last.
# Most BF_MAPPING entries are currently disabled pending their widgets.
BF_MAPPING = [
    # bf objects
    # (bfCustomObjects.BfNoteObject, bfCustomObjectWidgets.BfNoteObjectEditorWidget),
    # (bfCustomObjects.BfSceneFilterObject, bfCustomObjectWidgets.BfSceneFilterObjectEditorWidget),
    # # modifiers
    # (bfModifiers.BfAlignPositionModifier, bfQtModifierWidgets.BfAlignPositionModifierEditorWidget),
    # (bfModifiers.BfAlignRotationModifier, bfQtModifierWidgets.BfAlignRotationModifierEditorWidget),
    # (bfModifiers.BfAimModifier, bfQtModifierWidgets.BfAimModifierEditorWidget),
    # (bfModifiers.BfPreRotateModifier, bfQtModifierWidgets.BfPreRotationModifierEditorWidget),
    # (bfModifiers.BfRotateOrderModifier, bfQtModifierWidgets.BfRotateOrderModifierEditorWidget),
    # (bfModifiers.BfRotateToPreRotateModifier, bfQtModifierWidgets.BfNodeModifierEditorWidget),
    # (bfModifiers.BfAddChildModifier, bfQtModifierWidgets.BfAddChildModifierEditorWidget),
    # # constraints
    # (bfConstraint.BfConstraintParent, bfQtConstraintWidgets.BfConstraintParentEditorWidget),
    # (bfConstraint.BfConstraintAim, bfQtConstraintWidgets.BfConstraintAimEditorWidget),
    # (bfConstraint.BfConstraintPosition, bfQtConstraintWidgets.BfConstraintPositionEditorWidget),
    # (bfConstraint.BfConstraintRotation, bfQtConstraintWidgets.BfConstraintRotationEditorWidget),
    # (bfConstraint.BfConstraintScale, bfQtConstraintWidgets.BfConstraintScaleEditorWidget),
    # # node modifier default
    # (bfModifiers.BfNodeModifier, bfQtModifierWidgets.BfNodeModifierEditorWidget),
    # # evaluation objects
    (bfEvaluators.BfFbxBuild, bfQtEvaluationObjectWidgets.BfFbxBuildEditorWidget),
    (bfEvaluators.BfEvaluationGroup, bfQtEvaluationObjectWidgets.BfEvaluationGroupEditorWidget),
    # (bfObject.BfEvaluationObject, bfQtObjectWidgets.BfEvaluationObjectEditorWidget),
]
# Fallback mapping keyed on the wrapped fbx object's class.
FBX_MAPPING = [
    # (fbx.FbxConstraintParent, bfQtConstraintWidgets.BfConstraintParentEditorWidget),
    # (fbx.FbxConstraintAim, bfQtConstraintWidgets.BfConstraintAimEditorWidget),
    # (fbx.FbxConstraintPosition, bfQtConstraintWidgets.BfConstraintPositionEditorWidget),
    # (fbx.FbxConstraintRotation, bfQtConstraintWidgets.BfConstraintRotationEditorWidget),
    # (fbx.FbxConstraintScale, bfQtConstraintWidgets.BfConstraintScaleEditorWidget),
    # (fbx.FbxNode, bfQtNodeWidgets.BfNodeEditorWidget),
    # (fbx.FbxSkeleton, bfQtNodeWidgets.BfSkeletonEditorWidget),
    # anything else defaults to FbxObjectEditorWidget
    (fbx.FbxObject, bfQtObjectWidgets.BfObjectEditorWidget),
]
# Attribute-editor (AE) widget mapping for bf objects.
BF_AE_MAPPING = [
    # bf objects
    (bfCustomObjects.BfNoteObject, bfCustomObjectWidgets.BfNoteObjectAEWidget),
    (bfCustomObjects.BfSceneFilterObject, bfCustomObjectWidgets.BfSceneFilterObjectAEWidget),
    # modifiers
    (bfModifiers.BfAlignPositionModifier, bfQtModifierWidgets.BfAlignPositionModifierAEWidget),
    (bfModifiers.BfAlignRotationModifier, bfQtModifierWidgets.BfAlignRotationModifierAEWidget),
    (bfModifiers.BfAimModifier, bfQtModifierWidgets.BfAimModifierAEWidget),
    (bfModifiers.BfPreRotateModifier, bfQtModifierWidgets.BfPreRotationModifierAEWidget),
    (bfModifiers.BfRotateOrderModifier, bfQtModifierWidgets.BfRotateOrderModifierAEWidget),
    (bfModifiers.BfRotateToPreRotateModifier, bfQtModifierWidgets.BfNodeModifierAEWidget),
    (bfModifiers.BfAddChildModifier, bfQtModifierWidgets.BfAddChildModifierAEWidget),
    # constraints
    # NOTE(review): BfConstraintAim comes from bfConstraintAim here, while the
    # disabled BF_MAPPING entry used bfConstraint.BfConstraintAim — confirm
    # which module is the current home of that class.
    (bfConstraint.BfConstraintParent, bfQtConstraintWidgets.BfConstraintParentAEWidget),
    (bfConstraintAim.BfConstraintAim, bfQtConstraintWidgets.BfConstraintAimAEWidget),
    (bfConstraint.BfConstraintPosition, bfQtConstraintWidgets.BfConstraintPositionAEWidget),
    (bfConstraint.BfConstraintRotation, bfQtConstraintWidgets.BfConstraintRotationAEWidget),
    (bfConstraint.BfConstraintScale, bfQtConstraintWidgets.BfConstraintScaleAEWidget),
    # node modifier default
    (bfModifiers.BfNodeModifier, bfQtModifierWidgets.BfNodeModifierAEWidget),
    # evaluation objects
    (bfEvaluators.BfFbxBuild, bfQtEvaluationObjectWidgets.BfFbxBuildAEWidget),
    (bfEvaluators.BfEvaluationGroup, bfQtEvaluationObjectWidgets.BfEvaluationGroupAEWidget),
    (bfObject.BfEvaluationObject, bfQtObjectWidgets.BfEvaluationObjectAEWidget),
]
# AE fallback mapping keyed on the wrapped fbx object's class; unlike
# FBX_MAPPING there is no FbxObject catch-all, so unmatched objects get None.
FBX_AE_MAPPING = [
    (fbx.FbxConstraintParent, bfQtConstraintWidgets.BfConstraintParentAEWidget),
    (fbx.FbxConstraintAim, bfQtConstraintWidgets.BfConstraintAimAEWidget),
    (fbx.FbxConstraintPosition, bfQtConstraintWidgets.BfConstraintPositionAEWidget),
    (fbx.FbxConstraintRotation, bfQtConstraintWidgets.BfConstraintRotationAEWidget),
    (fbx.FbxConstraintScale, bfQtConstraintWidgets.BfConstraintScaleAEWidget),
    (fbx.FbxNode, bfQtNodeWidgets.BfNodeAEWidget),
    (fbx.FbxSkeleton, bfQtNodeWidgets.BfSkeletonAEWidget),
]
def get_object_ae_widget_class(bf_object, bf_mapping=None, fbx_mapping=None):
    """Return the attribute-editor widget class registered for bf_object.

    Custom Bf-object mappings are consulted first, then mappings keyed on the
    wrapped fbx object's class. Returns None when nothing should be edited.
    """
    bf_pairs = BF_AE_MAPPING if bf_mapping is None else bf_mapping
    fbx_pairs = FBX_AE_MAPPING if fbx_mapping is None else fbx_mapping
    # Custom bf-object widgets win over generic fbx-object widgets.
    for candidate_cls, widget_cls in bf_pairs:
        if isinstance(bf_object, candidate_cls):
            return widget_cls
    # Otherwise match on the class of the wrapped fbx object.
    for candidate_cls, widget_cls in fbx_pairs:
        if isinstance(bf_object.fbx_object(), candidate_cls):
            return widget_cls
    # No mapping found: nothing we want to edit.
    return None
class BfObjectWidgetMapping(
    bfQtCore.BfQtWidgetMappingBase
):
    """Maps BfObject / FbxObject classes to their editor and attribute-editor
    widget classes. Subclass for DCC implementations to add custom widgets.
    """

    def __init__(self, *args, **kwargs):
        super(BfObjectWidgetMapping, self).__init__(*args, **kwargs)
        # Copy the module-level defaults so per-instance appends (see
        # add_bf_object_widget_mapping) don't mutate the shared lists for
        # every other instance.
        self._bf_object_widget_mapping = list(BF_MAPPING)
        self._bf_object_ae_widget_mapping = list(BF_AE_MAPPING)
        self._fbx_object_widget_mapping = list(FBX_MAPPING)
        self._fbx_object_ae_widget_mapping = list(FBX_AE_MAPPING)

    def bf_object_widget_mapping(self):
        """Return the (class, editor widget class) pairs for bf objects."""
        return self._bf_object_widget_mapping

    def bf_object_ae_widget_mapping(self):
        """Return the (class, AE widget class) pairs for bf objects."""
        return self._bf_object_ae_widget_mapping

    def add_bf_object_widget_mapping(self, value):
        """Append a (BfObject subclass, widget subclass) pair to the mapping.

        Raises bfCore.BfError if value is not a 2-item sequence of suitable
        classes. Returns True on success.
        """
        err_msg = "object mapping must be tuple of (BfObject, BfObjectWidget)"
        if not isinstance(value, (list, tuple)):
            raise bfCore.BfError(err_msg)
        if len(value) != 2:
            raise bfCore.BfError(err_msg)
        # Mapping entries hold *classes* (they are matched with isinstance in
        # get_object_editor_widget_class), so validate with issubclass; the
        # original isinstance checks rejected every valid class entry.
        if not (isinstance(value[0], type) and issubclass(value[0], bfCore.BfObjectBase)):
            raise bfCore.BfError(err_msg)
        if not (isinstance(value[1], type) and issubclass(value[1], bfQtObjectWidgets.BfObjectEditorWidget)):
            raise bfCore.BfError(err_msg)
        self._bf_object_widget_mapping.append(tuple(value))
        return True

    def fbx_object_widget_mapping(self):
        """Return the (fbx class, editor widget class) fallback pairs."""
        return self._fbx_object_widget_mapping

    def get_object_editor_widget_class(self, bf_object):
        """Find appropriate object editor widget class.

        Chooses from the mapping of object class to object editor widgets;
        items towards the top of the list take precedence over ones below.
        FbxObject should always be last in the list to serve as the default
        editor. Override this method in a subclass (with super as the
        fallback default) to return custom editors.
        """
        # first look for a custom bf object widget
        for bf_cls, editor_cls in self._bf_object_widget_mapping:
            if isinstance(bf_object, bf_cls):
                return editor_cls
        # if we're not using a custom bf object widget then find fbx object widget
        for fbx_cls, editor_cls in self._fbx_object_widget_mapping:
            if isinstance(bf_object.fbx_object(), fbx_cls):
                return editor_cls
        # redundant error, in theory getting to this point should be impossible
        raise bfQtCore.BfQtError("Failed to find suitable object editor class: {} {} {}".format(
            bf_object.fbx_object().GetName(), bf_object, bf_object.fbx_object()
        ))

    def get_object_ae_widget_class(self, bf_object):
        """Find appropriate object attributes editor widget class."""
        return get_object_ae_widget_class(
            bf_object,
            bf_mapping=self._bf_object_ae_widget_mapping,
            fbx_mapping=self._fbx_object_ae_widget_mapping
        )
|
# Plotter that gets passed a series of moves in a csv file
# Written to be called from mods.cba.mit.edu
# After the "toMoves.js" module
# Nadya Peek 2016
#------IMPORTS-------
from pygestalt import nodes
from pygestalt import interfaces
from pygestalt import machines
from pygestalt import functions
from pygestalt.machines import elements
from pygestalt.machines import kinematics
from pygestalt.machines import state
from pygestalt.utilities import notice
from pygestalt.publish import rpc #remote procedure call dispatcher
import time
import io
import sys
import json
#------VIRTUAL MACHINE------
class virtualMachine(machines.virtualMachine):
    """Three-axis (XYZ) pygestalt virtual machine driven over FABNET.

    NOTE: Python 2 code (print statements).
    """

    def initInterfaces(self):
        # Use the interface handed in by the caller if present; otherwise
        # open the FTDI serial port directly.
        if self.providedInterface: self.fabnet = self.providedInterface #providedInterface is defined in the virtualMachine class.
        else: self.fabnet = interfaces.gestaltInterface('FABNET', interfaces.serialInterface(baudRate = 115200, interfaceType = 'ftdi', portName = '/dev/ttyUSB1'))

    def initControllers(self):
        # One networked node per axis, all running the same 086-005a firmware,
        # combined into a compound node for synchronized multi-axis moves.
        print "init controllers, x and y"
        self.xAxisNode = nodes.networkedGestaltNode('X Axis', self.fabnet, filename = '086-005a.py', persistence = self.persistence)
        self.yAxisNode = nodes.networkedGestaltNode('Y Axis', self.fabnet, filename = '086-005a.py', persistence = self.persistence)
        self.zAxisNode = nodes.networkedGestaltNode('Z Axis', self.fabnet, filename = '086-005a.py', persistence = self.persistence)
        self.xyzNode = nodes.compoundNode(self.xAxisNode, self.yAxisNode, self.zAxisNode)

    def initCoordinates(self):
        # Machine position is tracked in millimetres on all three axes.
        self.position = state.coordinate(['mm', 'mm', 'mm'])

    def initKinematics(self):
        # Each axis chain: 4x microstepping, 1.8-degree stepper, 8 mm
        # leadscrew, direction inverted.
        self.xAxis = elements.elementChain.forward([elements.microstep.forward(4), elements.stepper.forward(1.8), elements.leadscrew.forward(8), elements.invert.forward(True)])
        self.yAxis = elements.elementChain.forward([elements.microstep.forward(4), elements.stepper.forward(1.8), elements.leadscrew.forward(8), elements.invert.forward(True)])
        self.zAxis = elements.elementChain.forward([elements.microstep.forward(4), elements.stepper.forward(1.8), elements.leadscrew.forward(8), elements.invert.forward(True)])
        self.stageKinematics = kinematics.direct(3) #direct drive on all axes

    def initFunctions(self):
        # 'null' planner: moves are executed as given, with no lookahead.
        self.move = functions.move(virtualMachine = self, virtualNode = self.xyzNode, axes = [self.xAxis, self.yAxis, self.zAxis], kinematics = self.stageKinematics, machinePosition = self.position,planner = 'null')
        self.jog = functions.jog(self.move) #an incremental wrapper for the move function
        pass

    def initLast(self):
        #self.machineControl.setMotorCurrents(aCurrent = 0.8, bCurrent = 0.8, cCurrent = 0.8)
        #self.xNode.setVelocityRequest(0)	#clear velocity on nodes. Eventually this will be put in the motion planner on initialization to match state.
        pass

    def publish(self):
        #self.publisher.addNodes(self.machineControl)
        pass

    def getPosition(self):
        # Report the planned (future) position rather than the realtime one.
        return {'position':self.position.future()}

    def setPosition(self, position = [None]):
        # NOTE(review): mutable default argument; appears only read here, but
        # confirm callers always pass a fresh list.
        self.position.future.set(position)

    def setSpindleSpeed(self, speedFraction):
        #self.machineControl.pwmRequest(speedFraction)
        pass
#------IF RUN DIRECTLY FROM TERMINAL------
# Parses moves (JSON list of segments of [x, y, z] points) from argv[1]
# and executes them one at a time, polling until each completes.
# NOTE: Python 2 code (print statements).
if __name__ == '__main__':
    # The persistence file remembers the node you set. It'll generate the first time you run the
    # file. If you are hooking up a new node, delete the previous persistence file.
    stages = virtualMachine(persistenceFile = "fabnet.vmp")
    # You can load a new program onto the nodes if you are so inclined. This is currently set to
    # the path to the 086-005 repository on Nadya's machine.
    #stages.xyzNode.loadProgram('../../../086-005/086-005a.hex')
    # This is a widget for setting the potentiometer to set the motor current limit on the nodes.
    # The A4982 has max 2A of current, running the widget will interactively help you set.
    #stages.xyzNode.setMotorCurrent(0.7)
    # This is for how fast the motors move
    stages.xyzNode.setVelocityRequest(2)
    # Pull the moves out of the provided file
    # Where coordinates are provided as
    # [x1, y1, z1],[x2, y2, z2]
    moves = []
    try:
        movestr = sys.argv[1] # moves are passed as a string, this may break with very long strings
    except:
        # NOTE(review): bare except; also, when no argument is given movestr
        # stays undefined and json.loads below raises NameError.
        print "No moves file provided"
    #f = open(filename, 'r')
    #for line in f.readlines():
    #    pass
    #movestr = "[[[125,91,0],[154,91,0],[170,96,0],[178,103,0],[186,115,0],[197,140,0],[268,317,0],[242,317,0],[242,315,0],[240,311,0],[190,189,0],[188,189,0],[136,317,0],[111,317,0],[111,314,0],[113,310,0],[177,154,0],[163,123,0],[156,116,0],[148,112,0],[145,112,0],[125,110,0],[125,91,0]],[[125,91,2],[363,150,2]]]"
    segs = json.loads(movestr)
    # Flatten the segments into a single list of [x, y, z] moves.
    for seg in segs:
        for move in seg:
            moves.append(move)
    #print "Moves:"
    #print moves
    # Move!
    for move in moves:
        stages.move(move, 0)
        status = stages.xAxisNode.spinStatusRequest()
        # This checks to see if the move is done.
        while status['stepsRemaining'] > 0:
            time.sleep(0.001)
            status = stages.xAxisNode.spinStatusRequest()
|
import feedparser
from settings import get_config
from models import JobOffer
def parse_stackoverflow():
    """ Parses the results of the stackoverflow remote jobs search into a list of JobOffer objects """
    url = get_config().get('rss').get('stackoverflow')
    offers = []
    for entry in feedparser.parse(url).entries:
        # Tags are optional on an entry; default to the empty string.
        tags = extract_tags(entry["tags"]) if 'tags' in entry else ""
        offers.append(
            JobOffer(entry["link"], entry["title"], entry["description"],
                     entry["author"], entry["updated"], tags)
        )
    return offers
def extract_tags(tags):
    """Return the 'term' field of every tag dict, in order."""
    return [tag["term"] for tag in tags]
def parse_we_work_remotely():
    """ Parses the results of the weworkremotely remote programming jobs into a list of JobOffer objects """
    config = get_config()
    feed = feedparser.parse(config.get('rss').get('weworkremotely'))
    offers = []
    for entry in feed.entries:
        # The company name precedes the colon in the entry title.
        author = entry["title"].split(":")[0]
        offers.append(JobOffer(entry["link"], entry["title"], entry["summary"],
                               author, entry["published"], ""))
    return offers
|
from rest_framework.views import APIView
import logging
from rest_framework.response import Response
from user.models import *
from user.serializers import StudentSerializers, TeacherSerializers
from .login_token import *
from django.contrib.auth.hashers import check_password
from django.contrib.auth.hashers import make_password
from django.views.decorators.csrf import csrf_exempt
# Create your views here.
class LoginView(APIView):
    """Login endpoint for students and teachers (POST only).

    Returns 200 with a token on success, 401 with a reason message otherwise.
    """
    permission_classes = []  # disable the global token authentication for login
    logger = logging.getLogger('login.views')

    def _do_login(self, model, req, role, role_label):
        """Authenticate one user against the given model.

        :param model: the Student or Teacher model class
        :param req: request.data with 'username' and 'password'
        :param role: role string embedded in the issued token
        :param role_label: label prefix for the success log line
        :return: DRF Response (200 on success, 401 otherwise)
        """
        data = {'code': 401}
        user = model.objects.filter(username=req.get('username')).first()
        if not user:
            # fixed: was 'no user named'+name (missing space) in both branches
            data['msg'] = 'no user named ' + req.get('username')
            return Response(data, status=401)
        if not check_password(req.get('password'), user.password):
            data['msg'] = 'password is wrong'
            return Response(data, status=401)
        data['code'] = 200
        # fixed: student branch said 'login successful', teacher 'login success';
        # unified to a single message.
        data['msg'] = 'login successful'
        data['token'] = create_token(user, role)
        self.logger.info(role_label + user.username + "登录成功!")
        return Response(data, status=200)

    def post(self, request):
        """
        Verify the submitted credentials and issue a token.

        :param request: POST with 'role', 'username' and 'password'
        :return: login result Response
        """
        # NOTE: the original printed request.data here, leaking plaintext
        # passwords to stdout; removed.
        req = request.data
        if req.get('role') == 'student':
            return self._do_login(Student, req, 'student', '学生')
        return self._do_login(Teacher, req, 'teacher', '教师')
class RegisterView(APIView):
    """Registration endpoint: create a Student or Teacher account (POST)."""
    permission_classes = []  # disable the global token authentication
    logger = logging.getLogger('login.views')
    def post(self,request):
        """Validate and persist a new user; the password is hashed before save.

        :param request: POST with 'role', 'username', 'password', ...
        :return: 201 on success, 401 when validation fails
        """
        data ={'code':400}
        # NOTE(review): prints the raw request payload (including the plaintext
        # password) to stdout — consider removing in production.
        print(request.data)
        user = request.data.copy()
        if request.data.get('password'):
            user['password'] = make_password(request.data.get('password'))
        if request.data.get('role') == 'student':
            serializers = StudentSerializers(data=user)
        else:
            serializers = TeacherSerializers(data=user)
        try:
            if serializers.is_valid(raise_exception=True):
                serializers.save()
                data['code'] = 201
                data['msg'] = "注册成功!"
                # role = serializers.validated_data.get('role')
                # data['token'] = create_token(user, role)
                self.logger.info(request.data.get('role')+request.data.get('username') + "注册成功!")
                return Response(data, status=201)
            else:
                # Unreachable: with raise_exception=True, is_valid raises
                # instead of returning False.
                data['msg'] = '用户名已存在!'
                return Response(data, status=401)
        except Exception as e:
            # NOTE(review): catches every exception and reports "username
            # already exists" — other validation errors are masked by this
            # message.
            print(e)
            data['msg'] = '用户名已存在!'
            return Response(data,status=401)
class JWTview(APIView):
    """Return the serialized profile of the user identified by a valid token."""
    # permission_classes =[Authenticated]  # custom auth class (disabled)

    @resolve_token
    def get(self, request, args):
        """Resolve the token payload in *args* to a Student or Teacher record."""
        print(args)
        uid = args.get('uid')
        if args.get('role') == 'student':
            print('student')
            serializer = StudentSerializers(Student.objects.get(uid=uid))
        else:
            print('teacher')
            serializer = TeacherSerializers(Teacher.objects.get(uid=uid))
        return Response({'code': 200, 'msg': 'token校验成功', 'user': serializer.data})
|
#!/usr/bin/env python
# Function:
# Filename:
# name = [1,2,3]
# try:
# day = 1
# except IndexError as e:
# print(e)
# except KeyError as e:
# print(e)
# except (IOError , ImportError) as e:
# print(e)
# except Exception as e:
# print(e)
# else:
# print("No Error")
# finally:
# print("No matter Error or not would run here!")
'''
name = [1,2,3]
data = {"Year":"2017", "Month":"7"}
# print(data["Day"])
try:
print(name[3]) # 这边已经出现异常IndexError ,所以直接跳出code,跳到KeyError 下去处理
print(data["Day"])
# except IndexError as e:
# print(e)
# except KeyError as e:
# print(e)
except (IndexError, KeyError) as e:
print(e)
'''
# class TjyError(Exception):
# def __init__(self, message):
# self.message = message
#
# def __str__(self):
# return self.message
# try:
# raise TjyError("数据库连接不上了")
# except TjyError as e:
# print(e)
# class Time(object):
# print("in it")
# def __init__(self, hour, minute, second):
# print("in init")
# self.hour = hour
# self.minute = minute
# self.second = second
# def __str__(self):
# print("hahahaha")
# return "%.2d:%.2d:%.2d" % (self.hour, self.minute, self.second)
#
# print("out ")
# tim = Time(2, 34, 32)
# print(tim)
# print(tim.hour)
class Person(object):
    """Minimal value holder for a name and a gender."""

    def __init__(self, name, gender):
        # Store the identifying attributes directly.
        self.name = name
        self.gender = gender
class Student(Person):
    """A Person carrying an exam score; prints as '(Student: name, gender, score)'."""

    def __init__(self, name, gender, score):
        super(Student, self).__init__(name, gender)
        self.score = score

    def __str__(self):
        fields = (self.name, self.gender, self.score)
        return '(Student: %s, %s, %s)' % fields

    # Make the REPL representation identical to str().
    __repr__ = __str__
# Demo: because __repr__ is aliased to __str__, print shows the formatted form.
s = Student('Bob', 'male', 88)
print(s)
"""
@File: time_task.py
@CreateTime: 2020/1/6 上午11:05
@Desc: 定时任务schedule
"""
import schedule
import time
def one_job(message="stuff"):
    """Print a 'working on' status line; used as a schedule callback."""
    print("I'm working on: " + str(message))
def two_job(message='working'):
    """Print a worker-status line; used as a schedule callback."""
    print("The Worker Status is: " + str(message))
if __name__ == '__main__':
    schedule.every(10).minutes.do(one_job)              # every 10 minutes
    schedule.every().hour.do(one_job)                   # every hour
    schedule.every(5).to(10).days.do(two_job)           # every 5 to 10 days
    schedule.every().hour.do(two_job, message='sleep')  # hourly, with an argument
    schedule.every().day.at("10:30").do(one_job)        # daily at 10:30
    schedule.every().wednesday.at("13:15").do(one_job)  # Wednesdays at 13:15
    # Busy loop: run whatever is due, then sleep one second.
    while True:
        schedule.run_pending()
        time.sleep(1)
|
# Classify a load size from the start/end gauge difference (in m3).
Gstart = int(input('Input start gauge '))
Gend = int(input('Input final gauge '))
Tm = Gstart - Gend
print('You loaded',Tm,'m3')
if (Tm < 0):
    print('Start gauge cant be smaller than end gauge')
elif (Tm == 0):
    # Fixed: Tm == 0 previously matched no branch and fell through to the
    # "overloaded" message.
    print('You loaded nothing')
elif (Tm <= 24):
    print('This is small load')
elif (Tm < 30):
    print('This is big load')
else:
    print('You are overloaded')
|
import os
import pandas as pd
import seaborn as sns; sns.set_theme(color_codes=True)
# Build a clustered heatmap from a similarity matrix CSV indexed by ligand.
file_name=os.path.join('folder_path','similarity_res.csv')  # TODO: replace placeholder paths
df=pd.read_csv(file_name,index_col='ligand') #
df=df.drop(columns=df.columns[df.isna().all()].tolist()) # removing columns with all na values
df=df.dropna() # NOTE(review): drops rows containing ANY NA (pandas default how='any'), not only all-NA rows as the original comment claimed
heatplt = sns.clustermap(df)  # hierarchical clustering on both axes
heatplt.savefig(os.path.join('save_folder_path','similarity_heatmap.png'))
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 29 10:01:28 2017
@author: 29907
"""
#2048_game.py -- Auto play 2048 game
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
#Open 2048 game website
# Drive the 2048 web game by cycling UP, RIGHT, DOWN, LEFT key presses.
browser=webdriver.Opera()
browser.get('http://gabrielecirulli.github.io/2048/')
html_ele=browser.find_element_by_class_name('container') #Select html element that receives key events
for i in range(1000):
    if i%4==0:
        html_ele.send_keys(Keys.UP)
    if i%4==1:
        html_ele.send_keys(Keys.RIGHT)
    if i%4==2:
        html_ele.send_keys(Keys.DOWN)
    if i%4==3:
        # fixed: was `send_keyd` — an AttributeError on the first LEFT move
        html_ele.send_keys(Keys.LEFT)
|
# Read strings until a blank line, then report which strings were repeated.
words = []
word = str(input("Enter string: "))
while word != "":
    words.append(word)
    word = str(input("Enter string: "))
repeats = []
for x in range(0, len(words), 1):
    # Fixed: the original condition was `... and not repeats`, which became
    # False after the first repeat was recorded, so only ONE repeated string
    # was ever reported. Check membership in `repeats` instead, which also
    # keeps each repeated string listed once.
    if words[x] in words[:x] and words[x] not in repeats:
        repeats.append(words[x])
if len(repeats)>0:
    print("Strings repeated: ", " ".join(map(str, repeats)))
else:
    print("No repeated strings entered")
# n-gram language model on IMDB: predict token i+ngram from tokens i..i+ngram-1.
# NOTE: Python 2 / legacy Keras API (print statements, nb_words/nb_epoch kwargs).
ngram = 4
from keras.datasets import imdb
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.callbacks import ModelCheckpoint
import json
import word_table as w_t
from keras.utils import np_utils
from error_analysis import serialize_errors
# Load reviews as word-index sequences; labels and the test split are unused.
(X_train, y_train), (_, _) = imdb.load_data(path="imdb_full.pkl",
                                            nb_words=None,
                                            skip_top=0,
                                            maxlen=None,
                                            seed=113,
                                            start_char=1,
                                            oov_char=2,
                                            index_from=3)
X_train = X_train  # NOTE(review): no-op assignment
# Concatenate every review into one long token stream.
flatten = lambda l: [item for sublist in l for item in sublist]
X_train_cont = flatten(X_train)
# Sliding window: X = ngram consecutive tokens, Y = the following token.
Xdata = []
Ydata = []
for i in range(len(X_train_cont) - ngram):
    Xdata.append(X_train_cont[i:i+ngram])
    Ydata.append(X_train_cont[i+ngram])
# NOTE(review): max index used as the class count; to_categorical below may
# need top_words + 1 classes since indices run 0..top_words — confirm.
top_words = np.max(np.array(Ydata));
ngram = 4  # NOTE(review): redundant re-assignment (already 4 above)
embedding_vecor_length = 64
model = Sequential()
model.add(Embedding(top_words, embedding_vecor_length, input_length=ngram))
model.add(LSTM(100, dropout_W = 0.2, dropout_U = 0.2))
model.add(Dense(top_words, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
print(model.summary())
samples = 2000  # minibatch size served by the generator
def generate_data():
    # Yield (x, one-hot y) minibatches forever (Python 2: `/` floors here).
    while 1:
        for i in range(len(Xdata)/samples):
            xdata = np.array(Xdata[i*samples:(i+1)*samples])
            ydata = np_utils.to_categorical(Ydata[i*samples:(i+1)*samples], top_words)
            print i*samples,(i+1)*samples
            yield xdata, ydata
model.fit_generator(generate_data(), samples_per_epoch = len(Xdata) - samples, nb_epoch=5, verbose=2)
model.save("imdb_lstm.h5")
|
import numpy as np
class epsilon(object):
    """Exponentially decaying exploration rate with a lower bound."""

    def __init__(self, eps_start = 1.0, eps_decay = 0.999, eps_min = 0.0):
        # Keep the initial value around, and start the live value from it.
        self.eps_start = eps_start
        self.eps = eps_start
        self.eps_decay = eps_decay
        self.eps_min = eps_min

    def update(self):
        """Apply one multiplicative decay step, never dropping below eps_min."""
        decayed = self.eps * self.eps_decay
        self.eps = decayed if decayed > self.eps_min else self.eps_min

    def get(self):
        """Return the current epsilon value."""
        return self.eps
class policy(object):
    """Interface for action-selection policies over a vector of Q-values."""

    def get_action(self, Q_state):
        """Return the index of the chosen action; implemented by subclasses."""
        pass

    def update(self, episode_counter):
        """Per-episode hook (e.g. decay exploration); default is a no-op."""
        pass
class random(policy):
    """Uniform-random policy.

    NOTE: the class name shadows the stdlib `random` module in this file.
    """

    def get_action(self, Q_state):
        # Any action index in [0, len(Q_state)).
        return np.random.randint(0, len(Q_state))
class maximum(policy):
    """Greedy policy: always pick the action with the highest Q-value."""

    def get_action(self, Q_state):
        # Ties resolve to the first maximal index, as np.argmax does.
        return np.argmax(Q_state)
class epsilon_greedy(policy):
    """Epsilon-greedy: exploit the argmax with prob 1 - eps, otherwise explore."""

    def __init__(self, epsilon, schedule):
        self.epsilon = epsilon    # epsilon object exposing get()/update()
        self.schedule = schedule  # decay epsilon every `schedule` episodes

    def get_action(self, Q_state, else_take = None):
        # Exploit when the random draw exceeds the current epsilon.
        if np.random.random() > self.epsilon.get():
            return np.argmax(Q_state)
        # Explore: uniformly over all actions, or over `else_take` if given.
        if else_take is None:
            return np.random.choice(np.arange(len(Q_state)))
        return np.random.choice(else_take)

    def update(self, episode_counter):
        # Decay epsilon only on scheduled episodes.
        if episode_counter % self.schedule == 0:
            self.epsilon.update()
#!/usr/bin/env python3
import numpy as np
from glob import iglob
from collections import defaultdict
from functools import partial
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
import pickle
#set up tex
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
#hatch
# plt.rcParams['hatch.linewidth'] = 0.1
# plt.rcParams['hatch.color'] = 'orange'
# plt.style.use('ggplot')
# plt.style.use('presentation')
plt.style.use(['seaborn', 'seaborn-talk'])
### load in xsections ###
### bkg ###
# debug = ["virtcoeff.m8", "real.m8"]
# files = ["lo.m8", "nlo.m8", "nll.m8", "nnll.m8", "nlo+nll.m8", "nlo+nnll.m8"]
# data = []
# debug_data = []
# for file in files:
# with open(file, "rb") as f:
# data.append(pickle.load(f))
# for file in debug:
# with open(file, "rb") as f:
# debug_data.append(pickle.load(f))
# Load pickled cross sections: the qqbar -> WW background and the gg -> WW
# signal for several (kappa_t, kappa_g) coupling points. Each pickle maps an
# order key (e.g. 'lo', 'nlo', 'nlo+nnll') to {'central', 'min', 'max'} arrays.
with open('WWqqbr', 'rb') as f:
    bkg = pickle.load(f)
with open('ggWW4l_ktg_0.0_0.0', 'rb') as f:
    sig_ktg0000 = pickle.load(f)
with open('ggWW4l_ktg_1.0_0.0', 'rb') as f:
    sig_ktg1000 = pickle.load(f)
with open('ggWW4l_ktg_0.0_1.0', 'rb') as f:
    sig_ktg0010 = pickle.load(f)
with open('ggWW4l_ktg_0.7_0.3', 'rb') as f:
    sig_ktg0703 = pickle.load(f)
# Bin centres for the 50 invariant-mass histogram bins.
m3456 = np.linspace(10.0,990.0,num=50)
# process data
# Signal difference (kappa=(0,0) minus kappa=(1,0)) divided by the background
# at LO, NLO and NLO+NNLL accuracy respectively.
delta_lo_lo_ktg0000 = np.divide(sig_ktg0000['lo']['central'] - sig_ktg1000['lo']['central'], bkg['lo']['central'])
delta_lo_nlo_ktg0000 = np.divide(sig_ktg0000['lo']['central'] - sig_ktg1000['lo']['central'], bkg['nlo']['central'])
delta_lo_nnll_ktg0000 = np.divide(sig_ktg0000['lo']['central'] - sig_ktg1000['lo']['central'], bkg['nlo+nnll']['central'])
# print(np.divide(bkg['const']['central'], bkg['nlo']['central']))
# print(np.divide(bkg['const']['central'], bkg['nnll']['central']))
### plots ###
# bkg qqbWW
# Figure 1: background, NLO+NNLL curve against the 'const' (remainder) piece.
f1, ax1 = plt.subplots(1, 1)
# configuration
ax1.set_yscale('log')
ax1.set_ylim(1E-5, 1E+1)
ax1.set_xlim(100.0, 1000.0)
# plots
#ax1.step(m3456, bkg_qqbWW_lo, label=r"LO")
#ax1.step(m3456, bkg_qqbWW_nlo, label=r"NLO")
#ax1.step(m3456, bkg_qqbWW_nll, label=r"NLL")
#ax1.step(m3456, bkg_qqbWW_nnll, label=r"NNLL")
#ax1.step(m3456, bkg_qqbWW_nlo_nll, label=r"NLO+NLL")
# old style
# ax1.plot(m3456, data[5]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NLO+NNLL")
# ax1.plot(m3456, data[5]['max'], linestyle=':', drawstyle='steps', label=r"NLO+NNLL")
# ax1.plot(m3456, data[5]['min'], linestyle=':', drawstyle='steps', label=r"NLO+NNLL")
# fill_between_steps(ax1, m3456, data[5]['max'], data[5]['min'])
# Central value as a thin stepped line, min/max band as a hatched fill.
ax1.plot(m3456, bkg['nlo+nnll']['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NLO+NNLL")
ax1.fill_between(m3456, bkg['nlo+nnll']['min'], bkg['nlo+nnll']['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
edgecolor="orange", linestyle='-', linewidth=0.05)
ax1.plot(m3456, bkg['const']['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"remainder")
ax1.fill_between(m3456, bkg['const']['min'], bkg['const']['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[5]['central'], linestyle='-', linewidth=0.4, label=r"NLO+NNLL")
# ax1.fill_between(m3456, data[5]['min'], data[5]['max'], facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[4]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NLO+NLL")
# ax1.fill_between(m3456, data[4]['min'], data[4]['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[3]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NNLL")
# ax1.fill_between(m3456, data[3]['min'], data[3]['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[2]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NLL")
# ax1.fill_between(m3456, data[2]['min'], data[2]['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[1]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"NLO")
# ax1.fill_between(m3456, data[1]['min'], data[1]['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, data[0]['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"LO")
# ax1.fill_between(m3456, data[0]['min'], data[0]['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
# edgecolor="orange", linestyle='-', linewidth=0.05)
# ax1.plot(m3456, debug_data[0]['central'], linestyle='-', drawstyle='steps', label=r"virt")
# ax1.plot(m3456, debug_data[0]['max'], linestyle=':', drawstyle='steps', label=r"virt")
# ax1.plot(m3456, debug_data[0]['min'], linestyle=':', drawstyle='steps', label=r"virt")
# ax1.plot(m3456, -debug_data[1]['central'], linestyle='-', drawstyle='steps', label=r"real")
# ax1.plot(m3456, -debug_data[1]['max'], linestyle=':', drawstyle='steps', label=r"real")
# ax1.plot(m3456, -debug_data[1]['min'], linestyle=':', drawstyle='steps', label=r"real")
# labels
# Title (math-mode LaTeX, rendered via usetex) and legend for figure 1.
f1.suptitle(r"Background $q\bar{q} \to WW$")
ax1.legend()
# HWW + ggHWW (lo)
# HWW + ggHWW (lo): signal spectra for three (kappa_t, kappa_g) points.
f2, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='col', sharey='row')
# configuration
ax1.set_yscale('log')
ax1.set_ylim(1E-5,1E0)
ax3.set_ylim(0.0, 4.0)
ax1.set_xlim(100.0, 1000.0)
ax2.set_xlim(100.0, 1000.0)
# fixed: with plt.rc('text', usetex=True) the labels must be valid LaTeX;
# \kappa outside math mode makes LaTeX fail, so each label is wrapped in
# $...$ (matching the style already used for the suptitles).
ax1.plot(m3456, sig_ktg1000['lo']['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\kappa_{t,g}=(1,0)$")
ax1.fill_between(m3456, sig_ktg1000['lo']['min'], sig_ktg1000['lo']['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
edgecolor="orange", linestyle='-', linewidth=0.05)
ax1.plot(m3456, sig_ktg0010['lo']['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\kappa_{t,g}=(0,1)$")
ax1.fill_between(m3456, sig_ktg0010['lo']['min'], sig_ktg0010['lo']['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
edgecolor="orange", linestyle='-', linewidth=0.05)
ax1.plot(m3456, sig_ktg0000['lo']['central'], linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\kappa_{t,g}=(0,0)$")
ax1.fill_between(m3456, sig_ktg0000['lo']['min'], sig_ktg0000['lo']['max'], step='pre', facecolor='none', hatch='xxxxxxxxxx', \
edgecolor="orange", linestyle='-', linewidth=0.05)
f2.suptitle(r"Signal $gg \to WW$")
ax1.legend()
# Figure 3: the kappa=(0,0) signal deltas normalised to LO / NLO / NLO+NNLL
# background (see delta_* definitions above).
f3, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='col', sharey='row')
ax1.set_ylim(-0.01, 0.1)
ax1.set_xlim(100.0, 1000.0)
ax2.set_xlim(100.0, 1000.0)
# fixed: the three curves carried identical copy-pasted labels lacking the
# $...$ math delimiters required under usetex, and no legend was drawn.
# Label each curve by its background normalisation instead.
ax1.plot(m3456, delta_lo_lo_ktg0000, linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\Delta/\mathrm{LO}$")
ax1.plot(m3456, delta_lo_nlo_ktg0000, linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\Delta/\mathrm{NLO}$")
ax1.plot(m3456, delta_lo_nnll_ktg0000, linestyle='-', drawstyle='steps', linewidth=0.4, label=r"$\Delta/\mathrm{NLO+NNLL}$")
ax1.legend()
# save plots
# NOTE(review): filenames carry no extension; format="pdf" still writes valid
# PDF data, but the files land on disk without ".pdf".
f1.set_size_inches(10,10)
f1.savefig("WW-bkg", dpi=300, format="pdf")
f2.set_size_inches(10,10)
f2.savefig("sig-lo", dpi=300, format="pdf")
f3.set_size_inches(10,10)
f3.savefig("delta-ktg0000", dpi=300, format="pdf")
|
from flask import *
from utiles import todict
# Blueprint for all routes under /empresa; templates from empresa_templates.
empresa = Blueprint('empresa', __name__, url_prefix='/empresa', template_folder='empresa_templates')
@empresa.route('/')
def home():
    """Render the empresa landing page."""
    return render_template('empresa.home.html')
@empresa.route('/api', methods=['GET', 'POST', 'DELETE'])
def api():
    """JSON API over the 'empresas' table.

    Only GET is implemented. POST/DELETE are declared in the route but
    previously fell through returning None, which Flask turns into a 500;
    respond 405 (Method Not Allowed) for them instead.
    """
    if request.method == 'GET':
        datos = g.cache.gettable('empresas').select().execute().fetchall()
        return jsonify(todict(datos))
    abort(405)  # declared but not yet implemented
|
class AbstractDataset():
    """Abstract base for dataset loaders; subclasses must override __init__."""
    def __init__(self, csvpath, config, batchsize, accbatchsize):
        # NotImplementedError is the conventional signal for an abstract
        # member; it subclasses Exception, so any existing
        # `except Exception` handlers still catch it.
        raise NotImplementedError("Abstract class used")
|
import sys
import os
import enum
import socket
import struct
# UDP socket shared by the whole server; bound in main() to the standard
# TFTP port (69) on localhost.
server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server_address = ("127.0.0.1", 69)
class TftpProcessor(object):
    """TFTP (RFC 1350) packet processor.

    Parses incoming UDP payloads, performs the read/write file logic, and
    queues reply packets in `packet_buffer` for the server loop to send.
    """
    class TftpPacketType(enum.Enum):
        # TFTP opcodes.
        RRQ = 1
        WRQ = 2
        DATA = 3
        ACK = 4
        ERROR = 5
    # TFTP error codes -> messages (unused codes left commented out).
    error_msg = {
        0: "Not defined, see error message (if any).",
        1: "File not found.", #
        # 2: "Access violation.",
        # 3: "Disk full or allocation exceeded.",
        4: "Illegal TFTP operation.", #
        # 5: "Unknown transfer ID.",
        6: "File already exists.", #
        # 7: "No such user."
    }
    def __init__(self):
        # Outgoing packets waiting to be sent by the server loop.
        self.packet_buffer = []
        # Chunks of the file currently being served (filled by read_request).
        self.blocks_buffer = []
        self.size = 0
        # Set to a str once a RRQ/WRQ names a file.
        self.filename = []
        # Last DATA block number sent.
        self.block_number = 0
    def process_udp_packet(self, packet_data, packet_source):
        """Parse one datagram, run the protocol logic, queue the reply."""
        print(f"Received a packet from {packet_source}")
        in_packet = self._parse_udp_packet(packet_data)
        out_packet = self._do_some_logic(in_packet)
        # This shouldn't change.
        self.packet_buffer.append(out_packet)
        print(f" PACKET BUFFER {self.packet_buffer}")
    def _parse_udp_packet(self, packet_bytes):
        """Decode a raw TFTP packet and re-pack it into canonical form.

        Returns packed bytes for RRQ/WRQ/DATA/ACK, a list for ERROR, or an
        ERROR packet for an unknown opcode.
        """
        read = bytearray()
        write = bytearray()
        blockN = bytearray()
        ackblockN = bytearray()
        data = bytearray()
        errorcode = bytearray()
        i = 2
        j = 2
        # First two bytes are the big-endian opcode.
        (opcode,) = struct.unpack("!H", packet_bytes[0:2])
        if opcode == TftpProcessor.TftpPacketType.RRQ.value: # read
            # Filename is the NUL-terminated string following the opcode.
            while packet_bytes[i] != 0:
                read.append(packet_bytes[i])
                i += 1
            self.filename = read.decode('ascii')
            format_str = "!H{}sB{}sB".format(len(read), len('octet'))
            packet_data = struct.pack(format_str, opcode, read, 0, 'octet'.encode('ascii'), 0)
            return packet_data
        elif opcode == TftpProcessor.TftpPacketType.WRQ.value: # write
            while packet_bytes[i] != 0:
                write.append(packet_bytes[i])
                i += 1
            self.filename = write.decode('ascii')
            format_str = "!H{}sB{}sB".format(len(write), len('octet'))
            packet_data = struct.pack(format_str, opcode, write, 0, 'octet'.encode('ascii'), 0)
            return packet_data
        elif opcode == TftpProcessor.TftpPacketType.DATA.value: # data
            # NOTE(review): decimal string concatenation is NOT the big-endian
            # 16-bit block number (bytes 0x01,0x02 give 12, not 258);
            # struct.unpack("!H", packet_bytes[2:4]) would be correct — confirm.
            blockN = int(str(packet_bytes[2]) + str(packet_bytes[3]))
            data = packet_bytes[4:]
            format_str = "!HH{}s".format(len(data))
            packet_data = struct.pack(format_str, opcode, blockN, data)
            return packet_data
        elif opcode == TftpProcessor.TftpPacketType.ACK.value: # ACK
            # NOTE(review): same decimal-concatenation caveat as DATA above.
            blockN = int(str(packet_bytes[2]) + str(packet_bytes[3]))
            format_str = ("!HH")
            packet_data = struct.pack(format_str, opcode, blockN)
            return packet_data
        elif opcode == TftpProcessor.TftpPacketType.ERROR.value: # error
            while j < 4:
                errorcode.append(packet_bytes[j])
                j += 1
            # Ad-hoc decomposition: ['ERROR', code bytes, message words, ...].
            packet_data = packet_bytes[2:len(packet_bytes) - 9].split(bytearray([0]))
            packet_data.insert(0, 'ERROR'.encode('ascii'))
            packet_data.insert(1, packet_bytes[2:3])
            packet_data.insert(2, packet_bytes[4:len(packet_bytes)].split())
            return packet_data
        else: # opcode doesn't exist
            # Build an ERROR(4) "Illegal TFTP operation" packet.
            msg = "Illegal TFTP operation"
            format_str = "!HH{}sB".format(len(msg))
            opcode = 5
            errorn = 4
            error_packet = struct.pack(format_str, opcode, errorn, msg.encode("ascii"), 0)
            return error_packet
    def _do_some_logic(self, input_packet ):
        """Dispatch on the opcode of a parsed packet and build the response."""
        block = 0
        # input_packet[1] is the low byte of the big-endian opcode.
        opcode = input_packet[1]
        if opcode == 1:
            # RRQ: start serving the named file from block 1.
            self.block_number=0
            packet_to_buffer = self.read_request()
            return packet_to_buffer
        elif opcode == 2: # write case
            if os.path.isfile(self.filename):
                # Refuse to overwrite: ERROR(6) "File already exists."
                msg = "File already exists."
                format_str = "!HH{}sB".format(len(msg))
                opcode = 5
                errorn = 6
                error_packet = struct.pack(format_str, opcode, errorn, msg.encode("ascii"), 0)
                return error_packet
            else:
                # ACK block 0 to start the upload.
                opcode = 4
                format_str = "!HH"
                ack_Packet = struct.pack(format_str, opcode, block)
                return ack_Packet
        elif opcode == 3:
            # DATA: append the chunk to the file and ACK it.
            Block_Data = struct.unpack("!H", input_packet[2:4])
            Data = struct.unpack("!{}s".format(len(input_packet[4:])), input_packet[4:])
            print('Data sent',Data)
            packet_to_buffer = self.write_request(Block_Data, Data)
            return packet_to_buffer
        elif opcode == 4:
            # ACK: send the next data block of the current read.
            packet_to_buffer = self.read_request()
            return packet_to_buffer
        else:
            # Unknown opcode at this layer: ERROR(0).
            msg = "Not defined, see error message (if any)."
            format_str = "!HH{}sB".format(len(msg))
            opcode = 5
            errorn = 0
            error_packet = struct.pack(format_str, opcode, errorn, msg.encode("ascii"), 0)
            return error_packet
    def read_request(self):
        """Build the next DATA packet for the file named by the RRQ.

        NOTE(review): the whole file is re-read and re-appended to
        blocks_buffer on EVERY call (i.e. on every ACK), and the final short
        chunk is appended twice (unconditional append plus the append before
        `break`), so blocks are duplicated — confirm and deduplicate.
        NOTE(review): the file is opened in text mode ('r') although the
        request advertises 'octet' (binary) transfer mode.
        """
        chunk_size = 512
        if os.path.exists(self.filename):
            with open(self.filename, 'r') as f:
                while True:
                    read_data = f.read(chunk_size)
                    print(len(read_data))
                    opcode = 3
                    self.blocks_buffer.append(read_data)
                    if len(read_data) < 512:
                        self.blocks_buffer.append(read_data)
                        break
            f.close()
            print("opcode", opcode, "block", self.block_number)
            self.block_number+=1
            # Pack DATA: opcode, block number, payload (latin-1 round-trip).
            format_str = "!HH{}s".format(len(self.blocks_buffer[0]))
            packet_data = struct.pack(format_str, opcode, self.block_number, self.blocks_buffer.pop(0).encode("latin-1"))
            return packet_data
        else:
            # ERROR(1): file not found.
            msg = "File doesn't exist."
            format_str = "!HH{}sB".format(len(msg))
            opcode = 5
            errorn = 1
            error_packet = struct.pack(format_str, opcode, errorn, msg.encode("ascii"), 0)
            return error_packet
    def write_request(self, block, data):
        """Append a received DATA chunk to the file and build the matching ACK.

        :param block: 1-tuple with the block number (from struct.unpack)
        :param data: 1-tuple with the payload bytes
        """
        print(self.filename)
        print(data[0])
        f = open(self.filename, 'a')
        f.write(data[0].decode("latin-1"))
        # block += 1
        opcode = 4
        format_str = "!HH"
        ack_Packet = struct.pack(format_str, opcode, block[0])
        f.close()
        return ack_Packet
    def get_next_output_packet(self):
        """Pop and return the oldest queued outgoing packet."""
        return self.packet_buffer.pop(0)
    def has_pending_packets_to_be_sent(self):
        """Return True while queued outgoing packets remain."""
        return len(self.packet_buffer) != 0
def check_file_name():
    """Warn when this script's filename doesn't match the expected lab pattern."""
    script_name = os.path.basename(__file__)
    import re
    if not re.findall(r"(\d{4}_)+lab1\.(py|rar|zip)", script_name):
        print(f"[WARN] File name is invalid [{script_name}]")
def setup_sockets(address):
    """Block until one UDP datagram arrives on the module-level server socket.

    :param address: display-only address string used in the log line
    :return: (data, client_address) of the received packet; 516 bytes is the
             largest legal TFTP packet (4-byte header + 512 data bytes).
    """
    print("[SERVER] Socket info:", server_socket)
    print("[SERVER] Waiting...")
    packet = server_socket.recvfrom(516)
    data, client_address = packet
    print("[SERVER] IN", data)
    print("[SERVER] Socket info:", server_socket)
    print("[SERVER] Waiting...")
    # NOTE(review): "on on" typo in the runtime log string below.
    print(f"TFTP server started on on [{address}]...")
    return data, client_address
def get_arg(param_index, default=None):
    """Return sys.argv[param_index], falling back to *default*.

    Exits the program with a fatal message when the argument is missing and
    no default was supplied.
    """
    try:
        return sys.argv[param_index]
    except IndexError as e:
        # Fixed: `if default:` wrongly treated falsy-but-valid defaults
        # (e.g. "" or 0) as missing; compare against None explicitly.
        if default is not None:
            return default
        print(e)
        print(
            f"[FATAL] The command-line argument #[{param_index}] is missing")
        exit(-1)  # Program execution failed.
def main():
    """Entry point: bind the TFTP socket and serve packets until none remain."""
    obj = TftpProcessor()
    print("*" * 50)
    print("[LOG] Printing command line arguments\n", ",".join(sys.argv))
    check_file_name()
    print("*" * 50)
    ip_address = get_arg(1, "127.0.0.1")
    # NOTE(review): binds to the module-level server_address ("127.0.0.1", 69);
    # the ip_address argument is only used for logging.
    server_socket.bind(server_address)
    data, client_address = setup_sockets(ip_address)
    while True:
        # Methods called unbound with `obj` passed explicitly; equivalent to
        # obj.process_udp_packet(data, client_address).
        TftpProcessor.process_udp_packet(obj, data, client_address)
        if TftpProcessor.has_pending_packets_to_be_sent(obj):
            server_socket.sendto(TftpProcessor.get_next_output_packet(obj), client_address)
        else:
            break
        # Wait for the next datagram from the client.
        data, client_address = setup_sockets(ip_address)
if __name__ == "__main__":
main()
|
from flask import request
from requirementmanager.app import app
from requirementmanager.mongodb import (
requirement_tree_collection
)
from requirementmanager.dao.requirement_tree import (
RequirementTreeMongoDBDao
)
from requirementmanager.utils.handle_api import handle_response, verify_request
from requirementmanager.utils.wrap_elementui_tree import add_section_number
META_SUCCESS = {'status': 200, 'msg': '获取成功!'}
@app.route('/requirement/tree/list', methods=['GET'])
@verify_request(['access'], access='requirement_tree_list')
@handle_response
def requirement_tree_list():
    """Return a project's requirement tree in ElementUI tree format.

    Query params: project_id — id of the project whose tree to fetch.
    """
    project_id = request.args.get('project_id')
    requirement_tree_dao = RequirementTreeMongoDBDao(
        requirement_tree_collection
    )
    tree = requirement_tree_dao.get(project_id)
    resp_data = tree.get_elementui_tree()
    # Number sections hierarchically starting at 1.
    add_section_number(resp_data, 1)
    # fixed: removed leftover debug print that dumped the whole tree on
    # every request.
    return {
        'meta': META_SUCCESS,
        'data': resp_data
    }
|
from flask import Flask
# from flask_appconfig import AppConfig
from flask_bootstrap import Bootstrap
from .session_setup import sess
from .frontend import frontend
from .nav import nav
from .model.base import db
def create_app(configfile=None):
    """Application factory: configure the Flask app and wire up extensions."""
    app = Flask(__name__)

    # Configuration. NOTE: secret key and DB credentials are hard-coded here.
    app.config['SECRET_KEY'] = 'toto-lea'
    app.config['SESSION_TYPE'] = 'filesystem'
    app.config['BOOTSTRAP_SERVE_LOCAL'] = False
    app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:1234@localhost/flaskapp'

    # Extensions and blueprints.
    Bootstrap(app)
    app.register_blueprint(frontend)
    nav.init_app(app)
    db.init_app(app)
    sess.init_app(app)
    return app
import pymysql
import requests
from bs4 import BeautifulSoup
from abc import *
import crawling
class AppstoreGameCrawling(crawling.Crawling, ABC):
    """Crawls mobileindex.com's App Store game chart and mirrors it into MySQL."""

    def __init__(self, main_url, db_host, db_port, db_user, db_pw, db_name, db_charset):
        super().__init__(main_url, db_host, db_port, db_user, db_pw, db_name, db_charset)

    def crawler(self):
        """Scrape the weekly chart page and upsert each ranked app into the DB."""
        try:
            header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36'}
            req = requests.get(super().MAIN_URL(), headers=header)  # weekly chart page
            cont = req.content
            soup = BeautifulSoup(cont, 'html.parser')
            soup = soup.find_all("div", {"class": "item-info"})
            for i in range(len(soup)):
                RANK_URL = self.get_url("https://www.mobileindex.com/" + soup[i].find("a")["href"])
                RANK_NAME = soup[i].select("div.appTitle > a > span.appname")[0].get_text()
                RANK_PUBLISHER = soup[i].select("div.appTitle > a > span.publisher")[0].get_text()
                IMAGE_URL = soup[i].select("a > img")[0]["src"]
                # Entries for the three chart types are interleaved on the page.
                RANK_TYPE = str(i % 3)
                self.connect_db(i//3, RANK_NAME, RANK_URL, IMAGE_URL, RANK_PUBLISHER, RANK_TYPE, "", "")
            f = open("./../../active_log.txt", "a")
            f.write("table : appstore_game_rank UPDATED" + "\n")
            print("table : appstore_game_rank UPDATED")
            f.close()
        except Exception as e:
            super().error_logging(str(e))
            print("Error Detected")

    def get_url(self, URL):
        """Resolve an app's detail page to its App Store link.

        Returns None implicitly when no 'App Store' cell is found.
        """
        header = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36'}
        req = requests.get(URL, headers=header)
        cont = req.content
        soup = BeautifulSoup(cont, 'lxml')
        soup = soup.select("div.app-info > table > tbody > tr > td")
        if soup[0].get_text() == "App Store":
            return soup[4].find("a")["href"]
        elif len(soup) > 5 and soup[5].get_text() == "App Store":
            return soup[9].find("a")["href"]
        elif len(soup) > 10 and soup[10].get_text() == "App Store":
            return soup[14].find("a")["href"]

    def connect_db(self, i, name, info_url, image_url, publisher, rank_type, tmp7, tmp8):
        """Update one rank row when the app at that position changed.

        :param i: zero-based rank within its chart type (stored as i + 1)
        """
        rank_number = i + 1
        conn = pymysql.connect(host=super().DB_HOST(),
                               port=int(super().DB_PORT()),
                               user=super().DB_USER(),
                               password=super().DB_PW(),
                               db=super().DB_NAME(),
                               charset=super().DB_CHARSET())
        try:
            curs = conn.cursor()
            sql = """select name from appstore_game_rank where rank = %s and rank_type = %s"""
            # fixed: pymysql expects query parameters as a single sequence;
            # passing them as separate positional args raises a TypeError.
            curs.execute(sql, (rank_number, rank_type))
            row = curs.fetchone()
            # fixed: guard against fetchone() returning None (no row yet),
            # which previously crashed on row[0].
            if row is not None and row[0] == name:
                pass  # entry unchanged
            else:
                sql = """update appstore_game_rank set name=%s, url=%s, image_url=%s, publisher=%s where rank=%s and rank_type = %s"""
                curs.execute(sql, (name, info_url, image_url, publisher, rank_number, rank_type))
                conn.commit()
        finally:
            # fixed: connection now closed even when a query raises.
            conn.close()
|
###
# Copyright (c) 2015, Michael Daniel Telatynski <postmaster@webdevguru.co.uk>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.commands import *
import supybot.conf as conf
import supybot.utils as utils
import supybot.plugins as plugins
import supybot.callbacks as callbacks
import requests
try:
    from supybot.i18n import PluginInternationalization
    _ = PluginInternationalization('CleverbotIO')
except ImportError:
    # i18n is only available in Limnoria; fall back to a no-op translator
    _ = lambda x: x
class CleverbotIO(callbacks.Plugin):
    """CleverbotIO API Interface"""
    threaded = True
    public = True
    # nick of the registered cleverbot.io session; stays False until
    # _createBot() succeeds, which gates _queryBot()
    botNick = False
    def __init__(self, irc):
        self.__parent = super(CleverbotIO, self)
        self.__parent.__init__(irc)
        # re-create the remote bot session whenever either credential changes
        conf.supybot.plugins.CleverbotIO.appUser.addCallback(self._configCallback)
        conf.supybot.plugins.CleverbotIO.appKey.addCallback(self._configCallback)
        self._createBot()
    def _configCallback(self):
        # Invoked by the registry when appUser/appKey are modified.
        self._createBot()
        self.log.info('Self Re-Initializing CleverbotIO')
    def _checkConfig(self):
        # True only when both credentials are configured (non-empty).
        return (self.registryValue('appUser') and
                self.registryValue('appKey'))
    _createUrl = 'https://cleverbot.io/1.0/create'
    def _createBot(self):
        # Register a session with the cleverbot.io API; on success remember
        # the returned nick for subsequent /ask calls.
        if not self._checkConfig():
            return
        payload = {
            'user': self.registryValue('appUser'),
            'key': self.registryValue('appKey')
        }
        r = requests.post(self._createUrl, data=payload)
        j = r.json()
        if j['status'] == 'success':
            self.botNick = j['nick']
            self.log.info('CleverbotIOs Instance (%s) Registered' % j['nick'])
        else:
            self.log.error('CleverbotIO Instance failed to Register: %s' %
                           j['status'])
    _queryUrl = 'https://cleverbot.io/1.0/ask'
    def _queryBot(self, irc, query):
        # Send *query* to the API and reply on IRC with the response.
        # Errors out (and raises via Raise=True) if not configured yet.
        if not (self._checkConfig() and self.botNick):
            irc.error(_("""Plugin needs to be configured.
                Check @config list plugins.CleverbotIO"""), Raise=True)
        payload = {
            'user': self.registryValue('appUser'),
            'key': self.registryValue('appKey'),
            'nick': self.botNick,
            'text': query
        }
        r = requests.post(self._queryUrl, data=payload)
        j = r.json()
        # NOTE(review): a non-success status is silently dropped here —
        # presumably intentional, but verify
        if j['status'] == 'success':
            irc.reply(j['response'])
    def cleverbotio(self, irc, msg, args, text):
        """Manual Call to the Cleverbot.io API"""
        self._queryBot(irc, text)
    cleverbotio = wrap(cleverbotio, ['text'])
    def invalidCommand(self, irc, msg, tokens):
        # Treat any unrecognised command in an opted-in channel as a query.
        chan = msg.args[0]
        if irc.isChannel(chan) and self.registryValue('invalidCommand', chan):
            self._queryBot(irc, msg.args[1])
# supybot loads the plugin through this conventional module attribute
Class = CleverbotIO
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
class Solution:
    def countElements(self, arr) -> int:
        """Count the elements ``x`` of *arr* for which ``x + 1`` also
        occurs in *arr* (duplicates counted individually).

        The original tested membership against the list itself, making the
        whole pass O(n^2); a set lookup makes it O(n).
        """
        present = set(arr)
        return sum(1 for number in arr if number + 1 in present)
class Character(object):
    """Simple container for a game character's basic attributes."""
    def __init__(self, type, name, bonus, condition):
        # NOTE(review): `type` shadows the builtin, but renaming it would
        # change the keyword-argument interface, so it is left as-is.
        self.type = type
        self.name = name
        self.bonus = bonus
        self.condition = condition
    def create(self):
        # Placeholder hook — presumably overridden by subclasses; confirm.
        pass
""" Renders and handles defined forms, turning them into submissions. """
import morepath
from onegov.core.security import Public, Private
from onegov.org.cli import close_ticket
from onegov.ticket import TicketCollection
from onegov.form import (
FormCollection,
PendingFormSubmission,
CompleteFormSubmission
)
from onegov.org import _, OrgApp
from onegov.org.layout import FormSubmissionLayout
from onegov.org.mail import send_ticket_mail
from onegov.org.models import TicketMessage, SubmissionMessage
from onegov.pay import Price
from purl import URL
from webob.exc import HTTPNotFound
def copy_query(request, url, fields):
    """Return *url* with the listed GET parameters copied over from the
    current request (parameters absent from the request are skipped)."""
    result = URL(url)
    wanted = (name for name in fields if name in request.GET)
    for name in wanted:
        result = result.query_param(name, request.GET[name])
    return result.as_string()
def get_price(request, form, submission):
    """Combine the form's own total with any price stored on the submission
    meta data, then run the result through the app's price adjustment."""
    total = form.total()
    if 'price' in submission.meta:
        surcharge = Price(**submission.meta['price'])
        total = surcharge if total is None else total + surcharge
    return request.app.adjust_price(total)
@OrgApp.html(model=PendingFormSubmission, template='submission.pt',
             permission=Public, request_method='GET')
@OrgApp.html(model=PendingFormSubmission, template='submission.pt',
             permission=Public, request_method='POST')
@OrgApp.html(model=CompleteFormSubmission, template='submission.pt',
             permission=Private, request_method='GET')
@OrgApp.html(model=CompleteFormSubmission, template='submission.pt',
             permission=Private, request_method='POST')
def handle_pending_submission(self, request, layout=None):
    """ Renders a pending submission, takes it's input and allows the
    user to turn the submission into a complete submission, once all data
    is valid.
    This view has two states, a completable state where the form values
    are displayed without a form and an edit state, where a form is rendered
    to change the values.
    Takes the following query parameters for customization::
    * ``edit`` render the view in the edit state
    * ``return-to`` the view redirects to this url once complete
    * ``title`` a custom title (required if external submission)
    * ``quiet`` no success messages are rendered if present
    """
    collection = FormCollection(request.session)
    form = request.get_form(self.form_class, data=self.data)
    form.action = request.link(self)
    form.model = self
    if 'edit' not in request.GET:
        form.validate()
    if not request.POST:
        # GET requests carry no CSRF token, so don't count that as an error
        form.ignore_csrf_error()
    elif not form.errors:
        # persist the posted values onto the submission
        collection.submissions.update(self, form)
    # a submission may only be completed outside the edit state and error-free
    completable = not form.errors and 'edit' not in request.GET
    price = get_price(request, form, self)
    # check minimum price total if set
    current_total_amount = price and price.amount or 0.0
    minimum_total_amount = self.minimum_price_total or 0.0
    if current_total_amount < minimum_total_amount:
        if price is not None:
            currency = price.currency
        else:
            # We just pick the first currency from any pricing rule we can find
            # if we can't find any, then we fall back to 'CHF'. Although that
            # should be an invalid form definition.
            currency = 'CHF'
            for field in form._fields.values():
                if not hasattr(field, 'pricing'):
                    continue
                rules = field.pricing.rules
                if not rules:
                    continue
                currency = next(iter(rules.values())).currency
                break
        completable = False
        request.alert(
            _(
                "The total amount for the currently entered data "
                "is ${total} but has to be at least ${minimum}. "
                "Please adjust your inputs.",
                mapping={
                    'total': Price(current_total_amount, currency),
                    'minimum': Price(minimum_total_amount, currency)
                }
            )
        )
    if completable and 'return-to' in request.GET:
        if 'quiet' not in request.GET:
            request.success(_("Your changes were saved"))
        # the default url should actually never be called
        return request.redirect(request.url)
    if 'title' in request.GET:
        title = request.GET['title']
    else:
        title = self.form.title
    # retain some parameters in links (the rest throw away)
    form.action = copy_query(
        request, form.action, ('return-to', 'title', 'quiet'))
    edit_link = URL(copy_query(
        request, request.link(self), ('title', )))
    # the edit link always points to the editable state
    edit_link = edit_link.query_param('edit', '')
    edit_link = edit_link.as_string()
    return {
        'layout': layout or FormSubmissionLayout(self, request, title),
        'title': title,
        'form': form,
        'completable': completable,
        'edit_link': edit_link,
        'complete_link': request.link(self, 'complete'),
        'model': self,
        'price': price,
        'checkout_button': price and request.app.checkout_button(
            button_label=request.translate(_("Pay Online and Complete")),
            title=title,
            price=price,
            email=self.email or self.get_email_field_data(form),
            locale=request.locale
        )
    }
@OrgApp.view(model=PendingFormSubmission, name='complete',
             permission=Public, request_method='POST')
@OrgApp.view(model=CompleteFormSubmission, name='complete',
             permission=Private, request_method='POST')
def handle_complete_submission(self, request):
    """ Turns a valid pending submission into a complete one, processing
    payment, opening a ticket and sending notification mails on the way.
    For an already-complete submission it merely re-saves the changes. """
    form = request.get_form(self.form_class)
    form.process(data=self.data)
    form.model = self
    # we're not really using a csrf protected form here (the complete form
    # button is basically just there so we can use a POST instead of a GET)
    form.validate()
    form.ignore_csrf_error()
    if form.errors:
        return morepath.redirect(request.link(self))
    else:
        if self.state == 'complete':
            self.data.changed()  # trigger updates
            request.success(_("Your changes were saved"))
            return morepath.redirect(request.link(
                FormCollection(request.session).scoped_submissions(
                    self.name, ensure_existance=False)
            ))
        else:
            provider = request.app.default_payment_provider
            token = request.params.get('payment_token')
            price = get_price(request, form, self)
            # process_payment returns a falsy value on failure, True when
            # nothing needed to be paid, or the payment object itself
            payment = self.process_payment(price, provider, token)
            if not payment:
                request.alert(_("Your payment could not be processed"))
                return morepath.redirect(request.link(self))
            elif payment is not True:
                self.payment = payment
            window = self.registration_window
            if window and not window.accepts_submissions(self.spots):
                request.alert(_("Registrations are no longer possible"))
                return morepath.redirect(request.link(self))
            show_submission = request.params.get('send_by_email') == 'yes'
            self.meta['show_submission'] = show_submission
            self.meta.changed()
            collection = FormCollection(request.session)
            submission_id = self.id
            # Expunges the submission from the session
            collection.submissions.complete_submission(self)
            # make sure accessing the submission doesn't flush it, because
            # it uses sqlalchemy utils observe, which doesn't like premature
            # flushing at all
            with collection.session.no_autoflush:
                ticket = TicketCollection(request.session).open_ticket(
                    handler_code=self.meta.get('handler_code', 'FRM'),
                    handler_id=self.id.hex
                )
                TicketMessage.create(ticket, request, 'opened')
                send_ticket_mail(
                    request=request,
                    template='mail_ticket_opened.pt',
                    subject=_("Your request has been registered"),
                    ticket=ticket,
                    receivers=(self.email, ),
                    content={
                        'model': ticket,
                        'form': form,
                        'show_submission': self.meta['show_submission']
                    }
                )
                if request.email_for_new_tickets:
                    send_ticket_mail(
                        request=request,
                        template='mail_ticket_opened_info.pt',
                        subject=_("New ticket"),
                        ticket=ticket,
                        receivers=(request.email_for_new_tickets, ),
                        content={
                            'model': ticket
                        }
                    )
                request.app.send_websocket(
                    channel=request.app.websockets_private_channel,
                    message={
                        'event': 'browser-notification',
                        'title': request.translate(_('New ticket')),
                        'created': ticket.created.isoformat()
                    }
                )
                if request.auto_accept(ticket):
                    try:
                        ticket.accept_ticket(request.auto_accept_user)
                        # We need to reload the object with the correct polymorphic
                        # type
                        submission = collection.submissions.by_id(
                            submission_id, state='complete', current_only=True
                        )
                        handle_submission_action(
                            submission, request, 'confirmed', True, raises=True
                        )
                    except ValueError:
                        if request.is_manager:
                            request.warning(_("Your request could not be "
                                              "accepted automatically!"))
                    else:
                        close_ticket(
                            ticket, request.auto_accept_user, request
                        )
            request.success(_("Thank you for your submission!"))
            return morepath.redirect(request.link(ticket, 'status'))
@OrgApp.view(model=CompleteFormSubmission, name='ticket', permission=Private)
def view_submission_ticket(self, request):
    """Redirect from a submission to its associated ticket (404 if none)."""
    tickets = TicketCollection(request.session)
    ticket = tickets.by_handler_id(self.id.hex)
    if not ticket:
        raise HTTPNotFound()
    return request.redirect(request.link(ticket))
@OrgApp.view(model=CompleteFormSubmission, name='confirm-registration',
             permission=Private, request_method='POST')
def handle_accept_registration(self, request):
    """ Confirms the registration behind this submission. """
    return handle_submission_action(self, request, 'confirmed')
@OrgApp.view(model=CompleteFormSubmission, name='deny-registration',
             permission=Private, request_method='POST')
def handle_deny_registration(self, request):
    """ Denies the registration behind this submission. """
    return handle_submission_action(self, request, 'denied')
@OrgApp.view(model=CompleteFormSubmission, name='cancel-registration',
             permission=Private, request_method='POST')
def handle_cancel_registration(self, request):
    """ Cancels the (previously confirmed) registration behind this
    submission. """
    return handle_submission_action(self, request, 'cancelled')
def handle_submission_action(
        self, request, action, ignore_csrf=False, raises=False,
        no_messages=False, force_email=False):
    """ Executes a registration action ('confirmed', 'denied' or
    'cancelled') on the given submission, mailing the submitter and
    recording a message on the associated ticket.

    :param ignore_csrf: skip the CSRF token check (for internal callers)
    :param raises: raise ValueError on failure instead of alerting
    :param no_messages: suppress request success/alert messages
    :param force_email: always send the notification e-mail
    :raises ValueError: for an unknown *action*, or on failure if *raises*
    """
    if not ignore_csrf:
        request.assert_valid_csrf_token()
    if action == 'confirmed':
        subject = _("Your registration has been confirmed")
        success = _("The registration has been confirmed")
        failure = _("The registration could not be confirmed because the "
                    "maximum number of participants has been reached")
        def execute():
            # claim() returns a falsy value when no spot is available
            if self.registration_window and self.claimed is None:
                return self.claim()
    elif action == 'denied':
        subject = _("Your registration has been denied")
        success = _("The registration has been denied")
        failure = _("The registration could not be denied")
        def execute():
            if self.registration_window and self.claimed is None:
                return self.disclaim() or True
    elif action == 'cancelled':
        subject = _("Your registration has been cancelled")
        success = _("The registration has been cancelled")
        failure = _("The registration could not be cancelled")
        def execute():
            if self.registration_window and self.claimed:
                return self.disclaim() or True
    else:
        # BUG FIX: an unknown action previously fell through to unbound
        # `execute`/`failure` names and crashed with a NameError later on;
        # fail loudly and clearly instead.
        raise ValueError("Unknown submission action: %r" % (action, ))
    if execute():
        ticket = TicketCollection(request.session).by_handler_id(self.id.hex)
        send_ticket_mail(
            request=request,
            template='mail_registration_action.pt',
            receivers=(self.email, ),
            ticket=ticket,
            content={
                'model': self,
                'action': action,
                'ticket': ticket,
                'form': self.form_obj,
                'show_submission': self.meta.get('show_submission')
            },
            subject=subject,
            force=force_email
        )
        SubmissionMessage.create(ticket, request, action)
        if not no_messages:
            request.success(success)
    else:
        if raises:
            raise ValueError(request.translate(failure))
        if not no_messages:
            request.alert(failure)
        return
    return request.redirect(request.link(self))
|
from __future__ import division
from warnings import warn
import numpy as np
from scipy import special
from dipy.data import get_sphere
def density(points, Lambda):
    """Density function of the Angular Central Gaussian Distribution.

    *points* is an (n, q) array of directions, *Lambda* a (q, q) parameter
    matrix; returns the density evaluated at each point.
    """
    q = Lambda.shape[0]
    # inverse surface area of the unit sphere S^{q-1}
    surface_inv = special.gamma(q / 2.) / (2. * np.pi ** (q / 2.))
    det_factor = 1.0 / np.sqrt(np.linalg.det(Lambda))
    # quadratic forms x_i^T Lambda^{-1} x_i, one per point
    quad_form = (np.linalg.solve(Lambda, points.T) * points.T).sum(axis=0)
    return surface_inv * det_factor * quad_form ** (-q / 2.)
def lambda_estimator(points, iters=1000, epsilon=1e-8):
    """Estimator of the parameter for the Angular Central Gaussian
    Distribution according to Statistical Analysis for the Angular
    Central Gaussian Distribution on the Sphere David E. Tyler,
    Biometrika, Vol. 74, No. 3 (Sep., 1987), pp. 579-589 Eq. 3.

    *points* is an (n, q) array of directions; returns a (q, q) matrix
    normalised to unit Frobenius norm. Iterates the fixed-point update at
    most *iters* times or until successive iterates differ by less than
    *epsilon*.
    """
    q = points.shape[1]
    Lambda = np.eye(q)
    # outer products x_i x_i^T for every sample, shape (n, q, q)
    points_squared = points[:,:,None] * points[:,None,:]
    for i in range(iters):
        if np.linalg.det(Lambda) == 0:
            # regularise a singular iterate by nudging it towards identity
            M = np.eye(q) - Lambda
            M /= np.linalg.norm(M)
            Lambda = Lambda + M * epsilon
        # x_i^T Lambda^{-1} x_i for every sample
        denominator = (
            points * np.linalg.solve(Lambda, points.T).T
        ).sum(1)
        denominator = denominator[:, None, None]
        # Tyler's fixed-point update (Eq. 3)
        Lambda_new = q * (
            (points_squared / denominator).sum(0) / (1. / denominator).sum(0)
        )
        if np.linalg.norm(Lambda - Lambda_new) < epsilon:
            break
        Lambda = Lambda_new
    else:
        # the loop ran to completion without meeting the tolerance
        warn('Convergence not achieved, stopping by number of iterations.')
    return Lambda / np.linalg.norm(Lambda)
|
# encoding: utf-8
"""
Helper module for example applications. Mimics ZeroMQ Guide's zhelpers.h.
"""
from __future__ import print_function
import binascii
import os
from random import randint
import zmq
# Prefix used by set_id() for socket identities; the trailing NUL byte is
# part of the prefix itself.
IDENTITY_PREFIX = 'id\x00'
# NOTE(review): unused in this chunk — presumably flags raw message frames
# elsewhere; verify against callers.
RAW_MSG_FLAG = '\x00\x00'
def socket_set_hwm(socket, hwm=-1):
    """Set the socket high-water mark, compatible with libzmq 2/3/4."""
    try:
        # libzmq >= 3 splits the HWM into separate send/receive options
        socket.sndhwm = hwm
        socket.rcvhwm = hwm
    except AttributeError:
        # libzmq 2 exposes a single combined option
        socket.hwm = hwm
def dump(msg_or_socket):
    """Receives all message parts from socket, printing each frame neatly.

    Accepts either a zmq.Socket (a full multipart message is received from
    it) or an already-received list of frames.
    """
    if isinstance(msg_or_socket, zmq.Socket):
        # it's a socket, call on current message
        msg = msg_or_socket.recv_multipart()
    else:
        msg = msg_or_socket
    print("----------------------------------------")
    for part in msg:
        print("[%03d]" % len(part), end=' ')
        # BUG FIX: removed dead assignment `is_text = True` (never read)
        try:
            print(part.decode('ascii'))
        except UnicodeDecodeError:
            # not printable text: show the frame as hex instead
            print(r"0x%s" % (binascii.hexlify(part).decode('ascii')))
def set_id(zsocket, identity=None):
    """Set simple random printable identity on socket"""
    if identity is None:
        suffix = u'%04x-%04x' % (randint(0, 0x10000), randint(0, 0x10000))
    else:
        suffix = u'%s' % identity
    full_id = u'%s-%s' % (IDENTITY_PREFIX, suffix)
    zsocket.setsockopt_string(zmq.IDENTITY, full_id)
    return str(full_id)
def zpipe(ctx):
    """Build an inproc pipe for talking to threads.

    Mimics the pipe used in czmq's zthread_fork: returns a pair of PAIR
    sockets connected via a random inproc endpoint.
    """
    endpoint = "inproc://%s" % binascii.hexlify(os.urandom(8))
    left = ctx.socket(zmq.PAIR)
    right = ctx.socket(zmq.PAIR)
    for sock in (left, right):
        sock.linger = 0
        sock.hwm = 1
    left.bind(endpoint)
    right.connect(endpoint)
    return left, right
def zpipes(ctx, thread_num, addr=None):
    """build inproc pipe for talking to threads by router and dealer

    Returns (router, [dealer, ...]) with *thread_num* dealers connected to
    the router over a shared inproc endpoint.
    """
    # NOTE(review): Python 2 only — uses xrange, and concatenates str with
    # the bytes hexlify() returns on Python 3.
    if addr is None:
        addr = str(os.getpid()) + '-' + binascii.hexlify(os.urandom(8))
    a = nonblocking_socket(ctx, zmq.ROUTER)
    b = [nonblocking_socket(ctx, zmq.DEALER) for i in xrange(thread_num)]
    iface = "inproc://%s" % addr
    a.bind(iface)
    for i, s in enumerate(b):
        # give each dealer a deterministic identity ("0", "1", ...)
        set_id(s, str(i))
        s.connect(iface)
    return a, b
def poll(poller, ms):
    """Poll for *ms* milliseconds and map each ready socket to the list of
    event flags (zmq.POLLIN / zmq.POLLOUT) that fired for it."""
    # NOTE(review): dict.iteritems is Python 2 only.
    polls = dict(poller.poll(ms))
    ret = {}
    for sock, mask in polls.iteritems():
        oneret = []
        if mask & zmq.POLLIN:
            oneret.append(zmq.POLLIN)
        if mask & zmq.POLLOUT:
            oneret.append(zmq.POLLOUT)
        ret[sock] = oneret
    return ret
def nonblocking_socket(ctx, stype):
    """Create a *stype* socket on *ctx* whose close never lingers."""
    created = ctx.socket(stype)
    created.linger = 0
    return created
def recv_multipart_timeout(sock, timeout):
    """Receive a multipart message from *sock*, or return None if nothing
    arrives within *timeout* milliseconds."""
    waiter = zmq.Poller()
    waiter.register(sock, zmq.POLLIN)
    events = poll(waiter, timeout)
    if not events.get(sock):
        return None
    return sock.recv_multipart(zmq.NOBLOCK)
def bind_to_random_port(sock, addr='tcp://*', min_port=49152, max_port=65536, max_tries=1000):
    """Bind *sock* to a random port; return the port, or 0 on failure
    instead of letting zmq.ZMQBindError propagate."""
    try:
        port = sock.bind_to_random_port(addr, min_port, max_port, max_tries)
    except zmq.ZMQBindError:
        return 0
    return port
def is_identity(string):
    """Return True if *string* looks like a zmq identity frame.

    An identity frame either starts with a NUL byte (libzmq's default
    auto-generated identities) or with our explicit IDENTITY_PREFIX.
    """
    # startswith() also handles the empty string safely, where the previous
    # string[0] indexing raised IndexError.
    return string.startswith('\x00') or string.startswith(IDENTITY_PREFIX)
def split_identity(msg):
    """Split a multipart message into (identity_frames, payload_frames).

    The identity part is the leading run of frames that look like zmq
    identities; everything after the first non-identity frame is payload.
    """
    boundary = 0
    while boundary < len(msg) and is_identity(msg[boundary]):
        boundary += 1
    return msg[:boundary], msg[boundary:]
|
import numpy as np
import copy
import pdb
import random as rdm
import time
import scipy.special as scp
import scipy.stats as scs
import scipy.optimize as scopt
import matplotlib.pyplot as plt
from scipy import optimize as scipyopt
import datetime
import utilities as utils
from loggedopt import Log
class InformationReuseBase(Log):
'''Base class for information reuse that deals with setup of the problem
and checking the inputs, and common functions.
Does not include any optimization logic
Note that algorithm specific functions return dictionaries, so that as long
as the dictionary has the requried keys it will still work. This way extra
stuff can be added to be logged without having to change function
definitions etc.
'''
def __init__(self, fobj, **kwargs):
'''
- fobj: quantity of interest.
- udim: number of uncertainties
- n_mc, n_quad: no. sample and quadrature points in HM'''
# Note: notation
# prefix of v indicates a vector
# prefix of m indicates a matrix
# prefix of f indicates a function
log_name = kwargs.setdefault('log_name', None)
Log.__init__(self, log_name=log_name)
self.fobj = fobj # fobj should take two inputs: dv, u.
self.udim = kwargs.setdefault('udim', 1)
self.xdim = kwargs.setdefault('xdim', 1)
self.n_init = int(kwargs.setdefault('n_init', 50))
self.n_boot = int(kwargs.setdefault('n_boot', 1e3))
self.var_req = float(kwargs.setdefault('var_req', 1e-4))
self.verbose = kwargs.setdefault('verbose', False)
self.bMC = kwargs.setdefault('bMC', False)
self.bPlot = kwargs.setdefault('bPlot', False)
self.bValidation = kwargs.setdefault('bValidation', False)
self.n_fixed = int(kwargs.setdefault('n_fixed', 1000))
self.iters = 0
self.db = []
self.total_samples = 0
self.checkInputs()
if kwargs.setdefault('check_overwrite', False):
self.overwriteLogFile(self.log_file)
else:
with open(self.log_file, 'w') as f: pass
def reset(self, **kwargs):
for k, v in kwargs.iteritems():
if k in dir(self):
setattr(self, k, v)
else:
raise KeyError('reset argument not an object attribute')
Log.__init__(self, log_name=self.log_name)
self.iters = 0
self.total_samples = 0
self.db = []
self.checkInputs()
if kwargs.setdefault('check_overwtire', False):
self.overwriteLogFile(self.log_file)
else:
with open(self.log_file, 'w') as f: pass
def checkInputs(self):
try:
if self.verbose:
print 'Checking input function with zero vectors'
q = self.fobj(np.zeros(self.xdim), np.zeros(self.udim))
except TypeError:
raise TypeError('''Something is wrong with the objective function,
Objective function must be of the form f(x, u)
and return a single output q, the quantity of interest''')
def evalq(self, fx, vx, vu):
vqsamp = np.zeros(vu.size / self.udim)
for ii, ui in enumerate(vu):
vqsamp[ii] = fx(vx, ui)
return vqsamp
def iteration(self, vx):
if self.verbose:
print '____________________________________________________________'
print 'Design: ', vx
print '____________________________________________________________'
if not self.db:
if self.bValidation:
ddict = self.runValidationIteration(vx, {}, reuse=False)
else:
ddict = self.runFirstIteration(vx)
else:
vd = np.zeros(len(self.db))
for il in range(len(self.db)):
ldict = copy.copy(self.db[il])
vl = np.array(ldict['design'])
vd[il] = np.linalg.norm(vl - np.array(vx))
ilmin = np.argmin(vd)
cdict = copy.copy(self.db[ilmin])
if self.bValidation:
ddict = self.runValidationIteration(vx, cdict, reuse=True)
else:
ddict = self.runGeneralIteration(vx, cdict)
self.total_samples += ddict['samples']
self.db.append(ddict)
if ddict is not None:
self.writeToLog(ddict)
return ddict
def runValidationIteration(self, vx, cdict, **kwargs):
M = self.n_fixed
vunew = self.sampleUncertainties(M)
vqx = self.evalq(self.fobj, vx, vunew)
vqc = []
if not kwargs.setdefault('reuse', False):
outdict = self.naiveMC(vqx)
else:
if self.bMC:
outdict = self.naiveMC(vqx)
else:
vqc = self.evalq(self.fobj, cdict['design'], vunew)
outdict = self.reuseMC(vqx, vqc, cdict)
iterdict = { k: v for k, v in outdict.iteritems()}
iterdict['design'] = [x for x in vx]
iterdict['samples'] = len(vqx) + len(vqc)
return iterdict
def runFirstIteration(self, vx, **kwargs):
iteration = 0
vqx = []
M, predMC = self.n_init/1.1, self.n_init/1.1
for guesses in range(20):
Mnew = max(int(1.1*predMC), int(1.1*len(vqx)))
if len(vqx) > 0:
Mnew = min(Mnew, 10*len(vqx))
if self.verbose:
print 'Total ', Mnew, ' samples'
vunew = self.sampleUncertainties(int(Mnew)-len(vqx))
vqx = np.concatenate([vqx, self.evalq(self.fobj, vx, vunew)])
outdict = self.naiveMC(vqx)
predMC = len(vqx)*outdict['mx_var']/self.var_req
if self.verbose:
print 'Variance: ', outdict['dvar']
if outdict['dvar'] < self.var_req:
break
if self.verbose:
print 'Estimator value: ', outdict['dhat'], ' variance', outdict['dvar']
iterdict = { k: v for k, v in outdict.iteritems()}
iterdict['samples'] = len(vqx)
iterdict['rho_samples'] = 0
iterdict['design'] = [x for x in vx]
iterdict['iteration'] = copy.copy(iteration)
return iterdict
def sampleUncertainties(self, num):
return -1. + 2.*np.random.random((num, self.udim))
# return 1 - np.random.gamma(2., 0.5, size=(num, self.udim))
# return np.random.randn(num, self.udim)
x1 = np.random.beta(2, 3, (num, self.udim))
x2 = -1 + np.random.beta(2, 3, (num, self.udim))
b1 = np.random.binomial(1, 0.5, num)
s = np.zeros((num, self.udim))
for ib, b in enumerate(b1):
if b == 1:
s[ib, :] = x1[ib, :]
else:
s[ib, :] = x2[ib, :]
return np.array(s)
def runGeneralIteration(self, vx, cdict, **kwargs):
iteration = self.db[-1]['iteration'] + 1
vc = cdict['design']
vqx, vqc, convIR, convMC = [], [], [], []
predM = self.n_init/1.1
bMC = copy.copy(self.bMC)
rho_samples = 0
for guesses in range(20):
Mnew = max(int(1.1*predM), int(1.1*len(vqx)))
if len(vqx) > 0:
Mnew = min(Mnew, 10*len(vqx))
if self.verbose:
print 'Total ', Mnew, ' samples'
if bMC:
vunew = self.sampleUncertainties(Mnew - len(vqx))
vqx = np.concatenate([vqx, self.evalq(self.fobj, vx, vunew)])
outdict = self.naiveMC(vqx)
predM = len(vqx)*outdict['mx_var']/self.var_req
else:
vunew = self.sampleUncertainties(Mnew - len(vqx))
vqx = np.concatenate([vqx, self.evalq(self.fobj, vx, vunew)])
vqc = np.concatenate([vqc, self.evalq(self.fobj, vc, vunew)])
rho_samples = np.corrcoef(np.array([vqx, vqc]))[0,1]
outdict = self.reuseMC(vqx, vqc, cdict)
predNaive = len(vqx)*outdict['mx_var']/self.var_req
predM = self.predictorIR(outdict, cdict)
if 2*predM > predNaive:
bMC = True
predM = int(predNaive)
if self.verbose:
print 'SWITCHING TO REGULAR MC'
if self.verbose:
print 'Estimator value: ', outdict['dhat'], ' variance', outdict['dvar']
if outdict['dvar'] < self.var_req:
break
if self.bPlot:
self.plotConvergence(convMC, convIR, cdict, vx, self.var_req)
iterdict = { k: v for k, v in outdict.iteritems()}
iterdict['samples'] = len(vqx) + len(vqc)
iterdict['rho_samples'] = rho_samples
iterdict['design'] = [x for x in vx]
iterdict['iteration'] = copy.copy(iteration)
return iterdict
def plotConvergence(**kwargs):
raise Exception('Plotting not implemented')
def naiveMC(**kwargs):
raise Exception('naiveMC function needs to be implemented')
def reuseMC(**kwargs):
raise Exception('reuseMC function needs to be implemented')
def predictorIR(**kwargs):
raise Exception('predictorIR function needs to be implemented')
class AlgebraicInformationReuse(InformationReuseBase):
    '''Class that that performs algebraic information reuse from Ng and Willcox
    (2014). Functionality at the moment just uses the mean as the estimator'''
    def __init__(self, fobj, **kwargs):
        # estimator: 'mean', 'var' or 'ws' (weighted sum mean + w*std)
        self.estimator = kwargs.setdefault('estimator', 'mean')
        # weight on the standard deviation for the 'ws' estimator
        self.mvweight = kwargs.setdefault('mvweight', 1.282)
        InformationReuseBase.__init__(self, fobj, **kwargs)
        self.checkAdditionalInputs()
    def checkAdditionalInputs(self):
        # Validate the estimator choice early.
        el = self.estimator.lower()
        if el != 'mean' and (el != 'var' and el != 'ws'):
            raise ValueError('''estimator for algebraic information reuse should
                be ''mean'' or ''var'' or ''ws''')
    def runFirstIteration(self, vx, **kwargs):
        return InformationReuseBase.runFirstIteration(self, vx, **kwargs)
    def runGeneralIteration(self, vx, cdict, **kwargs):
        return InformationReuseBase.runGeneralIteration(self, vx, cdict, **kwargs)
    def naiveMC(self, vqsamp):
        """Plain Monte Carlo estimate of the chosen statistic from samples
        vqsamp, returning the estimate and its estimator variance."""
        mv, mv_cov = [], []
        if self.estimator.lower() == 'mean':
            dhat = np.mean(vqsamp)
            dvar = np.var(vqsamp)/len(vqsamp)
        elif self.estimator.lower() == 'var':
            # per-sample unbiased variance contributions (h-statistic form)
            n, xsum, xsumm1 = len(vqsamp), sum(vqsamp), sum(vqsamp[0:-1])
            vvsamp = [(n/(n-1.))*(q - (1./n)*xsum)*(q - (1./(n-1.))*xsumm1)
                      for q in vqsamp]
            dhat = np.mean(vvsamp)
            dvar = np.var(vvsamp)/len(vvsamp)
        elif self.estimator.lower() == 'ws':
            # weighted sum: mean + w*std, with variance via the delta method
            n, qsum, qsumm1 = len(vqsamp), sum(vqsamp), sum(vqsamp[0:-1])
            vvsamp = [(n/(n-1.))*(q - (1./n)*qsum)*(q - (1./(n-1.))*qsumm1)
                      for q in vqsamp]
            m = np.mean(vqsamp)
            v = np.mean(vvsamp)
            w = self.mvweight
            dhat = m + w*np.sqrt(v)
            mv_cov = (1./n)*np.cov(np.array(zip(vqsamp, vvsamp)).T)
            fgrad = np.array([1, self.mvweight/(2.*np.sqrt(v))]).reshape([2,1])
            dvar = float(fgrad.T.dot(mv_cov.dot(fgrad)))
            mv = np.array((m, v))
        return {'dhat': dhat, 'dvar': dvar,
                'gamma': 0, 'eta': 0, 'M':len(vqsamp),
                'mx_var': dvar, 'mv_cov': mv_cov, 'mv': mv,
                'rho': 0}
    def _getreuseterms(self, vqx, vqc, sc_var):
        # Control-variate terms from Ng and Willcox (2014): correlation,
        # eta (reference estimator variance ratio) and the optimal gamma.
        M = len(vqx)
        abar = np.mean(vqx)
        cbar = np.mean(vqc)
        asum2 = sum([(ai-abar)**2 for ai in vqx])
        csum2 = sum([(ci-cbar)**2 for ci in vqc])
        acsum = sum([(ai - abar)*(ci - cbar) for ai, ci in zip(vqx, vqc)])
        rho2 = acsum**2 / (asum2 * csum2)
        eta = sc_var*M*(M-1) / csum2
        gam = (1./(1. + eta)) * acsum / csum2
        siga2 = (1./(M-1)) * asum2
        sigc2 = (1./(M-1)) * csum2
        return gam, eta, rho2, siga2, sigc2
    def reuseMC(self, vqx, vqc, cdict):
        '''Uses the method outlined in Ng and Willcox (2014) Journal of
        Aircraft for information re-use'''
        # Notation:
        # sx - estimator at point x
        # sc - estimator at point c
        # mx - sample average at point x
        # mc - sample average at point c
        M = len(vqx)
        samples = len(vqx) + len(vqc)
        mv, mv_cov, sc_cov = [], [], []
        if self.estimator.lower() == 'mean':
            sc = cdict['dhat']
            sc_var = cdict['dvar']
            abar, cbar = np.mean(vqx), np.mean(vqc)
            gam, eta, rho2, siga2, sigc2 = self._getreuseterms(vqx, vqc, sc_var)
            # control-variate corrected mean and its variance
            dhat = abar + gam*(sc - cbar)
            dvar = (1./M)*( siga2 + gam**2*sigc2*(1. + eta) - \
                2*gam*np.sqrt(rho2*siga2*sigc2) )
            mx_var = float(siga2/M)
            mc_var = float(sigc2/M)
            mxc_cov = float(rho2*np.sqrt(siga2*sigc2)/M)
        if self.estimator.lower() == 'var':
            sc = cdict['dhat']
            sc_var = cdict['dvar']
            # per-sample unbiased variance contributions at both designs
            n, xsum, xsumm1 = len(vqx), sum(vqx), sum(vqx[0:-1])
            vvx = [(n/(n-1.))*(q - (1./n)*xsum)*(q - (1./(n-1.))*xsumm1)
                   for q in vqx]
            n, csum, csumm1 = len(vqc), sum(vqc), sum(vqc[0:-1])
            vvc = [(n/(n-1.))*(q - (1./n)*csum)*(q - (1./(n-1.))*csumm1)
                   for q in vqc]
            abar, cbar = np.mean(vvx), np.mean(vvc)
            gam, eta, rho2, siga2, sigc2 = self._getreuseterms(vvx, vvc, sc_var)
            dhat = abar + gam*(sc - cbar)
            dvar = (1./M)*( siga2 + gam**2*sigc2*(1. + eta) - \
                2*gam*np.sqrt(rho2*siga2*sigc2) )
            mx_var = float(siga2)/M
            mc_var = float(sigc2)/M
            mxc_cov = float(rho2*np.sqrt(siga2*sigc2))/M
        elif self.estimator.lower() == 'ws':
            # weighted sum estimator: reuse mean and variance separately,
            # then combine via the delta method
            sc_var = cdict['dvar']
            sc_m, sc_v = cdict['mv'][0], cdict['mv'][1]
            sc_cov = np.array(cdict['mv_cov'])
            n, xsum, xsumm1 = len(vqx), sum(vqx), sum(vqx[0:-1])
            vvx = [(n/(n-1.))*(q - (1./n)*xsum)*(q - (1./(n-1.))*xsumm1)
                   for q in vqx]
            n, csum, csumm1 = len(vqc), sum(vqc), sum(vqc[0:-1])
            vvc = [(n/(n-1.))*(q - (1./n)*csum)*(q - (1./(n-1.))*csumm1)
                   for q in vqc]
            abar_m, cbar_m = np.mean(vqx), np.mean(vqc)
            gam_m, eta_m, rho2_m, siga2_m, sigc2_m = self._getreuseterms(
                vqx, vqc, sc_cov[0,0])
            m = abar_m + gam_m*(sc_m - cbar_m)
            mvar = (1./n)*( siga2_m + gam_m**2*sigc2_m*(1. + eta_m) - \
                2*gam_m*np.sqrt(rho2_m*siga2_m*sigc2_m) )
            abar_v, cbar_v = np.mean(vvx), np.mean(vvc)
            gam_v, eta_v, rho2_v, siga2_v, sigc2_v = self._getreuseterms(
                vvx, vvc, sc_cov[1,1])
            v = abar_v + gam_v*(sc_v - cbar_v)
            vvar = (1./n)*( siga2_v + gam_v**2*sigc2_v*(1. + eta_v) - \
                2*gam_v*np.sqrt(rho2_v*siga2_v*sigc2_v) )
            xcov = np.cov(np.array(zip(vqx, vvx)).T)
            ccov = np.cov(np.array(zip(vqc, vvc)).T)
            xccrosscov = np.cov(np.array(zip(vqx, vvx, vqc, vvc)).T)[0:2,2:4]
            cxcrosscov = np.cov(np.array(zip(vqx, vvx, vqc, vvc)).T)[2:4,0:2]
            gam = np.array([[gam_m, 0.], [0., gam_v]])
            # joint covariance of the reused (mean, variance) estimator
            mv_cov = (1./n)*(xcov + (gam.dot(n*sc_cov + ccov)).dot(gam) -
                (gam.dot(cxcrosscov) + xccrosscov.dot(gam)))
            mv = np.array([m, v])
            w = self.mvweight
            dhat = m + w*np.sqrt(v)
            fgrad = np.array([1, self.mvweight/(2.*np.sqrt(v))]).reshape([2,1])
            dvar = float(fgrad.T.dot(mv_cov.dot(fgrad)))
            mx_var = float(fgrad.T.dot(xcov.dot(fgrad)))/M
            mc_var = float(fgrad.T.dot(ccov.dot(fgrad)))/M
            mxc_cov = float(fgrad.T.dot(xccrosscov.dot(fgrad)))/M
            eta = cdict['dvar']/mc_var
        return {'dhat': dhat, 'dvar': dvar, 'mv': mv, 'mv_cov': mv_cov,
                'gamma': gam, 'eta': eta, 'M': M, 'rho': 0,
                'mx_var': mx_var, 'mc_var': mc_var, 'mxc_cov': mxc_cov}
    def predictorIR(self, xdict, cdict):
        """Predict the sample count needed for the reuse estimator to reach
        var_req, by solving the predicted-variance equation for n."""
        mcx_cov = xdict['mxc_cov']
        vmc = xdict['mc_var']
        vmx = xdict['mx_var']
        vsc = cdict['dvar']
        n0 = float(xdict['M'])
        def predvar(n):
            # predicted estimator variance at sample size n, minus target
            n = float(n)
            return vmx*n0/n - (mcx_cov*n0/n)**2 / (vsc+vmc*n0/n) - self.var_req
        ans, info, ier, mesg = scopt.fsolve(predvar, n0, full_output=True)
        if ier != 1:
            # root find failed; just add a few more samples
            if self.verbose:
                print 'Predictor solver did not converge: ', mesg
                print 'Adding a few more samples'
            ans = int(n0)
        return int(ans)
|
import socket
import json
import time
from termios import tcflush, TCIFLUSH
import sys
import Adafruit_BBIO.PWM as PWM
import Adafruit_BBIO.ADC as ADC
import Adafruit_BBIO.GPIO as GPIO
# --- One-time hardware setup for the BeagleBone robot controller ---
PWM.cleanup()
#set pins
frontright_pin = "P9_14" #PWM pin working
frontleft_pin = "P9_21" #PWM pin working
rearright_pin = "P9_42" #PWM pin #P9_22 is available PWM still but wasn't working for some reason
rearleft_pin = "P8_13" #PWM pin just changed this one
compressor_output_pin_high = "P8_18" #GPIO pin
compressor_output_pin_low = "P8_17" #GPIO pin
compressor_sensor = "P9_13" #GPIO pin
jumpdrive_pin = "P9_23" #GPIO pin
#shooter_pin = "P9_42" #PWM pin CHANGE
#conveyor_pin = "P9_22" #PWM pin
#spinner_pin = "P8_45" #PWM pin (constantly spinning regardless of anything else) (may want to change this)
#end of pin assignments
#create jumpdrive state
jumpstate = 0 #0 is... I forget... actually I haven't set it yet lets say 0 is mechanum mode
#whenever we get a toggle signal we need to change this
#this is more complicated than you would think
#need current state and past state variables to determine switch
lastjmpsig = 0 #initialize the last state to be 0
#after code always update this with last JumpDrive signal sent
#if(!lastjmpsig && JumpDrive) then toggle
#lastjmpsig = JumpDrive
#toggle()
# if(jumpstate)
# jumpstate = 0
# else
# jumpstate = 1
#initialize jumpdrive input
JumpDrive = 0 #please set to 1 later, well hmm...
#start PWMs at neutral
#print "pointa"
PWM.start(frontright_pin,7.5,50,0) #frequency of 50Hz results in 20ms period
PWM.start(frontleft_pin,7.5,50,0) #for Talon Motor Controllers pulses should last 1-2ms
PWM.start(rearright_pin,7.5,50,0) #which is 5-10% duty cycle (5 = backwards, 7.5 = neutral, 10 = forward)
PWM.start(rearleft_pin,7.5,50,0) #figure out which pins are PWM able
#PWM.start(shooter_pin,7.5,50,0) #the 7 PWM signals are the 4 wheels, shooter, conveyor, spinner
#PWM.start(conveyor_pin,7.5,50,0) #the conveyor is a boolean, either 7.5% or 10% (we will change the value) (easier to make PWM regularly)
#PWM.start(spinner_pin,10,50,0) #the spinner is always on full go (may need to adjust the value)
#print "pointb"
GPIO.setup(compressor_sensor, GPIO.IN) #this is the input that lets us know if it is fully compressed
GPIO.setup(compressor_output_pin_high, GPIO.OUT) #NOTE(review): purpose unclear in original (copied from Ahmed) — verify
GPIO.setup(compressor_output_pin_low, GPIO.OUT) #NOTE(review): purpose unclear in original (copied from Ahmed) — verify
GPIO.setup(jumpdrive_pin, GPIO.OUT) #is just the jumpstate
#note Ahmed uses threading which I don't understand so I'm
#just gonna throw everything into the big while loop down there
#the readability is lessened but it will still work
# TCP server the controller client connects to (USB network gadget address)
TCP_IP = '192.168.7.2'
TCP_PORT = 5005
BUFFER_SIZE = 512 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
#PWM.set_duty_cycle(pin, pwmval) use this to change PWM duty cycle as we go along
try:
while 1:
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
#print "received data:", data
controllerdata = json.loads(str(data))
#for k,v in controllerdata.items():
#print str(k) + " " + str(v)
#here is where we set all of the pins
#do jumpstate first then PWM after
#print "lastjmpsig", lastjmpsig
#print "JumpDrive", controllerdata['JumpDrive'] #this is a variable from controller data you made this one
if (not(lastjmpsig) and controllerdata['JumpDrive']):
#print "toggle jumpstate"
if jumpstate:
jumpstate = 0
else:
jumpstate = 1
lastjmpsig = controllerdata['JumpDrive']
#print jumpstate
if jumpstate:
GPIO.output(jumpdrive_pin, GPIO.HIGH)
else:
GPIO.output(jumpdrive_pin, GPIO.LOW)
#set compressor stuff (copied from Ahmed's code)
var = bool (GPIO.input(compressor_sensor))
if var:
#print "DETECTED"
GPIO.output(compressor_output_pin_high, GPIO.LOW)
GPIO.output(compressor_output_pin_low, GPIO.LOW)
else:
#print "TURN ON"
GPIO.output(compressor_output_pin_high, GPIO.HIGH)
GPIO.output(compressor_output_pin_low, GPIO.LOW)
#now compressor is set and also jumpdrive is set
#next are the 6 PWM pins
#find the duty cycle passed in based on jumpstate
#map the PWM val to 5 to 10 duty cycle
#need to have GUI variable be from 0 to 255 (alter that)
#then divide by 255 and multiply by the range (2.5) and add 7.5
#so write this code with that assumption I guess
#well actually we have data from 30-90 so if we multiply by 2.. hmm
#okay i fixed it in the code (it was super easy)
if jumpstate:
#assign duty cycles for reg drive
frduty = float(float(controllerdata['FrontRight'])/50.8 + 5.00)
flduty = float(float(controllerdata['FrontLeft'])/50.8 + 5.00)
rrduty = float(float(controllerdata['RearRight'])/50.8 + 5.00)
rlduty = float(float(controllerdata['RearLeft'])/50.8 + 5.00)
else:
#assign duty cycles for mech drive
frduty = float(float(controllerdata['MechFR'])/50.8 + 5.00) #50.8 = 127/2.5
flduty = float(float(controllerdata['MechFL'])/50.8 + 5.00) #which is pwmstop(fromGUI)/pwmrange(here)
rrduty = float(float(controllerdata['MechRR'])/50.8 + 5.00) #converts it to proper value
rlduty = float(float(controllerdata['MechRL'])/50.8 + 5.00)
shduty = float(float(controllerdata['Shooter']/50.8) + 5.00)
cvduty = float(float(controllerdata['Conveyor']/50.8) + 5.00)
PWM.set_duty_cycle(frontright_pin, frduty)
PWM.set_duty_cycle(frontleft_pin, flduty)
PWM.set_duty_cycle(rearright_pin, rrduty)
PWM.set_duty_cycle(rearleft_pin, rlduty)
#PWM.set_duty_cycle(shooter_pin, shduty)
#PWM.set_duty_cycle(conveyor_pin, cvduty)
print "frduty", frduty
print "flduty", flduty
print "rrduty", rrduty
print "rlduty", rlduty
print "shduty", shduty
print "cvduty", cvduty
print "jumpstate", jumpstate
conn.send('ack')
conn.close()
tcflush(sys.stdin, TCIFLUSH)
except KeyboardInterrupt: #when user inputs ctrl + c
print ""
print "Exit"
PWM.set_duty_cycle(frontright_pin,7.5) #set pwm values to neutral
PWM.set_duty_cycle(frontleft_pin,7.5)
PWM.set_duty_cycle(rearright_pin,7.5)
PWM.set_duty_cycle(rearleft_pin,7.5)
PWM.set_duty_cycle(shooter_pin,7.5)
PWM.set_duty_cycle(conveyor_pin,7.5)
|
from modulos.Hand import Hand
from modulos.Gamer import Gamer
from modulos.Deck import Deck
from modulos import Helpers
import os
class Table:
    """Console Black Jack table.

    Holds the dealer's hand, the main player, an optional split hand and
    the deck, and drives the whole game loop plus the screen drawing.
    """

    dealer: Hand
    gamer: Gamer
    split: Gamer            # second player slot used after a split
    deck: Deck
    screen = 60             # screen width in characters
    bet_max = 300           # table bet limit
    second_card_is_hidden = True   # dealer's hole card still face down?
    split_active = False    # is the split hand the one playing?
    dealer_active = False   # is it the dealer's turn?
    plays: dict             # currently available plays: key -> colored label

    def __init__(self):
        self.dealer = Hand()
        self.gamer = Gamer()
        self.split = Gamer()
        self.deck = Deck()
        self.start()

    def init_plays(self):
        """Build the menu of plays available in the current game state."""
        self.plays = {'h': '\033[1;36mH\033[mit', 's': '\033[1;36mS\033[mtand', 'u': 'S\033[1;36mu\033[mrrender'}
        # Double: only on a two-card hand, before the dealer plays
        if (len(self.gamer.hand.get()) == 2 and not self.split_active and not self.dealer_active) or (
                len(self.split.hand.get()) == 2 and self.split_active):
            self.plays['d'] = '\033[1;36mD\033[mouble'
        # Split: only while playing the main hand
        if self.gamer.split_verify() and not self.split_active and not self.dealer_active:
            self.plays['p'] = 'S\033[1;36mp\033[mlit'
        # Insurance: dealer shows an ace and none bought yet
        if self.second_card_is_hidden and not self.split_active and \
                self.dealer.get()[0].get_number() == 'A' and self.gamer.insurance == 0:
            self.plays['i'] = '\033[1;36mI\033[mnsurance'
        # Exit
        self.plays['e'] = '\033[1;36mE\033[mxit'

    def start(self):
        # Starts a new round: clears hands/bets and deals two cards each.
        if not self.verify_deck():
            self.end_game()
        """
        Starts the game
        """
        # Clear the hands
        self.dealer.free()
        self.gamer.hand.free()
        self.split.hand.free()
        self.second_card_is_hidden = True
        self.gamer.bet = 0
        self.split.bet = 0
        # Deal the dealer's cards
        self.dealer.add(self.deck.get_card())
        self.dealer.add(self.deck.get_card())
        # Deal the player's cards
        self.gamer.hand.add(self.deck.get_card())
        self.gamer.hand.add(self.deck.get_card())
        # Check whether the player got an immediate Black Jack
        if self.gamer.hand.count() == 21:
            self.hit()
        else:
            # Draw the screen
            self.print_table(new_game=True)

    def verify_deck(self):
        """
        Return whether the game can continue: enough cards have not been
        discarded yet and the player still has money or live bets.
        :rtype: bool
        """
        if self.deck.qt() < 10:
            self.deck = Deck()
        return self.deck.qt_garbage() < 90 and (self.gamer.amount >= 1 or self.gamer.bet > 0 or self.split.bet > 0)

    def hit(self, is_double=False):
        """
        Deal a hit to whichever hand is currently playing.
        :param is_double: also double the bet?
        :rtype: bool
        """
        if not self.verify_deck():
            self.end_hound()
            return False
        # Split hand active?
        if self.split_active:
            self.split.hand.add(self.deck.get_card())
            if is_double:
                self.split.double()
                self.stand()
        # Dealer active? Draw to 17 and settle the round.
        elif self.dealer_active:
            self.second_card_is_hidden = False
            while self.dealer.count() < 17:
                self.dealer.add(self.deck.get_card())
            self.dealer_active = False
            self.end_hound()
            return True
        # Main hand active?
        else:
            self.gamer.hand.add(self.deck.get_card())
            if is_double:
                self.gamer.double()
                self.stand()
        # Auto-stand on 21 or bust
        if not self.dealer_active and (self.gamer.hand.count() >= 21 or self.split.hand.count() >= 21):
            self.stand()
        elif not self.verify_deck() or (not self.dealer_active and self.gamer.hand.count() >= 21):
            self.end_hound()
            return False
        self.print_table()
        return True

    def stand(self):
        # Split hand active? Hand over to the dealer and run its draw.
        if self.split_active:
            self.split_active = False
            self.dealer_active = True
            self.hit()
        # A split hand exists? Move play on to hand 2.
        elif self.split.hand.count() > 0:
            self.split_active = True
            self.print_table()
        # Otherwise hand over to the dealer and run its draw.
        else:
            self.dealer_active = True
            self.hit()

    def insurance(self):
        # Buy insurance against a dealer Black Jack.
        self.gamer.add_insurance()
        self.print_table()

    def end_hound(self):
        """
        Score the round and pay out or collect the bets.
        """
        split = self.split.hand.count()
        gamer = self.gamer.hand.count()
        dealer = self.dealer.count()
        win = ''
        # Black Jack!
        split_bj = len(self.split.hand.get()) == 2 and split == 21
        gamer_bj = len(self.gamer.hand.get()) == 2 and gamer == 21
        dealer_bj = len(self.dealer.get()) == 2 and dealer == 21
        if split_bj or gamer_bj or dealer_bj:
            # Main hand
            if gamer_bj and not dealer_bj:
                self.gamer.bet_wins()
                win = 'Gamer 1 Ganha de Bancada'
            elif dealer_bj and not gamer_bj:
                self.gamer.bet_lost()
                win = 'Bancada Ganha de Gamer 1'
            elif dealer_bj and gamer_bj:
                self.gamer.game_tie()
                win = 'Gamer 1 e Bancada Empatam!'
            # Split hand
            if split > 0:
                if split_bj and not dealer_bj:
                    self.split.bet_wins()
                    win += ' | Gamer 2 Ganha de Bancada'
                elif dealer_bj and not split_bj:
                    self.split.bet_lost()
                    win += ' | Bancada Ganha de Gamer 2'
                elif dealer_bj and split_bj:
                    self.split.game_tie()
                    win += ' | Gamer 2 e Bancada Empatam!'
                # Fold the split purse back into the main player's
                self.gamer.amount += self.split.amount
                self.split.amount = 0
                self.split.bet = 0
        # (no Black Jack)
        else:
            # Main hand
            if gamer == dealer or gamer > 21 < dealer:
                self.gamer.game_tie()
                win = 'Gamer 1 e Bancada Empatam!'
            elif dealer > 21 >= gamer or dealer < gamer <= 21:
                self.gamer.bet_wins()
                win = 'Gamer 1 Ganha de Bancada'
            else:
                self.gamer.bet_lost()
                win = 'Bancada Ganha de Gamer 1'
            # Split hand
            if split > 0:
                if split == dealer or split > 21 < dealer:
                    self.split.game_tie()
                    win += ' | Gamer 2 e Bancada Empatam!'
                elif dealer > 21 >= split or dealer < split <= 21:
                    self.split.bet_wins()
                    win += ' | Gamer 2 Ganha de Bancada'
                else:
                    self.split.bet_lost()
                    win += ' | Bancada Ganha de Gamer 2'
                # Fold the split purse back into the main player's
                self.gamer.amount += self.split.amount
                self.split.amount = 0
                self.split.bet = 0
        self.print_table(win)

    def slice(self):
        """Split the player's pair into two hands with equal bets."""
        if not self.gamer.split_verify():
            return False
        # Split the hand
        self.split.hand.free()
        self.split.hand.add(self.gamer.hand.get()[1])
        self.gamer.hand.cards.pop(1)
        # Split the bet
        self.split.amount = 0
        self.split.bet = self.gamer.bet
        self.gamer.amount -= self.split.bet
        # Deal one card to each hand
        self.gamer.hand.add(self.deck.get_card())
        self.split.hand.add(self.deck.get_card())
        self.print_table()

    def end_game(self):
        """Print the final balance and terminate the program."""
        self.gamer.game_tie()
        Helpers.print_color('Fim de Jogo!', color='red', style='bold', length=self.screen, align='center')
        Helpers.print_color('─' * 14, align='center', length=self.screen, color='pink')
        Helpers.print_color(f'Amount: ${self.gamer.amount:>7.1f}', align='center', length=self.screen, color='pink')
        print()
        Helpers.title(tam=self.screen, espaco=0, character='─')
        exit()

    def print_table(self, status='', new_game=False, end_game=False):
        """
        Screen drawing (and, between draws, the play prompt loop).
        :param end_game: is the game over?
        :param new_game: is this a new round?
        :param status: current game status.
            '' - waiting for a play
            Other examples:
            'Dealer Ganha!'
            'Gamer 1 Ganha, Gamer 2 Perde!'
            'Gamer 2 Ganha, Gamer 1 Perde!'
            'Gamer 1 e Gamer 2 Ganham, Dealer Perde!'
            'Empate!'
        """
        os.system('cls' if os.name == 'nt' else 'clear') or None
        tela = self.screen
        Helpers.title('BlackJack', tam=tela, espaco=0, character='─')
        # Initial bet not placed yet? Prompt for it first.
        if self.gamer.bet == 0 and new_game and self.verify_deck() and not end_game:
            while True:
                error = False
                bet = 0
                try:
                    print(f'Caixa: {self.gamer.amount}')
                    bet = float(input('Aposta Inicial (0 para sair): '))
                except ValueError:
                    error = True
                if bet == 0:
                    self.print_table(end_game=True)
                    return
                elif error or bet > self.bet_max or bet > self.gamer.amount:
                    Helpers.print_color('Valor inválido! Tente novamente.', color='red')
                    Helpers.print_color(f'Min: 1, Máx: {self.bet_max}', color='pink')
                else:
                    break
            print('-' * 20)
            self.gamer.betting(bet)
            self.print_table()
            return
        # Discard pile and deck counters
        print(f'{"┌────┐":<{int(tela / 2)}}{"┌────┐":>{int(tela / 2)}}')
        print(f'{f"│ {self.deck.qt_garbage():0>2} │ Death":<{int(tela / 2)}}', end='')
        print(f'{f"Deck │ {self.deck.qt():0>2} │":>{int(tela / 2)}}')
        print(f'{"└────┘":<{int(tela / 2)}}{"└────┘":>{int(tela / 2)}}')
        # Dealer
        dealer_count = self.dealer.count(self.second_card_is_hidden)
        bigger = len(self.dealer.get())
        if dealer_count == 21 and len(self.dealer.get()) == 2:
            dealer_status = ' (Black Jack!)'
        elif dealer_count <= 21:
            dealer_status = ''
        else:
            dealer_status = ' (Estouro!)'
        Helpers.print_color(f'~ Dealer: {dealer_count:0>2}{dealer_status} ~', color='cyan', align='center',
                            style='bold', length=tela)
        cards_dealer = ''
        cards_dealer_before = ''
        cards_dealer_after = ''
        for i, card in enumerate(self.dealer.get()):
            cards_dealer_before += ' ┌─────┐ '
            cards_dealer_after += ' └─────┘ '
            # The hole card (index 1) prints as "?" while hidden
            cards_dealer += f' │ {card if i != 1 or not self.second_card_is_hidden else " ? "} │ '
        Helpers.print_color(cards_dealer_before, color='blue', style='bold', align='center', length=tela)
        Helpers.print_color(cards_dealer, color='blue', style='bold', align='center', length=tela)
        Helpers.print_color(cards_dealer_after, color='blue', style='bold', align='center', length=tela)
        Helpers.print_color('─' * bigger * 9, align='center', length=tela, color='pink')
        Helpers.print_color('Dealer must draw to 16 and stand on all 17\'s', align='center', length=tela, color='pink')
        Helpers.print_color('2 to 1 Insurance 2 to 1', align='center', length=tela, color='pink')
        Helpers.print_color('Bet: min. 1, max 300', align='center', length=tela, color='pink')
        print()
        Helpers.title(tam=tela, espaco=0)
        print()
        # Main player
        gamer_count = self.gamer.hand.count()
        bigger = len(self.gamer.hand.get())
        if gamer_count == 21 and len(self.gamer.hand.get()) == 2:
            gamer_status = ' (Black Jack!)'
        elif gamer_count <= 21:
            gamer_status = ''
        else:
            gamer_status = ' (Estouro!)'
        Helpers.print_color(
            f'~ Gamer 1: {gamer_count:0>2}{gamer_status} ~',
            color='yellow' if not self.split_active else 'gray',
            align='center',
            style='bold' if not self.split_active else 'none',
            length=tela
        )
        cards_gamer = ''
        cards_gamer_before = ''
        cards_gamer_after = ''
        for i, card in enumerate(self.gamer.hand.get()):
            cards_gamer_before += ' ┌─────┐ '
            cards_gamer_after += ' └─────┘ '
            cards_gamer += f' │ {card} │ '
        color = 'green' if not self.split_active else 'gray'
        bold = 'bold' if not self.split_active else 'none'
        Helpers.print_color(cards_gamer_before, color=color, style=bold, align='center', length=tela)
        Helpers.print_color(cards_gamer, color=color, style=bold, align='center', length=tela)
        Helpers.print_color(cards_gamer_after, color=color, style=bold, align='center', length=tela)
        # Split hand (only drawn when it holds cards)
        split_count = self.split.hand.count()
        if split_count == 21 and len(self.split.hand.get()) == 2:
            split_status = ' (Black Jack!)'
        elif split_count <= 21:
            split_status = ''
        else:
            split_status = ' (Estouro!)'
        if split_count > 0:
            if len(self.split.hand.get()) > len(self.gamer.hand.get()):
                bigger = len(self.split.hand.get())
            Helpers.print_color('─' * bigger * 9, align='center', length=tela, color='pink')
            split_count = self.split.hand.count()
            Helpers.print_color(
                f'~ Gamer 2: {split_count:0>2}{split_status} ~',
                color='yellow' if self.split_active else 'gray',
                align='center',
                style='bold' if self.split_active else 'none',
                length=tela
            )
            cards_split = ''
            cards_split_before = ''
            cards_split_after = ''
            for i, card in enumerate(self.split.hand.get()):
                cards_split_before += ' ┌─────┐ '
                cards_split_after += ' └─────┘ '
                cards_split += f' │ {card} │ '
            color = 'green' if self.split_active else 'gray'
            bold = 'bold' if self.split_active else 'none'
            Helpers.print_color(cards_split_before, color=color, style=bold, align='center', length=tela)
            Helpers.print_color(cards_split, color=color, style=bold, align='center', length=tela)
            Helpers.print_color(cards_split_after, color=color, style=bold, align='center', length=tela)
        # Bets and balance
        Helpers.print_color('─' * bigger * 9, align='center', length=tela, color='pink')
        Helpers.print_color(
            f'Bet...: ${self.gamer.bet + self.split.bet:>7.1f}',
            align='center', length=tela, color='pink'
        )
        if self.gamer.insurance > 0:
            Helpers.print_color(f'Insur.: ${self.gamer.insurance:>7.1f}', align='center', length=tela, color='pink')
        Helpers.print_color(f'Amount: ${self.gamer.amount:>7.1f}', align='center', length=tela, color='pink')
        print()
        Helpers.title(tam=tela, espaco=0)
        print()
        if end_game:
            self.end_game()
        elif status != '':
            # Round over: show the result and wait before restarting
            Helpers.print_color(status, color='red', style='bold', align='center', length=tela)
            print()
            Helpers.title(tam=tela, espaco=0)
            if self.verify_deck():
                input('Press [ENTER] to continue...')
                self.start()
            else:
                self.end_game()
        elif self.verify_deck():
            # Prompt for and dispatch the next play
            self.init_plays()
            Helpers.print_color(f'Gamer {"2" if self.split_active else "1"}', color='blue', style='bold', end='')
            for i, value in enumerate(self.plays.values()):
                if i == 0:
                    print(f' ({value}', end='')
                else:
                    print(f' | {value}', end='')
            print(')')
            while True:
                jogada = str(input('Jogada: ')).strip().lower()
                if jogada in self.plays.keys():
                    break
                Helpers.print_color('Opção inválida. Escolha uma das opções acima!')
            print()
            Helpers.title(tam=tela, espaco=0)
            print()
            if jogada == 'e':
                self.end_game()
            elif jogada == 'h':
                self.hit()
            elif jogada == 's':
                self.stand()
            elif jogada == 'u':
                self.second_card_is_hidden = False
                self.end_hound()
            elif jogada == 'd':
                self.hit(True)
            elif jogada == 'p':
                self.slice()
            elif jogada == 'i':
                self.insurance()
            else:
                self.print_table()
        else:
            self.end_game()
|
import sys
# stdlib replacement for distutils.sysconfig (distutils was removed in
# Python 3.12); the same values are available here.
import sysconfig

# Emit a Boost.Build "using python" line: version, install prefix, and
# the directory holding the CPython C headers.
s = "using python : {version} : {prefix} : {inc} ;\n".format(
    version=sysconfig.get_python_version(),
    prefix=sysconfig.get_config_var("prefix"),
    inc=sysconfig.get_path("include"))
sys.stdout.write(s)
|
#oef3
import math
def maximum(a, b, c):
    """Print which of the three values is the largest (message in Dutch)."""
    # max() replaces the nested branch chain; on ties the printed value is
    # the same number the original branches would have chosen.
    print("{} is het grootste getal.".format(max(a, b, c)))
if __name__ == "__main__":
    # Guarded so importing this module does not block on input();
    # running it as a script behaves exactly as before.
    a = int(input("Geef het eerste getal op: "))
    b = int(input("Geef het tweede getal op: "))
    c = int(input("Geef het derde getal op: "))
    maximum(a, b, c)
|
from flask import Flask, make_response, request
from MonsterGen import Monster, CR, random_trap, Npc, monster_loot, horde_loot
app = Flask(__name__)


@app.route('/monster')
def monster():
    """Generate a monster scaled to the party described by the query string."""
    params = request.args
    challenge = CR.party_adapter(
        average_level=int(params['avg-level']),
        num_players=int(params['num-players']),
        difficulty=int(params['difficulty']),
    )
    return Monster(challenge).to_dict()
@app.route('/monster-cr-type')
def monster_cr():
    """Generate a monster from an explicit CR and creature type."""
    challenge = int(request.args['cr'])
    creature_type = request.args['type']
    return Monster(challenge, creature_type).to_dict()
@app.route('/treasure')
def treasure():
    """Loot drop for a single monster of the requested CR."""
    challenge = int(request.args['cr'])
    return monster_loot(challenge).to_dict()
@app.route('/treasure-horde')
def treasure_horde():
    """Horde-sized loot drop for the requested CR."""
    challenge = int(request.args['cr'])
    return horde_loot(challenge).to_dict()
@app.route('/trap')
def trap():
    """Random trap for the requested CR."""
    challenge = int(request.args['cr'])
    return random_trap(challenge).to_dict()
@app.route('/trap-type')
def trap_type():
    """Random trap constrained to a CR and a damage type."""
    return random_trap(int(request.args['cr']), request.args['type']).to_dict()
@app.route('/npc')
def npc():
    """Randomly generated NPC."""
    return Npc().to_dict()
@app.before_request
def before_request():  # CORS preflight
    """Answer CORS preflight OPTIONS requests with permissive headers.

    Uses flask.make_response, now imported at the top of the file — the
    original referenced it without importing it, so every OPTIONS request
    raised NameError instead of returning the preflight response.
    """
    if request.method == "OPTIONS":
        response = make_response()
        response.headers.add("Access-Control-Allow-Origin", "*")
        response.headers.add("Access-Control-Allow-Headers", "*")
        response.headers.add("Access-Control-Allow-Methods", "*")
        return response
@app.after_request
def after_request(response):  # CORS headers
    """Attach a permissive CORS origin header to every outgoing response."""
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
if __name__ == "__main__":
    # Flask development server; use a production WSGI server for deployment.
    app.run()
|
from calendar import monthrange
from datetime import timedelta
from enum import IntEnum
class TimeFreq(IntEnum):
    # Sampling frequencies ordered finest to coarsest, so comparisons such
    # as TimeFreq.Hourly < TimeFreq.Daily hold.
    # NOTE(review): time_freq_factory below also maps 'WW'/'Weekly', but no
    # Weekly member exists here — confirm whether one should be added.
    Hourly = 0
    ThreeHourly = 1
    Daily = 2
    Monthly = 3
    Yearly = 4
def time_freq_factory(name_or_abbr):
    """Map a frequency name or two-letter abbreviation to a TimeFreq member.

    :param name_or_abbr: e.g. 'HH' or 'Hourly', '3H', 'DD', 'MM', 'YY'.
    :raises RuntimeError: for unknown names — including 'WW'/'Weekly': the
        original returned the nonexistent TimeFreq.Weekly for those, which
        raised AttributeError instead of the intended RuntimeError.
    """
    lookup = {
        'HH': TimeFreq.Hourly, 'Hourly': TimeFreq.Hourly,
        '3H': TimeFreq.ThreeHourly, 'ThreeHourly': TimeFreq.ThreeHourly,
        'DD': TimeFreq.Daily, 'Daily': TimeFreq.Daily,
        'MM': TimeFreq.Monthly, 'Monthly': TimeFreq.Monthly,
        'YY': TimeFreq.Yearly, 'Yearly': TimeFreq.Yearly,
    }
    try:
        return lookup[name_or_abbr]
    except KeyError:
        raise RuntimeError('Unknown time frequency: %r' % (name_or_abbr,))
def delta_time_freq(time_steps_array):
    """Deduce the sampling frequency of a datetime array from its first step.

    Only the first two entries are inspected; a single-element array yields
    a zero delta, which falls through to 'Yearly' (original behavior kept).

    :returns: (name, dt) where name is one of 'Hourly', 'ThreeHourly',
        'Daily', 'Monthly', 'Yearly' and dt is the matching timedelta, or
        None for Monthly/Yearly where the step length is not constant.
    """
    d1 = time_steps_array[0]
    d2 = time_steps_array[1] if len(time_steps_array) >= 2 else d1
    delta_time = d2 - d1
    days_in_month = monthrange(d1.year, d1.month)[1]
    # Compare whole timedeltas for the sub-daily cases: the original used
    # delta_time.seconds, which ignores .days, so e.g. a step of
    # 1 day + 1 hour was misclassified as 'Hourly'.
    if delta_time == timedelta(hours=1):
        return 'Hourly', timedelta(hours=1)
    if delta_time == timedelta(hours=3):
        return 'ThreeHourly', timedelta(hours=3)
    if delta_time.days == 1:
        return 'Daily', timedelta(days=1)
    if delta_time.days == days_in_month:
        return 'Monthly', None
    return 'Yearly', None
#print TimeFreq.Hourly < TimeFreq.Daily
#print TimeFreq.ThreeHourly.name
#print time_freq_factory('DD')
#print time_freq_factory('hkjhjkh')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 11May 14 20:17:23 2020
@author: bfemenia
"""
# %% IMPORT SECTION
#-------------------
import pandas as pd
from astropy import units as u
from astropy.coordinates import Angle, Distance, Latitude, Longitude, SkyCoord
from reducerTFM import DiasCatalog as DiasCatalog
# %% MAIN CODE
#-------------
def generate_ip_dias(l_max=360, r_max=1.5, n_min=3, path='home/bfemenia/TFM/IP_files_DC'):
    """
    Bruno Femenia Castella
    This routine generates the selection of ip files over the Galactic plane
    containing at least 3 clusters in Dias catalog and not beyond r_max degs of
    size. Each of these sky patches witll be analyzed to derived eps=eps(l,b)
    and min_pts=min_pets(l,b) which will guide the search for clusters in a
    subsequent exploration of Gaia data.
    Based on these files we evaluate the (eps,Nmin) to be used over different
    regions of whole Galactic plane to find new candidates with DBSCAN.
    NOTE(review): the 'path' parameter is never used in the body — the csv
    files are written to the current working directory — and its default is
    missing a leading '/'. Confirm intended output location.
    Returns
    -------
    patches_df : pandas.DataFrame of the selected patches (the original
    docstring said None, but the function does return this frame).
    """
    full_dc = DiasCatalog('Cluster', 'RAJ2000','DEJ2000', 'l', 'b', 'Class',
                          'Diam', entire=True)
    # Restrict to the Galactic disk: |b| < 20 deg and l below l_max
    gal_disk= full_dc.table[ (abs(full_dc.table['b']) < 20) & (full_dc.table['l'] < l_max) ]
    patches=[]
    for i, this_cluster in enumerate(gal_disk): #Iterating over ALL clusters
                                                #in selected region |b|< 20 and l < l_max
        c1 = SkyCoord(this_cluster['l']*u.deg,
                      this_cluster['b']*u.deg,
                      frame='galactic')
        # Cheap pre-filter on |Δb| only; the true angular-separation cut
        # against r_max is applied in the inner loop below.
        clusters = gal_disk[ (abs(gal_disk['b']-this_cluster['b']) <= r_max)]
        if len(clusters) < n_min: #Requesting a minimum number of clusters in this patch!!
            continue
        # Seed the candidate list with the central cluster itself (sep 0)
        candidates=[{'name':this_cluster['Cluster'], 'ang_sep':0.,
                     'l':this_cluster['l'], 'b':this_cluster['b']}]
        for cluster in clusters:
            c2 = SkyCoord(cluster['l']*u.deg, cluster['b']*u.deg, frame='galactic')
            ang_sep = c1.separation(c2).deg
            if (ang_sep <= r_max) and (ang_sep > 0):
                new = dict({'name':cluster['Cluster'], 'ang_sep':ang_sep,
                            'l':cluster['l'], 'b':cluster['b']})
                candidates.append(new)
        if len(candidates) >= n_min:
            df = pd.DataFrame.from_records(candidates)
            df.sort_values('ang_sep', inplace=True)
            # Patch position = mean of the n_min closest candidates
            patch = df[0:n_min].mean() #Use only n_min clusters at a time!!
            patches.append(patch)
    patches_df = pd.DataFrame.from_records(patches)
    patches_df.drop_duplicates(subset=['l','b'], inplace=True) #Removing duplicates
    patches_df.reset_index(drop=True, inplace=True)
    #Adding rest of fields as defined by Rafa in his Thesis. Then save csv.
    #----------------------------------------------------------------------
    op_dict={'ra':[0.],
             'dec':[0.],
             'l':[0.],
             'b':[0.],
             'r':[0.],
             'err_pos':[100],
             'g_lim':[18.0],
             'norm':[None],
             'sample':[1.],
             'dim3':[None],
             'distance':['euclidean'],
             'eps_min':[0.008],
             'eps_max':[0.03],
             'eps_num':[100],
             'min_pts_min':[10],
             'min_pts_max':[100],
             'min_pts_num':[91]}
    op_df=pd.DataFrame(data= op_dict)
    # One single-row csv per patch, named ip_file_DC_#####.csv, written to
    # the current working directory.
    for index, row in patches_df.iterrows():
        fn = 'ip_file_DC_'+str(index).zfill(5)+'.csv'
        c1 = SkyCoord(row['l']*u.deg, row['b']*u.deg, frame='galactic')
        op_df[ 'ra']= c1.icrs.ra.deg
        op_df['dec']= c1.icrs.dec.deg
        op_df[ 'l']= row['l']
        op_df[ 'b']= row['b']
        op_df[ 'r']= row['ang_sep']
        op_df.to_csv(fn, index=False, header=True)
    return patches_df
|
# Generated by Django 2.0.3 on 2018-12-01 16:58
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: replaces Profile.awaitingResponse and
    # Profile.offeredFriendship with ManyToManyField self-relations
    # (each field is removed and re-added with its new definition).

    dependencies = [
        ('deckShare', '0004_auto_20181201_1653'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='awaitingResponse',
        ),
        migrations.AddField(
            model_name='profile',
            name='awaitingResponse',
            field=models.ManyToManyField(blank=True, related_name='friendAwaiting', to='deckShare.Profile'),
        ),
        migrations.RemoveField(
            model_name='profile',
            name='offeredFriendship',
        ),
        migrations.AddField(
            model_name='profile',
            name='offeredFriendship',
            field=models.ManyToManyField(blank=True, related_name='friendOffering', to='deckShare.Profile'),
        ),
    ]
|
from django.db import models
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
from django.db.models import Sum, Case, When, IntegerField
from django.db.models.functions import TruncDay, TruncMonth, TruncYear
# NOTE(review): evaluated once at import time — in a long-running process
# 'today' goes stale, and the date-based model properties below inherit
# this. Confirm whether timezone.now() should be called per request.
today = timezone.now()
class Profile(models.Model):
    # Per-user profile: UI colour, avatar, and expense aggregation helpers.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    color = models.CharField(max_length=20, default='green')
    image = models.ImageField(default='default.jpg')

    def __str__(self):
        return self.user.username

    def get_absolute_url(self):
        return reverse('profile', kwargs={'username': self.user.username})

    @property
    def team(self):
        # First team the profile belongs to, or False when there is none.
        if self.team_set.all():
            return self.team_set.all()[0]
        return False

    @property
    def all_expenses(self):
        return self.expense_set.all()

    @property
    def current_year_exps(self):
        # Sum of this profile's expenses created in the current year.
        # NOTE(review): 'today' is captured at module import time.
        return self.expense_set.filter(
            created__year=today.year).aggregate(
            summary=Sum('amount'))

    @property
    def current_month_exps(self):
        # NOTE(review): filters by month number only (any year) — confirm
        # whether the current year should also be required.
        return self.expense_set.filter(
            created__month=today.month).aggregate(
            summary=Sum('amount'))

    @property
    def today_exps(self):
        # NOTE(review): filters by day-of-month only (any month/year).
        return self.expense_set.filter(
            created__day=today.day).aggregate(summary=Sum('amount'))

    @property
    def expenses_per_day(self):
        # Expenses annotated with their (truncated) day, oldest first.
        return self.expense_set.annotate(
            day=TruncDay('created'), sum=Sum('amount')).order_by('day')
class Team(models.Model):
    # A spending group; members are linked through the Membership model.
    title = models.CharField(max_length=100)
    balance = models.IntegerField(default=0)
    members = models.ManyToManyField(Profile, through='Membership')
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.title

    @property
    def all_expenses(self):
        # 'for_team' is presumably the reverse accessor of an Expense FK
        # defined elsewhere — TODO confirm against the Expense model.
        return self.for_team.all().order_by('-created')

    @property
    def sum_of_current_year(self):
        # NOTE(review): 'today' is frozen at module import time.
        return self.for_team.filter(created__year=today.year).aggregate(summary=Sum('amount'))

    @property
    def team_category_exps_monthly(self):
        # Totals grouped by expense category and month.
        return self.for_team.values(
            'category__title', 'category__color').annotate(
            month=TruncMonth('created'), summary=Sum('amount'))

    @property
    def team_members_exps(self):
        # Per-member expense totals restricted to the current year.
        return self.members.annotate(year = Case(
            When(expense__created__year=today.year, then=today.year),
            default=0,
            output_field=IntegerField())
        ).filter(year=today.year).annotate(sum=Sum('expense__amount'))

    @property
    def team_month_exp(self):
        # Per-member totals annotated with the month of the expenses.
        return self.members.annotate(
            month=TruncMonth('expense__created'),
            summary=Sum('expense__amount'))
class Membership(models.Model):
    # Through-model for Team.members, recording the member's role.
    profile = models.ForeignKey(Profile, on_delete=models.CASCADE)
    team = models.ForeignKey(Team, on_delete=models.CASCADE)
    role = models.CharField(max_length=50)
    created = models.DateTimeField(auto_now_add=True)
class MemberRequest(models.Model):
    # A pending membership request from one profile to another.
    from_user = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='from_user')
    to_user = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name='to_user')
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f'from {self.from_user.user.username} to {self.to_user.user.username}'
class Space():
    """A single parking space that can hold at most one vehicle."""

    def __init__(self, location, id, type):
        self.location = location
        self.id = id
        self.occupied = False
        self.assigned_vehicle = None
        self.space_type = type

    def park(self, vehicle):
        """Assign a vehicle to this space and mark it occupied."""
        self.assigned_vehicle = vehicle
        self.occupied = True

    def unpark(self):
        """Release the space.

        The original definition was missing the 'self' parameter, so any
        call of space.unpark() raised a TypeError.
        """
        self.occupied = False
        self.assigned_vehicle = None
class CarSpace(Space):
    """Space sized for a car.

    Fixes the original's missing ':' after the signature and the bare
    'super.__init__' (which is the built-in super object, not a call).
    """

    def __init__(self, location, id):
        super().__init__(location, id, 'Car')
class BikeSpace(Space):
    """Space sized for a bike.

    The original signature omitted 'id' yet passed the name 'id' through,
    silently storing the builtin id() function as the space id; 'id' is
    now a real parameter (defaulting to None to keep one-argument calls
    working).
    """

    def __init__(self, location, id=None):
        super().__init__(location, id, 'Bike')
class Vehicle():
    """Base record for a vehicle entering the lot."""

    def __init__(self, vehicle_number, model, type):
        self.id = vehicle_number
        self.model = model
        # The original line was truncated ('self.vehicle_type='), which is
        # a syntax error; the intended value is the 'type' argument.
        self.vehicle_type = type
class Car(Vehicle):
    """Vehicle of type 'Car' (fixes the bare 'super.__init__' call)."""

    def __init__(self, vehicle_number, model):
        super().__init__(vehicle_number, model, 'Car')
class Bike(Vehicle):
    """Vehicle of type 'Bike' (fixes the bare 'super.__init__' call)."""

    def __init__(self, vehicle_number, model):
        super().__init__(vehicle_number, model, 'Bike')
class VehicleHandler():
    """Allocates spaces to vehicles, queueing arrivals when the lot is full.

    Fixes over the original: every method was missing 'self'; free_spaces
    aliased the master spaces list instead of copying it; parkVehicle read
    an undefined local 'free_space'; and unpark only moved the space back
    to the free list when it was still marked occupied *after* unpark() —
    i.e. never.
    """

    def __init__(self, spaces):
        self.spaces = spaces
        self.vehicles_queue = []
        # Copy so consuming free spaces does not mutate the master list.
        self.free_spaces = list(spaces)
        self.occupied_spaces = []

    def addSpace(self, space):
        """Register a new space and make it immediately available."""
        self.spaces.append(space)
        self.free_spaces.append(space)

    def parkVehicle(self, vehicle):
        """Park in the first free space, or queue the vehicle if none is free."""
        if not self.free_spaces:
            self.vehicles_queue.append(vehicle)
            return
        space = self.free_spaces.pop(0)
        space.park(vehicle)
        self.occupied_spaces.append(space)

    def unpark(self, vehicle):
        """Free the space holding *vehicle*, then park the next queued vehicle."""
        space = None
        for candidate in self.occupied_spaces:
            if candidate.assigned_vehicle == vehicle:
                space = candidate
                break
        if space is None:
            return
        space.unpark()
        self.occupied_spaces.remove(space)
        self.free_spaces.append(space)
        if self.vehicles_queue:
            self.parkVehicle(self.vehicles_queue.pop(0))
class CarHandler(VehicleHandler):
    """Handler for car spaces (fixes missing 'self' and the super() call)."""

    def __init__(self, spaces):
        super().__init__(spaces)
class BikeHandler(VehicleHandler):
    """Handler for bike spaces (fixes missing 'self' and the super() call)."""

    def __init__(self, spaces):
        super().__init__(spaces)
class ParkingLot():
    """Facade routing vehicles to the car or bike handler by vehicle type.

    Fixes over the original: methods were missing 'self'; both branches of
    both methods called a misspelled 'self.CarHanlder' with an undefined
    variable 'car'; unpark delegated to parkVehicle instead of unpark; and
    dispatch read 'vehicle.type', an attribute Vehicle never sets (it
    stores 'vehicle_type').
    """

    def __init__(self, car_spaces, bike_spaces):
        self.CarHandler = CarHandler(car_spaces)
        self.BikeHandler = BikeHandler(bike_spaces)

    def park_vehicle(self, vehicle):
        """Park the vehicle with the handler matching its type."""
        if vehicle.vehicle_type == 'Car':
            self.CarHandler.parkVehicle(vehicle)
        else:
            self.BikeHandler.parkVehicle(vehicle)

    def unpark(self, vehicle):
        """Release the vehicle via the handler matching its type."""
        if vehicle.vehicle_type == 'Car':
            self.CarHandler.unpark(vehicle)
        else:
            self.BikeHandler.unpark(vehicle)
|
from utils import annihilate, read_input

if __name__ == '__main__':
    # Print the size of annihilate()'s result for the puzzle input.
    print(len(annihilate(read_input())))
|
from botSession import kuma
from localDb import welcome_chat
from botInfo import creator
from threading import Timer
def welcome(update, context):
    """Greet a new (human) member of a chat configured in welcome_chat.

    Sends the chat's configured welcome message — personalising a {name}
    placeholder with a Markdown mention when present — and/or a welcome
    sticker, then schedules check_member to run 5 minutes later so the
    welcome can be cleaned up if the user leaves.

    Returns True when a welcome was handled, None otherwise.
    """
    chat_id = update.message.chat_id
    alert_id = update.message.message_id
    # NOTE(review): only the first entry of new_chat_members is handled —
    # a service message adding several users greets just one of them;
    # confirm whether that is intended.
    new_member = update.message.new_chat_members[0]
    bot_status = new_member.is_bot
    if chat_id in welcome_chat and not bot_status:
        resp = True
        user_id = new_member.id
        if 'message' in welcome_chat[chat_id]:
            if '{name}' in welcome_chat[chat_id]['message']:
                user_name = new_member.first_name
                if new_member.last_name:
                    user_name += ' ' + new_member.last_name
                # Keep the inline mention short
                if len(user_name) > 12:
                    user_name = user_name[:12]
                user_link = f'[{user_name}](tg://user?id={user_id})'
                formatted_message = welcome_chat[chat_id]['message'].format(name=user_link)
                welcome_message = update.message.reply_text(formatted_message, parse_mode='Markdown', quote=False)
            else:
                welcome_message = update.message.reply_text(welcome_chat[chat_id]['message'], quote=False)
            msg_id = welcome_message.message_id
        else:
            msg_id = None
        if 'sticker' in welcome_chat[chat_id]:
            welcome_sticker = update.message.reply_sticker(welcome_chat[chat_id]['sticker'], quote=False)
            sticker_id = welcome_sticker.message_id
        else:
            sticker_id = None
        # Re-check membership after 5 minutes
        check = Timer(300, check_member, [chat_id, user_id, alert_id, msg_id, sticker_id])
        # Ignore PyCharm Error
        check.start()
    else:
        resp = None
    return resp
def check_member(chat_id, user_id, alert_id, msg_id=None, sticker_id=None):
    """Run ~5 minutes after a join: verify the user is still in the chat.

    If the user left (or was kicked), delete the join notification and any
    welcome sticker, and replace the welcome message with a removal notice.
    Optionally pings the bot owner with a review link for the chat.

    :returns: True if the user is still a member, False otherwise.
    """
    print('[INFO] Starting new member checking...')
    left = False
    user = kuma.get_chat_member(chat_id, user_id)
    user_status = user.status
    if 'left' in user_status or 'kick' in user_status:
        left = True
    if left:
        kuma.delete_message(chat_id, alert_id)
        if sticker_id:
            kuma.delete_message(chat_id, sticker_id)
        if msg_id:
            kuma.edit_message_text('验证机器人已移除一位未通过验证的用户。', chat_id, msg_id)
        print(f'[INFO] User {user_id} status: LEFT; NOT member.')
    else:
        print(f'[INFO] User {user_id} status: IN; IS member.')
    # Optional manual-review ping to the bot owner. .get() avoids the
    # KeyError the original raised when 'review' was enabled for a chat
    # that had no 'username' stored.
    group_username = welcome_chat[chat_id].get('username')
    if 'review' in welcome_chat[chat_id] and group_username:
        referer = msg_id or sticker_id
        kuma.send_message(creator,
                          f'Please review new member of @{group_username} '
                          f'by [this link](https://t.me/{group_username}/{referer})',
                          parse_mode='Markdown', disable_web_page_preview=True)
    return not left
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
===========
Annotation extraction tool
Takes filename as argument.
Extracts the annotations in json format
and stores them into the same directory.
===========
"""
import os.path
import json
import sys
import copy
from PyPDF2 import PdfFileReader
def extract_annotations(filename):
    """Print (as JSON) the text of every /Highlight annotation in a PDF.

    :param filename: path to the PDF file to scan
    :return: list of cleaned highlight strings (also dumped to stdout)
    """
    # see: http://stackoverflow.com/a/13748949/426266
    path = os.path.realpath('%s' % filename)
    annotations = []
    # BUG FIX: the file was opened without ever being closed; a context
    # manager releases the handle deterministically.
    with open(path, "rb") as pdf_file:
        doc = PdfFileReader(pdf_file)
        for page_number in range(doc.getNumPages()):
            page = doc.getPage(page_number)
            try:
                annots = page['/Annots']
            except KeyError:
                # No annotations on this page.
                continue
            # BUG FIX: the original wrapped the whole page loop in a broad
            # `except Exception: pass`, so one malformed annotation silently
            # discarded every remaining highlight on the page. Missing keys
            # are now handled per annotation via dict .get().
            for annot in annots:
                obj = annot.getObject()
                if obj.get('/Subtype') != '/Highlight':
                    continue
                highlight = obj.get('/Contents')
                # Some annotation contents are bytes, some are plain strings.
                if isinstance(highlight, bytes):
                    highlight = highlight.decode("utf-8")
                if highlight:
                    # Normalise whitespace; skip annotations that end up empty.
                    highlight = highlight.replace("\n", " ").replace("\r", " ").replace("  ", " ").strip()
                    if highlight:
                        annotations.append(highlight)
    print(json.dumps(annotations))
    return annotations
if __name__ == "__main__":
# if len(sys.argv) != 2:
# print "Missing file path as argument"
# print "Usage: %s </filepath/filename.pdf>" % sys.argv[0]
# sys.exit(1)
# # check if argument is a pdf file
argument = sys.argv[1]
# if not os.path.isfile(argument) and argument.endsWith('.pdf'):
# print "Argument must be a pdf file"
# sys.exit(1)
extract_annotations(argument)
|
import torch
from torch.nn import functional as F
def binary_clf_curve(
    preds: torch.Tensor,
    target: torch.Tensor,
    sample_weights = None,
    pos_label: int = 1.,
):
    """Compute false/true positive counts at every distinct score threshold.

    adapted from https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/metrics/_ranking.py

    :return: (fps, tps, thresholds) — cumulative false positives, cumulative
        true positives, and the score at each distinct threshold, all ordered
        by decreasing score.
    """
    if sample_weights is not None and not isinstance(sample_weights, torch.Tensor):
        sample_weights = torch.tensor(sample_weights, device=preds.device, dtype=torch.float)

    # Drop a trailing class dimension when present.
    if preds.ndim > target.ndim:
        preds = preds[:, 0]

    # Order everything by decreasing score.
    order = torch.argsort(preds, descending=True)
    preds = preds[order]
    target = target[order]
    weight = sample_weights[order] if sample_weights is not None else 1.

    # preds typically has many ties; keep only the index of the last
    # occurrence of each distinct value, plus the end of the curve.
    distinct_idx = torch.where(preds[1:] - preds[:-1])[0]
    thresh_idx = F.pad(distinct_idx, (0, 1), value=target.size(0) - 1)

    target = (target == pos_label).to(torch.long)
    tps = torch.cumsum(target * weight, dim=0)[thresh_idx]

    if sample_weights is not None:
        # Express fps as a cumsum so it stays monotone even with
        # floating point rounding.
        fps = torch.cumsum((1 - target) * weight, dim=0)[thresh_idx]
    else:
        fps = 1 + thresh_idx - tps

    return fps, tps, preds[thresh_idx]
def precision_recall_curve_compute(
    preds: torch.Tensor,
    target: torch.Tensor,
):
    """Compute (precision, recall, thresholds) from scores and binary labels.

    Outputs are ordered by decreasing recall; a final (precision=1, recall=0)
    point is appended so the curve spans the whole recall axis.
    """
    fps, tps, thresholds = binary_clf_curve(
        preds=preds, target=target
    )

    eps = 1e-6  # guards the divisions against zero denominators
    precision = tps / (tps + fps + eps)
    recall = tps / (tps[-1] + eps)

    # Truncate once full recall is attained; everything after the first
    # index where tps reaches its maximum adds no information.
    cutoff = torch.where(tps == tps[-1])[0][0].item() + 1
    keep = slice(0, cutoff)

    # reversed() is called explicitly because negative-stride slices are
    # not yet supported in pytorch.
    one = torch.ones(1, dtype=precision.dtype, device=precision.device)
    zero = torch.zeros(1, dtype=recall.dtype, device=recall.device)
    precision = torch.cat([reversed(precision[keep]), one])
    recall = torch.cat([reversed(recall[keep]), zero])
    thresholds = reversed(thresholds[keep]).clone()
    return precision, recall, thresholds
def average_precision_compute(
    preds: torch.Tensor,
    target: torch.Tensor,
):
    """Average precision: the step-function integral of the PR curve."""
    precision, recall, _ = precision_recall_curve_compute(preds, target)
    # recall is decreasing and the final precision entry is exactly 1 (as
    # guaranteed by precision_recall_curve_compute), so the negated signed
    # rectangle sum below equals the (positive) area under the curve.
    recall_steps = recall[1:] - recall[:-1]
    return -(recall_steps * precision[:-1]).sum()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ref:
- PY2: https://docs.python.org/2/library/functions.html#hex
- PY3: https://docs.python.org/3/library/functions.html#hex
"""
# hex() on ints: lowercase hexadecimal with a sign-aware "0x" prefix.
assert hex(255) == "0xff"
assert hex(-42) == "-0x2a"
# float.hex(): exact binary representation of the float (mantissa p exponent).
assert float.hex(3.14) == "0x1.91eb851eb851fp+1"
assert float.hex(-0.618) == "-0x1.3c6a7ef9db22dp-1"
|
import arcade
import movement_2
import settings
SPRITE_SCALING = 0.25
SCREEN_HEIGHT = 880
SCREEN_WIDTH = 1080
MOVEMENT_SPEED = 10
class Ball:
    """A movable circle that is kept fully inside the screen bounds."""

    def __init__(self, position_x, position_y, change_x, change_y, radius, color):
        # Store every construction parameter as instance state.
        self.position_x = position_x
        self.position_y = position_y
        self.change_x = change_x
        self.change_y = change_y
        self.radius = radius
        self.color = color

    def draw(self):
        """Render the ball at its current position."""
        arcade.draw_circle_filled(self.position_x, self.position_y, self.radius, self.color)

    def update(self):
        """Advance one frame, clamping the ball inside the window."""
        self.position_x += self.change_x
        self.position_y += self.change_y
        # Clamp each coordinate so the circle never crosses a screen edge.
        self.position_x = min(max(self.position_x, self.radius), SCREEN_WIDTH - self.radius)
        self.position_y = min(max(self.position_y, self.radius), SCREEN_HEIGHT - self.radius)
class Maze3View(arcade.View):
    """Third maze level: steer the blue ball through walls to the goal."""

    def on_show(self):
        arcade.set_background_color(arcade.color.RUBY_RED)

    def __init__(self, width=None, height=None, title=None):
        # BUG FIX: arcade.View.__init__ accepts no size/title arguments
        # (those belong to arcade.Window), and this view is constructed as
        # Maze3View() with no arguments by the script entry point. The old
        # signature required three positional args and forwarded them to
        # super().__init__(), which raised a TypeError. The parameters are
        # kept (now optional and ignored) for backward compatibility.
        super().__init__()
        # Create the player ball and the goal marker.
        self.ball = Ball(1080, 200, 0, 0, 15, arcade.color.BLIZZARD_BLUE)
        self.end_ball = Ball(25, 840, 0, 0, 15, arcade.color.GREEN_YELLOW)

    def on_draw(self):
        """Draw the balls, the goal label and every maze wall."""
        arcade.start_render()
        self.ball.draw()
        self.end_ball.draw()
        arcade.draw_text("Press END", -25, 840,
                         arcade.color.BLACK, font_size=35, anchor_x="center")
        # Maze walls (axis-aligned rectangles).
        arcade.draw_rectangle_filled(360, 345, 25, 480, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(360, 780, 25, 200, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(0, 780, 300, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(350, 780, 230, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(248, 660, 25, 250, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(248, 320, 25, 250, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(145, 433, 180, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(145, 548, 180, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(68, 490, 25, 140, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(135, 230, 25, 250, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(68, 320, 25, 250, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(20, 630, 210, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(150, 710, 210, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(500, 780, 300, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(460, 692, 25, 200, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(550, 692, 25, 200, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(960, 780, 400, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(455, 510, 210, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(750, 510, 210, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(700, 610, 25, 210, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(700, 890, 25, 150, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(820, 680, 25, 200, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(960, 590, 25, 200, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(1270, 590, 600, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(960, 200, 25, 480, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(843, 270, 25, 480, arcade.color.BLACK_BEAN)
        # box with gem
        arcade.draw_rectangle_filled(630, 410, 250, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(518, 345, 25, 120, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(743, 345, 25, 120, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(555, 290, 100, 25, arcade.color.BLACK_BEAN)
        arcade.draw_rectangle_filled(705, 290, 100, 25, arcade.color.BLACK_BEAN)

    def on_update(self, delta_time):
        self.ball.update()

    def on_key_press(self, key, modifiers):
        """Start moving the ball on arrow keys; E advances to the next view."""
        # NOTE(review): each arrow key moves the ball in the OPPOSITE
        # direction (LEFT -> +x, UP -> -y, ...). This may be a deliberate
        # reversed-controls gimmick for this level — confirm it is not an
        # inverted-sign bug.
        if key == arcade.key.LEFT:
            self.ball.change_x = MOVEMENT_SPEED
        elif key == arcade.key.RIGHT:
            self.ball.change_x = -MOVEMENT_SPEED
        elif key == arcade.key.UP:
            self.ball.change_y = -MOVEMENT_SPEED
        elif key == arcade.key.DOWN:
            self.ball.change_y = MOVEMENT_SPEED
        elif key == arcade.key.E:
            # NOTE(review): on-screen text says "Press END" but the E key is
            # bound here — confirm which key is intended.
            self.director.next_view()

    def on_key_release(self, key, modifiers):
        """Called whenever a user releases a key; stops movement on that axis."""
        if key == arcade.key.LEFT or key == arcade.key.RIGHT:
            self.ball.change_x = 0
        elif key == arcade.key.UP or key == arcade.key.DOWN:
            self.ball.change_y = 0
if __name__ == "__main__":
from utils import FakeDirector
window = arcade.Window(settings.WIDTH, settings.HEIGHT)
my_view = Maze3View()
my_view.director = FakeDirector(close_on_next_view=True)
window.show_view(my_view)
arcade.run()
|
"""admin URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from mongodb.views import *
# URL table: root login page plus per-document actions keyed by object id.
urlpatterns = [
    path('', login),
    path('deal/<_id>', deal, name='deal'),
    path('certificate/<_id>', certificate, name='certificate'),
    path('refuse/<_id>',refuse, name='refuse'),
]
|
class Person:
    """Simple person record with first name, last name and age."""

    def __init__(self, firstname, lastname, age):
        self.firstname = firstname
        self.lastname = lastname
        self.age = age

    def display(self):
        """Print a formatted summary of this person."""
        print("\n--- x --- X --- x ---")
        # BUG FIX: the %-format arguments must be a single tuple; the original
        # applied % only to self.firstname (three %s specifiers, one value)
        # and passed the other fields to print(), raising TypeError.
        print("First name: %s\nLast name: %s\nAge: %s" % (self.firstname, self.lastname, self.age))
class Student(Person):
    """A person enrolled as a student, with an auto-assigned sequential ID."""

    # Class-wide counter used to hand out sequential student IDs.
    id = 0

    def __init__(self, firstname, lastname, age, recordBook):
        Person.__init__(self, firstname, lastname, age)
        self.studentID = Student.id
        self.recordBook = recordBook
        Student.id += 1

    def display(self):
        """Print the student's personal data, ID and record book."""
        print("\n--- x --- X --- x ---")
        print("First name: {}\nLast name: {}\nAge: {}".format(self.firstname, self.lastname, self.age))
        print("StudentId: %s" % self.studentID)
        # NOTE(review): entries are numbered starting from 2 (index + 2) —
        # looks off by one; preserved as-is, confirm intended numbering.
        entries = [" {}: {}".format(index + 2, grade)
                   for index, grade in enumerate(self.recordBook)]
        print("Record Book:\n" + "\n".join(entries))
a = Student("Vladimir", "Pshenichka", 11, [4, 2, 3, 0])
a.display()
b = Student("Olga", "Slita", 45, [1, 2, 4, 2])
b.display()
c = Student("Igor", "Kalinin", 27, [0, 0, 2, 7])
c.display()
class Professor(Person):
    """A person holding an academic degree, with an auto-assigned ID."""

    # Class-wide counter used to hand out sequential professor IDs.
    id = 0

    def __init__(self, firstname, lastname, age, degree):
        Person.__init__(self, firstname, lastname, age)
        # Take the current counter value as this professor's ID, then bump it.
        self.professorID = Professor.id
        self.degree = degree
        Professor.id += 1

    def display(self):
        """Print the professor's personal data, ID and degree."""
        print("\n--- x --- X --- x ---")
        summary = "First name: {}\nLast name: {}\nAge: {}".format(
            self.firstname, self.lastname, self.age)
        print(summary)
        print("ProfessorId: %s" % self.professorID)
        print("Degree: %s" % self.degree)
d = Professor("Ekaterina", "Gusarova", 37, "master")
d.display()
e = Professor("Alexander", "Belozubov", 40, "master")
e.display()
f = Professor("Alexey", "Pismak", 33, "master")
f.display()
|
__author__ = 'ejullap'

import re
# BUG FIX: import from the public 'datetime' module, not '_datetime' (the
# private C-accelerator module, an unportable implementation detail).
from datetime import datetime
class SpeedingDetector:
    """Parse speed-camera log files and report vehicles over the limit.

    Expected log format (one item per line):
      * "Speed limit <x.y> [mph]" — limit, converted to km/h if in mph
      * "Speed camera <n> ... <pos>" — camera position (two ints per line)
      * "Start of log ..." — begins a new camera's observation block
      * "Vehicle <8-char plate> ... HH:MM:SS." — one sighting
    """

    def __init__(self):
        # BUG FIX: these used to be shared class attributes, so every
        # instance mutated the same lists/dicts; they are now per-instance.
        self.camera_logs = []       # list of {plate: "HH:MM:SS"} dicts, one per camera
        self.speed_limit = float()  # limit in km/h
        self.camera_positions = []  # camera positions in metres
        self.speeding_cars = dict() # plate -> km/h over the limit

    def parse_speed_log(self, log_name):
        """Read a log file, filling speed_limit, camera_positions, camera_logs."""
        current_log = {}
        # BUG FIX: the file handle was never closed; use a context manager.
        with open(log_name, 'r') as speed_log:
            for log_line in speed_log:
                if log_line.startswith("Speed limit"):
                    # BUG FIX: raw string with an escaped dot — the original
                    # pattern "\d+.\d+" let '.' match any character.
                    value = float(re.findall(r"\d+\.\d+", log_line)[0])
                    # mph limits are converted to km/h.
                    self.speed_limit = value * 1.61 if "mph" in log_line else value
                elif log_line.startswith("Speed camera"):
                    numbers = [int(s) for s in log_line.split() if s.isdigit()]
                    # Second integer on the line is the camera position.
                    self.camera_positions.append(numbers[1])
                elif log_line.startswith("Start of log"):
                    if len(current_log) != 0:
                        self.camera_logs.append(current_log)
                    current_log = {}
                elif log_line.startswith("Vehicle"):
                    plate = self.get_license_plate(log_line)
                    current_log[plate] = self.get_timestamp(log_line)
        # BUG FIX: the final camera's log was never appended by the original,
        # so the last block of sightings was silently dropped.
        if current_log:
            self.camera_logs.append(current_log)

    def get_license_plate(self, log_line):
        # Plate occupies fixed columns 8-15 ("Vehicle XXXXXXXX ...").
        return log_line[8:16]

    def get_timestamp(self, log_line):
        # Timestamp is in the last 10 characters, e.g. " 10:23:05.\n".
        return log_line[-10:].strip("\n").strip(".").strip(" ")

    def get_all_speeding_vehicles(self):
        """Check every adjacent camera pair, then print all speeders found."""
        # BUG FIX: the original iterated range(0, len - 2) and therefore
        # skipped the last adjacent camera pair.
        for i in range(len(self.camera_positions) - 1):
            self.get_speeding_vehicle_between_cameras(i, i + 1)
        for vehicle in self.speeding_cars:
            print("Vehicle " + str(vehicle) + " was speeding " + str(self.speeding_cars[vehicle]) + " over limit")

    def get_speeding_vehicle_between_cameras(self, camera1, camera2):
        """Record vehicles whose average speed between two cameras exceeds the limit."""
        distance = self.camera_positions[camera2] - self.camera_positions[camera1]
        for vehicle in self.camera_logs[camera1]:
            camera1_time = self.camera_logs[camera1][vehicle]
            # ROBUSTNESS: skip vehicles never seen by the second camera
            # (the original raised KeyError).
            camera2_time = self.camera_logs[camera2].get(vehicle)
            if camera2_time is None:
                continue
            elapsed = (datetime.strptime(camera2_time, "%H:%M:%S")
                       - datetime.strptime(camera1_time, "%H:%M:%S")).total_seconds()
            # metres over seconds, scaled to km/h.
            speed = (3600 * distance / 1000) / elapsed
            if speed > self.speed_limit:
                self.speeding_cars[vehicle] = round(speed - self.speed_limit, 2)
# Script entry: parse the bundled log file and report every speeding vehicle.
speedDetector = SpeedingDetector()
speedDetector.parse_speed_log("speed_logs.txt")
speedDetector.get_all_speeding_vehicles()
from math import tanh, cosh


def f_and_fprime(x):
    """Return tanh(x) and its derivative sech(x)**2 at x."""
    return tanh(x), (1. / cosh(x)) ** 2


# Newton's method for the root of tanh, starting from x0 = 1.08.
x = 1.08
err = 10. ** -15.
for i in range(10):
    print(i, x)
    f, fprime = f_and_fprime(x)
    step = -f / fprime
    x += step
# BUG FIX: ported from Python 2 print statements to Python 3 (the original
# did not run under python3), and the convergence check now evaluates the
# residual at the final iterate instead of the stale value from before the
# last Newton step.
f, _ = f_and_fprime(x)
print(abs(f) <= err)
print(x)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.