text stringlengths 8 6.05M |
|---|
#!/usr/bin/env python3
import rospy
import numpy as np
from sensor_msgs.msg import Image,CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import tf
from std_msgs.msg import String,Int64MultiArray
from geometry_msgs.msg import *
from vision_msgs.msg import Detection2DArray
def objects_callback(data):
    # Cache the most recent Detection2DArray message; consumed by
    # depth_image_callback on the next depth frame.
    global boundingboxes
    boundingboxes = data
def image_info_callback(data):
    # Store the INVERSE of the focal length taken from the camera
    # intrinsics (data.K[4] is the fy entry of the row-major 3x3 K
    # matrix in sensor_msgs/CameraInfo). Used as 1/K later, i.e. the
    # focal length is recovered by inverting again.
    global K
    K = 1/data.K[4]
def image_callback3(data):
    """Placeholder callback; intentionally does nothing with the message."""
    pass
def get_depth(img, x, y, cx, cy, fx, fy):
    """Back-project pixel (x, y) of a depth image to a 3D point.

    img: 2D depth image, indexed as img[row][col].
    cx, cy: principal point in pixels; fx, fy: focal lengths in pixels.
    Returns [X, Y, Z] where Z is the raw depth value at the pixel.
    """
    unit_scaling = 1
    # Depth sample at the (truncated) pixel coordinates.
    depth = img[int(y)][int(x)]
    # Pinhole model: metric offset per pixel of distance from the centre.
    x_metric = (x - cx) * depth * (unit_scaling / fx)
    y_metric = (y - cy) * depth * (unit_scaling / fy)
    return [x_metric, y_metric, depth * unit_scaling]
def depth_image_callback(data):
    """Estimate a 3D pose for each 2D detection using the depth image.

    In "Detect" mode: back-projects every bounding-box centre to 3D,
    samples pixels along the box to build local x/y axes, derives an
    orientation and broadcasts one TF frame per object, caching new
    objects in L.  In any other mode: re-broadcasts all cached object
    frames and publishes their ids on 'id_array'.
    (Indentation reconstructed from a whitespace-stripped source; the
    trailing 'else' is assumed to pair with the check == "Detect" test.)
    """
    global K, boundingboxes, labels, labels2, check, L
    if (check == "Detect"):
        bridge = CvBridge()
        try:
            # Depth frame decoded as 32-bit float (32FC1 encoding).
            img = bridge.imgmsg_to_cv2(data, "32FC1")
        except CvBridgeError as e:
            print(e)
        if (len(boundingboxes.detections)):
            for i in range(len(boundingboxes.detections)):
                # NOTE: 'id' shadows the builtin of the same name.
                id = i
                totalSampledX, totalSampledY, totalSampledCenter = 0, 0, 0
                box_size_x, box_size_y = boundingboxes.detections[i].bbox.size_x, boundingboxes.detections[i].bbox.size_y
                center = Point()
                xAxis = Point()
                yAxis = Point()
                center.x = boundingboxes.detections[i].bbox.center.x
                center.y = boundingboxes.detections[i].bbox.center.y
                # Back-project the (half-pixel shifted) box centre; 1/K
                # restores the focal length inverted in image_info_callback.
                center3D = get_depth(img, center.x + 0.5, center.y + 0.5, img.shape[1]/2 - 0.5, img.shape[0]/2 - 0.5, 1/K, 1/K)
                for j in range(int(box_size_x)):
                    for k in range(4):
                        # Sample a 4-pixel-wide strip along +x and along -y.
                        xAxis.x, xAxis.y = center.x + j, center.y + k
                        axisSampledX = get_depth(img, xAxis.x, xAxis.y, img.shape[1]/2 - 0.5, img.shape[0]/2 - 0.5, 1/K, 1/K)
                        yAxis.x, yAxis.y = center.x + k, center.y - j
                        axisSampledY = get_depth(img, yAxis.x, yAxis.y, img.shape[1]/2 - 0.5, img.shape[0]/2 - 0.5, 1/K, 1/K)
                        # NOTE(review): these accumulators are re-created on
                        # EVERY iteration, so after the loops each "mean"
                        # holds only the last sample, later divided by the
                        # full sample count. The initialisation probably
                        # belongs before the loops -- confirm intent.
                        axisMeanX = [0, 0, 0]
                        axisMeanX[0] += axisSampledX[0]
                        axisMeanX[1] += axisSampledX[1]
                        axisMeanX[2] += axisSampledX[2]
                        totalSampledX += 1
                        axisMeanY = [0, 0, 0]
                        axisMeanY[0] += axisSampledY[0]
                        axisMeanY[1] += axisSampledY[1]
                        axisMeanY[2] += axisSampledY[2]
                        totalSampledY += 1
                # Divide by the sample counts (reuses 'i', shadowing the
                # detection index; safe only because 'id' was saved above).
                for i in range(len(axisMeanX)):
                    axisMeanX[i] = axisMeanX[i]/totalSampledX
                    axisMeanY[i] = axisMeanY[i]/totalSampledY
                # Local axes relative to the centre point.
                n_xAxis = Vector3(axisMeanX[0] - center3D[0], axisMeanX[1] - center3D[1], axisMeanX[2] - center3D[2])
                n_yAxis = Vector3(axisMeanY[0] - center3D[0], axisMeanY[1] - center3D[1], axisMeanY[2] - center3D[2])
                # z axis as cross product of the sampled x and y axes.
                n_zAxis = np.cross([axisMeanX[0] - center3D[0], axisMeanX[1] - center3D[1],
                                    axisMeanX[2] - center3D[2]], [axisMeanY[0] - center3D[0], axisMeanY[1] - center3D[1], axisMeanY[2] - center3D[2]])
                n_zAxis = Vector3(n_zAxis[0], n_zAxis[1], n_zAxis[2])
                # Homogeneous matrix whose columns are the three axes.
                M = [[n_xAxis.x, n_yAxis.x, n_zAxis.x, 0],
                     [n_xAxis.y, n_yAxis.y, n_zAxis.y, 0],
                     [n_xAxis.z, n_yAxis.z, n_zAxis.z, 0],
                     [0, 0, 0, 1]]
                # Normalise each column entry by that column's value spread.
                for a in range(3):
                    for b in range(3):
                        M[a][b] = M[a][b]/(max(M[0][a], M[1][a], M[2][a]) - min(M[0][a], M[1][a], M[2][a]))
                M = np.asarray(M)
                q = tf.transformations.quaternion_from_matrix(M)
                # NOTE(review): '*=' here is an element-wise numpy product,
                # not quaternion composition
                # (tf.transformations.quaternion_multiply) -- confirm intent.
                q *= tf.transformations.quaternion_from_euler(0, -3.14/2, -3.14/2)
                # Re-normalise to a unit quaternion.
                q = q/(sum(q**2)**0.5)
                br = tf.TransformBroadcaster()
                br.sendTransform((center3D[0], center3D[1], center3D[2]),
                                 q,
                                 rospy.Time.now(),
                                 "object_"+str(id),
                                 "camera_depth_frame")
                # Cache objects not seen before for re-broadcast later.
                if "object_"+str(id) not in labels:
                    L.append([center3D[0], center3D[1], center3D[2], q, "object_"+str(id)])
                    labels.append("object_"+str(id))
    else:
        # Not detecting: keep broadcasting every cached object frame and
        # publish the list of known object ids.
        print(len(L))
        pub = rospy.Publisher('id_array', Int64MultiArray, queue_size=1)
        ar = Int64MultiArray()
        for i in L:
            br = tf.TransformBroadcaster()
            br.sendTransform((i[0], i[1], i[2]),
                             i[3],
                             rospy.Time.now(),
                             i[4],
                             "camera_depth_frame")
            # Numeric id = suffix of "object_<n>".
            ar.data.append(int(i[4][7:]))
            rospy.sleep(0.002)
        pub.publish(ar)
def callback(data):
    # Update the operating mode from the /check topic; "Detect" enables
    # detection processing in depth_image_callback.
    # NOTE(review): 'L' is declared global here but never used.
    global check, L
    check = data.data
def image_callback5(data):
    """Placeholder callback; intentionally does nothing with the message."""
    pass
def listener():
    """Initialise the ROS node, register all subscriptions and spin."""
    rospy.init_node('listener', anonymous=True)
    subscriptions = [
        ("/objects", Detection2DArray, objects_callback),
        ("/check", String, callback),
        ("/camera/depth/image_rect_raw", Image, depth_image_callback),
        ("/camera/depth/camera_info", CameraInfo, image_info_callback),
    ]
    for topic, msg_type, handler in subscriptions:
        rospy.Subscriber(topic, msg_type, handler)
    rospy.spin()
if __name__ == '__main__':
    # NOTE(review): 'global' at module level is a no-op; kept as-is.
    global labels, labels2, L
    # L caches known objects as [x, y, z, quaternion, frame_id] entries;
    # labels holds the corresponding frame names. labels2 is unused here.
    L = []
    labels = []
    labels2 = []
    listener()
|
import cv2 as cv
import matplotlib.pyplot as plt
from copy import deepcopy
def read_image(img):
    """Load an image from the given path (BGR order, as cv.imread returns)."""
    return cv.imread(img)
def create_binary_mask(rgb_image):
    """Return a binary skin mask computed in the YCrCb colour space."""
    # Widely used skin-tone thresholds on the Cr and Cb channels.
    ycrcb = cv.cvtColor(rgb_image, cv.COLOR_RGB2YCrCb)
    return cv.inRange(ycrcb, (0, 133, 77), (255, 173, 127))
def fit_ellipse(mask_img, rgb_image):
    """Fit ellipses to large mask contours and black out everything else.

    mask_img: single-channel binary mask (e.g. from create_binary_mask).
    rgb_image: 3-channel image; modified IN PLACE and also returned.
    Pixels outside every fitted ellipse are set to black.
    """
    contours, hierarchy = cv.findContours(mask_img, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    elipse_mask = deepcopy(rgb_image)
    for contour in contours:
        # Skip small contours; 50 points is the noise threshold.
        if len(contour) > 50:
            ellipse = cv.fitEllipse(contour)
            elipse_mask = cv.ellipse(elipse_mask, ellipse, (255, 255, 255), -1, cv.LINE_AA)
    # The original zeroed pixels with two per-pixel Python loops (the first
    # of which only rewrote the temporary mask, with no effect on the final
    # result). A single vectorised boolean-index assignment is equivalent:
    # a pixel is blacked out exactly when all three of its channels differ
    # from 255 in the ellipse-painted copy.
    outside = (elipse_mask != 255).all(axis=2)
    rgb_image[outside] = 0
    return rgb_image
if __name__ == '__main__':
    # (Polish, roughly: "Leaving this for now so you know how to use the
    # functions" -- i.e. a usage demo.)
    """NA RAZIE TAK ZOSTAWIAM ZEBYS WIEDZIAŁ JAK UZYWAC FUNKCJI"""
    # NOTE(review): hard-coded Windows path -- works only on the author's
    # machine.
    img1 = read_image("D:/Wykrywanie twarzy/face_data/data/11-1m.bmp")
    img1_rgb = cv.cvtColor(img1, cv.COLOR_BGR2RGB)
    mask = create_binary_mask(img1_rgb)
    img_to_show = fit_ellipse(mask, img1_rgb)
    plt.subplot(1, 1, 1)
    plt.imshow(img_to_show)
    plt.show()
|
#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2015, Robotnik Automation SLL
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Robotnik Automation SSL nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rospy
import time
import threading
try:
from topic_health_monitor import TopicHealthMonitor
except ImportError:
from rcomponent.topic_health_monitor import TopicHealthMonitor
from robotnik_msgs.msg import State
DEFAULT_FREQ = 10.0
# Class Template of Robotnik component for Pyhton
class RComponent:
    """Base skeleton for a Robotnik ROS component with a simple state machine."""

    def __init__(self):
        self._node_name = rospy.get_name()
        self.ros_read_params()
        # Measured control-loop frequency in Hz (updated each iteration).
        self._real_freq = 0.0
        # Saves the state of the component
        self._state = State.INIT_STATE
        # Saves the previous state
        self._previous_state = State.INIT_STATE
        # flag to control the initialization of the component
        self._initialized = False
        # flag to control the initialization of ROS stuff
        self._ros_initialized = False
        # flag to control that the control loop is running
        self._running = False
        # Variable used to control the loop frequency
        self._time_sleep = 1.0 / self._desired_freq
        # State msg to publish
        self._msg_state = State()
        # Timer to publish state (period in seconds)
        self._publish_state_timer = 1
        self._t_publish_state = threading.Timer(self._publish_state_timer, self.publish_ros_state)
        # to save the time of the last state transition
        self._t_state_transition = rospy.Time(0)
        # dict to save all the topic health monitor objects
        self._data_health_monitors = {}

    def ros_read_params(self):
        '''
        Gets params from param server
        '''
        try:
            # ~desired_freq: target control-loop frequency in Hz.
            self._desired_freq = rospy.get_param('~desired_freq', default=DEFAULT_FREQ)
        except rospy.ROSException as e:
            rospy.logerr('%s' % (e))
            exit(-1)
        # Checks value of freq
        if self._desired_freq <= 0.0:
            rospy.loginfo('%s::init: Desired freq to %f is not possible. Setting _desired_freq to %f' %
                          (self._node_name, self._desired_freq, DEFAULT_FREQ))
            self._desired_freq = DEFAULT_FREQ
def setup(self):
    '''
    Initializes the device/component (override in subclasses)
    @return: 0 on success (the original docstring said True/False, but the
             method returns 0 and only marks the component as initialized)
    '''
    self._initialized = True
    return 0
def ros_setup(self):
    '''
    Creates and inits ROS components
    @return: 0 (idempotent: returns immediately if already initialized)
    '''
    if self._ros_initialized:
        return 0
    # Publishers
    self._state_publisher = rospy.Publisher('~state', State, queue_size=10)
    # Subscribers
    # topic_name, msg type, callback, queue_size
    # self.topic_sub = rospy.Subscriber('topic_name', Int32, self.topic_cb, queue_size = 10)
    # Service Servers
    # self.service_server = rospy.Service('~service', Empty, self.service_cb)
    # Service Clients
    # self.service_client = rospy.ServiceProxy('service_name', ServiceMsg)
    # ret = self.service_client.call(ServiceMsg)
    self._ros_initialized = True
    # Kick off the self-re-arming state publication timer chain.
    self.publish_ros_state()
    return 0

def shutdown(self):
    '''
    Shutdowns device
    @return: 0 if it's performed successfully, -1 if there's any problem or the component is running
    '''
    if self._running or not self._initialized:
        return -1
    rospy.loginfo('%s::shutdown' % self._node_name)
    # Cancels current timers
    self._t_publish_state.cancel()
    self._state_publisher.unregister()
    self._initialized = False
    return 0

def ros_shutdown(self):
    '''
    Shuts down all ROS components
    @return: 0 if it's performed successfully, -1 if there's any problem or the component is running
    '''
    if self._running or not self._ros_initialized:
        return -1
    # Performs ROS topics & services shutdown
    self._state_publisher.unregister()
    self._ros_initialized = False
    return 0

def stop(self):
    '''
    Stops the main control loop (the original docstring was a copy/paste
    of ros_setup's)
    '''
    self._running = False
    return 0

def start(self):
    '''
    Runs ROS configuration and the main control loop
    @return: 0 if OK
    '''
    self.ros_setup()
    if self._running:
        return 0
    self._running = True
    # Blocks here until the control loop exits.
    self.control_loop()
    return 0
def control_loop(self):
    '''
    Main loop of the component
    Manages actions by state
    '''
    while self._running and not rospy.is_shutdown():
        t1 = time.time()
        # Dispatch to the handler of the current state.
        if self._state == State.INIT_STATE:
            self.init_state()
        elif self._state == State.STANDBY_STATE:
            self.standby_state()
        elif self._state == State.READY_STATE:
            self.ready_state()
        elif self._state == State.EMERGENCY_STATE:
            self.emergency_state()
        elif self._state == State.FAILURE_STATE:
            self.failure_state()
        elif self._state == State.SHUTDOWN_STATE:
            self.shutdown_state()
        self.all_state()
        t2 = time.time()
        tdiff = (t2 - t1)
        # Sleep the remainder of the period to honour _desired_freq.
        t_sleep = self._time_sleep - tdiff
        if t_sleep > 0.0:
            try:
                rospy.sleep(t_sleep)
            except rospy.exceptions.ROSInterruptException:
                rospy.loginfo('%s::control_loop: ROS interrupt exception' % self._node_name)
                self._running = False
        t3 = time.time()
        # NOTE(review): would raise ZeroDivisionError if an iteration
        # measured exactly zero elapsed time (t3 == t1).
        self._real_freq = 1.0/(t3 - t1)
    self._running = False
    # Performs component shutdown
    self.shutdown_state()
    # Performs ROS shutdown
    self.ros_shutdown()
    rospy.loginfo('%s::control_loop: exit control loop' % self._node_name)
    return 0

def ros_publish(self):
    '''
    Publish topics at standard frequency (hook: override in subclasses)
    '''
    return 0
def init_state(self):
'''
Actions performed in init state
'''
if not self._initialized:
self.setup()
else:
self.switch_to_state(State.STANDBY_STATE)
return
def standby_state(self):
    '''
    Actions performed in standby state
    '''
    # Default behaviour: advance straight to READY.
    self.switch_to_state(State.READY_STATE)
    return

def ready_state(self):
    '''
    Actions performed in ready state (hook: override in subclasses)
    '''
    return

def shutdown_state(self):
    '''
    Actions performed in shutdown state
    '''
    # Return to INIT once the device shutdown succeeds.
    if self.shutdown() == 0:
        self.switch_to_state(State.INIT_STATE)
    return

def emergency_state(self):
    '''
    Actions performed in emergency state (hook: override in subclasses)
    '''
    return

def failure_state(self):
    '''
    Actions performed in failure state (hook: override in subclasses)
    '''
    return
def switch_to_state(self, new_state):
    '''
    Performs the change of state, invoking the matching transition hook,
    recording the previous state and the transition timestamp.
    '''
    if self._state == new_state:
        return
    # Map each target state to its transition hook.
    hooks = {
        State.INIT_STATE: self.switch_to_init_state,
        State.STANDBY_STATE: self.switch_to_standby_state,
        State.READY_STATE: self.switch_to_ready_state,
        State.EMERGENCY_STATE: self.switch_to_emergency_state,
        State.FAILURE_STATE: self.switch_to_failure_state,
        State.SHUTDOWN_STATE: self.switch_to_shutdown_state,
    }
    hook = hooks.get(new_state)
    if hook is not None:
        hook()
    self._previous_state = self._state
    self._state = new_state
    rospy.loginfo('%s::switch_to_state: from %s to %s' %
                  (self._node_name, self.state_to_string(self._previous_state), self.state_to_string(self._state)))
    self._t_state_transition = rospy.Time.now()
    return
def switch_to_init_state(self):
    '''
    Function called during the transition to init_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def switch_to_standby_state(self):
    '''
    Function called during the transition to standby_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def switch_to_ready_state(self):
    '''
    Function called during the transition to ready_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def switch_to_emergency_state(self):
    '''
    Function called during the transition to emergency_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def switch_to_failure_state(self):
    '''
    Function called during the transition to failure_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def switch_to_shutdown_state(self):
    '''
    Function called during the transition to shutdown_state
    (hook: override in subclasses; no-op by default)
    '''
    pass

def all_state(self):
    '''
    Actions performed in all states
    '''
    # Runs after every per-state handler in the control loop.
    self.ros_publish()
    return
def state_to_string(self, state):
    '''
    @param state: state to convert
    @type state: State
    @returns the equivalent string of the state, 'UNKNOWN_STATE' otherwise
    '''
    names = {
        State.INIT_STATE: 'INIT_STATE',
        State.STANDBY_STATE: 'STANDBY_STATE',
        State.READY_STATE: 'READY_STATE',
        State.EMERGENCY_STATE: 'EMERGENCY_STATE',
        State.FAILURE_STATE: 'FAILURE_STATE',
        State.SHUTDOWN_STATE: 'SHUTDOWN_STATE',
    }
    return names.get(state, 'UNKNOWN_STATE')
def publish_ros_state(self):
    '''
    Publish the State of the component at the desired frequency
    '''
    self._msg_state.state = self._state
    self._msg_state.state_description = self.state_to_string(self._state)
    self._msg_state.desired_freq = self._desired_freq
    self._msg_state.real_freq = self._real_freq
    if rospy.is_shutdown() == False:
        self._state_publisher.publish(self._msg_state)
        # Re-arm the one-shot timer so publication repeats every
        # _publish_state_timer seconds until ROS shuts down.
        self._t_publish_state = threading.Timer(self._publish_state_timer, self.publish_ros_state)
        self._t_publish_state.start()

def get_state_transition_elapsed_time(self):
    '''
    @returns the elapsed time since the last state transition as rospy.Time.Duration
    '''
    return rospy.Time.now() - self._t_state_transition

def get_state_transition_time(self):
    '''
    @returns the current value of the state transition time
    '''
    return self._t_state_transition

def add_topics_health(self, subscriber, topic_id='', timeout=5.0, required=True):
    '''
    @brief Adds a topic health for the subscriber
    @param subscriber as a rospy.Subscriber to check
    @param topic_id as string. Id associated to the topic. If empty it will use the full topic name
    @param timeout as double. Timeout to consider that the topic is not receiving data anymore
    @param required as bool. Flag to include this topic when it checks the overall status of the topics
    @return 0 if ok
    @return -1 if error
    '''
    if type(subscriber) is not rospy.Subscriber:
        rospy.logerr('%s::add_topics_health: the object subscribed is not the correct type -> %s',
                     self._node_name, type(subscriber))
        return -1
    _topic_id = ''
    if topic_id != '':
        _topic_id = topic_id
    else:
        # Default to the fully-resolved ROS topic name.
        _topic_id = subscriber.resolved_name
    if timeout <= 0:
        rospy.logerr('%s::add_topics_health: timeout (%.lf) has to be >= 0. Setting 1.', self._node_name, timeout)
        timeout = 1.0
    self._data_health_monitors[_topic_id] = TopicHealthMonitor(subscriber, timeout, required)
    rospy.loginfo('%s::add_topics_health: Add topic %s', self._node_name, _topic_id)
    return 0
def tick_topics_health(self, topic_id):
'''
@brief Ticks a topic health as it is receiving data
@return 0 if OK
@return -1 if the id doesn't exist
'''
if topic_id not in self._data_health_monitors:
rospy.logerr('%s::tick_topics_health: the topic %s does not exist!', self._node_name, topic_id)
return -1
self._data_health_monitors[topic_id].tick()
def check_topics_health(self, topic_id=''):
'''
@brief Checks the topic health of all the subscribed topics or specific ones
@param topic as string, topic to check. If empty all the topics are checked as a group
@return true if health is ok, false otherwise
'''
if len(self._data_health_monitors) == 0:
rospy.logwarn('%s::check_topics_health: no topics to check!', self._node_name)
return True
if topic_id != '':
if topic_id not in self._data_health_monitors:
rospy.logerr('%s::check_topics_health: the topic %s does not exist!', self._node_name, topic_id)
return False
else:
return self._data_health_monitors[topic_id].is_receiving()
ret = True
topics_not_received = []
for i in self._data_health_monitors:
if self._data_health_monitors[i].is_required() and self._data_health_monitors[i].is_receiving() == False:
ret = False
topics_not_received.append(i)
if ret == False:
rospy.logwarn_throttle(5, '%s::check_topics_health: topic(s) %s not receiving' %
(self._node_name, str(topics_not_received)))
return ret
"""
def topic_cb(self, msg):
'''
Callback for inelfe_video_manager state
@param msg: received message
@type msg: std_msgs/Int32
'''
# DEMO
rospy.loginfo('RComponent:topic_cb')
def service_cb(self, req):
'''
ROS service server
@param req: Required action
@type req: std_srv/Empty
'''
# DEMO
rospy.loginfo('RComponent:service_cb')
"""
'''
def main():
rospy.init_node("rcomponent")
_name = rospy.get_name().replace('/','')
arg_defaults = {
'topic_state': 'state',
'_desired_freq': DEFAULT_FREQ,
}
args = {}
for name in arg_defaults:
try:
if rospy.search_param(name):
args[name] = rospy.get_param('~%s'%(name)) # Adding the name of the node, because the para has the namespace of the node
else:
args[name] = arg_defaults[name]
#print name
except rospy.ROSException as e:
rospy.logerr('%s: %s'%(e, _name))
rc_node = RComponent(args)
rospy.loginfo('%s: starting'%(_name))
rc_node.start()
if __name__ == "__main__":
main()
'''
|
import pytest
from aws_lambda_context import LambdaContext
from moto import mock_ecs
from src.handler import *
@mock_ecs
def test_that_the_lambda_handler_succeeds_with_context(ecs, sns_event):
    # Minimal Lambda context; only the fields the handler reads are set.
    lambda_context = LambdaContext()
    lambda_context.function_name = "lambda_handler"
    lambda_context.aws_request_id = "abc-123"
    # NOTE(review): 'os' (and 'boto3' in the fixtures below) come in via
    # 'from src.handler import *' -- explicit imports would be clearer.
    ecs_cluster_name = os.environ.get("ecs_cluster_name", "telemetry")
    riemann_consumer_ecs_service_name = "riemann-consumer"
    ecs.create_cluster(clusterName=ecs_cluster_name)
    ecs.create_service(
        cluster=ecs_cluster_name, serviceName=riemann_consumer_ecs_service_name
    )
    response = lambda_handler(event=sns_event, context=lambda_context)
    assert response["success"] is True
    assert response["serviceName"] == riemann_consumer_ecs_service_name
    assert response["status"] == "ACTIVE"

@mock_ecs
def test_that_the_lambda_handler_succeeds_without_context(ecs, sns_event):
    # Same happy path, but the handler must tolerate context=None.
    ecs_cluster_name = os.environ.get("ecs_cluster_name", "telemetry")
    riemann_consumer_ecs_service_name = "riemann-consumer"
    ecs.create_cluster(clusterName=ecs_cluster_name)
    ecs.create_service(
        cluster=ecs_cluster_name, serviceName=riemann_consumer_ecs_service_name
    )
    response = lambda_handler(event=sns_event, context=None)
    assert response["success"] is True
    assert response["serviceName"] == riemann_consumer_ecs_service_name
    assert response["status"] == "ACTIVE"

@mock_ecs
def test_that_the_lambda_handler_fails_when_providing_an_invalid_ecs_service(
    ecs, sns_event
):
    # No riemann service exists in the cluster, so the handler should
    # report failure with a ServiceNotFoundException message.
    ecs_cluster_name = os.environ.get("ecs_cluster_name", "telemetry")
    ecs.create_cluster(clusterName=ecs_cluster_name)
    ecs.create_service(
        cluster=ecs_cluster_name, serviceName="not-a-riemann-service-name"
    )
    response = lambda_handler(event=sns_event, context=None)
    assert response["success"] is False
    assert "ServiceNotFoundException" in response["errorMessage"]
@pytest.fixture(autouse=True)
def initialise_environment_variables():
    # Applied to every test: points the handler at the test cluster name.
    os.environ["ecs_cluster_name"] = "test-cluster"

@pytest.fixture(scope="function")
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"
    os.environ["AWS_REGION"] = "eu-west-2"
    os.environ["AWS_DEFAULT_REGION"] = "eu-west-2"

@pytest.fixture(scope="function")
def ecs(aws_credentials):
    # moto-backed ECS client, torn down after each test function.
    with mock_ecs():
        yield boto3.client("ecs", region_name="eu-west-2")

@pytest.fixture
def sns_event():
    # Canonical SNS -> Lambda event envelope (AWS sample payload).
    return {
        "Records": [
            {
                "EventVersion": "1.0",
                "EventSubscriptionArn": "arn:aws:sns:us-east-2:123456789012:sns-lambda:21be56ed-a058-49f5-8c98-aedd2564c486",
                "EventSource": "aws:sns",
                "Sns": {
                    "SignatureVersion": "1",
                    "Timestamp": "2019-01-02T12:45:07.000Z",
                    "Signature": "tcc6faL2yUC6dgZdmrwh1Y4cGa/ebXEkAi6RibDsvpi+tE/1+82j...65r==",
                    "SigningCertUrl": "https://sns.us-east-2.amazonaws.com/SimpleNotificationService-ac565b8b1a6c5d002d285f9598aa1d9b.pem",
                    "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e",
                    "Message": "Hello from SNS!",
                    "MessageAttributes": {
                        "Test": {"Type": "String", "Value": "TestString"},
                        "TestBinary": {"Type": "Binary", "Value": "TestBinary"},
                    },
                    "Type": "Notification",
                    "UnsubscribeUrl": "https://sns.us-east-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-2:123456789012:test-lambda:21be56ed-a058-49f5-8c98-aedd2564c486",
                    "TopicArn": "arn:aws:sns:us-east-2:123456789012:sns-lambda",
                    "Subject": "TestInvoke",
                },
            }
        ]
    }
|
# Generated by Django 3.2 on 2021-04-30 22:20
from django.db import migrations, models
# Auto-generated by Django; part of the schema history -- edit with care.
class Migration(migrations.Migration):

    dependencies = [
        ('movie', '0007_quote'),
    ]

    operations = [
        # Admin display names (Russian: "Film quote" / "Film quotes").
        migrations.AlterModelOptions(
            name='quote',
            options={'verbose_name': 'Киноцитата', 'verbose_name_plural': 'Киноцитаты'},
        ),
        # New optional text field: who said the quote (Russian verbose name).
        migrations.AddField(
            model_name='quote',
            name='hero',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Кто сказал'),
        ),
    ]
|
#!/usr/bin/env python
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
* https://stackoverflow.com/questions/5181320/under-what-circumstances-are-rmul-called
(mul) a * b
(rmul) c * 2
(rmul) d * 6
(rmul) e * 24
120
* https://jameshensman.wordpress.com/2010/06/14/multiple-matrix-multiplication-in-numpy/
* http://www.lfd.uci.edu/~gohlke/code/transformations.py.html
"""
from operator import mul
from functools import reduce
class A(object):
    """Toy operand that logs whether __mul__ or __rmul__ handles a product."""

    @classmethod
    def prod(cls, a, b):
        """Multiply two operands via the * operator."""
        return a * b

    def __init__(self, name, val):
        self.name = name
        self.val = val

    def __repr__(self):
        return self.name

    def __mul__(self, rhs):
        # A * A: both operands are A instances; result is a plain number.
        print("(mul) %r * %r " % (self, rhs))
        return self.val * rhs.val

    def __rmul__(self, lhs):
        # number * A: Python falls back to the right operand's __rmul__.
        print("(rmul) %r * %r " % (self, lhs))
        return self.val * lhs
if __name__ == '__main__':
    # reduce(mul, ...) folds left to right: ((((a*b)*c)*d)*e).
    # The first product a*b hits A.__mul__ and yields an int (1*2); every
    # later step is int * A, dispatching to A.__rmul__ -- which matches the
    # trace shown in the module docstring.  Final value: 1*2*3*4*5 = 120.
    a = A("a", 1)
    b = A("b", 2)
    c = A("c", 3)
    d = A("d", 4)
    e = A("e", 5)
    res = reduce(mul, [a, b, c, d, e])
    print(res)
|
hrs = raw_input("Enter Hours:")
try:
h = float(hrs)
except:
h = float(raw_input("Enter a numeric hours:"))
rate = raw_input("Enter Hourly Rate:")
try:
r = float(rate)
except:
r = float(raw_input("Enter a numeric rate:"))
if h <= 40:
pay = h * r
else:
pay = 40 * r + (h - 40) * r * 1.5
print pay
|
# Demonstrates string repetition with the * operator.
print("Or")
a = "Or "
print(a*3)    # "Or Or Or "
print(a*100)  # "Or " repeated 100 times
|
#!/usr/bin/ipython
import GreenF
import sys
import DensityN
import numpy as np
import matplotlib.pyplot as plt
# Interactive chooser for one of the predefined parameter sets
# (each row of 'sets' is [m, alpha, beta, B0]).
print("please select variable set:\n")
print("1) m=1, alpha=0, beta=0, B0=1")
print("2) m=1, alpha=1, beta=0, B0=0")
print("3) m=1, alpha=0, beta=1, B0=0")
print("4) m=10, alpha=1, beta=0, B0=0")
print("5) m=10, alpha=1, beta=0, B0=1")
#parameter
E = np.linspace(-7, 7, 500)
sets = np.array([
    [1.0, 1e-4, 0.0, 1.0],
    [1.0, 1.0, 0.0, 0.0],
    [1.0, 0.0, 1.0, 0.0],
    [10.0, 1.0, 0.0, 0.0],
    [10.0, 1.0, 0.0, 1.0],
    [1.0, 1.0, 0.0, 1.0]])
# Pick the set index from stdin or argv; 5 args = overwrite set 0 with
# custom parameters.
if len(sys.argv) == 1:
    i = int(input("set=")) - 1
elif len(sys.argv) == 2:
    i = int(sys.argv[1]) - 1
elif len(sys.argv) == 5:
    i = 0
    for j in range(4):
        sets[i, j] = float(sys.argv[j+1])
# NOTE(review): the loop below rebinds 'i' over range(5), so the selection
# made above is ignored and ALL five sets are plotted -- confirm intent.
for i in range(5):
    # 'a' and 'b' presumably compute the density of states from the Green's
    # function and directly -- see GreenF/DensityN modules (not shown here).
    a = GreenF.GF(sets[i,0], sets[i,1], sets[i,2], sets[i,3], None, eta = 1e-3, R_to_0 = 1e-3)
    b = DensityN.DOS(sets[i,0], sets[i,1], sets[i,2], sets[i,3])
    DOS_G = np.zeros(E.shape)
    DOS_Gmi = np.zeros(E.shape)
    DOS_Gpl = np.zeros(E.shape)
    DOS_N = np.zeros(E.shape, dtype=np.complex_)
    DOS_Npl = np.zeros(E.shape, dtype=np.complex_)
    DOS_Nmi = np.zeros(E.shape, dtype=np.complex_)
    # Evaluate every curve over the energy grid.
    for j in range(E.shape[0]):
        DOS_G[j] = a.N(E[j])
        DOS_Gmi[j] = a.Nmi(E[j])
        DOS_Gpl[j] = a.Npl(E[j])
        DOS_Npl[j] = b.Npl(E[j])
        DOS_Nmi[j] = b.Nmi(E[j])
        DOS_N[j] = b.N(E[j])
    plt.plot(E, DOS_G, 'r:', linewidth=1, label='green ')
    plt.plot(E, DOS_Gpl, 'k:', linewidth=1, label='green +')
    plt.plot(E, DOS_Gmi, 'g:', linewidth=1, label='green -')
    plt.plot(E, np.real(DOS_N), 'r--', label="DOS")
    plt.plot(E, np.real(DOS_Npl), 'k--', label='k+ part')
    plt.plot(E, np.real(DOS_Nmi), 'g--', label='k- part')
    plt.title("set "+str(i+1)+") "+"m="+str(sets[i,0])+"; alpha="+str(sets[i,1])+"; beta="+str(sets[i,2])+"; B0="+str(sets[i,3]))
    plt.ylim([-2,7])
    plt.legend()
    #plt.show()
    # One PDF per parameter set.
    plt.savefig("DOS_%d.pdf"%(i+1))
    plt.clf()
|
import sys
from tree import binary_tree
# Build a binary tree from the sample values and print it level by level.
input_list = [1, -2, 9, -3, 5, -1, 11]
tree = binary_tree(input_list)
print("LIST OF ALL LEVELS:")
tree.print_tree()
#!/usr/bin/env python
# coding: utf-8
# # # Python Program to check values of Riemann's Zeta-Function
#
# Powered by: Dr. Hermann Völlinger, DHBW Stuttgart(Germany); September 2020
#
# See https://en.wikipedia.org/wiki/Riemann_zeta_function
#
# YouTube Video: https://www.youtube.com/watch?v=sZhl6PyTflw&vl=en
#
# The Riemann zeta function or Euler–Riemann zeta function, ζ(s), is a function of a complex variable s that analytically
# continues the sum of the Dirichlet serie which converges when the real part of s is greater than 1.
#
# More general representations of ζ(s) for all s are given below. The Riemann zeta function plays a pivotal role in
# analytic number theory and has applications in physics, probability theory, and applied statistics.
# As a function of a real variable, Leonhard Euler first introduced and studied it in the first half of the eighteenth century
# without using complex analysis, which was not available at the time. Bernhard Riemann's 1859 article "On the Number of
# Primes Less Than a Given Magnitude" extended the Euler definition to a complex variable, proved its meromorphic continuation
# and functional equation, and established a relation between its zeros and the distribution of prime numbers.[2]
#
# The values of the Riemann zeta function at even positive integers were computed by Euler. The first of them, ζ(2), provides
# a solution to the Basel problem. In 1979 Roger Apéry proved the irrationality of ζ(3). The values at negative integer
# points, also found by Euler, are rational numbers and play an important role in the theory of modular forms.
# Many generalizations of the Riemann zeta function, such as Dirichlet series, Dirichlet L-functions and L-functions, are known.
#
#
#
# In[1]:
# In[1]:
# Escaped LaTeX for the Dirichlet-series definition of zeta, Re(s) > 1.
print("***LATEX syntax of zeta-fct for re(z)>1: '$ displaystyle \\zeta(s)=\sum_{n=1}^\infty 1/n^s $'***")
# In[2]:
print ("************************************************************************")
print ("****** The bridge between zeta-fct in 'Complex Analysis' and prim- *****")
print ("****** numbers in 'Number Theory' is given by EulerProduct formula *****")
print ("************************************************************************")
# NOTE: Image(...) only renders inside a Jupyter notebook; as a plain
# script these expressions are evaluated and discarded.
from IPython.display import Image
Image('EulerProduct.jpg')
# In[3]:
print ("***** Here is the example of a plot of the zeta function ******")
print ("*** See the non-trival zeros at 'critical' line real(z)=0.5 ***")
print ("***************************************************************")
from IPython.display import Image
Image('riemann-zeta1.jpg')
# In[4]:
print ("*** Here is the example of a plot of the zeta function in more detail***")
print ("*** See two zeros at at the points z=0.5 + 14,12...z=0.5-14,12...***")
print ("*********************************************************************")
from IPython.display import Image
Image('riemann-zeta2.jpg')
# In[5]:
# Import libaries
from itertools import count, islice
from scipy.special import binom
# In[6]:
# Program/Source Code
# Here is the source code of a Python program to calculate the zeta function values
# The program output is shown below.
def zeta(s, t = 100):
    """Approximate the Riemann zeta function at s.

    Uses a globally convergent alternating series (eta-function form with
    binomial forward differences), summing the first t outer terms.
    s == 1 is the pole: returns +inf.
    """
    if s == 1:
        return float("inf")
    # Lazy stream of outer terms; each one is a weighted binomial sum.
    terms = (
        (1 / 2 ** (n + 1))
        * sum((-1) ** k * binom(n, k) * (k + 1) ** -s for k in range(n + 1))
        for n in count(0)
    )
    # Take the first t terms and undo the eta-to-zeta conversion factor.
    return sum(islice(terms, t)) / (1 - 2 ** (1 - s))
# In[7]:
# NOTE: bare zeta(...) expressions only display a value in a notebook; as
# a plain script their results are discarded.
print ("value of zeta(2)=pi²/6 ~ 1,644934")
zeta(2)
# In[8]:
#pi * pi / 6
# In[9]:
print ("value of zeta(4)=(pi²)*(pi²)/90 ~ 1,0823236")
zeta(4)
# In[10]:
# s = 1 is the pole of zeta; the function returns inf by construction.
zeta(1)
# In[11]:
zeta(0)
# In[12]:
print("zeta(-1)= -1/12 ~ -0,0833333333")
zeta(-1)
# In[13]:
print("****'Trival' zeros are for z=-2,-4,-6,...****")
# In[14]:
zeta(-2)
# In[15]:
zeta(-4)
# In[16]:
zeta(-6)
# In[17]:
import time
print("****current date and time **************")
print("Date and Time:",time.strftime("%d.%m.%Y %H:%M:%S"))
print("end")
"""
Math 560
Project 2
Fall 2021
project2.py
Partner 1: QiangQiang Liu
Partner 2: Zelin Jin
Date: 11/01/2021
"""
# Import math and other p2 files.
import math
from p2tests import *
"""
BFS/DFS function
INPUTS
maze: A Maze object representing the maze.
alg: A string that is either 'BFS' or 'DFS'.
OUTPUTS
path: The path from maze.start to maze.exit.
"""
"""
find_path function
INPUTS
struct: The target struct to store the values.
maze: A Maze object representing the maze.
Functionality: Find the path of the maze by using the given struct and setting the prev parameter for nodes on path.
"""
def find_path(struct, maze):
    """Relax shortest-path distances from maze.start via the given container.

    The container determines the search order (stack -> DFS, queue -> BFS)
    and only needs push/pop/isEmpty.  Each vertex keeps its best known
    distance in .dist and its predecessor on that path in .prev.
    """
    # Seed the container with the start node's neighbours.
    for neighbor in maze.start.neigh:
        neighbor.dist = 1
        neighbor.prev = maze.start
        struct.push(neighbor)
    # Keep relaxing until no candidate vertices remain.
    while not struct.isEmpty():
        node = struct.pop()
        for neighbor in node.neigh:
            # Adopt the route through 'node' if it is strictly shorter.
            if neighbor.dist > node.dist + 1:
                neighbor.dist = node.dist + 1
                neighbor.prev = node
                struct.push(neighbor)
def bdfs(maze, alg):
    """Search *maze* with BFS or DFS and return the start-to-exit path.

    alg: 'BFS' (queue frontier) or 'DFS' (stack frontier).
    Returns the list of node ranks from maze.start to maze.exit.
    Raises Exception for any other alg value.
    """
    if alg not in ('BFS', 'DFS'):
        raise Exception('Incorrect alg! Need BFS or DFS!')
    # Reset the per-node search state for every cell of the grid.
    cell_count = len(maze.maze) * len(maze.maze[0])
    for idx in range(cell_count):
        node = maze.adjList[idx]
        node.dist = math.inf
        node.prev = None
        node.visited = False
    # The start node is its own origin.
    maze.start.dist = 0
    maze.start.visited = True
    # DFS explores via a stack, BFS via a queue; the relaxation is shared.
    frontier = Stack() if alg == 'DFS' else Queue()
    find_path(frontier, maze)
    # Walk the predecessor chain backwards from the exit, then flip it.
    path = []
    cursor = maze.exit
    while cursor is not None:
        path.append(cursor.rank)
        cursor = cursor.prev
    path.reverse()
    return path
"""
Main function.
"""
if __name__ == "__main__":
    # Run the full maze test suite with verbose output.
    testMazes(True)
|
#!/usr/bin/env python
from collections import namedtuple
class Student:
    """A lab student with term-block experiment lists and a pairing number.

    Students sort by pair_number (see __lt__), so a list of Students can be
    sorted to group lab partners together.
    """
    def __init__(self, name, number, tb1_experiments, tb2_experiments, pair_number, lang="en"):
        self.name = name
        self.number = number
        self.tb1_experiments = tb1_experiments
        # BUG FIX: tb2_experiments was accepted by the constructor but never
        # stored, so the attribute was silently missing on every instance.
        self.tb2_experiments = tb2_experiments
        self.pair_number = pair_number
        self.lang = lang

    def __lt__(self, other):
        """Order students by their pair number."""
        return self.pair_number < other.pair_number
# Experiment record. The defaults tuple (via __new__.__defaults__) applies to
# the LAST five fields: count=4, writeup=True, reserve=False, fixed=False,
# undesirable=False — number, title and acronym remain required.
Experiment = namedtuple("Experiment", ["number", "title", "acronym", "count", "writeup", "reserve", "fixed", "undesirable"])
Experiment.__new__.__defaults__ = (4, True, False, False, False)
# Length of the teaching period — presumably in weeks; TODO confirm the unit.
teaching_length = 11
|
from flask import Flask
from flask_restful import Resource, Api
app = Flask(__name__)
api = Api(app)
class product(Resource):
    """REST resource that exposes the static product catalogue."""
    def get(self):
        """Return the product list as a JSON-serialisable dict."""
        catalogue = ['ice cream',
                     'chocolate',
                     'Fruit']
        return {'products': catalogue}
# BUG FIX: flask_restful's Api exposes 'add_resource' (singular); the
# original 'add_resources' call raised AttributeError at startup.
api.add_resource(product, '/')
if __name__ == '__main__':
    # NOTE(review): debug=True and port 80 are development settings only.
    app.run(host = '0.0.0.0', port = 80, debug = True)
import c4d
import random
from c4d import gui
def main():
    """Jitter every point of the editable "Cube" polygon object, then
    refresh the viewport.

    Relies on the global `doc` — presumably injected by Cinema 4D's Script
    Manager environment; confirm when moving this code elsewhere.
    """
    cube = doc.SearchObject("Cube")
    # Displace each point by a random amount in [0, 10); Vector.__sub__
    # subtracts the scalar from every component of the point.
    point_count = cube.GetPointCount()
    for index in range(point_count):
        cube.SetPoint(index, cube.GetPoint(index) - random.random() * 10)
    # Tell the object its geometry changed, then update the OpenGL viewport.
    cube.Message(c4d.MSG_UPDATE)
    c4d.EventAdd()
# Entry point — presumably executed from Cinema 4D's Script Manager, which
# is what provides the global `doc` used above; confirm the host environment.
if __name__=='__main__':
    main()
#SEE SCREENSHOTS IN POINTLEVELMANIPULATION FOLDER
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__mtime__ = '2019/5/9'
from selenium import webdriver
from common.base import Base
import time
class AddBugPage(Base):
    """Page object for ZenTao's "create bug" form.

    Each loc_* attribute is a (strategy, value) locator tuple consumed by
    the Base helpers (click, sendKeys, findElement, is_text_in_element).
    """
    loc_test=("link text","测试")
    loc_bug =("xpath",".//*[@id='subNavbar']/ul/li[1]/a")
    # loc_add =("xpath",".//*[@id='mainContent']/div[2]/div[2]/p/a")
    loc_add = ("xpath", ".//*[@id='mainMenu']/div[3]/a[3]/i")
    loc_mokuai=("xpath",".//*[@id='module_chosen']/a/span")  # module dropdown
    loc_mokuai2=("xpath",".//*[@id='module_chosen']/div/ul/li[2]")  # module option
    loc_project = ("xpath",".//*[@id='project_chosen']/a/span")  # project dropdown
    loc_project2=("xpath",".//*[@id='project_chosen']/div/ul/li")  # project option
    loc_truck = ("xpath",".//*[@id='openedBuild_chosen']/ul")  # affected-build dropdown
    loc_truck2=("xpath",".//*[@id='openedBuild_chosen']/div/ul/li")  # build option
    loc_leixing1=("xpath",".//*[@id='type_chosen']/a/div[1]")  # bug-type dropdown
    loc_leixing2=("xpath",".//*[@id='type_chosen']/div/ul/li[2]")  # bug-type option
    loc_biaoti=("id","title")  # bug title input
    loc_content=("xpath","html/body")  # rich-text body (inside the editor iframe)
    loc_save=("id","submit")
    loc_yzTitle=("xpath",".//*[@id='bugList']/tbody/tr[1]/td[4]/a")  # newest bug's title cell
    # def __init__(self,driver:webdriver.Firefox):
    # self.driver=driver
    # self.zentao = Base(self.driver)
    def add_bug(self,title):
        """Navigate to the bug form, fill it in, and submit a bug titled *title*."""
        self.click(self.loc_test)
        self.click(self.loc_bug)
        self.click(self.loc_add)
        self.click(self.loc_mokuai)
        self.click(self.loc_mokuai2)
        self.click(self.loc_truck)
        self.click(self.loc_truck2)
        self.click(self.loc_project)
        self.click(self.loc_project2)
        self.click(self.loc_leixing1)
        self.click(self.loc_leixing2)
        self.sendKeys(self.loc_biaoti,title)
        frame = self.findElement(("class name","ke-edit-iframe"))
        # The rich-text editor does not support clear(), so we just type into it.
        # NOTE(review): switch_to_frame / switch_to_default_content are the
        # deprecated Selenium spellings of driver.switch_to.frame(...) and
        # driver.switch_to.default_content() — confirm the pinned version.
        self.driver.switch_to_frame(frame)
        self.sendKeys(self.loc_content,"测试测试测试测试测试")
        self.driver.switch_to_default_content()
        self.click(self.loc_save)
    def is_add_bug_success(self,_text):
        """Return True if *_text* appears in the newest bug-list row's title."""
        return self.is_text_in_element(self.loc_yzTitle,_text)
if __name__ == '__main__':
    # Manual smoke test: log in, file a bug with a timestamped title, then
    # verify it appears at the top of the bug list.
    driver = webdriver.Firefox()
    bug = AddBugPage(driver)
    from page.login_page import LoginPge
    login = LoginPge(driver)
    login.login()
    # The timestamp keeps repeated runs from colliding on the same title.
    timestr = time.strftime("%Y_%m_%d_%H_%M_%S")
    title = "测试提交的bug"+timestr
    bug.add_bug(title)
    result = bug.is_add_bug_success(title)
    print(result)
|
# Pre-allocate empty slots for the eleven SQL queries filled in below.
queries = [""] * 11
# NOTE(review): PostgreSQL-flavoured SQL against an airline schema
# (airports, airlines, flights, customers, flewon) — confirm the target DB.
### 0. List all airport codes and their cities. Order by the city name in the increasing order.
### Output column order: airportid, city
queries[0] = """
select airportid, city
from airports
order by city;
"""
### 1. Write a query to find the names of the customers whose names are at least 15 characters long, and the second letter in the name is "l".
### Order by name.
queries[1] = """
select name
from customers
where name like '_l%' and length(name)>14 order by name;
"""
### 2. Write a query to find any customers who flew on their birthday. Hint: Use "extract" function that operates on the dates.
### Order output by Customer Name.
### Output columns: all columns from customers
queries[2] = """
select customers.customerid,name,birthdate,frequentflieron
from customers,flewon
where customers.customerid=flewon.customerid and extract(day FROM birthdate)=extract(day FROM flewon.flightdate) and extract(month FROM birthdate)=extract(month FROM flewon.flightdate) order by customers.name;
"""
### 3. Write a query to generate a list: (source_city, source_airport_code, dest_city, dest_airport_code, number_of_flights) for all source-dest pairs with at least 2 flights.
### Order first by number_of_flights in decreasing order, then source_city in the increasing order, and then dest_city in the increasing order.
### Note: You must generate the source and destination cities along with the airport codes.
queries[3] = """
select S.city as source_city, source_airport_code, D.city as dest_city, dest_airport_code, number_of_flights
from (select source as source_airport_code, dest as dest_airport_code, count(*) as number_of_flights
from flights
group by source,dest) as I, airports as S, airports as D
where I.number_of_flights>=2 and S.airportid=I.source_airport_code and D.airportid=I.dest_airport_code
order by number_of_flights desc, source_city asc, dest_city asc;
"""
### 4. Find the name of the airline with the maximum number of customers registered as frequent fliers.
### Output only the name of the airline. If multiple answers, order by name.
queries[4] = """
select airlines.name
from (select customers.frequentflieron , count(*)
from customers
group by customers.frequentflieron)as nums(airlineid, num),airlines
where num=(select max(num) from
(select customers.frequentflieron , count(*)
from customers
group by customers.frequentflieron)as nums(airlineid, num)) and airlines.airlineid=nums.airlineid
"""
### 5. For all flights from OAK to ATL, list the flight id, airline name, and the
### duration in hours and minutes. So the output will have 4 fields: flightid, airline name,
### hours, minutes. Order by flightid.
### Don't worry about timezones -- assume all times are reported using the same timezone.
queries[5] = """
select flightid, airlines.name, extract(hour from flights.local_arrival_time - flights.local_departing_time), extract(minute from flights.local_arrival_time - flights.local_departing_time)
from flights, airlines
where flights.source='OAK' and flights.dest='ATL' and flights.airlineid=airlines.airlineid;
"""
### 6. Write a query to find all the empty flights (if any); recall that all the flights listed
### in the flights table are daily, and that flewon contains information for a period of 9
### days from August 1 to August 9, 2016. For each such flight, list the flightid and the date.
### Order by flight id in the increasing order, and then by date in the increasing order.
# NOTE(review): this entry contains DDL (create table) plus inserts, so it is
# not idempotent — rerunning without dropping the helper table will fail.
queries[6] = """
create table date(realdate date);
insert into date values(to_date('2016-08-01', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-02', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-03', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-04', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-05', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-06', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-07', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-08', 'YYYY-MM-DD'));
insert into date values(to_date('2016-08-09', 'YYYY-MM-DD'));
(select flightid,realdate
from flights,date)
except
(select flightid,flightdate
from flewon)
order by flightid asc, realdate asc;
"""
###select flightid
###from flights, flewon
###where flights;
### 7. Write a query to generate a list of customers who don't list Southwest as their frequent flier
### airline, but actually flew more (by number of flights) on Southwest than their preferred airline.
### Output columns: customerid, customer_name
### Order by: customerid
queries[7] = """
select customerid,name
from customers as C
where C.frequentflieron!='SW' and
(select count(*)
from (flights natural join flewon) as F
where F.customerid=C.customerid and F.airlineid='SW')
>=all
(select count(*)
from (flights natural join flewon) as F
where F.customerid=C.customerid and F.airlineid!='SW'
group by F.airlineid)
order by customerid
"""
### 8. Write a query to generate a list of customers who flew twice on two consecutive days, but did
### not fly otherwise in the 10 day period. The output should be simply a list of customer ids and
### names. Make sure the same customer does not appear multiple times in the answer.
### Order by the customer name.
queries[8] = """
select distinct C.customerid,C.name
from customers as C
where
2=(select count(*)
from flewon
where c.customerid=flewon.customerid
group by customerid) and
1=((select extract(day from flightdate) from flewon as F
where F.customerid=C.customerid
limit 1 offset 0)-
(select extract(day from flightdate)from flewon as F
where F.customerid=C.customerid
limit 1 offset 1))
order by C.name
"""
###select customerid, name
###from customers as C, flewon as F
###where C.customerid=F.customerid and
###2=(select count(*)
###from flewon
###group by customerid)and
### 9. Write a query to find the names of the customer(s) who visited the most cities in the 10 day
### duration. A customer is considered to have visited a city if he/she took a flight that either
### departed from the city or landed in the city.
### Output columns: name
### Order by: name
queries[9] = """
with L(id,location) as
((select flewon.customerid as id, flights.source as location
from flewon, flights
where flewon.flightid=flights.flightid)
UNION
(select flewon.customerid as id,flights.dest as location
from flewon, flights
where flewon.flightid=flights.flightid))
select distinct name
from customers as C,L
where C.customerid=L.id and
(select count(distinct location)
from L
where C.customerid=L.id
group by L.id)
>=all
(select count(distinct location)
from L
group by L.id)
"""
### 10. Write a query that outputs a list: (AirportID, Total-Number-of-Flights, Airport-rank), where
### we rank the airports
### by the total number of flights that depart that airport. So the airport with the maximum number
### of flights departing gets rank 1, and so on. If two airports tie, then they should
### both get the same rank, and the next rank should be skipped.
### Order the output in the increasing order by rank.
queries[10] = """
select airportid, num, rank() over (order by N.num desc)
from(select airportid, count(F.*)
from airports as A,flights as F
where A.airportid=F.source
group by airportid) as N(airportid, num)
order by rank
"""
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 3 17:04:17 2018
@author: daehyun
"""
from pickle import loads
from pymongo import MongoClient
from cytoolz import pipe, filter, partial
from pandas import DataFrame
import matplotlib.pyplot as plt
# %%
db = MongoClient('lithium.local')['FERMI_20144077']
# Runs excluded from the analysis, annotated by acquisition batch.
bad_runs = (
    178, # good1
    464, 468, # good2
    186, 196, *range(198, 204), # alt1
    *range(266, 272), *range(292, 315), # alt2
    359) # alt3
ignore_bad_runs = partial(filter, lambda n: n not in bad_runs)
# Runs of the 'good2' batch, with bad runs removed.
runs = pipe(
    range(463, 487), # good2
    ignore_bad_runs, sorted, tuple)
# Pull the pre-reduced difference images ('dfimgs') for those runs.
imgs = pipe(db['reduced'].find({'run': {'$in': runs}, 'target': 'dfimgs'}), list, DataFrame)
# Un-pickle each image and attach the run -> phase mapping below.
# NOTE(review): loads() deserialises pickled DB content — only safe because
# the database is trusted.
selected = DataFrame({'run': imgs['run'], 'n': imgs['n'], 'img': imgs['reduced'].map(loads)}).merge(
    DataFrame([
        [463,0.10],
        [465,0.25],
        [466,0.40],
        [467,0.55],
        [469,0.85],
        [470,1.00],
        [471,1.15],
        [472,1.30],
        [473,1.45],
        [474,1.60],
        [475,0.10],
        [476,0.10],
        [477,0.25],
        [478,0.40],
        [479,0.55],
        [480,0.70],
        [481,0.85],
        [482,1.00],
        [483,1.15],
        [484,1.30],
        [485,1.45],
        [486,1.60]], columns=['run', 'phase']), on='run').sort_values('phase')
del imgs
# %%
# Shot-weighted average image per phase: sum(img * n) / sum(n).
summed = DataFrame({'phase': selected['phase'],
                    'summed': selected['img']*selected['n'],
                    'n': selected['n']}).groupby('phase')[['n', 'summed']].apply(sum)
groupped = DataFrame({'n': summed['n'],
                      'img': summed['summed'] / summed['n']}).reset_index()
del summed
# %%
# Save one grey-scale map per phase; file name encodes phase * 100.
for _, phase, img in groupped[['phase', 'img']].itertuples():
    plt.figure(figsize=(8, 8))
    plt.pcolormesh(img, cmap='Greys')
    plt.axis('equal')
    plt.axis('off')
    plt.clim(0, 5)
    plt.savefig('img{:03.0f}.png'.format(phase*100))
|
import pyeapi
from pprint import pprint
import yaml
from my_funcs import read_yaml
from jinja2 import FileSystemLoader, StrictUndefined
from jinja2.environment import Environment
# Device inventory keyed by device name; each entry holds pyeapi connection
# parameters plus (apparently) a nested "data" dict of template variables —
# TODO confirm against devices.yaml.
yaml_devices = read_yaml("devices.yaml")
# StrictUndefined makes the render fail loudly on missing template variables.
env = Environment(undefined=StrictUndefined)
env.loader = FileSystemLoader('.')
template_file = 'ex4.j2'
template = env.get_template(template_file)
arista_devs = yaml_devices.keys()
for elem in arista_devs:
    # Render this device's config and split it into one CLI command per line.
    arista_output = template.render(**yaml_devices[elem]["data"])
    cmds = arista_output.splitlines()
    # NOTE(review): the whole per-device dict (including the "data" key) is
    # splatted into connect() — verify pyeapi tolerates the extra keyword.
    connection = pyeapi.client.connect(**yaml_devices[elem])
    device = pyeapi.client.Node(connection)
    device.config(cmds)
    output = device.enable("show ip interface brief")
    pprint(output)
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import url, include, handler404, handler500
from shop import views
from django.contrib.auth import views as auth_views
urlpatterns = [
    # -- Product routes
    url(r'^categorie/(?P<id>\d+)/$', views.categorie, name="categorie"),
    url(r'^categorie/(?P<id>\d+)/(?P<page>\d+)/$', views.categorie, name="categorie"),
    url(r'^categorie_mere/(?P<id>\d+)/$', views.categorie_mere, name="categorie_mere"),
    url(r'^categorie_mere/(?P<id>\d+)/(?P<page>\d+)/$', views.categorie_mere, name="categorie_mere"),
    url(r'^marque/(?P<id>\d+)/$', views.marque, name="marque"),
    url(r'^marque/(?P<id>\d+)/(?P<page>\d+)/$', views.marque, name="marque"),
    url(r'^detail-prod/(?P<id>\d+)/$', views.detailprod, name="detail-prod"),
    url(r'^image/(?P<id>\d+)/$', views.image, name="image"),
    url(r'^tout-prod/$', views.tou_prod, name="tou-prod"),
    url(r'^tout-prod/(?P<page>\d+)/$', views.tou_prod, name="tou-prod"),
    url(r'^reparation/$', views.reparation, name="reparation"),
    url(r'^categorie_marque/(?P<idm>\d+)/(?P<idc>\d+)/$', views.cat_marque, name="categorie-marque"),
    url(r'^categorie_marque/(?P<idm>\d+)/(?P<idc>\d+)/(?P<page>\d+)/$', views.cat_marque, name="categorie-marque"),
    url(r'^boutique/(?P<id>\d+)/$', views.boutique, name="boutique"),
    url(r'^boutique/(?P<id>\d+)/(?P<page>\d+)/$', views.boutique, name="boutique"),
    # BUG FIX: the pattern ended with '$ ' — a trailing space INSIDE the
    # regex — so '/mesmarque/' could never match.
    url(r'^mesmarque/$', views.mesmarque, name="mesmarque"),
    url(r'^add_in_cart/(?P<product_id>\d+)/(?P<qty>\d+)/$', views.ajouter_panier, name="ajouter_panier"),
    url(r'^produit-promo/$', views.produit_promo, name="produit-promo"),
    url(r'^produit-promo/(?P<page>\d+)/$', views.produit_promo, name="produit-promo"),
    url(r'^categorie_promo/(?P<id>\d+)/$', views.categorie_promo, name="categorie-promo"),
    url(r'^categorie_promo/(?P<id>\d+)/(?P<page>\d+)/$', views.categorie_promo, name="categorie-promo"),
    url(r'^recherche/$', views.recherche, name="recherche"),
    url(r'^recherche/(?P<page>\d+)/$', views.recherche, name="recherche"),
    url(r'^my_cart/$', views.my_cart, name="my_cart"),
    # NOTE(review): duplicate of the add_in_cart pattern above — Django uses
    # the first match, so views.panier is unreachable here. Confirm which
    # view is intended before removing one of the two entries.
    url(r'^add_in_cart/(?P<product_id>\d+)/(?P<qty>\d+)/$', views.panier, name="panier"),
    url(r'^vider_panier/$', views.supprime_panier, name="supprime_panier"),
    url(r'^supprime_produit/(?P<id>\d+)/(?P<quantity>\d+)$', views.supprime_produit, name="supprime_produit"),
    url(r'^modifie_produit/(?P<id>\d+)/(?P<quantity>\d+)$', views.modifie_produit, name="modifie_produit"),
    url(r'^abonner/(?P<idc>\d+)/(?P<idb>\d+)$', views.abonner, name="abonner"),
    url(r'^deabonner/(?P<idc>\d+)/(?P<idb>\d+)$', views.deabonner, name="deabonner"),
    url(r'^dernier_prod/(?P<idb>\d+)/$', views.dernier_prod, name="dernier_prod"),
    # -- User/account routes (Django auth views imported under an alias)
    url(r'^vente/$', views.vente, name="vente"),
    url(r'^connexion/$', views.connexion, name="connexion"),
    url(r'^deconnexion$', auth_views.logout_then_login, name="deconnexion"),
    url(r'^inscription/$', views.inscription, name="inscription"),
    url(r'^inscription-vendeur/$', views.inscription_vendeur, name="inscription-vendeur"),
    url(r'^compte_vendeur/$', views.compte_vendeur, name="compte_vendeur"),
    url(r'^compte_client/$', views.compte_client, name="compte_client"),
    url(r'^compt-admin/$', views.compt_admin, name="compt-admin.html"),
    url(r'^confirm-achat/$', views.confirm_achat, name="confirmer-achat"),
    url(r'^detail_commande/(?P<id>\d+)/$', views.detail_commande, name="detail_commande"),
    url(r'^livrer/(?P<id>\d+)/$', views.livrer, name="livrer"),
    url(r'^mes_produit/(?P<i>\d+)/$', views.produit, name="produit"),
    url(r'^confirm_sup/(?P<i>\d+)/(?P<id>\d+)/$', views.confirm_sup, name="confirm_sup"),
    url(r'^supprimer_element/(?P<i>\d+)/(?P<id>\d+)/$', views.supprimer_element, name="supprimer_element"),
    url(r'^ajouter_produit/$', views.ajouter_produit, name="ajouter_produit"),
    url(r'^ajouter_produit/(?P<id>\d+)/$', views.ajouter_produit, name="ajouter_produit"),
    url(r'^ajouter_promos/$', views.ajouter_promos, name="ajouter_promos"),
    url(r'^ajouter_promos/(?P<id>\d+)/$', views.ajouter_promos, name="ajouter_promos"),
    url(r'^ajouter_marque/$', views.ajouter_marque, name="ajouter_marque"),
    url(r'^ajouter_categorie/$', views.ajouter_categorie, name="ajouter_categorie"),
    url(r'^ajouter_boutique/$', views.ajouter_boutique, name="ajouter_boutique"),
    # url(r'^address_shop/$', views.address_shop , name="address_shop"),
    # -- Location of an individual (country / region / locality / address)
    url(r'^pays/$', views.pays, name="pays"),
    url(r'^pays_region/(?P<id>\d+)/$', views.region, name="region"),
    url(r'^pays_region_localite/(?P<id>\d+)/$', views.localite, name="localite"),
    url(r'^pays_region_localite_address/(?P<id>\d+)/$', views.address, name="address"),
]
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.utils.translation import ugettext_lazy as _
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset
class RegistrationForm(UserCreationForm):
    """Registration form for new users, rendered with Crispy Forms.

    Model:
        django.contrib.auth.models.User
    Extends:
        django.contrib.auth.forms.UserCreationForm
    Fields:
        email {string} -- required, must be unique
        username {string} -- user name
        password1 {string} -- user password
        password2 {string} -- confirmed user password
    Methods:
        clean_email -- raises ValidationError if the email is not unique
        save -- returns the user flagged inactive (is_active = False)
    """
    email = forms.EmailField(required=True)

    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # Crispy layout: horizontal form, labels in a 3-column gutter,
        # surrounding <form> tag supplied by the template.
        self.helper = FormHelper(self)
        self.helper.html5_required = True
        self.helper.form_show_errors = True
        self.helper.form_tag = False
        self.helper.label_class = 'col-sm-3'
        self.helper.field_class = 'col-sm-9'
        self.helper.layout = Layout(
            Fieldset(
                'Registration',
                'email',
                'username',
                'password1',
                'password2'
            ),
        )

    class Meta:
        model = User
        fields = ['email', 'username', 'password1', 'password2']

    def clean_email(self):
        """Reject the submitted email if another account already uses it."""
        email = self.cleaned_data.get('email')
        if User.objects.filter(email=email).exists():
            raise forms.ValidationError(_('Duplicate email'))
        return email

    def save(self, commit=True):
        """Create the user as inactive (pending activation).

        BUG FIX: is_active was previously cleared only when commit=True, so
        callers using commit=False got a user that would default to active,
        contradicting the class docstring.
        """
        user = super(RegistrationForm, self).save(commit=False)
        user.is_active = False
        if commit:
            user.save()
        return user
class AuthenticationForm(AuthenticationForm):
    """Authentication (sign-in) form rendered with Crispy Forms.

    NOTE(review): this class shadows the imported
    django.contrib.auth.forms.AuthenticationForm name — importers of this
    module get the Crispy-enabled variant; consider a distinct name.
    NOTE(review): the inner Meta appears unused — AuthenticationForm is not
    a ModelForm, so model/fields are ignored; confirm before relying on it.

    Variables:
        username {string} -- user name
        password {string} -- user password
    """
    def __init__(self, *args, **kwargs):
        super(AuthenticationForm, self).__init__(*args, **kwargs)
        # Crispy layout mirroring RegistrationForm's horizontal styling.
        self.helper = FormHelper(self)
        self.helper.html5_required = True
        self.helper.form_show_errors = True
        self.helper.form_tag = False
        self.helper.label_class = 'col-sm-3'
        self.helper.field_class = 'col-sm-9'
        self.helper.layout = Layout(
            Fieldset(
                'Sign in',
                'username',
                'password',
            ),
        )
    class Meta:
        model = User
        fields = ['username', 'password']
|
# On-screen digital clock: redraws HH:MM:SS once per second on a turtle
# canvas, starting from 02:00:00.
hours=2
minuts=0
seconds=0
import time
from turtle import *
setup()
t=Turtle()
while True:
    t.clear()
    # zfill(2) pads single digits so the display is always HH:MM:SS.
    t.write(str(hours).zfill(2)+":"+str(minuts).zfill(2)+":"+str(seconds).zfill(2), font=("arial",30,"normal"))
    seconds=seconds+1
    time.sleep(1)
    if seconds==60:
        seconds=0
        minuts=minuts+1
    if minuts==60:
        minuts=0
        hours=hours+1
    if hours==24:
        # BUG FIX: the original reset a typo'd variable 'hour', so the
        # display ran 24, 25, ... instead of wrapping back to 00.
        hours=0
|
import json
from django.shortcuts import render
from django.http import JsonResponse,HttpResponse
from rest_framework import serializers
from .models import Employee
from .serializer import EmployeeSerializer,UserSerializer
from django.contrib.auth.models import User
from django.views.decorators.csrf import csrf_exempt
from rest_framework.parsers import JSONParser
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
# Create your views here.
@api_view(['GET','POST'])
def employeeListView(request):
    """List all employees (GET) or create a new one (POST)."""
    if request.method=='GET':
        employees = Employee.objects.all()
        serializer = EmployeeSerializer(employees, many=True)
        return Response(serializer.data)
    elif request.method=='POST':
        serializer = EmployeeSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        # BUG FIX: validation failures previously came back as HTTP 200,
        # which clients could not distinguish from success.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET','PUT','DELETE'])
def employeeDetailViewuser(request,pk):
    """Retrieve (GET), update (PUT) or delete (DELETE) a single Employee."""
    try:
        employee=Employee.objects.get(pk=pk)
    except Employee.DoesNotExist:
        # Named constant instead of the bare 404 literal, matching the
        # status usage elsewhere in this module.
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method=='DELETE':
        employee.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
    elif request.method=='GET':
        serializer=EmployeeSerializer(employee)
        return Response(serializer.data)
    elif request.method=='PUT':
        serializer=EmployeeSerializer(employee,data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        # BUG FIX: invalid updates previously returned HTTP 200 with the
        # error payload; signal the failure explicitly.
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET'])
def userListView(request):
    """Return every Django auth user, serialized."""
    # api_view already restricts the method set to GET.
    if request.method == 'GET':
        queryset = User.objects.all()
        payload = UserSerializer(queryset, many=True).data
        return Response(payload)
|
from matplotlib.colors import colorConverter
import matplotlib.pyplot as plt
import numpy as np
import matplotlib as mpl
# create dummy data
zvals = np.ones((100,100))# np.random.rand(100,100)*10-5
zvals2 = np.random.rand(100,100)*10-5
# generate the colors for your colormap
color1 = colorConverter.to_rgba('white')
color2 = colorConverter.to_rgba('black')
# make the colormaps
cmap1 = mpl.colors.LinearSegmentedColormap.from_list('my_cmap',['green','blue'],256)
cmap2 = mpl.colors.LinearSegmentedColormap.from_list('my_cmap2',[color1,color2],256)
cmap2._init() # create the _lut array, with rgba values
# create your alpha array and fill the colormap with them.
# here it is progressive, but you can create whatever you want
# NOTE(review): _init()/_lut are private matplotlib internals; the N+3 size
# presumably matches the lookup table's extra under/over/bad entries —
# confirm against the pinned matplotlib version.
alphas = np.linspace(0, 0.8, cmap2.N+3)
cmap2._lut[:,-1] = alphas
print(zvals2)
#img2 = plt.imshow(zvals, interpolation='nearest', cmap=cmap1, origin='lower')
#img3 = plt.imshow(zvals2, interpolation='nearest', cmap=cmap2, origin='lower')
#plt.show()
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-03 10:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the BaseUrlForTest table and
    # drops the 'adress' field from TestUser. Avoid hand-editing migrations
    # that have already been applied.
    dependencies = [
        ('KawsWebEnter', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='BaseUrlForTest',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('webname', models.CharField(blank=True, max_length=40, null=True)),
                ('weburl', models.CharField(blank=True, max_length=100, null=True)),
            ],
        ),
        migrations.RemoveField(
            model_name='testuser',
            name='adress',
        ),
    ]
|
# coding: utf-8
from __future__ import absolute_import
import struct
from datetime import datetime
import logging
from celery import signals
from celery.task.control import revoke
import zmq
import gevent.monkey # we need gevent to use 'then' on AsyncResult
gevent.monkey.patch_all()
from . import worker
from .state import env
from .constants import *
logger = logging.getLogger(__name__)
def on_retrain_finished(result):
    """AsyncResult callback: clear the training flag and notify over ZMQ.

    Attached via gevent-enabled .then() in trigger_retrain; *result* is the
    settled celery AsyncResult.
    """
    env['is_training'] = False
    # A terminated (revoked) task gets no RETRAIN_ENDED notification.
    if str(result.result) == 'terminated':
        return
    if env['socket'] is None: # socket not created yet
        return
    socket = env['socket'] # maybe we need a lock when sending the message
    # Multipart frame layout: empty delimiter, message type, msg id, status.
    socket.send("", flags=zmq.SNDMORE)
    socket.send(
        struct.pack('<I', MESSAGE_TYPE_RETRAIN_ENDED), flags=zmq.SNDMORE)
    socket.send(struct.pack('<I', env['msg_id']), flags=zmq.SNDMORE)
    socket.send(struct.pack('<I', 1))
    logger.info(
        'Sent RETRAIN_ENDED for msg_id {}.'.format(env['msg_id']))
def trigger_retrain(backend_module, data):
    """Queue an asynchronous retrain task and flag training as in progress."""
    env['retrain_res'] = worker.retrain.delay(backend_module, data)
    # .then() works because gevent monkey-patching is applied at import time
    # (see module top); the callback clears is_training when the task ends.
    env['retrain_res'].then(on_retrain_finished)
    env['is_training'] = True
def abort_retrain():
    """Terminate the in-flight retrain task; its .then() callback resets state."""
    env['retrain_res'].revoke(terminate=True)
    logger.info('Terminated training for msg_id {}.'.format(env['msg_id']))
def trigger_fetch(backend_module):
    """Kick off an asynchronous fetch task for the given backend module.

    BUG FIX: apply_async's first positional parameter is the *args*
    iterable — passing backend_module directly would splat it (a string
    would become one argument per character). Use .delay(...), matching
    trigger_retrain above.
    """
    env['fetch_res'] = worker.fetch.delay(backend_module)
# For how to use signal:
# http://docs.celeryproject.org/en/latest/userguide/signals.html
# Caveat: task_xxxx signals will be executed in worker process.
@signals.after_task_publish.connect
def after_task_publish_handler(**kwargs):
    """Announce RETRAIN_STARTED over ZMQ once a task has been published.

    NOTE(review): this handler fires for EVERY published task (fetch
    included, not only retrain) — confirm that is intended.
    """
    if env['socket'] is None: # not created yet
        return
    socket = env['socket']
    # Same multipart frame layout as RETRAIN_ENDED in on_retrain_finished.
    socket.send("", flags=zmq.SNDMORE)
    socket.send(
        struct.pack('<I', MESSAGE_TYPE_RETRAIN_STARTED), flags=zmq.SNDMORE)
    socket.send(struct.pack('<I', env['msg_id']), flags=zmq.SNDMORE)
    socket.send(struct.pack('<I', 1))
    logger.info(
        'Sent RETRAIN_STARTED for msg_id {}.'.format(env['msg_id']))
|
#!/usr/bin/env python3
# Splits data into training, validation, test and probe sets
data_path = 'dataset/um/'

# Route every rating line to the output file selected by its fold index
# (1=base/training, 2=validation, 3=test, 4=probe); other folds are dropped.
with open(data_path + 'all.dta', 'r') as dat, open(data_path + 'all.idx', 'r') as idx:
    with open(data_path + 'base.dta', 'w') as base, open(data_path + 'valid.dta', 'w') as valid, \
         open(data_path + 'test.dta', 'w') as test, open(data_path + 'probe.dta', 'w') as probe:
        sinks = {'1': base, '2': valid, '3': test, '4': probe}
        for data_line, index_line in zip(dat, idx):
            sink = sinks.get(index_line[0])
            if sink is not None:
                sink.write(data_line)
|
import yfinance as yf
import pandas as pd
import numpy as np
import pandas_datareader as web
import matplotlib.pyplot as plt
from datetime import date
import datetime as dt
#This code can help one see how their stocks have performed compared to the market index. (SPY)
# Prompt for three holdings to benchmark against the SPY index fund.
ans1 = input("Enter your Ticker symbol:")
ans2 = input("Enter your Ticker symbol:")
ans3 =input("Enter your Ticker symbol:")
tickers_list = ['spy', ans1, ans2, ans3]
# NOTE(review): this rebinds `date`, shadowing the `from datetime import date`
# import above — rename one of them if the class is needed later.
date= input("Enter the date you would like to start from(YYYY-MM-DD Format):")
data = yf.download(tickers_list,date)['Adj Close']
print(data.head())
# Cumulative growth of $1: chained product of daily (1 + pct change).
final = ((data.pct_change()+1).cumprod()).plot(figsize=(10, 7))
plt.legend()
plt.title("Returns", fontsize=11)
plt.ylabel('Cumulative Returns', fontsize=18)
plt.xlabel('Year-month', fontsize=10)
plt.grid(which="major", color='k', linestyle='-.', linewidth=0.4)
plt.show()
|
import paramiko
ssh = paramiko.SSHClient()
# Auto-accept unknown host keys instead of failing on first contact.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# SSH connection using credentials passed in plain text.
# NOTE(review): the password is hard-coded — move it to a secret store or
# switch to key-based authentication.
ssh.connect(hostname="10.20.220.105",port="22",username="jor",password="123456")
stdin,stdout,stderr = ssh.exec_command('df')
# Prefer stdout; fall back to stderr when the command produced no output.
res,err = stdout.read(),stderr.read()
result = res if res else err
print(result.decode())
print()
ssh.close()
#
# Without AutoAddPolicy, a host not found in known_hosts would be rejected;
# the policy adds its (RSA) key to the list of known hosts instead.
class DataExtractor:
    """Parses a tab-separated dataset where each line is '<label>\\t<message>'."""

    def __init__(self, txtFile):
        # Path of the text file to parse.
        self.txtFile = txtFile

    def parseTextFile(self):
        """Parse the file and return (messages, labels) as parallel lists.

        Blank lines are skipped. '.' and ',' characters are stripped from
        each message body.
        """
        with open(self.txtFile) as f:
            textMessages = f.read()
        msgs = []
        outputs = []
        for textMessage in textMessages.split('\n'):
            if not textMessage.strip() == '':
                # BUG FIX: split on the FIRST tab only, so messages that
                # themselves contain tab characters are no longer truncated.
                label, body = textMessage.split('\t', 1)
                outputs.append(label)
                msgs.append(body.replace('.', '').replace(',', ''))
        return (msgs, outputs)
|
def solution(n, m):
    """Return [gcd, lcm] of two positive integers n and m.

    Replaces the original brute-force scans (O(min(n, m)) downward for the
    gcd and O(n*m) upward for the lcm) with Euclid's algorithm plus the
    identity n*m == gcd*lcm — same results, O(log min(n, m)) time.
    """
    # Euclid's algorithm for the greatest common divisor.
    a, b = n, m
    while b:
        a, b = b, a % b
    gcd = a
    # lcm follows directly from the gcd identity.
    lcm = n * m // gcd
    return [gcd, lcm]
from django.shortcuts import render, redirect
from .models import Contact
# Create your views here.
def contact_form(request):
    """Persist a contact-page POST submission, then redirect home."""
    if request.method == 'POST':
        fields = request.POST
        # The subscribe checkbox is only present in POST data when ticked.
        wants_newsletter = 'user_subscribe' in fields
        record = Contact(user_type = fields['user_type'],
                         name = fields['user_name'],
                         email = fields['user_email'],
                         phone_number = fields['user_phone'],
                         company_name = fields['user_company'],
                         message = fields['user_message'],
                         subscriber = wants_newsletter)
        record.save()
        return redirect('home')
#!/usr/bin/env python
import requests
import glob
import os
import json
dv_endpoint = 'http://localhost:8080/dataverse_stub/published'
def release_all( data_dir = 'data' ):
    """Release every dataset recorded as a JSON marker file under *data_dir*.

    Each marker file must contain a 'datasetId' key; the file is deleted
    once its dataset has been released.
    """
    pattern = '%s/*.json' % data_dir
    for path in glob.glob(pattern):
        with open(path, 'r') as handle:
            record = json.load(handle)
        dataset_release(record['datasetId'])
        # Drop the marker so the dataset is not released twice.
        os.remove(path)
def dataset_release( dset_id ):
    """POST *dset_id* to the dataverse stub endpoint and echo the response."""
    print('releasing %s ' % dset_id)
    response = requests.post(dv_endpoint, data={'datasetId': dset_id})
    print(response.text)
if __name__ == '__main__':
    # CLI entry point: release every dataset queued under ./data.
    release_all()
|
from .influx_object import InfluxObject
from .csv_object import CSVObject
from .exporter_object import ExporterObject
from .base_object import BaseObject
from .__version__ import __version__
from .config_object import Configuration
from .command_object import export_csv_to_influx
|
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()

if rank == 0:
    # Root sends the same payload to ranks 1-3.
    data = {'a': 7, 'b': 3.14}
    # BUG FIX: isend returns a Request that must be completed; the original
    # dropped the requests, so the non-blocking sends were never guaranteed
    # to finish before the process moved on.
    send_reqs = [comm.isend(data, dest=peer, tag=11) for peer in (1, 2, 3)]
    for req in send_reqs:
        req.wait()
elif rank == 1 or rank == 2 or rank == 3:
    # Non-blocking receive; wait() yields the unpickled payload.
    recv_req = comm.irecv(source=0, tag=11)
    print("my rank is ", rank, " ", recv_req.wait(), " \n")

print("")
|
#!/usr/bin/env python
# -*- coding: cp1252 -*-
# AUTHOR
#
# Daniel Jimenez Martinez (razorbreak@gmail.com)
#
# DESCRIPTION
#
# AiMasterMind es un juego en el que dos IAs (Inteligencias Artificiales) se
# enfrentan por ver quien de ellas descubre una contraseņa generada
# aleatoriamente antes que su rival.
# La idea de este juego se basa en el MasterMind original.
#
# Cada IA puede usar cualquier tipo de algoritmo para su resolucion, tratando
# siempre de buscar la forma mas rapida y con menor coste posible.
#
# La implementacion de las IAs se realiza en los ficheros externos "ai_player_X.py"
# (mas info en los mismos).
#
# Modos de juego:
# ---------------
# EQUILIBRADO: ganara aquella que descubra la misma contraseņa en menos
# intentos. El turno y el orden se escogen aleatoriamente.
#
# ALEATORIO: cada rival tendra su propia contraseņa aleatoria para adivinar.
# El orden se establecera aleatoriamente antes de comenzar.
#
# INCLUDES
import aimastermind
import random
import collections
import pygame
###
import ai_player_1
import ai_player_2
import ai_player_3
import ai_player_4
# import ai_player_N
###
# VARS
# Add new AI players to the list following the pattern (do not forget the imports)
player_list = []
player_list.append((ai_player_1.AiPlayer(),1))
#player_list.append((ai_player_2.AiPlayer(),2))
#player_list.append((ai_player_3.AiPlayer(),3))
#player_list.append((ai_player_4.AiPlayer(),4))
#player_list.append((ai_player_N.AiPlayer(),N) #Follow the same pattern
#
# Edit according to the desired game configuration
#
pass_size = 10 #Password length (in digits)
digit_size = 25 #Values each digit can take (from 0 up to SIZE)
game_mode = 0 #Game-mode selector (0: balanced, 1: random).
nom = 3 #Number Of Matches: how many games will be played
n_players = len(player_list) #Number of players
time_round = 3 #Rounding of the displayed execution times (number of decimals)
ascii_mode = 0 #1:enabled, 0:disabled
displace = 97 #Only changes the on-screen drawing, to show ASCII.
# 48->0
# 57->9
# 97->a
# 122->z
#
#To show letters from 'a' to 'z' just set
# 97 in displace and 25 in digit_size
extreme_visuals = False #Used to prevent extreme large window
# GRAPHICS AND SCREEN PRE-CONFIGURATION
pygame.init()
BLACK = (0,0,0)
WHITE = (255,255,255)
RED = (255,0,0)
GREEN = (0,255,0)
BLUE = (0,0,255)
Font_Led = "Resources/Fonts/Led.ttf"
Font_Visitor1 = "Resources/Fonts/visitor-tt1.ttf"
Font_Visitor2 = "Resources/Fonts/visitor-tt2.ttf"
tick_winner = pygame.image.load("Resources/Graphics/tick_winner.png")
columns = (2 if n_players>=2 else 1) #two panels per row at most
rows = int(round(float(n_players)/2))
win_width = 180 + 40*((pass_size-3) if pass_size>3 else 0) #panel grows with password length
win_height = 150
if win_width*columns > 1440: #clamp very wide layouts
    win_width = 380
    extreme_visuals = True
resolution = (win_width*columns,win_height*rows)
screen = pygame.display.set_mode(resolution)
caption = "AiMasterMind -"+("Equilibrado" if game_mode==0 else "Turnos")+"- by Razorbreak"
pygame.display.set_caption(caption)
clock = pygame.time.Clock()
FPS = 10 # Frames Per Second
exit = False # NOTE(review): shadows the exit() builtin
# Dibujar texto en pantalla, usando la fuente, tamaņo y posicion establecida
def drawText(message,font_name,size,color,position,screen):
    # Render *message* with the given TTF font at *size* and blit it at *position*.
    font = pygame.font.Font(font_name, size)
    rendered = font.render(message, True, color)
    screen.blit(rendered, position)
def drawArray(array,position,screen):
    # Draw *array* as a row of "[v]" cells (one space or _ is 10*16px wide).
    # In ascii_mode, integer values are shown as the character value+displace.
    if extreme_visuals:
        text = " " + "NOT AVAILABLE"
    else:
        cells = []
        for value in array:
            if ascii_mode and type(value) is int:
                cells.append("["+chr(value+displace)+"] ")
            else:
                cells.append("["+str(value)+"] ")
        text = " " + "".join(cells)
    drawText(text,Font_Visitor1,20,BLACK,position,screen)
# MAIN GRAPHICS LOOP state
timers = [0.0 for i in range(n_players)] #per-player accumulated AI "thinking" time, in seconds
positions = collections.defaultdict(lambda:0) #per-player % of password discovered
startgame = False #True while a match is running
continuous = False #True while auto-playing every match (key A)
wins = collections.defaultdict(lambda:0) #matches won, keyed by each player's fixed id
winner = -1 #index (in player_list) of the last winner; -1 = none yet
print "\n\n\n=== PARTIDA No."+str(sum(wins.values())+1)+" ==="
newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
while not exit:
    # FETCH EVENTS (keyboard controls: R reset, Q quit, C change mode,
    # S start one match, A auto-play all matches, D toggle ASCII display)
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            exit = True #Finish game
        if event.type == pygame.KEYDOWN:
            #print "Keyboard:",event.key #Debug - Keyboard codes
            if event.key == pygame.K_r: #Press R to restart board
                print "\n\n\n\nRESETEANDO JUEGO...\n\n"
                newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
                random.shuffle(player_list)
                timers = [0.0 for i in range(n_players)]
                positions = collections.defaultdict(lambda:0)
                for n in range(n_players):
                    player_list[n][0].recibirRespuesta([],finalizado=True) #tell each AI the game ended
                wins = collections.defaultdict(lambda:0)
                startgame = False
                continuous = False
                start_time = pygame.time.get_ticks()
                winner = -1
            if event.key == pygame.K_q: #Press Q to quit
                exit = True
            if event.key == pygame.K_c: #Press C to change game mode
                game_mode = (0 if game_mode==1 else 1)
                caption = "AiMasterMind -"+("Equilibrado" if game_mode==0 else "Turnos")+"- by Razorbreak"
                pygame.display.set_caption(caption)
                print "\n\n\n\nRESETEANDO JUEGO...\n\n"
                newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
                random.shuffle(player_list)
                timers = [0.0 for i in range(n_players)]
                positions = collections.defaultdict(lambda:0)
                for n in range(n_players):
                    player_list[n][0].recibirRespuesta([],finalizado=True)
                wins = collections.defaultdict(lambda:0)
                startgame = False
                continuous = False
                winner = -1
                start_time = pygame.time.get_ticks()
            if event.key == pygame.K_s: #Press S to start match
                print "\n\n\n=== PARTIDA No."+str(sum(wins.values())+1)+" ==="
                random.shuffle(player_list)
                newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
                timers = [0.0 for i in range(n_players)]
                positions = collections.defaultdict(lambda:0)
                for n in range(n_players):
                    player_list[n][0].recibirRespuesta([],finalizado=True)
                start_time = pygame.time.get_ticks()
                startgame = True
                winner = -1
            if event.key == pygame.K_a: #Press A to launch every match
                if (not startgame)&(sum(wins.values())<nom):
                    print "\n\n\n=== PARTIDA No."+str(sum(wins.values())+1)+" ==="
                    random.shuffle(player_list)
                    newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
                    timers = [0.0 for i in range(n_players)]
                    positions = collections.defaultdict(lambda:0)
                    for n in range(n_players):
                        player_list[n][0].recibirRespuesta([],finalizado=True)
                    start_time = pygame.time.get_ticks()
                    continuous = True
                    startgame = True
                    winner = -1
            if event.key == pygame.K_d: #Press D to change between ASCII and Integer
                ascii_mode = not(ascii_mode)
    # RENDERING
    x0=5;x1=30;x2=x1+30;x3=0 #panel-local column anchors (px)
    y0=5;y1=30;y2=y1+30;y3=y2+30 #panel-local row anchors (px)
    screen.fill(WHITE)
    clock.tick(FPS)
    #Draw the panel separators
    if columns==2:
        pygame.draw.line(screen,BLUE,(resolution[0]/2,0),(resolution[0]/2,resolution[1]),1)
    for i in range(rows-1):
        pygame.draw.line(screen,BLUE,(0,win_height*(i+1)),(resolution[0],win_height*(i+1)),1)
    #Control code and drawing for each player
    for pj in range(n_players):
        mx = (win_width if (pj+1)%2==0 else 0) #panel x offset
        my = (win_height*(pj/2) if (pj+1)>2 else 0) #panel y offset (integer division: Python 2)
        if startgame:
            #Control code: ask the AI for a guess and time it
            tim = pygame.time.get_ticks()
            pj_solution = player_list[pj][0].generarSolucion(pass_size,digit_size)
            timers[pj] += float(pygame.time.get_ticks() - tim)/1000
            pj_response = newgame.checkTry(pj_solution,pj)
            player_list[pj][0].recibirRespuesta(pj_response)
            if pj_response.count(0)==pass_size: #every digit right: pj wins
                wins[player_list[pj][1]] += 1
                print "\n\n >>> JUGADOR "+player_list[pj][0].playerID+" GANA! <<<"
                winner = pj
                print newgame,"\n"
                print dict(wins),"\n"
                for n in range(n_players):
                    player_list[n][0].recibirRespuesta([],finalizado=True)
                    print newgame.printStatisticsByPlayer(n,player_list[n][0].playerID)
                if not(continuous & (sum(wins.values()) < nom)):
                    startgame = False
                    continuous = False
                else: #auto-play: immediately start the next match
                    print "\n\n\n=== PARTIDA No."+str(sum(wins.values())+1)+" ==="
                    random.shuffle(player_list)
                    newgame = aimastermind.AiMasterMind(pass_size,digit_size,game_mode,nom,n_players) #Instantiate a game object
                    timers = [0.0 for i in range(n_players)]
                    positions = collections.defaultdict(lambda:0)
                    start_time = pygame.time.get_ticks()
                break
        elif (not startgame) & (winner == pj): #mark the last winner's panel
            screen.blit(tick_winner,(win_width+mx-40,y1+my-15))
        discoveredPass = newgame.getDiscoveredDigits(pj)
        #Drawing code
        drawArray(discoveredPass[0],(x1+mx,y1+my),screen)
        drawArray(discoveredPass[1],(x1+mx,y2+my),screen)
        drawText("PASS:",Font_Visitor2,12,BLACK,(x0+5+mx,y1+5+my),screen)
        drawText("TRY:",Font_Visitor2,12,BLACK,(x0+5+mx,y2+5+my),screen)
        positions[pj] = newgame.getPercentDiscovered(pj)
        drawText("Completado: "+str(positions[pj])+"%",Font_Visitor1,15,BLACK,(x0+mx,y3+my),screen)
        drawText("Intentos: "+str(newgame.getNumberOfTries(pj)),Font_Visitor1,15,BLACK,(x0+mx,y3+15+my),screen)
        drawText("Tiempo: "+str(round(timers[pj],time_round)),Font_Visitor1,15,BLACK,(x0+mx,y3+30+my),screen)
        drawText("Wins: "+str(wins[player_list[pj][1]]),Font_Visitor1,15,BLACK,(win_width-60+mx,y3+30+my),screen)
    #Show each player's ranking according to % completed
    pos = 1
    for i in sorted(positions,key=positions.get,reverse=True):
        mx = (win_width if (i+1)%2==0 else 0)
        my = (win_height*(i/2) if (i+1)>2 else 0)
        drawText(player_list[i][0].playerID+" ("+str(pos)+")",Font_Visitor1,15,BLACK,(x0+mx,y0+my),screen)
        pos += 1
    #End drawing code
    pygame.display.flip() #Flip the render onto the screen
print "\n\n\n=== FIN DEL JUEGO ==="
pygame.quit()
|
class Solution:
    def addDigits(self, num: int) -> int:
        """Repeatedly replace num by the sum of its decimal digits until a
        single digit remains, and return that digit."""
        while True:
            total = 0
            remaining = num
            while remaining:
                total += remaining % 10
                remaining //= 10
            if total < 10:
                return total
            num = total
# Follow-up: can it be done in O(1) time?
# num minus its digit sum is always a multiple of 9, so the digital root is num mod 9 (with 9 instead of 0 for positive multiples of 9).
class Solution:
    def addDigits(self, num: int) -> int:
        """O(1) digital root: num is congruent to its digit sum mod 9."""
        if num == 0:
            return 0
        return (num - 1) % 9 + 1
from django.contrib.auth.forms import UserCreationForm
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
from .models import Equipo, Ticket, Empleado
from .forms import EquipoForm, TicketForm, EmpleadoForm, UserRegisterForm
from django.contrib import messages
from django.shortcuts import render, redirect
from django.db.models import Q
# Create your views here.
class EquipoUpdateView(UpdateView):
    """Edit an existing Equipo; redirects to the Equipo list on success."""
    form_class = EquipoForm
    template_name = 'createEquipo.html'
    model = Equipo
    # Host-relative URL: the original hardcoded http://127.0.0.1:8000, which
    # breaks everywhere except the local dev server.
    success_url = "/myapp/listaEquipo"
class EquipoCreateView(CreateView):
    """Create a new Equipo; redirects to the Equipo list on success."""
    form_class = EquipoForm
    template_name = 'createEquipo.html'
    # CreateView expects `model`; the original assigned the model class to
    # `queryset`, which is not a QuerySet and breaks get_queryset().
    model = Equipo
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaEquipo"
class EquipoListView(ListView):
    # Lists every Equipo using listaEquipo.html (context: object_list).
    model = Equipo
    template_name = 'listaEquipo.html'
class EquipoDetailView(DetailView):
    # Detail page for a single Equipo (looked up by pk from the URL).
    model = Equipo
    template_name = 'detailEquipo.html'
    # NOTE(review): redundant -- `model = Equipo` already implies this queryset.
    queryset = Equipo.objects.all()
class EquipoDeleteView(DeleteView):
    """Confirm-and-delete view for Equipo; back to the list on success."""
    model = Equipo
    template_name = 'deleteEquipo.html'
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaEquipo"
class TicketUpdateView(UpdateView):
    """Edit an existing Ticket; redirects to the Ticket list on success."""
    form_class = TicketForm
    template_name = 'createTicket.html'
    model = Ticket
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaTicket"
class TicketCreateView(CreateView):
    """Create a new Ticket; redirects to the Ticket list on success."""
    form_class = TicketForm
    template_name = 'createTicket.html'
    # CreateView expects `model`; the original assigned the model class to
    # `queryset`, which is not a QuerySet and breaks get_queryset().
    model = Ticket
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaTicket"
class TicketListView(ListView):
    # Lists every Ticket using listaTicket.html (context: object_list).
    model = Ticket
    template_name = 'listaTicket.html'
class TicketDetailView(DetailView):
    # Detail page for a single Ticket (looked up by pk from the URL).
    model = Ticket
    template_name = 'detailTicket.html'
    # NOTE(review): redundant -- `model = Ticket` already implies this queryset.
    queryset = Ticket.objects.all()
class TicketDeleteView(DeleteView):
    """Confirm-and-delete view for Ticket; back to the list on success."""
    form_class = TicketForm
    template_name = 'deleteTicket.html'
    model = Ticket
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaTicket"
class EmpleadoUpdateView(UpdateView):
    """Edit an existing Empleado; redirects to the Empleado list on success."""
    form_class = EmpleadoForm
    template_name = 'createEmpleado.html'
    model = Empleado
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaEmpleado"
class EmpleadoCreateView(CreateView):
    """Create a new Empleado; redirects to the Empleado list on success."""
    form_class = EmpleadoForm
    template_name = 'createEmpleado.html'
    # CreateView expects `model`; the original assigned the model class to
    # `queryset`, which is not a QuerySet and breaks get_queryset().
    model = Empleado
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaEmpleado"
class EmpleadoListView(ListView):
    # Lists every Empleado using listaEmpleado.html (context: object_list).
    model = Empleado
    template_name = 'listaEmpleado.html'
class EmpleadoDetailView(DetailView):
    # Detail page for a single Empleado (looked up by pk from the URL).
    model = Empleado
    template_name = 'detailEmpleado.html'
    # NOTE(review): redundant -- `model = Empleado` already implies this queryset.
    queryset = Empleado.objects.all()
class EmpleadoDeleteView(DeleteView):
    """Confirm-and-delete view for Empleado; back to the list on success."""
    model = Empleado
    template_name = 'deleteEmpleado.html'
    # Host-relative URL so the redirect works outside the dev server.
    success_url = "/myapp/listaEmpleado"
def buscarEquipo(request):
    """Search Equipo by serial number, type, or provider fields.

    Reads the GET parameter 'eq'; renders the results page when a non-empty
    query is supplied, otherwise the empty search page.
    """
    # .get() instead of ["eq"]: the original raised KeyError (HTTP 500) when
    # the page was opened without the 'eq' query parameter.
    bequipo = request.GET.get("eq")
    if bequipo:
        equipobusca = Equipo.objects.filter(
            Q(numero_de_serie__icontains=bequipo) |
            Q(tipo_equipo__icontains=bequipo) |
            Q(proveedor_nombre__icontains=bequipo) |
            Q(proveedor_tlf__icontains=bequipo)
        ).distinct()
        return render(request, "buscarEquipo.html", {"equipos": equipobusca, "query": bequipo})
    else:
        return render(request, "buscarEquipo.html")
def register(request):
    """Show the registration form; on a valid POST create the user, flash a
    success message, and redirect to the employee list."""
    if request.method == 'POST':
        form = UserRegisterForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data['username']
            messages.success(request, f'Usuario {username} creado')
            # Host-relative redirect: the original hardcoded the dev-server
            # host (http://127.0.0.1:8000), which breaks in deployment.
            return redirect('/myapp/listaEmpleado')
        # invalid POST: fall through and re-render with the bound form/errors
    else:
        form = UserRegisterForm()
    context = {'form': form}
    return render(request, 'register.html', context)
|
#!/usr/bin/env python
from distutils.core import setup
# Package metadata for distutils packaging.
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools.
setup( name='IPMap',
       version='1.0',
       description='Map IPv4 addresses to Hilbert curve',
       author='Daniel Miller',
       author_email='bonsaiviking@gmail.com',
       url='https://github.com/bonsaiviking/IPMap',
       packages=['ipmap'],
       requires=['PIL'],
     )
|
# Interactive rock-paper-scissors against the computer (Python 2).
from random import choice
print "Hi I want to play Rock, Paper Scissiors!"
print "Ready? Let's play best out of three!"
print "Make your move! (r,p, s, or q to quit)"
move = raw_input ("Enter: ")
def rock_paper_scissors(move, my_score, your_score):
x = choice ("rps")
if x == "r" and move == "r":
print "I play Rock."
print "We have a tie!"
return my_score, your_score
elif x == "r" and move == "s":
print "I play rock!"
print "Sorry, but I won this round!"
return my_score+1, your_score
elif x == "r" and move == "p":
print "I play rock!"
print "Oh man, you win this round :("
return my_score, your_score+1
elif x == "p" and move == "r":
print "I play paper!"
print "Sorry, but I won this round."
return my_score+1, your_score
elif x == "p" and move == "p":
print "I play paper!"
print "We have a tie!"
return my_score, your_score
elif x == "p" and move == "s":
print "I play paper!"
print "Oh man, you win this round:("
return my_score, your_score+1
elif x == "s" and move == "r":
print "I play Scissors."
print "Oh man, you win this round ;("
return my_score, your_score+1
elif x == "s" and move == "p":
print "I play Scissors!"
print "Sorry, but I won this round."
return my_score+1, your_score
elif x == "s" and move == "s":
print "I play scissors!"
print "We have a tie!"
return my_score, your_score
elif move == "q":
print "GAME OVER!"
print "Final score is: You: %d, Me :%d" % (your_score, my_score)
my_score = 0
your_score = 0
# NOTE(review): the loop exits as soon as the user types "q", so the
# function's "q" branch (which prints the final score) is never reached --
# the final score is never shown.  Verify intended behaviour.
while move != "q":
    my_score, your_score = rock_paper_scissors(move, my_score, your_score)
    print my_score, your_score
    move = raw_input ("Enter: ")
|
from django.contrib import admin
from .models import Answer, Question, QuestionItem, Result, ResultItem, Test
class AnswerAdmin(admin.StackedInline):
    # Inline editor over the Question<->Answer M2M through table, so answers
    # can be attached directly on the Question admin page.
    # model = Answer.answers.through
    model = Question.answers.through
    extra = 0  # no blank extra rows by default
class QuestionItemAdmin(admin.StackedInline):
    # Inline editor for QuestionItem rows (used on the Test admin page).
    model = QuestionItem
    extra = 0  # no blank extra rows by default
class TestAdmin(admin.ModelAdmin):
    # Test admin with its question items editable inline.
    inlines = [QuestionItemAdmin]
class QuestionAdmin(admin.ModelAdmin):
    """Question admin: answers are edited inline, but only once the question
    itself exists (the inline needs a saved object to attach to)."""
    inlines = (AnswerAdmin,)
    fields = [
        "question",
    ]

    def get_inline_instances(self, request, obj=None):
        # Hide the answer inline on the "add" page: there is no question yet.
        if not obj:
            return []  # idiomatic literal instead of list()
        return super().get_inline_instances(request, obj)
class ResultAdmin(admin.ModelAdmin):
    """Result admin listing with a computed percentage of correct answers."""
    list_display = ["id", "user", "test", "stats"]

    def stats(self, obj):
        # Percentage of this result's items whose chosen answer is correct.
        total = obj.resultitem_set.count()
        if not total:
            # BUGFIX: a result with no items previously raised
            # ZeroDivisionError and broke the changelist page.
            return "0 %"
        correct_answer = obj.resultitem_set.filter(
            answer__correct_answer=True
        ).count()
        ratio = round(correct_answer * 100 / total, 2)
        return str(ratio) + " %"
# Wire the models into the admin site (custom admin classes where defined).
admin.site.register(Test, TestAdmin)
admin.site.register(Question, QuestionAdmin)
admin.site.register(QuestionItem)
admin.site.register(Answer)
admin.site.register(Result, ResultAdmin)
admin.site.register(ResultItem)
|
# Write a program that accepts sequence of lines as input and prints the lines after making all characters in the sentence capitalized.
# Suppose the following input is supplied to the program:
# Hello world
# Practice makes perfect
# Then, the output should be:
# HELLO WORLD
# PRACTICE MAKES PERFECT
# Read lines from stdin until a blank (whitespace-only) line, then echo
# every collected line upper-cased.
print('Please, write all you want. When you finish just leave a blank space before press enter')
collected = []
while True:
    entry = input()
    if not entry.strip():
        break
    collected.append(entry.upper())
for text in collected:
    print(text)
|
#-*- coding:utf8 -*-
# Copyright (c) 2020 barriery
# Python release: 3.7.0
# Create time: 2020-03-14
import json
from google.protobuf import text_format
from . import database
from .proto import entity_pb2
from .proto import result_pb2
def parse_json_to_entity(jsonObj, entype):
    """Recursively convert a JSON description of an application into the
    matching entity_pb2 protobuf message.

    jsonObj -- dict (or sub-dict) loaded from the application JSON
    entype  -- which parser to dispatch to ('App', 'CloudLayer', 'Rooms', ...)

    Raises Exception when a required key is missing, and KeyError for an
    unknown entype.
    """
    def parse_App(jsonObj):
        # Top-level entity: name plus the three layers (links are ignored).
        entity = entity_pb2.Entity()
        entity.name = jsonObj.get('Name')
        for item in ['CloudLayer', 'NetworkLayer', 'EndLayer']:
            if item not in jsonObj:
                raise Exception(f'expect {item}, but not found.')
        entity.cloudlayer.MergeFrom(parse_json_to_entity(
            jsonObj.get('CloudLayer'), 'CloudLayer'))
        entity.networklayer.MergeFrom(parse_json_to_entity(
            jsonObj.get('NetworkLayer'), 'NetworkLayer'))
        entity.endlayer.MergeFrom(parse_json_to_entity(
            jsonObj.get('EndLayer'), 'EndLayer'))
        # do not need links
        return entity
    def parse_CloudLayer(jsonObj):
        # Cloud layer: a list of datacenters (required).
        entity = entity_pb2.CloudLayer()
        for item in ['Datacenters']:
            if item not in jsonObj:
                raise Exception(f'expect {item}, but not found.')
        for datacenter in jsonObj.get('Datacenters'):
            entity.datacenters.append(parse_json_to_entity(
                datacenter, 'Datacenters'))
        return entity
    def parse_Datacenters(jsonObj):
        # One datacenter: location plus its cloud nodes (required).
        entity = entity_pb2.Datacenter()
        # entity.id = jsonObj.get('DatacenterID')
        entity.location = jsonObj.get('Location')
        # entity.name = jsonObj.get('Name')
        for item in ['CloudNodes']:
            if item not in jsonObj:
                raise Exception(f'expect {item}, but not found.')
        for cloudnode in jsonObj.get('CloudNodes'):
            entity.cloudnodes.append(parse_json_to_entity(
                cloudnode, 'CloudNodes'))
        return entity
    def parse_CloudNodes(jsonObj):
        # One cloud node: id plus its containers (required).
        entity = entity_pb2.CloudNode()
        entity.id = jsonObj.get('CloudNodeID')
        # entity.name = jsonObj.get('Name')
        # entity.location = jsonObj.get('Location')
        for item in ['Containers']:
            if item not in jsonObj:
                # BUGFIX: was `raise Exceptio(...)` -- a NameError whenever
                # this error path was actually taken.
                raise Exception(f'expect {item}, but not found.')
        for container in jsonObj.get('Containers'):
            entity.containers.append(parse_json_to_entity(
                container, 'Containers'))
        return entity
    def parse_Containers(jsonObj):
        # One container; missing resource figures fall back to defaults
        # (1 cpu, 1024 memory, 1024 store) after logging an error.
        entity = entity_pb2.Container()
        entity.id = jsonObj.get('ContainerID')
        # entity.name = jsonObj.get('Name')
        cpu = jsonObj.get('CpuNumber')
        if cpu is None:
            print('error: Container no cpu')
        entity.cpu = float(cpu) if cpu is not None else 1
        memory = jsonObj.get('Memory')
        if memory is None:
            print('error: Container no memory')
        entity.memory = float(memory) if memory is not None else 1024
        store = jsonObj.get('Store')
        if store is None:
            print('error: Container no store')
        entity.store = float(store) if store is not None else 1024
        return entity
    def parse_NetworkLayer(jsonObj):
        entity = entity_pb2.NetworkLayer()
        # no networklayer resources for now
        return entity
        # NOTE(review): everything below is unreachable -- the original
        # NetNodes/EdgeServers parsing, kept disabled by the return above.
        for item in ['NetNodes', 'EdgeServers']:
            if item not in jsonObj:
                if item == 'EdgeServers':
                    continue
                raise Exception(f'expect {item}, but not found.')
            for subJsonObj in jsonObj.get(item):
                if item == 'EdgeServers':
                    entity.edgeservers.append(parse_json_to_entity(
                        subJsonObj, item))
                elif item == 'NetNodes':
                    entity.netnodes.append(parse_json_to_entity(
                        subJsonObj, item))
        return entity
    def parse_NetNodes(jsonObj):
        entity = entity_pb2.NetNode()
        entity.id = jsonObj.get('NetNodeID')
        # entity.name = jsonObj.get('Name')
        entity.location = jsonObj.get('Location')
        # Containers can also in EdgeServers
        # NOTE(review): 'Containers' is passed as a whole to the container
        # parser (not per element) -- verify the JSON schema here.
        for item in ['Containers']:
            if item in jsonObj:
                entity.containers.append(parse_json_to_entity(
                    jsonObj.get(item), item))
        return entity
    def parse_EdgeServers(jsonObj):
        entity = entity_pb2.EdgeServer()
        entity.id = jsonObj.get('EdgeServerID')
        # entity.name = jsonObj.get('Name')
        entity.location = jsonObj.get('Location')
        # Containers can also in EdgeServers
        # NOTE(review): same whole-list pass-through as parse_NetNodes.
        for item in ['Containers']:
            if item in jsonObj:
                entity.containers.append(parse_json_to_entity(
                    jsonObj.get(item), item))
        return entity
    def parse_EndLayer(jsonObj):
        # End layer: rooms, plus loose devices/workers/applications that get
        # grouped into a synthetic 'GeneralRoom'.
        entity = entity_pb2.EndLayer()
        items = ['Devices', 'Workers', 'Applications', 'Rooms']
        empty = True
        general_room = None
        for item in items:
            if item not in jsonObj:
                continue
            empty = False
            for subJsonObj in jsonObj.get(item):
                if item == 'Rooms':
                    entity.rooms.append(parse_json_to_entity(
                        subJsonObj, item))
                else:
                    if general_room is None:
                        general_room = entity_pb2.Room()
                        general_room.name = 'GeneralRoom'
                        general_room.location = 'null'
                    if item == 'Devices':
                        general_room.devices.append(parse_json_to_entity(
                            subJsonObj, item))
                    elif item == 'Workers':
                        general_room.workers.append(parse_json_to_entity(
                            subJsonObj, item))
                    elif item == 'Applications':
                        general_room.applications.append(parse_json_to_entity(
                            subJsonObj, item))
        if general_room is not None:
            entity.rooms.append(general_room)
        if empty:
            raise Exception(f'expect {items}, but not found anything.')
        return entity
    def parse_Rooms(jsonObj):
        # One room: location ('null' if absent) plus at least one of
        # devices/workers/applications.
        entity = entity_pb2.Room()
        # entity.name = jsonObj.get('Name')
        location = jsonObj.get('Location')
        entity.location = location if location is not None else 'null'
        items = ['Devices', 'Workers', 'Applications']
        empty = True
        for item in items:
            if item not in jsonObj:
                continue
            empty = False
            for subJsonObj in jsonObj.get(item):
                if item == 'Devices':
                    entity.devices.append(parse_json_to_entity(
                        subJsonObj, item))
                elif item == 'Workers':
                    entity.workers.append(parse_json_to_entity(
                        subJsonObj, item))
                elif item == 'Applications':
                    entity.applications.append(parse_json_to_entity(
                        subJsonObj, item))
        if empty:
            raise Exception(f'expect {items}, but not found anything.')
        return entity
    def parse_Devices(jsonObj):
        entity = entity_pb2.Device()
        entity.name = jsonObj.get('Name')
        entity.id = jsonObj.get('DeviceID')
        return entity
    def parse_Workers(jsonObj):
        entity = entity_pb2.Worker()
        entity.name = jsonObj.get('Name')
        entity.id = jsonObj.get('WorkerID')
        return entity
    def parse_Applications(jsonObj):
        entity = entity_pb2.Application()
        entity.name = jsonObj.get('Name')
        entity.id = jsonObj.get('ApplicationID')
        return entity
    # Dispatch table: entype -> parser.
    parse = {
        'App': parse_App,
        'CloudLayer': parse_CloudLayer,
        'Datacenters': parse_Datacenters,
        'CloudNodes': parse_CloudNodes,
        'Containers': parse_Containers,
        'NetworkLayer': parse_NetworkLayer,
        'NetNodes': parse_NetNodes,
        'EdgeServers': parse_EdgeServers,
        'EndLayer': parse_EndLayer,
        'Rooms': parse_Rooms,
        'Devices': parse_Devices,
        'Workers': parse_Workers,
        'Applications': parse_Applications,
    }
    return parse[entype](jsonObj)
def receive(jsonObj):
    """Parse a complete application description (JSON dict) into an Entity."""
    app_entity = parse_json_to_entity(jsonObj, 'App')
    return app_entity
def queryResources(database):
    '''Build an Entity proto describing the currently available resources.

    The cloud layer comes from the newest `serverinfo` rows (grouped into one
    Datacenter per data-center name); the network layer is left empty for
    now; the end layer comes from the newest in-room `deviceinfo` rows
    (grouped into one Room per location).

    Previous file-based implementation, kept for reference:
        with open('./resource.prototxt') as f:
            resources = entity_pb2.Entity()
            text_format.Parse(f.read(), resources)
        return resources
    '''
    entity = entity_pb2.Entity()
    entity.name = 'resources'
    cloudlayer = entity_pb2.CloudLayer()
    datacenters_dict = {}
    servers = database.queryNewestItems('serverinfo',
            timelabel='time')
    # NOTE(review): the unpacking below assumes this exact column order in
    # the serverinfo table -- verify against the database schema.
    for server in servers:
        [sid, name, ip, cpuUnusage, diskIORead, diskIOWrite,
         diskUsed, memoryUsed, networkUploadRate,
         networkDownloadRate, time, cpuNum, dataCenter,
         memoryAvailable, diskAvailable] = server
        cpuUsage = 1 - cpuUnusage
        # "id: %d, " is deliberately %-formatted (the f-string has no braces).
        print(f"id: %d, " % sid + \
              f"name: {name}, " + \
              f"cpuNum: {cpuNum * (1 - cpuUsage)}, " + \
              f"memoryAvailable: {memoryAvailable}, " + \
              f"diskAvailable: {diskAvailable}")
        cloudnode = entity_pb2.CloudNode()
        cloudnode.id = '%d' % sid
        cloudnode.location = name
        cloudnode.cpu = cpuNum * (1 - cpuUsage)
        cloudnode.memory = memoryAvailable
        cloudnode.store = diskAvailable
        # Group cloud nodes into one Datacenter message per data-center name.
        if dataCenter not in datacenters_dict:
            datacenters_dict[dataCenter] = entity_pb2.Datacenter()
            datacenters_dict[dataCenter].location = dataCenter
        datacenters_dict[dataCenter].cloudnodes.append(cloudnode)
    for location, datacenter in datacenters_dict.items():
        cloudlayer.datacenters.append(datacenter)
    entity.cloudlayer.MergeFrom(cloudlayer)
    '''
    containers = database.queryNewestItems("dockerinfo",
            timelabel='time')
    for container in containers:
        [sid, name, dockerid, cpuUsage, memUsage,
         diskIORead, diskIOWrite, memoryUsed,
         networkIORead, networkIOWrite, time,
         cpuMem, memoryAvailable, diskAvailable] = container
        print(f"id: {dockerid}, " + \
              f"serverid: %d, " % sid + \
              f"name: {name}, " + \
              f"cpuNum: {cpuNum * (1 - cpuUsage)}, " + \
              f"memoryAvailable: {memoryAvailable}, " + \
              f"diskAvailable: {diskAvailable}")
    '''
    networklayer = entity_pb2.NetworkLayer()
    entity.networklayer.MergeFrom(networklayer) # no networklayer data yet
    endlayer = entity_pb2.EndLayer()
    rooms_dict = {}
    devices = database.queryNewestItems('deviceinfo',
            timelabel='time',
            condition='inroom="True"')
    for device in devices:
        [did, localip, inroom, token, dtype, model,
         name, data, location, timestamp] = device
        print(f"id: {did}, " + \
              f"inroom: {inroom}, " + \
              f"name: {name}, " + \
              f"location: {location}")
        # Redundant guard: the SQL condition already filters inroom="True".
        if inroom != "True":
            continue
        device = entity_pb2.Device()
        device.name = name
        device.id = did
        # Group devices into one Room message per location.
        if location not in rooms_dict:
            rooms_dict[location] = entity_pb2.Room()
            rooms_dict[location].location = location
        rooms_dict[location].devices.append(device)
    for location, room in rooms_dict.items():
        endlayer.rooms.append(room)
    entity.endlayer.MergeFrom(endlayer)
    return entity
def dfs(demand_idx, demand, resources, used, match, dfs_type, res):
    # One augmenting-path step of Kuhn's maximum bipartite matching:
    # try to seat *demand* on some compatible, not-yet-visited resource; if
    # the resource is taken, recursively try to re-seat its current owner.
    # match[resource_idx] holds the assigned demand index (-1 = free).
    # Mutually recursive with bipartite_graph_match (the compatibility test).
    # NOTE(review): the re-seat call passes the displaced demand's *index*
    # but the current demand *object* -- confirm this is intended.
    for resource_idx, resource in enumerate(resources):
        if bipartite_graph_match(demand, resource, dfs_type, res) \
                and not used[resource_idx]:
            used[resource_idx] = True
            if match[resource_idx] == -1 or \
                    dfs(match[resource_idx], demand, resources,
                        used, match, dfs_type, res):
                match[resource_idx] = demand_idx
                return True
    return False
def bipartite_graph_match(demands, resources, match_type, res):
    """Test whether *demands* fit into *resources* at level *match_type*.

    Container-grouping levels run Kuhn's maximum bipartite matching
    (augmenting paths via dfs()); leaf levels are plain capacity or name
    checks.  Successful id assignments (demand id -> resource id) are
    accumulated into *res*.  Returns True when every demand is matched.
    """
    if match_type == 'cloudlayer':
        dn = len(demands.datacenters)
        an = len(resources.datacenters)
        match_num = 0
        match = [-1 for x in range(an)]
        for demand_idx, demand in enumerate(demands.datacenters):
            used = [0 for x in range(an)]
            if dfs(demand_idx, demand, resources.datacenters,
                   used, match, 'datacenters', res):
                match_num += 1
        if match_num != dn:
            return False
        return True
    elif match_type == 'datacenters':
        dn = len(demands.cloudnodes)
        an = len(resources.cloudnodes)
        match_num = 0
        match = [-1 for x in range(an)]
        for demand_idx, demand in enumerate(demands.cloudnodes):
            used = [0 for x in range(an)]
            if dfs(demand_idx, demand, resources.cloudnodes,
                   used, match, 'cloudnodes', res):
                match_num += 1
        if match_num != dn:
            return False
        # BUGFIX: skip unmatched resource slots (di == -1); indexing with -1
        # wrongly mapped the *last* demand node.  Every other branch already
        # guards this.
        res.update({demands.cloudnodes[di].id: resources.cloudnodes[ai].id
                    for ai, di in enumerate(match) if di != -1})
        container_match = {}
        for ai, di in enumerate(match):
            if di == -1:  # BUGFIX: same -1 guard for the container mapping
                continue
            for container in demands.cloudnodes[di].containers:
                container_match[container.id] = resources.cloudnodes[ai].id
        res.update(container_match)
        return True
    elif match_type == 'networklayer':
        # netnodes part
        netnodes_dn = len(demands.netnodes)
        netnodes_an = len(resources.netnodes)
        netnodes_match_num = 0
        netnodes_match = [-1 for x in range(netnodes_an)]
        for demand_idx, demand in enumerate(demands.netnodes):
            netnodes_used = [0 for x in range(netnodes_an)]
            if dfs(demand_idx, demand, resources.netnodes,
                   netnodes_used, netnodes_match,
                   'netnodes', res):
                netnodes_match_num += 1
        if netnodes_match_num != netnodes_dn:
            return False
        # edgeservers part
        edgeservers_dn = len(demands.edgeservers)
        edgeservers_an = len(resources.edgeservers)
        edgeservers_match_num = 0
        edgeservers_match = [-1 for x in range(edgeservers_an)]
        for demand_idx, demand in enumerate(demands.edgeservers):
            edgeservers_used = [0 for x in range(edgeservers_an)]
            if dfs(demand_idx, demand, resources.edgeservers,
                   edgeservers_used, edgeservers_match,
                   'edgeservers', res):
                edgeservers_match_num += 1
        if edgeservers_match_num != edgeservers_dn:
            return False
        res.update({demands.netnodes[di].id: resources.netnodes[ai].id
                    for ai, di in enumerate(netnodes_match) if di != -1})
        res.update({demands.edgeservers[di].id: resources.edgeservers[ai].id
                    for ai, di in enumerate(edgeservers_match) if di != -1})
        container_match = {}
        for ai, di in enumerate(netnodes_match):
            if di == -1:
                continue
            for container in demands.netnodes[di].containers:
                container_match[container.id] = resources.netnodes[ai].id
        for ai, di in enumerate(edgeservers_match):
            if di == -1:
                continue
            for container in demands.edgeservers[di].containers:
                container_match[container.id] = resources.edgeservers[ai].id
        res.update(container_match)
        return True
    elif match_type == 'endlayer':
        dn = len(demands.rooms)
        an = len(resources.rooms)
        match_num = 0
        match = [-1 for x in range(an)]
        for demand_idx, demand in enumerate(demands.rooms):
            used = [0 for x in range(an)]
            if dfs(demand_idx, demand, resources.rooms,
                   used, match, 'rooms', res):
                match_num += 1
        if match_num != dn:
            return False
        return True
    elif match_type == 'rooms':
        # A demand room with a concrete location only matches that location;
        # 'null' acts as a wildcard.
        if demands.location != 'null' and \
                demands.location != resources.location:
            return False
        # devices part
        devices_dn = len(demands.devices)
        devices_an = len(resources.devices)
        devices_match_num = 0
        devices_match = [-1 for x in range(devices_an)]
        for demand_idx, demand in enumerate(demands.devices):
            devices_used = [0 for x in range(devices_an)]
            if dfs(demand_idx, demand, resources.devices,
                   devices_used, devices_match,
                   'devices', res):
                devices_match_num += 1
        if devices_match_num != devices_dn:
            return False
        # workers part
        workers_dn = len(demands.workers)
        workers_an = len(resources.workers)
        workers_match_num = 0
        workers_match = [-1 for x in range(workers_an)]
        for demand_idx, demand in enumerate(demands.workers):
            workers_used = [0 for x in range(workers_an)]
            if dfs(demand_idx, demand, resources.workers,
                   workers_used, workers_match,
                   'workers', res):
                workers_match_num += 1
        if workers_match_num != workers_dn:
            return False
        # applications part
        applications_dn = len(demands.applications)
        applications_an = len(resources.applications)
        applications_match_num = 0
        applications_match = [-1 for x in range(applications_an)]
        for demand_idx, demand in enumerate(demands.applications):
            applications_used = [0 for x in range(applications_an)]
            if dfs(demand_idx, demand, resources.applications,
                   applications_used, applications_match,
                   'applications', res):
                applications_match_num += 1
        if applications_match_num != applications_dn:
            return False
        res.update({demands.devices[di].id: resources.devices[ai].id
                    for ai, di in enumerate(devices_match) if di != -1})
        res.update({demands.workers[di].id: resources.workers[ai].id
                    for ai, di in enumerate(workers_match) if di != -1})
        # BUGFIX: the demand side must be indexed by di (the demand index),
        # not ai (the resource index) -- mirrors the devices/workers maps.
        res.update({demands.applications[di].id: resources.applications[ai].id
                    for ai, di in enumerate(applications_match) if di != -1})
        return True
    elif match_type in ['cloudnodes', 'netnodes', 'edgeservers']:
        # Capacity check: the summed container demands must fit on the node.
        demand_cpu = 0
        demand_mem = 0
        demand_sto = 0
        for container in demands.containers:
            demand_cpu += container.cpu
            demand_mem += container.memory
            demand_sto += container.store
        if demand_cpu > resources.cpu:
            return False
        if demand_mem > resources.memory:
            return False
        if demand_sto > resources.store:
            return False
        return True
    elif match_type in ['devices', 'workers', 'applications']:
        # Leaf match: exact name equality.
        if demands.name != resources.name:
            return False
        return True
    else:
        raise Exception(f'error match type: {match_type}')
def schedule(demands, resources, rtype='json'):
    """Try to place every demand layer onto the available resources.

    Returns the packed result (see pack()) on success, or None as soon as
    any layer cannot be scheduled.
    """
    res = {}
    layers = (('cloudlayer', demands.cloudlayer, resources.cloudlayer),
              ('networklayer', demands.networklayer, resources.networklayer),
              ('endlayer', demands.endlayer, resources.endlayer))
    for layer_name, demand_layer, resource_layer in layers:
        if not bipartite_graph_match(demand_layer, resource_layer,
                                     layer_name, res):
            print(layer_name + ' can not schedule.')
            return None
    return pack(demands, resources, res, rtype)
def search_for_type(entity, eid):
    """Return a human-readable type/name for the node with id *eid*.

    Walks the whole Entity tree (cloud -> network -> end layer) and returns
    on the first node whose .id equals eid: Datacenter/CloudNode/Container/
    NetNode/EdgeServer hits return the type name, while Device/Worker/
    Application hits return the node's own .name.  Returns None if nothing
    matches.
    """
    def lookup_App(entity, eid):
        # Search the three layers in order; first hit wins.
        res = lookup_CloudLayer(entity.cloudlayer, eid)
        if res:
            return res
        res = lookup_NetworkLayer(entity.networklayer, eid)
        if res:
            return res
        res = lookup_EndLayer(entity.endlayer, eid)
        return res
    def lookup_CloudLayer(entity, eid):
        res = None
        for d in entity.datacenters:
            res = lookup_Datacenter(d, eid)
            if res:
                break
        return res
    def lookup_Datacenter(entity, eid):
        if eid == entity.id:
            return 'Datacenter'
        res = None
        for c in entity.cloudnodes:
            res = lookup_CloudNode(c, eid)
            if res:
                break
        return res
    def lookup_CloudNode(entity, eid):
        if eid == entity.id:
            return 'CloudNode'
        res = None
        for c in entity.containers:
            res = lookup_Container(c, eid)
            if res:
                break
        return res
    def lookup_Container(entity, eid):
        if eid == entity.id:
            return 'Container'
        return None
    def lookup_NetworkLayer(entity, eid):
        # Net nodes first, then edge servers.
        res = None
        for n in entity.netnodes:
            res = lookup_Netnode(n, eid)
            if res:
                break
        if res:
            return res
        for e in entity.edgeservers:
            res = lookup_EdgeServer(e, eid)
            if res:
                break
        return res
    def lookup_Netnode(entity, eid):
        if eid == entity.id:
            return 'NetNode'
        res = None
        for c in entity.containers:
            res = lookup_Container(c, eid)
            if res:
                break
        return res
    def lookup_EdgeServer(entity, eid):
        if eid == entity.id:
            return 'EdgeServer'
        res = None
        for c in entity.containers:
            res = lookup_Container(c, eid)
            if res:
                break
        return res
    def lookup_EndLayer(entity, eid):
        res = None
        for r in entity.rooms:
            res = lookup_Room(r, eid)
            if res:
                break
        return res
    def lookup_Room(entity, eid):
        # Devices, then workers, then applications.
        res = None
        for d in entity.devices:
            res = lookup_Device(d, eid)
            if res:
                break
        if res:
            return res
        for w in entity.workers:
            res = lookup_Worker(w, eid)
            if res:
                break
        if res:
            return res
        for a in entity.applications:
            res = lookup_Application(a, eid)
            if res:
                break
        return res
    def lookup_Device(entity, eid):
        if eid == entity.id:
            return entity.name
        return None
    def lookup_Worker(entity, eid):
        # Falls through to an implicit None on a miss.
        if eid == entity.id:
            return entity.name
    def lookup_Application(entity, eid):
        # Falls through to an implicit None on a miss.
        if eid == entity.id:
            return entity.name
    return lookup_App(entity, eid)
def pack(demands, resources, res, rtype):
    """Serialize the match result *res* (demand id -> resource id).

    'json' produces a JSON list annotated with the entity types on both
    sides; 'proto' is currently disabled; any other value raises.
    """
    if rtype == 'json':
        return json.dumps([
            {'did': did,
             'rid': rid,
             'dtype': search_for_type(demands, did),
             'rtype': search_for_type(resources, rid)}
            for did, rid in res.items()
        ])
    if rtype == 'proto':
        raise Exception(f'not support now')
        # Unreachable: protobuf packing kept for future re-enablement.
        result = result_pb2.Result()
        result.appname = demands.name
        for did, sid in res.items():
            mth = result_pb2.Match()
            mth.demandid = did
            mth.resourceid = sid
            result.matchs.append(mth)
        return result
    raise Exception('error rtype: {}'.format(rtype))
def writeToTable(database, table, result):
    """Persist every demand/resource match of *result* into *table*."""
    app_name = result.appname
    for match in result.matchs:
        # One row per match, all tagged with the owning application name.
        database.insert(table, {
            'appName': app_name,
            'demandid': match.demandid,
            'resourceid': match.resourceid,
        })
def print_proto(proto):
    """Dump *proto* to stdout in protobuf text format (UTF-8)."""
    print(text_format.MessageToString(proto, as_utf8=True))
def save_proto(proto, filename):
    """Write *proto* in protobuf text format (UTF-8) to *filename*."""
    text = text_format.MessageToString(proto, as_utf8=True)
    with open(filename, 'w') as out:
        out.write(text)
if __name__ == '__main__':
    # Guard: this module is meant to be imported, never executed directly.
    raise Exception('You can not run this script directly.')
    # NOTE: everything below is unreachable because of the raise above; it
    # documents the intended end-to-end flow (`receive`, `queryResources`,
    # `config` and `database` are defined elsewhere in this module/package).
    database_manager = database.DatabaseManager(
        config.DATABASE.remote_ip, config.DATABASE.remote_usr,
        config.DATABASE.remote_pwd, config.DATABASE.database_usr,
        config.DATABASE.database_pwd, config.DATABASE.database_name)
    # Parse the demand description file into a demands proto.
    with open('./demand.hrm') as f:
        jsonObj = json.loads(f.read())
    demands = receive(jsonObj)
    resources = queryResources(database_manager)
    save_proto(demands, 'demands.prototxt')
    save_proto(resources, 'resources.prototxt')
    print_proto(demands)
    print_proto(resources)
    result = schedule(demands, resources, rtype='json')
    writeToTable(database_manager, 'matchtable', result)
    # NOTE(review): explicit __del__ call is unusual; a context manager or
    # close() method would be the conventional cleanup.
    database_manager.__del__()
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import hashlib
import random
class ScrapyPages:
    """Crawler for Baidu News pages using Selenium/Firefox.

    Page text is stored under `contentDir`, extracted links under
    `atagsDir`, and a filename -> URL mapping is appended to `mappingFile`.
    """

    # Directory where page text content is saved.
    contentDir = "D:\\pages\\"
    # Directory where each page's outgoing links are saved.
    atagsDir = "D:\\atags\\"
    # File recording the filename -> URL mapping.
    mappingFile = "D:\\mapping"
    # Links already visited.
    handledLinks = []
    # Links discovered but not yet visited.
    unhandledLinks = []

    def openPage(self, url):
        """Open *url* in a new Firefox driver and return the driver."""
        driver = webdriver.Firefox()
        driver.get(url)
        return driver

    def urlToFilename(self, url):
        """Derive a stable filename (SHA-256 of the URL) and record the mapping.

        Fix: the mapping file is now opened in append text mode — the
        original 'wb+' truncated the file on every call and raised
        TypeError when given str data — and is closed via `with`.
        """
        digest = hashlib.sha256()
        digest.update(url.encode('utf-8'))
        filename = digest.hexdigest()
        with open(self.mappingFile, 'a', encoding='utf-8') as mapping:
            mapping.write(filename + ', ' + url + '\n')
        return filename

    def savePageContent(self, filename, content):
        """Save the page's text *content* (a str) under *filename*."""
        # Fix: text mode — the original 'wb' mode raised TypeError for str.
        with open(self.contentDir + filename, 'w', encoding='utf-8') as tfile:
            tfile.write(content)
            print('writing file-', filename)

    def handleAnchor(self, filename, anchors):
        """Extract hrefs from *anchors*, queue unseen ones, and save them."""
        hrefs = self.extractHrefFromAnchor(anchors)
        self.addHrefsToLinks(hrefs)
        self.saveAnchor(filename, hrefs)

    def addHrefsToLinks(self, hrefs):
        """Queue every href that has not been seen before."""
        for href in hrefs:
            if href not in self.handledLinks and href not in self.unhandledLinks:
                self.unhandledLinks += [href]

    def saveAnchor(self, filename, hrefs):
        """Persist the page's outgoing links, one per line."""
        with open(self.atagsDir + filename, 'w', encoding='utf-8') as afile:
            afile.write('\n'.join(hrefs))

    def extractHrefFromAnchor(self, anchors):
        """Collect unique hrefs (as str) pointing at news.baidu.com."""
        # Imported here so the fix does not touch the module import block.
        from selenium.common.exceptions import StaleElementReferenceException

        hrefs = []
        for anchor in anchors:
            try:
                # Fix: keep hrefs as str — the original encoded to bytes and
                # then called bytes.find(str), a TypeError under Python 3.
                href = anchor.get_attribute('href')
                if href is None or 'http://news.baidu.com' not in href or href in hrefs:
                    continue
                hrefs.append(href)
            except AttributeError:
                pass
            except StaleElementReferenceException:
                # Fix: the original referenced `selenium.common...` without
                # binding the `selenium` name, raising NameError when hit.
                pass
        return hrefs

    def scrapy(self, initURL):
        """Crawl starting at *initURL*, visiting queued links in random order."""
        driver = webdriver.Firefox()
        self.unhandledLinks += [initURL]
        while len(self.unhandledLinks) != 0:
            # Pick a random pending link to spread the crawl.
            pos = random.randint(0, len(self.unhandledLinks) - 1)
            print('-pos-', pos)
            link = self.unhandledLinks[pos]
            del self.unhandledLinks[pos]
            # Fix: record the link as handled so it cannot be re-queued by
            # addHrefsToLinks (the original never populated handledLinks).
            self.handledLinks.append(link)
            driver.get(link)
            # Fix: use the text directly — the original wrapped encoded bytes
            # in str(), saving a "b'...'" repr instead of the page text.
            content = driver.find_element_by_xpath(".//html").text
            if len(content) > 512:
                atags = driver.find_elements_by_tag_name("a")
                filename = self.urlToFilename(link)
                self.handleAnchor(filename, atags)
                self.savePageContent(filename, link + '\n' + content)
        driver.close()
# Entry point: crawl starting from a Baidu News search-results page.
pageScrapy = ScrapyPages()
# NOTE(review): the query string is GB2312 percent-encoded; requires network
# access and a local Firefox/geckodriver to run.
pageScrapy.scrapy('http://news.baidu.com/ns?word=%B1%BB%B5%F7%B2%E9&tn=news&from=news&cl=2&rn=20&ct=1')
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import shlex
from dataclasses import dataclass
from pants.backend.shell.subsystems.shell_setup import ShellSetup
from pants.backend.shell.target_types import (
RunShellCommandWorkdirField,
ShellCommandCommandField,
ShellCommandExecutionDependenciesField,
ShellCommandExtraEnvVarsField,
ShellCommandLogOutputField,
ShellCommandOutputDirectoriesField,
ShellCommandOutputFilesField,
ShellCommandOutputRootDirField,
ShellCommandRunnableDependenciesField,
ShellCommandSourcesField,
ShellCommandTarget,
ShellCommandTimeoutField,
ShellCommandToolsField,
ShellCommandWorkdirField,
)
from pants.backend.shell.util_rules.builtin import BASH_BUILTIN_COMMANDS
from pants.core.goals.run import RunFieldSet, RunInSandboxBehavior, RunRequest
from pants.core.target_types import FileSourceField
from pants.core.util_rules.adhoc_process_support import (
AdhocProcessRequest,
AdhocProcessResult,
ExtraSandboxContents,
MergeExtraSandboxContents,
ResolvedExecutionDependencies,
ResolveExecutionDependenciesRequest,
parse_relative_directory,
)
from pants.core.util_rules.adhoc_process_support import rules as adhoc_process_support_rules
from pants.core.util_rules.environments import EnvironmentNameRequest
from pants.core.util_rules.system_binaries import BashBinary, BinaryShims, BinaryShimsRequest
from pants.engine.environment import EnvironmentName
from pants.engine.fs import Digest, Snapshot
from pants.engine.internals.native_engine import EMPTY_DIGEST
from pants.engine.process import Process
from pants.engine.rules import Get, collect_rules, rule
from pants.engine.target import (
GeneratedSources,
GenerateSourcesRequest,
Target,
WrappedTarget,
WrappedTargetRequest,
)
from pants.engine.unions import UnionRule
from pants.util.docutil import bin_name
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
logger = logging.getLogger(__name__)
class GenerateFilesFromShellCommandRequest(GenerateSourcesRequest):
    """Codegen request: materialize a `shell_command`'s outputs as `file` sources."""

    input = ShellCommandSourcesField
    output = FileSourceField
@dataclass(frozen=True)
class ShellCommandProcessFromTargetRequest:
    """Request to derive an adhoc process from a `shell_command`-style target."""

    target: Target
async def _prepare_process_request_from_target(
    shell_command: Target,
    shell_setup: ShellSetup.EnvironmentAware,
    bash: BashBinary,
) -> AdhocProcessRequest:
    """Translate a `shell_command` target into an `AdhocProcessRequest`.

    Resolves execution and runnable dependencies, shims the declared
    `tools` onto PATH, merges extra sandbox contents, and assembles the
    final bash invocation.

    Raises:
        ValueError: when the target's `command` field is empty.
    """
    description = f"the `{shell_command.alias}` at `{shell_command.address}`"
    working_directory = shell_command[ShellCommandWorkdirField].value
    assert working_directory is not None, "working_directory should always be a string"
    command = shell_command[ShellCommandCommandField].value
    if not command:
        # Fix: the message previously lacked the closing backtick around the
        # description.
        raise ValueError(f"Missing `command` line in `{description}`.")
    execution_environment = await Get(
        ResolvedExecutionDependencies,
        ResolveExecutionDependenciesRequest(
            shell_command.address,
            shell_command.get(ShellCommandExecutionDependenciesField).value,
            shell_command.get(ShellCommandRunnableDependenciesField).value,
        ),
    )
    dependencies_digest = execution_environment.digest
    output_files = shell_command.get(ShellCommandOutputFilesField).value or ()
    output_directories = shell_command.get(ShellCommandOutputDirectoriesField).value or ()
    # Resolve the `tools` field into binary shims; bash builtins need no shim.
    tools = shell_command.get(ShellCommandToolsField, default_raw_value=()).value or ()
    tools = tuple(tool for tool in tools if tool not in BASH_BUILTIN_COMMANDS)
    resolved_tools = await Get(
        BinaryShims,
        BinaryShimsRequest.for_binaries(
            *tools,
            rationale=f"execute {description}",
            search_path=shell_setup.executable_search_path,
        ),
    )
    runnable_dependencies = execution_environment.runnable_dependencies
    # Sandbox extras: always the tool shims; runnable deps only when present.
    extra_sandbox_contents = []
    extra_sandbox_contents.append(
        ExtraSandboxContents(
            EMPTY_DIGEST,
            resolved_tools.path_component,
            FrozenDict(resolved_tools.immutable_input_digests or {}),
            FrozenDict(),
            FrozenDict(),
        )
    )
    if runnable_dependencies:
        extra_sandbox_contents.append(
            ExtraSandboxContents(
                EMPTY_DIGEST,
                f"{{chroot}}/{runnable_dependencies.path_component}",
                runnable_dependencies.immutable_input_digests,
                runnable_dependencies.append_only_caches,
                runnable_dependencies.extra_env,
            )
        )
    merged_extras = await Get(
        ExtraSandboxContents, MergeExtraSandboxContents(tuple(extra_sandbox_contents))
    )
    extra_env = dict(merged_extras.extra_env)
    if merged_extras.path:
        extra_env["PATH"] = merged_extras.path
    return AdhocProcessRequest(
        description=description,
        address=shell_command.address,
        working_directory=working_directory,
        root_output_directory=shell_command.get(ShellCommandOutputRootDirField).value or "",
        argv=(bash.path, "-c", command, shell_command.address.spec),
        timeout=shell_command.get(ShellCommandTimeoutField).value,
        input_digest=dependencies_digest,
        output_files=output_files,
        output_directories=output_directories,
        fetch_env_vars=shell_command.get(ShellCommandExtraEnvVarsField).value or (),
        append_only_caches=FrozenDict.frozen(merged_extras.append_only_caches),
        supplied_env_var_values=FrozenDict(extra_env),
        immutable_input_digests=FrozenDict.frozen(merged_extras.immutable_input_digests),
        log_on_process_errors=_LOG_ON_PROCESS_ERRORS,
        log_output=shell_command[ShellCommandLogOutputField].value,
        capture_stdout_file=None,
        capture_stderr_file=None,
    )
@rule
async def run_adhoc_result_from_target(
    request: ShellCommandProcessFromTargetRequest,
    shell_setup: ShellSetup.EnvironmentAware,
    bash: BashBinary,
) -> AdhocProcessResult:
    """Build the adhoc process for the target and execute it."""
    scpr = await _prepare_process_request_from_target(request.target, shell_setup, bash)
    return await Get(AdhocProcessResult, AdhocProcessRequest, scpr)
@rule
async def prepare_process_request_from_target(
    request: ShellCommandProcessFromTargetRequest,
    shell_setup: ShellSetup.EnvironmentAware,
    bash: BashBinary,
) -> Process:
    """Expose the prepared request as a plain `Process` (without running it)."""
    # Needed to support `experimental_test_shell_command`
    scpr = await _prepare_process_request_from_target(request.target, shell_setup, bash)
    return await Get(Process, AdhocProcessRequest, scpr)
class RunShellCommand(RunFieldSet):
    """`run` goal support for shell commands (interactive, never sandboxed)."""

    required_fields = (
        ShellCommandCommandField,
        RunShellCommandWorkdirField,
    )
    run_in_sandbox_behavior = RunInSandboxBehavior.NOT_SUPPORTED
@rule(desc="Running shell command", level=LogLevel.DEBUG)
async def shell_command_in_sandbox(
    request: GenerateFilesFromShellCommandRequest,
) -> GeneratedSources:
    """Run the shell command in its target environment; capture outputs as sources."""
    shell_command = request.protocol_target
    # Resolve which execution environment the target is configured for.
    environment_name = await Get(
        EnvironmentName, EnvironmentNameRequest, EnvironmentNameRequest.from_target(shell_command)
    )
    adhoc_result = await Get(
        AdhocProcessResult,
        {
            environment_name: EnvironmentName,
            ShellCommandProcessFromTargetRequest(
                shell_command
            ): ShellCommandProcessFromTargetRequest,
        },
    )
    # Snapshot the adjusted output digest so it can be returned as sources.
    output = await Get(Snapshot, Digest, adhoc_result.adjusted_digest)
    return GeneratedSources(output)
async def _interactive_shell_command(
    shell_command: Target,
    bash: BashBinary,
) -> Process:
    """Build an interactive (non-sandboxed) `Process` for running a shell command.

    Raises:
        ValueError: when the working directory is None or `command` is empty.
    """
    description = f"the `{shell_command.alias}` at `{shell_command.address}`"
    working_directory = shell_command[RunShellCommandWorkdirField].value
    if working_directory is None:
        # Fix: message previously read "must be not be".
        raise ValueError("Working directory must not be `None` for interactive processes.")
    command = shell_command[ShellCommandCommandField].value
    if not command:
        # Fix: the message previously lacked the closing backtick around the
        # description.
        raise ValueError(f"Missing `command` line in `{description}`.")
    command_env = {
        "CHROOT": "{chroot}",
    }
    execution_environment = await Get(
        ResolvedExecutionDependencies,
        ResolveExecutionDependenciesRequest(
            shell_command.address,
            shell_command.get(ShellCommandExecutionDependenciesField).value,
            shell_command.get(ShellCommandRunnableDependenciesField).value,
        ),
    )
    dependencies_digest = execution_environment.digest
    relpath = parse_relative_directory(working_directory, shell_command.address)
    # Prefix a `cd` only when the working directory differs from the root.
    boot_script = f"cd {shlex.quote(relpath)}; " if relpath != "" else ""
    return Process(
        argv=(
            bash.path,
            "-c",
            boot_script + command,
            f"{bin_name()} run {shell_command.address.spec} --",
        ),
        description=f"Running {description}",
        env=command_env,
        input_digest=dependencies_digest,
    )
@rule
async def run_shell_command_request(bash: BashBinary, shell_command: RunShellCommand) -> RunRequest:
    """Translate a `RunShellCommand` field set into a `RunRequest` for `run`."""
    wrapped_tgt = await Get(
        WrappedTarget,
        WrappedTargetRequest(shell_command.address, description_of_origin="<infallible>"),
    )
    process = await _interactive_shell_command(wrapped_tgt.target, bash)
    # Re-package the prepared process pieces into the run-goal request shape.
    return RunRequest(
        digest=process.input_digest,
        args=process.argv,
        extra_env=process.env,
    )
def rules():
    """Return all rules this backend module contributes to the engine."""
    return [
        *collect_rules(),
        *adhoc_process_support_rules(),
        UnionRule(GenerateSourcesRequest, GenerateFilesFromShellCommandRequest),
        *RunShellCommand.rules(),
    ]
# Extra guidance appended to the log for known process exit codes.
# 127 is bash's "command not found".
_LOG_ON_PROCESS_ERRORS = FrozenDict(
    {
        127: (
            f"`{ShellCommandTarget.alias}` requires the names of any external commands used by this "
            f"shell command to be specified in the `{ShellCommandToolsField.alias}` field. If "
            f"`bash` cannot find a tool, add it to the `{ShellCommandToolsField.alias}` field."
        )
    }
)
|
import sys

# Print the raw argv, then the argument list, then the product of all
# numeric arguments. Fix: converted from Python 2 `print` statements (a
# SyntaxError under Python 3) to the function form; output is unchanged.
print(sys.argv)
args = sys.argv[1:]
print(list(args))
product = 1
for token in args:
    # Each argument must be an integer literal; int() raises otherwise.
    product = int(token) * product
print(str(product))
class Memory:
    """Storage figures for a phone: internal, secondary and RAM capacity."""

    def __init__(self, internal, secondary, ram):
        self.internal = internal
        self.secondary = secondary
        self.ram = ram

    def getinfo(self):
        """Print the three capacities on one formatted multi-line message."""
        print(
            "INTERNAL :{} \n secondary :{} \n and ram :{}\n".format(
                self.internal, self.secondary, self.ram
            )
        )
class Properties:
    """Description of a mobile phone; memory details come from a Memory object."""

    def __init__(self, model, brand, price, memory):
        self.model = model
        self.brand = brand
        self.price = price
        self.memory = memory

    def mobile_info(self):
        """Print the basic specs, then delegate to the memory breakdown."""
        print("model Name:", self.model)
        print("brand :", self.brand)
        print("price :", self.price, end='\n')
        self.memory.getinfo()
# Demo: build a Memory spec and attach it to a phone description.
m=Memory("62 gb","128gb","6gb")
# NOTE(review): arguments look swapped — "samsung" lands in `model` and
# "m31" in `brand`; confirm the intended order.
p=Properties("samsung","m31",18000,m)
p.mobile_info()
import argparse
import numpy as np
import torch
import torchvision.transforms as transforms
from torch.utils.tensorboard import SummaryWriter
from load_data import generate_data
from trainer import Trainer
# Command Line Arguments
parser = argparse.ArgumentParser()
parser.add_argument("--train-epoch", default=50, type=int, help='Epoch of train Classification')
parser.add_argument("--ae-epoch", default=50, type=int, help='Epoch of train Auto Encoder')
parser.add_argument("--load", action='store_true', help='To load model weight')
parser.add_argument("--save", action='store_true', help='To save model weight')
parser.add_argument("--ae-lr", default=0.0005, type=float, help='Learing Rate of AutoEncoder')
parser.add_argument("--classify-lr", default=0.0005, type=float, help='Learing Rate of Classifier')
parser.add_argument("--seed", default=42, type=int, help='Seed Value')
args = parser.parse_args()
# Get from calc_img_stats() with Imbalanced Data
# NOTE(review): per-channel mean/std; 32x32 crops below suggest CIFAR-sized
# RGB images — confirm against load_data.generate_data.
transform_mean = [0.4920, 0.4825, 0.4500]
transform_std = [0.2039, 0.2009, 0.2026]
# Preprocessing for Training: augmentation + normalization
train_transform = transforms.Compose(
    [
        transforms.RandomHorizontalFlip(),
        transforms.RandomRotation(degrees=20),
        transforms.RandomResizedCrop(32),
        transforms.ToTensor(),
        transforms.Normalize(transform_mean, transform_std)
    ])
# Preprocessing for Evaluate: normalization only, no augmentation
evaluate_transform = transforms.Compose(
    [
        transforms.ToTensor(),
        transforms.Normalize(transform_mean, transform_std)
    ])
# Set Seed for reproducibility (numpy, torch CPU and CUDA)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
torch.cuda.manual_seed(args.seed)
# Log: hyper-parameters encoded in the TensorBoard run name
writer = SummaryWriter(log_dir=f'logs/ensemble-{args.ae_epoch}-{args.train_epoch}-{args.ae_lr}-{args.classify_lr}')
# Define Imbalance Ratio: classes 2, 4 and 9 are kept at 50%
train_imbalance_class_ratio = np.array([1., 1., .5, 1., .5, 1., 1., 1., 1., .5])
# Generate Datasets
train_loader, train1_loader, train2_loader, val_loader, test_loader = generate_data(train_imbalance_class_ratio, train_transform, evaluate_transform, over_sample_batch_size=128, under_sample_batch_size=64, val_batch_size=64, test_batch_size=4)
trainer = Trainer(train0_loader=train_loader, train1_loader=train1_loader, train2_loader=train2_loader, val_loader=val_loader, ae_epoch=args.ae_epoch, train_epoch=args.train_epoch, writer=writer)
if args.load:
    trainer.load_model()
# Train
trainer.train()
# Model Save
if args.save:
    trainer.save_model()
# Test
trainer.test(test_loader=test_loader)
|
from mesa import Model, Agent
from mesa.time import RandomActivation
from mesa.space import SingleGrid
from mesa.datacollection import DataCollector
class SchellingAgent(Agent):
    """A Schelling segregation agent occupying one grid cell."""

    def __init__(self, pos, model, agent_type):
        """Create a new Schelling agent.

        Args:
            pos: Agent initial (x, y) location; also used as unique_id.
            model: The model instance the agent belongs to.
            agent_type: Group indicator (minority = 1, majority = 0).
        """
        super().__init__(pos, model)
        self.pos = pos
        self.type = agent_type

    def step(self):
        """Move to an empty cell when too few neighbours share our type."""
        # Count same-type neighbours via the grid's neighbour iterator.
        similar = sum(
            1
            for neighbor in self.model.grid.neighbor_iter(self.pos)
            if neighbor.type == self.type
        )
        if similar < self.model.homophily:
            # Unhappy: relocate to a random empty cell.
            self.model.grid.move_to_empty(self)
        else:
            # Happy: stay put and bump the model-level happiness counter.
            self.model.happy += 1
class Schelling(Model):
    '''Schelling segregation model: agents on a grid relocate until happy.

    width/height/torus are the native Grid properties; `minority_pc` and
    `homophily` are custom parameters of this model.
    '''
    def __init__(self, height = 20, width = 20, density = 0.8,
        minority_pc = 0.2, homophily = 3):
        self.height = height
        self.width = width
        self.density = density
        self.minority_pc = minority_pc
        self.homophily = homophily
        # Random-order activation of all agents each step (mesa/time.py).
        self.schedule = RandomActivation(self)
        # SingleGrid enforces at most one agent per cell; torus wraps edges.
        self.grid = SingleGrid(width, height, torus = True)
        # No happy agents before the first step.
        self.happy = 0
        # Model-level reporter 'happy' plus per-agent x/y (for testing).
        self.datacollector = DataCollector(
            {'happy': 'happy'}, # Model-level count of happy agents
            # Agent-level reporters: each lambda extracts one coordinate.
            {'x': lambda a: a.pos[0], 'y': lambda a: a.pos[1]},
        )
        # Set up agents: iterate every cell (coord_iter yields contents, x, y);
        # fill a cell with probability `density`, and make the agent a
        # minority (type 1) with probability `minority_pc`.
        for cell in self.grid.coord_iter():
            x = cell[1]
            y = cell[2]
            if self.random.random() < self.density:
                if self.random.random() < self.minority_pc:
                    agent_type = 1
                else:
                    agent_type = 0
                agent = SchellingAgent((x, y), self, agent_type)
                # position_agent places an agent when it is first created.
                self.grid.position_agent(agent, (x, y))
                # The scheduler calls each agent's step() once per model step.
                self.schedule.add(agent)
        self.running = True
        # Record the initial state before any step runs.
        self.datacollector.collect(self)

    def step(self):
        '''
        Run one step of the model. If all agents are happy, halt the model.
        '''
        self.happy = 0 # Reset counter of happy agents
        self.schedule.step()
        # collect data
        self.datacollector.collect(self)
        # Halt once every scheduled agent reported happy this step.
        if self.happy == self.schedule.get_agent_count():
            self.running = False
# comments on 2021/04/26 22:36
|
#coding:gb2312
# Extends exercise 3.4: announce who cannot attend, replace that person
# in the guest list, then print a fresh round of dinner invitations.
Friends=['cby','sch','fjy','lq']
message=", "+"Would You like To Have Dinner With Me"+"?"
# The last guest cancels...
print(Friends[-1].title()+", "+"Can't Have Dinner With Me"+".")
# ...and is replaced by a new invitee.
Friends[-1]='ljy'
# Invite everyone on the updated list (same output as the four prints).
for friend in Friends:
    print(friend.title()+message)
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from itertools import chain
from typing import Any
from pants.backend.docker.goals.package_image import DockerPackageFieldSet
from pants.backend.docker.subsystems import dockerfile_parser
from pants.backend.docker.subsystems.docker_options import DockerOptions
from pants.backend.docker.target_types import DockerImageTags, DockerImageTagsRequest
from pants.backend.docker.util_rules import (
docker_binary,
docker_build_args,
docker_build_context,
docker_build_env,
dockerfile,
)
from pants.backend.docker.util_rules.docker_build_context import (
DockerBuildContext,
DockerBuildContextRequest,
)
from pants.backend.helm.dependency_inference.deployment import (
FirstPartyHelmDeploymentMapping,
FirstPartyHelmDeploymentMappingRequest,
)
from pants.backend.helm.subsystems import post_renderer
from pants.backend.helm.subsystems.post_renderer import SetupHelmPostRenderer
from pants.backend.helm.target_types import HelmDeploymentFieldSet
from pants.engine.addresses import Address, Addresses
from pants.engine.engine_aware import EngineAwareParameter
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import Targets, WrappedTarget, WrappedTargetRequest
from pants.engine.unions import UnionMembership
from pants.util.logging import LogLevel
from pants.util.strutil import bullet_list, softwrap
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class HelmDeploymentPostRendererRequest(EngineAwareParameter):
    """Request to build a post-renderer for one `helm_deployment` target."""

    field_set: HelmDeploymentFieldSet

    def debug_hint(self) -> str | None:
        # Shown in engine debug output to identify this parameter.
        return self.field_set.address.spec

    def metadata(self) -> dict[str, Any] | None:
        return {"address": self.field_set.address.spec}
async def _obtain_custom_image_tags(
    address: Address, union_membership: UnionMembership
) -> DockerImageTags:
    """Collect extra image tags contributed by `DockerImageTagsRequest` plugins.

    Only request classes whose `is_applicable` accepts the wrapped target are
    queried; all contributed tags are flattened into one DockerImageTags.
    """
    wrapped_target = await Get(
        WrappedTarget, WrappedTargetRequest(address, description_of_origin="<infallible>")
    )
    image_tags_requests = union_membership.get(DockerImageTagsRequest)
    found_image_tags = await MultiGet(
        Get(DockerImageTags, DockerImageTagsRequest, image_tags_request_cls(wrapped_target.target))
        for image_tags_request_cls in image_tags_requests
        if image_tags_request_cls.is_applicable(wrapped_target.target)
    )
    return DockerImageTags(chain.from_iterable(found_image_tags))
@rule(desc="Prepare Helm deployment post-renderer", level=LogLevel.DEBUG)
async def prepare_post_renderer_for_helm_deployment(
    request: HelmDeploymentPostRendererRequest,
    union_membership: UnionMembership,
    docker_options: DockerOptions,
) -> SetupHelmPostRenderer:
    """Build a post-renderer that rewrites first-party Docker image references
    in the rendered Helm manifests to their resolved image refs."""
    mapping = await Get(
        FirstPartyHelmDeploymentMapping, FirstPartyHelmDeploymentMappingRequest(request.field_set)
    )
    docker_addresses = [addr for _, addr in mapping.indexed_docker_addresses.values()]
    logger.debug(
        softwrap(
            f"""
            Resolving Docker image references for targets:

            {bullet_list([addr.spec for addr in docker_addresses])}
            """
        )
    )
    # One build context per docker target; needed for ref interpolation.
    docker_contexts = await MultiGet(
        Get(
            DockerBuildContext,
            DockerBuildContextRequest(
                address=addr,
                build_upstream_images=False,
            ),
        )
        for addr in docker_addresses
    )
    docker_targets = await Get(Targets, Addresses(docker_addresses))
    field_sets = [DockerPackageFieldSet.create(tgt) for tgt in docker_targets]

    async def resolve_docker_image_ref(address: Address, context: DockerBuildContext) -> str | None:
        # Resolve one address to a concrete image ref, or None if it is not
        # packaged as a Docker image.
        docker_field_sets = [fs for fs in field_sets if fs.address == address]
        if not docker_field_sets:
            return None
        additional_image_tags = await _obtain_custom_image_tags(address, union_membership)
        docker_field_set = docker_field_sets[0]
        image_refs = docker_field_set.image_refs(
            default_repository=docker_options.default_repository,
            registries=docker_options.registries(),
            interpolation_context=context.interpolation_context,
            additional_tags=tuple(additional_image_tags),
        )
        # Choose first non-latest image reference found, or fallback to 'latest'.
        found_ref: str | None = None
        fallback_ref: str | None = None
        for registry in image_refs:
            for tag in registry.tags:
                ref = tag.full_name
                if ref.endswith(":latest"):
                    fallback_ref = ref
                else:
                    found_ref = ref
                    break
            # NOTE(review): the `break` only exits the inner loop, so a later
            # registry can overwrite `found_ref` — confirm whether "first
            # found across all registries" is the intended behavior.
        resolved_ref = found_ref or fallback_ref
        if resolved_ref:
            logger.debug(f"Resolved Docker image ref '{resolved_ref}' for address {address}.")
        else:
            logger.warning(f"Could not resolve a valid image ref for Docker target {address}.")
        return resolved_ref

    docker_addr_ref_mapping = {
        addr: await resolve_docker_image_ref(addr, ctx)
        for addr, ctx in zip(docker_addresses, docker_contexts)
    }

    def find_replacement(value: tuple[str, Address]) -> str | None:
        # Replace (image-spec, address) pairs with the resolved ref.
        _, addr = value
        return docker_addr_ref_mapping.get(addr)

    replacements = mapping.indexed_docker_addresses.transform_values(find_replacement)
    return SetupHelmPostRenderer(
        replacements, description_of_origin=f"the `helm_deployment` {request.field_set.address}"
    )
def rules():
    """Return all rules this module and its util-rule dependencies contribute."""
    return [
        *collect_rules(),
        *docker_binary.rules(),
        *docker_build_args.rules(),
        *docker_build_context.rules(),
        *docker_build_env.rules(),
        *dockerfile.rules(),
        *dockerfile_parser.rules(),
        *post_renderer.rules(),
    ]
|
# Tutorial script: Boolean values, comparisons and truthiness.
False #data type with two values, True or False, called boolean
True
print(1 == 2) #False
print(1 < 2) #True
print(1 > 2) #False
print(1 >= 1) #True
print(1 <= 2) #True
print(1 != 1) #False
a = 1
b = 2
print(a == b) #False
name = "Chad"
print(name == "Chad") #True
print(name != "George") #True b/c this is a true statement
print(name == "chad") #False: string comparison is case-sensitive
if 1 < 3:
    print("this will print") #b/c the above conditional is True
if 1 > 3:
    print("this will NOT print") #b/c the conditional is False.
#Falsy Expressions: values that evaluate to False in a boolean context
None #Falsy
False #Falsy
0 #Falsy
"" #Falsy
[] #Falsy
{} #Falsy
#Exercises
print( 1== 3)
print( 4<= 4)
print("a" == "a")
print(10 > 11)
print("b" > "c") #False: strings compare lexicographically and 'b' < 'c'
my_number = 5
print(my_number < 2)
print(my_number > 2)
print(my_number < 10)
name = "Bob"
if name == "Bob":
    print("YES these strings are the same!")
name = "Dobby"
if name != "Bob":
    print("NO these strings are not the same!")
import codecs
import http.client
import json
import ssl
import threading
from multiprocessing import Pool
from urllib.request import urlopen
"""Script to print the last comments of Hacker News users before they are banned.
Reads from the Hacker News API: https://github.com/HackerNews/API)
This is my first time writing python. Feedback is welcome.
"""
# Global variables
# Stream reader that decodes urlopen's byte responses as UTF-8.
_DECODER = codecs.getreader("utf-8")
# To prevent SSL failure after too many calls.
# NOTE(review): this disables certificate verification entirely.
_CONTEXT = ssl._create_unverified_context()
# Serializes README.md writes across workers.
_LOCK = threading.Lock()
# _BAN_THRESHOLD_SECONDS = 3 days
# Bigger values increases false positives.
# Examples: Recently unbanned users or https://hacker-news.firebaseio.com/v0/item/12053367.json
# Smaller value misses the case for https://hacker-news.firebaseio.com/v0/item/10551997.json
_BAN_THRESHOLD_SECONDS = 60 * 60 * 24 * 3
# The moderator account whose comments are scanned for ban announcements.
_MOD_NAME = "dang"
# Increasing the number of threads beyond 8 on my machine causes Http errors.
_MAX_NUM_THREADS = 8
DEBUG = False
def __get_user(id):
    """Fetch a Hacker News user record for *id* from the Firebase API."""
    url = "https://hacker-news.firebaseio.com/v0/user/" + id + ".json"
    return json.load(_DECODER(urlopen(url, context=_CONTEXT)))
def __get_comment(id):
    """Fetch a Hacker News item (comment/story) for numeric *id*."""
    url = "https://hacker-news.firebaseio.com/v0/item/" + str(id) + ".json"
    return json.load(_DECODER(urlopen(url, context=_CONTEXT)))
def __is_comment(data):
    """Return True when *data* is a well-formed HN comment item."""
    if data is None:
        return False
    if "type" not in data or data["type"] != "comment":
        return False
    # A usable comment must carry author, text and timestamp.
    return all(key in data for key in ("by", "text", "time"))
def __is_possible_ban(data):
    """Checks if mod wrote a comment to announce a ban.

    Args:
        data: json item from Hacker News.
    Returns:
        True when *data* is a comment whose text contains the word
        "banned" and which has a parent item to inspect.
    """
    return (__is_comment(data)
            and "banned" in data["text"]
            and "parent" in data)
def __is_user_banned(user_comment, mod_comment):
    """Checks if the user is banned.

    Checks the timestamp of the last submitted item from the user.
    If the time is _BAN_THRESHOLD_SECONDS after the mod's comment then the
    user is still active.

    Args:
        user_comment: json item representing the user's comment.
        mod_comment: json item representing the mod's comment.
    Returns:
        Returns true if the user is banned.
    """
    if __is_comment(user_comment) is False:
        return False
    # Network calls: fetch the user's profile, then their newest submission.
    user_data = __get_user(user_comment["by"])
    recent_comment = __get_comment(user_data["submitted"][0])
    if DEBUG:
        print("Time delta: " + str(mod_comment["time"] - recent_comment["time"]))
    # Banned heuristic: no activity later than threshold after the mod comment.
    return mod_comment["time"] > recent_comment["time"] - _BAN_THRESHOLD_SECONDS
def __write_to_file(comment):
    """Append the banned user's comment and a profile link to README.md.

    Writes are serialized by _LOCK so pool workers do not interleave.
    """
    import html  # local import: decodes HTML entities in API comment text

    with _LOCK:
        with open("README.md", 'a') as out:
            # remove escape characters: the HN API returns HTML-escaped text.
            # Fix: the original three .replace() calls had identical search
            # and replacement strings (mangled entity literals) and were
            # no-ops; html.unescape decodes all entities in one pass.
            comment_text = html.unescape(comment["text"])
            out.write("*\"" + comment_text + "\"*" + "\n")
            out.write(" [--" + comment["by"] + "](https://news.ycombinator.com/user?id=" + comment["by"] + ")\n\n\n")
def __process_item(id):
    """Checks if the parent user is banned and writes their last comment to file.

    Determines if the mod's comment is a ban announcement.
    If yes then check the user to see if they are banned.
    Write the user's last comment to file if they are banned.
    Last comment is defined as the parent comment of the mod's comment,
    which might not be the last submitted item from the user.

    Args:
        id: Id for Hacker News API.
    Returns:
        1 when a banned user's comment was written, else 0.
    """
    if DEBUG:
        print("Processing: " + str(id))
    mod_comment = __get_comment(id)
    if __is_possible_ban(mod_comment):
        # The suspected banned user is the author of the parent comment.
        parent_comment = __get_comment(mod_comment["parent"])
        if __is_user_banned(parent_comment, mod_comment):
            __write_to_file(parent_comment)
            return 1
    return 0
def run_job():
    """Main function of the script.

    Fetches the ids of everything the mod submitted, fans the fetch/check
    work out over a thread pool, and reports how many banned users were
    detected.
    """
    submit_ids = __get_user(_MOD_NAME)["submitted"]
    # Uncomment for test data.
    #submit_ids = [10551997, 7867166, 12041458, 12059888, 11631519]
    pool = Pool(_MAX_NUM_THREADS)
    results = pool.map(__process_item, submit_ids)
    pool.close()
    pool.join()
    print("Finished")
    print("Total banned users detected: " + str(sum(results)))
# Kick off the crawl when the module is executed.
run_job()
|
def gcdTwoNumbers(a, b):
    """Return the greatest common divisor of two non-negative integers.

    Fixed: the previous halving recursion (``gcd(a, b/2)``) drifted into
    float arithmetic under Python 3 and gave wrong answers or failed to
    terminate for coprime inputs such as (9, 7). The Euclidean algorithm
    is used instead; it agrees with the old code on all inputs the old
    code handled correctly.

    Args:
        a: first integer.
        b: second integer.
    """
    if b == 0:
        return a  # base case: gcd(a, 0) == a
    return gcdTwoNumbers(b, a % b)
# Exercise the helper on a few sample pairs.
result1 = gcdTwoNumbers(2, 1)
result2 = gcdTwoNumbers(8, 4)
result3 = gcdTwoNumbers(160, 8)
print("The results are:", result1, result2, result3)
from pathlib import Path
import pandas as pd
# Collect per-run metrics from code/res*.csv files ("metric,value" rows)
# into one summary table indexed by run name, then dump it to summary.csv.
base_path = Path('code')  # fixed: the old '.\\code\\' literal only worked on Windows
summary = pd.DataFrame(columns=['dice',
                                'precision',
                                'recall',
                                'true_positives',
                                'true_negatives'])
for result_file in base_path.glob('res*'):
    # Run name: drop the 'res' prefix and the extension (portable replacement
    # for the old str(i)[8:-4], which hard-coded the Windows path length).
    name = result_file.stem[3:]
    with open(result_file) as f:
        f.readline()  # skip the header line
        for line in f:
            fields = line.strip().split(',')
            summary.loc[name, fields[0]] = fields[1]
            #summary.loc[name, 'std'] = line[1]
        # removed redundant f.close(): the with-block closes the file
print(summary)
summary.to_csv('summary.csv', sep=';', encoding='utf-8')
#!/usr/bin/python
import sys
import re
import urllib
import urlparse
# Parse NCSA/Apache-style access-log lines from stdin and print the request
# path (with its query string, when present) for every well-formed line.
for line in sys.stdin:
    # Groups: ip, ident, user, [timestamp], method, request target, status, size.
    regexp = '(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) (.+) (.+) \[(.+)\] \"([A-Z]+) (.+) HTTP/.\..\" (.+) (.+)'
    # Percent-decode the whole line before matching.
    match = re.match(regexp, urllib.unquote(line.strip()))
    if match:
        # NOTE(review): 'type' and 'file' shadow Python builtins; left as-is.
        ip, client, username, time, type, file, status, size = match.groups()
        ## match = re.match('.+\.\w+' ,file)
        ## if match:
        # print re.sub('^(https?://.+)?/+', '/', file.split('?')[0]).lower()
        url = urlparse.urlparse(file)
        urlPath = url.path
        urlQuery = url.query
        # Re-attach the query string when the target had one.
        if urlQuery: urlPath += '?' + urlQuery
        print urlPath
|
from collections import defaultdict, deque, Counter
import sys
from decimal import *
from heapq import heapify, heappop, heappush
import math
import random
import string
from copy import deepcopy
from itertools import combinations, permutations, product
from operator import mul, itemgetter
from functools import reduce, lru_cache
from bisect import bisect_left, bisect_right
def input():
    # Fast line reader; intentionally shadows the builtin (contest idiom).
    return sys.stdin.readline().rstrip()
def getN():
    # Read a single integer line.
    return int(input())
def getNM():
    # Read space-separated integers as a map object.
    return map(int, input().split())
def getList():
    # Read space-separated integers as a list.
    return list(map(int, input().split()))
def getArray(intn):
    # Read `intn` lines with one integer each.
    return [int(input()) for i in range(intn)]
sys.setrecursionlimit(1000000000)  # effectively unlimited recursion depth
mod = 10 ** 9 + 7  # common contest modulus
INF = float('inf')
# 4-neighborhood offsets for grid walks.
dx = [1, 0, -1, 0]
dy = [0, 1, 0, -1]
#############
# Main Code #
#############
# Base conversion: write n in base `digit` (here 12 in base 7 -> "15").
n = 12
digits = []  # renamed from `list`, which shadowed the builtin
digit = 7
i = 0
while n != 0:
    # Prepend the least significant digit.
    digits.insert(0, str(n % digit))
    n //= digit
    i += 1
print(''.join(digits))
# Parse a binary-digit string into an integer.
print(int('010100', 2))
# Largest power of `base` that does not exceed n.
def max_pow(n, base):
    """Return (base**k, k) with the largest k such that base**k <= n,
    or None when n == 0."""
    if n == 0:
        return None
    power, exponent = 1, 0
    while power * base <= n:
        power *= base
        exponent += 1
    return power, exponent
# (16, 4)
print(max_pow(27, 2))
# Divide out a factor as many times as possible.
def spliter(n, split):
    """Repeatedly divide n by `split` while divisible; return the reduced
    value together with the number of divisions performed."""
    remaining, times = n, 0
    while remaining % split == 0:
        if remaining == 0:
            break  # zero is divisible forever; stop immediately
        remaining //= split
        times += 1
    return remaining, times
# (3, 4)
print(spliter(48, 2))
# Write an integer in base -2 ("negabinary").
def minus_digit(rev_n):
    """Print '0' for zero (returning None); otherwise return rev_n
    written in base -2 as a digit string."""
    if rev_n == 0:
        print('0')
        return
    bits = []
    remainder = rev_n
    position = 0
    while remainder != 0:
        # Extract the bit at `position` from the magnitude.
        bit = (abs(remainder) % 2 ** (position + 1)) // 2 ** position
        bits.append(1 if bit else 0)
        # Remove that bit's contribution in base -2.
        remainder -= bit * ((-2) ** position)
        position += 1
    return ''.join(map(str, reversed(bits)))
# 11100
print(minus_digit(12))
# How far can the running sum 1 + 2 + 3 + ... climb before exceeding ny?
def factime(ny):
    """Return the largest count whose triangular sum 1+2+...+count <= ny.

    Fixed: removed an unreachable `break` that followed the `return`, and
    renamed the local `sum`, which shadowed the builtin.
    """
    cnt = 1
    total = 0
    while True:
        if total + cnt > ny:
            return cnt - 1
        total += cnt
        cnt += 1
# prints 1: the running sum exceeds 2 as soon as cnt reaches 2
print(factime(2))
|
import re
class Option:
    '''
    One command-line option plus the parameter values collected for it.
    '''
    def __init__(self, need, limit, name=None, sign=None):
        '''
        Store the required count, the optional cap, and the option's
        long name / short sign (both optional).
        '''
        self.need = need
        self.limit = limit
        self.name = name
        self.sign = sign
        self.param = []
    def add_param(self, item):
        '''
        Append a parameter if capacity allows; return True on success.
        '''
        count = len(self.param)
        # Fixed idiom: compare to None with `is`, and drop the redundant
        # `limit != None and` (short-circuit makes it unnecessary).
        if self.need > count and (self.limit is None or count < self.limit):
            self.param.append(item)
            return True
        return False
class Operation:
    '''
    Parses a command line into positional and optional parameters.
    '''
    def __init__(self, need, limit):
        '''
        Create the default (positional) bucket with the given limits.
        '''
        positional = Option(need, limit)
        self.options = [positional]
        self.signs = {'-': positional}
        self.names = {}
    def add_option(self, sign=None, name=None, limit=0, need=0):
        '''
        Register an optional argument reachable by short sign and/or long name.
        '''
        opt = Option(need, limit, name, sign)
        if name is not None:
            self.names[name] = opt
        if sign is not None:
            self.signs[sign] = opt
        self.options.append(opt)
    def parse(self, arguments):
        '''
        Walk the argument list, routing each value to the active option.
        '''
        current = self.signs['-']
        for token in arguments:
            if re.match(r"-[a-zA-Z]", token):
                current = self.signs[token[1:]]
            elif re.match(r"--[a-z][-a-z]*", token):
                current = self.names[token[2:]]
            elif not current.add_param(token):
                # When the active option is full, overflow falls back to
                # the positional bucket.
                current = self.signs['-']
                current.add_param(token)
|
from django.contrib.auth import authenticate
from django.shortcuts import render
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.response import Response
from rest_framework import viewsets, generics, status, parsers, response, decorators
from rest_framework.decorators import api_view
from rest_framework.views import APIView
from .serializers import *
from .models import RespondentProfile, StaffMemberProfile, CarouselDisplay, Kudos
class UserCreate(viewsets.ModelViewSet):
    # Open (unauthenticated) endpoint for creating and listing user accounts,
    # newest first.
    authentication_classes = ()
    permission_classes = ()
    queryset = YaridAccount.objects.order_by('-id')
    serializer_class = UserSerializer
class LoginView(APIView):
    """Token login: POST a username/password pair, receive the auth token."""
    permission_classes = ()
    def post(self, request):
        """Authenticate the posted credentials and return the user's token."""
        user = authenticate(
            username=request.data.get("username"),
            password=request.data.get("password"),
        )
        if not user:
            return Response({"error": "Wrong Credentials"}, status=status.HTTP_400_BAD_REQUEST)
        # return Response({"token": "logged in"})
        return Response({"token": user.auth_token.key, "username": user.username})
class Respondents(viewsets.ModelViewSet):
    """CRUD for respondent profiles; all write operations are superuser-only."""
    permission_classes = (IsAuthenticated,)
    queryset = RespondentProfile.objects.order_by('-id')
    serializer_class = RespondentProfileSerializers
    def create(self, request, *args, **kwargs):
        # Fixed: the denial message previously said "delete" for a create.
        if not request.user.is_superuser:
            raise PermissionDenied(
                request.user.username + " You can not create this studentProfile. Only Marks_Recorders are creators")
        return super().create(request, *args, **kwargs)
    def destroy(self, request, *args, **kwargs):
        if not request.user.is_superuser:
            raise PermissionDenied(
                request.user.username + " You can not delete this studentProfile. Only Marks_Recorders are deleters")
        return super().destroy(request, *args, **kwargs)
    def update(self, request, *args, **kwargs):
        if not request.user.is_superuser:
            raise PermissionDenied(
                request.user.username + " You can not update this studentProfile. Only Marks_Recorders are updaters")
        return super().update(request, *args, **kwargs)
class SurveyTopics(viewsets.ModelViewSet):
    """CRUD for survey topics; only the posting user may change or delete one.

    NOTE: this view class shares the name of the SurveyTopics model. The
    ``queryset`` line below still binds the model (it is evaluated while the
    class body runs, before the class name is bound), but inside methods the
    bare name would resolve to this view class — so object lookups use
    ``self.get_object()``.
    """
    permission_classes = (IsAuthenticated,)
    queryset = SurveyTopics.objects.order_by('-id')
    serializer_class = SurveyTopicsSerializers
    def create(self, request, *args, **kwargs):
        if not request.user.is_superuser:
            raise PermissionDenied(
                request.user.username + " You can not create this course, you dont have the rights to")
        return super().create(request, *args, **kwargs)
    def destroy(self, request, *args, **kwargs):
        # Fixed: ``SurveyTopics.objects.get(...)`` hit the view class at call
        # time (AttributeError); get_object() fetches the model row and 404s.
        course = self.get_object()
        if not request.user == course.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not delete this course. Only Marks_Recorders are deleters")
        return super().destroy(request, *args, **kwargs)
    def update(self, request, *args, **kwargs):
        course = self.get_object()
        if not request.user == course.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not update this course. Only Marks_Recorders are updaters")
        return super().update(request, *args, **kwargs)
class Kudos(viewsets.ModelViewSet):
    """CRUD for kudos; only the posting user may change or delete an entry.

    NOTE: this view class shadows the Kudos model imported above. The
    ``queryset`` line still binds the model (evaluated during class-body
    execution), but inside methods the bare name resolves to this view —
    so object lookups use ``self.get_object()``.
    """
    permission_classes = (IsAuthenticated,)
    queryset = Kudos.objects.order_by('-id')
    serializer_class = KudosSerializers
    def create(self, request, *args, **kwargs):
        if not request.user.is_superuser:
            raise PermissionDenied(
                request.user.username + " You can not create this course, you dont have the rights to")
        return super().create(request, *args, **kwargs)
    def destroy(self, request, *args, **kwargs):
        # Fixed: ``Kudos.objects.get(...)`` hit the view class at call time
        # (AttributeError); get_object() fetches the model row and 404s.
        marks = self.get_object()
        if not request.user == marks.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not delete the Marks. Only Marks_Recorders are deleters")
        return super().destroy(request, *args, **kwargs)
    def update(self, request, *args, **kwargs):
        marks = self.get_object()
        if not request.user == marks.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not update the Marks. Only Marks_Recorders are updaters")
        return super().update(request, *args, **kwargs)
class QuestionPostsView(viewsets.ModelViewSet):
    """CRUD for question posts; only the original poster may modify one."""
    # permission_classes = (IsAuthenticated, IsAdminUser) # used for allowing admins only
    queryset = QuestionPosts.objects.order_by('-id')
    serializer_class = NewsPostsSerializers
    def create(self, request, *args, **kwargs):
        if not request.user.is_authenticated:
            raise PermissionDenied(
                request.user.username + " You were not authenticated ")
        return super().create(request, *args, **kwargs)
    def destroy(self, request, *args, **kwargs):
        # self.get_object() (instead of a raw Model.objects.get) returns a
        # proper 404 for missing pks and matches the sibling viewsets.
        newspost = self.get_object()
        if not request.user == newspost.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not delete this newspost. Only creaters are deleters")
        return super().destroy(request, *args, **kwargs)
    def update(self, request, *args, **kwargs):
        newspost = self.get_object()
        if not request.user == newspost.poster_name:
            raise PermissionDenied(
                request.user.username + " You can not update this newspost. Only creaters are updaters")
        return super().update(request, *args, **kwargs)
class CarouselDisplay(viewsets.ModelViewSet):
    # NOTE(review): this view shadows the imported CarouselDisplay model.
    # The queryset line below still binds the model because it is evaluated
    # while the class body runs; no method here uses the bare name afterwards.
    permission_classes = (IsAuthenticated,)
    queryset = CarouselDisplay.objects.order_by('-id')
    serializer_class = CarouselDisplaySerializers
# this is to help in posting base64 images
# class ProfileViewSet(viewsets.ModelViewSet):
# serializer_class = ProfileSerializer
# queryset = Profile.objects.all()
#
# @decorators.action(
# detail=True,
# methods=['PUT'],
# serializer_class=ProfilePicSerializer,
# parser_classes=[parsers.MultiPartParser],
# )
# def pic(self, request, pk):
# obj = self.get_object()
# serializer = self.serializer_class(obj, data=request.data,
# partial=True)
# if serializer.is_valid():
# serializer.save()
# return response.Response(serializer.data)
# return response.Response(serializer.errors,
# status.HTTP_400_BAD_REQUEST)
@api_view(['GET', ])
def getallstudentprofiles(request):
    """Return every respondent profile.

    Fixed: the first positional argument of an @api_view function is the
    request; it was previously misnamed ``self``.
    """
    students = RespondentProfile.objects.all()
    serializer = RespondentProfileSerializers(students, many=True)
    return Response({"all students": serializer.data})
@api_view(['GET', 'POST'])
def obtain_auth_token(request):
    """Backfill DRF tokens for every existing user.

    Fixed: the first positional argument is the request, previously misnamed
    ``self``.  NOTE(review): ``User`` and ``Token`` are not in this module's
    visible imports — presumably pulled in via ``from .serializers import *``;
    confirm.
    """
    for user in User.objects.all():
        Token.objects.get_or_create(user=user)
    return Response({"all students": "created tokens for the already existant users"})
# class SysUserCreation(generics.CreateAPIView):
# authentication_classes = ()
# permission_classes = ()
# serializer_class = CreatingSysUsers
|
import requests
# Endpoint of the local PMS device-management service's application-update call.
url = "http://127.0.0.1:5000/pms/dm/v1.0/application/update"
|
from string import ascii_lowercase
def main():
    """Decode the ROT-2 cipher text and print the decoded hint and answer."""
    # Shift characters by 2 positions
    cipher = ('g fmnc wms bgblr rpylqjyrc gr zw fylb. rfyrq ufyr amknsrcpq ypc '
              'dmp. bmgle gr gl zw fylb gq glcddgagclr ylb rfyr\'q ufw rfgq rcv'
              'r gq qm jmle. sqgle qrpgle.kyicrpylq() gq pcamkkclbcb. lmu ynnjw'
              ' ml rfc spj.')
    url = 'map'
    # Build a table mapping each letter two places forward (with wrap-around).
    shifted = ascii_lowercase[2:] + ascii_lowercase[:2]
    rot2 = str.maketrans(ascii_lowercase, shifted)
    print(cipher.translate(rot2))
    print("Answer: " + url.translate(rot2))
if __name__ == '__main__':
    main()
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import pytest
from pants.backend.python.subsystems.setup import PythonSetup
from pants.core.goals.generate_lockfiles import UnrecognizedResolveNamesError
from pants.testutil.option_util import create_subsystem
def test_resolves_to_interpreter_constraints_validation() -> None:
    """Known resolve names map through; unknown names raise."""
    def make(ics: dict[str, list[str]]) -> dict[str, tuple[str, ...]]:
        subsystem = create_subsystem(
            PythonSetup,
            resolves={"a": "a.lock"},
            resolves_to_interpreter_constraints=ics,
        )
        return subsystem.resolves_to_interpreter_constraints
    assert make({"a": ["==3.7.*"]}) == {"a": ("==3.7.*",)}
    with pytest.raises(UnrecognizedResolveNamesError):
        make({"fake": []})
def test_resolves_to_constraints_file() -> None:
    """Per-resolve constraints files resolve, including the __default__ key."""
    def make(mapping: dict[str, str]) -> dict[str, str]:
        subsystem = create_subsystem(
            PythonSetup,
            resolves={"a": "a.lock", "tool1": "tool1.lock", "tool2": "tool2.lock"},
            resolves_to_constraints_file=mapping,
        )
        return subsystem.resolves_to_constraints_file()
    assert make({"a": "c1.txt", "tool1": "c2.txt"}) == {"a": "c1.txt", "tool1": "c2.txt"}
    # __default__ fans out to every resolve unless explicitly overridden.
    assert make({"__default__": "c.txt", "tool2": "override.txt"}) == {
        "a": "c.txt",
        "tool1": "c.txt",
        "tool2": "override.txt",
    }
    with pytest.raises(UnrecognizedResolveNamesError):
        make({"fake": "c.txt"})
def test_resolves_to_no_binary_and_only_binary() -> None:
    """--no-binary and --only-binary share resolution and canonicalization."""
    def make(mapping: dict[str, list[str]]) -> dict[str, list[str]]:
        subsystem = create_subsystem(
            PythonSetup,
            resolves={"a": "a.lock", "tool1": "tool1.lock", "tool2": "tool2.lock"},
            resolves_to_no_binary=mapping,
            resolves_to_only_binary=mapping,
        )
        only_binary = subsystem.resolves_to_only_binary()
        no_binary = subsystem.resolves_to_no_binary()
        # Both options go through identical machinery, so they must agree.
        assert only_binary == no_binary
        return only_binary
    assert make({"a": ["p1"], "tool1": ["p2"]}) == {
        "a": ["p1"],
        "tool1": ["p2"],
    }
    # __default__ fans out to every resolve unless overridden.
    assert make({"__default__": ["p1"], "tool2": ["override"]}) == {
        "a": ["p1"],
        "tool1": ["p1"],
        "tool2": ["override"],
    }
    # Test that we don't fail on :all:.
    assert make({"a": [":all:"], "tool1": [":all:"]}) == {
        "a": [":all:"],
        "tool1": [":all:"],
    }
    # Test name canonicalization.
    assert make({"a": ["foo.BAR"], "tool1": ["Baz_Qux"]}) == {
        "a": ["foo-bar"],
        "tool1": ["baz-qux"],
    }
    with pytest.raises(UnrecognizedResolveNamesError):
        make({"fake": []})
|
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# Load the white-wine dataset (semicolon-separated).
wine = pd.read_csv("./data/winequality-white.csv", sep=';', encoding='utf-8')
# Split into the quality label and the feature columns.
y = wine["quality"]
# x = wine.iloc[:, 0:4]
x = wine.drop(["quality"], axis=1)
# print(x)
# Re-bin the quality score into 3 classes: 0 (<=4), 1 (5..7), 2 (>=8).
# (Replaces the manual append loop with a comprehension.)
y = [0 if v <= 4 else 1 if v <= 7 else 2 for v in list(y)]
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2, random_state=66)
# Train a random forest on the 80% split.
model = RandomForestClassifier(n_estimators=50, criterion='entropy', class_weight='balanced_subsample', warm_start=True, oob_score=True, min_samples_leaf=1, random_state=50)
model.fit(x_train, y_train)
aaa = model.score(x_test, y_test)
# Evaluate on the held-out 20%.
y_pred = model.predict(x_test)
print(classification_report(y_test, y_pred))
print("정답률=", accuracy_score(y_test, y_pred))
print(aaa)
from mlxtend.plotting import plot_decision_regions
from mlxtend.classifier import LogisticRegression
import pandas
import numpy as np
from sklearn import linear_model
from sklearn.model_selection import cross_val_score as cvsc
set_sizes = [100,500,1000,5000,10000,50000,100000,500000,1000000,5000000,10000000,50000000,100000000]
column_names = ["id","vendor_id","pickup_datetime","dropoff_datetime","passenger_count","pickup_longitude","pickup_latitude"
                ,"dropoff_longitude","dropoff_latitude","store_and_fwd_flag","trip_duration","Short_or_long"]
"""Read in dataset"""
i = 8
dataframe = pandas.read_csv("C:\\Users\\bboyd\\Downloads\\train\\train.csv",
                            sep=',',header=0,names=column_names,index_col=0,usecols=[0,1,2,3,4,5,6,7,8,10,11] ,nrows = set_sizes[i])
# NOTE(review): dropna returns a new frame; the result was (and still is) discarded.
dataframe.dropna(axis=0, how='all')
Y = dataframe["Short_or_long"]
X = dataframe[["passenger_count","pickup_longitude","pickup_latitude","dropoff_longitude","dropoff_latitude"]]
X = X.get_values()
print(type(Y))
Y = Y.get_values()
# Fixed: the X split previously used fractions of the *requested* set size
# (which can exceed the actual row count), making X_test overlap X_train and
# misalign with Y_test. X and Y now split at the same 70% boundary, and the
# local `len` (which shadowed the builtin) is renamed.
n_samples = Y.size
train_len = int(n_samples * .7)
X_train = X[:train_len]
X_test = X[train_len:]
Y_train = Y[:train_len]
test_len = int(n_samples - train_len)
Y_test = Y[train_len:]
print("len ", n_samples)
print("Y train ", Y_train.size)
# Row counts via shape (the old size/6 used the wrong divisor: X has 5 columns).
print("X train ", X_train.shape[0])
print("Y test ", Y_test.size)
print("X test ", X_test.shape[0])
print(type(Y_test))
lr = LogisticRegression(eta=0.05,
                        l2_lambda=0.0,
                        epochs=50,
                        minibatches=1,  # for Gradient Descent
                        random_seed=1,
                        print_progress=3)
lr.fit(X_train, Y_train)
print("...")
# NOTE(review): predictions are made on the *training* rows but compared to
# the leading Y_test labels below, exactly as in the original — confirm intent.
pre = lr.predict(X_train)
correct = 0
total = 0
i2 = 0
while i2 < Y_test.size:
    if Y_test[i2] == pre[i2]:
        correct += 1
    i2 += 1
    total += 1
print("--------")
print(pre[0])
print(Y_test[0])
acc = correct / total
print("ACC ", acc)
#print("score," , lr.score(X_test,Y_test))
logreg = linear_model.LogisticRegression(C=1e5)
logreg.fit(X_train, Y_train)
cv = cvsc(logreg, X, Y, cv=10)
# Fixed: cross_val_score returns one score per fold (10 values), not per-row
# predictions; the old element-by-element comparison against Y_test raised
# IndexError. Report the fold scores and their mean instead.
print("--------")
print("cv mean ", cv.mean())
print(cvsc(logreg, X, Y, cv=10))
|
import logging
from db_objects import Story, User, Task
from db_session import session
logging.basicConfig()
logger = logging.getLogger("db_query")
logger.setLevel(logging.DEBUG)
def _story1_user3_tasks():
    """Return the query for tasks in 'Story 001' owned by 'User 003'."""
    return (session.query(Task).join(Story).join(User)
            .filter(Story.story_title == 'Story 001')
            .filter(User.user_name == 'User 003'))
def _log_task(task):
    """Log one task's fields at info level."""
    logger.info('Task in Story 001 and owned by User 003 is:')
    logger.info('\tid = ' + str(task.id))
    logger.info('\ttask_title = ' + task.task_title)
    logger.info('\tstory_id = ' + str(task.story_id))
    logger.info('\tuser_id = ' + str(task.user_id))
# Query one task (the query/log code was triplicated; now factored out).
task = _story1_user3_tasks().one()
_log_task(task)
# Delete this task and query again
session.delete(task)
session.commit()
num = _story1_user3_tasks().count()
logger.info(str(num) + " task queried")
# Add the task back and query again
session.add(Task(id=3, task_title='Task 003', story_id=1, user_id=3))
session.commit()
task = _story1_user3_tasks().one()
_log_task(task)
|
from fnp.baseline.task2.utils import *
import pandas as pd
import argparse
from sklearn.model_selection import train_test_split
def write_file(file_name, data):
    """Write tagged sentences to `file_name` in a CoNLL-U-like layout:
    one "index<TAB>token<TAB>_<TAB>tag<TAB>_ x6" row per token, sentences
    separated by blank lines."""
    with open(file_name, "w") as f:
        for sent in data:
            for idx, (token, tag) in enumerate(sent, start=1):
                row = [str(idx), token, "_", tag] + ["_"] * 6
                f.write("\t".join(row) + "\n")
            f.write("\n")
def merge_series(hometags):
    """Fill single '_' gaps flanked by the same C or E tag, in place.

    A token tagged '_' whose immediate neighbours are both 'C' (or both
    'E') is retagged to match them, joining the two spans.
    """
    for doc in hometags:
        for idx in range(1, len(doc) - 1):
            token, tag = doc[idx]
            if tag != "_":
                continue
            prev_tag, next_tag = doc[idx - 1][1], doc[idx + 1][1]
            if prev_tag == next_tag and prev_tag in ("C", "E"):
                doc[idx] = (token, prev_tag)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--inrepo', type=str, default="./fnp2020-fincausal-task2.csv", help= 'input repo')
    parser.add_argument('--train_out', type=str, default="./fnp2020-fincausal-task2.train.conllu",
                        help='train output filename')
    parser.add_argument('--test_out', type=str, default="./fnp2020-fincausal-task2.test.conllu",
                        help='test output filename')
    parser.add_argument('--mergeseries', dest="merge_series", action='store_true',
                        help='merge empty tokens between two tagged token')
    parser.set_defaults(merge_series=False)
    args = parser.parse_args()
    df = pd.read_csv(args.inrepo, delimiter=';', header=0)
    # Build one dict per row from the sentence / cause / effect columns.
    lodict_ = []
    for rows in df.itertuples():
        list_ = [rows[2], rows[3], rows[4]]
        map1 = ['sentence', 'cause', 'effect']
        dict_ = s2dict(list_, map1)
        lodict_.append(dict_)
    print('transformation example: ', lodict_[1])
    # Tag tokens: cause spans become 'C', effect spans become 'E'.
    map_ = [('cause', 'C'), ('effect', 'E')]
    hometags = make_causal_input(lodict_, map_)
    if args.merge_series:
        merge_series(hometags)
    # 80/20 train/test split with a fixed seed for reproducibility.
    size = 0.2
    seed = 42
    train, test = train_test_split(hometags, test_size=size, random_state=seed)
    write_file(args.train_out, train)
    write_file(args.test_out, test)
|
# Quick demo of common list operations and membership testing.
l = list(range(1, 11))
len(l)  # length lookup; result intentionally unused
l.append(11)
cpy = l.copy()       # snapshot including the appended 11
count = l.count(11)  # == 1
l.clear()            # l is empty from here on
sliced = l[1:4]      # [] — slicing the now-empty list
for item in l:       # prints nothing: l was just cleared
    print(item)
if 11 in l:          # False for the same reason
    print("Found 11")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
remotail.py
~~~~~~~~~~~
Tail multiple remote files on a terminal window
:copyright: (c) 2013 by Abhinav Singh.
:license: BSD, see LICENSE for more details.
"""
VERSION = (0, 1, 1)
__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:])
__description__ = 'Tail multiple remote files on a terminal window'
__author__ = 'Abhinav Singh'
__author_email__ = 'mailsforabhinav@gmail.com'
__homepage__ = 'https://github.com/abhinavsingh/remotail'
__license__ = 'BSD'
import argparse
import urlparse
import getpass
import multiprocessing
import Queue
import paramiko
import select
import socket
import urwid
import logging
logging.basicConfig(level=logging.INFO, filename="/tmp/remotail.log")
logger = logging.getLogger('remotail')
remotail = None
class Container(urwid.Columns):
    # Columns wrapper that cycles focus with right/tab (forward, wrapping)
    # and left (backward, wrapping); every other key is passed through.
    def keypress(self, size, key):
        key = super(Container, self).keypress(size, key)
        if key in ('right', 'tab'):
            self.focus_position = self.focus_position + 1 if self.focus_position < len(self.contents) - 1 else 0
        elif key == 'left':
            self.focus_position = self.focus_position - 1 if self.focus_position > 0 else len(self.contents) - 1
        else:
            return key
class CommandLine(urwid.Edit):
    # Footer edit box: 'enter' executes the typed command and clears the
    # line; every other key behaves as normal editing.
    _allowed_cmds = ['enable', 'disable']
    def keypress(self, size, key):
        key = super(CommandLine, self).keypress(size, key)
        if key == 'enter':
            self._execute(self.get_edit_text())
            self.set_edit_text('')
        else:
            return key
    def _execute(self, input):
        # Supported commands: "enable <filepath-url>" / "disable <alias>";
        # they are dispatched to the module-global Remotail instance.
        args = input.split()
        if args[0] in self._allowed_cmds:
            if args[0] == 'enable':
                remotail.enable(args[1])
            elif args[0] == 'disable':
                remotail.disable(args[1])
        else:
            logger.error('%s command not found' % args[0])
# urwid display palettes: (attribute name, foreground, background) triples.
PALETTES = {
    'default': [
        ('outer-title', 'white,bold', 'dark blue',),
        ('outer-header', 'white', 'dark blue',),
        ('outer-footer','black,bold', 'dark cyan',),
        ('outer-footer-text', 'black,bold', 'dark cyan',),
        ('inner-title', 'black,bold', 'dark green',),
        ('inner-header', 'black', 'dark green',),
    ],
}
class UI(object):
    """Console UI for showing captured logs.
    -------------------frame-------------------------
    | header |
    | ---------------columns-------------- |
    | | ----------frame---------- | |
    | | | header | | |
    | | | ---listbox--- | | |
    | | | | | | | |
    | body | | body | | | .... | |
    | | | | | | | |
    | | | ------------- | | |
    | | | footer | | |
    | | ------------------------- | |
    | ------------------------------------ |
    | footer |
    -------------------------------------------------
    """
    palette = PALETTES['default']
    header_text = [('outer-title', 'Remotail v%s' % __version__,),]
    footer_text = [('outer-footer-text', '> '),]
    # alias -> (inner frame, column options) as stored in self.columns.contents
    boxes = dict()
    def __init__(self):
        # Outer frame: title header, tailed-file columns, command-line footer.
        self.columns = Container([])
        self.header = urwid.AttrMap(urwid.Text(self.header_text, align='center'), 'outer-header')
        self.footer = urwid.AttrMap(CommandLine(self.footer_text), 'outer-footer')
        self.frame = urwid.Frame(self.columns, header=self.header, footer=self.footer)
        # Start with the command line focused.
        self.frame.set_focus('footer')
        self.loop = urwid.MainLoop(self.frame, self.palette)
    def add_column(self, alias):
        # One column per tailed file: a titled frame around a list box.
        header = urwid.AttrMap(urwid.Text([('inner-title', alias,),]), 'inner-header')
        listbox = urwid.ListBox(urwid.SimpleListWalker([]))
        self.boxes[alias] = (urwid.Frame(listbox, header=header), self.columns.options())
        self.columns.contents.append(self.boxes[alias])
    def del_column(self, alias):
        # Remove the column widget and forget its bookkeeping entry.
        self.columns.contents.remove(self.boxes[alias])
        del self.boxes[alias]
class Channel(object):
    # Context manager wrapping an SSH session to the host in `filepath`.
    # NOTE: on connection failure __enter__ RETURNS the exception object
    # instead of raising, so callers must isinstance-check the yielded
    # value (see Tail.run).
    def __init__(self, filepath):
        self.filepath = filepath  # dict produced by Remotail.filepath_to_dict
        self.connected = False
    def __enter__(self):
        try:
            self.client = paramiko.SSHClient()
            self.client.load_system_host_keys()
            # Warn (rather than fail) on unknown host keys.
            self.client.set_missing_host_key_policy(paramiko.WarningPolicy())
            self.client.connect(self.filepath['host'], self.filepath['port'], self.filepath['username'], self.filepath['password'])
            self.transport = self.client.get_transport()
            self.channel = self.transport.open_session()
            self.connected = True
            return self.channel
        except Exception as e:
            return e
    def __exit__(self, type, value, traceback):
        # Only tear down what was successfully set up.
        if self.connected:
            self.client.close()
            self.channel.close()
# Queue message types: raw tail output vs. status/error notifications.
TAIL_MSG_TYPE_DATA = 1
TAIL_MSG_TYPE_NOTIFY = 2
class Tail(multiprocessing.Process):
    # Worker process that ssh-tails one remote file into the shared queue.
    def __init__(self, filepath, queue):
        super(Tail, self).__init__()
        # Parse the alias://user:pass@host:port/path URL once, up front.
        self.filepath = Remotail.filepath_to_dict(filepath)
        self.queue = queue
    def run(self):
        with Channel(self.filepath) as channel:
            # Channel.__enter__ yields the exception object on connect failure.
            if isinstance(channel, Exception):
                self.put(str(channel), TAIL_MSG_TYPE_NOTIFY)
            else:
                self.put('connected', TAIL_MSG_TYPE_NOTIFY)
                channel.exec_command('tail -f %s' % self.filepath['path'])
                try:
                    while True:
                        if channel.exit_status_ready():
                            self.put('channel exit status ready', TAIL_MSG_TYPE_NOTIFY)
                            break
                        # Block until the remote side has data for us.
                        r, w, e = select.select([channel], [], [])
                        if channel in r:
                            try:
                                data = channel.recv(1024)
                                if len(data) == 0:
                                    # Zero-byte read: the channel closed.
                                    self.put('EOF', TAIL_MSG_TYPE_NOTIFY)
                                    break
                                self.put(data)
                            except socket.timeout as e:
                                self.put(str(e), TAIL_MSG_TYPE_NOTIFY)
                except KeyboardInterrupt as e:
                    pass
                except Exception as e:
                    self.put(str(e), TAIL_MSG_TYPE_NOTIFY)
    def put(self, msg, type=None):
        # Default to a DATA message; NOTIFY carries status and errors.
        type = type if type else TAIL_MSG_TYPE_DATA
        self.queue.put(dict(
            alias = self.filepath['alias'],
            data = msg,
            type = type
        ))
class Remotail(object):
    """Coordinates Tail worker processes and the urwid UI.

    Each enabled file path gets its own Tail subprocess writing into a
    shared queue; the UI event loop drains that queue via display().
    """
    def __init__(self, filepaths):
        self.queue = multiprocessing.Queue()
        self.procs = dict()
        self.ui = UI()
        self.filepaths = filepaths
        # TODO: ugly hack to get going right now
        # replace with something better
        global remotail
        remotail = self
    @staticmethod
    def filepath_to_dict(filepath):
        """Split an alias://user:pass@host:port/path URL into its parts,
        defaulting the username to the current user and the port to 22."""
        url = urlparse.urlparse(filepath)
        filepath = dict(
            username = url.username if url.username else getpass.getuser(),
            password = url.password,
            host = url.hostname,
            port = url.port if url.port else 22,
            path = url.path,
            alias = url.scheme
        )
        # Fixed: `is not ''` compared object identity, which is unreliable
        # for strings; use an equality check instead.
        assert filepath['alias'] != ''
        return filepath
    def enable(self, filepath):
        """Start tailing `filepath` and add a UI column for its alias."""
        proc = Tail(filepath, self.queue)
        self.procs[proc.filepath['alias']] = proc
        self.ui.add_column(proc.filepath['alias'])
        proc.start()
    def disable(self, alias):
        """Stop the worker for `alias` and remove its UI column."""
        proc = self.procs[alias]
        del self.procs[alias]
        proc.terminate()
        self.ui.del_column(proc.filepath['alias'])
    def start(self):
        """Enable all configured paths and run the UI loop until exit."""
        for filepath in self.filepaths:
            self.enable(filepath)
        # Wake the UI whenever a Tail process writes to the queue.
        self.ui.loop.watch_file(self.queue._reader, self.display)
        try:
            self.ui.loop.run()
        except Exception as e:
            logger.info(e)
        finally:
            for alias in self.procs:
                proc = self.procs[alias]
                proc.terminate()
                proc.join()
    def display(self):
        """Drain one queue message and append it to the owning column."""
        line = self.queue.get_nowait()
        text = urwid.Text(line['data'].strip())
        box = self.ui.boxes[line['alias']][0].body
        box.body.append(text)
        box.set_focus(len(box.body)-1)
def main():
    """CLI entry point: collect --file-path arguments (plus an optional
    config file of one path per line) and run Remotail until interrupted."""
    parser = argparse.ArgumentParser(
        description='Remotail v%s' % __version__,
        epilog='Having difficulty using Remotail? Report at: %s/issues/new' % __homepage__
    )
    parser.add_argument('--file-path', default=list(), action='append', help='alias://user:pass@host:port/file/path/to/tail')
    parser.add_argument('--config', help='Config file containing one --file-path per line')
    args = parser.parse_args()
    filepaths = args.file_path
    if args.config:
        try:
            # Fixed: close the config file instead of leaking the handle.
            with open(args.config, 'rb') as config_file:
                filepaths += config_file.read().strip().split()
        except IOError as e:
            logger.error(e)
    try:
        Remotail(filepaths).start()
    except KeyboardInterrupt as e:
        pass
if __name__ == '__main__':
    main()
|
"""Treadmill cell checkout.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import logging
import sqlite3
import sys
import click
import pandas as pd
from treadmill import cli
_LOGGER = logging.getLogger(__name__)
def _print_query(conn, sql, index_col=None):
"""Print query results."""
row_factory = conn.row_factory
try:
conn.row_factory = None
frame = pd.read_sql(
sql, conn, index_col=index_col
)
columns = {
col: col.replace('_', '-')
for col in frame.columns
}
frame.rename(columns=columns, inplace=True)
if not frame.empty:
pd.set_option('max_rows', None)
pd.set_option('expand_frame_repr', False)
print('---')
print(frame)
print('')
finally:
conn.row_factory = row_factory
def _print_check_result(description, data, status):
"""Print check result."""
print(' {:.<69} {}'.format(
description.format(**data),
status
))
def _run_check(conn, check, verbose, index_col):
    """Run one configured check against the checkout database.

    Args:
        conn: sqlite3 connection (row_factory set to sqlite3.Row by caller).
        check: dict with 'query' (SQL), 'metric' (SQL template embedding
            {query}) and optional 'alerts' (each with match/threshold/
            description entries).
        verbose: verbosity level; >= 1 always prints the underlying rows.
        index_col: index column passed through to _print_query.
    """
    query = check['query']
    metric = check['metric']
    alerts = check.get('alerts', [])
    # The metric SQL template wraps the check's base query.
    cursor = conn.execute(metric.format(query=query))
    check_failed = False
    empty = True
    for row in cursor:
        empty = False
        row = dict(zip(row.keys(), row))
        for alert in alerts:
            match = True
            status = 'pass'
            # The alert applies only to rows matching all 'match' properties.
            for key, prop_value in alert.get('match', {}).items():
                value = row.get(key)
                match = match and (value == prop_value)
            if not match:
                continue
            # Any metric value at or above its threshold fails the alert.
            for key, limit in alert['threshold'].items():
                value = row.get(key)
                if value >= limit:
                    status = 'fail'
                    check_failed = True
            _print_check_result(
                alert['description'],
                row,
                status
            )
    if empty:
        # No rows at all: report every alert as passing.
        for alert in alerts:
            _print_check_result(
                alert['description'],
                {},
                'pass'
            )
    # An alert was triggered (or verbose output was requested): display
    # the rows that caused it.
    if verbose >= 1 or check_failed:
        _print_query(
            conn,
            query,
            index_col=index_col
        )
    print('')
# pylint: disable=C0103
#
# pylint does not like 'db' as variable name.
def init():
    """Top level command handler."""
    @click.group(cls=cli.make_commands(__name__,
                                       chain=True,
                                       invoke_without_command=True))
    @click.option('--cell', required=True,
                  envvar='TREADMILL_CELL',
                  callback=cli.handle_context_opt,
                  expose_value=False)
    @click.option('-v', '--verbose', count=True)
    @click.option('--db', help='Path to output sqlite db.')
    def run(verbose, db):
        """Run interactive checkout."""
        # Options are consumed by the chained result callback below.
        del verbose
        del db
    @run.resultcallback()
    def run_checkouts(checkouts, verbose, db):
        """Run interactive checkout."""
        # Too many nested blocks, need to refactor.
        #
        # pylint: disable=R1702
        common_args = {}
        # Default to an in-memory database unless --db was given.
        if not db:
            db = ':memory:'
        conn = sqlite3.connect(db)
        for checkout in checkouts:
            try:
                # Each checkout populates tables and returns metadata
                # describing its query, index column and checks.
                metadata = checkout(conn=conn, **common_args)
                index_col = metadata.get('index')
                all_query = metadata['query']
                conn.commit()
                print(checkout.__doc__)
                if verbose >= 2:
                    _print_query(
                        conn,
                        all_query,
                        index_col=index_col
                    )
                # Checks iterate dict-like rows.
                # NOTE(review): row_factory is saved here but never restored.
                row_factory = conn.row_factory
                conn.row_factory = sqlite3.Row
                for check in metadata.get('checks', []):
                    _run_check(conn, check, verbose, index_col)
            except Exception as err:  # pylint: disable=W0703
                # One failing checkout must not abort the remaining ones.
                _LOGGER.exception('%s', str(err))
    del run_checkouts
    return run
|
"""
Model objects for the CLB mimic. Please see the `Rackspace Cloud Load
Balancer API docs
<http://docs.rackspace.com/loadbalancers/api/v1.0/clb-devguide/content/API_Operations.html>`
for more information.
"""
from __future__ import absolute_import, division, unicode_literals
from copy import deepcopy
from random import randrange
import attr
from six import text_type
from twisted.internet.interfaces import IReactorTime
from twisted.python import log
from mimic.canned_responses.loadbalancer import load_balancer_example
from mimic.model.clb_errors import (
considered_immutable_error,
invalid_json_schema,
loadbalancer_not_found,
lb_deleted_xml,
node_not_found,
not_found_xml,
updating_node_validation_error
)
from mimic.util.helper import (EMPTY_RESPONSE,
invalid_resource,
not_found_response,
one_of_validator,
seconds_to_timestamp,
set_resource_status)
@attr.s
class Node(object):
    """
    An object representing a CLB node, which is a unique combination of
    IP-address and port. Please see section 4.4 (Nodes) of the CLB
    documentation for more information.
    :ivar str address: The IP address of the node
    :ivar int port: The port of the node
    :ivar str type: One of (PRIMARY, SECONDARY). Defaults to PRIMARY.
    :ivar int weight: Between 1 and 100 inclusive. Defaults to 1.
    :ivar str condition: One of (ENABLED, DISABLED, DRAINING). Defaults to
        ENABLED.
    :ivar int id: The ID of the node
    :ivar str status: "ONLINE"
    """
    address = attr.ib(validator=attr.validators.instance_of(text_type))
    port = attr.ib(validator=attr.validators.instance_of(int))
    type = attr.ib(validator=one_of_validator("PRIMARY", "SECONDARY"),
                   default="PRIMARY")
    weight = attr.ib(validator=attr.validators.instance_of(int), default=1)
    condition = attr.ib(
        validator=one_of_validator("ENABLED", "DISABLED", "DRAINING"),
        default="ENABLED")
    id = attr.ib(validator=attr.validators.instance_of(int),
                 default=attr.Factory(lambda: randrange(999999)))
    status = attr.ib(validator=attr.validators.instance_of(text_type),
                     default="ONLINE")
    # Fixed: a plain `default=[]` is a single list shared by every Node
    # instance; Factory(list) gives each node its own event list.
    feed_events = attr.ib(default=attr.Factory(list))
    @classmethod
    def from_json(cls, json_blob):
        """
        Create a new node from JSON.
        :param dict json_blob: the JSON dictionary containing node information
        :return: a :class:`Node` object
        :raises: :class:`TypeError` or :class:`ValueError` if the values
            are incorrect.
        """
        # status cannot be in the JSON
        if "status" in json_blob:
            raise ValueError("'status' not allowed in the JSON")
        # NOTE(review): this mutates the caller's dict (port coerced to int).
        json_blob['port'] = int(json_blob['port'])
        return Node(**json_blob)
    def as_json(self):
        """
        :return: a JSON dictionary representing the node.
        """
        # feed_events is internal bookkeeping, excluded from the API view.
        return attr.asdict(self, filter=lambda aa, _: aa.name != "feed_events")
    def same_as(self, other):
        """
        :return: `True` if the other node has the same IP address and port
            as this node (but compares nothing else), `False` otherwise.
        """
        return self.address == other.address and self.port == other.port
@attr.s
class CLB(object):
    """
    An object representing a load balancer. Currently just takes the JSON
    as an attribute, and provides __getitem__ and __setitem__ to access it.
    These should be moved to real attributes as soon as possible.
    """
    _json = attr.ib()
    nodes = attr.ib(default=attr.Factory(list))

    def __getitem__(self, key):
        """Read one key from the backing JSON dict (transitional helper)."""
        return self._json[key]

    def __setitem__(self, key, value):
        """Write one key into the backing JSON dict (transitional helper)."""
        self._json[key] = value

    def update(self, new_json_dict):
        """Merge *new_json_dict* into the backing JSON dict."""
        self._json.update(new_json_dict)

    def short_json(self):
        """
        :return: a short JSON dict representation of this object to be used
        when listing load balancers. Does not include the node list, but does
        include a "nodeCount" attribute, even if there are no nodes.
        """
        summary_keys = ('name', 'protocol', 'id', 'port', 'algorithm', 'status',
                        'timeout', 'created', 'virtualIps', 'updated')
        summary = {}
        for key in summary_keys:
            summary[key] = self._json[key]
        summary['nodeCount'] = len(self.nodes)
        return summary

    def full_json(self):
        """
        :return: a longer, detailed JSON dict representation of this object
        that includes all the nodes, if there are any present. Does not
        include a "nodeCount" attribute.
        """
        detailed = deepcopy(self._json)
        if self.nodes:
            detailed["nodes"] = [member.as_json() for member in self.nodes]
        return detailed
@attr.s
class BadKeysError(Exception):
    """
    When trying to alter the settings of a load balancer, this exception will
    be raised if you attempt to alter an attribute which doesn't exist.
    """
    keys = attr.ib()
    code = attr.ib(validator=attr.validators.instance_of(int), default=400)

    def to_json(self):
        """Serialize this error to the JSON dict the API returns."""
        return {
            'message': 'Attempt to alter a bad attribute',
            'code': self.code,
        }
@attr.s
class BadValueError(Exception):
    """
    When trying to alter the settings of a load balancer, this exception will
    be raised if you attempt to set a valid attribute to an invalid setting.
    """
    value = attr.ib()
    accepted_values = attr.ib()
    code = attr.ib(validator=attr.validators.instance_of(int), default=400)

    def to_json(self):
        """Serialize this error to the JSON dict the API returns."""
        message = 'Unsupported status {0} not one of {1}'.format(
            self.value, self.accepted_values)
        return {'message': message, 'code': self.code}
def node_feed_xml(events):
    """
    Return an Atom feed (XML string) for the given node events.

    :param events: an iterable of ``(summary, updated)`` pairs, one per
        feed entry.
    """
    entry_template = ('<entry><summary>{summary}</summary>'
                      '<updated>{updated}</updated></entry>')
    body = ''.join(entry_template.format(summary=summary, updated=updated)
                   for summary, updated in events)
    return '<feed xmlns="http://www.w3.org/2005/Atom">{entries}</feed>'.format(
        entries=body)
@attr.s
class RegionalCLBCollection(object):
    """
    A collection of CloudLoadBalancers, in a given region, for a given tenant.
    """
    # Reactor clock used to timestamp and drive state transitions.
    clock = attr.ib(validator=attr.validators.provides(IReactorTime))
    # Maximum number of nodes allowed on a single load balancer.
    node_limit = attr.ib(default=25,
                         validator=attr.validators.instance_of(int))
    # Mapping of lb_id -> CLB object.
    lbs = attr.ib(default=attr.Factory(dict))
    # Mapping of lb_id -> flattened metadata dict (key -> value); metadata
    # keys such as "lb_building" control simulated state transitions.
    meta = attr.ib(default=attr.Factory(dict))

    def lb_in_region(self, clb_id):
        """
        Returns true if the CLB ID is registered with our list of load
        balancers.
        """
        return clb_id in self.lbs

    def add_load_balancer(self, lb_info, lb_id):
        """
        Returns response of a newly created load balancer with
        response code 202, and adds the new lb to the store's lbs.

        :param dict lb_info: Configuration for the load balancer. See
            Openstack docs for creating CLBs.
        :param string lb_id: Unique ID for this load balancer.
        """
        status = "ACTIVE"

        # Loadbalancers metadata is a list object, creating a metadata store
        # so we dont have to deal with the list
        meta = {}
        if "metadata" in lb_info:
            for each in lb_info["metadata"]:
                meta.update({each["key"]: each["value"]})
        self.meta[lb_id] = meta
        log.msg(self.meta)

        # The "lb_building" metadata key makes the LB start in BUILD status
        # for that many seconds (see _verify_and_update_lb_state).
        if "lb_building" in self.meta[lb_id]:
            status = "BUILD"

        current_timestring = seconds_to_timestamp(self.clock.seconds())
        self.lbs[lb_id] = CLB(load_balancer_example(lb_info, lb_id, status,
                                                    current_timestring),
                              nodes=[Node.from_json(blob)
                                     for blob in lb_info.get("nodes", [])])
        return {'loadBalancer': self.lbs[lb_id].full_json()}, 202

    def _verify_and_update_lb_state(self, lb_id, set_state=True,
                                    current_timestamp=None):
        """
        Based on the current state, the metadata on the lb and the time since the LB has
        been in that state, set the appropriate state in self.lbs
        Note: Reconsider if update metadata is implemented

        :param lb_id: ID of an LB that is present in ``self.lbs``.
        :param bool set_state: when True the caller is mutating the LB, so
            metadata may push it into a transitional (PENDING-*) or ERROR
            state; when False the state is only advanced by elapsed time.
        :param current_timestamp: current time in seconds (from the clock).
        """
        current_timestring = seconds_to_timestamp(current_timestamp)
        if self.lbs[lb_id]["status"] == "BUILD":
            # "lb_building" holds the build duration in seconds (default 10);
            # once that much time has elapsed the LB transitions to ACTIVE.
            self.meta[lb_id]["lb_building"] = self.meta[lb_id]["lb_building"] or 10
            self.lbs[lb_id]["status"] = set_resource_status(
                self.lbs[lb_id]["updated"]["time"],
                self.meta[lb_id]["lb_building"],
                current_timestamp=current_timestamp
            ) or "BUILD"

        elif self.lbs[lb_id]["status"] == "ACTIVE" and set_state:
            # A mutation against an ACTIVE LB may be redirected into a
            # transitional/error state via metadata flags.
            if "lb_pending_update" in self.meta[lb_id]:
                self.lbs[lb_id]["status"] = "PENDING-UPDATE"
                log.msg(self.lbs[lb_id]["status"])
            if "lb_pending_delete" in self.meta[lb_id]:
                self.lbs[lb_id]["status"] = "PENDING-DELETE"
            if "lb_error_state" in self.meta[lb_id]:
                self.lbs[lb_id]["status"] = "ERROR"
            self.lbs[lb_id]["updated"]["time"] = current_timestring

        elif self.lbs[lb_id]["status"] == "PENDING-UPDATE":
            # Remain PENDING-UPDATE for the configured number of seconds,
            # then fall back to ACTIVE.
            if "lb_pending_update" in self.meta[lb_id]:
                self.lbs[lb_id]["status"] = set_resource_status(
                    self.lbs[lb_id]["updated"]["time"],
                    self.meta[lb_id]["lb_pending_update"],
                    current_timestamp=current_timestamp
                ) or "PENDING-UPDATE"

        elif self.lbs[lb_id]["status"] == "PENDING-DELETE":
            # Remain PENDING-DELETE for the configured duration (default 10
            # seconds), then transition to DELETED.
            self.meta[lb_id]["lb_pending_delete"] = self.meta[lb_id]["lb_pending_delete"] or 10
            self.lbs[lb_id]["status"] = set_resource_status(
                self.lbs[lb_id]["updated"]["time"],
                self.meta[lb_id]["lb_pending_delete"], "DELETED",
                current_timestamp=current_timestamp
            ) or "PENDING-DELETE"
            self.lbs[lb_id]["updated"]["time"] = current_timestring

        elif self.lbs[lb_id]["status"] == "DELETED":
            # see del_load_balancer above for an explanation of this state change.
            self.lbs[lb_id]["status"] = set_resource_status(
                self.lbs[lb_id]["updated"]["time"], 3600, "DELETING-NOW",
                current_timestamp=current_timestamp
            ) or "DELETED"
            if self.lbs[lb_id]["status"] == "DELETING-NOW":
                # The simulated nightly purge: the LB disappears entirely.
                del self.lbs[lb_id]

    def set_attributes(self, lb_id, kvpairs):
        """
        Sets zero or more attributes on the load balancer object.
        Currently supported attributes include: status.

        :raises BadKeysError: when an unsupported attribute is named.
        :raises BadValueError: when status is not one of the supported values.
        """
        supported_keys = ["status"]
        badKeys = []
        for k in kvpairs:
            if k not in supported_keys:
                badKeys.append(k)
        if len(badKeys) > 0:
            raise BadKeysError(keys=badKeys)

        if "status" in kvpairs:
            # NOTE(review): these use underscores (PENDING_DELETE) while the
            # state machine above uses dashes (PENDING-DELETE) - confirm
            # whether that difference is intentional.
            supported_statuses = [
                "ACTIVE", "ERROR", "PENDING_DELETE", "PENDING_UPDATE"
            ]
            s = kvpairs["status"]
            if s not in supported_statuses:
                raise BadValueError(
                    value=s, accepted_values=supported_statuses
                )

        self.lbs[lb_id].update(kvpairs)

    def get_load_balancers(self, lb_id):
        """
        Returns the load balancers with the given lb id, with response
        code 200. If no load balancers are found returns 404.
        """
        if lb_id in self.lbs:
            # Read-only access: advance time-driven transitions only.
            self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())
            log.msg(self.lbs[lb_id]["status"])
            return {'loadBalancer': self.lbs[lb_id].full_json()}, 200
        return not_found_response("loadbalancer"), 404

    def get_node(self, lb_id, node_id):
        """
        Returns the node on the load balancer
        """
        if lb_id in self.lbs:
            self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())
            if self.lbs[lb_id]["status"] == "DELETED":
                return (
                    invalid_resource(
                        "The loadbalancer is marked as deleted.", 410),
                    410)

            for each in self.lbs[lb_id].nodes:
                if node_id == each.id:
                    return {"node": each.as_json()}, 200
            return not_found_response("node"), 404
        return not_found_response("loadbalancer"), 404

    def get_node_feed(self, lb_id, node_id):
        """
        Return load balancer's node's atom feed
        """
        if lb_id not in self.lbs:
            return not_found_xml("Load balancer")
        self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())
        if self.lbs[lb_id]["status"] == "DELETED":
            return lb_deleted_xml()
        for node in self.lbs[lb_id].nodes:
            if node_id == node.id:
                return node_feed_xml(node.feed_events), 200
        return not_found_xml("Node")

    def list_load_balancers(self):
        """
        Returns the list of load balancers with the given tenant id with response
        code 200. If no load balancers are found returns empty list.

        :return: A 2-tuple, containing the HTTP response and code, in that order.
        """
        for lb_id in self.lbs:
            self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())
            log.msg(self.lbs[lb_id]["status"])

        return (
            {'loadBalancers': [lb.short_json() for lb in self.lbs.values()]},
            200)

    def list_nodes(self, lb_id):
        """
        Returns the list of nodes remaining on the load balancer
        """
        if lb_id in self.lbs:
            self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())
            # The state update above may delete the LB (DELETING-NOW), so
            # re-check membership before dereferencing it.
            if lb_id not in self.lbs:
                return not_found_response("loadbalancer"), 404

            if self.lbs[lb_id]["status"] == "DELETED":
                return invalid_resource("The loadbalancer is marked as deleted.", 410), 410
            node_list = [node.as_json()
                         for node in self.lbs[lb_id].nodes]
            return {"nodes": node_list}, 200
        else:
            return not_found_response("loadbalancer"), 404

    def _delete_node(self, lb_id, node_id):
        """
        Deletes a node by ID.

        :return: True when a node with that ID existed and was removed,
            False otherwise.
        """
        if self.lbs[lb_id].nodes:
            previous_size = len(self.lbs[lb_id].nodes)
            self.lbs[lb_id].nodes[:] = [node
                                        for node in self.lbs[lb_id].nodes
                                        if node.id != node_id]
            return len(self.lbs[lb_id].nodes) < previous_size
        return False

    def delete_node(self, lb_id, node_id):
        """
        Determines whether the node to be deleted exists in the session store,
        deletes the node, and returns the response code.
        """
        current_timestamp = self.clock.seconds()
        if lb_id in self.lbs:
            self._verify_and_update_lb_state(lb_id, False, current_timestamp)

            if self.lbs[lb_id]["status"] != "ACTIVE":
                # Error message verified as of 2015-04-22
                return considered_immutable_error(
                    self.lbs[lb_id]["status"], lb_id)

            # Second call with set_state=True records that a mutation happened
            # (possibly moving the LB into a PENDING-* state via metadata).
            self._verify_and_update_lb_state(
                lb_id, current_timestamp=current_timestamp)

            if self._delete_node(lb_id, node_id):
                return None, 202
            else:
                return not_found_response("node"), 404

        return not_found_response("loadbalancer"), 404

    def delete_nodes(self, lb_id, node_ids):
        """
        Bulk-delete multiple LB nodes.
        """
        if not node_ids:
            resp = {
                "message": "Must supply one or more id's to process this request.",
                "code": 400}
            return resp, 400

        if lb_id not in self.lbs:
            return not_found_response("loadbalancer"), 404

        current_timestamp = self.clock.seconds()
        self._verify_and_update_lb_state(lb_id, False, current_timestamp)

        if self.lbs[lb_id]["status"] != "ACTIVE":
            # Error message verified as of 2015-04-22
            resp = {"message": "LoadBalancer is not ACTIVE",
                    "code": 422}
            return resp, 422

        # We need to verify all the deletions up front, and only allow it through
        # if all of them are valid.
        all_ids = [node.id for node in self.lbs[lb_id].nodes]
        non_nodes = set(node_ids).difference(all_ids)
        if non_nodes:
            nodes = ','.join(map(str, sorted(non_nodes)))
            resp = {
                "validationErrors": {
                    "messages": [
                        "Node ids {0} are not a part of your loadbalancer".format(nodes)
                    ]
                },
                "message": "Validation Failure",
                "code": 400,
                "details": "The object is not valid"}
            return resp, 400

        for node_id in node_ids:
            # It should not be possible for this to fail, since we've already
            # checked that they all exist.
            assert self._delete_node(lb_id, node_id) is True

        # Record the mutation (set_state=True).
        self._verify_and_update_lb_state(
            lb_id, current_timestamp=current_timestamp)
        return EMPTY_RESPONSE, 202

    def add_node(self, node_list, lb_id):
        """
        Add one or more nodes to a load balancer.  Fails if one or more of the
        nodes provided has the same address/port as an existing node.  Also
        fails if adding the nodes would exceed the maximum number of nodes on
        the CLB.

        :param list node_list: a `list` of `dict` containing specification for
            nodes

        :return: a `tuple` of (json response as a dict, http status code)
        """
        if lb_id in self.lbs:
            current_timestamp = self.clock.seconds()
            self._verify_and_update_lb_state(lb_id, False, current_timestamp)

            if self.lbs[lb_id]["status"] != "ACTIVE":
                return considered_immutable_error(
                    self.lbs[lb_id]["status"], lb_id)

            nodes = [Node.from_json(blob) for blob in node_list]

            for existing_node in self.lbs[lb_id].nodes:
                for new_node in nodes:
                    if existing_node.same_as(new_node):
                        resource = invalid_resource(
                            "Duplicate nodes detected. One or more nodes "
                            "already configured on load balancer.", 413)
                        return (resource, 413)

            # If there were no duplicates
            new_nodeCount = len(self.lbs[lb_id].nodes) + len(nodes)
            if new_nodeCount <= self.node_limit:
                self.lbs[lb_id].nodes = self.lbs[lb_id].nodes + nodes
            else:
                resource = invalid_resource(
                    "Nodes must not exceed {0} "
                    "per load balancer.".format(self.node_limit), 413)
                return (resource, 413)

            self._verify_and_update_lb_state(
                lb_id, current_timestamp=current_timestamp)

            return {"nodes": [node.as_json() for node in nodes]}, 202

        return not_found_response("loadbalancer"), 404

    def update_node(self, lb_id, node_id, node_updates):
        """
        Update the weight, condition, or type of a single node.  The IP, port,
        status, and ID are immutable, and attempting to change them will cause
        a 400 response to be returned.

        All success and error behavior verified as of 2016-06-16.

        :param str lb_id: the load balancer ID
        :param str node_id: the node ID to update
        :param dict node_updates: The JSON dictionary containing node
            attributes to update
        :param current_timestamp: What the current time is

        :return: a `tuple` of (json response as a dict, http status code)
        """
        feed_summary = (
            "Node successfully updated with address: '{address}', port: '{port}', "
            "weight: '{weight}', condition: '{condition}'")

        # first, store whether address and port were provided - if they were
        # that's a validation error not a schema error
        things_wrong = {k: True for k in ("address", "port", "id")
                        if k in node_updates}
        node_updates = {k: node_updates[k] for k in node_updates
                        if k not in ("address", "port")}

        # use the Node.from_json to check the schema
        try:
            Node.from_json(dict(address="1.1.1.1", port=80, **node_updates))
        except (TypeError, ValueError):
            return invalid_json_schema()

        # handle the possible validation (as opposed to schema) errors
        if not 1 <= node_updates.get('weight', 1) <= 100:
            things_wrong["weight"] = True
        if things_wrong:
            return updating_node_validation_error(**things_wrong)

        # Now, finally, check if the LB exists and node exists
        if lb_id in self.lbs:
            self._verify_and_update_lb_state(lb_id, False, self.clock.seconds())

            if self.lbs[lb_id]["status"] != "ACTIVE":
                return considered_immutable_error(
                    self.lbs[lb_id]["status"], lb_id)

            for i, node in enumerate(self.lbs[lb_id].nodes):
                if node.id == node_id:
                    # Rebuild the node from its merged attributes and record
                    # a feed event describing the update.
                    params = attr.asdict(node)
                    params.update(node_updates)
                    self.lbs[lb_id].nodes[i] = Node(**params)
                    self.lbs[lb_id].nodes[i].feed_events.append(
                        (feed_summary.format(**params),
                         seconds_to_timestamp(self.clock.seconds())))
                    return ("", 202)

            return node_not_found()

        return loadbalancer_not_found()

    def del_load_balancer(self, lb_id):
        """
        Returns response for a load balancer
        is in building status for 20
        seconds and response code 202, and adds the new lb to ``self.lbs``.
        A loadbalancer, on delete, goes into PENDING-DELETE and remains in DELETED
        status until a nightly job(maybe?)
        """
        if lb_id in self.lbs:
            current_timestamp = self.clock.seconds()

            if self.lbs[lb_id]["status"] == "PENDING-DELETE":
                msg = ("Must provide valid load balancers: {0} are immutable and "
                       "could not be processed.".format(lb_id))
                # Dont doubt this to be 422, it is 400!
                return invalid_resource(msg, 400), 400

            self._verify_and_update_lb_state(lb_id, True, current_timestamp)

            if any([self.lbs[lb_id]["status"] == "ACTIVE",
                    self.lbs[lb_id]["status"] == "ERROR",
                    self.lbs[lb_id]["status"] == "PENDING-UPDATE"]):
                del self.lbs[lb_id]
                return EMPTY_RESPONSE, 202

            if self.lbs[lb_id]["status"] == "PENDING-DELETE":
                return EMPTY_RESPONSE, 202

            if self.lbs[lb_id]["status"] == "DELETED":
                # Advance the DELETED -> DELETING-NOW transition, then report
                # the LB as no longer addressable.
                self._verify_and_update_lb_state(
                    lb_id, current_timestamp=current_timestamp)
                msg = "Must provide valid load balancers: {0} could not be found.".format(lb_id)
                # Dont doubt this to be 422, it is 400!
                return invalid_resource(msg, 400), 400

        return not_found_response("loadbalancer"), 404
@attr.s
class GlobalCLBCollections(object):
    """
    A :obj:`GlobalCLBCollections` is a set of all the
    :obj:`RegionalCLBCollection` objects owned by a given tenant.  In other
    words, all the objects that a single tenant owns globally in a
    cloud load balancer service.
    """
    clock = attr.ib()
    regional_collections = attr.ib(default=attr.Factory(dict))

    def collection_for_region(self, region_name):
        """
        Get a :obj:`RegionalCLBCollection` for the region identified by the
        given name, creating it on first access.
        """
        try:
            return self.regional_collections[region_name]
        except KeyError:
            collection = RegionalCLBCollection(self.clock)
            self.regional_collections[region_name] = collection
            return collection
|
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
import numpy as np
import random
import tkinter as Tk
from grafica import Grafica
import vectorEntrenamiento as vE
# --- Global GUI state and initial network weights ---------------------------
root = Tk.Tk()
root.wm_title("Adaline")

C_ZERO = 0
MIN_VAL = -1.5       # lower bound for random initial weights
MAX_VAL = 1.5        # upper bound for random initial weights
MAX_DECIMALES = 5    # maximum number of decimal places (display precision)
X0 = -1              # bias input value

a2 = []              # layer-2 activations, indexed by adaline

# Tk-bound configuration values edited through the entry widgets below.
maxEpocas = Tk.StringVar()
lr = Tk.StringVar()
maxError = Tk.StringVar()
numAdalines = Tk.StringVar()
pruebaX = Tk.StringVar()
pruebaY = Tk.StringVar()
numAdalines.set('2')
maxEpocas.set('200')
lr.set('0.1')
maxError.set('0.2')

# Randomly initialise the bias weights (W0_*) and the two input weights (W_*)
# for each adaline in both layers.
i = 0
W0_1 = []
W0_2 = []
W_1 = []
W_2 = []
adalines = int(numAdalines.get())
while(i < adalines):
    W0_1.append( vE.Entrada( vE.getRandom( MIN_VAL, MAX_VAL ) ) )
    W_1.append( [ vE.Entrada( vE.getRandom( MIN_VAL, MAX_VAL ) ), vE.Entrada( vE.getRandom( MIN_VAL, MAX_VAL ) ) ] )
    W0_2.append( vE.Entrada(vE.getRandom( MIN_VAL, MAX_VAL ) ) )
    W_2.append( [ vE.Entrada( vE.getRandom( MIN_VAL, MAX_VAL ) ), vE.Entrada( vE.getRandom( MIN_VAL, MAX_VAL ) ) ] )
    i += 1
class Ventana():
    """Tk window hosting the plot canvas and the controls used to train a
    small two-layer Adaline network on the points drawn in the Grafica."""

    def __init__(self):
        self.fig = plt.figure()
        canvas = FigureCanvasTkAgg(self.fig, master=root)
        self.grafica = Grafica(self.fig)
        self.grafica.setCanvas(canvas)
        self.ax = self.grafica.ax
        # NOTE(review): FigureCanvasTkAgg.show() is deprecated in newer
        # matplotlib releases; switch to canvas.draw() when upgrading.
        canvas.show()
        canvas.get_tk_widget().grid(row=0, column=0, columnspan=3)
        canvas._tkcanvas.grid(row=1, column=0)

        # Parameter entries bound to the module-level StringVars.
        self.lblLr = Tk.Label(master=root, text="Learning rate: ")
        self.lblLr.grid(row=2, column=0)
        self.entryLr = Tk.Entry(master=root, bd=5, textvariable=lr)
        self.entryLr.grid(row=2, column=1)
        self.lblEpocas = Tk.Label(master=root, text="Epocas: ")
        self.lblEpocas.grid(row=3, column=0)
        self.entryEpocas = Tk.Entry(master=root, bd=5, textvariable=maxEpocas)
        self.entryEpocas.grid(row=3, column=1)
        self.lblError = Tk.Label(master=root, text='Error max:')
        self.lblError.grid(row=4, column=0)
        self.entryError = Tk.Entry(master=root, bd=5, textvariable=maxError)
        self.entryError.grid(row=4, column=1)
        self.btnEntrenar = Tk.Button(master=root, text="Entrenar", command=self.red)
        self.btnEntrenar.grid(row=6, column=0)
        self.lblEstado = Tk.Label(master=root, text="Estado: Configurando")
        self.lblEstado.grid(row=7, column=0, columnspan=3)
        self.lblPesos = Tk.Label(master=root, text="")
        self.lblPesos.grid(row=8, column=0, columnspan=3)
        self.btnPrueba = Tk.Button(master=root, text="Probar", command=self.red)
        self.btnPrueba.grid(row=9)
        self.lblPrueba = Tk.Label(master=root, text="")
        self.lblPrueba.grid(row=9, column=1, rowspan=2)
        root.protocol('WM_DELETE_WINDOW', self._quit)

    def red(self):
        """Run backpropagation training over the drawn training vectors.

        Reads learning rate / max epochs / max error from the Tk entries and
        updates the module-level weight lists in place.
        """
        # BUG FIX: these module-level weights are rebound below; without the
        # global declaration, Python treats them as locals and the first read
        # raises UnboundLocalError.
        global W0_1, W0_2, W_1, W_2
        iteracion = 0
        errorMax = float(maxError.get())   # TODO(review): currently unused;
        # presumably intended as an early-stopping threshold.
        epocasMax = float(maxEpocas.get())
        adalines = int(numAdalines.get())
        # BUG FIX: was float(lr) on the Tk StringVar object itself, which
        # raises TypeError; the value must be read with .get().
        learning_rate = float(lr.get())
        while iteracion < epocasMax:
            # BUG FIX: i must restart at 0 each epoch; previously it was
            # initialised once and advanced with 'i += i', which never leaves
            # zero, so the loops never progressed (infinite loop).
            i = 0
            while i < adalines:
                for vector in self.grafica.vectoresEntrenamiento:
                    # forward prop
                    a1 = vE.logsig(self.resC1(vector, i))
                    a2.insert(i, vE.logsig(self.resC2(a1, i)))
                    # backward prop
                    e = vector.getClase() - a2[i]
                    s2 = -2 * (1 - a2[i] ** 2) * float(e)
                    # TODO(review): W_2[i] is a plain list of vE.Entrada
                    # objects, which has no .T attribute; this expression
                    # presumably expects a numpy array - confirm intent.
                    s1 = np.diagonal(1 - a1 ** 2) * W_2[i].T * s2
                    # update weights (actualizar pesos)
                    W_2 = W_2 - learning_rate * s2 * a1
                    W0_2 = W0_2 - learning_rate * s2
                    W_1 = W_1 - self.calcular(vector, s1, learning_rate)
                    W0_1 = W0_1 - learning_rate * s1
                i += 1          # BUG FIX: was 'i += i' (always 0)
            iteracion += 1      # BUG FIX: was 'iteracion += iteracion' (always 0)

    def resC1(self, vector, it):
        """Weighted sum of layer 1 for adaline *it*: bias plus the dot
        product of the vector's coordinates with the layer-1 weights."""
        suma = X0 * W0_1[it].getValor()
        for pos, peso in enumerate(W_1[it]):
            suma += vector.getCoordenadas()[pos] * peso.getValor()
        return suma

    def resC2(self, vector, it):
        """Weighted sum of layer 2 for adaline *it*; *vector* here is the
        layer-1 activation (a scalar), multiplied by each layer-2 weight."""
        suma = X0 * W0_2[it].getValor()
        for pos, peso in enumerate(W_2[it]):
            suma += vector * peso.getValor()
        return suma

    def calcular(self, vector, S, LR):
        """Accumulate coordinate * S * LR over the training vector's
        coordinates (the layer-1 weight delta)."""
        suma = 0
        # BUG FIX: original iterated 'enumerate(vector)' and then indexed
        # getCoordenadas() with the (index, value) tuple, raising TypeError.
        for pos, _ in enumerate(vector.getCoordenadas()):
            suma += vector.getCoordenadas()[pos] * S * LR
        return suma

    def _quit(self):
        """Shut down the Tk main loop and destroy the window."""
        root.quit()
        root.destroy()
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 4 18:06:21 2021
@author: chanchanchan
"""
import streamlit as st
import pandas as pd
from matplotlib import pyplot as plt
import plotly.express as px
import plotly.graph_objects as go
import DissertationPlotwithDataMain as main
def app():
    """Render the 'Time Domain Interpretation' page: a figure of the input
    signals and a figure of the output signals with per-frequency sidebar
    toggles.  All traces share the 3 kHz capture's time base."""
    st.header('Time Domain Interpretation')
    st.subheader('Arrival Time Identification Methods')

    time_base = main.data3_time_new

    input_fa = go.Figure()
    for trace_y, trace_name in (
            (main.data3_input_new, '3 kHz'),
            (main.data4_input_new, '4 kHz'),
            (main.data5_input_new, '5 kHz'),
            (main.data6_input_new, '6 kHz'),
            (main.data7_input_new, '7 kHz')):
        input_fa.add_trace(go.Scatter(x=time_base, y=trace_y, mode='lines', name=trace_name))
    input_fa.update_layout(title={'text': "Input Signals", 'y': 0.85, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'}, xaxis_title="Time (ms)", yaxis_title="Output Voltage (Arbitary Units)")
    st.write(input_fa)

    st.sidebar.write('Output Signal:')
    output_fa = go.Figure()
    for box_label, trace_y, trace_name in (
            ("3kHz", main.data3_output_new, '3 kHz'),
            ("4kHz", main.data4_output_new, '4 kHz'),
            ("5kHz", main.data5_output_new, '5 kHz'),
            ("6kHz", main.data6_output_new, '6 kHz'),
            ("7kHz", main.data7_output_new, '7 kHz')):
        if st.sidebar.checkbox(box_label, value=True):
            output_fa.add_trace(go.Scatter(x=time_base, y=trace_y, mode='lines', name=trace_name))
    output_fa.update_layout(title={'text': "Output Signals", 'y': 0.85, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'}, xaxis_title="Time (ms)", yaxis_title="Output Voltage (Arbitary Units)")
    st.write(output_fa)
|
import math
import time
import serial
import Frequency_Determination as FD
import FECLib as fec
def loadCharacterSet(SetName):
    """Load the comma-separated character set named *SetName*.

    Reads ``Source/CharacterSets/<SetName>.txt`` and returns the list of
    characters it contains.

    :param SetName: base name of the character-set file (without extension).
    :return: list of the characters in the set, in file order.
    :raises OSError: if the file does not exist or cannot be read.
    """
    fullName = 'Source/CharacterSets/' + SetName + '.txt'
    # BUG FIX: the file handle was previously opened and never closed;
    # the context manager guarantees it is released.
    with open(fullName, 'r') as file:
        fullSet = file.read()
    validChars = fullSet.split(',')
    return (validChars)
class InputValidation:
    """Prompt the user for a message restricted to a given character set.

    A message is valid when it is no longer than *MaxLength* and every
    character belongs to *CharSet*.  Optionally pads short messages with the
    last character of the alphabet so they fill the expected block.
    """

    def __init__(self, CharSet, MaxLength, padWithLastChar=True):
        self.set = CharSet          # allowed characters
        self.recvState = False      # True once a valid message has been read
        self.maxLength = MaxLength  # maximum message length in characters
        self.pad = padWithLastChar  # pad short messages with last alphabet char

    def inputRecv(self):
        """Return True if a valid message has been received."""
        return (self.recvState)

    def reset(self):
        """Clear the received flag so getInput() will prompt again."""
        self.recvState = False

    def getInput(self):
        """Prompt repeatedly until a valid message is entered; return it
        (padded to maxLength when padding is enabled)."""
        self.reset()
        msg = ''
        while not self.recvState:
            self.recvState = True
            # BUG FIX: was raw_input(), which exists only on Python 2; the
            # rest of this module uses Python 3 idioms.
            msg = input("Enter Message: ")
            if len(msg) > self.maxLength:
                print('Input Message too long')
                self.recvState = False
            else:
                for ch in msg:
                    if ch not in self.set:
                        print('Message Contains invalid Character: ' + ch)
                        self.recvState = False
        # if the message does not take up the entire expected block
        # it will be padded with the last character in the alphabet
        if self.pad:
            msg = msg + (self.set[len(self.set) - 1]) * (self.maxLength - len(msg))
        return (msg)
def NumToBin(number, outStringLength):
    """Return *number* as a binary string, left-padded with zeros to
    *outStringLength* characters (longer representations are returned
    unpadded)."""
    bits = bin(number)[2:]
    return (bits.zfill(int(outStringLength)))
class SrcEncoder:
    """Source encoder: maps each character of an alphabet to a fixed-width
    binary code word (minimum bits needed for the alphabet size)."""

    def __init__(self, CharSet, method):
        if method == 'basic':  # the basic method accounts for no compression
            alphabetSize = len(CharSet)
            self.CodeLength = math.ceil(math.log(alphabetSize, 2))
            self.charset = CharSet
            width = int(self.CodeLength)
            # Code word for symbol i is i in binary, zero-padded to the code
            # width (inlined equivalent of the module-level NumToBin helper).
            self.mapSet = [bin(i)[2:].zfill(width)
                           for i in range(len(self.charset))]
        else:
            raise NameError('method argument Not provided')

    def EncodeData(self, inputData):
        """Return the concatenated bit string encoding *inputData*."""
        return ''.join(self.mapSet[self.charset.index(ch)]
                       for ch in inputData)
class ChannelEncoder:
    """Adds redundancy (forward error correction) to a bit stream.

    Supported methods: ``'none'`` (pass-through) and ``'HardHamming'``
    (delegates to :class:`fec.hammingCode`).
    """

    def __init__(self, method, blockSize=None, msgSize=None):
        if method == 'none':
            self.method = 'none'
            self.blockSize = 0
            self.codeRate = 0
        elif method == 'HardHamming':
            self.method = 'HardHamming'
            self.hamming_obj = fec.hammingCode(blockSize, msgSize)
        else:
            raise NameError('Invalid Method to Channel Encoder')

    def EncodeData(self, data):
        """Return *data* with the configured redundancy applied."""
        if self.method == 'none':
            return (data)
        elif self.method == 'HardHamming':
            return self.hamming_obj.HardHammingEncode(data)
        else:
            # BUG FIX: was 'raise nameError(...)' - an undefined name, which
            # surfaced as a confusing NameError about 'nameError' instead of
            # the intended error message.
            raise NameError('Bad Method for Encoder')

    def interleave(self, message):
        """Block-interleave *message* column-wise using the largest proper
        divisor of its length as the column count."""
        x = len(message)
        n_cols = self.max_cols(x)
        if x % n_cols != 0:
            raise ValueError("In interleave(), len(message)=%d is not a multiple of ncols=%d" % (len(message), n_cols))
        result = []
        for i in range(0, n_cols):
            result.extend(message[i:len(message):n_cols])
        # Join the interleaved elements back into a single string.
        return ''.join(str(item) for item in result)

    def max_cols(self, x):
        """Return the largest proper divisor of *x* (1 for primes).

        NOTE(review): raises ValueError for x <= 1 (empty divisor list) -
        callers are assumed to pass non-trivial message lengths.
        """
        possible_cols = [i for i in range(1, x) if x % i == 0]
        return int(max(possible_cols))
def SymbolMapping(data, numSymbols):
    """Convert a bit string into a list of integer symbols.

    Each symbol consumes log2(numSymbols) bits; *data* is zero-padded on the
    right when its length is not a multiple of the symbol width.
    """
    bitsPerSymbol = math.log(numSymbols, 2)
    if bitsPerSymbol % 1 != 0:
        raise NameError('Number of symbols not a power of 2 - Fix')
    bitsPerSymbol = int(bitsPerSymbol)
    mismatchLength = len(data) % bitsPerSymbol
    if mismatchLength != 0:
        print('data requires zero padding')
        data = data + '0' * (bitsPerSymbol - mismatchLength)
    groups = [data[i:i + bitsPerSymbol]
              for i in range(0, len(data), bitsPerSymbol)]
    symbols = [int(group, 2) for group in groups]
    return (symbols)
class serialCommObj:
    """Thin wrapper around a pyserial connection to the Arduino that drives
    the transmit waveform.

    Frequencies are encoded as ``u<ms>`` / ``d<ms>`` commands giving the
    high and low times of the square wave in milliseconds.

    NOTE(review): pyserial on Python 3 requires ``bytes`` for write(); the
    ``write(str(...))`` calls below work only on Python 2 - confirm the
    target interpreter.
    """

    def __init__(self, port, baud):
        self.Connection = serial.Serial(port, baud)
        time.sleep(2)  # allow the Arduino to reset after the port opens

    def setFreqAndDuty(self, freq, duty):
        """Program the Arduino for *freq* Hz at the given duty cycle.

        # maximum frequency is 500 Hz
        # duty is a fraction e [0:1]
        """
        # Clamp duty into [0, 1].
        if duty > 1:
            duty = 1
        if duty < 0:
            # BUG FIX: a negative duty was previously clamped to 1
            # (always-on) instead of 0 (always-off).
            duty = 0
        roundedPeriod = round(1000/freq)
        highTimeInMilliseconds = (round(duty*roundedPeriod))
        lowTimeInMilliseconds = (roundedPeriod - highTimeInMilliseconds)
        UpTimeString = 'u'+str(int(highTimeInMilliseconds))+'\n'
        DownTimeString = 'd'+str(int(lowTimeInMilliseconds))+'\n'
        # Rounding the half-periods changes the effective frequency; report it.
        ActualPeriod = highTimeInMilliseconds+lowTimeInMilliseconds
        print('Actual Transmit Freq: '+str(1000/ActualPeriod))
        self.Connection.write(str(UpTimeString))
        self.Connection.write(str(DownTimeString))
        return True

    def setUpDown(self, UpDownList):
        """Send a precomputed ``[up_ms, down_ms]`` pair to the Arduino."""
        UpTimeString = 'u'+str(UpDownList[0])+'\n'
        DownTimeString = 'd'+str(UpDownList[1])+'\n'
        print('Send Freq: '+str(float(1000)/(UpDownList[0]+UpDownList[1]))+' Send Duty: '+str(float(UpDownList[0])/(UpDownList[1]+UpDownList[0])))
        self.Connection.write(str(UpTimeString))
        self.Connection.write(str(DownTimeString))
        return True

    def singleDutyPhase(self, freq, phaseIndex):
        """Placeholder for phase-based signalling (implementation disabled)."""
        # qPeriod = round(0.25*(1000/freq))
        # comStr = phaseIndex+str(qPeriod)+'\n'
        # self.Connection.write(bytes(comStr,'ASCII'))
        return True
class Channel:
    # the channel is the arduino system.
    # ie data sent to the channel is serially sent to the arduino
    # refactor this to inheritance later
    """
    Sender side of the acoustic channel: converts symbols to frequencies and
    drives the Arduino over serial.  Optionally implements a sync-header
    protocol using a hold frequency (f1) and a header frequency (f2).
    """
    def __init__(self,ArdScript, UsedFrequencies, SymbolPeriod, header = False, holdFrequency =None, headerFrequency = None):
        # ArdScript selects which Arduino sketch the sender cooperates with.
        self.ArdScript = ArdScript
        # NOTE(review): the serial port is hard-coded to COM6 at 9600 baud.
        self.SerialObj = serialCommObj('COM6',9600)
        time.sleep(1)
        self.f1 = holdFrequency      # idle/"hold" frequency
        self.f2 = headerFrequency    # header/marker frequency
        self.header = header         # whether the sync-header protocol is used
        if ArdScript =='FixedFrequencyAndDuty':
            if self.header == False:
                self.SerialObj.setFreqAndDuty(100,0) # effectively an off state
            else:
                self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1)) # setting the hold frequency
        else:
            raise NameError ('Channel Model unimplemented')
        self.FrequencySet = UsedFrequencies  # symbol index -> frequency
        self.SymbolPeriod = SymbolPeriod     # seconds each symbol is held
        self.count = 0                       # sent-symbol counter (currently unused)

    def setFrequencySetAndTime(self, newFreqs, newPeriod):
        """Replace the symbol->frequency table and the symbol period."""
        self.FrequencySet = newFreqs
        self.SymbolPeriod = newPeriod

    def sendHeader(self):
        # the header is the sender side of a protocol when a autostart and sync are required
        # the protocol is non-trivial and may not be easily understood in code
        # Supporting documents are provided which explain the protocol
        # Alternates f2 (marker) and f1 (hold) with fixed sleeps; the timing
        # pattern is what the receiver locks onto.
        print('===START HEADER===')
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(8)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1))
        time.sleep(4)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(1)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1))
        time.sleep(4)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(1)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1))
        time.sleep(4)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(1)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1))
        time.sleep(4)
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(4)
        print('===END HEADER===')

    def sendHeaderV2(self):
        """Simplified header: 4 s of f2 followed by 8 s of f1."""
        print('START HEADER V2')
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f2))
        time.sleep(4)
        print(time.time())
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1))
        time.sleep(8)
        print('END HEADER')

    def send(self,Symbol):
        """Transmit a single symbol: hold its frequency for SymbolPeriod
        seconds, then return the frequency that was sent."""
        #if self.header == True :
        #    self.sendHeaderV2()
        #print('=== START MESSAGE ===')
        #print(time.time())
        #for Symbol in SymbolList:
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.FrequencySet[Symbol]))
        time.sleep(self.SymbolPeriod)
        #print('=== END MESSAGE ===')
        #self.count = self.count +1
        return self.FrequencySet[Symbol]

    def re_header(self):
        """Return the channel to its idle state between messages.

        NOTE(review): both branches start by switching the transmitter off;
        with the header protocol enabled the hold frequency is then restored
        after a 3 second gap.
        """
        if self.header == False:
            self.SerialObj.setFreqAndDuty(100, 0)
        else :
            self.SerialObj.setFreqAndDuty(100, 0)
            time.sleep(3)
            self.SerialObj.setUpDown(FD.SenderGetUpAndDown(self.f1)) # reset

    def setSingleFreq(self, SingleFrequency):
        # used in the calibration
        self.SerialObj.setUpDown(FD.SenderGetUpAndDown(SingleFrequency))
from django.conf.urls import url
from django.contrib import admin
from rest_framework.authtoken import views
from .views import (
UserCreateAPIView,
UserLoginAPIView
)
# Routes for the accounts API: user registration plus DRF token retrieval.
# The dedicated login view is disabled; clients authenticate via auth-token.
urlpatterns = [
    #url(r'^login/$', UserLoginAPIView.as_view(), name='login'),
    url(r'^sign-up/$', UserCreateAPIView.as_view(), name='signup'),
    url(r'^auth-token/$', views.obtain_auth_token, name='get_auth_token'),
]
# Demo driver: build a Demo(12, 13) from the class_demo6 module and
# print the result of its do_something() method.
import class_demo6
d = class_demo6.Demo(12, 13)
print(d.do_something())
# File: module_demo.py
# Author: Kaiching Chang
# Date: July, 2014
|
# rolling-around-a-cube.py
# Maulik Doshi + Section F + maulikd
# 15-112 Term Project Fall 2014
# Rolling Around a Cube
from __future__ import with_statement
from visual import *
import pygame # Used purely for the music functionality
import math, random, os
pygame.mixer.init()  # must run before any pygame.mixer.music call
intromusic = pygame.mixer.music.load("music/bgmusic.ogg")
# NOTE(review): music.load() returns None, so 'intromusic' is always None;
# the loaded track lives inside the mixer, not in this variable.
# Tacky Background Music
# http://www.freesfx.co.uk/sfx/background%20music
# Plays the music that is loaded
def playMusic():
    """Start the loaded track; -1 loops it indefinitely."""
    pygame.mixer.music.play(-1)
# Stops the music that is loaded
def stopMusic():
    """Stop the currently loaded track."""
    pygame.mixer.music.stop()
# Pauses the music that is playing
def pauseMusic():
    """Pause playback; resume later with unpauseMusic()."""
    pygame.mixer.music.pause()
# Unpauses the music that was paused
def unpauseMusic():
    """Resume playback paused by pauseMusic()."""
    pygame.mixer.music.unpause()
# Returns the text instructions for the game
def getInstructions():
    """Return the multi-line help text shown on the instructions/pause screens."""
    instructions = '''Welcome to Rolling Around a Cube!
    - Press the backspace key to go back to the previous menu
    - During gameplay, press 'r' to restart with a new arrangement
    - Press 'p' to pause, 'u' to unpause
    - Click points to create paths
    - Press the down key to run
    - Guide the ball so it travels around the cube
    - Less tries and time is better score
    - Lower the score the better
    Thank You for Playing!'''
    return instructions
# Reads a file (Taken from Course Notes)
def readFile(filename, mode="rt"):
    """Return the entire contents of *filename*, opened with *mode* ("rt" = read text)."""
    with open(filename, mode) as infile:
        contents = infile.read()
    return contents
# Writes to a file (Taken from Course Notes)
def writeFile(filename, contents, mode="wt"):
    """Write *contents* to *filename*, opened with *mode* ("wt" = truncate + write text)."""
    with open(filename, mode) as outfile:
        outfile.write(contents)
# Almost Equals adapted from Course Notes
def almostEqual(d1, d2):
    """True when d1 and d2 differ by less than 10 units.

    The tolerance is deliberately coarse: it matches the 8-unit step the
    ball moves per frame, so a position within one step counts as "there".
    """
    epsilon = 10
    return abs(d1 - d2) < epsilon
# Class for the actual gameplay of the project
class Gameplay(object):
    """One play-through of a level.

    Draws the cube, the four playable faces (VPython frames), obstacles and
    user-clicked paths; runs the event loop that handles keyboard/mouse
    input, rotation between faces, collision / out-of-bounds detection and
    scoring.

    NOTE(review): depends on VPython globals brought in by
    ``from visual import *`` — frame, box, label, points, curve, sphere,
    color, rate, sleep — plus the module-level Rectangle, PlayLevels,
    getInstructions, readFile, writeFile, almostEqual and pauseMusic /
    unpauseMusic helpers.
    """
    # Constructor for the Gameplay class
    def __init__(self, scene, level=1, side=600):
        # Background colour is keyed to the level number.
        # NOTE(review): a level outside 1-5 leaves self.bgcolor unset and
        # raises AttributeError below — confirm callers only pass 1-5.
        if(level == 1):
            self.bgcolor = (102/255.0,0/255.0,102/255.0)
        elif(level == 2):
            self.bgcolor = (0/255.0,51/255.0,102/255.0)
        elif(level == 3):
            self.bgcolor = (102/255.0,102/255.0,0/255.0)
        elif(level == 4):
            self.bgcolor = (102/255.0,51/255.0,0/255.0)
        elif(level == 5):
            self.bgcolor = (102/255.0,0/255.0,0/255.0)
        self.scene = scene
        self.scene.background = self.bgcolor
        self.level = level
        self.numOfObstacles = self.level * 4  # difficulty scales with level
        self.scene.userzoom = False
        self.scene.userspin = False
        self.side = side
        self.initAnimation()
    # Initialize other variables needed in the class
    def initAnimation(self):
        self.gameRange = 500
        self.solving = False
        self.solved = False
        self.paused = False
        self.time = 0
        self.tries = 0
        self.rotateRange = 600  # zoomed-out range used while rotating faces
        self.scene.range = self.gameRange
        # one VPython frame per playable face of the cube
        self.frames = [frame(), frame(), frame(), frame()]
        self.currentFrame = 0
        self.collide = False
        self.curvePts = [[],[],[],[]]
        self.start = []
        self.end = []
        self.curves = [[],[],[],[]]
        self.firstPoint = [[],[],[],[]]
        self.endPoint = [[],[],[],[]]
        self.clickedPts = [[],[],[],[]]
        self.quit = False
        self.oob = True  # out-of-bounds checking enabled
        (self.x, self.y, self.z, self.r) = (-267, 267, 320, 15)
        self.addPoints = [[],[],[],[]]
        self.obstacles = []
        self.numOfFrames = 4
        self.ballFrame = 0  # which face the ball is currently on
        self.ballMove = False
        for x in xrange(4): self.initializePoints(x)
        self.timer = label(pos=(0,0,320), height=20, text=str(self.tries) + "-" + "0.00", align='center', depth=0.01, color=color.white, frame=self.currentFrame)
        self.tempCol = label(pos=(0,0,320), height=20, text="COLLISION", align='center', depth=0.01, color=color.red, background=color.black, frame=self.currentFrame)
        self.tempCol.visible = False
        self.tempOOB = label(pos=(0,0,320), height=20, text="OUT OF BOUNDS", align='center', depth=0.01, color=color.red, background=color.black, frame=self.currentFrame)
        self.tempOOB.visible = False
        self.drawObjects()
    # Draw the beginning and ending squares
    def drawBeginEnds(self):
        # one red start square and one green end square per face
        self.start.append(Rectangle(0, 0, 50, 50, color.red, 0, self.frames))
        self.start.append(Rectangle(0, 550, 50, 600, color.red, 1, self.frames))
        self.start.append(Rectangle(0, 0, 50, 50, color.red, 2, self.frames))
        self.start.append(Rectangle(0, 550, 50, 600, color.red, 3, self.frames))
        self.end.append(Rectangle(550, 550, 600, 600, color.green, 0, self.frames))
        self.end.append(Rectangle(550, 0, 600, 50, color.green, 1, self.frames))
        self.end.append(Rectangle(550, 550, 600, 600, color.green, 2, self.frames))
        self.end.append(Rectangle(550, 0, 600, 50, color.green, 3, self.frames))
    # Generates a random rgb color for the obstacles
    def randomColor(self):
        r = random.randint(0,255)
        g = random.randint(0,255)
        b = random.randint(0,255)
        return (r/255.0, g/255.0, b/255.0)  # VPython expects 0..1 components
    # Draws the rectangle obstacles on screen with random color and position
    def drawObstacles(self):
        num = 0
        f = 0
        self.obsColors = []
        while(num < self.numOfObstacles):
            x0 = random.randint(50,540)
            y0 = random.randint(50,540)
            s = random.randint(1,8)  # obstacle speed
            di = random.choice(["up", "right"])
            # resample until x1 is at least 10 past x0
            while(True):
                x1 = random.randint(60,550)
                if(abs(x0-x1) < 10): continue
                if(x0 < x1): break
            while(True):
                y1 = random.randint(60,550)
                if(abs(y0-y1) < 10): continue
                if(y0 < y1): break
            self.obsColors.append(self.randomColor())
            self.obstacles.append(Rectangle(x0, y0, x1, y1, self.obsColors[num], f, self.frames, di, s))
            f = ((f + 1) % 4) # 4 frames (0, 1, 2, 3)
            num += 1
        self.unhighlight()
    # Draws the beginning and ending black points to start and end paths
    def drawFirstEndPoints(self):
        self.firstPoint[0] = points(frame=self.frames[0], pos=(self.x, self.y, self.z), size=50, color=color.black)
        self.firstPoint[1] = points(frame=self.frames[1], pos=(self.z, -self.y, -self.x), size=50, color=color.black)
        self.firstPoint[2] = points(frame=self.frames[2], pos=(-self.x, self.y, -self.z), size=50, color=color.black)
        self.firstPoint[3] = points(frame=self.frames[3], pos=(-self.z, -self.y, self.x), size=50, color=color.black)
        self.endPoint[0] = points(frame=self.frames[0], pos=(-self.x, -self.y, self.z), size=50, color=color.black)
        self.endPoint[1] = points(frame=self.frames[1], pos=(self.z, self.y, self.x), size=50, color=color.black)
        self.endPoint[2] = points(frame=self.frames[2], pos=(self.x, -self.y, -self.z), size=50, color=color.black)
        self.endPoint[3] = points(frame=self.frames[3], pos=(-self.z, self.y, -self.x), size=50, color=color.black)
    # Draws the curves to move between faces of the cube
    def drawCurvePts(self):
        self.addPoints[0] = [(-self.x, -self.y, self.z), (-self.x+50, -self.y, self.z), (self.z, -self.y, -self.x)]
        self.addPoints[1] = [(self.z, self.y, self.x), (self.z, self.y, self.x-50), (-self.x, self.y, -self.z)]
        self.addPoints[2] = [(self.x, -self.y, -self.z), (self.x-50, -self.y, -self.z), (-self.z, -self.y, self.x)]
        self.addPoints[3] = [(-self.z, self.y, -self.x), (-self.z, self.y, -self.x+50), (self.x, self.y, self.z)]
        for x in xrange(4):
            curve(pos=self.addPoints[x], frame=self.frames[x], color=color.blue)
    # Darkens all objects that are not in the current frame
    def unhighlight(self):
        for x in xrange(len(self.obstacles)):
            if(self.obstacles[x].getFrame() != self.currentFrame):
                self.obstacles[x].changeColor((0,0,0))
            else:
                self.obstacles[x].changeColor(self.obsColors[x])
        for y in xrange(4):
            if(self.start[y].getFrame() != self.currentFrame):
                self.start[y].changeColor((0,0,0))
            else:
                self.start[y].changeColor((1,0,0))
            if(self.end[y].getFrame() != self.currentFrame):
                self.end[y].changeColor((0,0,0))
            else:
                self.end[y].changeColor((0,1,0))
    # Deletes all objects on screen
    def deleteAllObjects(self):
        # To delete, visibility must be False and then use del command
        # NOTE: index 0 is deleted each pass because the del shifts the list.
        for x in xrange(4):
            self.frames[0].visible = False
            del self.frames[0]
        self.box.visible = False
        del self.box
        self.timer.visible = False
        del self.timer
    # Initializes the first point at the starting location
    def initializePoints(self, frame):
        if(frame == 0):
            self.curvePts[0] = [(self.x, self.y, self.z)]
        elif(frame == 1):
            self.curvePts[1] = [(self.z, -self.y, -self.x)]
        elif(frame == 2):
            self.curvePts[2] = [(-self.x, self.y, -self.z)]
        elif(frame == 3):
            self.curvePts[3] = [(-self.z, -self.y, self.x)]
    # Draws all the objects needed for Gameplay on screen in position
    def drawObjects(self):
        self.box = box(pos=(0,0,0), axis=(1,0,0), length=self.side, height=self.side, width=self.side, color=color.white, opacity=0.1)
        self.drawBeginEnds()
        self.drawObstacles()
        self.drawFirstEndPoints()
        self.drawCurvePts()
        self.ball = sphere(pos=(self.x,self.y,self.z), radius=self.r, color=color.yellow, frame=self.frames[self.ballFrame])
    # Handles the rotation between different faces of the cube
    def rotation(self, direction):
        # zoom out before rotating
        for x in xrange(self.gameRange, self.rotateRange+1, 10):
            sleep(0.001)
            self.scene.range = x
        (start, end) = (1, 21)
        if (direction == "right"):
            self.currentFrame = ((self.currentFrame+1)%4)  # %4 wraps around the cube
            coeff = -1
        else:
            self.currentFrame = ((self.currentFrame-1)%4)
            coeff = 1
        self.unhighlight()
        # rotate box and all frames 90 degrees in (end-start) small steps
        for x in xrange(start, end):
            sleep(0.001)
            self.box.rotate(angle=coeff*(1*math.pi/((end-start)*2)), axis=(0,1,0), origin=(0,0,0))
            for y in xrange(self.numOfFrames):
                self.frames[y].rotate(angle=coeff*(1*math.pi/((end-start)*2)), axis=(0,1,0), origin=(0,0,0))
        # zoom back in
        for x in xrange(self.rotateRange, self.gameRange-1, -10):
            sleep(0.001)
            self.scene.range = x
    # Rotates up specifically for end of game
    def rotateUp(self):
        (start, end) = (1,21)
        for x in xrange(start, end):
            sleep(0.001)
            self.box.rotate(angle=(1*math.pi/((end-start)*2)), axis=(1,0,0), origin=(0,0,0))
            for y in xrange(self.numOfFrames):
                self.frames[y].rotate(angle=(1*math.pi/((end-start)*2)), axis=(1,0,0), origin=(0,0,0))
        for x in xrange(self.gameRange, 100, -5):
            sleep(0.01)
            self.scene.range = x
    # Detects when player (ball) has gone outside the box
    def outOfBounds(self):
        gap = 25
        if(self.oob == True):
            # faces 0/2 vary in x-y, faces 1/3 vary in z-y
            if(self.currentFrame == 0 or self.currentFrame == 2):
                if(self.ball.pos.x < self.x-gap or self.ball.pos.x > -self.x+gap
                   or self.ball.pos.y < -self.y-gap or self.ball.pos.y > self.y+gap):
                    return True
                else: return False
            elif(self.currentFrame == 1 or self.currentFrame == 3):
                if(self.ball.pos.z < self.x-gap or self.ball.pos.z > -self.x+gap
                   or self.ball.pos.y < -self.y-gap or self.ball.pos.y > self.y+gap):
                    return True
                else: return False
        else: return False
    # Short animation for when collision is detected
    def collisionDisplay(self):
        self.timer.visible = False
        # blink the COLLISION label three times
        for x in xrange(3):
            self.tempCol.visible = False
            sleep(0.3)
            self.tempCol.visible = True
            sleep(0.3)
        sleep(0.5)
        self.tempCol.visible = False
        self.timer.visible = True
    # Short animation for when out-of-bounds is detected
    def oobDisplay(self):
        self.timer.visible = False
        for x in xrange(3):
            self.tempOOB.visible = False
            sleep(0.3)
            self.tempOOB.visible = True
            sleep(0.3)
        sleep(0.5)
        self.tempOOB.visible = False
        self.timer.visible = True
    # Moves along the path that the user draws and keeps moving between frames
    def moveAlongPath(self):
        if(self.curvePts[self.currentFrame] == []):
            # face finished: rotate to the next face and continue if it has a path
            self.ballMove = False
            self.rotation("right") # Rotate after finishing face
            self.ballFrame += 1
            if(len(self.curvePts[self.currentFrame]) > 1):
                self.ballMove = True
                self.downPressed()
        else:
            if(len(self.curvePts[self.currentFrame]) == 1):
                self.curvePts[self.currentFrame].pop()
            else:
                # only check collisions once the user path proper has begun
                if(len(self.curvePts[self.currentFrame]) > 3):
                    collision = self.obsCollide()
                    oob = self.outOfBounds()
                    if(collision == True or oob == True):
                        self.collide = True
                        if(collision):
                            self.collisionDisplay()
                        if(oob):
                            self.oobDisplay()
                        self.ballMove = False
                        self.postCollisionTasks()
                        return
                x0, x1 = self.curvePts[self.currentFrame][0][0], self.curvePts[self.currentFrame][1][0]
                y0, y1 = self.curvePts[self.currentFrame][0][1], self.curvePts[self.currentFrame][1][1]
                z0, z1 = self.curvePts[self.currentFrame][0][2], self.curvePts[self.currentFrame][1][2]
                if(almostEqual(self.ball.pos.x,x1) and almostEqual(self.ball.pos.y,y1) and almostEqual(self.ball.pos.z,z1)):
                    self.ball.pos = (x1,y1,z1) # Almost equal used for small errors
                    self.curvePts[self.currentFrame].pop(0)
                else: # Calculates slope and travels the distance
                    d = ((x1-x0)**2+(y1-y0)**2+(z1-z0)**2)**0.5
                    d /= 8.0  # 8-unit step per frame
                    xStep = (x1 - x0)/d
                    yStep = (y1 - y0)/d
                    zStep = (z1 - z0)/d
                    self.ball.pos += (xStep,yStep,zStep)
    # Reset the ball on current frame after a collision or out-of-bounds
    def postCollisionTasks(self):
        self.initializePoints(self.currentFrame)
        # NOTE(review): 'del curve'/'del point' only unbind the loop variable;
        # the lists in self.curves/self.clickedPts still hold the objects
        # (they stay invisible) — confirm this is intentional.
        for curve in self.curves[self.currentFrame]:
            curve.visible = False
            del curve
        for point in self.clickedPts[self.currentFrame]:
            point.visible = False
            del point
        self.ball.pos = self.curvePts[self.currentFrame][0]
    # Check that the ball is in the end square to win that face
    def checkWin(self):
        gap = 25
        point = self.ball.pos
        if(self.currentFrame == 0):
            if(point[0] > self.end[self.currentFrame].getPos().x - gap and point[1] < self.end[self.currentFrame].getPos().y + gap):
                return True
            return False
        elif(self.currentFrame == 1):
            if(point[1] > self.end[self.currentFrame].getPos().y - gap and point[2] < self.end[self.currentFrame].getPos().z + gap):
                return True
            return False
        elif(self.currentFrame == 2):
            if(point[0] < self.end[self.currentFrame].getPos().x + gap and point[1] < self.end[self.currentFrame].getPos().y + gap):
                return True
            return False
        elif(self.currentFrame == 3):
            if(point[1] > self.end[self.currentFrame].getPos().y - gap and point[2] > self.end[self.currentFrame].getPos().z - gap):
                return True
            return False
    # Returns True if ball collides with obstacle, False otherwise
    def obsCollide(self):
        (x,y,z) = (self.ball.pos.x, self.ball.pos.y, self.ball.pos.z)
        for obs in self.obstacles:
            if(obs.collision(x,y,z) == True and obs.getFrame() == self.currentFrame):
                return True
        return False
    # Moves obstacles every iteration of the game by a specific amount direction
    def moveObstacle(self):
        for obs in self.obstacles:
            obs.move()
    # Last point needs to be within the end square, ideally on that black point
    def lastPoint(self):
        gap = 25
        point = self.curvePts[self.currentFrame][-1]
        if(self.currentFrame == 0):
            if(point[0] > self.end[self.currentFrame].getPos().x - gap and point[1] < self.end[self.currentFrame].getPos().y + gap):
                return True
            return False
        elif(self.currentFrame == 1):
            if(point[1] > self.end[self.currentFrame].getPos().y - gap and point[2] < self.end[self.currentFrame].getPos().z + gap):
                return True
            return False
        elif(self.currentFrame == 2):
            if(point[0] < self.end[self.currentFrame].getPos().x + gap and point[1] < self.end[self.currentFrame].getPos().y + gap):
                return True
            return False
        elif(self.currentFrame == 3):
            if(point[1] > self.end[self.currentFrame].getPos().y - gap and point[2] > self.end[self.currentFrame].getPos().z - gap):
                return True
            return False
    # Add the points needed to move between faces once ball rolls
    def downPressed(self):
        if(self.ballFrame != 4 and self.ballFrame == self.currentFrame and self.lastPoint() == True):
            self.ballMove = True
            for elem in self.addPoints[self.currentFrame]:
                self.curvePts[self.currentFrame].append(elem)
    # Steps needed for the solve function
    def solveSteps(self, x, y, step):
        # Try the candidate position (x, y), record whether it is legal,
        # then undo the move indicated by 'step' so the caller decides.
        self.ball.pos.x = x
        self.ball.pos.y = y
        if(self.outOfBounds() == False and self.obsCollide() == False): result = True
        else: result = False
        if(step == 0): self.ball.pos.x -= 8
        elif(step == 1): self.ball.pos.y += 8
        elif(step == 2): self.ball.pos.x += 8
        elif(step == 3): self.ball.pos.y -= 8
        return result
    # Automatically solves the first face of the cube, no points anymore
    def solve(self):
        # Greedy solver: prefer moving right, else left; then down, else up.
        while(self.checkWin() == False):
            rate(50)
            self.moveObstacle()
            if(self.solveSteps(self.ball.pos.x + 8, self.ball.pos.y, 0) == True):
                self.ball.pos.x += 8
                self.curvePts[self.currentFrame].append((self.ball.pos.x, self.ball.pos.y, self.ball.pos.z))
                self.curves[0].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            elif(self.solveSteps(self.ball.pos.x - 8, self.ball.pos.y, 2) == True):
                self.ball.pos.x -= 8
                self.curvePts[self.currentFrame].append((self.ball.pos.x, self.ball.pos.y, self.ball.pos.z))
                self.curves[0].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            if(self.solveSteps(self.ball.pos.x, self.ball.pos.y - 8, 1) == True):
                self.ball.pos.y -= 8
                self.curvePts[self.currentFrame].append((self.ball.pos.x, self.ball.pos.y, self.ball.pos.z))
                self.curves[0].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            elif(self.solveSteps(self.ball.pos.x, self.ball.pos.y + 8, 3) == True):
                self.ball.pos.y += 8
                self.curvePts[self.currentFrame].append((self.ball.pos.x, self.ball.pos.y, self.ball.pos.z))
                self.curves[0].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
        # keep only the final point, then append the face-transition points
        self.curvePts[self.currentFrame] = [self.curvePts[self.currentFrame][-1]]
        for elem in self.addPoints[self.currentFrame]:
            self.curvePts[self.currentFrame].append(elem)
        self.ballMove = True
        self.solving = False
    # Hides all objects on screen
    def hideAll(self):
        for x in xrange(4):
            self.frames[x].visible = False
        self.box.visible = False
        self.timer.visible = False
    # Shows all objects back on the screen
    def showAll(self):
        for x in xrange(4):
            self.frames[x].visible = True
        self.box.visible = True
        self.timer.visible = True
    # Handles key presses during the game
    def onKeyPressed(self):
        k = self.scene.kb.getkey()
        if (self.paused == False):
            if (k == "r"):
                # restart with a fresh arrangement
                self.deleteAllObjects()
                self.initAnimation()
            elif (k == "p"):
                # pause: hide the game, show the help overlay
                pauseMusic()
                self.paused = True
                self.hideAll()
                instructions = getInstructions()
                self.pauseTitle = label(pos=(0,400,0), text='Help Screen', height=35, color=color.white, background=color.black, opacity=1)
                self.pauseBox = box(pos=(0,330,0), length=395.5, height=59, width=0)
                self.pauseText = label(pos=(0,326.2,0), text='(Paused)', height=15, color=color.white, background=color.black)
                self.pauseHelp = label(pos=(0,0,0), text=instructions, height=16, color=color.black, background=color.white, opacity=1, align="center")
            elif (k == "s"):
                # auto-solve is only offered on the first face
                if(self.currentFrame == 0):
                    self.solved = True
                    self.timer.text = "None"
                    self.solving = True
            elif (k == "backspace"):
                self.quit = True
            elif (k == "right"):
                self.rotation("right")
            elif (k == "left"):
                self.rotation("left")
            elif (k == "down"):
                if(self.ballFrame == self.currentFrame and self.lastPoint() == True):
                    if(self.solved == False):
                        self.tries += 1
                        self.timer.text = str(self.tries) + "-" + str(self.score)
                    else: self.timer.text = "None"
                    self.downPressed()
        else:
            if (k == "u"):
                # unpause: tear down the help overlay and restore the game
                self.pauseTitle.visible = False
                self.pauseBox.visible = False
                self.pauseText.visible = False
                self.pauseHelp.visible = False
                del self.pauseTitle
                del self.pauseBox
                del self.pauseText
                del self.pauseHelp
                self.paused = False
                unpauseMusic()
                self.showAll()
    # Handles all mouse presses and mouse clicks in the game
    def onMousePressed(self):
        m = self.scene.mouse.getclick()
        newMouse = self.scene.mouse.project(normal=(0,0,1),d=self.z)
        # each face maps the projected click into its own coordinate system
        if(self.currentFrame == 0):
            self.curvePts[self.currentFrame].append((newMouse.x, newMouse.y, newMouse.z))
            self.curves[0].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            self.clickedPts[0].append(points(frame=self.frames[self.currentFrame], pos=(newMouse.x, newMouse.y, newMouse.z), size=50, color=color.cyan))
        elif(self.currentFrame == 1):
            self.curvePts[self.currentFrame].append((newMouse.z, newMouse.y, -newMouse.x))
            self.curves[1].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            self.clickedPts[1].append(points(frame=self.frames[self.currentFrame], pos=(newMouse.z, newMouse.y, -newMouse.x), size=50, color=color.cyan))
        elif(self.currentFrame == 2):
            self.curvePts[self.currentFrame].append((-newMouse.x, newMouse.y, -newMouse.z))
            self.curves[2].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            self.clickedPts[2].append(points(frame=self.frames[self.currentFrame], pos=(-newMouse.x, newMouse.y, -newMouse.z), size=50, color=color.cyan))
        elif(self.currentFrame == 3):
            self.curvePts[self.currentFrame].append((-newMouse.z, newMouse.y, newMouse.x))
            self.curves[3].append(curve(pos=self.curvePts[self.currentFrame], frame=self.frames[self.currentFrame], color=color.white))
            self.clickedPts[3].append(points(frame=self.frames[self.currentFrame], pos=(-newMouse.z, newMouse.y, newMouse.x), size=50, color=color.cyan))
    # Updates scores at the end of the game by using file IO
    def updateScores(self):
        if(self.level == 1):
            path = "scores/stage1bs.txt"
        elif(self.level == 2):
            path = "scores/stage2bs.txt"
        elif(self.level == 3):
            path = "scores/stage3bs.txt"
        elif(self.level == 4):
            path = "scores/stage4bs.txt"
        elif(self.level == 5):
            path = "scores/stage5bs.txt"
        if (not os.path.exists(path)):
            writeFile(path, str(self.score))
        else:
            s = readFile(path)
            # lower score is better; only overwrite if we beat the record
            if(self.score < float(s)):
                if (os.path.exists(path)):
                    os.remove(path)
                writeFile(path, str(self.score))
    # Runs the game of this class
    def run(self):
        while(True):
            rate(50)
            if(self.quit == False):
                if(self.scene.kb.keys == True): self.onKeyPressed()
                if(self.paused == False):
                    self.moveObstacle()
                    if(self.solving == True): self.solve()
                    if(self.ballMove == True): self.moveAlongPath()
                    if(self.ballFrame != 4):
                        # game in progress: advance the timer/score display
                        if(self.solved == False):
                            self.time += 0.025
                            self.score = round(self.time, 1)
                            self.timer.text = str(self.tries) + "-" + str(self.score)
                        else:
                            self.timer.text = "None"
                    else:
                        # all four faces done: finalize score and end the game
                        if(self.solved == False):
                            self.score *= self.tries
                            sleep(1)
                            self.timer.text = "Score: " + str(self.score)
                            self.updateScores()
                        else:
                            sleep(1)
                            self.timer.text = "Score: None"
                        self.rotateUp()
                        self.go = label(pos=(0,0,0), height=30, text="GAME OVER", align='center', depth=0.01, color=color.white, frame=self.currentFrame, font="monospace")
                        sleep(1)
                        self.go.visible = False
                        del self.go
                        self.quit = True
                    if(self.scene.mouse.events == 1): self.onMousePressed()
                    else: self.scene.mouse.events = 0 # Used to fix bugs dealing with too many mouse presses
            else:
                # return to the level-select screen
                self.deleteAllObjects()
                levels = PlayLevels(self.scene)
                levels.run()
# Class creating Rectangles as obstacles for the game
class Rectangle(object):
    """A thin VPython box on one cube face, used for obstacles and the
    start/end squares.  Coordinates are given in 0-600 face coordinates
    (origin top-left) and converted to world coordinates per face.
    Obstacles oscillate along one axis between the face edges.
    """
    # Constructor that does math to allow users to easily create on faces
    def __init__(self, x1, y1, x2, y2, color, frameNumber, framesList, direction="up", speed=0):
        if(direction == "up"):
            self.moveUp = True
        elif(direction == "right"):
            self.moveUp = False
        # NOTE(review): nothing in this file passes direction="rotate", so
        # self.rotate is always False here.
        if(direction == "rotate"):
            self.rotate = True
        else:
            self.rotate = False
        self.current = frameNumber
        self.frames = framesList
        self.speed = speed
        self.width = 0  # flat box lying on the face
        self.moveOneWay = True
        self.positions(x1,y1,x2,y2)
        self.rect = box(pos=(self.x, self.y, self.z), axis=self.axis, length=self.length, height=self.height,
                        width=self.width, color=color, frame=self.frames[self.current])
    # Calculates the positions of the obstacles in relation to the face
    def positions(self, x1, y1, x2, y2):
        self.length = x2-x1
        self.height = y2-y1
        # translate 0-600 face coords to world coords centred on the cube
        self.x = (((x1-300) + self.length/2.0))
        self.y = (((300-y1) - self.height/2.0))
        self.z = 301  # just outside the 600-wide cube so it sits on the face
        if (self.current == 0):
            (self.x, self.y, self.z) = (self.x, self.y, self.z)
            self.axis = (1,0,0)
        elif (self.current == 1):
            (self.x, self.y, self.z) = (self.z, self.y, -self.x)
            self.axis = (0,0,1)
        elif (self.current == 2):
            (self.x, self.y, self.z) = (-self.x, self.y, -self.z)
            self.axis = (1,0,0)
        elif (self.current == 3):
            (self.x, self.y, self.z) = (-self.z, self.y, self.x)
            self.axis = (0,0,1)
    # Moves the obstacles in the specified direction
    def move(self):
        # Oscillates: moveOneWay chooses the forward leg, flips at the edges.
        if(self.moveOneWay == True):
            if(self.moveUp == True):
                self.rect.pos += (0,self.speed,0)
                if (self.rect.pos.y+self.height/2.0 > 300):
                    self.moveOneWay = False
            else:
                # "right" direction: the sideways axis depends on the face
                if(self.current == 0):
                    self.rect.pos += (self.speed,0,0)
                    if (self.rect.pos.x+self.length/2.0 > 300):
                        self.moveOneWay = False
                elif(self.current == 1):
                    self.rect.pos -= (0,0,self.speed)
                    if (self.rect.pos.z-self.length/2.0 < -300):
                        self.moveOneWay = False
                elif(self.current == 2):
                    self.rect.pos -= (self.speed,0,0)
                    if (self.rect.pos.x-self.length/2.0 < -300):
                        self.moveOneWay = False
                elif(self.current == 3):
                    self.rect.pos += (0,0,self.speed)
                    if (self.rect.pos.z+self.length/2.0 > 300):
                        self.moveOneWay = False
        else:
            if(self.moveUp == True):
                self.rect.pos -= (0,self.speed,0)
                if (self.rect.pos.y-self.height/2.0 < -300):
                    self.moveOneWay = True
            else:
                if(self.current == 0):
                    self.rect.pos -= (self.speed,0,0)
                    if (self.rect.pos.x-self.length/2.0 < -300):
                        self.moveOneWay = True
                elif(self.current == 1):
                    self.rect.pos += (0,0,self.speed)
                    if (self.rect.pos.z+self.length/2.0 > 300):
                        self.moveOneWay = True
                elif(self.current == 2):
                    self.rect.pos += (self.speed,0,0)
                    if (self.rect.pos.x+self.length/2.0 > 300):
                        self.moveOneWay = True
                elif(self.current == 3):
                    self.rect.pos -= (0,0,self.speed)
                    if (self.rect.pos.z-self.length/2.0 < -300):
                        self.moveOneWay = True
    # Collision algorithm used to detect between ball and rectangles
    def collision(self, x, y, z):
        # Point-in-rectangle test on the two axes that vary on this face.
        # NOTE(review): tests the ball's centre only — the ball radius is
        # not included in the overlap test.
        frame = self.current
        if(frame == 0 or frame == 2):
            if((x > (self.rect.pos.x - self.rect.length/2.0)) and (x < (self.rect.pos.x + self.rect.length/2.0)) and
               (y > (self.rect.pos.y - self.rect.height/2.0)) and (y < (self.rect.pos.y + self.rect.height/2.0))):
                return True
            else: return False
        elif(frame == 1 or frame == 3):
            if((z > (self.rect.pos.z - self.rect.length/2.0)) and (z < (self.rect.pos.z + self.rect.length/2.0)) and
               (y > (self.rect.pos.y - self.rect.height/2.0)) and (y < (self.rect.pos.y + self.rect.height/2.0))):
                return True
            else: return False
    # Getter function for the position of the obstacle
    def getPos(self):
        return self.rect.pos
    # Getter function for the frame of the obstacles
    def getFrame(self):
        return self.current
    # Setter function to change color of obstacle
    def changeColor(self, c):
        self.rect.color = c
# Class for the initial Menu display
class Menu(object):
    """Main menu: four buttons (Play / Instructions / Settings / Best Scores)
    drawn as VPython boxes with labels; hover highlights, click dispatches
    to the corresponding screen class.
    """
    # Constructor for the class
    def __init__(self, scene):
        self.scene = scene
        self.scene.background = color.black
        self.scene.range = 12
        self.scene.userspin = False
        self.scene.userzoom = False
        self.frame = frame()
        self.drawAll()
    # Draws all the objects needed on the screen
    def drawAll(self):
        x = 20.59  # common button width
        self.title = label(pos=(0,9,0),text='Rolling Around a Cube', height=44, color=color.white, frame=self.frame, background=color.blue, opacity=1)
        self.subbox = box(pos=(0,7.13,0), length=x, height=1.5, width=0, frame=self.frame)
        self.subtitle = label(pos=(0,7,0),text='By: Maulik Doshi', height=15, color=color.white, frame=self.frame)
        self.play = box(pos=(0,3.5,0), length=x, height=3.3, color=color.green, width=0, frame=self.frame)
        self.playTitle = label(pos=(0,3.5,0), text='PLAY', height=20, frame=self.frame, box=False, font="monospace")
        self.ins = box(pos=(0,-0.6,0), length=x, height=3.3, color=color.yellow, width=0, frame=self.frame)
        self.insTitle = label(pos=(0,-0.6,0), text='INSTRUCTIONS', height=20, frame=self.frame, box=False, font="monospace")
        self.set = box(pos=(0,-4.7,0), length=x, height=3.3, color=color.red, width=0, frame=self.frame)
        self.setTitle = label(pos=(0,-4.7,0), text='SETTINGS', height=20, frame=self.frame, box=False, font="monospace")
        self.best = box(pos=(0,-8.8,0), length=x, height=3.3, color=color.orange, width=0, frame=self.frame)
        self.bestTitle = label(pos=(0,-8.8,0), text='BEST SCORES', height=20, frame=self.frame, box=False, font="monospace")
    # Handles the event when the play button is pressed
    def onPlayPressed(self):
        # point-in-box hit test against the button's extents
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.play.pos.x, self.play.pos.y)
        (l, h) = (self.play.length, self.play.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            return True
        return False
    # Handles the event when the instructions button is pressed
    def onInsPressed(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.ins.pos.x, self.ins.pos.y)
        (l, h) = (self.ins.length, self.ins.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            return True
        return False
    # Handles the event when the settings button is pressed
    def onSetPressed(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.set.pos.x, self.set.pos.y)
        (l, h) = (self.set.length, self.set.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            return True
        return False
    # Handles the event when the best scores button is pressed
    def onBestPressed(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.best.pos.x, self.best.pos.y)
        (l, h) = (self.best.length, self.best.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            return True
        return False
    # Highlights the play button when mouse over
    def highlightPlay(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.play.pos.x, self.play.pos.y)
        (l, h) = (self.play.length, self.play.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            self.play.color = color.white
        else: self.play.color = color.green
    # Highlights the instructions button when mouse over
    def highlightIns(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.ins.pos.x, self.ins.pos.y)
        (l, h) = (self.ins.length, self.ins.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            self.ins.color = color.white
        else: self.ins.color = color.yellow
    # Highlights the settings button when mouse over
    def highlightSet(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.set.pos.x, self.set.pos.y)
        (l, h) = (self.set.length, self.set.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            self.set.color = color.white
        else: self.set.color = color.red
    # Highlights the best scores button when mouse over
    def highlightBest(self):
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (self.best.pos.x, self.best.pos.y)
        (l, h) = (self.best.length, self.best.height)
        if(x > (xBox - l/2.0) and x < (xBox + l/2.0) and y > (yBox - h/2.0) and y < (yBox + h/2.0)):
            self.best.color = color.white
        else: self.best.color = color.orange
    # Highlights all of the buttons when needed
    def highlightAll(self):
        self.highlightPlay()
        self.highlightIns()
        self.highlightSet()
        self.highlightBest()
    # Runs the menu display
    def run(self):
        while True:
            rate(50)
            self.highlightAll()
            if(self.scene.mouse.events == 1):
                m = self.scene.mouse.getclick()
                # dispatch: tear down the menu frame, then run the chosen screen
                if(self.onPlayPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    levels = PlayLevels(self.scene)
                    levels.run()
                elif(self.onInsPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    ins = Instructions(self.scene)
                    ins.run()
                elif(self.onSetPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    sett = Settings(self.scene)
                    sett.run()
                elif(self.onBestPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    best = BestScores(self.scene)
                    best.run()
            else: self.scene.mouse.events = 0
# Class for the best scores display
class BestScores(object):
    """Best-scores screen: reads the per-stage score files from scores/ and
    displays one label per stage; backspace returns to the main menu.
    """
    # Constructor for the class
    def __init__(self, scene):
        self.scene = scene
        self.scene.background = color.orange
        self.scene.range = 12
        self.scene.userspin = False
        self.scene.userzoom = False
        self.frame = frame()
        self.drawAll()
    # Handles the event to go back to the menu using backspace
    def onBackPressed(self, event):
        if(event == "backspace"):
            return True
        return False
    # Find scores from files to display, None if there aren't any yet
    def findScores(self):
        self.scores = 5*["None"]
        path = "scores/stage1bs.txt"
        if (not os.path.exists(path)):
            self.scores[0] = "None"
        else:
            self.scores[0] = readFile(path)
        path = "scores/stage2bs.txt"
        if (not os.path.exists(path)):
            self.scores[1] = "None"
        else:
            self.scores[1] = readFile(path)
        path = "scores/stage3bs.txt"
        if (not os.path.exists(path)):
            self.scores[2] = "None"
        else:
            self.scores[2] = readFile(path)
        path = "scores/stage4bs.txt"
        if (not os.path.exists(path)):
            self.scores[3] = "None"
        else:
            self.scores[3] = readFile(path)
        path = "scores/stage5bs.txt"
        if (not os.path.exists(path)):
            self.scores[4] = "None"
        else:
            self.scores[4] = readFile(path)
    # Draws all the objects needed on the screen
    def drawAll(self):
        self.findScores()
        self.title = label(pos=(0,9,0),text='Best Scores', height=35, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.s1 = label(pos=(0,5.8,0),text='Stage 1', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.bs1 = label(pos=(0,4.4,0),text=str(self.scores[0]), height=15, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.s2 = label(pos=(0,2.3,0),text='Stage 2', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.bs2 = label(pos=(0,0.9,0),text=str(self.scores[1]), height=15, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.s3 = label(pos=(0,-1.2,0),text='Stage 3', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.bs3 = label(pos=(0,-2.6,0),text=str(self.scores[2]), height=15, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.s4 = label(pos=(0,-4.7,0),text='Stage 4', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.bs4 = label(pos=(0,-6.1,0),text=str(self.scores[3]), height=15, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.s5 = label(pos=(0,-8.2,0),text='Stage 5', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.bs5 = label(pos=(0,-9.6,0),text=str(self.scores[4]), height=15, color=color.white, frame=self.frame, background=color.black, opacity=1)
    # Runs the best scores menu
    def run(self):
        while(True):
            rate(50)
            if(self.scene.kb.keys == 1):
                k = self.scene.kb.getkey()
                if(self.onBackPressed(k) == True):
                    self.frame.visible = False
                    del self.frame
                    menu = Menu(self.scene)
                    menu.run()
# Class for the levels screen
class PlayLevels(object):
    """Stage-selection screen: five clickable stage buttons.

    Backspace returns to the main menu; clicking a stage starts Gameplay.
    The ten near-identical highlight/press methods now share two private
    helpers instead of repeating the same bounds check.
    """

    def __init__(self, scene):
        """Configure the shared scene and draw the stage buttons."""
        self.scene = scene
        self.scene.background = color.green
        self.scene.range = 12
        self.scene.userspin = False
        self.scene.userzoom = False
        self.frame = frame()
        self.drawAll()

    # Draws all the objects needed on the screen
    def drawAll(self):
        """Draw the title and one box+label pair per stage."""
        x = 20.59  # common button width
        self.title = label(pos=(0,8.8,0), text='Stages', height=35, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.lv1box = box(pos=(0,5.1,0), length=x, height=2.7, width=0, frame=self.frame, color=color.black)
        self.lv1 = label(pos=(0,5.1,0), text='Stage 1: Easy', height=15, color=color.white, frame=self.frame, background=(102/255.0,0/255.0,102/255.0), opacity=1)
        self.lv2box = box(pos=(0,1.7,0), length=x, height=2.7, width=0, frame=self.frame, color=color.black)
        self.lv2 = label(pos=(0,1.7,0), text='Stage 2: Medium', height=15, color=color.white, frame=self.frame, background=(0/255.0,51/255.0,102/255.0), opacity=1)
        self.lv3box = box(pos=(0,-1.7,0), length=x, height=2.7, width=0, frame=self.frame, color=color.black)
        self.lv3 = label(pos=(0,-1.7,0), text='Stage 3: Hard', height=15, color=color.white, frame=self.frame, background=(102/255.0,102/255.0,0/255.0), opacity=1)
        self.lv4box = box(pos=(0,-5.1,0), length=x, height=2.7, width=0, frame=self.frame, color=color.black)
        self.lv4 = label(pos=(0,-5.1,0), text='Stage 4: Extreme', height=15, color=color.white, frame=self.frame, background=(102/255.0,51/255.0,0/255.0), opacity=1)
        self.lv5box = box(pos=(0,-8.5,0), length=x, height=2.7, width=0, frame=self.frame, color=color.black)
        self.lv5 = label(pos=(0,-8.5,0), text='Stage 5: Intense', height=15, color=color.white, frame=self.frame, background=(102/255.0,0/255.0,0/255.0), opacity=1)

    # Handles the backspace key to go back to a previous menu
    def onBackPressed(self):
        """Consume one key press; True when it was backspace."""
        k = self.scene.kb.getkey()
        return k == "backspace"

    def _mouseInBox(self, boxObj):
        # Shared hit test: is the mouse pointer inside boxObj's rectangle?
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (boxObj.pos.x, boxObj.pos.y)
        (l, h) = (boxObj.length, boxObj.height)
        return (xBox - l/2.0) < x < (xBox + l/2.0) and (yBox - h/2.0) < y < (yBox + h/2.0)

    def _highlight(self, boxObj):
        # White while hovered, black otherwise.
        boxObj.color = color.white if self._mouseInBox(boxObj) else color.black

    # Highlights the stage buttons
    def highlightS1(self):
        self._highlight(self.lv1box)

    def highlightS2(self):
        self._highlight(self.lv2box)

    def highlightS3(self):
        self._highlight(self.lv3box)

    def highlightS4(self):
        self._highlight(self.lv4box)

    def highlightS5(self):
        self._highlight(self.lv5box)

    # Handles the button presses of each stage
    def onS1Pressed(self):
        return self._mouseInBox(self.lv1box)

    def onS2Pressed(self):
        return self._mouseInBox(self.lv2box)

    def onS3Pressed(self):
        return self._mouseInBox(self.lv3box)

    def onS4Pressed(self):
        return self._mouseInBox(self.lv4box)

    def onS5Pressed(self):
        return self._mouseInBox(self.lv5box)

    # Highlights all based on mouse position
    def highlightAll(self):
        """Refresh the hover highlight on all five stage buttons."""
        self.highlightS1()
        self.highlightS2()
        self.highlightS3()
        self.highlightS4()
        self.highlightS5()

    # Runs the display for the levels screen
    def run(self):
        """Event loop: backspace to the menu, or a click starts the clicked stage."""
        while(True):
            rate(50)
            self.highlightAll()
            if(self.scene.kb.keys == True):
                if(self.onBackPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    menu = Menu(self.scene)
                    menu.run()
            if(self.scene.mouse.events == 1):
                m = self.scene.mouse.getclick()
                for stage, hit in enumerate((self.onS1Pressed, self.onS2Pressed,
                                             self.onS3Pressed, self.onS4Pressed,
                                             self.onS5Pressed), start=1):
                    if hit():
                        self.frame.visible = False
                        del self.frame
                        game = Gameplay(self.scene, stage)
                        game.run()
                        break
                else:
                    # Click missed every button: clear the pending event counter
                    self.scene.mouse.events = 0
# Class for the instructions screen
class Instructions(object):
    """Full-screen instructions page; backspace returns to the main menu."""

    def __init__(self, scene):
        """Configure the shared scene and draw the instruction text."""
        self.scene = scene
        self.scene.background = color.yellow
        self.scene.range = 12
        self.scene.userspin = False
        self.scene.userzoom = False
        self.frame = frame()
        self.drawAll()

    def drawAll(self):
        """Draw the title banner and the instruction body text."""
        words = getInstructions()
        self.title = label(pos=(0,9,0), text='Instructions', height=35, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.ins = label(pos=(0,0,0), text=words, height=16, color=color.black, frame=self.frame, background=color.white, opacity=1, align="center")

    def onBackPressed(self):
        """Consume one key press; True when it was backspace."""
        return self.scene.kb.getkey() == "backspace"

    def run(self):
        """Poll the keyboard until backspace, then rebuild the main menu."""
        while True:
            rate(50)
            if self.scene.kb.keys == True:
                if self.onBackPressed():
                    self.frame.visible = False
                    del self.frame
                    Menu(self.scene).run()
# The class for settings menu
class Settings(object):
    """Settings screen: toggle the background music on or off.

    Backspace returns to the main menu.  The four copies of the same
    rectangle hit test are replaced by one shared helper (matching the
    PlayLevels refactor).
    """

    def __init__(self, scene):
        """Configure the shared scene and draw the ON/OFF controls."""
        self.scene = scene
        self.scene.background = color.red
        self.scene.range = 12
        self.scene.userspin = False
        self.scene.userzoom = False
        self.frame = frame()
        self.drawAll()

    # Draws the objects needed for the class
    def drawAll(self):
        """Draw the title, the Music heading, and the two buttons."""
        self.title = label(pos=(0,9,0),text='Settings', height=35, color=color.white, frame=self.frame, background=color.black, opacity=1)
        self.heading = label(pos=(0,6.1,0),text='Music', height=20, color=color.white, frame=self.frame, background=color.black, opacity=0.3)
        self.onBox = box(pos=(0,1.1,0), length=10, height=6.7, width=0, frame=self.frame, color=color.black)
        self.onLabel = label(pos=(0,1.1,0), text="ON", frame=self.frame, height=17, background=color.black, font="monospace")
        self.offBox = box(pos=(0,-6.55,0), length=10, height=6.7, width=0, frame=self.frame, color=color.black)
        self.offLabel = label(pos=(0,-6.55,0), text="OFF", frame=self.frame, height=17, background=color.black, font="monospace")

    # Handles the backspace key to go back to a the previous menu
    def onBackPressed(self):
        """Consume one key press; True when it was backspace."""
        k = self.scene.kb.getkey()
        return k == "backspace"

    def _mouseInBox(self, boxObj):
        # Shared hit test: is the mouse pointer inside boxObj's rectangle?
        (x, y) = (self.scene.mouse.pos.x, self.scene.mouse.pos.y)
        (xBox, yBox) = (boxObj.pos.x, boxObj.pos.y)
        (l, h) = (boxObj.length, boxObj.height)
        return (xBox - l/2.0) < x < (xBox + l/2.0) and (yBox - h/2.0) < y < (yBox + h/2.0)

    # Highlights the music on button
    def highlightOn(self):
        self.onBox.color = color.white if self._mouseInBox(self.onBox) else color.black

    # Highlights the music off button
    def highlightOff(self):
        self.offBox.color = color.white if self._mouseInBox(self.offBox) else color.black

    # Handles the on-button mouse press
    def onPressed(self):
        return self._mouseInBox(self.onBox)

    # Handles the off-button mouse press
    def offPressed(self):
        return self._mouseInBox(self.offBox)

    # Runs the settings display
    def run(self):
        """Event loop: hover highlights, music toggling, backspace to menu."""
        while(True):
            rate(50)
            self.highlightOn()
            self.highlightOff()
            if(self.scene.kb.keys == True):
                if(self.onBackPressed() == True):
                    self.frame.visible = False
                    del self.frame
                    menu = Menu(self.scene)
                    menu.run()
            if(self.scene.mouse.events == 1):
                m = self.scene.mouse.getclick()
                if(self.onPressed() == True):
                    playMusic()
                elif(self.offPressed() == True):
                    stopMusic()
                # Click missed both buttons: clear the pending event counter
                else: self.scene.mouse.events = 0
# Runs the game starting with the display and menu --> class-driven from there
def runGame():
    """Create the game window, start the music, and enter the main menu loop."""
    # autoscale off so the fixed range=12 coordinate layout of the menus holds
    scene = display(title="Rolling Around a Cube", width=600, height=620, autoscale=False)
    playMusic()
    menu = Menu(scene)
    menu.run()
# Script entry point: build the window and hand control to the menu loop.
runGame()
|
# -*- coding: utf-8 -*-
# @Author: Safer
# @Date: 2016-08-19 00:55:40
# @Last Modified by: Safer
# @Last Modified time: 2016-08-22 23:52:12
import sys
from PyQt5.QtWidgets import QApplication, QMessageBox
from db import DB
if __name__ == '__main__':
    # Smoke-test script for the DB wrapper: build a query against the
    # 'test' table, run a SELECT, and print the rows.  The create/update/
    # delete examples below are kept commented out for manual testing.
    app = QApplication(sys.argv)  # Qt application object (required by PyQt; not used directly here)
    db = DB()
    db.from_('test')
    ######## select ########
    db.where_('id > 0')
    db.columns_('*')
    results = db.find_()
    print(results)
    ######## create ########
    # db.create_({'id': '3', 'name': 'safer'})
    ######## update ########
    # db.where_('id = 2')
    # db.update_({'name': 'saferssssssss'})
    ######## delete ########
    # db.where_('id = 2')
    # db.delete_()
|
import pandas
import time
def table():
    """Fetch today's USD exchange-rate table from the Fubon bank site.

    Returns the first HTML table on the page as a pandas DataFrame.
    """
    # today's USD exchange rate trend
    # NOTE(review): the original URL contained the mojibake '¤cy=USD'
    # (an HTML-entity corruption of '&currency=USD'), which dropped the
    # currency query parameter entirely; restored here.
    url = 'https://www.fubon.com/Fubon_Portal/banking/Personal/deposit/exchange_rate/exchange_rate1_photo.jsp?urlParameter=1D&currency=USD'
    # read_html returns a list of DataFrames, one per <table> on the page.
    # (Renamed from 'pd', which shadowed the conventional pandas alias.)
    tables = pandas.read_html(url)
    return tables[0]
def show_buy_table(buy_price):
    """Print a BUY alert when the bank's selling price is at or below *buy_price*.

    Reads the module-level ``currency`` table; column '銀行賣出' is the
    bank's selling price.
    """
    # .values is a 1x1 ndarray; convert to a scalar once so the comparison
    # is unambiguous (truth-testing an ndarray only works for size-1 arrays).
    price = float(currency[0:1][['銀行賣出']].values)
    if price <= buy_price:
        print('BUY USD NOW')
        print('The price is :', price)
        print('\n')
    else:
        print('You still need to wait...\n')
def show_sell_table(sell_price):
    """Print a SELL alert when the bank's buying price is at or above *sell_price*.

    Reads the module-level ``currency`` table; column '銀行買入' is the
    bank's buying price.
    """
    # Convert the 1x1 ndarray to a scalar once (see show_buy_table).
    price = float(currency[0:1][['銀行買入']].values)
    if price >= sell_price:
        print('SELL USD NOW')
        print('The price is :', price)
        print('\n')
    else:
        print('You still need to wait...\n')
def service_costermus():
    """Ask the user which service to run.

    Returns 1 to buy USD, 2 to sell USD, 3 for neither (exit).
    """
    tag = input('Buy USD(1)),Sell USD(2), None(3)\n')
    # Any answer other than '1' or '2' means "none".
    if tag == '1':
        return 1
    if tag == '2':
        return 2
    return 3
def sleeptime(hour, min, sec):
    """Convert an hours/minutes/seconds triple to a total number of seconds."""
    total = sec
    total += min * 60
    total += hour * 3600
    return total
if __name__ == '__main__':
    # Interactive loop: show the current rate table, ask for a buy/sell
    # threshold, and report whether the target price has been reached.
    # NOTE(review): delaytime is computed but never used — there is no
    # sleep in the loop; presumably a periodic re-check was intended.
    delaytime = sleeptime(0,1,0)
    while True:
        # update the table when you try to type your price
        currency = table()
        print('|||Current Rate|||')
        print(currency)
        service = service_costermus()
        # buy
        if service == 1:
            buy_price = input('The lowest price that you want to BUY:\n')
            show_buy_table(float(buy_price))
        # sell
        elif service == 2:
            sell_price = input('The highest price that you want to SELL\n')
            show_sell_table(float(sell_price))
        # none
        else:
            break;
    print('############################')
import sys
import numpy as np
from matplotlib import pyplot as plt
import seaborn as sns
import equation6
import conic_parameters
sys.path.append('../conic-projection')
from conproj_utils import Conic
import crw_misc_utils
def polar_plot(r, th, ax, **kwargs):
    """Plot the polar curve r(th) in Cartesian coordinates on axes *ax*."""
    xs = r * np.cos(th)
    ys = r * np.sin(th)
    ax.plot(xs, ys, **kwargs)
def omega(R, theta):
    """(1/R) (d R/d theta) by finite differences"""
    slope = np.diff(R) / np.diff(theta)
    # Repeat the last slope so the result has the same length as R.
    slope = np.pad(slope, (0, 1), 'edge')
    return slope / R
def alpha(R, theta):
    """Angle of tangent to curve R(theta)"""
    # Finite-difference omega = (1/R) dR/dtheta, inlined so this function
    # stands alone; identical to the omega() helper defined above.
    dR_dtheta = np.pad(np.diff(R)/np.diff(theta), (0, 1), 'edge')
    om = dR_dtheta / R
    t = np.tan(theta)
    tana = (1 + om*t)/(t - om)
    return np.arctan(tana)
# --- Command-line arguments: BETA (required), optional fixed K coefficient ---
try:
    beta = float(sys.argv[1])
except IndexError:
    sys.exit('Usage: {} BETA'.format(sys.argv[0]))
try:
    fix_K = float(sys.argv[2])
except IndexError:
    fix_K = None
ntheta = 400   # number of theta samples along the shell
nearly = 1.0 # - 1e-5
xmax = 1.5     # half-width of the plotted window
R0 = np.sqrt(beta)/(1 + np.sqrt(beta))  # stagnation radius of the CRW shell
# Isotropic CRW shell: radius and inner angle as functions of theta
shell = equation6.Shell(beta=beta)
theta = np.linspace(0.0, nearly*shell.th_infty, ntheta)
R, th1 = shell.radius(theta, method='brent', full=True)
alph = alpha(R, theta)
# Approximate radius from the th1(theta) series approximation
R_approx = crw_misc_utils.radius(theta, crw_misc_utils.th1_approx(theta, beta))
m = R_approx > 0.0  # mask out unphysical (negative) approximate radii
th_tail = conic_parameters.theta_tail(beta, xi=None,
                                      f=conic_parameters.finf_CRW)
print('th1_infty =', np.degrees(shell.th1_infty), np.degrees(th1[-4:]))
print('alpha_infty =', np.degrees(alph[-4:]))
# Gradient: d phi_1 / d phi
grad = np.diff(shell.th1_infty - th1) / np.diff(shell.th_infty - theta)
# Theoretical estimate:
grad0 = beta*(np.pi / (shell.th1_infty
                       - np.sin(shell.th1_infty)*np.cos(shell.th1_infty)) - 1)
print('gradient:', grad0, grad[-4:])
# Straight tail asymptote through (1, 0) with slope -tan(th_tail)
b_a = np.tan(th_tail)
x_tail = np.linspace(-xmax, xmax, 3)
y_tail = -b_a*(x_tail - 1.0)
# 30 Aug 2016 - add in the attempted quadratic fit to phi_1 vs phi
ht = conic_parameters.HeadTail(beta)
print('Original tail parameters:')
print('beta = {:.4f}, tau = {:.2f}, J = {:.2f}, K = {:.2f}'.format(beta, ht.tau_t, ht.J, ht.K))
# ht.K *= ht.tau_t**2
if fix_K is not None:
    ht.K = fix_K
    print('Corrected K = {:.2f}'.format(ht.K))
def fquad(phi, J=ht.J, K=ht.K):
    """Quadratic fit phi_1 = J*phi + K*phi**2 (defaults taken from the HeadTail fit at definition time)."""
    return J*phi + K*phi**2
# Second shell model: anisotropic inner wind with xi = 1
shell2 = equation6.Shell(beta=beta, xi=1.0, innertype='anisotropic')
theta2 = np.linspace(0.0, nearly*shell2.th_infty, ntheta)
R2, th12 = shell2.radius(theta2, method='brent', full=True)
alph2 = alpha(R2, theta2)
th_tail2 = conic_parameters.theta_tail(beta, xi=1.0)
print('th1_infty_2 =', np.degrees(shell2.th1_infty), np.degrees(th12[-4:]))
print('alpha_infty_2 =', np.degrees(alph2[-4:]))
b_a2 = np.tan(th_tail2)
y_tail2 = -b_a2*(x_tail - 1.0)
# Output name encodes beta, e.g. script-01000.pdf for beta = 0.1
figfilename = sys.argv[0].replace('.py', '-{:05d}.pdf').format(int(1e4*beta))
fig, (ax, axx, axxx) = plt.subplots(3, 1)
# Top panel: the shells and their straight tail asymptotes
polar_plot(R, theta, ax)
polar_plot(R_approx[m], theta[m], ax, ls='None', marker='.', alpha=0.2)
polar_plot(R2, theta2, ax, lw=0.6)
ax.plot(x_tail, y_tail, '--')
ax.plot(x_tail, y_tail2, '--')
ax.set_xlim(-xmax, xmax)
ax.set_ylim(-0.2*xmax, 1.2*xmax)
ax.set_aspect('equal', adjustable='box')
# Middle panel: phi_1 vs phi, measured and fitted
phi = shell.th_infty - theta
phi1 = shell.th1_infty - th1
axx.plot(phi, phi1, alpha=0.7, label=r'$\theta_1 - \theta_{1,\infty}$ (CRW)')
# NOTE(review): this second curve re-plots phi1 but is labelled as alpha;
# by analogy with the shell2 pair below it probably should plot
# (alph - shell.th1_infty) — confirm with the author.
axx.plot(phi, phi1, alpha=0.7, label=r'$\alpha - \theta_{1,\infty}$ (CRW)')
axx.plot(shell2.th_infty - theta2, shell2.th1_infty - th12,
         alpha=0.7, label=r'$\theta_1 - \theta_{1,\infty}$ ($k = 0$)')
axx.plot(shell2.th_infty - theta2, alph2 - shell2.th1_infty,
         alpha=0.7, label=r'$\alpha - \theta_{1,\infty}$ ($k = 0$)')
# 30 Aug 2016: plot the phi_1 = J phi + K phi^2 approximation
axx.plot(phi, fquad(phi), lw=0.5, color='k',
         label='$J, K = {:.2f}, {:.2f}$'.format(ht.J, ht.K))
axx.plot(phi, fquad(phi, K=0.0), lw=0.5, ls='--', color='k', label=None)
axx.set_xlim(0.0, 0.8)
m = np.isfinite(phi1) & (phi < 0.8)
ymax = phi1[m].max()
print('ymax =', ymax)
axx.set_ylim(0.0, ymax)
axx.set_xlabel(r'$\theta - \theta_{\infty}$')
axx.legend(loc='upper left', fontsize='small')
# Bottom panel: residual of the linear (K = 0) fit, scaled by phi^2
axxx.plot(phi, -(phi1 - fquad(phi, K=0.0))/phi**2)
axxx.set_xlim(0.0, 0.8)
axxx.set_ylim(0.0, None)
fig.set_size_inches(4.0, 8.0)
fig.savefig(figfilename)
print(figfilename)
|
import os
import math
# Input/output paths for the Uyghur UD treebank train/dev split.
train_data = '/home/s1459234/data/conll2017_data/Turkic-DEL/train/cleaned-ug-ud-train.conllu'
out_train_data = '/home/s1459234/data/conll2017_data/Turkic-DEL/cleaned-ug-ud-train.conllu'
out_dev_data = '/home/s1459234/data/conll2017_data/Turkic-DEL/cleaned-ug-ud-dev.conllu'
num_sents = 100
train_split = math.floor(num_sents * 0.8)  # first 80 sentences go to train
dev_split = num_sents - train_split        # NOTE(review): computed but never used
counter = 0
# Copy lines to the train file until train_split sentences have passed, then
# to the dev file.  Sentences in CoNLL-U are delimited by blank lines.
# Context managers close all three handles; the original leaked the two
# output files, risking unflushed buffered data.
with open(train_data) as f, \
        open(out_train_data, 'w') as f_train, \
        open(out_dev_data, 'w') as f_dev:
    for line in f:
        line = line.strip()
        if counter <= train_split:
            f_train.write(line + '\n')
        else:
            f_dev.write(line + '\n')
        if line == '':
            counter += 1
print(counter)
import os
import urllib.request
from stat import S_ISDIR
from json import loads
import re
import sys
import shutil
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARTICIIPANTS_DIR = "participants"
participantsFolder = "%s/site/_participants"%(BASE_DIR,)
solutionsFolder = "%s/site/_solutions"%(BASE_DIR,)
BASE_URL = sys.argv[1] if len(sys.argv) > 1 else ''
def htmlPageIn(folder):
    """Return the name of the first index.htm / index.html file (any case)
    found in *folder*, or None if there is none."""
    # Anchored with $: the original used an unanchored prefix match, so a
    # file like 'index.html.bak' was wrongly accepted as the demo page.
    pattern = re.compile(r"index\.html?$", re.IGNORECASE)
    for f in os.listdir(folder):
        if pattern.match(f):
            return f
    return None
def generateSolution(team, folder):
    """Generate a markdown page (and copy assets) for every solution
    subfolder of *team*'s *folder*; return the list of generated page ids."""
    print("Getting solutions for...", folder, team)
    solutions = os.listdir(folder)
    result = []
    for solution in solutions:
        # Only subdirectories count as solutions
        if S_ISDIR(os.stat("%s/%s"%(folder, solution)).st_mode):
            solution_page = open("%s/%s_%s.md"%(solutionsFolder, team, solution), 'w')
            # Jekyll front matter
            solution_page.write("---\n")
            solution_page.write("layout: solution\n")
            solution_page.write("team: %s\n"%(team,))
            solution_page.write("sol: %s\n"%(solution,))
            indexPreview = htmlPageIn("%s/%s"%(folder, solution))
            if indexPreview:
                solution_page.write("demo: %s\n"%(indexPreview))
            solution_page.write("---\n")
            readmeContent = getReadmeContent("%s/%s"%(folder, solution))
            # copy folder
            try:
                shutil.copytree("%s/%s"%(folder, solution), "%s/%s_%s"%(solutionsFolder, team, solution))
            except:
                # NOTE(review): bare except silently ignores copy failures
                # (e.g. destination already exists from a previous run)
                pass
            # Page body: the solution README if present, else just its name
            if readmeContent:
                solution_page.write(readmeContent)
            else:
                solution_page.write(solution + "\n")
            solution_page.close()
            result.append("%s_%s"%(team, solution))
    return result
def getReadmeContent(folder):
    """Return the text of *folder*'s readme.md / readme.markdown (any case),
    or None if no readme is present."""
    pattern = re.compile(r"readme\.(md|markdown)", re.IGNORECASE)
    for f in os.listdir(folder):
        if pattern.match(f):
            # Read through a context manager so the handle is closed
            # (the original's open(...).read() leaked the file object).
            with open("%s/%s" % (folder, f), 'r') as fh:
                return fh.read()
    return None
def setup_site():
    """(Re)create the Jekyll collection folders for participants and solutions."""
    print("Processing site collections...")
    def clean_folder(name):
        # Remove stale generated content, then start from an empty folder.
        print("Recreating", name, "folder")
        try:
            # The original used os.rmdir, which silently fails on non-empty
            # directories, so old pages were never actually cleaned out.
            shutil.rmtree(name)
        except Exception as e:
            pass
        if not os.path.exists(name):
            os.mkdir(name)
    clean_folder(participantsFolder)
    clean_folder(solutionsFolder)
def process_participants():
    """Generate one Jekyll page per participant team folder, pulling member
    metadata from the GitHub users API and linking generated solutions."""
    i = 0  # running sort index for the team pages
    for folderName in os.listdir("%s/%s"%(BASE_DIR, PARTICIIPANTS_DIR)):
        if S_ISDIR(os.stat("%s/%s/%s"%(BASE_DIR, PARTICIIPANTS_DIR, folderName)).st_mode):
            # Folder name convention: GitHub usernames joined by underscores
            names = folderName.split("_")
            print("Discovering team...", names, "Gathering metadata from github API...")
            meta = {}
            for name in names:
                url = "https://api.github.com/users/%s"%(name,)
                print("Requesting to", url)
                try:
                    with urllib.request.urlopen(url) as data:
                        meta[name] = loads(data.read())
                except:
                    # Offline / rate-limited / unknown user: empty metadata
                    meta[name] = {}
            teamPage = open("%s/%s.md"%(participantsFolder, folderName), 'w')
            solutions = generateSolution(folderName, "%s/%s/%s"%(BASE_DIR, PARTICIIPANTS_DIR, folderName))
            # print header
            teamPage.write("---\n")
            teamPage.write("layout: team\n")
            teamPage.write("name: %s\n"%(folderName,))
            teamPage.write("sort: %s\n"%(i,))
            i += 1
            teamPage.write("team: \n")
            for k, v in meta.items():
                teamPage.write(" - id : \"%s\" \n"%(k,))
                # NOTE(review): the loop variable 'meta' shadows the outer
                # metadata dict; harmless because the dict is not used again
                # afterwards, but worth renaming.
                for meta, value in v.items():
                    teamPage.write(" %s : \"%s\" \n"%(meta, value))
            teamPage.write("solution_count: %s\n"%(len(solutions,)))
            teamPage.write("---\n")
            # print folder Readme content
            readMeFile = getReadmeContent("%s/%s/%s"%(BASE_DIR, PARTICIIPANTS_DIR, folderName))
            if readMeFile:
                teamPage.write(readMeFile)
            else:
                teamPage.write("## Solutions\n")
                for solution in solutions:
                    teamPage.write("- [%s](%s/solutions/%s.html)\n"%(solution,BASE_URL,solution))
                # print solution list instead
            # NOTE(review): teamPage is never closed; relies on interpreter
            # exit to flush the page to disk.
# Script entry point: rebuild the collection folders, then generate all pages.
setup_site()
process_participants()
# Submitter: loganw1(Wang, Logan)
from collections import defaultdict
from goody import type_as_str
import prompt
class Bag:
    """A multiset: stores hashable items with multiplicities.

    Internally a defaultdict(int) mapping item -> count (``data_struct``).
    """

    def __init__(self, *values):
        """Create a Bag; each positional argument is an iterable whose items are counted.

        (The original guarded with ``values != None``, which is always true
        for the *args tuple; the loop alone handles the empty case.)
        """
        d = defaultdict(int)
        for iterable in values:
            for item in iterable:
                d[item] += 1
        self.data_struct = d

    def __repr__(self):
        """Eval-able form: Bag([...]) with each item repeated by its count."""
        return 'Bag(' + str([a for a, i in self.data_struct.items() for x in range(i)]) + ')'

    def __str__(self):
        """Compact form: Bag(item[count],...)."""
        parts = ['%s[%s]' % (key, count) for key, count in self.data_struct.items()]
        return 'Bag(' + ','.join(parts) + ')'

    def __len__(self):
        """Total number of items, counting multiplicity."""
        return sum(self.data_struct.values())

    def unique(self):
        """Number of distinct items."""
        return len(self.data_struct)

    def __contains__(self, item):
        """True when at least one occurrence of *item* is present."""
        return item in self.data_struct

    def count(self, item):
        """Multiplicity of *item* (0 if absent).  .get avoids inserting into the defaultdict."""
        return self.data_struct.get(item, 0)

    def add(self, item):
        """Add one occurrence of *item*."""
        self.data_struct[item] += 1

    def __add__(self, other):
        """Return a new Bag with the combined counts of both bags.

        Raises TypeError for non-Bag operands.
        """
        if type(other) != Bag:
            # BUG FIX: the original concatenated str + type, which itself
            # raised a (confusing) TypeError while building the message.
            raise TypeError('object added should be of type Bag(), object added instead type:' + str(type(other)))
        result = Bag()
        for item, n in self.data_struct.items():
            result.data_struct[item] += n
        for item, n in other.data_struct.items():
            result.data_struct[item] += n
        return result

    def remove(self, item):
        """Remove one occurrence of *item*; ValueError if it is absent."""
        if item not in self.data_struct:
            raise ValueError("item not found in Bag")
        self.data_struct[item] -= 1
        if self.data_struct[item] <= 0:
            del self.data_struct[item]

    def __eq__(self, compareObj: 'Bag') -> bool:
        """True when *compareObj* is a Bag with exactly the same item counts."""
        if type(compareObj) != Bag:
            return False
        if len(self) != len(compareObj) or self.unique() != compareObj.unique():
            return False
        # BUG FIX: use .get — the original indexed compareObj.data_struct[key],
        # which silently inserted missing keys into the other bag's defaultdict.
        return all(compareObj.data_struct.get(key, 0) == val
                   for key, val in self.data_struct.items())

    def __iter__(self):
        """Iterate over items with multiplicity (insertion order of first add)."""
        return iter([a for a, i in self.data_struct.items() for x in range(i)])
if __name__ == '__main__':
    #Simple tests before running driver
    #Put your own test code here to test Bag before doing the bsc tests
    #Debugging problems with these tests is simpler
    b = Bag(['d','a','d','b','c','b','d'])
    print(repr(b))
    # Each value should appear in the repr exactly its count's number of times
    print(all((repr(b).count('\''+v+'\'')==c for v,c in dict(a=1,b=2,c=1,d=3).items())))
    for i in b:
        print(i)
    b2 = Bag(['a','a','b','x','d'])
    print(repr(b2+b2))
    print(str(b2+b2))
    print([repr(b2+b2).count('\''+v+'\'') for v in 'abdx'])
    b = Bag(['a','b','a'])
    print(repr(b))
    print()
    # Hand off to the course-provided batch-self-check driver
    import driver
    driver.default_file_name = 'bscp21W21.txt'
    # driver.default_show_exception = True
    # driver.default_show_exception_message = True
    # driver.default_show_traceback = True
    driver.driver()
|
#No Context Bot by @robuyasu#3100
from discord.ext.commands import Bot
from discord.ext import commands
from itertools import cycle
from TwitApi import TwitApi
from twitter.error import TwitterError
import discord
import asyncio
import twitter
import sys, traceback
import os
import random
# Global bot state.
Client = discord.Client()
client = commands.Bot(command_prefix='!')
ContextOn = True                      # master switch for the tweet loop
RobId = "154732271742615553"          # Discord user id of the bot owner
# (the original assigned RobId twice; the duplicate is removed)
# Read the bot version once at startup; context manager closes the handle.
with open("./text/version.txt") as _version_file:
    CurrentVersion = _version_file.read()
def post_status(message, postcmd=False):
    """Tweet the content of a Discord *message*.

    When *postcmd* is True the first whitespace-separated word (the invoking
    command) is stripped before posting.  Attachments are forwarded as tweet
    media.  Returns the posted status object, or False on a TwitterError.

    (The original's four near-identical branches are folded into one path.)
    """
    # Collect attachment URLs (if any) to forward as media.
    attaches = [item["url"] for item in message.attachments]
    # Build the tweet text, truncated to 250 chars as before.
    if postcmd == False:
        text = message.content[:250] or " "  # never post an empty status
    else:
        Content = message.content.split(" ")
        text = (" ".join(Content[1:]))[:250]
    try:
        print("Touchdown")
        if attaches:
            return TwitApi.PostUpdate(text, media=attaches)
        return TwitApi.PostUpdate(text)
    except TwitterError:
        return False
async def post_tweets():
    """Background task: every ~10 minutes pick a random recent message from
    the source channel, tweet it, and DM the author a link."""
    await client.wait_until_ready()
    await asyncio.sleep(5)  # small grace period after login
    while not client.is_closed:
        if ContextOn:
            # Sample one of the last 25 messages in the source channel
            CurrentMessages = client.logs_from(client.get_channel('488054001795989524'),limit=25)
            MsgList = []
            async for msg in CurrentMessages:
                MsgList.append(msg)
            ChosenMsg = random.choice(MsgList)
            stats = post_status(ChosenMsg)
            if stats:
                await client.send_message(ChosenMsg.author,"%s, your message has been tweeted to the twitter account! Check it out here: %s"%(ChosenMsg.author.mention,"https://twitter.com/statuses/" + str(stats.id)))
                await client.send_message(client.get_channel("488474777766461450"),"https://twitter.com/statuses/" + str(stats.id))
            else:
                await client.say("An error has occured in post_tweets(), client a nil value.")
        for i in range(10): #Waits for 10 minutes
            if ContextOn:
                # Count down in the bot's presence text
                TMinus = "Posting in %s minute(s)" % (10-i)
                await client.change_presence(game=discord.Game(name=TMinus))
            # NOTE(review): indentation reconstructed (source was mangled);
            # the sleep is placed at loop level so the task never busy-spins
            # when ContextOn is False — confirm against the original file.
            await asyncio.sleep(60)
@client.event
async def on_ready():
    # Fired once the bot has connected and finished logging in.
    print("No Context Bot has been started up. To stop the program, say !bootdown . Version " + CurrentVersion)
@client.event
async def on_message(message):
    # Forward every incoming message to the command processor.
    await client.process_commands(message) #Makes sure to process the command
# Command modules (cogs) loaded at startup
initial_extensions = [
    'cmds.ClientOwnerOnly',
    'cmds.ModeratorOnly',
    'cmds.UserAccessible'
]
if __name__ == '__main__': #Loads commands/extensions
    for extension in initial_extensions:
        try:
            client.load_extension(extension)
            print("Loaded extension")
        except Exception as e:
            print(f'Failed to load extension {extension}.', file=sys.stderr)
            traceback.print_exc()
    # Schedule the periodic tweet loop, then start the bot
    # (token comes from the TOKEN environment variable).
    client.loop.create_task(post_tweets())
    client.run(os.environ.get('TOKEN'))
|
from urllib import request,parse
from http import cookiejar
# Create a MozillaCookieJar instance backed by cookie.txt
filename = "cookie.txt"
cookie = cookiejar.MozillaCookieJar(filename)
# Build the cookie-handling processor for the jar
cookie_handler = request.HTTPCookieProcessor(cookie)
# Create an HTTP handler
http_handler = request.HTTPHandler()
# Create an HTTPS handler
https_handler = request.HTTPSHandler()
# Build the opener chaining HTTP/HTTPS handlers with cookie support
opener = request.build_opener(http_handler,https_handler,cookie_handler)
def login():
    """Perform the initial login.

    Posts the username/password to renren's login endpoint so the opener's
    cookie jar captures the session cookie, then saves the jar to disk.
    """
    url = "http://www.renren.com/PLogin.do"
    # Keys must match the "name" attributes of the login form's inputs.
    data = {
        # BUG FIX: the original posted the key "emali" (typo); the server
        # expects "email", so the credentials were never submitted.
        "email": "17315110762",
        "password": "582452951"
    }
    # URL-encode the form data
    data = parse.urlencode(data)
    req = request.Request(url, data=data.encode())
    # Send the request through the cookie-aware opener
    rsp = opener.open(req)
    # Save the cookie file.
    # ignore_discard: save cookies even if they are marked to be discarded
    # ignore_expires: save cookies even if they have already expired
    cookie.save(ignore_discard=True, ignore_expires=True)
if __name__ == '__main__':
    # Running as a script performs the one-time login to capture cookies.
    login()
|
import pandas as pd
import os
class Person():
    """Simple record of a person's name and address.

    (The original declared class-level ``name``/``address`` attributes that
    were immediately shadowed by the instance attributes; removed.)
    """

    def __init__(self, name, address):
        """Store the given name and address on the instance."""
        self.name = name
        self.address = address
import os
import pandas as pd
import filter
# Batch-convert every CSV in prksn_test/: run it through the project's
# filter module, sort rows by time then part, and write the result
# (semicolon-separated, 3-decimal floats, no header/index) into processed_prksn/.
for fname in os.listdir('prksn_test'):
    if fname.endswith('.csv'):
        print(fname)
        data = filter.filter(f'prksn_test/{fname}')
        data.sort_values(by=['time', 'part']).to_csv(f'processed_prksn/{fname}', sep=';', header=False, index=False, float_format='%.3f')
|
__author__ = "Narwhale"
import linecache
#数据处理
fields=('bid','uid','username','v_class','content','img','time','source','rt_num','cm_num','rt_uid'
,'rt_username','rt_v_class','rt_content','rt_img','src_rt_num','src_cm_num','gender','rt_mid'
,'location','rt_mid','mid','lat','lon','lbs_type','lbs_title','poiid','links','hashtags','ats'
,'rt_links','rt_hashtags','rt_ats','v_url','rt_v_url')
keys = {fields[k]:k for k in range(0,len(fields))}
f = linecache.getlines('twitter数据挖掘片段.txt') #返回的是以每行字符串为元素的列表形式,可以通过索引取得数据
# print(f[0])
lines = [x[1:-1].split(',') for x in f]
|
import boto3
import pprint
import os
import pprint
REGION = os.getenv('AWS_REGION', 'us-west-2')
def convert_list_to_dict(obj, key='Key', value='Value'):
    """Flatten a list of {key: ..., value: ...} mappings (AWS tag style)
    into a single dict of key -> value."""
    result = {}
    for entry in obj:
        result[entry[key]] = entry[value]
    return result
def describe_all_instances(client):
    """Return every EC2 instance visible to *client*, with tags flattened.

    Paginates through describe_instances; each instance's 'Tags' list of
    {'Key','Value'} pairs is replaced by a plain dict.  The full list is
    pretty-printed before being returned.
    """
    instances = []
    paginator = client.get_paginator('describe_instances')
    response = paginator.paginate()
    for page in response:
        for reservations in page['Reservations']:
            for instance in reservations['Instances']:
                # BUG FIX: untagged instances have no 'Tags' key at all, so
                # the original raised KeyError on them; default to [].
                instance['Tags'] = convert_list_to_dict(instance.get('Tags', []))
                instances.append(instance)
    pprint.pprint(instances)
    return instances
def main():
    """Build region-scoped AWS clients and dump all EC2 instances."""
    # NOTE(review): the CloudWatch client is created but never used —
    # presumably left over from a planned metrics step.
    client = boto3.client('cloudwatch', REGION)
    ec2_client = boto3.client('ec2', REGION)
    instances = describe_all_instances(ec2_client)
    # print(instances)s
if __name__ == '__main__':
    # Entry point: dump all EC2 instances in the configured region.
    main()
# responses = ec2client.describe_instances(
# Filters=[
# {
# 'Name': 'image-id',
# 'Values': [
# 'ami-f08b0388',
# ]
# },
# ],
# InstanceIds=[
# 'i-0dd58e465f9ef8a1f'
# ]
# )
# for reservations in responses['Reservations']:
# for instances in reservations['Instances']:
# for tags in instances['Tags']:
# if tags['Key'] == 'Name':
# print(tags['Value'])
# print(responses)
# if (instance['Value']):
# ssm.add(instance['Value'])
# ssm.add(instance['Key'])
# print(reservations)
# ec2 = boto3.client('ec2', region_name=region)
# responses = ec2.describe_instances(
# Filters=[
# {
# 'Name': 'platform',
# 'Values': [
# 'windows',
# ]
# },
# ],
# )
# instances = []
# for reservations in responses['Reservations']:
# for instance in reservations['Instances']:
# if (instance['InstanceId']):
# instances.append(instance['InstanceId'])
# # print(instances)
# print(responses) |
import itertools
import numpy as np
# Build every combination (all sizes 1..len) of the values 1..4.
list1 = np.arange(1, 5, 1)
list2 = []
# Renamed the loop temporary: the original bound it to `iter`, shadowing
# the builtin of the same name.
for size in range(1, len(list1) + 1):
    # itertools.combinations yields size-length tuples in sorted order.
    list2.append(list(itertools.combinations(list1, size)))
print(list2)
# from markdown2 import markdown as md2html
from markdown import markdown as md2html
from IPython.display import HTML, display
bg_color = 'background-color:#d8e7ff;' #e2edff;'
def show_answer(excercise_tag):
    """Render the stored answer for *excercise_tag* as a styled HTML box.

    Looks the tag up in the module-level `answers` dict, whose entries are
    (TYPE, text) pairs with TYPE one of "HTML", "MD", or "TXT".
    """
    kind, body = answers[excercise_tag]
    body = body[1:]  # drop the leading newline of the raw triple-quoted string
    if kind == "MD":
        body = md2html(body)
    elif kind == "TXT":
        body = '<code style="' + bg_color + '">' + body + '</code>'
    # "HTML" answers pass through unchanged.
    boxed = '<div style="' + bg_color + 'padding:0.5em;">' + str(body) + '</div>'
    display(HTML(boxed))
answers = {}
answers['thesaurus 1'] = ["TXT",r"""
Data Assimilation (DA) Ensemble Stochastic Data
Filtering Sample Random Measurements
Kalman filter (KF) Set of draws Monte-Carlo Observations
State estimation
Data fusion
"""]
answers['thesaurus 2'] = ["TXT",r"""
Statistical inference Ensemble member Quantitative belief Recursive
Inverse problems Sample point Probability Sequential
Inversion Realization Relative frequency Iterative
Estimation Single draw Serial
Approximation Particle
Regression
Fitting
"""]
answers['why Gaussian'] = ['MD',r"""
* Pragmatic: leads to least-squares problems, which lead to linear systems of equations.
This was demonstrated by the simplicity of the parametric Gaussian-Gaussian Bayes' rule.
* The central limit theorem (CLT) and all of its implications.
* The intuitive condition "ML estimator = sample average" implies the sample is drawn from a Gaussian.
* For more, see chapter 7 of: [Probability theory: the logic of science](https://books.google.com/books/about/Probability_Theory.html?id=tTN4HuUNXjgC) (Edwin T. Jaynes), which is an excellent book for understanding probability and statistics.
"""]
answers['pdf_G_1'] = ['MD',r'''
pdf_values = 1/sqrt(2*pi*P)*exp(-0.5*(x-mu)**2/P)
# Version using the scipy (sp) library:
# pdf_values = sp.stats.norm.pdf(x,loc=mu,scale=sqrt(P))
''']
answers['pdf_U_1'] = ['MD',r'''
def pdf_U_1(x,mu,P):
# Univariate (scalar), Uniform pdf
pdf_values = ones((x-mu).shape)
a = mu - sqrt(3*P)
b = mu + sqrt(3*P)
pdf_values[x<a] = 0
pdf_values[x>b] = 0
height = 1/(b-a)
pdf_values *= height
return pdf_values
''']
answers['BR deriv'] = ['MD',r'''
<a href="https://en.wikipedia.org/wiki/Bayes%27_theorem#Derivation" target="_blank">Wikipedia</a>
''']
answers['BR grid normalization'] = ['MD',r'''
Because it can compute $p(y)$ as
the factor needed to normalize to 1,
as required by the definition of pdfs.
That's what the `#normalization` line does.
Here's the proof that the normalization (which makes `pp` sum to 1) is equivalent to dividing by $p(y)$:
$$\texttt{sum(pp)*dx} \approx \int p(x) p(y|x) \, dx = \int p(x,y) \, dx = p(y) \, .$$
''']
answers['Dimensionality a'] = ['MD',r'''
$N^m$
''']
answers['Dimensionality b'] = ['MD',r'''
$15 * 360 * 180 = 972'000 \approx 10^6$
''']
answers['Dimensionality c'] = ['MD',r'''
$10^{10^6}$
''']
answers['BR Gauss'] = ['MD',r'''
We can ignore factors that do not depend on $x$.
\begin{align}
p(x|y)
&= \frac{p(x) \, p(y|x)}{p(y)} \\\
&\propto p(x) \, p(y|x) \\\
&= N(x|b,B) \, N(y|x,R) \\\
&\propto \exp \Big( \frac{-1}{2} \Big( (x-b)^2/B + (x-y)^2/R \Big) \Big) \\\
&\propto \exp \Big( \frac{-1}{2} \Big( (1/B + 1/R)x^2 - 2(b/B + y/R)x \Big) \Big) \\\
&\propto \exp \Big( \frac{-1}{2} \Big( x - \frac{b/B + y/R}{1/B + 1/R} \Big)^2 \cdot (1/B + 1/R) \Big) \, .
\end{align}
The last line can be identified as $N(x|\mu,P)$ as defined above.
''']
answers['KG 2'] = ['MD',r'''
Because it
* drags the estimate from $b$ "towards" $y$.
* is between 0 and 1.
* weights the observation noise level (R) vs. the total noise level (B+R).
* In the multivariate case (and with $H=I$), the same holds for its eigenvectors.
''']
answers['BR Gauss code'] = ['MD',r'''
P = 1/(1/B+1/R)
mu = P*(b/B+y/R)
# Gain version:
# KG = B/(B+R)
# P = (1-KG)*B
# mu = b + KG*(y-b)
''']
answers['LinReg deriv'] = ['MD',r'''
$$ \frac{d J_K}{d\alpha} = 0 = \ldots $$
''']
answers['LinReg F_k'] = ['MD',r'''
$$ F_k = \frac{k+1}{k} $$
''']
answers['LinReg func'] = ['MD',r'''
kk = arange(1,k+1)
alpha = sum(kk*yy[:k]) / sum(kk**2)
''']
answers['KF func'] = ['MD',r'''
# Forecast
muf[k+1] = F(k)*mua[k]
PPf[k+1] = F(k)*PPa[k]*F(k) + Q
# Analysis
PPa[k+1] = 1/(1/PPf[k+1] + H*1/R*H)
mua[k+1] = PPa[k+1] * (muf[k+1]/PPf[k+1] + yy[k]*H/R)
# Analysis -- Kalman gain version:
#KG = PPf[k+1]*H / (H*PPf[k+1]*H + R)
#PPa[k+1] = (1-KG)*PPf[k+1]
#mua[k+1] = muf[k+1]+KG*(yy[k]-muf[k+1])
''']
answers['KF KG fail'] = ['MD',r'''
Because `PPa[0]` is infinite. And while the limit (as `PPf` goes to +infinity) of `KG = PPf*H / (H*PPf*H + R)` is `H (= 1)`, its numerical evaluation fails (as it should). Note that the infinity did not cause any problems numerically for the "weighted average" form.
''']
answers['LinReg plot'] = ['MD',r'''
Let $\alpha_K$ denote the linear regression estimates (of the slope) based on the observations $y_{1:K} = \\{y_1,\ldots,y_K\\}$.
Similarly, let $\mu_K$ denote the KF estimate of $x_K$ based on $y_{1:K}$.
It can be seen in the plot that
$
K \alpha_K = \mu_K \, .
$
''']
answers['KF = LinReg a'] = ['MD',r'''
We'll proceed by induction. With $P_0 = \infty$, we get $P_1 = R$, which initializes (4). Now, from (3):
$$
\begin{align}
P_{K+1} &= 1\Big/\big(1/R + \textstyle (\frac{K}{K+1})^2 / P_K\big)
\\\
&= R\Big/\big(1 + \textstyle (\frac{K}{K+1})^2 \frac{\sum_{k=1}^K k^2}{K^2}\big)
\\\
&= R\Big/\big(1 + \textstyle \frac{\sum_{k=1}^K k^2}{(K+1)^2}\big)
\\\
&= R(K+1)^2\Big/\big((K+1)^2 + \sum_{k=1}^K k^2\big)
\\\
&= R(K+1)^2\Big/\sum_{k=1}^{K+1} k^2
\, ,
\end{align}
$$
which concludes the induction.
The proof for (b) is similar.
''']
answers['Asymptotic P'] = ['MD',r'''
The fixed point $P_\infty$ should satisfy
$P_\infty = 1/\big(1/R + 1/[F^2 P_\infty]\big)$.
This yields $P_\infty = R (1-1/F^2)$.
''']
answers["Hint: Lorenz energy"] = ["MD",r'''
Hint: what's its time-derivative?
''']
answers["Lorenz energy"] = ["MD",r'''
\begin{align}
\frac{d}{dt}
\sum_i
x_i^2
&=
2 \sum_i
x_i \dot{x}_i
\end{align}
Next, insert the quadratic terms from the ODE,
$
\dot x_i = (x_{i+1} − x_{i-2}) x_{i-1}
\, .
$
Finally, apply the periodicity of the indices.
''']
answers["error evolution"] = ["MD",r"""
* (a). $\frac{d \varepsilon}{dt} = \frac{d (x-z)}{dt}
= \frac{dx}{dt} - \frac{dz}{dt} = f(x) - f(z) \approx f(x) - [f(x) - \frac{df}{dx}\varepsilon ] = F \varepsilon$
* (b). Differentiate $e^{F t}$.
* (c).
* (1). Dissipates to 0.
* (2). No.
A balance is always reached between
the uncertainty reduction $(1-K)$ and growth $F^2$.
Also recall the asymptotic value of $P_k$ computed from
[the previous tutorial](T3 - Univariate Kalman filtering.ipynb#Exc-3.14-'Asymptotic-P':).
* (d). [link](https://en.wikipedia.org/wiki/Logistic_function#Logistic_differential_equation)
* (e). $\frac{d \varepsilon}{dt} \approx F \varepsilon + (f-g)$
"""]
answers["doubling time"] = ["MD",r"""
xx = output_63[0][:,-1] # Ensemble of particles at the end of integration
v = np.var(xx, axis=0) # Variance (spread^2) of final ensemble
v = mean(v) # homogenize
d = sqrt(v) # std. dev.
eps = [FILL IN SLIDER VALUE] # initial spread
T = [FILL IN SLIDER VALUE] # integration time
rate = log(d/eps)/T # assuming d = eps*exp(rate*T)
print("Doubling time (approx):",log(2)/rate)
"""]
answers['Gaussian sampling a'] = ['MD',r'''
Firstly, a linear (affine) transformation can be decomposed into a sequence of sums. This means that $\mathbf{x}$ will be Gaussian.
It remains only to calculate its moments.
By the [linearity of the expected value](https://en.wikipedia.org/wiki/Expected_value#Linearity),
$$E(\mathbf{x}) = E(\mathbf{L} \mathbf{z} + \mathbf{b}) = \mathbf{L} E(\mathbf{z}) + \mathbf{b} = \mathbf{b} \, .$$
Moreover,
$$\newcommand{\b}{\mathbf{b}} \newcommand{\x}{\mathbf{x}} \newcommand{\z}{\mathbf{z}} \newcommand{\L}{\mathbf{L}}
E((\x - \b)(\x - \b)^T) = E((\L \z)(\L \z)^T) = \L E(\z^{} \z^T) \L^T = \L \mathbf{I}_m \L^T = \L \L^T \, .$$
''']
answers['Gaussian sampling b'] = ['MD',r'''
Type `randn??` in a code cell and execute it.
''']
answers['Gaussian sampling c'] = ['MD',r'''
z = randn((m,1))
x = b + L @ z
''']
answers['Gaussian sampling d'] = ['MD',r'''
b_vertical = 10*ones((m,1))
E = b_vertical + L @ randn((m,N))
#E = np.random.multivariate_normal(b,P,N).T
''']
answers['Average sampling error'] = ['MD',r'''
Procedure:
1. Repeat the experiment many times.
2. Compute the average error ("bias") of $\overline{\mathbf{x}}$. Verify that it converges to 0 as $N$ is increased.
3. Compute the average *squared* error. Verify that it is approximately $\text{diag}(\mathbf{P})/N$.
''']
answers['ensemble moments'] = ['MD',r'''
x_bar = np.sum(E,axis=1)/N
P_bar = zeros((m,m))
for n in range(N):
anomaly = (E[:,n] - x_bar)[:,None]
P_bar += anomaly @ anomaly.T
#P_bar += np.outer(anomaly,anomaly)
P_bar /= (N-1)
''']
answers['Why (N-1)'] = ['MD',r'''
* [Unbiased](https://en.wikipedia.org/wiki/Variance#Sample_variance)
* Suppose we compute the square root of this estimate. Is this an unbiased estimator for the standard deviation?
''']
answers['ensemble moments vectorized'] = ['MD',r'''
* (a). Show that element $(i,j)$ of the matrix product $\mathbf{A}^{} \mathbf{B}^T$
equals element $(i,j)$ of the sum of the outer product of their columns: $\sum_n \mathbf{a}_n \mathbf{b}_n^T$. Put this in the context of $\overline{\mathbf{P}}$.
* (b). Use the following
code:
x_bar = np.sum(E,axis=1,keepdims=True)/N
A = E - x_bar
P_bar = A @ A.T / (N-1)
''']
# Skipped
answers['Why matrix notation'] = ['MD',r'''
- Removes indices
- Highlights the linear nature of many computations.
- Tells us immediately if we're working in state space or ensemble space
(i.e. if we're manipulating individual dimensions, or ensemble members).
- Helps with understanding subspace rank issues
- Highlights how we work with the entire ensemble, and not individual members.
- Suggest a deterministic parameterization of the distributions.
''']
answers['estimate cross'] = ['MD',r'''
def estimate_cross_cov(E1,E2):
N = E1.shape[1]
assert N==E2.shape[1]
A1 = E1 - np.mean(E1,axis=1,keepdims=True)
A2 = E2 - np.mean(E2,axis=1,keepdims=True)
CC = A1 @ A2.T / (N-1)
return CC
''']
answers['errors'] = ['MD',r'''
* (a). Error: discrepancy from estimator to the parameter targeted.
Residual: discrepancy from explained to observed data.
* (b). Bias = *average* (i.e. systematic) error.
* (c). [Wiki](https://en.wikipedia.org/wiki/Mean_squared_error#Proof_of_variance_and_bias_relationship)
''']
# Also comment on CFL condition (when resolution is increased)?
answers['Cov memory'] = ['MD',r'''
* (a). $m$-by-$m$
* (b). Using the [cholesky decomposition](https://en.wikipedia.org/wiki/Cholesky_decomposition#Computation),
at least 2 times $m^3/3$.
* (c). Assume $\mathbf{P}$ stored as float (double). Then it's 8 bytes/element.
And the number of elements in $\mathbf{P}$: $m^2$. So the total memory is $8 m^2$.
* (d). 8 trillion bytes. I.e. 8 million MB.
''']
answers['EnKF v1'] = ['MD',r'''
def my_EnKF(N):
E = mu0[:,None] + P0_chol @ randn((m,N))
for k in range(1,K+1):
# Forecast
t = k*dt
E = f(E,t-dt,dt)
E += Q_chol @ randn((m,N))
if not k%dkObs:
# Analysis
y = yy[k//dkObs-1] # current obs
hE = h(E,t)
PH = estimate_cross_cov(E,hE)
HPH = estimate_mean_and_cov(hE)[1]
Perturb = R_chol @ randn((p,N))
KG = divide_1st_by_2nd(PH, HPH+R)
E += KG @ (y[:,None] - Perturb - hE)
mu[k] = mean(E,axis=1)
''']
answers['rmse'] = ['MD',r'''
rmses = sqrt(np.mean((xx-mu)**2, axis=1))
average = np.mean(rmses)
''']
answers['Repeat experiment a'] = ['MD',r'''
* (a). Set `p=1` above, and execute all cells below again.
''']
answers['Repeat experiment b'] = ['MD',r'''
* (b). Insert `seed(i)` for some number `i` above the call to the EnKF or above the generation of the synthetic truth and obs.
''']
answers['Repeat experiment cd'] = ['MD',r'''
* (c). Void.
* (d). Use: `Perturb = D_infl * R_chol @ randn((p,N))` in the EnKF algorithm.
''']
answers['jagged diagnostics'] = ['MD',r'''
Because they are only defined at analysis times, i.e. every `dkObs` time step.
''']
answers['RMSE hist'] = ['MD',r'''
* The MSE will be (something close to) chi-square.
* That the estimator and truth are independent, Gaussian random variables.
''']
answers['Rank hist'] = ['MD',r'''
* U-shaped: Too confident
* A-shaped: Too uncertain
* Flat: well calibrated
''']
# Pointless...
# Have a look at the phase space trajectory output from `plot_3D_trajectory` above.
# The "butterfly" is contained within a certain box (limits for $x$, $y$ and $z$).
answers['RMSE vs inf error'] = ['MD',r'''
It follows from [the fact that](https://en.wikipedia.org/wiki/Lp_space#Relations_between_p-norms)
$ \newcommand{\x}{\mathbf{x}} \|\x\|_2 \leq m^{1/2} \|\x\|\_\infty \quad \text{and} \quad \|\x\|_1 \leq m^{1/2} \|\x\|_2$
that
$$
\text{RMSE}
= \frac{1}{K}\sum_k \text{RMSE}_k
\leq \| \text{RMSE}\_{0:k} \|\_\infty
$$
and
$$ \text{RMSE}_k = \| \text{Error}_k \|\_2 / \sqrt{m} \leq \| \text{Error}_k \|\_\infty$$
''']
answers['Twin Climatology'] = ['MD',r'''
config = Climatology(**defaults)
avergs = config.assimilate(HMM,xx,yy).average_in_time()
print_averages(config,avergs,[],['rmse_a','rmv_a'])
''']
answers['Twin Var3D'] = ['MD',r'''
config = Var3D(**defaults)
...
''']
answers['forward_euler'] = ['MD', r'''
Missing line:
xyz_step = xyz + dxdt(xyz, h, sigma=SIGMA, beta=BETA, rho=RHO) * h
''']
answers['log_growth'] = ['MD', r'''
Missing lines:
nrm = sqrt( (x_pert_k - x_control_k) @ (x_pert_k - x_control_k).T )
log_growth_rate = (1.0 / T) * log(nrm / eps)
''']
answers['power_method'] = ['MD', r'''
Missing lines:
v = M @ v
v = v / sqrt(v.T @ v)
mu = v.T @ M @ v
''']
answers['power_method_convergence_rate'] = ['HTML', r'''
Suppose we have a random vector <span style="font-size:1.25em">$\mathbf{v}_0$</span>. If <span style="font-size:1.25em">$\mathbf{M}$</span> is diagonalizable, then we can write <span style="font-size:1.25em">$\mathbf{v}_0$</span> in a basis of eigenvectors, i.e.,
<h3>$$v_0 = \sum_{j=1}^n \alpha_j \nu_j,$$ </h3>
where <span style="font-size:1.25em">$\nu_j$</span> is an eigenvector for the eigenvalue <span style="font-size:1.25em">$\mu_j$</span>, and <span style="font-size:1.25em">$\alpha_j$</span> is some coefficient in <span style="font-size:1.25em">$\mathbb{R}$</span>. We consider thus, with probability one, <span style="font-size:1.25em">$\alpha_1 \neq 0$</span>.
In this case, we note that
<h3>
$$\mathbf{M}^k \mathbf{v}_0 = \mu_1^k \left( \alpha_1 \nu_1 + \sum_{j=2}^n \alpha_j \left(\frac{\mu_j}{\mu_1}\right)^k \nu_j\right).
$$</h3>
But
<h3>$$\frac{\rvert \mu_j\rvert}{\rvert\mu_1\rvert} <1$$</h3>
for each <span style="font-size:1.25em">$j>1$</span>, so that the projection of <span style="font-size:1.25em">$\mathbf{M}^k \mathbf{v}_0$</span> into each eigenvector <span style="font-size:1.25em">$\{\nu_j\}_{j=2}^n$</span> goes to zero at a rate of at least
<h3>
$$\mathcal{O} \left(\left[ \frac{\lambda_2}{\lambda_1} \right]^k \right).$$
</h3>
We need only note that <span style="font-size:1.25em">$\mathbf{M}^k \mathbf{v}_0$</span> and <span style="font-size:1.25em">$\mathbf{v}_{k}$</span> share the same span.
''']
answers['lyapunov_exp_power_method'] = ['HTML', r'''
<ol>
<li>Consider, if <span style="font-size:1.25em">$ \widehat{\mu}_k \rightarrow \mu_1$</span> as <span style="font-size:1.25em">$k \rightarrow \infty$</span>, then for all <span style="font-size:1.25em">$\epsilon>0$</span> there exists a <span style="font-size:1.25em">$T_0$</span> such that,<h3>$ \rvert \mu_1 \rvert - \epsilon < \rvert \widehat{\mu}_k\rvert < \rvert \mu_1 \rvert + \epsilon $, </h3>
<br>
for all <span style="font-size:1.25em">$k > T_0$</span>. In particular, we will choose some <span style="font-size:1.25em">$\epsilon$ </span> sufficiently small such that,
<h3>$$\begin{align}
\rvert \mu_1 \rvert - \epsilon > 0.
\end{align}$$</h3>
<br>
This is possible by the assumption <span style="font-size:1.25em">$\rvert \mu_1 \rvert >0$</span>.
We will write,
<h3>$\widehat{\lambda}_T =\frac{1}{T} \sum_{k=1}^{T_0} \log\left(\rvert \widehat{\mu}_1 \rvert\right) + \frac{1}{T} \sum_{k=T_0 +1}^T \log \left(\rvert \widehat{\mu}_1 \rvert\right)$. </h3>
<br>
We note that <span style="font-size:1.25em">$\log$</span> is monotonic, so that for <span style="font-size:1.25em">$T> T_0$</span>,
<h3>$\frac{1}{T} \sum_{k=T_0 +1}^T \log\left(\rvert \mu_1\rvert - \epsilon \right) < \frac{1}{T} \sum_{k=T_0 +1}^T \log\left(\rvert\widehat{\mu}_k \rvert \right) <\frac{1}{T} \sum_{k=T_0 +1}^T \log\left(\rvert \mu_1\rvert + \epsilon \right)$.</h3>
<br>
But that means,
<h3>$\frac{T - T_0}{T} \log\left(\rvert \mu_1\rvert - \epsilon \right) < \frac{1}{T} \sum_{k=T_0 +1}^T \log\left(\rvert \widehat{\mu}_k \rvert \right) <\frac{T - T_0}{T} \log\left(\rvert \mu_1\rvert + \epsilon \right)$.</h3>
<br>
Notice that in limit,
<h3>$\lim_{T\rightarrow \infty}\frac{1}{T}\sum_{k=1}^{T_0} \log\left(\rvert \widehat{\mu}_k\rvert \right) = 0$, </h3>
<br>
and therefore we can show,
<h3>$\log\left(\rvert \mu_1 \rvert - \epsilon \right) < \lim_{T \rightarrow \infty} \widehat{\lambda}_T < \log\left(\rvert \mu_1 \rvert + \epsilon \right),$</h3>
<br>
for all <span style="font-size:1.25em">$\epsilon >0$</span>. This shows that
<h3>$ \lim_{T \rightarrow \infty} \widehat{\lambda}_T = \log\left(\rvert \mu_1\rvert \right). $</h3></li>
<br>
<li>
The Lyapunov exponents for the fixed matrix <span style="font-size:1.25em">$\mathbf{M}$</span> are determined by the log, absolute value of the eigenvalues.
</li>
</ol>
''']
answers['fixed_point'] = ['HTML', r'''
Suppose for all components <span style="font-size:1.25em">$x^j$</span> we choose <span style="font-size:1.25em">$x^j = F$</span>. The time derivative at this point is clearly zero.
''']
answers['probability_one'] = ['HTML', r'''
We relied on the fact that there is probability one that a Gaussian distributed vector has a nonzero projection into the eigenspace for the leading eigenvalue. Consider why this is true.
Let <span style="font-size:1.25em">$\{\mathbf{v}_j \}_{j=1}^n $</span> be any orthonormal basis such that <span style="font-size:1.25em">$\mathbf{v}_1$</span> is an eigenvector for <span style="font-size:1.25em">$\mu_1$</span>. Let
<h3>$$
\chi(\mathbf{x}) : \mathbb{R}^n \rightarrow \{0, 1\}
$$</h3>
<br>
be the indicator function on the span of <span style="font-size:1.25em">$\{\mathbf{v}_j\}_{j=2}^n$</span>, i.e., the hyper-plane orthogonal to <span style="font-size:1.25em">$\mathbf{v}_1$</span>. The probability of choosing a Gaussian distributed random vector that has no component in the span of <span style="font-size:1.25em">$\mathbf{v}_1$</span> is measured by integrating
<h2>$$
\frac{1}{\left(2\pi\right)^n}\int_{\mathbb{R}} \cdots \int_{\mathbb{R}}\chi\left(\sum_{j=1}^n \alpha_j v_j \right)
e^{\frac{-1}{2} \sum_{j=1}^n \alpha_j^2 }
{\rm d}\alpha_1 \cdots {\rm d} \alpha_n.
$$</h2>
<br>
But <span style="font-size:1.25em">$\chi \equiv 0$</span> whenever <span style="font-size:1.25em">$\alpha_1 \neq 0$</span>, and <span style="font-size:1.25em">${\rm d} \alpha_1 \equiv 0$</span> on this set. This means that the probability of selecting a Gaussian distributed vector with <span style="font-size:1.25em">$\alpha_1 =0$</span> is equal to zero.
<br>
In more theoretical terms, this corresponds to the hyper-plane having measure zero with respect to the Lebesgue measure.
''']
answers['gram-schmidt'] = ['HTML', r'''
The vectors, <span style="font-size:1.25em">$\{\mathbf{x}_0^1, \mathbf{x}_0^2 \}$</span> are related to the vectors <span style="font-size:1.25em">$\{\mathbf{x}_1^1, \mathbf{x}_1^2 \}$</span> by propagating forward via the matrix <span style="font-size:1.25em">$\mathbf{M}$</span>, and the Gram-Schmidt step. Thus by writing,
<h3>$$\begin{align}
\widehat{\mathbf{x}}_1^2 &\triangleq \mathbf{y}^2_1 + \langle \mathbf{x}_1^1, \widehat{\mathbf{x}}^2_1\rangle \mathbf{x}_1^1
\end{align}$$</h3>
<br>
it is easy to see
<h3>$$\begin{align}
\mathbf{M} \mathbf{x}_0^1 &= U^{11}_1 \mathbf{x}_1^1 \\
\mathbf{M} \mathbf{x}_0^2 &= U^{22}_1 \mathbf{x}_1^2 + U^{12}_1 \mathbf{x}_1^1.
\end{align}$$</h3>
<br>
This leads naturally to an upper triangular matrix recursion. Define the following matrices, for <span style="font-size:1.25em">$k \in \{1,2, \cdots\}$</span>
<h3>$$\begin{align}
\mathbf{U}_k \triangleq \begin{pmatrix}
U_k^{11} & U_k^{12} \\
0 & U_k^{22}
\end{pmatrix} & & \mathbf{E}_{k-1} \triangleq \begin{pmatrix}
\mathbf{x}_{k-1}^{1} & \mathbf{x}_{k-1}^{2},
\end{pmatrix}
\end{align}$$</h3>
<br>
then in matrix form, we can write the recursion for an arbitrary step $k$ as
<h3>$$\begin{align}
\mathbf{M} \mathbf{E}_k = \mathbf{E}_{k+1} \mathbf{U}_{k+1}
\end{align}$$</h3>
<br>
where the coefficients of <span style="font-size:1.25em">$\mathbf{U}_k$</span> are defined by the Gram-Schmidt step. described above.
''']
answers['schur_decomposition'] = ['HTML', r'''
We can compute the eigenvalues as the roots of the characteristic polynomial. Specifically, the characteristic polynomial is equal to
<h3>$$\begin{align}
\det\left( \mathbf{M} - \lambda \mathbf{I} \right) &= \det\left( \mathbf{Q} \mathbf{U} \mathbf{Q}^{\rm T} - \lambda\mathbf{I} \right) \\
&=\det\left( \mathbf{Q}\left[ \mathbf{U} - \lambda\mathbf{I}\right] \mathbf{Q}^{\rm T} \right) \\
&=\det\left( \mathbf{Q}\right) \det\left( \mathbf{U} - \lambda\mathbf{I}\right) \det\left(\mathbf{Q}^{\rm T} \right) \\
&=\det\left( \mathbf{Q} \mathbf{Q}^{\rm T} \right) \det\left( \mathbf{U} - \lambda\mathbf{I}\right) \\
&=\det\left( \mathbf{U} - \lambda\mathbf{I}\right)
\end{align}$$ </h3>
<br>
By expanding the determinant in co-factors, it is easy to show that the determinant of the right hand side equals
<h3>$$\begin{align}
\prod_{j=1}^n (U^{jj} - \lambda).
\end{align}$$ </h3>
<br>
By orthogonality, it is easy to verify that
<h3>$$\begin{align}
\left(\mathbf{Q}^j\right)^{\rm T} \mathbf{M} \mathbf{Q}^j = U^{jj}.
\end{align}$$</h3>
''']
answers['lyapunov_vs_es'] = ['HTML', r'''
We define the <b><em>i</em>-th Lyapunov exponent</b> as
<h3>$$\begin{align}
\lambda_i & \triangleq \lim_{k\rightarrow \infty} \frac{1}{k}\sum_{j=1}^k \log\left(\left\rvert U_j^{ii}\right\rvert \right)
\end{align}$$</h3>
<br>
and the <b><em>i</em>-th (backward) Lyapunov vector at time <em>k</em></b> to be the $i$-th column of <span style="font-size:1.25em"> $\mathbf{E}_k$ </span>.
''']
answers['naive_QR'] = ['MD', r'''
Example solution:
perts[:, i] = perts[:, i] - (perts[:, i].T @ perts[:, j]) * perts[:, j]
perts[:, i] = perts[:, i] / sqrt(perts[:,i].T @ perts[:, i])
''']
answers['real_schur'] = ['HTML', r'''
Let <span style='font-size:1.25em'>$\mathbf{M}$</span> be any matrix in <span style='font-size:1.25em'>$\mathbb{R}^{n\times n}$</span> with eigenvalues ordered
<h3>
$$
\begin{align}
\rvert \mu_1 \rvert \geq \cdots \geq \rvert \mu_s \rvert.
\end{align}
$$
</h3>
<br>
A real Schur decomposition of <span style='font-size:1.25em'>$\mathbf{M}$</span> is defined via
<h3>
$$
\begin{align}
\mathbf{M} = \mathbf{Q} \mathbf{U} \mathbf{Q}^{\rm T}
\end{align}
$$
</h3>
<br>
where <span style='font-size:1.25em'>$\mathbf{Q}$</span> is an orthogonal matrix and <span style='font-size:1.25em'>$\mathbf{U}$</span> is a block upper triangular matrix, such that
<h3>$$
\begin{align}
\mathbf{U} \triangleq
\begin{pmatrix}
U^{11} & U^{12} & \cdots & U^{1n} \\
0 & U^{22} & \cdots & U^{2n} \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \cdots & U^{nn}
\end{pmatrix}.
\end{align}
$$
</h3>
<br>
Moreover, the eigenvalues of <span style='font-size:1.25em'>$\mathbf{U}$</span> must equal the eigenvalues of <span style='font-size:1.25em'>$\mathbf{M}$</span>, such that:
<ol>
<li> each diagonal block <span style='font-size:1.25em'>$U^{ii}$</span> is either a scalar or a $2\times 2$ matrix with complex conjugate eigenvalues, and </li>
<li> the eigenvalues of the diagonal blocks <span style='font-size:1.25em'>$U^{ii}$</span> are ordered descending in magnitude.
</ol>
''']
|
# Count (possibly overlapping) occurrences of 'bob' in s.
# Fixes vs. original: Python-2 `print` statement (syntax error on Python 3),
# a no-op `i += 1` inside the `for` loop (the loop variable is reassigned on
# each iteration), and a redundant in-loop bounds check replaced by sizing
# the range correctly.
s = 'azcbobobegghakl'
count = 0
for i in range(len(s) - 2):
    # Slice reads a 3-char window; the window advances one character at a
    # time, so overlapping matches (e.g. 'bobob') are counted.
    if s[i:i + 3] == 'bob':
        count += 1
print('Number of times bob occurs is: ' + str(count))
|
#!/usr/bin/env python
# this is modified csdata.py
from __future__ import print_function
import fastjet as fj
import fjcontrib
import fjext
import fjtools
import tqdm
import argparse
import os
import numpy as np
import array
import copy
import random
import uproot
import pandas as pd
import time
from pyjetty.mputils import logbins
from pyjetty.mputils import MPBase
from pyjetty.mputils import BoltzmannEvent
from pyjetty.mputils import CEventSubtractor
from pyjetty.mputils import RTreeWriter
from pyjetty.mputils import DataIO, DataBackgroundIO
from pyjetty.mputils import fill_tree_data, JetAnalysis, JetAnalysisWithRho
from pyjetty.mputils import ColorS, pwarning, perror, pinfo, pdebug
from pyjetty.alice_analysis.process.base import thermal_generator as thg
import ROOT
ROOT.gROOT.SetBatch(True)
# Centrality-class boundaries expressed as minimum npart (number of
# participant nucleons) values; interpolated into the --npart-cut CLI help.
npart_cents='''
90pc@6,
80pc@15,
70pc@31,
60pc@56,
50pc@90,
40pc@133,
30pc@186,
20pc@248,
10pc@325,
'''
# centrality bins with h_dndeta_lores_antyr_npart
# 90pc@6,
# 80pc@15,
# 70pc@31,
# 60pc@56,
# 50pc@90,
# 40pc@133,
# 30pc@186,
# 20pc@248,
# 10pc@325,
# centrality bins with h_dndeta_lores_antyr_nch
# Centrality-class boundaries expressed as minimum nch (charged-particle
# multiplicity) values; interpolated into the --nch-cut CLI help.
nch_cents='''
90pc@277,
80pc@555,
70pc@1158,
60pc@2272,
50pc@3945,
40pc@6239,
30pc@9293,
20pc@13245,
10pc@18467,
'''
def main():
    """Stream events from a data file list, tag each event by the requested
    npart/nch centrality windows, optionally overlay thermal-generator
    events, and write per-event tree branches plus pT histograms to a ROOT
    output file.

    Returns:
        1 on bad centrality-cut arguments, otherwise None.
    """
    parser = argparse.ArgumentParser(description='test groomers', prog=os.path.basename(__file__))
    parser.add_argument('-o', '--output-filename', default="centrality_output.root", type=str)
    parser.add_argument('datalist', help='run through a file list', default='', type=str)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    parser.add_argument('--max-eta', help='max eta for particles', default=0.9)
    parser.add_argument('--thermal', help='enable thermal generator', action='store_true', default=False)
    parser.add_argument('--thermal-default', help='enable thermal generator', action='store_true', default=False)
    parser.add_argument('--particles', help='stream particles', action='store_true', default=False)
    parser.add_argument('--npart-cut', help='npart cut on centrality low,high hint:' + npart_cents, default='325,450', type=str)
    parser.add_argument('--nch-cut', help='nch cut on centrality low,high hint:' + nch_cents, default='18467,50000', type=str)
    args = parser.parse_args()
    # Parse the "low,high" centrality selections. Catch only the exceptions
    # malformed input can raise (bad int -> ValueError, missing comma ->
    # IndexError) instead of a bare `except:` that would also swallow
    # KeyboardInterrupt/SystemExit and genuine bugs.
    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except (ValueError, IndexError):
        perror('unable to parse npart centrality selection - two integer numbers with a coma in-between needed - specified:', args.npart_cut)
        return 1
    try:
        nch_min = int(args.nch_cut.split(',')[0])
        nch_max = int(args.nch_cut.split(',')[1])
    except (ValueError, IndexError):
        perror('unable to parse nch centrality selection - two integer numbers with a coma in-between needed - specified:', args.nch_cut)
        return 1
    # Output file, event tree writer, and pT spectra histograms.
    # NOTE(review): hpt_therm is created but never filled below -- confirm
    # whether it is intentionally reserved or dead.
    outf = ROOT.TFile(args.output_filename, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    hpt_antyr = ROOT.TH1F('hpt_antyr', 'hpt_antyr', 100, 0, 100)
    hpt_antyr_c = ROOT.TH1F('hpt_antyr_c', 'hpt_antyr_c', 100, 0, 100)
    hpt_therm = ROOT.TH1F('hpt_therm', 'hpt_therm', 100, 0, 100)
    hpt_therm_c = ROOT.TH1F('hpt_therm_c', 'hpt_therm_c', 100, 0, 100)
    data = DataIO(name='Sim Pythia Detector level', file_list=args.datalist, random_file_order=False, tree_name='tree_Particle_gen')
    # Particle selection for dN/deta: |eta| < max_eta and pT > 0.15.
    dndeta_selector = fj.SelectorAbsEtaMax(abs(args.max_eta)) & fj.SelectorPtMin(0.15)
    # Optional thermal-background generators: library defaults, and a
    # central-collision-like configuration.
    tg_default = None
    if args.thermal_default:
        tg_default = thg.ThermalGenerator()
        print(tg_default)
    tg_central = None
    if args.thermal:
        tg_central = thg.ThermalGenerator(beta=0.5, N_avg=3000, sigma_N=500)
        print(tg_central)
    delta_t = 0
    start_t = time.time()
    # NOTE(review): iev starts at 1 and is incremented before processing, so
    # event counters/rates are offset by one -- preserved from the original.
    iev = 1
    while data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev/delta_t, 'elapsed =', delta_t)
        # find jets on detector level
        if len(data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(data.particles))
            continue
        # dN/deta of selected particles; the eta window has width 2*max_eta.
        dndeta0_parts = dndeta_selector(data.particles)
        dndeta0 = len(dndeta0_parts)/(abs(args.max_eta*2.))
        [hpt_antyr.Fill(p.perp()) for p in dndeta0_parts]
        if args.particles:
            tw.fill_branches(dndeta=dndeta0, p=data.particles)
        else:
            tw.fill_branches(dndeta=dndeta0)
        tw.fill_branches_attribs(data.event, ['sigma', 'npart', 'nch', 'nchfwd', 'nchselect'], prefix='antyr_')
        # Flag whether the event falls inside the requested npart window;
        # the central-selection pT histogram is filled only for accepted events.
        if data.event.npart < npart_min or data.event.npart >= npart_max:
            tw.fill_branches(cent10npart=0)
        else:
            tw.fill_branches(cent10npart=1)
            [hpt_antyr_c.Fill(p.perp()) for p in dndeta0_parts]
        # Same flag for the nch window.
        if data.event.nch < nch_min or data.event.nch >= nch_max:
            tw.fill_branches(cent10nch=0)
        else:
            tw.fill_branches(cent10nch=1)
        # Overlay thermal events when enabled and record their dN/deta.
        if tg_default:
            thg_particles = tg_default.load_event()
            dndetathg_default = dndeta_selector(thg_particles)
            if args.particles:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default)/(abs(args.max_eta*2.)), p_thg_0=thg_particles)
            else:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default)/(abs(args.max_eta*2.)))
        if tg_central:
            thg_parts_central = tg_central.load_event()
            dndetathg_central = dndeta_selector(thg_parts_central)
            [hpt_therm_c.Fill(p.perp()) for p in dndetathg_central]
            if args.particles:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central)/(abs(args.max_eta*2.)), p_thg_c=thg_parts_central)
            else:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central)/(abs(args.max_eta*2.)))
        tw.fill_tree()
    delta_t = time.time()-start_t
    pinfo('processed events', iev, ' - ev/sec =', iev/delta_t, 'elapsed =', delta_t)
    outf.Write()
    outf.Close()
# Run main() only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import textwrap
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable
from _pytest.fixtures import FixtureRequest
from pants.jvm.resolve.common import ArtifactRequirement, ArtifactRequirements, Coordinate
from pants.jvm.resolve.coursier_fetch import CoursierResolvedLockfile
from pants.jvm.resolve.lockfile_metadata import LockfileContext
from pants.util.docutil import bin_name
@dataclass(frozen=True)
class JVMLockfileFixtureDefinition:
    """Declarative description of a JVM lockfile test fixture: where the
    lockfile lives (relative to the requesting test) and which artifact
    coordinates it is expected to cover."""
    # Path to the lockfile, relative to the test file's directory.
    lockfile_rel_path: Path
    # Artifact coordinates this lockfile must satisfy.
    requirements: tuple[Coordinate, ...]
    def __init__(
        self, lockfile_rel_path: Path | str, requirements: Iterable[Coordinate | str]
    ) -> None:
        """Normalize inputs: coerce requirement strings to `Coordinate` and
        the path to a `Path`.

        Raises:
            TypeError: if a requirement is neither a `Coordinate` nor a str.
        """
        coordinates: list[Coordinate] = []
        for requirement in requirements:
            if isinstance(requirement, Coordinate):
                coordinates.append(requirement)
            elif isinstance(requirement, str):
                # Parse coordinate strings (e.g. "group:artifact:version").
                coordinate = Coordinate.from_coord_str(requirement)
                coordinates.append(coordinate)
            else:
                raise TypeError(
                    f"Unsupported type `{type(requirement)}` for JVM coordinate. Expected `Coordinate` or `str`."
                )
        # The dataclass is frozen, so plain attribute assignment would raise
        # FrozenInstanceError; object.__setattr__ bypasses that in __init__.
        object.__setattr__(
            self,
            "lockfile_rel_path",
            lockfile_rel_path if isinstance(lockfile_rel_path, Path) else Path(lockfile_rel_path),
        )
        object.__setattr__(self, "requirements", tuple(coordinates))
    @classmethod
    def from_json_dict(cls, kwargs) -> JVMLockfileFixtureDefinition:
        """Alternate constructor from a dict with keys `lockfile_rel_path`
        and `requirements` (as produced by the fixture JSON config)."""
        lockfile_rel_path = kwargs["lockfile_rel_path"]
        if not lockfile_rel_path:
            raise ValueError("`path` must be specified as a relative path to a lockfile")
        requirements = kwargs["requirements"] or []
        return cls(
            lockfile_rel_path=Path(lockfile_rel_path),
            requirements=requirements,
        )
    def load(self, request: FixtureRequest) -> JVMLockfileFixture:
        """Read the lockfile relative to the requesting test, parse it, and
        validate it is still in sync with the declared requirements.

        Raises:
            ValueError: if the lockfile lacks metadata, or is stale relative
                to the declared requirements and must be regenerated.
        """
        lockfile_path = request.node.path.parent / self.lockfile_rel_path
        lockfile_contents = lockfile_path.read_bytes()
        lockfile = CoursierResolvedLockfile.from_serialized(lockfile_contents)
        # Check the lockfile's requirements against the requirements in the lockfile.
        # Fail the test if the lockfile needs to be regenerated.
        artifact_reqs = ArtifactRequirements(
            [ArtifactRequirement(coordinate) for coordinate in self.requirements]
        )
        if not lockfile.metadata:
            raise ValueError(f"Expected JVM lockfile {self.lockfile_rel_path} to have metadata.")
        if not lockfile.metadata.is_valid_for(artifact_reqs, LockfileContext.TOOL):
            raise ValueError(
                f"Lockfile fixture {self.lockfile_rel_path} is not valid. "
                "Please re-generate it using: "
                f"{bin_name()} internal-generate-test-lockfile-fixtures ::"
            )
        return JVMLockfileFixture(lockfile, lockfile_contents.decode(), artifact_reqs)
@dataclass(frozen=True)
class JVMLockfileFixture:
    """A resolved JVM lockfile plus its raw text and originating requirements."""

    lockfile: CoursierResolvedLockfile
    serialized_lockfile: str
    requirements: ArtifactRequirements

    def requirements_as_jvm_artifact_targets(
        self, *, version_in_target_name: bool = False, resolve: str | None = None
    ) -> str:
        """Render one `jvm_artifact(...)` BUILD target per requirement.

        When `version_in_target_name` is set, the artifact version is appended
        to each target name; when `resolve` is given, a `resolve=` field is
        emitted on every target.
        """
        rendered: list[str] = []
        for req in self.requirements:
            coord = req.coordinate
            name_suffix = f"_{coord.version}" if version_in_target_name else ""
            resolve_field = f'resolve="{resolve}",' if resolve else ""
            rendered.append(
                textwrap.dedent(
                    f"""\
                    jvm_artifact(
                        name="{coord.group}_{coord.artifact}{name_suffix}",
                        group="{coord.group}",
                        artifact="{coord.artifact}",
                        version="{coord.version}",
                        {resolve_field}
                    )
                    """
                )
            )
        return "".join(rendered)
|
"""Abstraction for dealing with products and the files they contain."""
import os
from xml.dom.minidom import parseString
from .coda_aware import CODA_Aware
class Product(CODA_Aware):
    """A CODA product, composed mainly of NetCDF files.

    On construction it queries the CODA service for the product's root node,
    creates a matching local working directory, and reads the product
    manifest to learn which files belong to the product.
    """

    def __init__(self, uuid, work_dir=""):
        """Initialise the product identified by `uuid`.

        Args:
            uuid: CODA product identifier used in OData queries.
            work_dir: local directory under which product files are cached.
        """
        self.files = []
        self.work_dir = work_dir
        self.uuid = uuid
        uuid_query = "Products('{}')".format(self.uuid)
        nodes = self.query(uuid_query + "/Nodes")
        dom = parseString(nodes.text)
        # The second <title> element of the Nodes listing holds the
        # product's root directory name.
        self.root = dom.getElementsByTagName("title").item(
            1).firstChild.nodeValue
        # exist_ok replaces the previous try/except FileExistsError dance.
        os.makedirs(os.path.join(work_dir, self.root), exist_ok=True)
        manifest_file = self.get("xfdumanifest.xml")
        with open(manifest_file) as manifest:
            manifest_dom = parseString(manifest.read())
        files = [x.attributes.get("href").nodeValue for x in
                 manifest_dom.getElementsByTagName("fileLocation")]
        # Normalise "./name" manifest entries to plain "name".
        self.files = [f if not f.startswith("./") else f[2:] for f in files]

    def get(self, filename):
        """If needed, retrieve, and return absolute path to file.

        Raises:
            FileNotFoundError: if the manifest is loaded and `filename` is
                not listed in it. (During __init__, before the manifest is
                read, `self.files` is empty and the check is skipped.)
        """
        if self.files and filename not in self.files:
            raise FileNotFoundError("The requested file is not part of this "
                                    "product.")
        path = os.path.join(self.work_dir, self.root, filename)
        if not os.path.exists(path):
            # Get the file from the server and stream it to disk in chunks.
            result = self.query("Products('{}')/Nodes('{}')/Nodes('{"
                                "}')/$value".format(self.uuid, self.root,
                                                    filename))
            with open(path, "wb") as f:
                for chunk in result.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
        return os.path.abspath(path)
|
import logging
import os
from .routes import setup_routes
from aiohttp import web
async def init():
    """Create the aiohttp application and register its routes.

    Returns the configured web.Application instance.
    """
    app = web.Application()
    setup_routes(app)
    return app
def serve():
    """Run the web application, listening on $PORT (default 8080)."""
    logging.basicConfig(level=logging.DEBUG)
    port = int(os.environ.get("PORT", 8080))
    # init() is a coroutine object here (not awaited); web.run_app accepts
    # either an Application or a coroutine that resolves to one.
    app = init()
    web.run_app(app, port=port)
|
import math
# Test rating vectors used by check()/query().
# NOTE(review): the original comment said "5 dimension lists", but each
# list actually has 6 entries — presumably 6 rated items per person.
list1 = [5,1,2,6,2,1]
list2 = [1,3,5,0,3,2]
list3 = [5,1,2,6,2,3]
# negative entries (see list5) are treated as "unrated" by similarity()
list4 = [99,99,99,99,99,4]
list5 = [5,-1,-2,-4,2,5]
# all candidate rows searched by check()
data = [list1, list2, list3, list4, list5]
def similarity(a, b):
    """Similarity score in (0, 1]: 1 / sqrt(sum of squared differences + 1).

    Pairs where either value is negative are treated as "unrated" and
    skipped. Identical vectors score exactly 1.0.
    """
    squared_distance = 0
    for left, right in zip(a, b):
        if left < 0 or right < 0:
            # skip unrated entries
            continue
        squared_distance += (left - right) ** 2
    return 1 / math.sqrt(squared_distance + 1)
def check(input):  # input is a list of length 'x'
    """Return the module-level data list most similar to `input`.

    Prints which list won and its similarity score. Uses similarity(),
    which is always > 0, so a winner is always found for non-empty data.
    """
    best_index = 0
    best_score = 0
    for position, person in enumerate(data, start=1):
        # compute similarity once per row (the original computed it twice)
        score = similarity(person, input)
        if score > best_score:
            best_score = score
            best_index = position
    # print() function replaces the Python 2 `print` statement, which is a
    # SyntaxError under Python 3
    print("the best list is list" + str(best_index) + " with similarity " + str(best_score))
    # data[best_index - 1] is exactly listN for best_index == N, replacing
    # the literal {1: list1, ...}[i] dict lookup
    return data[best_index - 1]
def query(input, query):
    """Return the rating at index `query` from the stored list most similar to `input`."""
    closest = check(input)  # the row with the best similarity score
    return closest[query]
# check([5,1,2,6,2])
# check([90,90,90,90,90])
#print similarity(list1,list2)
#print '\n'
#print similarity(list1,list3)
#print '\n'
#print similarity(list1,list4)
#print '\n'
#print similarity(list1,list5)
|
import asyncio
from random import Random
from math import ceil
from shared.utils import get_time
from shared.utils import get_rnd
from shared.utils import get_rnd_seed
from shared.LogParser import LogParser
from shared.LockManager import LockManager
from shared.RedisManager import RedisManager
# ------------------------------------------------------------------
class WebsocketBase():
    """Base class for a websocket-backed session on this server.

    Class-level attributes are shared by all instances of the class
    (keeping track of all sessions on this server process etc.).

    NOTE(review): methods reference `self.base_config`, which is never set
    here — presumably a subclass assigns it before calling this __init__;
    verify against the concrete subclasses.
    """

    # shared by every instance in this server process
    serv_id = None
    n_serv_sess = 0
    managers = dict()
    widget_inits = dict()

    # ------------------------------------------------------------------
    def __init__(self, ws_send, *args, **kwargs):
        """Initialise per-session state, redis, locking and the server id.

        Args:
            ws_send: callable used to push messages out over the websocket.
        """
        self.ws_send = ws_send
        self.sess_id = None
        self.user_id = None
        self.sess_name = None
        self.user_group = None
        self.user_group_id = None
        self.has_init_sess = False
        self.is_sess_offline = True
        self.is_sess_open = False
        self.log_send_packet = False
        self.sess_ping_time = None

        # is it allowed to restore sessions as part of development
        # or do we always reload web pages on server reloads
        self.can_restore_existing_sess = True
        # self.can_restore_existing_sess = False

        # debug the setup / restoration of sync groups
        self.debug_sync_group = False
        # self.debug_sync_group = True
        self.debug_sync_group = (
            self.debug_sync_group and self.base_config.debug_opts['dev']
        )

        # validate all session widgets on every few seconds
        self.validate_widget_time_sec = 0
        self.min_validate_widget_time_sec = 10

        self.valid_loop_sleep_sec = 0.01
        self.basic_widget_sleep_sec = 1
        self.sess_expire_sec = 15
        self.serv_expire_sec = 30
        self.user_expire_sec = 43200
        self.widget_expire_sec = self.user_expire_sec
        self.cleanup_sleep_sec = 60
        self.n_id_digits = 4
        self.n_serv_msg = 0

        # session ping/pong heartbeat
        self.sess_ping = {
            # interval for sending ping/pong events
            'send_interval_msec': 2500,
            # how much delay is considered ok for a slow session
            'max_interval_good_msec': 500,
            # how much delay is considered ok for a disconnected session
            'max_interval_slow_msec': 1000,
            # how much delay before the client socket is forcibly closed
            # and set in a reconnection attempt loop
            'max_interval_bad_msec': 5000,
        }
        # self.sess_ping = {
        #     # interval for sending ping/pong events
        #     'send_interval_msec': 2000,
        #     # how much delay is considered ok for a slow session
        #     'max_interval_good_msec': 2000,
        #     # how much delay is considered ok for a disconnected session
        #     'max_interval_slow_msec': 6000,
        # }

        self.widget_module_dir = 'frontend_manager.py.widgets'
        self.util_module_dir = 'frontend_manager.py.utils'
        self.loop_prefix = 'ws;loop;'
        self.heartbeat_prefix = 'ws;heartbeat;'
        self.sync_group_id_prefix = 'grp_'
        self.sync_group_title_prefix = 'Group '
        self.icon_prefix = 'icn_'
        self.asyncio_queue = asyncio.Queue()
        self.log = LogParser(base_config=self.base_config, title=__name__)
        self.allowed_widget_types = self.base_config.allowed_widget_types
        self.all_widget_types = [
            a for a in (
                self.base_config.allowed_widget_types['synced']
                + self.base_config.allowed_widget_types['not_synced']
            )
        ]
        self.redis_port = self.base_config.redis_port
        self.site_type = self.base_config.site_type
        self.allow_panel_sync = self.base_config.allow_panel_sync
        self.is_simulation = self.base_config.is_simulation
        self.redis = RedisManager(
            name=self.__class__.__name__, base_config=self.base_config, log=self.log
        )
        rnd_seed = get_rnd_seed()
        self.rnd_gen = Random(rnd_seed)
        self.inst_data = self.base_config.inst_data

        # the server id is shared by all sessions; set it only once
        if WebsocketBase.serv_id is None:
            self.set_server_id()

        # setup the locker for this server
        self.locker = self.setup_locker()

        # update the lock_namespace (after the session id has been set, maybe
        # later other session parameters would be needed?)
        self.update_lock_namespace()

        return

    # ------------------------------------------------------------------
    def setup_locker(self):
        """Build and return the LockManager for this server instance."""
        # prefix for all lock names in redis
        lock_prefix = 'ws;lock;'

        # dynamic lock names, based on the current properties
        lock_namespace = {
            'loop_state': lambda: 'loop_state;serv' + str(self.serv_id),
            'serv': lambda: 'serv;' + str(self.serv_id),
            'user': lambda: 'serv;' + str(self.serv_id) + ';user;' + str(self.user_id),
            'sess': lambda: 'serv;' + str(self.serv_id) + ';sess;' + str(self.sess_id),
        }
        self.get_widget_lock_name = (
            lambda name: 'widget;' + str(name) + ';serv;' + self.serv_id
        )

        # after setting up redis, initialise the lock manager
        locker = LockManager(
            log=self.log,
            redis=self.redis,
            base_config=self.base_config,
            lock_namespace=lock_namespace,
            lock_prefix=lock_prefix,
            is_passive=True,
        )

        # name of lock for sess configuration
        self.sess_config_lock = 'sess_config_lock'
        # name of lock for cleanup loop
        self.cleanup_loop_lock = 'cleanup_loop_lock'

        # maximal time to keep the lock for a session to configure
        # (init or cleanup), in case nominal cleanup fails
        self.expires_sec = {
            'sess_config_expire': 25,
            # same for the cleanup loop
            'cleanup_loop_expire': 30,
            # same for widget initialisations
            'widget_init_expire': 25,
        }

        return locker

    # ------------------------------------------------------------------
    def get_expite_sec(self, name, is_lock_check=False):
        """Return the expiry time in seconds for the named lock.

        When `is_lock_check` is set, return 90% of the nominal expiry
        (at least 1 second), so that checks run comfortably within the
        lock's lifetime.

        (Method name typo "expite" is kept for caller compatibility.)
        """
        expire_sec = self.expires_sec[name]
        if is_lock_check:
            # BUGFIX: a stray trailing comma previously turned this
            # assignment into a 1-tuple `(max(...),)` instead of an int
            expire_sec = max(1, ceil(expire_sec * 0.9))
        return expire_sec

    # ------------------------------------------------------------------
    def update_lock_namespace(self):
        """after the session id has been set, update the widget locks
        """
        lock_namespace = {}

        # add all registered widget types as possible locks
        widget_types = []
        for values in self.allowed_widget_types.values():
            widget_types += values

        # the lambda must be executed for the correct value to be taken
        def build_lambda(name):
            return (lambda: name)

        for widget_type in widget_types:
            lock_name = self.get_widget_lock_name(widget_type)
            lock_namespace[lock_name] = build_lambda(lock_name)

        self.locker.locks.update_lock_namespace(lock_namespace)

        return

    # ------------------------------------------------------------------
    async def add_server_attr(self, name, key, value):
        """Set `key` -> `value` in the shared class-level dict `name`."""
        self.locker.locks.validate('serv')
        attr = getattr(WebsocketBase, name)
        attr[key] = value
        return

    # ------------------------------------------------------------------
    async def remove_server_attr(self, name, key):
        """Remove `key` (if present) from the shared class-level dict `name`."""
        self.locker.locks.validate('serv')
        attr = getattr(WebsocketBase, name)
        attr.pop(key, None)
        return

    # ------------------------------------------------------------------
    async def get_server_attr(self, name):
        """Return the shared class-level attribute `name`."""
        self.locker.locks.validate('serv')
        attr = getattr(WebsocketBase, name)
        return attr

    # ------------------------------------------------------------------
    def set_server_id(self):
        """derive a server id

        it is mandatory to have unique ids across servers, and so
        a random number generator is used. for deployment,
        a larger a date/time msec prefix can be used
        """
        WebsocketBase.serv_id = (
            'serv_' + str(self.base_config.app_port) + '_'
            + get_rnd(n_digits=self.n_id_digits, out_type=str, is_unique_seed=True)
        )
        return

    # ------------------------------------------------------------------
    async def set_sess_id(self):
        """derive a session id

        in order to make sure we have unique ids across servers, the
        unique server name is included, supplemented by an incremental
        counter for sessions for this server
        """
        id_str = '{:0' + str(self.n_id_digits) + 'd}'
        with self.locker.locks.acquire('serv'):
            self.sess_id = (
                self.serv_id + '_sess_' + id_str.format(WebsocketBase.n_serv_sess)
            )
            WebsocketBase.n_serv_sess += 1

        return
|
""" Script to read the energy of the prompt signals of preselected events of atmospheric NC neutrino background and
calculate the spectrum of atmospheric NC neutrino background as function of the visible energy.
1. Read txt files where the energy of the prompt signals of the preselected events are saved.
Read also txt files where the energy of the delayed signals of preselected events are saved.
2. check, if delayed energy passes delayed energy cut as a cross-check of the preselection (cuts used in script
prompt_signal_preselected_evts.py is 2785.98 nPE < E_delayed < 3708.98 nPE)
3. Convert the energy of the prompt signal from number of pe to visible energy in the detector in MeV
This conversion is based on the analysis in script check_conversion_npe_mev.py, where protons and neutrons with
different kinetic energy are simulated and the visible energy as function of number of PE was plotted. With a
linear fit, you get the conversion function E_vis(nPE) = 0.0007475*nPE in MeV.
This also define the energy window of the prompt signal:
nPE(10 MeV) = 13377.9 nPE
nPE(100 MeV) = 133779.3 nPE
4. Put all these calculated visible energies into histogram to get the spectrum of atmospheric NC neutrino
background as function of the visible energy
5. Consider the event rate of NC interactions on C12 inside the detector (calculated with cross-sections and
neutrino fluxes) and calculate the 'real' spectrum of atmospheric NC background, JUNO will measure after 10
years of data taking
6. Consider the PSD efficiency calculated with pulse_shape_analysis.py and calculate the spectrum of atmospheric
NC background, JUNO will measure after 10 years of data taking, after Pulse Shape Analysis
"""
import datetime
import sys
import NC_background_functions
import numpy as np
from matplotlib import pyplot as plt
# ---------------------------------------------------------------
# Configuration block: paths, file range, energy windows, cuts and
# histogram binning for the atmospheric NC spectrum analysis.
# ---------------------------------------------------------------
# get the date and time, when the script was run:
date = datetime.datetime.now()
now = date.strftime("%Y-%m-%d %H:%M")
# path, where the txt files with the number of pe of prompt signal are saved:
input_path = "/home/astro/blum/juno/atmoNC/data_NC/output_detsim/old_analysis_r16m/"
# path, where corresponding evtID is saved:
input_path_evtID = "/home/astro/blum/juno/atmoNC/data_NC/output_preselection/preselection_detsim/"
# path, where the output files are saved:
output_path = "/home/astro/blum/juno/atmoNC/data_NC/output_detsim/results_atmoNC/"
# set the file number of the first file to be analyzed:
first_file = 0
# set the file number of the last file to be analyzed:
last_file = 999
# number of simulated NC events per user_atmoNC_{}.root file:
number_events_in_rootfile = 100
""" prompt signal energy window: """
# minimal visible energy of prompt signal in MeV:
min_energy = 10.0
# maximal visible energy of prompt signal in MeV:
max_energy = 100.0
# bin-width of visible energy for histogram in MeV (must be the same like for DM signal, Reactor, CCatmo and DSNB;
# 100 keV = 0.1 MeV):
bin_width_energy = 0.4
# preallocate number of events that are rejected by prompt energy cut:
number_rejected_prompt_cut = 0
# preallocate number of events where nPE of prompt signal is below min_energy:
number_rejected_prompt_cut_min = 0
# preallocate number of events where nPE of prompt signal is above max_energy:
number_rejected_prompt_cut_max = 0
""" delayed energy cut parameters: """
# minimum energy of delayed signal in nPE:
# TODO-me: check the values for delayed energy cut!!!!!!!!
min_nPE_delayed = 2785.98
# maximum energy of delayed signal in nPE:
max_nPE_delayed = 3708.98
# preallocate number of events that are rejected by delayed energy cut:
number_rejected_del_cut = 0
# preallocate number of events with nPE=0 for delayed signal:
number_nPE0_delayed = 0
# preallocate number of events where nPE of delayed signal is below min_PE_delayed:
number_rejected_del_cut_min = 0
# preallocate number of events where nPE of delayed signal is above max_PE_delayed:
number_rejected_del_cut_max = 0
""" Pulse Shape discrimination: """
# efficiency in percent, of how many NC events are cut away by PSD:
efficiency_PSD = 90.0
""" cut efficiencies: """
# TODO-me: include the cut efficiencies!!!!!!!
# leak efficiency of the volume cut in percent:
# NOTE(review): value is above 100% — presumably leak-in events make the
# effective efficiency exceed 100; confirm against the preselection study.
efficiency_volume_cut_leak = 100.054
# radius cut in m:
r_cut = 16.0
# time exposure in years:
time_in_years = 10
# time exposure in seconds:
time_seconds = 10 * 3.156 * 10 ** 7
# set booleans, that define, which plots are shown or saved (boolean):
PLOT_FLUX = False
SHOW_FLUXPLOT = True
SAVE_FLUXPLOT = False
PLOT_EVT_RATE = False
SHOW_EVT_RATE = True
SAVE_EVT_RATE = False
# number of preselected events:
number_preselected = 0
# total number of simulated NC events:
number_NC_events_simu = (last_file + 1 - first_file) * number_events_in_rootfile
# preallocate histogram, where visible energy is saved:
# set bin-edges of e_vis histogram in MeV (the "+ 2*bin_width" upper limit
# ensures max_energy itself falls inside the last bin edge produced by arange):
bins_evis = np.arange(min_energy, max_energy + 2*bin_width_energy, bin_width_energy)
# preallocate empty array to build default e_vis-histogram:
e_vis_empty = np.array([])
# build default e_vis histogram:
e_vis_array, bin_edges_evis = np.histogram(e_vis_empty, bins_evis)
# ---------------------------------------------------------------
# Main loop: read the per-file nPE data, apply the prompt energy
# window and (as cross-check) the delayed energy cut, and fill the
# visible-energy histogram. Summary counters are printed afterwards.
# ---------------------------------------------------------------
# loop over the files with the number of pe:
for index in range(first_file, last_file+1, 1):
    # print("read file {0:d}.....".format(index))
    # file name:
    input_file_delayed = input_path + "number_pe_delayed_file{0:d}.txt".format(index)
    # read this file:
    number_pe_delayed = np.loadtxt(input_file_delayed)
    # file name:
    input_file = input_path + "number_pe_file{0:d}.txt".format(index)
    # read this file:
    number_pe_file = np.loadtxt(input_file)
    # file name, where evtID of preselected events are saved:
    input_file_evtID = input_path_evtID + "evtID_preselected_{0:d}.txt".format(index)
    # read this file:
    evtID_preselected, x_reco, y_reco, z_reco = np.loadtxt(input_file_evtID, unpack=True)
    # number of preselected events:
    number_preselected = number_preselected + len(number_pe_file)
    # loop over all entries in number_pe_file:
    for index1 in range(len(number_pe_file)):
        # convert number_pe to E_vis:
        e_vis = NC_background_functions.conversion_npe_to_evis(number_pe_file[index1])
        # check, if energy is in the correct energy window:
        if min_energy <= e_vis <= max_energy:
            # add e_vis to default evis histogram:
            e_vis_array += np.histogram(e_vis, bins_evis)[0]
        else:
            # event is rejected by prompt energy cut:
            number_rejected_prompt_cut += 1
            if e_vis < min_energy:
                # nPE below min_energy:
                number_rejected_prompt_cut_min += 1
            elif e_vis > max_energy:
                # nPE above max_energy:
                number_rejected_prompt_cut_max += 1
        # check delayed energy cut (counters only, events are NOT removed):
        # TODO-me: Apply also delayed energy cut -> Until now, all preselected events pass this cut!
        if number_pe_delayed[index1] < min_nPE_delayed or number_pe_delayed[index1] > max_nPE_delayed:
            # event is rejected by delayed energy cut:
            number_rejected_del_cut += 1
            if number_pe_delayed[index1] == 0:
                # delayed signal lies in prompt signal and is not calculated correctly:
                number_nPE0_delayed += 1
            elif 0 < number_pe_delayed[index1] < min_nPE_delayed:
                # nPE below min_nPE_delayed:
                number_rejected_del_cut_min += 1
                # print("------- nPE = {0:.0f} in file {1:d}".format(number_pe_delayed[index1], index))
            elif number_pe_delayed[index1] > max_nPE_delayed:
                # nPE above max_nPE_delayed:
                number_rejected_del_cut_max += 1
                print("++++ nPE = {0:.0f} in file {1:d}".format(number_pe_delayed[index1], index))
# number of NC events in e_vis_array (from simulation):
number_IBDlike_events_simu = np.sum(e_vis_array)
# print the summary counters collected above:
print("number of NC events from simulation = {0:d}".format(number_NC_events_simu))
print("number of preselected events from simulation = {0:d}".format(number_preselected))
print("number of IBD-like from simulation = {0:d}\n".format(number_IBDlike_events_simu))
print("number of events (of preselect. evts), that would be rejected by delayed energy cut = {0:d}"
      .format(number_rejected_del_cut))
print("number of events (of preselect. evts) with nPE=0 of delayed signal = {0:d}"
      .format(number_nPE0_delayed))
print("number of events (of preselect. evts) with nPE < min_PE_delayed = {0:d}"
      .format(number_rejected_del_cut_min))
print("number of events (of preselect. evts) with nPE > max_PE_delayed = {0:d}"
      .format(number_rejected_del_cut_max))
print("number of events (of preselect. evts) rejected by prompt energy cut = {0:d}".format(number_rejected_prompt_cut))
print("number of events (of preselect. evts) with nPE of prompt signal < min_energy = {0:d}"
      .format(number_rejected_prompt_cut_min))
print("number of events (of preselect. evts) with nPE of prompt signal > max_energy = {0:d}"
      .format(number_rejected_prompt_cut_max))
""" Event rate calculation: """
# calculate the theoretical event rate in NC events/sec in JUNO for neutrino energies from 0 MeV to 10 GeV (float)
# (event_rate = A * (flux_nue*xsec_nue + flux_nuebar*xsec_nuebar + flux_numu*xsec_numu + flux_numubar*xsec_numubar)):
event_rate = NC_background_functions.event_rate(bin_width_energy, r_cut, output_path, PLOT_FLUX, SHOW_FLUXPLOT,
SAVE_FLUXPLOT, PLOT_EVT_RATE, SHOW_EVT_RATE, SAVE_EVT_RATE)
# number of NC events in JUNO after 10 years:
number_NC_events_JUNO = event_rate * time_seconds
# number of IBD-like events in JUNO after 10 years:
number_IBDlike_events_JUNO = int(number_NC_events_JUNO * number_IBDlike_events_simu / number_NC_events_simu)
# normalize the spectrum of simulated events to the spectrum, JUNO will measure after 10 years:
e_vis_array_JUNO = float(number_IBDlike_events_JUNO) / float(number_IBDlike_events_simu) * e_vis_array
""" display simulated spectrum: """
h1 = plt.figure(1, figsize=(15, 8))
plt.plot(bins_evis[:-1], e_vis_array, drawstyle="steps", linestyle="-", color="orange",
label="atmospheric NC background\n(number of events = {0:d})".format(number_IBDlike_events_simu))
plt.xlim(xmin=min_energy, xmax=max_energy)
plt.ylim(ymin=0.0)
plt.xlabel("visible energy of prompt signal in MeV")
plt.ylabel("number of IBD-like events per bin (bin-width = {0:.2f} MeV)".format(bin_width_energy))
plt.title("Simulated spectrum of atmospheric NC neutrino events with IBD-like signature")
plt.legend()
plt.grid()
plt.savefig(output_path + "atmoNC_spectrum_simulated_bins{0:.0f}keV.png".format(bin_width_energy*1000))
# plt.show()
plt.close()
""" display visible spectrum in JUNO after 10 years: """
h2 = plt.figure(2, figsize=(15, 8))
plt.plot(bins_evis[:-1], e_vis_array_JUNO, drawstyle="steps", linestyle="-", color="orange",
label="atmospheric NC background\n(number of events = {0:d})".format(number_IBDlike_events_JUNO))
plt.xlim(xmin=min_energy, xmax=max_energy)
plt.ylim(ymin=0.0)
plt.xlabel("visible energy of prompt signal in MeV")
plt.ylabel("number of IBD-like events per bin (bin-width = {0:.2f} MeV)".format(bin_width_energy))
plt.title("Expected spectrum of atmospheric NC neutrino events with IBD-like signature in JUNO after {0:.0f} years"
.format(time_in_years))
plt.legend()
plt.grid()
plt.savefig(output_path + "atmoNC_spectrum_JUNO_bins{0:.0f}keV.png".format(bin_width_energy*1000))
# plt.show()
plt.close()
""" display visible spectrum in JUNO after 10 years after Pulse Shape Discrimination: """
# calculate spectrum after PSD:
e_vis_array_JUNO_PSD = e_vis_array_JUNO * (100.0-efficiency_PSD)/100.0
# number of events after PSD:
number_IBDlike_events_JUNO_PSD = number_IBDlike_events_JUNO * (100.0-efficiency_PSD)/100.0
h3 = plt.figure(3, figsize=(15, 8))
plt.plot(bins_evis[:-1], e_vis_array_JUNO_PSD, drawstyle="steps", linestyle="-", color="orange",
label="atmospheric NC background after PSD\n(number of events = {0:0.1f})"
.format(number_IBDlike_events_JUNO_PSD))
plt.xlim(xmin=min_energy, xmax=max_energy)
plt.ylim(ymin=0.0)
plt.xlabel("visible energy of prompt signal in MeV")
plt.ylabel("number of IBD-like events per bin (bin-width = {0:.2f} MeV)".format(bin_width_energy))
plt.title("Expected spectrum of atmospheric NC neutrino events with IBD-like signature in JUNO after {0:.0f} years\n"
"after Pulse Shape Discrimination".format(time_in_years) + " (cut efficiency $\\epsilon_{NC}$ = " +
"{0:0.1f} %)"
.format(efficiency_PSD))
plt.legend()
plt.grid()
plt.savefig(output_path + "atmoNC_spectrum_JUNO_afterPSD_bins{0:.0f}keV.png".format(bin_width_energy*1000))
# plt.show()
plt.close()
""" save e_vis_array_JUNO to txt file (txt file must have same shape like files in folder
/home/astro/blum/PhD/work/MeVDM_JUNO/gen_spectrum_v2/).
Save the array before Pulse Shape Discrimination. PSD efficiencies for real IBD signals and NC events is then
applied afterwards before calculating the Limits: """
# save e_vis_array_JUNO to txt-spectrum-file and information about simulation in txt-info-file:
print("... save data of spectrum to file...")
np.savetxt(output_path + 'NCatmo_onlyC12_bin{0:.0f}keV.txt'
.format(bin_width_energy * 1000), e_vis_array_JUNO, fmt='%1.5e',
header='Spectrum in IBD-like events/bin of atmospheric NC background events that mimic IBD signals '
'(calculated with atmoNC_spectrum.py, {0}):'
'\n{3:d} NC events are simulated with JUNO detector software (tut_detsim.py).'
'\nNumber of IBD-like NC events from spectrum = {1:.5f}, binning of E_visible = {2:.3f} MeV,'
'\nNC interactions of nu_e, nu_e_bar, nu_mu and nu_mu_bar with C12 of liquid scintillator are '
'simulated with GENIE.'
'\nDeexcitation of residual isotopes are simulated with modified DSNB-NC.exe generator.'
'\nThen the final products are simulated with JUNO detector simulation and cuts are applied to get'
'\nthe number of NC events that mimic an IBD signal:'
.format(now, number_IBDlike_events_JUNO, bin_width_energy, number_NC_events_simu))
np.savetxt(output_path + 'NCatmo_info_onlyC12_bin{0:.0f}keV.txt'
.format(bin_width_energy * 1000),
np.array([min_energy, max_energy, bin_width_energy, time_in_years, r_cut, number_NC_events_simu,
number_IBDlike_events_JUNO, event_rate]),
fmt='%1.9e',
header='Information to simulation NCatmo_onlyC12_bin{0:.0f}keV.txt (analyzed files: user_atmoNC_{1:d}.root '
'to user_atmoNC_{2:d}.root):\n'
'values below: E_visible[0] in MeV, E_visible[-1] in MeV, interval_E_visible in MeV,'
'\nexposure time t_years in years, applied volume cut for radius in meter,'
'\nnumber of simulated NC events, number of IBD-like NC events in spectrum, '
'\ntheoretical NC event rate in JUNO detector in NC events/sec,'
'\nslope A of the linear conversion function E_vis[MeV] = A * nPE:'
.format(bin_width_energy * 1000, first_file, last_file))
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 5 15:41:26 2017
@author: sglusnev
"""
from flask import Flask, Response, jsonify
from flask_restplus import Api, Resource, fields, reqparse
from flask_cors import CORS, cross_origin
import os
# the app: Flask instance with CORS enabled and a swagger-documented API
app = Flask(__name__)
CORS(app)
api = Api(app, version='1.0', title='APIs for Python Functions', validate=False)
# we can create namespace to organize the api and docs
ns = api.namespace('primality', 'Returns a list of all primes below a given upper bound')
# load the algo
from prime_sieve import Eratosthenes as algo
'''
We import our function `Erasosthenes` from the file prime_sieve.py.
You create all the classes and functions that you want in that file,
and import them into the app.
'''
# model the input data
# NOTE(review): maximum=10e16 is 1e17, not 1e16 — confirm the intended bound.
model_input = api.model('Enter the upper bound:', {
    "UPPER_BOUND": fields.Integer(maximum=10e16)})
# the input data type here is Integer. You can change this to whatever works for your app.
# On Bluemix, get the port number from the environment variable PORT
# When running this app on the local machine, default the port to 8080
port = int(os.getenv('PORT', 8080))
# The ENDPOINT
@ns.route('/sieve')  # the endpoint
class SIEVE(Resource):
    """POST endpoint: return all primes below the supplied UPPER_BOUND."""

    @api.response(200, "Success", model_input)  # document the response format
    @api.expect(model_input)  # expect the required input data
    def post(self):  # POST so the bound arrives in the request body
        """Run the sieve for the posted UPPER_BOUND and return the primes as JSON."""
        parser = reqparse.RequestParser()  # parse the args
        # required=True makes a missing UPPER_BOUND answer with HTTP 400;
        # previously int(None) raised a TypeError -> HTTP 500.
        parser.add_argument('UPPER_BOUND', type=int, required=True)
        args = parser.parse_args()
        inp = int(args["UPPER_BOUND"])  # our input data
        result = algo(inp)  # apply algo
        return jsonify({"primes": result})
# run
# start the development server when executed directly
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=port, debug=False)  # deploy with debug=False
import cv2
import numpy as np
import math
import time
from typing import NamedTuple
# HSV color range used to segment a tennis ball (yellow-green);
# tune these for the lighting conditions of the input video.
hsv_lower = np.array([20, 100, 95])
hsv_upper = np.array([90, 255, 255])
# alternative range (brighter / less saturated scenes):
#hsv_lower = np.array([20, 50, 180])
#hsv_upper = np.array([90, 170, 255])
def analyze_video(video_path):
    """Play a video file, drawing detected tennis-ball circles and an FPS counter.

    Press 'q' in the display window to quit. The capture is always released.
    """
    cap = cv2.VideoCapture(video_path)
    prev_frame_time = 0
    try:
        while True:
            # time.sleep(.5)
            ret, frame = cap.read()
            if not ret:
                # end of stream or read failure: previously `frame` was None
                # here and find_circles crashed
                break
            detect = find_circles(frame)
            new_frame_time = time.time()
            fps = 1/(new_frame_time-prev_frame_time)
            prev_frame_time = new_frame_time
            fps = str(int(fps))
            cv2.putText(detect, fps, (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (252, 186, 3), 2)
            cv2.imshow('tennis', detect)
            if chr(cv2.waitKey(1) & 255) == 'q':
                break
    finally:
        # release the capture and close the window even on error/quit
        cap.release()
        cv2.destroyAllWindows()
def analyze_camera():
    """Stream from the default camera, drawing detections and an FPS counter.

    Press 'q' in the display window to quit. The capture is always released.
    """
    cap = cv2.VideoCapture(0)
    prev_frame_time = 0
    try:
        while True:
            # time.sleep(.5)
            ret, frame = cap.read()
            if not ret:
                # camera disconnected / no frame: previously `frame` was None
                # here and find_circles crashed
                break
            detect = find_circles(frame)
            new_frame_time = time.time()
            fps = 1/(new_frame_time-prev_frame_time)
            prev_frame_time = new_frame_time
            fps = str(int(fps))
            cv2.putText(detect, fps, (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (252, 186, 3), 2)
            cv2.imshow('tennis', detect)
            if chr(cv2.waitKey(1) & 255) == 'q':
                break
    finally:
        # release the camera and close the window even on error/quit
        cap.release()
        cv2.destroyAllWindows()
def draw_circles(img, circles):
    """Return a copy of `img` with detected circles outlined in green.

    `circles` is the raw output of cv2.HoughCircles, or None when no
    circles were found (in which case an unmodified copy is returned).
    """
    cimg = img.copy()
    # `is None` replaces the `type(circles) == type(None)` anti-pattern
    if circles is None:
        return cimg
    for (x, y, r) in np.round(circles[0, :]).astype("int"):
        cv2.circle(cimg, (x, y), r, (0, 255, 0), 2)
    return cimg
def find_circles(image):
    """Detect a tennis ball in a BGR frame and return the annotated image.

    Strategy: resize to a fixed height, segment by HSV color and draw the
    minimum enclosing circle of the largest contour; if no contour is
    found, fall back to Hough circle detection on a Laplacian edge image.

    (Unreachable experimental code that followed the final return — extra
    HSV/grayscale conversions and a commented-out contour filter — has
    been removed; it could never execute.)
    """
    # resize to a fixed 700-pixel height, preserving the aspect ratio
    ratio = image.shape[1] / image.shape[0]
    h, w = 700, int(700 * ratio)
    image = cv2.resize(image, (w, h), interpolation=cv2.INTER_AREA)
    original = image.copy()
    # color segmentation in HSV; erode/dilate to remove speckle noise
    blur = cv2.GaussianBlur(image, (11, 11), 0)
    hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, hsv_lower, hsv_upper)
    mask = cv2.erode(mask, None, iterations=2)
    mask = cv2.dilate(mask, None, iterations=2)
    cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # OpenCV 3 returns (img, contours, hierarchy); OpenCV 4 returns (contours, hierarchy)
    cnts = cnts[0] if len(cnts) == 2 else cnts[1]
    if len(cnts) > 0:
        # find the largest contour in the mask, then use
        # it to compute the minimum enclosing circle
        c = max(cnts, key=cv2.contourArea)
        ((x, y), radius) = cv2.minEnclosingCircle(c)
        # only proceed if the radius meets a minimum size
        if radius > 0:
            # draw the circle on the frame and return it
            cv2.circle(original, (int(x), int(y)), int(radius), (0, 0, 255), 2)
            return original
    # fallback: Hough circle detection on a Laplacian edge image
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    gray_blur = cv2.GaussianBlur(gray, (15, 15), 0)
    gray_lap = cv2.Laplacian(gray_blur, cv2.CV_8UC1, ksize=5)
    dilate_lap = cv2.dilate(gray_lap, (3, 3))
    lap_blur = cv2.bilateralFilter(dilate_lap, 5, 9, 9)
    #circles = cv2.HoughCircles(lap_blur, cv2.HOUGH_GRADIENT, 1, 100, param1 = 50, param2 = 30, minRadius = 60, maxRadius = 0)
    circles = cv2.HoughCircles(lap_blur, cv2.HOUGH_GRADIENT, 16, 200, param2=450, minRadius=60, maxRadius=0)
    cimg = draw_circles(original, circles)
    return cimg
# entry point: run live camera detection (switch to analyze_video for a file)
if __name__ == '__main__':
    #analyze_video('input\\tennis.mp4')
    analyze_camera()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.