| column | dtype | range / classes |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–281 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–57 |
| license_type | string | 2 classes |
| repo_name | string | length 6–116 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 313 classes |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 18.2k–668M, nullable |
| star_events_count | int64 | 0–102k |
| fork_events_count | int64 | 0–38.2k |
| gha_license_id | string | 17 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 107 classes |
| src_encoding | string | 20 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 4–6.02M |
| extension | string | 78 classes |
| content | string | length 2–6.02M |
| authors | list | length 1–1 |
| author | string | length 0–175 |
3c63fe02ede7587da3872d2f6648382246af942d
|
09fafd03fc39cb890b57f143285925a48d114318
|
/tool_angle/DynamixelSDK/python/tests/protocol2_0/read_write.py
|
345cc047828cda6bc773f29f448adc7eb29f2018
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
SamKaiYang/robot_control_hiwin_ros
|
83914e1af44da69631b079ad5eeff4bd49e6abc9
|
50457391013b4cad90b932ffc5afa078f00da7bb
|
refs/heads/master
| 2023-08-17T03:21:57.466251
| 2021-09-18T06:32:30
| 2021-09-18T06:32:30
| 292,339,605
| 1
| 2
|
BSD-3-Clause
| 2020-09-22T17:04:20
| 2020-09-02T16:45:12
|
Python
|
UTF-8
|
Python
| false
| false
| 5,851
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
# Copyright 2017 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Author: Ryu Woon Jung (Leon)
#
# ********* Read and Write Example *********
#
#
# Available Dynamixel models for this example : All models using Protocol 2.0
# This example is designed for using a Dynamixel PRO 54-200 and a USB2DYNAMIXEL.
# To use another Dynamixel model, such as the X series, see its details in the e-Manual (emanual.robotis.com) and edit the variables below.
# Be sure that the Dynamixel PRO properties are already set as ID : 1 / Baudnum : 1 (Baudrate : 57600)
#
import os
if os.name == 'nt':
import msvcrt
def getch():
return msvcrt.getch().decode()
else:
import sys, tty, termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
def getch():
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
from dynamixel_sdk import * # Uses Dynamixel SDK library
# Control table address
ADDR_PRO_TORQUE_ENABLE      = 64               # Control table addresses differ between Dynamixel models
ADDR_PRO_GOAL_POSITION = 116
ADDR_PRO_PRESENT_POSITION = 132
# Protocol version
PROTOCOL_VERSION = 2.0 # See which protocol version is used in the Dynamixel
# Default setting
DXL_ID = 1 # Dynamixel ID : 1
BAUDRATE = 57600 # Dynamixel default baudrate : 57600
DEVICENAME = '/dev/ttyUSB0' # Check which port is being used on your controller
# ex) Windows: "COM1" Linux: "/dev/ttyUSB0" Mac: "/dev/tty.usbserial-*"
TORQUE_ENABLE = 1 # Value for enabling the torque
TORQUE_DISABLE = 0 # Value for disabling the torque
DXL_MINIMUM_POSITION_VALUE = 10 # Dynamixel will rotate between this value
DXL_MAXIMUM_POSITION_VALUE = 4000 # and this value (note that the Dynamixel would not move when the position value is out of movable range. Check e-manual about the range of the Dynamixel you use.)
DXL_MOVING_STATUS_THRESHOLD = 20 # Dynamixel moving status threshold
index = 0
dxl_goal_position = [DXL_MINIMUM_POSITION_VALUE, DXL_MAXIMUM_POSITION_VALUE] # Goal position
# Initialize PortHandler instance
# Set the port path
# Get methods and members of PortHandlerLinux or PortHandlerWindows
portHandler = PortHandler(DEVICENAME)
# Initialize PacketHandler instance
# Set the protocol version
# Get methods and members of Protocol1PacketHandler or Protocol2PacketHandler
packetHandler = PacketHandler(PROTOCOL_VERSION)
# Open port
if portHandler.openPort():
print("Succeeded to open the port")
else:
print("Failed to open the port")
print("Press any key to terminate...")
getch()
quit()
# Set port baudrate
if portHandler.setBaudRate(BAUDRATE):
print("Succeeded to change the baudrate")
else:
print("Failed to change the baudrate")
print("Press any key to terminate...")
getch()
quit()
# Enable Dynamixel Torque
dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, DXL_ID, ADDR_PRO_TORQUE_ENABLE, TORQUE_ENABLE)
if dxl_comm_result != COMM_SUCCESS:
print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
elif dxl_error != 0:
print("%s" % packetHandler.getRxPacketError(dxl_error))
else:
print("Dynamixel has been successfully connected")
while 1:
print("Press any key to continue! (or press ESC to quit!)")
if getch() == chr(0x1b):
break
# Write goal position
dxl_comm_result, dxl_error = packetHandler.write4ByteTxRx(portHandler, DXL_ID, ADDR_PRO_GOAL_POSITION, dxl_goal_position[index])
if dxl_comm_result != COMM_SUCCESS:
print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
elif dxl_error != 0:
print("%s" % packetHandler.getRxPacketError(dxl_error))
while 1:
# Read present position
dxl_present_position, dxl_comm_result, dxl_error = packetHandler.read4ByteTxRx(portHandler, DXL_ID, ADDR_PRO_PRESENT_POSITION)
if dxl_comm_result != COMM_SUCCESS:
print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
elif dxl_error != 0:
print("%s" % packetHandler.getRxPacketError(dxl_error))
print("[ID:%03d] GoalPos:%03d PresPos:%03d" % (DXL_ID, dxl_goal_position[index], dxl_present_position))
if not abs(dxl_goal_position[index] - dxl_present_position) > DXL_MOVING_STATUS_THRESHOLD:
break
# Change goal position
if index == 0:
index = 1
else:
index = 0
# Disable Dynamixel Torque
dxl_comm_result, dxl_error = packetHandler.write1ByteTxRx(portHandler, DXL_ID, ADDR_PRO_TORQUE_ENABLE, TORQUE_DISABLE)
if dxl_comm_result != COMM_SUCCESS:
print("%s" % packetHandler.getTxRxResult(dxl_comm_result))
elif dxl_error != 0:
print("%s" % packetHandler.getRxPacketError(dxl_error))
# Close port
portHandler.closePort()
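
The script above is linear; the same SDK calls fold naturally into small helpers. A minimal sketch, assuming the port is still open and reusing the `portHandler`/`packetHandler` instances and constants defined above (the helper names are ours, not part of the Dynamixel SDK):

```python
# Illustrative helpers; the names are hypothetical, the calls are the SDK ones used above.
def set_torque(enabled):
    value = TORQUE_ENABLE if enabled else TORQUE_DISABLE
    result, error = packetHandler.write1ByteTxRx(
        portHandler, DXL_ID, ADDR_PRO_TORQUE_ENABLE, value)
    return result == COMM_SUCCESS and error == 0

def move_and_wait(goal):
    # Write the goal, then poll until the servo settles within the threshold.
    packetHandler.write4ByteTxRx(portHandler, DXL_ID, ADDR_PRO_GOAL_POSITION, goal)
    while True:
        position, result, error = packetHandler.read4ByteTxRx(
            portHandler, DXL_ID, ADDR_PRO_PRESENT_POSITION)
        if result == COMM_SUCCESS and abs(goal - position) <= DXL_MOVING_STATUS_THRESHOLD:
            return position
```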
|
[
"tt00621212@gmail.com"
] |
tt00621212@gmail.com
|
f720e543aa89c239ab37938bc6cca36d4f8e35d2
|
7787db9eaf80ac4a366648902ee945112bca127a
|
/Leetcode300/60. Permutation Sequence.py
|
3fe3df33732b91259adff0fb0e8803262ded93f4
|
[] |
no_license
|
LYXalex/Leetcode-PythonSolution
|
0de7af69373171affe15f2074bacc74955d09a2c
|
2ae3529366227efb5f2ad81a8b039ad71e8d1ed5
|
refs/heads/main
| 2023-06-22T18:49:32.492547
| 2021-07-14T02:12:05
| 2021-07-14T02:12:05
| 325,213,787
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 771
|
py
|
import math

class Solution:
def getPermutation(self, n: int, k: int) -> str:
ans, nums = "", [str(i + 1) for i in range(n)]
factorial = [0] * (n + 1)
factorial[0] = 1
for i in range(1, n):
factorial[i] = factorial[i - 1] * i
k -= 1
for i in range(1, n + 1):
index = k // factorial[n - i]
ans += nums[index]
nums.pop(index)
k -= index * (factorial[n - i])
return ans
def getPermutation1(self, n: int, k: int) -> str:
ans = ''
nums = list(map(str, range(1, n + 1)))
fact = math.factorial(len(nums) - 1)
k -= 1
while k:
i, k = divmod(k, fact)
ans += nums.pop(i)
fact //= len(nums)
ans += ''.join(nums)
return ans
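
Both methods should agree with a brute-force enumeration for small `n`; a quick standalone check with the class above in scope:

```python
import itertools
import math

def kth_permutation_bruteforce(n, k):
    # Reference answer: enumerate all permutations in lexicographic order.
    digits = map(str, range(1, n + 1))
    return ''.join(list(itertools.permutations(digits))[k - 1])

solution = Solution()
for n in range(1, 6):
    for k in range(1, math.factorial(n) + 1):
        expected = kth_permutation_bruteforce(n, k)
        assert solution.getPermutation(n, k) == expected
        assert solution.getPermutation1(n, k) == expected
print("both methods agree with brute force for n <= 5")
```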
|
[
"yul801@ucsd.edu"
] |
yul801@ucsd.edu
|
9b5cc1f531bb419eff65159e5e7de7bc03c0bcf9
|
32819d5a91c8ffc6f9594cbeb3eb66a19de6c89e
|
/tracklets/python/bag_to_kitti.py
|
3fa47e3cd9a0c75f0483fb0791a22eb793b71d7d
|
[] |
no_license
|
td2014/didi-competition
|
89b4dfa33c3252c214b56d7199a0b4d49e8c0945
|
a92ca1cb36907bcf5db6c8e454063e45451f5842
|
refs/heads/master
| 2021-01-19T11:54:28.623417
| 2017-04-19T17:24:31
| 2017-04-19T17:24:31
| 88,005,585
| 0
| 0
| null | 2017-04-12T03:25:23
| 2017-04-12T03:25:22
| null |
UTF-8
|
Python
| false
| false
| 19,193
|
py
|
#! /usr/bin/python
""" Udacity Self-Driving Car Challenge Bag Processing
"""
from __future__ import print_function
from cv_bridge import CvBridge, CvBridgeError
from collections import defaultdict
import os
import sys
import cv2
import math
import imghdr
import argparse
import functools
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import PyKDL as kd
from bag_topic_def import *
from bag_utils import *
from generate_tracklet import *
def get_outdir(base_dir, name=''):
outdir = os.path.join(base_dir, name)
if not os.path.exists(outdir):
os.makedirs(outdir)
return outdir
def obs_prefix_from_topic(topic):
words = topic.split('/')
start, end = (1, 4) if topic.startswith(OBJECTS_TOPIC_ROOT) else (1, 3)
prefix = '_'.join(words[start:end])
name = words[2] if topic.startswith(OBJECTS_TOPIC_ROOT) else words[1]
return prefix, name
def check_format(data):
img_fmt = imghdr.what(None, h=data)
return 'jpg' if img_fmt == 'jpeg' else img_fmt
def write_image(bridge, outdir, msg, fmt='png'):
results = {}
image_filename = os.path.join(outdir, str(msg.header.stamp.to_nsec()) + '.' + fmt)
try:
if hasattr(msg, 'format') and 'compressed' in msg.format:
buf = np.ndarray(shape=(1, len(msg.data)), dtype=np.uint8, buffer=msg.data)
cv_image = cv2.imdecode(buf, cv2.IMREAD_ANYCOLOR)
if cv_image.shape[2] != 3:
print("Invalid image %s" % image_filename)
return results
results['height'] = cv_image.shape[0]
results['width'] = cv_image.shape[1]
# Avoid re-encoding if we don't have to
if check_format(msg.data) == fmt:
buf.tofile(image_filename)
else:
cv2.imwrite(image_filename, cv_image)
else:
cv_image = bridge.imgmsg_to_cv2(msg, "bgr8")
cv2.imwrite(image_filename, cv_image)
except CvBridgeError as e:
print(e)
results['filename'] = image_filename
return results
def camera2dict(msg, write_results, camera_dict):
camera_dict["timestamp"].append(msg.header.stamp.to_nsec())
if write_results:
camera_dict["width"].append(write_results['width'] if 'width' in write_results else msg.width)
camera_dict['height'].append(write_results['height'] if 'height' in write_results else msg.height)
camera_dict["frame_id"].append(msg.header.frame_id)
camera_dict["filename"].append(write_results['filename'])
def gps2dict(msg, gps_dict):
gps_dict["timestamp"].append(msg.header.stamp.to_nsec())
gps_dict["lat"].append(msg.latitude)
gps_dict["long"].append(msg.longitude)
gps_dict["alt"].append(msg.altitude)
def rtk2dict(msg, rtk_dict):
rtk_dict["timestamp"].append(msg.header.stamp.to_nsec())
rtk_dict["tx"].append(msg.pose.pose.position.x)
rtk_dict["ty"].append(msg.pose.pose.position.y)
rtk_dict["tz"].append(msg.pose.pose.position.z)
rotq = kd.Rotation.Quaternion(
msg.pose.pose.orientation.x,
msg.pose.pose.orientation.y,
msg.pose.pose.orientation.z,
msg.pose.pose.orientation.w)
rot_xyz = rotq.GetRPY()
rtk_dict["rx"].append(0.0) #rot_xyz[0]
rtk_dict["ry"].append(0.0) #rot_xyz[1]
rtk_dict["rz"].append(rot_xyz[2])
def imu2dict(msg, imu_dict):
imu_dict["timestamp"].append(msg.header.stamp.to_nsec())
imu_dict["ax"].append(msg.linear_acceleration.x)
imu_dict["ay"].append(msg.linear_acceleration.y)
imu_dict["az"].append(msg.linear_acceleration.z)
def get_yaw(p1, p2):
if abs(p1[0] - p2[0]) < 1e-2:
return 0.
return math.atan2(p1[1] - p2[1], p1[0] - p2[0])
def dict_to_vect(di):
return kd.Vector(di['tx'], di['ty'], di['tz'])
def list_to_vect(li):
return kd.Vector(li[0], li[1], li[2])
def frame_to_dict(frame):
r, p, y = frame.M.GetRPY()
return dict(tx=frame.p[0], ty=frame.p[1], tz=frame.p[2], rx=r, ry=p, rz=y)
def get_obstacle_pos(
front,
rear,
obstacle,
velodyne_to_front,
gps_to_centroid):
front_v = dict_to_vect(front)
rear_v = dict_to_vect(rear)
obs_v = dict_to_vect(obstacle)
yaw = get_yaw(front_v, rear_v)
rot_z = kd.Rotation.RotZ(-yaw)
diff = obs_v - front_v
res = rot_z * diff
res += list_to_vect(velodyne_to_front)
# FIXME the gps_to_centroid offset of the obstacle should be rotated by
# the obstacle's yaw. Unfortunately the obstacle's pose is unknown at this
# point so we will assume obstacle is axis aligned with capture vehicle
# for now.
res += list_to_vect(gps_to_centroid)
return frame_to_dict(kd.Frame(kd.Rotation(), res))
def interpolate_to_camera(camera_df, other_dfs, filter_cols=[]):
if not isinstance(other_dfs, list):
other_dfs = [other_dfs]
if not isinstance(camera_df.index, pd.DatetimeIndex):
print('Error: Camera dataframe needs to be indexed by timestamp for interpolation')
return pd.DataFrame()
for o in other_dfs:
o['timestamp'] = pd.to_datetime(o['timestamp'])
o.set_index(['timestamp'], inplace=True)
o.index.rename('index', inplace=True)
merged = functools.reduce(lambda left, right: pd.merge(
left, right, how='outer', left_index=True, right_index=True), [camera_df] + other_dfs)
merged.interpolate(method='time', inplace=True, limit=100, limit_direction='both')
filtered = merged.loc[camera_df.index] # back to only camera rows
filtered.fillna(0.0, inplace=True)
filtered['timestamp'] = filtered.index.astype('int') # add back original timestamp integer col
if filter_cols:
if not 'timestamp' in filter_cols:
filter_cols += ['timestamp']
filtered = filtered[filter_cols]
return filtered
def estimate_obstacle_poses(
cap_front_rtk,
#cap_front_gps_offset,
cap_rear_rtk,
#cap_rear_gps_offset,
obs_rear_rtk,
obs_rear_gps_offset, # offset along [l, w, h] dim of car, in obstacle relative coords
):
# offsets are all [l, w, h] lists (or tuples)
assert(len(obs_rear_gps_offset) == 3)
# all coordinate records should be interpolated to same sample base at this point
assert len(cap_front_rtk) == len(cap_rear_rtk) == len(obs_rear_rtk)
velo_to_front = [-1.0922, 0, -0.0508]
rtk_coords = zip(cap_front_rtk, cap_rear_rtk, obs_rear_rtk)
output_poses = [
get_obstacle_pos(c[0], c[1], c[2], velo_to_front, obs_rear_gps_offset) for c in rtk_coords]
return output_poses
def check_oneof_topics_present(topic_map, name, topics):
if not isinstance(topics, list):
topics = [topics]
if not any(t in topic_map for t in topics):
print('Error: One of %s must exist in bag, skipping bag %s.' % (topics, name))
return False
return True
def main():
parser = argparse.ArgumentParser(description='Convert rosbag to images and csv.')
parser.add_argument('-o', '--outdir', type=str, nargs='?', default='/output',
help='Output folder')
parser.add_argument('-i', '--indir', type=str, nargs='?', default='/data',
help='Input folder where bagfiles are located')
parser.add_argument('-f', '--img_format', type=str, nargs='?', default='jpg',
help='Image encode format, png or jpg')
parser.add_argument('-m', dest='msg_only', action='store_true', help='Messages only, no images')
parser.add_argument('-d', dest='debug', action='store_true', help='Debug print enable')
parser.set_defaults(msg_only=False)
parser.set_defaults(debug=False)
args = parser.parse_args()
img_format = args.img_format
base_outdir = args.outdir
indir = args.indir
msg_only = args.msg_only
debug_print = args.debug
bridge = CvBridge()
include_images = False if msg_only else True
filter_topics = CAMERA_TOPICS + CAP_FRONT_RTK_TOPICS + CAP_REAR_RTK_TOPICS \
+ CAP_FRONT_GPS_TOPICS + CAP_REAR_GPS_TOPICS
# For bag sets that may have missing metadata.csv file
default_metadata = [{
'obstacle_name': 'obs1',
'object_type': 'Car',
'gps_l': 2.032,
'gps_w': 1.4478,
'gps_h': 1.6256,
'l': 4.2418,
'w': 1.4478,
'h': 1.5748,
}]
#FIXME scan from bag info in /obstacles/ topic path
OBSTACLES = ['obs1']
OBSTACLE_RTK_TOPICS = [OBJECTS_TOPIC_ROOT + '/' + x + '/rear/gps/rtkfix' for x in OBSTACLES]
filter_topics += OBSTACLE_RTK_TOPICS
bagsets = find_bagsets(indir, filter_topics=filter_topics, set_per_file=True, metadata_filename='metadata.csv')
if not bagsets:
print("No bags found in %s" % indir)
exit(-1)
for bs in bagsets:
print("Processing set %s" % bs.name)
sys.stdout.flush()
if not check_oneof_topics_present(bs.topic_map, bs.name, CAP_FRONT_RTK_TOPICS):
continue
if not check_oneof_topics_present(bs.topic_map, bs.name, CAP_REAR_RTK_TOPICS):
continue
camera_cols = ["timestamp", "width", "height", "frame_id", "filename"]
camera_dict = defaultdict(list)
gps_cols = ["timestamp", "lat", "long", "alt"]
cap_rear_gps_dict = defaultdict(list)
cap_front_gps_dict = defaultdict(list)
rtk_cols = ["timestamp", "tx", "ty", "tz", "rx", "ry", "rz"]
cap_rear_rtk_dict = defaultdict(list)
cap_front_rtk_dict = defaultdict(list)
# For the obstacles, keep track of rtk values for each one in a dictionary (key == topic)
obstacle_rtk_dicts = {k: defaultdict(list) for k in OBSTACLE_RTK_TOPICS}
dataset_outdir = os.path.join(base_outdir, "%s" % bs.name)
get_outdir(dataset_outdir)
if include_images:
camera_outdir = get_outdir(dataset_outdir, "camera")
bs.write_infos(dataset_outdir)
readers = bs.get_readers()
stats_acc = defaultdict(int)
def _process_msg(topic, msg, stats):
timestamp = msg.header.stamp.to_nsec()
if topic in CAMERA_TOPICS:
if debug_print:
print("%s_camera %d" % (topic[1], timestamp))
write_results = {}
if include_images:
write_results = write_image(bridge, camera_outdir, msg, fmt=img_format)
write_results['filename'] = os.path.relpath(write_results['filename'], dataset_outdir)
camera2dict(msg, write_results, camera_dict)
stats['img_count'] += 1
stats['msg_count'] += 1
elif topic in CAP_REAR_RTK_TOPICS:
rtk2dict(msg, cap_rear_rtk_dict)
stats['msg_count'] += 1
elif topic in CAP_FRONT_RTK_TOPICS:
rtk2dict(msg, cap_front_rtk_dict)
stats['msg_count'] += 1
elif topic in CAP_REAR_GPS_TOPICS:
gps2dict(msg, cap_rear_gps_dict)
stats['msg_count'] += 1
elif topic in CAP_FRONT_GPS_TOPICS:
gps2dict(msg, cap_front_gps_dict)
stats['msg_count'] += 1
elif topic in OBSTACLE_RTK_TOPICS:
rtk2dict(msg, obstacle_rtk_dicts[topic])
stats['msg_count'] += 1
else:
pass
for reader in readers:
last_img_log = 0
last_msg_log = 0
for result in reader.read_messages():
_process_msg(*result, stats=stats_acc)
if last_img_log != stats_acc['img_count'] and stats_acc['img_count'] % 1000 == 0:
print("%d images, processed..." % stats_acc['img_count'])
last_img_log = stats_acc['img_count']
sys.stdout.flush()
if last_msg_log != stats_acc['msg_count'] and stats_acc['msg_count'] % 10000 == 0:
print("%d messages processed..." % stats_acc['msg_count'])
last_msg_log = stats_acc['msg_count']
sys.stdout.flush()
print("Writing done. %d images, %d messages processed." %
(stats_acc['img_count'], stats_acc['msg_count']))
sys.stdout.flush()
camera_df = pd.DataFrame(data=camera_dict, columns=camera_cols)
cap_rear_gps_df = pd.DataFrame(data=cap_rear_gps_dict, columns=gps_cols)
cap_front_gps_df = pd.DataFrame(data=cap_front_gps_dict, columns=gps_cols)
cap_rear_rtk_df = pd.DataFrame(data=cap_rear_rtk_dict, columns=rtk_cols)
        if not len(cap_rear_rtk_df.index):
            print('Error: No capture vehicle rear RTK entries exist. '
                  'Skipping bag %s.' % bs.name)
            continue
        cap_front_rtk_df = pd.DataFrame(data=cap_front_rtk_dict, columns=rtk_cols)
        if not len(cap_front_rtk_df.index):
            print('Error: No capture vehicle front RTK entries exist. '
                  'Skipping bag %s.' % bs.name)
            continue
if include_images:
camera_df.to_csv(os.path.join(dataset_outdir, 'capture_vehicle_camera.csv'), index=False)
cap_rear_gps_df.to_csv(os.path.join(dataset_outdir, 'capture_vehicle_rear_gps.csv'), index=False)
cap_front_gps_df.to_csv(os.path.join(dataset_outdir, 'capture_vehicle_front_gps.csv'), index=False)
cap_rear_rtk_df.to_csv(os.path.join(dataset_outdir, 'capture_vehicle_rear_rtk.csv'), index=False)
cap_front_rtk_df.to_csv(os.path.join(dataset_outdir, 'capture_vehicle_front_rtk.csv'), index=False)
obs_rtk_df_dict = {}
for obs_topic, obs_rtk_dict in obstacle_rtk_dicts.items():
obs_prefix, obs_name = obs_prefix_from_topic(obs_topic)
obs_rtk_df = pd.DataFrame(data=obs_rtk_dict, columns=rtk_cols)
if not len(obs_rtk_df.index):
print('Warning: No entries for obstacle %s in %s. Skipping.' % (obs_name, bs.name))
continue
obs_rtk_df.to_csv(os.path.join(dataset_outdir, '%s_rtk.csv' % obs_prefix), index=False)
obs_rtk_df_dict[obs_topic] = obs_rtk_df
if len(camera_dict['timestamp']):
# Interpolate samples from all used sensors to camera frame timestamps
camera_df['timestamp'] = pd.to_datetime(camera_df['timestamp'])
camera_df.set_index(['timestamp'], inplace=True)
camera_df.index.rename('index', inplace=True)
camera_index_df = pd.DataFrame(index=camera_df.index)
cap_rear_gps_interp = interpolate_to_camera(camera_index_df, cap_rear_gps_df, filter_cols=gps_cols)
cap_rear_gps_interp.to_csv(
os.path.join(dataset_outdir, 'capture_vehicle_rear_gps_interp.csv'), header=True)
cap_front_gps_interp = interpolate_to_camera(camera_index_df, cap_front_gps_df, filter_cols=gps_cols)
cap_front_gps_interp.to_csv(
os.path.join(dataset_outdir, 'capture_vehicle_front_gps_interp.csv'), header=True)
cap_rear_rtk_interp = interpolate_to_camera(camera_index_df, cap_rear_rtk_df, filter_cols=rtk_cols)
cap_rear_rtk_interp.to_csv(
os.path.join(dataset_outdir, 'capture_vehicle_rear_rtk_interp.csv'), header=True)
cap_rear_rtk_interp_rec = cap_rear_rtk_interp.to_dict(orient='records')
cap_front_rtk_interp = interpolate_to_camera(camera_index_df, cap_front_rtk_df, filter_cols=rtk_cols)
cap_front_rtk_interp.to_csv(
os.path.join(dataset_outdir, 'capture_vehicle_front_rtk_interp.csv'), header=True)
cap_front_rtk_interp_rec = cap_front_rtk_interp.to_dict(orient='records')
if not obs_rtk_df_dict:
print('Warning: No obstacles or obstacle RTK data present. '
'Skipping Tracklet generation for %s.' % bs.name)
continue
collection = TrackletCollection()
for obs_topic in obstacle_rtk_dicts.keys():
obs_rtk_df = obs_rtk_df_dict[obs_topic]
obs_interp = interpolate_to_camera(camera_index_df, obs_rtk_df, filter_cols=rtk_cols)
obs_prefix, obs_name = obs_prefix_from_topic(obs_topic)
obs_interp.to_csv(
os.path.join(dataset_outdir, '%s_rtk_interpolated.csv' % obs_prefix), header=True)
# Plot obstacle and front/rear rtk paths in absolute RTK ENU coords
fig = plt.figure()
plt.plot(
obs_interp['tx'].tolist(),
obs_interp['ty'].tolist(),
cap_front_rtk_interp['tx'].tolist(),
cap_front_rtk_interp['ty'].tolist(),
cap_rear_rtk_interp['tx'].tolist(),
cap_rear_rtk_interp['ty'].tolist())
fig.savefig(os.path.join(dataset_outdir, '%s-%s-plot.png' % (bs.name, obs_name)))
plt.close(fig)
# Extract lwh and object type from CSV metadata mapping file
md = bs.metadata if bs.metadata else default_metadata
if not bs.metadata:
print('Warning: Default metadata used, metadata.csv file should be with .bag files.')
for x in md:
if x['obstacle_name'] == obs_name:
mdr = x
obs_tracklet = Tracklet(
object_type=mdr['object_type'], l=mdr['l'], w=mdr['w'], h=mdr['h'], first_frame=0)
# NOTE these calculations are done in obstacle oriented coordinates. The LWH offsets from
# metadata specify offsets from lower left, rear, ground corner of the vehicle. Where +ve is
# along the respective length, width, height axis away from that point. They are converted to
# velodyne/ROS compatible X,Y,Z where X +ve is forward, Y +ve is left, and Z +ve is up.
lrg_to_gps = [mdr['gps_l'], -mdr['gps_w'], mdr['gps_h']]
lrg_to_centroid = [mdr['l'] / 2., -mdr['w'] / 2., mdr['h'] / 2.]
gps_to_centroid = np.subtract(lrg_to_centroid, lrg_to_gps)
# Convert NED RTK coords of obstacle to capture vehicle body frame relative coordinates
obs_tracklet.poses = estimate_obstacle_poses(
cap_front_rtk=cap_front_rtk_interp_rec,
#cap_front_gps_offset=[0.0, 0.0, 0.0],
cap_rear_rtk=cap_rear_rtk_interp_rec,
#cap_rear_gps_offset=[0.0, 0.0, 0.0],
obs_rear_rtk=obs_interp.to_dict(orient='records'),
obs_rear_gps_offset=gps_to_centroid,
)
collection.tracklets.append(obs_tracklet)
# end for obs_topic loop
tracklet_path = os.path.join(dataset_outdir, 'tracklet_labels.xml')
collection.write_xml(tracklet_path)
else:
print('Warning: No camera image times were found. '
'Skipping sensor interpolation and Tracklet generation.')
if __name__ == '__main__':
main()
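
The core trick in `interpolate_to_camera` is: index everything by timestamp, outer-merge onto the camera index, interpolate by time, then keep only the camera rows. A standalone sketch of the same idea on synthetic data (all names and values here are invented for illustration):

```python
import pandas as pd

# Camera frames at 0, 50 and 100 ms (nanosecond timestamps), no payload columns.
camera_df = pd.DataFrame({'timestamp': pd.to_datetime([0, 50_000_000, 100_000_000])})
camera_df.set_index('timestamp', inplace=True)

# A slower GPS signal sampled only at 0 and 100 ms.
gps_df = pd.DataFrame({'timestamp': pd.to_datetime([0, 100_000_000]),
                       'lat': [10.0, 11.0]}).set_index('timestamp')

# Outer-merge on the time index, interpolate by time, keep camera rows only.
merged = camera_df.join(gps_df, how='outer')
merged = merged.interpolate(method='time', limit=100, limit_direction='both')
print(merged.loc[camera_df.index])  # lat at 50 ms comes out as ~10.5
```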
|
[
"rwightman@gmail.com"
] |
rwightman@gmail.com
|
90956d31df1505110d10941aa299f2e1806f1211
|
5e4d6f451ac63dd6a6b22077649bd90ea279557e
|
/kagglechallenge/draw_bb_mask_result.py
|
381f9ecec27f82df9f9c5406243b8c7bb57ba656
|
[] |
no_license
|
Cpires97/Ship-seg
|
ad76937462f0ecf76c3bc1982391c831fc5fd364
|
09f39b6c1196d072d76275aa60c8522e0efe21ff
|
refs/heads/main
| 2023-02-11T09:33:17.534022
| 2021-01-20T23:48:12
| 2021-01-20T23:48:12
| 324,422,266
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 749
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 20 20:54:44 2020
@author: Carlos Pires
"""
import json
import cv2
import os
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
results=os.listdir('./results_train_seg4_focal_weighteddl__batch8_1_densenet121_50epochs/')
filename='00a3ab3cc.jpg'
matches = [x for x in results if x.split('_')[0] == filename.split('.')[0]]
for obj in matches:
with open('./results_train_seg4_focal_weighteddl__batch8_1_densenet121_50epochs/'+obj) as json_file:
bb_file = json.load(json_file)
bb_pr_mask=np.asarray(bb_file['pr_mask'],dtype=np.uint8)
im = Image.fromarray(bb_pr_mask.squeeze())
plt.figure()
plt.imshow(im)
plt.show()
|
[
"c.david.pires@tecnico.ulisboa.pt"
] |
c.david.pires@tecnico.ulisboa.pt
|
cd0e62cb3546e2f5fdeec9751751721c87a062d3
|
483fdaa8184d0a90cd0ab2d57dff6102fc2af37e
|
/django_auto_periodic/django_auto_periodic/settings.py
|
29335994cc98cf233b209539339b921abd6b4aaa
|
[] |
no_license
|
a8568730/Hello-Celery
|
89f68028fca73ce5287689ccd18dddd565d60430
|
4a9e98bbdeed17de5e56bc2aad2c077869c24912
|
refs/heads/master
| 2020-09-24T02:17:33.088379
| 2019-12-11T02:52:18
| 2019-12-11T02:52:18
| 225,638,105
| 0
| 0
| null | 2019-12-09T11:02:34
| 2019-12-03T14:20:18
|
Python
|
UTF-8
|
Python
| false
| false
| 3,145
|
py
|
"""
Django settings for django_auto_periodic project.
Generated by 'django-admin startproject' using Django 2.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cp-t)h0@0@icyzlj#q&0!r_rmp+%b!r$98+^yv@ho$^jcukubq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'app_one',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_auto_periodic.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_auto_periodic.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
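
The repo name (Hello-Celery) and the project name suggest these settings back a Celery periodic-task demo, yet the file itself contains no Celery configuration. A typical minimal `celery.py` for such a project might look like the sketch below; the task path and schedule are assumptions, not taken from the repository:

```python
# Hypothetical django_auto_periodic/celery.py; nothing here is from the repo.
import os

from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_auto_periodic.settings')

app = Celery('django_auto_periodic')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

# Run an assumed task from app_one every 10 seconds via celery beat.
app.conf.beat_schedule = {
    'say-hello-every-10s': {
        'task': 'app_one.tasks.say_hello',
        'schedule': 10.0,
    },
}
```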
|
[
"a8568730@hotmail.com"
] |
a8568730@hotmail.com
|
dc81f5525c0ba30ab81f34e1c4efb219c370c548
|
e7bc5db4fad3d1d14b611a4775e10b4524c467c3
|
/Snake.py
|
559d12c83f1445bf250c07a977d6a502d5998c01
|
[] |
no_license
|
JaredWogan/SnakeGame
|
1e0682e896a2d250012853a0302b2c164a1646cc
|
3bd33bcc5a42799d67aa19505143b60af02899c4
|
refs/heads/main
| 2023-04-23T16:33:10.138685
| 2021-05-15T04:20:07
| 2021-05-15T04:20:07
| 367,501,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,650
|
py
|
# Snake Game, Jared Wogan
# Version 1.0
# May 14, 2020
import random
import curses
import os
cmd = 'mode 50,20'
os.system(cmd)
screen = curses.initscr()
curses.curs_set(0)
screen_h, screen_w = screen.getmaxyx()
# print(sh,sw)
window = curses.newwin(screen_h, screen_w, 0, 0)
window.keypad(1)
window.timeout(100)
snake_x = int(screen_w/4)
snake_y = int(screen_h/2)
snake = [
[snake_y, snake_x],
[snake_y, snake_x-1],
[snake_y, snake_x-2]
]
food = [int(screen_h/2), int(screen_w/2)]
window.addch(int(food[0]), int(food[1]), curses.ACS_PI)
key = curses.KEY_RIGHT
while True:
# print('Food:',food)
# print('Snake Head:',snake[0])
next_key = window.getch()
key = key if next_key == -1 else next_key
if snake[0][0] in [0, screen_h] or snake[0][1] in [0, screen_w] or snake[0] in snake[1:]:
curses.endwin()
quit()
new_head = [snake[0][0], snake[0][1]]
if key == curses.KEY_DOWN:
new_head[0] += 1
if key == curses.KEY_UP:
new_head[0] -= 1
if key == curses.KEY_LEFT:
new_head[1] -= 1
if key == curses.KEY_RIGHT:
new_head[1] += 1
snake.insert(0, new_head)
if snake[0] == food:
food = None
while food is None:
new_food = [
random.randint(1, screen_h-1),
random.randint(1, screen_w-1)
]
food = new_food if new_food not in snake else None
window.addch(food[0], food[1], curses.ACS_PI)
else:
tail = snake.pop()
window.addch(int(tail[0]), int(tail[1]), ' ')
window.addch(int(snake[0][0]), int(snake[0][1]), curses.ACS_CKBOARD)
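
The four direction `if`s map cleanly onto a lookup table; a behaviour-equivalent refactoring sketch (not part of the original file):

```python
# Each arrow key maps to a (dy, dx) step; unknown keys leave the head in place.
DIRECTIONS = {
    curses.KEY_UP: (-1, 0),
    curses.KEY_DOWN: (1, 0),
    curses.KEY_LEFT: (0, -1),
    curses.KEY_RIGHT: (0, 1),
}

dy, dx = DIRECTIONS.get(key, (0, 0))
new_head = [snake[0][0] + dy, snake[0][1] + dx]
```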
|
[
"jared.wogan@gmail.com"
] |
jared.wogan@gmail.com
|
ba466d5a4237575f95a4a64bdee2b013bb2d053f
|
c873d8a3fb03e5b1d38a728444af193c53fd943e
|
/safe/common/types.py
|
2d1e1f3f252af3a285f60ba6f14540667a8dab5c
|
[] |
no_license
|
robertvunabandi/safe
|
9c8deb8368010e036e280e3170f2ebc7fded44b3
|
2f5875ae131a76ab3811f0ea6e3cfc3332ac53f6
|
refs/heads/master
| 2021-07-05T14:12:11.907918
| 2019-08-29T18:22:01
| 2019-08-29T18:22:01
| 199,324,540
| 1
| 0
| null | 2020-10-27T22:03:28
| 2019-07-28T18:43:18
|
Python
|
UTF-8
|
Python
| false
| false
| 549
|
py
|
import enum
class StrEnum(enum.Enum):
def __str__(self) -> str:
return str(self.value)
def __repr__(self) -> str:
return str(self.value)
@enum.unique
class RootCommand(StrEnum):
"""
The types of commands that one can run with safe. For each
command below, one can run
safe COMMAND args-for-command
Or:
safe COMMAND --help
to figure out their structure. Note that commands are
lowercase, not uppercase.
"""
CONVERT = "convert"
CONFIG = "config"
SHELL = "shell"
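
Because `StrEnum` makes both `str()` and `repr()` return the member's value, commands render as their lowercase strings; with the classes above in scope:

```python
print(RootCommand.CONVERT)                           # convert
print(repr(RootCommand.SHELL))                       # shell
print(RootCommand("config") is RootCommand.CONFIG)   # True (lookup by value)
```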
|
[
"rvunabandi@gmail.com"
] |
rvunabandi@gmail.com
|
918d66fda0b057681dabd242074cb2234a407ed2
|
584d04cf290c9e4d3998aa92db1e66b169a9cb76
|
/grog/evaluation/plot.py
|
08022e2c0e19487bf0830d045887b7f3529004d5
|
[
"MIT"
] |
permissive
|
EIHW/Speech_Separation_DC
|
6ae5f6d1c4d07300b5ececc07ae39a40886e572c
|
30730bf801c3e4fca52012eae0529526d4f547f1
|
refs/heads/master
| 2023-01-09T05:16:41.733617
| 2020-02-29T10:48:31
| 2020-02-29T10:48:31
| 237,009,424
| 0
| 0
|
MIT
| 2023-01-05T08:39:28
| 2020-01-29T15:00:43
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,889
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
N_METRICS = 4
def mean_metrics(eval_result, name='SDR'):
return list(map(lambda source_result: np.nanmean(source_result[name]), eval_result))
def mean_all_metrics(eval_result):
cases = map(lambda source_result: "%s - %s" % (source_result['reference_name'], source_result['estimated_name']), eval_result)
return (
list(cases),
mean_metrics(eval_result, 'SDR'),
mean_metrics(eval_result, 'SIR'),
mean_metrics(eval_result, 'ISR'),
mean_metrics(eval_result, 'SAR')
)
def plot_metrics(metrics):
x = []
y = []
y_mean_baseline_sdr = []
for against_sources, against_rev_sources, baseline in metrics:
# Decide which matching we want to take (rev or normal)
#max_index = np.argmax([np.sum(values[1]), np.sum(values_rev[1])])
# Get best results form (rev or normal)
#cases, mean_sdrs, mean_sirs = values if max_index == 0 else values_rev
metric_values = mean_all_metrics(against_sources)
assert len(metric_values) - 1 == N_METRICS
x.extend(metric_values[0])
y.append(metric_values[1:])
if baseline:
y_mean_baseline_sdr.extend(mean_metrics(baseline, 'SDR'))
x = np.arange(len(x))
y = np.array(y).transpose(1, 0, 2).reshape((N_METRICS, -1))
mean_mean_y = np.mean(y, axis=1)
baseline_mean_mean_sir = np.mean(y_mean_baseline_sdr)
fig, ax = plt.subplots(figsize=(15,4))
y_mean_sdrs, y_mean_sirs, y_mean_isrs, y_mean_sars = y
ax.scatter(x, y_mean_sdrs, label='Mean SDRs',s=10)
ax.scatter(x, y_mean_sirs, label='Mean SIRs',s=10)
ax.scatter(x, y_mean_isrs, label='Mean ISRs',s=10)
ax.scatter(x, y_mean_sars, label='Mean SARs',s=10)
#ax.scatter(np.arange(len(y_mean_baseline_sdr)), y_mean_baseline_sdr, label='Baseline Mean SDRs')
mean_mean_sdr, mean_mean_sir, mean_mean_isr, mean_mean_sar = mean_mean_y
#ax.plot(x, [mean_mean_sdr] * len(x), label='Mean-Mean SDR')
#ax.plot(x, [mean_mean_sir] * len(x), label='Mean-Mean SIR')
#ax.plot(x, [mean_mean_isr] * len(x), label='Mean-Mean ISR')
#ax.plot(x, [mean_mean_sar] * len(x), label='Mean-Mean SAR')
#ax.plot(x, [baseline_mean_mean_sir] * len(x), label='Baseline Mean-Mean SDR')
print("Mean-Mean SDR:\f%s" % mean_mean_sdr)
print("Mean-Mean SIR:\f%s" % mean_mean_sir)
print("Mean-Mean ISR:\f%s" % mean_mean_isr)
print("Mean-Mean SAR:\f%s" % mean_mean_sar)
#ax.annotate("%.2f" % mean_mean_sdr, xy=(0, mean_mean_sdr))
#ax.annotate("%.2f" % baseline_mean_mean_sir, xy=(0, baseline_mean_mean_sir))
ax.xaxis.set_major_locator(MaxNLocator(integer=True))
plt.xticks(rotation=90)
plt.ylabel('Decibel')
plt.xlabel('Sample: Reference - Estimated')
ax.legend(loc='upper right')
plt.show()
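
The plotting code expects `eval_result` to be a list of per-source dicts with `reference_name`, `estimated_name`, and per-frame lists for each metric. A synthetic example of that shape (values invented), fed through `mean_all_metrics` with the functions above in scope:

```python
import numpy as np

eval_result = [
    {'reference_name': 's1', 'estimated_name': 'e1',
     'SDR': [5.0, 6.0], 'SIR': [10.0, 12.0], 'ISR': [8.0, np.nan], 'SAR': [7.0, 7.5]},
    {'reference_name': 's2', 'estimated_name': 'e2',
     'SDR': [4.0, 4.5], 'SIR': [9.0, 9.5], 'ISR': [6.0, 6.5], 'SAR': [5.0, 5.5]},
]

cases, sdrs, sirs, isrs, sars = mean_all_metrics(eval_result)
print(cases)  # ['s1 - e1', 's2 - e2']
print(sdrs)   # [5.5, 4.25]
print(isrs)   # [8.0, 6.25] -- np.nanmean ignores the NaN frame
```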
|
[
"max@maxammann.org"
] |
max@maxammann.org
|
73115b0870359a7e40732e9c8b7211f0a2ba8d05
|
2b7bdaa058b442bdf712daca6ef9feb62904bd3a
|
/ina260-simple.py
|
b529ae6bc055c8bc0d76aeefe9ffc347810b9c39
|
[] |
no_license
|
wilcodl/rpi-gpio-scripts
|
2005beb82223006fd63bdae475aef1089cc58132
|
b43eee2ccccf45ed658df777486108ac38c1a105
|
refs/heads/master
| 2022-04-08T12:55:27.174104
| 2020-04-02T20:21:28
| 2020-04-02T20:21:28
| 241,214,499
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
py
|
import time
import board
import adafruit_ina260
i2c = board.I2C()
ina260 = adafruit_ina260.INA260(i2c)
while True:
print("Current: %.2f mA Voltage: %.2f V Power:%.2f mW"
%(ina260.current, ina260.voltage, ina260.power))
time.sleep(1)
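
A small variation smooths the readout by averaging several samples per line; a sketch assuming the same `ina260` object (the sample count is arbitrary):

```python
# Average N quick samples before printing, to damp measurement noise.
N = 10
while True:
    current = sum(ina260.current for _ in range(N)) / N
    voltage = sum(ina260.voltage for _ in range(N)) / N
    power = sum(ina260.power for _ in range(N)) / N
    print("Current: %.2f mA  Voltage: %.2f V  Power: %.2f mW"
          % (current, voltage, power))
    time.sleep(1)
```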
|
[
"wilcodl@gmail.com"
] |
wilcodl@gmail.com
|
34f0670e8aeb7d023253c8831b64d9244700ec3b
|
fe7b668225c0ddd6c577618f54a1a72da2ab4d8b
|
/array/66_plus_one.py
|
605eab59654773284acf81df6a8ce46bda6c47f5
|
[] |
no_license
|
oksiweb/LeetCodeProblems
|
540ecbfea94471855ecc468321dbe635a061eff4
|
2516a6adb95d866feb2bc1bcd85025d11c3d2458
|
refs/heads/master
| 2020-03-08T09:28:35.913521
| 2018-06-02T19:27:54
| 2018-06-02T19:27:54
| 128,048,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 266
|
py
|
def plusOne(digits):
first_el = digits[0]
n = len(digits)
if n > 1 or first_el+1 == 10:
n_str = int(''.join(map(str, digits))) + 1
return list(map(int, str(n_str)))
else:
digits[0] += 1
return digits
print(plusOne([9,9]))
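
The string round-trip works, but the classic in-place carry scan avoids building a string at all; a behaviour-equivalent sketch:

```python
def plus_one_carry(digits):
    # Walk from the last digit, propagating the carry in place.
    for i in range(len(digits) - 1, -1, -1):
        if digits[i] < 9:
            digits[i] += 1
            return digits
        digits[i] = 0
    return [1] + digits  # every digit was 9, e.g. [9, 9] -> [1, 0, 0]

print(plus_one_carry([9, 9]))     # [1, 0, 0]
print(plus_one_carry([1, 2, 3]))  # [1, 2, 4]
```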
|
[
"oksi.web@gmail.com"
] |
oksi.web@gmail.com
|
f62214df217c0aecb2ce119647a853cf0f35d3ce
|
25cf15f81982348cdee729baa5c6c8ca19ab4506
|
/Ziza/wsgi.py
|
c79977b9708e1726230579565b9e29481b153794
|
[] |
no_license
|
HarunColic/ZizaRepo
|
ca962f42cbb3a521e3121174d6bf615187dfb67c
|
79cd051b88a39d678abd8aa329fd7cfdca40cb42
|
refs/heads/master
| 2020-03-26T15:17:32.182469
| 2020-03-03T12:00:46
| 2020-03-03T12:00:46
| 145,034,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
"""
WSGI config for untitled project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Ziza.settings')
application = get_wsgi_application()
|
[
"haruncolic@hotmail.com"
] |
haruncolic@hotmail.com
|
9866af7cbe800397c0510aacfc4aaca46ce6abad
|
cded75114c04a7dde2fb3728b931d0c3aee1398b
|
/mail/smtp.py
|
ef5ae9ba0f1d3f52e98f7bc293f505c916460e4d
|
[] |
no_license
|
dachrisch/expense_helper
|
3887617a2f0575fb750366d2b1404b23e64da091
|
b95f21c3267c185475e0957cc5be92187aacd57b
|
refs/heads/master
| 2016-08-04T08:16:47.576235
| 2012-03-27T14:38:06
| 2012-03-27T14:38:06
| 3,530,927
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,337
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Created on Feb 23, 2012
@author: cda
'''
import smtplib
import logging
from contextlib import contextmanager
class SmtpConnector(object):
def __init__(self, smtp):
self.log = logging.getLogger('SmtpConnector')
self.smtp = smtp
@contextmanager
def create_connection(self, username, password):
try:
yield self.__login(username, password)
finally:
self._close()
def __login(self, username, password):
self.log.info('logging in [%s]' % username)
self.smtp.ehlo()
self.smtp.starttls()
self.smtp.ehlo()
self.smtp.login(username, password)
return self
def _close(self):
self.log.info('closing smtp connection.')
self.smtp.quit()
def email(self, email):
self.log.info('delivering mail [%(Subject)s]...' % email)
self.log.debug('sending [%d] bytes from [%s] to [%s]...' % (len(email.as_string()), email['From'], email['To']))
self.smtp.sendmail(email['From'], email['To'], email.as_string())
@staticmethod
def connector_for(server):
log = logging.getLogger('SmtpConnectorFactory')
log.info('connecting to server [%s]...' % server)
return SmtpConnector(smtplib.SMTP(server, 587))
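
Putting the connector together with a standard-library message looks like this; the server, credentials, and addresses are placeholders, not values from the project:

```python
from email.mime.text import MIMEText

msg = MIMEText('expense report attached')
msg['Subject'] = 'Expenses February'
msg['From'] = 'me@example.com'
msg['To'] = 'accounting@example.com'

# connector_for() hardcodes port 587, matching the STARTTLS login above.
connector = SmtpConnector.connector_for('smtp.example.com')
with connector.create_connection('me@example.com', 'app-password') as connection:
    connection.email(msg)
```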
|
[
"christian.daehn@it-agile.de"
] |
christian.daehn@it-agile.de
|
e7e2c3876b216ae6b6ffbc6d753849b3bf85159f
|
7eeb87fbe6a7d38b4e73a7397fb3fcaae24dde90
|
/Baco/settings.py
|
1c707f8f7412d3c48df5c0d2e391be8ff7434d45
|
[] |
no_license
|
omarCastillo/baco
|
1b51b0c88b2bcbfd88a0549e754cf59a47ee7cda
|
0a9ba109e1c8c6f305371df4e01c1f915998f8ed
|
refs/heads/master
| 2021-04-28T03:16:32.948605
| 2018-02-19T23:43:30
| 2018-02-19T23:43:30
| 122,134,832
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,490
|
py
|
"""
Django settings for Baco project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'g!)cop(7=3ea6s8-n!gn6-yi7xfp--(z4m4qbv_53c7^*-mr@n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
DJANGO_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
PROJECT_APPS = [
'modules.Producto',
'modules.Contacto',
]
THIRD_APPS = [
'rest_framework',
'rest_framework_swagger',
'corsheaders',
]
INSTALLED_APPS = DJANGO_APPS + PROJECT_APPS + THIRD_APPS
MIDDLEWARE = [
    # django-cors-headers: CorsMiddleware should be listed before CommonMiddleware.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Baco.urls'
CORS_ORIGIN_ALLOW_ALL = True
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Baco.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'bacodata',
'USER': 'omar',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'es-mx'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
REST_FRAMEWORK = {
    # Open, unauthenticated API with django-filter support.
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.AllowAny',
    ),
    'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',)
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
ALLOWED_HOSTS = ['*']
EMAIL_HOST = 'smtp.mailtrap.io'
EMAIL_HOST_USER = 'aca21f271cac5c'
EMAIL_HOST_PASSWORD = 'a8e7fe7813bb65'
EMAIL_PORT = '2525'
|
[
"omar.castillo.rosales@gmail.com"
] |
omar.castillo.rosales@gmail.com
|
cfddaa298ea1f40bff37e2364cebc5674e2bf0ba
|
55ca810b514e65768b77262d6a2fd864a5b3aaec
|
/christmasbot/bin/rst2s5.py
|
d39d83e64bfaa4736b8562f357d6d0c490ed6289
|
[
"MIT"
] |
permissive
|
Mechdriver/slack-christmas-bot
|
07dbfaff8f7ff4434c0e6abb3a7b7e74d7743958
|
e29b6c59622b62a3f96d3a5f1d5620e5f8020061
|
refs/heads/master
| 2021-01-12T12:23:06.921505
| 2017-03-16T20:57:38
| 2017-03-16T20:57:38
| 72,477,872
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 691
|
py
|
#!/Users/coursehero/Repos/slack-christmas-bot/christmasbot/bin/python2.7
# $Id: rst2s5.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Chris Liechti <cliechti@gmx.net>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML slides using
the S5 template system.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates S5 (X)HTML slideshow documents from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='s5', description=description)
|
[
"ztbehnke62@gmail.com"
] |
ztbehnke62@gmail.com
|
15cd6bfe31456c18e2e8d3eb6d43ce0586b4e8eb
|
47cac700fa41aace629be955e14ed7ce02b034e9
|
/extras/matrixtrans.py
|
0318895dfe1947e765be9eed3910ddb2914cb57a
|
[] |
no_license
|
labxtreme/newcodes
|
9ae7ec33f57704c245008783b2777a7763636ce7
|
8c9658fa8df34a7fb422decdb8b2cd2d07297046
|
refs/heads/master
| 2020-03-19T19:54:26.877507
| 2018-06-12T06:41:00
| 2018-06-12T06:41:00
| 136,878,786
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 292
|
py
|
m,n=map(int,input("Enter m and n of the matrix:").split())
A=[list(map(int,input("Enter the whole row %d :"%(row+1) ).split())) for row in range(m)]
B=[[A[col][row] for col in range(m)] for row in range(n)]
print("A")
for var in A :
print(*var)
print("B")
for var in B :
print(*var)
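
`B` is simply the transpose of `A`; the nested comprehension is equivalent to the idiomatic `zip` form:

```python
# Equivalent transpose using zip; works for any m x n list-of-lists A.
B = [list(col) for col in zip(*A)]
```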
|
[
"lakshaya.bathla@gmail.com"
] |
lakshaya.bathla@gmail.com
|
7ffaaf57725dbfa83635af9250b7e2068b878741
|
9db1b16e8250aa68c4f6c1452e8a290350a1c6e4
|
/adataExcel.py
|
8a84841d4960adb814b1cceccefe67847c5fbc32
|
[] |
no_license
|
babymin705/python-excel
|
f5098a4049c48364c793cbe8d54712b3abac59af
|
4ec0789ccff91e092831391d8508c08068d0b751
|
refs/heads/master
| 2023-08-22T09:57:15.054389
| 2021-10-12T06:42:27
| 2021-10-12T06:42:27
| 410,773,977
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,165
|
py
|
#!/usr/bin/python3
import mysql.connector
# from os import walk
import os
from pathlib import Path
import datetime
from dateutil.relativedelta import relativedelta
import openpyxl
cnx = mysql.connector.connect(user='root', password='ADATA@STVH0tel',host='127.0.0.1',database='adata')
# Read the files in the folder
f = []
mypath = '/var/www/adata/adata_stats/'
for(dirpath, dirnames, filenames) in os.walk(mypath):
# p = Path(filenames)
# f.extend(filenames)
for name in filenames:
f.append((os.path.join(dirpath, name)))
for filename in f:
p = Path('/var/www/adata/adata_stats/',filename)
# Create the unix timestamp based on file name
name = str(p.stem).split('-')
first_day = datetime.datetime(int(name[0]), int(name[1]), 1) #first day
last_day = first_day + relativedelta(hour=23, minute=59, second=00, day=31) #last day
unix_first_day_in_month = int(round(first_day.timestamp()))
unix_last_day_in_month = int(round(last_day.timestamp()))
# Read all the data in the excel file
wb_obj = openpyxl.load_workbook(p)
# sheet = wb_obj.active
# col_names = []
# for column in sheet.iter_cols(min_row=1, max_col=sheet.max_column):
# col_names.append(column[0].value)
# print(col_names)
# data = {'hotel':[], 'occ':[], 'arr':[]}
keywords = ["aor","arr","rooms_available"]
print("*** start file %s ****"%(filename))
for sheet in wb_obj.worksheets:
print("*** start sheet %s ****"%(sheet))
for i, row in enumerate(sheet.iter_rows(values_only=True)):
if i != 0:
if(row[0]):
hotel_name = row[0]
hotel_aor = row[1].strip() #average_occupancy_rate
hotel_arr = row[2].strip() #average_room_rate
# skip if hotel_arr and hotel_aor is 0
if(hotel_aor != "0.00" and hotel_arr != "0.00"):
# Search the hotel name in hotels table
search_hotel_query = ("SELECT id, rooms FROM hotels WHERE name = %s")
cursor = cnx.cursor()
cursor.execute(search_hotel_query, (hotel_name,))
hotel = cursor.fetchone()
# get the total rooms for available rooms purpose
if(hotel):
# check in stats table
search_stat_query = ('SELECT * FROM stats where hotel_id=%s and start_date = %s')
stat_data = (hotel[0], unix_first_day_in_month)
cursor.execute(search_stat_query, stat_data)
stat = cursor.fetchone()
if(not stat):
hotel_room_available = hotel[1]
hotel_id = hotel[0]
hotel_data = [float(hotel_aor), float(hotel_arr), float(hotel_room_available)]
# insert the data inside the stats table
stats_data = (hotel_id,unix_first_day_in_month,unix_last_day_in_month)
add_stats_query = ("INSERT INTO stats (hotel_id, start_date, end_date, send, updated) "
"VALUES (%s,%s,%s,1,1)")
cursor.execute(add_stats_query, stats_data)
stats_id = cursor.lastrowid
# add to stat_details
                                # 'j' avoids shadowing the row index 'i' of the outer loop.
                                for j, keyword in enumerate(keywords):
                                    add_stats_detail_query = ("INSERT INTO stat_details (stats_id, keyword, value) "
                                        "VALUES (%s, %s, %s)")
                                    stats_detail = (stats_id, keyword, hotel_data[j])
                                    cursor.execute(add_stats_detail_query, stats_detail)
else:
print("*** missing hotel %s"%hotel_name)
cursor.close()
cnx.commit()
cnx.close()
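
The month window comes entirely from the spreadsheet filename; a standalone illustration of that step (the filename is an example; `relativedelta(day=31)` clamps to the month's real last day):

```python
import datetime
from pathlib import Path
from dateutil.relativedelta import relativedelta

p = Path('/var/www/adata/adata_stats/2021-09.xlsx')  # example filename
year, month = (int(part) for part in p.stem.split('-'))

first_day = datetime.datetime(year, month, 1)
last_day = first_day + relativedelta(hour=23, minute=59, second=0, day=31)
print(first_day, '->', last_day)  # 2021-09-01 00:00:00 -> 2021-09-30 23:59:00
print(int(round(first_day.timestamp())), int(round(last_day.timestamp())))
```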
|
[
"linda.wong@selectv.co"
] |
linda.wong@selectv.co
|
b326be0d3157e8e4c6291097ba566e5d4d202d38
|
0247690e0b33e919c8611f6feef37867052bbf51
|
/mayan/apps/events/classes.py
|
7c34b0368df856fcc39496b4c7ed1f8016eb9b45
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
tobennanwokike/mayan-edms
|
2a499daf9ceb5d9a41c71270135fe652ad304ce1
|
89c145adde90eef849903907394b1c79e88470fd
|
refs/heads/master
| 2020-03-28T00:22:12.704262
| 2018-08-17T08:52:12
| 2018-08-17T08:52:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,928
|
py
|
from __future__ import unicode_literals
import logging
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.exceptions import PermissionDenied
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from actstream import action
from .permissions import permission_events_view
logger = logging.getLogger(__name__)
@python_2_unicode_compatible
class EventTypeNamespace(object):
_registry = {}
@classmethod
def all(cls):
return sorted(cls._registry.values())
@classmethod
def get(cls, name):
return cls._registry[name]
def __init__(self, name, label):
self.name = name
self.label = label
self.event_types = []
self.__class__._registry[name] = self
def __str__(self):
return force_text(self.label)
def add_event_type(self, name, label):
event_type = EventType(namespace=self, name=name, label=label)
self.event_types.append(event_type)
return event_type
def get_event_types(self):
return EventType.sort(event_type_list=self.event_types)
@python_2_unicode_compatible
class EventType(object):
_registry = {}
@staticmethod
def sort(event_type_list):
return sorted(
event_type_list, key=lambda x: (x.namespace.label, x.label)
)
@classmethod
def all(cls):
        # Return event types sorted by namespace label, then label.
return EventType.sort(event_type_list=cls._registry.values())
@classmethod
def get(cls, name):
try:
return cls._registry[name]
except KeyError:
return _('Unknown or obsolete event type: %s') % name
@classmethod
def refresh(cls):
for event_type in cls.all():
event_type.get_stored_event_type()
def __init__(self, namespace, name, label):
self.namespace = namespace
self.name = name
self.label = label
self.stored_event_type = None
self.__class__._registry[self.id] = self
def __str__(self):
return force_text('{}: {}'.format(self.namespace.label, self.label))
def commit(self, actor=None, action_object=None, target=None):
AccessControlList = apps.get_model(
app_label='acls', model_name='AccessControlList'
)
Action = apps.get_model(
app_label='actstream', model_name='Action'
)
ContentType = apps.get_model(
app_label='contenttypes', model_name='ContentType'
)
Notification = apps.get_model(
app_label='events', model_name='Notification'
)
results = action.send(
actor or target, actor=actor, verb=self.id,
action_object=action_object, target=target
)
for handler, result in results:
if isinstance(result, Action):
for user in get_user_model().objects.all():
notification = None
if user.event_subscriptions.filter(stored_event_type__name=result.verb).exists():
if result.target:
try:
AccessControlList.objects.check_access(
permissions=permission_events_view,
user=user, obj=result.target
)
except PermissionDenied:
pass
else:
notification, created = Notification.objects.get_or_create(
action=result, user=user
)
else:
notification, created = Notification.objects.get_or_create(
action=result, user=user
)
if result.target:
content_type = ContentType.objects.get_for_model(model=result.target)
relationship = user.object_subscriptions.filter(
content_type=content_type,
object_id=result.target.pk,
stored_event_type__name=result.verb
)
if relationship.exists():
try:
AccessControlList.objects.check_access(
permissions=permission_events_view,
user=user, obj=result.target
)
except PermissionDenied:
pass
else:
notification, created = Notification.objects.get_or_create(
action=result, user=user
)
if not notification and result.action_object:
content_type = ContentType.objects.get_for_model(model=result.action_object)
relationship = user.object_subscriptions.filter(
content_type=content_type,
object_id=result.action_object.pk,
stored_event_type__name=result.verb
)
if relationship.exists():
try:
AccessControlList.objects.check_access(
permissions=permission_events_view,
user=user, obj=result.action_object
)
except PermissionDenied:
pass
else:
notification, created = Notification.objects.get_or_create(
action=result, user=user
)
def get_stored_event_type(self):
if not self.stored_event_type:
StoredEventType = apps.get_model('events', 'StoredEventType')
self.stored_event_type, created = StoredEventType.objects.get_or_create(
name=self.id
)
return self.stored_event_type
@property
def id(self):
return '%s.%s' % (self.namespace.name, self.name)
class ModelEventType(object):
"""
Class to allow matching a model to a specific set of events.
"""
_inheritances = {}
_proxies = {}
_registry = {}
@classmethod
def get_for_class(cls, klass):
return cls._registry.get(klass, ())
@classmethod
def get_for_instance(cls, instance):
StoredEventType = apps.get_model(
app_label='events', model_name='StoredEventType'
)
events = []
class_events = cls._registry.get(type(instance))
if class_events:
events.extend(class_events)
proxy = cls._proxies.get(type(instance))
if proxy:
events.extend(cls._registry.get(proxy))
pks = [
event.id for event in set(events)
]
return EventType.sort(
event_type_list=StoredEventType.objects.filter(name__in=pks)
)
@classmethod
def get_inheritance(cls, model):
return cls._inheritances[model]
@classmethod
def register(cls, model, event_types):
cls._registry.setdefault(model, [])
for event_type in event_types:
cls._registry[model].append(event_type)
@classmethod
def register_inheritance(cls, model, related):
cls._inheritances[model] = related
@classmethod
def register_proxy(cls, source, model):
cls._proxies[model] = source
|
[
"roberto.rosario.gonzalez@gmail.com"
] |
roberto.rosario.gonzalez@gmail.com
|
1b2c6861f8b770d3e0adc653245cf15c1653ed49
|
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
|
/all-gists/d9933aefec50d5a14e37/snippet.py
|
87ad5fd6c98ff7ea83980693401ab52e949be51f
|
[
"MIT"
] |
permissive
|
gistable/gistable
|
26c1e909928ec463026811f69b61619b62f14721
|
665d39a2bd82543d5196555f0801ef8fd4a3ee48
|
refs/heads/master
| 2023-02-17T21:33:55.558398
| 2023-02-11T18:20:10
| 2023-02-11T18:20:10
| 119,861,038
| 76
| 19
| null | 2020-07-26T03:14:55
| 2018-02-01T16:19:24
|
Python
|
UTF-8
|
Python
| false
| false
| 7,254
|
py
|
# Author: Kyle Kastner
# License: BSD 3-Clause
# For a reference on parallel processing in Python see tutorial by David Beazley
# http://www.slideshare.net/dabeaz/an-introduction-to-python-concurrency
# Loosely based on IBM example
# http://www.ibm.com/developerworks/aix/library/au-threadingpython/
# If you want to download all the PASCAL VOC data, use the following in bash...
"""
#! /bin/bash
# 2008
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2008/VOCtrainval_14-Jul-2008.tar
# 2009
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2009/VOCtrainval_11-May-2009.tar
# 2010
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar
# 2011
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2011/VOCtrainval_25-May-2011.tar
# 2012
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar
# Latest devkit
wget http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCdevkit_18-May-2011.tar
"""
try:
import Queue
except ImportError:
import queue as Queue
import threading
import time
import glob
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os
import itertools
import random
class VOCThread(threading.Thread):
"""Image Thread"""
def __init__(self, queue, out_queue):
threading.Thread.__init__(self)
self.queue = queue
self.out_queue = out_queue
def run(self):
while True:
# Grabs image path from queue
image_path_group, mask_path_group = self.queue.get()
image_group = [plt.imread(i) for i in image_path_group]
mask_group = [plt.imread(m) for m in mask_path_group]
# Place images in out queue
self.out_queue.put((image_group, mask_group))
# Signals to queue job is done
self.queue.task_done()
class VOC_dataset(object):
def __init__(self, minibatch_size=3, which_set="train",
voc_path="/data/lisa/data/PASCAL-VOC/VOCdevkit/"):
image_paths = []
mask_paths = []
years = ["VOC2008", "VOC2009", "VOC2010", "VOC2011", "VOC2012"]
for year in years:
voc_year_path = os.path.join(voc_path, year)
image_path = os.path.join(voc_year_path, "JPEGImages")
more_image_paths = glob.glob(os.path.join(image_path, "*.jpg"))
image_paths += more_image_paths
mask_path = os.path.join(voc_year_path, "SegmentationClass")
more_mask_paths = glob.glob(os.path.join(mask_path, "*.png"))
mask_paths += more_mask_paths
def match_paths(seg_file):
names = []
for year in years:
voc_year_path = os.path.join(voc_path, year)
fp = os.path.join(voc_year_path, "ImageSets", "Segmentation")
with open(os.path.join(fp, seg_file)) as f:
names += [fi.strip() for fi in f.readlines()]
ims = []
masks = []
s_ims = sorted(image_paths)
s_masks = sorted(mask_paths)
# Go through short list of names, find first match for each im and
# mask and append
for n in names:
for i in s_ims:
if n in i:
ims.append(i)
break
# slower but logic is easier
for m in s_masks:
if n in m:
masks.append(m)
break
assert len(ims) == len(masks)
return ims, masks
if which_set == "train":
image_paths, mask_paths = match_paths("train.txt")
elif which_set == "trainval":
image_paths, mask_paths = match_paths("trainval.txt")
else:
raise ValueError("Unknown argument to which_set %s" % which_set)
# no segmentations for the test set, assertion will fail
#test_image_paths, test_mask_paths = match_paths("test.txt")
self.image_paths = image_paths
self.mask_paths = mask_paths
assert len(self.image_paths) == len(self.mask_paths)
self.n_per_epoch = len(image_paths)
self.n_samples_seen_ = 0
# Test random order
# random.shuffle(self.image_paths)
self.buffer_size = 5
self.minibatch_size = minibatch_size
self.input_qsize = 15
self.min_input_qsize = 10
if len(self.image_paths) % self.minibatch_size != 0:
print("WARNING: Sample size not an even multiple of minibatch size")
print("Truncating...")
self.image_paths = self.image_paths[:-(
len(self.image_paths) % self.minibatch_size)]
self.mask_paths = self.mask_paths[:-(
len(self.mask_paths) % self.minibatch_size)]
assert len(self.image_paths) % self.minibatch_size == 0
assert len(self.mask_paths) % self.minibatch_size == 0
assert len(self.image_paths) == len(self.mask_paths)
        # Wrap in list() so this also works on Python 3, where zip returns an
        # iterator and would not support len() below.
        self.grouped_images = list(zip(*[iter(self.image_paths)] *
                                       self.minibatch_size))
        self.grouped_masks = list(zip(*[iter(self.mask_paths)] *
                                      self.minibatch_size))
        assert len(self.grouped_images) == len(self.grouped_masks)
# Infinite...
self.grouped_elements = itertools.cycle(zip(self.grouped_images,
self.grouped_masks))
self.queue = Queue.Queue()
self.out_queue = Queue.Queue(maxsize=self.buffer_size)
self._init_queues()
def _init_queues(self):
        # One loader thread; increase the range to add more workers.
        for i in range(1):
            self.it = VOCThread(self.queue, self.out_queue)
            self.it.daemon = True  # setDaemon() is deprecated
            self.it.start()
        # Populate queue with some paths to image data
        for n, _ in enumerate(range(self.input_qsize)):
            group = next(self.grouped_elements)  # Py3 iterators have no .next()
self.queue.put(group)
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
return self._step()
def reset(self):
self.n_samples_seen_ = 0
def _step(self):
if self.n_samples_seen_ >= self.n_per_epoch:
self.reset()
raise StopIteration("End of epoch")
image_group, mask_group = self.out_queue.get()
self.n_samples_seen_ += self.minibatch_size
if self.queue.qsize() <= self.min_input_qsize:
for i in range(self.input_qsize):
                group = next(self.grouped_elements)
self.queue.put(group)
return image_group, mask_group
if __name__ == "__main__":
# Example usage
ds = VOC_dataset(which_set="trainval")
start = time.time()
#n_minibatches_to_run = 5000
itr = 1
while True:
image_group, mask_group = ds.next()
# time.sleep approximates running some model
time.sleep(1)
stop = time.time()
tot = stop - start
print("Threaded time: %s" % (tot))
print("Minibatch %s" % str(itr))
print("Time ratio (s per minibatch): %s" % (tot / float(itr)))
itr += 1
# test
#if itr >= n_minibatches_to_run:
# break
|
[
"gistshub@gmail.com"
] |
gistshub@gmail.com
|
ec3a52a1cc8af36e6101356ceb5018f96a75ae1f
|
aadaeaf925ca7d2d1dd26a0e80d470cc9f16d278
|
/update.py
|
3281406ad2b7a40a0e6b95bede7199f0399ccf48
|
[] |
no_license
|
IamSadiq/py-mongo
|
b0f9f146aa0fb18e86371272f006f5cf9c6c9d6c
|
226e7d74d7dc2581c16aa1ca46a3b15f9affb7c1
|
refs/heads/master
| 2020-04-17T05:50:55.354676
| 2019-01-17T21:48:31
| 2019-01-17T21:48:31
| 166,300,074
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
# UPDATE COLLECTION
import pymongo
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["mydatabase"]
customers = mydb["customers"]
myquery = { "address": "Valley 345" }
newvalues = { "$set": { "address": "Canyon 123" } }
customers.update_one(myquery, newvalues)
#print "customers" after the update:
for x in customers.find():
print(x)
myquery = { "address": { "$regex": "^S" } }
newvalues = { "$set": { "name": "Minnie" } }
x = customers.update_many(myquery, newvalues)
print(x.modified_count, "documents updated.")
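# A minimal extension of the example above (filter values are hypothetical):
# update_one touches only the first match; pass upsert=True to insert a new
# document when nothing matches the filter.
myquery = { "address": "Nowhere 1" }
newvalues = { "$set": { "name": "Placeholder" } }
result = customers.update_one(myquery, newvalues, upsert=True)
print(result.upserted_id)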
|
[
"asiddik5@gmail.com"
] |
asiddik5@gmail.com
|
1ba4f9d594773677b2fe504dcb260d142873a309
|
9fcdbab59e964b53a7e2866a67adb72ee8fb86d0
|
/Particles.py
|
c361a0bcf126c7f80cc8ebcde92b6f5a4b4013f5
|
[
"MIT"
] |
permissive
|
anishakadri/PionDecay
|
04c2929f5d258dd03d37249cb8253a45645668b6
|
03e888398badc17d95bca2d39d77ef86838beaba
|
refs/heads/master
| 2020-03-19T10:01:23.513345
| 2018-06-12T01:51:17
| 2018-06-12T01:51:17
| 136,336,561
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,545
|
py
|
# TODO:
# 1) handle the case of negative velocity for muons and electrons
# 2) compare decay time and exit time for muons
#    (try checking decay position, raising an exception, and using try to call the loop)
# 3) decide what to do when lines exceed the width
import properties as pp
import fourmomentum as rel
import numpy as np
import scipy.optimize as spo
class Particle:
"""
A class definining an instance of a particle;
defined by its energy-momentum four vector, and the initial and
final vertices of its path. All quantities are required to be
in natural units.
"""
def __init__(self, E=0.00, p= [0.00,0.00,0.00], i=[0.00,0.00,0.00],f=[0.00,0.00,0.00]):
self.EP= rel.Fourvector(E, p)
self.initial = np.array(i)
self.final = np.array(f)
if len(p) > 3:
raise Exception("Error: Momentum Vector parameter size")
if len(i) > 3:
raise Exception("Error: Initial Vector parameter size")
if len(f) > 3:
raise Exception("Error: Final Vector parameter size")
def __repr__(self):
return "%s(Energy-Momentum %r, intial=%r, final=%r)" % ("Particle", self.EP, self.initial, self.final)
def __str__(self):
return "[%g, %r]" % (self.__E, self.__p)
def momentum(self):
"""Returns the momentum of a particle in the lab frame."""
return self.EP.momentum()
def mass(self):
"""Returns the rest mass of a particle,
which is equivalent to the rest energy in natural units."""
return np.sqrt((self.energy()**2)-self.EP.magp_sq())
def energy(self):
"""Returns the total energy of the particle."""
return self.EP.energy()
def kin_energy(self):
"""Returns the kinetic energy of a particle."""
        # In natural units the rest energy equals the rest mass.
        return self.energy() - self.mass()
def beta(self):
"""Returns the beta value of a particle.
For a particle travelling at a speed v, Beta = v/c ."""
if self.energy() == 0.0:
return "Total Energy = 0, particle does not exist."
elif self.EP.magp()/self.energy() >= 1:
return "Error: Particle travelling faster than light."
else:
return self.EP.magp()/self.energy()
def gamma(self):
"""Returns the gamma factor for a particle.
In particular, the gamma factor of the particles rest frame
with respect to the lab frame."""
return (self.energy()/self.mass())
class Pion(Particle):
"""
Creates a Pion, with a given total energy (E), a vector momentum (p),
and initial (i) and final (f) vertices of its path in cartesian coordinates.
Although a momentum can be entered upon initialisation, the momentum for the
pion is automatically generated to agree with the Energy-Momentum relation.
"""
def __init__(self, E=0.00, p= [0.00,0.00,0.00], i=[0.00,0.00,0.00],f=[0.00,0.00,0.00]):
Particle.__init__(self, E, p, i, f)
p_mag = np.sqrt(E**2-(pp.pion_mass)**2)
p = p_mag*np.array([0.0, 0.0, 1.0])
self.EP = rel.Fourvector(E, p)
self.initial = i
self.final = f
self.__decay_time = 0
self.__exit_time = 0
if len(p) > 3:
raise Exception("Momentum Vector parameter size")
if len(i) > 3:
raise Exception("Initial Vector parameter size")
if len(f) > 3:
raise Exception("Final Vector parameter size")
if E < pp.pion_mass:
raise Exception("Total energy is less than Pion rest energy.")
def random_decay(self):
"""Decays a pion into either an electron or a muon, using the predefined
branching ratio."""
        # np.random.random_integers is deprecated; randint excludes the high end,
        # so add 1 to keep the same inclusive range.
        x = np.random.randint(0, pp.electron_ratio + 1, 1) #binomial not as accurate at sampling.
for i in x:
if i == 1:
return self.decay_electron()
else:
return self.decay_muon()
def final_position(self):
"""Calculates the final position of a particle, whether it decays within
the chamber or exits one of the chamber sides."""
exit_pos = self.exit_position() #Lab note: have to define as set variables
decay_pos = self.decay_position()
if self.__exit_time > self.__decay_time:
self.final = decay_pos
return self.final
else:
self.final = exit_pos
return self.final
def exit_position(self):
"""Calculates the final position of a particle,
if it exits one of the chamber sides."""
velocity = (self.momentum()*(1/pp.pion_mass))*pp.c
exit_time = (100.00-self.initial[2])/(velocity[2])
self.__exit_time = exit_time
return self.initial + (exit_time* velocity)
def decay_position(self):
"""Calculates the final position of a particle, if it decays."""
velocity = (self.momentum()*(1/pp.pion_mass))*pp.c
pion_time = np.random.exponential(pp.pion_tau) # Lab note: exponential(poisson) distribution of mean lifetime
lab_time = self.gamma()*pion_time
self.__decay_time = lab_time
return self.initial + (lab_time*(velocity))
def decay_muon(self):
"""Decays a pion into a muon."""
mu_start = self.final_position()
if self.__decay_time > self.__exit_time:
return None
else:
muon_energy = ((pp.pion_mass**2) + (pp.muon_mass**2))/(2*pp.pion_mass)
p_mag = np.sqrt(muon_energy**2 - pp.muon_mass**2)
            phi = np.random.uniform(0.0, 2*np.pi) # Lab note: particles uniformly distributed in phi
            theta = np.arccos(np.random.uniform(-1.0, 1.0)) # Lab note: particles uniformly distributed in cos theta (polar angle)
            # theta is the polar angle and phi the azimuth; the original had them swapped.
            p_dir = np.array([np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)])
EP_pionframe = rel.Fourvector(muon_energy,p_mag*p_dir)
EP_labframe = EP_pionframe.boost(-self.beta())
return Muon(EP_labframe.energy(), EP_labframe.momentum(), i=mu_start, f=[])
def decay_electron(self):
"""Decays a pion into an electron."""
el_start = self.final_position()
if self.__decay_time > self.__exit_time:
return None
else:
electron_energy = ((pp.pion_mass**2) + (pp.electron_mass**2))/(2*pp.pion_mass)
p_mag = np.sqrt(electron_energy**2-pp.electron_mass**2)
            phi = np.random.uniform(0.0, 2*np.pi)
            theta = np.arccos(np.random.uniform(-1.0,1.0))
            # theta is the polar angle and phi the azimuth (see decay_muon above).
            p_dir = np.array([np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)])
EP_pionframe = rel.Fourvector(electron_energy,p_mag*p_dir)
EP_labframe = EP_pionframe.boost(-self.beta())
return Electron(EP_labframe.energy(), p=EP_labframe.momentum(), i=el_start, f=[])
def energy_deposit(self, x):
"""Calculates the energy deposited in an NaI scintillator by a pion,
for every x cm travelled through it. Takes one argument, x."""
return 4.8*x
class Muon(Particle):
"""
Creates a Muon, with a given total energy (E), a vector momentum (p),
and initial (i) and final (f) vertices of its path in cartesian coordinates.
"""
def __init__(self, E=0.00, p= [0.00,0.00,0.00], i=[0.00,0.00,0.00],f=[0.00,0.00,0.00]):
Particle.__init__(self, E, p, i, f)
self.EP = rel.Fourvector(E, p)
self.initial = i
self.final = f
self.__decay_time = 0
self.__exit_time = 0
if len(p) > 3:
raise Exception("Momentum Vector parameter size")
if len(i) > 3:
raise Exception("Initial Vector parameter size")
if len(f) > 3:
raise Exception("Final Vector parameter size")
if E < pp.muon_mass:
raise Exception("Total energy is less than Muon rest energy.")
def exit_position(self):
"""Calculates the final position of a particle,
if it exits one of the chamber sides."""
v = (self.momentum()/pp.muon_mass)*pp.c
Vx, Vy, Vz = v[0], v[1], v[2]
x0, y0, z0 = self.initial[0], self.initial[1], self.initial[2]
if Vz > 0: # for particle travelling in +z direction
if (Vx)**2 + (Vy)**2 == 0.0: #if no radial velocity, then exits parallel to z axis
t = (100.00-z0)/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else: #if it has radial velocity, it exits when radius of position is > 2.5
def func(t):
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 1)
exit_point = self.initial + (v*np.fabs(t))
if exit_point[2] > 100.0: #if leaving circular face, then exit is at z=100
t = (100.00-z0)/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
self.__exit_time = t
return exit_point
elif Vz < 0: #for particle travelling in -z direction
if (Vx)**2 + (Vy)**2 == 0.0: #if no radial velocity, exits parallel to z axis.
t = z0/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
def func(t):
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 1)
exit_point = self.initial + (v*np.fabs(t))
if exit_point[2] > 100.0: #if leaving circular face, then exit is at z=0
t = z0/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
self.__exit_time = t
return exit_point
else: #if no velocity in z-axis
if (Vx)**2 + (Vy)**2 == 0.0: #if no velocity at all, it doesn't exit
return self.initial
else:
def func(t): #if only radial velocity, it exits parallel to x-y plane
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 3)
self.__exit_time = t
return self.initial + (v*np.fabs(t))
def decay_position(self):
"""Calculates the final position of a particle, if it decays."""
velocity = (self.momentum()*(1/pp.muon_mass))*pp.c
muon_time = np.random.exponential(pp.muon_tau)
lab_time = self.gamma()*muon_time
self.__decay_time = lab_time
return self.initial + (lab_time*(velocity))
def final_position(self):
"""Calculates the final position of a particle, whether it decays within
the chamber or exits one of the chamber sides."""
exit_pos = self.exit_position() #Lab note: have to define as set variables, since decay position is randomly generated
decay_pos = self.decay_position()
if self.__exit_time > self.__decay_time:
self.final = decay_pos
return self.final
else:
self.final = exit_pos
return self.final
    def michel(self):
        """Samples an electron energy up to the 53 endpoint used here;
        the original returned None whenever x == y."""
        x, y = np.random.random(2)
        return max(x, y)*53.
def decay_electron(self):
"""Decays a muon into a lectron if it is still in the chamber."""
el_start = self.final_position()
if self.__decay_time > self.__exit_time:
return None
else:
electron_energy = self.michel()
p_mag = np.sqrt(electron_energy**2 - pp.electron_mass**2)
            phi = np.random.uniform(0.0, 2*np.pi)
            theta = np.arccos(np.random.uniform(-1.0,1.0))
            # theta is the polar angle and phi the azimuth (see decay_muon above).
            p_dir = np.array([np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)])
EP_pionframe = rel.Fourvector(electron_energy,p_mag*p_dir)
EP_labframe = EP_pionframe.boost(-self.beta())
return Electron(E= EP_labframe.energy(), p=EP_labframe.momentum(), i=el_start, f=[])
def energy_deposit(self, x):
"""Calculates the energy deposited in an NaI scintillator by a muon,
for every x cm travelled through it. Takes one argument, x."""
return 4.8*x
class Electron(Particle):
"""
Creates an electron, with a given total energy (E), a vector momentum (p),
and initial (i) and final (f) vertices of its path in cartesian coordinates.
"""
def __init__(self, E=0.00, p=[0.00,0.00,0.00], i=[0.00,0.00,0.00],f=[0.00,0.00,0.00]):
Particle.__init__(self, E, p, i, f)
self.EP = rel.Fourvector(E, p)
self.initial = i
self.final = f
self.__exit_time = 0
if len(p) > 3:
raise Exception("Error: Momentum Vector parameter size")
if len(i) > 3:
raise Exception("Error: Initial Vector parameter size")
if len(f) > 3:
raise Exception("Error: Final Vector parameter size")
if E < pp.electron_mass:
raise Exception("Total energy is less than electron rest energy.")
def exit_position(self):
"""Calculates the final position of a particle,
if it exits one of the chamber sides."""
v = (self.momentum()/pp.electron_mass)*pp.c
Vx, Vy, Vz = v[0], v[1], v[2]
x0, y0, z0 = self.initial[0], self.initial[1], self.initial[2]
if Vz > 0: # for particle travelling in +z direction
if (Vx)**2 + (Vy)**2 == 0.0: #if no radial velocity, then exits parallel to z axis
t = (100.00-z0)/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else: #if it has radial velocity, it exits when radius of position is > 2.5
def func(t):
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 1)
exit_point = self.initial + (v*np.fabs(t))
if exit_point[2] > 100.0: #if leaving circular face, then exit is at z=100
t = (100.00-z0)/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
self.__exit_time = t
return exit_point
elif Vz < 0: #for particle travelling in -z direction
if (Vx)**2 + (Vy)**2 == 0.0: #if no radial velocity, exits parallel to z axis.
t = z0/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
def func(t):
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 1)
exit_point = self.initial + (v*np.fabs(t))
if exit_point[2] > 100.0: #if leaving circular face, then exit is at z=0
t = z0/(Vz)
self.__exit_time = t
return self.initial + (v*t)
else:
self.__exit_time = t
return exit_point
else: #if no velocity in z-axis
if (Vx)**2 + (Vy)**2 == 0.0: #if no velocity at all, it doesn't exit
return self.initial
else:
def func(t): #if only radial velocity, it exits parallel to x-y plane
return (x0 + Vx*np.fabs(t))**2 + (y0 + Vy*np.fabs(t))**2 -6.25
t = spo.fsolve(func, 3)
self.__exit_time = t
return self.initial + (v*np.fabs(t))
def final_position(self):
"""where the electron leaves the chamber"""
self.final = self.exit_position()
return self.final
def energy_deposit(self, x):
"""Calculates the energy deposited in an NaI scintillator by
an electron, for every x cm travelled through it.
Takes one argument, x."""
return self.energy()*np.exp(-x/2.6) #N.B: DISTANCE X IN UNITS OF CM.
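# Usage sketch (not in the original file; assumes the properties module pp
# defines pion_mass, pion_tau, c, etc. in consistent natural units):
#
#   pion = Pion(E=500.0, i=[0.0, 0.0, 0.0])
#   daughter = pion.random_decay()   # a Muon, an Electron, or None if it exits
#   if daughter is not None:
#       print(daughter.final_position())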
|
[
"noreply@github.com"
] |
noreply@github.com
|
09c067d91c42ab9faf5f187b126606363f8eb59e
|
ab692ed499950bcc348a66cf096f2714eaf96b9d
|
/keywords.py
|
16ea7154713dca8e264f6269b37349f46659e4f6
|
[] |
no_license
|
mathewtpower/artifact-card-bot
|
c9d5e4a260c7eba70d04c1e2947d43da3f52fa1d
|
7c56c8f096e809eeff36b7f13cafa823b39f7be5
|
refs/heads/master
| 2023-01-08T07:11:23.539436
| 2020-10-19T01:38:36
| 2020-10-19T01:38:36
| 269,209,802
| 0
| 3
| null | 2020-07-30T17:38:03
| 2020-06-03T22:43:19
|
Python
|
UTF-8
|
Python
| false
| false
| 4,581
|
py
|
keywords = {
'Regeneration': "The unit heals this amount during the combat phase. Regeneration is applied before checking for death.",
'After Combat': "An effect that triggers after the Combat Phase.",
'Aura': "An effect which applies to units when they enter a specified area and is removed when they leave.",
'Bounce': "Return target to owners fountain if it's a hero, or the hand if it's a creep or item.",
'Burn': "Remove mana from the enemy player.",
'Cleave': "During the combat phase, deal Cleave damage to all adjacent enemies. Cleave damage doesn't hit towers.",
'Cross Lane': "Cross Lane cards are cast by heroes in one lane, but can target objects in a different lane.",
'Cursed': "Destroyed when replaced by another item.",
'Death Effect': "An effect that is processed after this unit dies.",
'Decay': "This unit will take extra damage in combat. Ignores armor entirely.",
'Devour': "When this unit is placed on top of another unit, it gains that unit's Attack and Health.",
'Disarm': "A disarmed unit does not attack its target during battles. Lasts until the end of round by default.",
'Dispel': "Remove an enchantment.",
'Enchant': "An enchantment lasts until dispelled, remaining even through death.",
'Feeble': "When a unit deals Attack damage to this unit in combat, excess damage is dealt to your tower.",
'Fountain': "When heroes die they are placed in the Fountain zone for a full round before becoming ready to redeploy. When heroes enter the Fountain they are fully healed and temporary effects on them are purged.",
'Jump': "Select a new target for this effect.",
'Lifesteal': "This unit heals equal to its Attack if it survives a combat in which it damaged another unit.",
'Lock': "Cards cannot be played as long as they are locked. Lock is applied for a duration in rounds. At the end of a round, all locked cards lose 1 Lock. Locked cards are revealed.",
'Minion': "An effect which applies to this unit when adjacent to an allied hero.",
'Mulch': "When this card is played draw a card from your deck which costs less mana.",
'Pierce': "Piercing damage is not reduced by the target's armor.",
'Piercing': "Piercing damage is not reduced by the target's armor.",
'Pillager': "This unit steals 2 gold from the opponent whenever it damages their tower.",
'Play Effect': "An additional effect that is processed at the time you play this creep.",
'Purge': "Removes modifications and temporary effects, but not damage. Purging does not affect base abilities or external effects, such as those from equipped items and continuous effects from auras.",
'Push': "Move a unit 1 slot randomly left or right to an occupied spot.",
'Quickcast': "After you play this, you get the initiative coin and may immediately take another action. If you use this action to pass, you will retain initiative and may act first next round.",
'Quickstrike': "Units with quickstrike attack before units without quickstrike. Regeneration and decay are applied at the same time as combat damage, after quickstrike damage resolves.",
'Reflect': "When targeted or attacked, damage that would be done to this unit is instead dealt to the caster or attacker.",
'Retaliate': "When attacked during a battle (even outside of the combat phase), deal this much extra damage to the attackers.",
'Rooted': "Can't be moved.",
'Scheme': "An effect which triggers when the card's owner passes.",
'Siege': "During the combat phase, deal Siege damage to the enemy tower.",
'Sneak Attack': "A unit deals its Attack damage to its target in a one-way battle. Combat attributes such as Armor, Retaliate, and Piercing are applied.",
'Stun': "A stunned unit is silenced (cannot use any active abilities and cannot be used to play cards of its color) and disarmed (does not attack its target during battles). Lasts until the end of round by default.",
'Swap': "Move a unit to the targeted slot. If that slot was occupied, the unit in that space move to the original unit's position.",
'Tower Enchantment': "A tower enchantment is a spell that adds a permanent effect to a tower or lane. Tower enchantments are not units and do not occupy combat positions.",
'Taunt': "When a unit taunts all of its enemy neighbors change their combat target to that unit. Targets reset after each round.",
'Trample': "This unit deals excess Attack damage to the tower when it battles in combat.",
'Untargetable': "Can't be targeted by enemy spells of abilities."
}
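# Usage sketch (not in the original file): the case-insensitive lookup a bot
# command would typically perform against the table above.
def lookup(term):
    for name, text in keywords.items():
        if name.lower() == term.lower():
            return "%s: %s" % (name, text)
    return "Unknown keyword."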
|
[
"mathew.t.power@gmail.com"
] |
mathew.t.power@gmail.com
|
351daf0695f3655bb2f5c218f0f16aa10b92f746
|
159a527d5333f848fa58fed8c39ee6303a507c62
|
/RPi_Drone/simpleCali.py
|
1c52e69d2795f1f083679fe9fbd2b707efe1af8c
|
[] |
no_license
|
GitDubs/Pi_Drone
|
ca3f15f4e33797dd276f8b39c6ac62186ace8d10
|
d3bf8817ce12cd6483128b7dd233a7e132b6e2e9
|
refs/heads/master
| 2020-04-15T19:56:46.744054
| 2020-03-24T16:02:14
| 2020-03-24T16:02:14
| 164,971,308
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 455
|
py
|
import pigpio
import time
import os
import atexit
def exit_handler():
pi.set_servo_pulsewidth(ESC, 0)
ESC = 18
pi = pigpio.pi()
# Register the cleanup early so the ESC is zeroed even if the script aborts.
atexit.register(exit_handler)
pi.set_servo_pulsewidth(ESC, 0)
max_value = 2500
min_value = 1400
# Standard ESC calibration: hold max throttle, connect power, then drop to min.
pi.set_servo_pulsewidth(ESC, max_value)
print("connect battery")
input()
pi.set_servo_pulsewidth(ESC, min_value)
time.sleep(5)
pi.set_servo_pulsewidth(ESC, 1800)
print("press enter to stop")
input()
|
[
"user.email"
] |
user.email
|
45b45006fa8d12f989dc183ab1e0f50b4e2d9667
|
3032a58254a0d61403cc75476438bf60a119c2ea
|
/ADB Scripts/GMLAB Scripts/GM9 Pro/Functional/GPSGoogleMap.py
|
7d7bc345cf7bbd1792137326ec544a1abf74c86b
|
[] |
no_license
|
anithamini/useful-info
|
1e05528d61609ca4249920e41c88957ed1476fd7
|
a393db8d8e727d29d185d75f7920e21770a39e70
|
refs/heads/master
| 2020-04-14T15:42:06.627213
| 2019-01-03T07:02:16
| 2019-01-03T07:02:16
| 163,935,084
| 2
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,142
|
py
|
import os
import re
from time import sleep
def GPS_ON():
print("Enabling GPS.........")
os.system("adb shell settings put secure location_providers_allowed +gps")
def GPS_OFF():
print("Disabling GPS.........")
os.system("adb shell settings put secure location_providers_allowed -gps")
def kill_map():
print("Closing the GoogleMaps Application.......")
os.system("adb shell am force-stop com.google.android.apps.maps")
sleep(1)
def Google_map_launch():
print("Launching the GoogleMaps Application.......")
os.system("adb shell am start -n com.google.android.apps.maps/com.google.android.maps.MapsActivity")
def search_location():
os.system("adb shell input tap 349 109")
sleep(2)
os.system("adb shell input text 'waverock SEZ'")
os.system("adb shell input keyevent 66")
def sim_test():
    os.system("adb shell getprop > gsm.txt")
    ready_states = [
        "[gsm.sim.state]: [READY,READY]",
        "[gsm.sim.state]: [READY,NOT_READY]",
        "[gsm.sim.state]: [NOT_READY,READY]",
        "[gsm.sim.state]: [ABSENT,READY]",
        "[gsm.sim.state]: [READY,ABSENT]",
    ]
    with open("gsm.txt", "r") as fh:
        for line in fh:
            if any(state in line for state in ready_states):
                print("Sim present, so proceeding with the test")
                return 1
    # Only report absence after scanning every line; the original returned
    # on the first non-matching line.
    print("sim not present, please insert the sim and start the test")
    return 0
def switch_mobiledata():
print("Enabling the MobileData")
os.system("adb shell svc data enable")
sleep(3)
def mobiledata_off():
print("Disabling the MobileData")
os.system("adb shell svc data disable")
sleep(1)
def validation():
os.system("adb shell dumpsys location>text.txt")
str1="mStarted=false"
with open("text.txt","r") as fd:
buf=fd.read()
if(re.search(str1,buf,re.I)):
return(True)
else:
return(False)
def checkmobiledata():
    os.system("adb shell getprop > mobiledata.txt")
    with open("mobiledata.txt", "r") as fp:  # close the handle deterministically
        buff = fp.read()
    str1 = "[gsm.defaultpdpcontext.active]: [true]"
    if str1 in buff:
        print(str1)
        return 1
    else:
        return 0
res = sim_test()
if res:
    print("sim is present")
    switch_mobiledata()
    pre = checkmobiledata()
    if pre:
        print("mobile data on")
    sleep(2)
    if validation():
print("GPS is disabled.....So enabling GPS now")
GPS_ON()
sleep(3)
kill_map()
Google_map_launch()
sleep(2)
search_location()
sleep(3)
kill_map()
mobiledata_off()
GPS_OFF()
|
[
"akesiboyina@innominds.com"
] |
akesiboyina@innominds.com
|
92b17db645312bc9f6e200fd79d75a87100ff9fb
|
6cb41daf3a767a960d64cc631abffd69acc96e33
|
/python/strings/isIPv4Address.py
|
ad90d93ea269e2c8e5a6c7551d3bf1ade7175175
|
[] |
no_license
|
kevinislas2/Iprep
|
2ce0e5b77c8cb5a1e714bc94afdb4ecc6de9b2c0
|
9f0be3dfab106e2f29933f746475ed0be8d11a71
|
refs/heads/master
| 2023-04-03T23:09:12.085674
| 2018-12-02T20:55:44
| 2018-12-02T20:55:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 963
|
py
|
'''
An IP address is a numerical label assigned to each device (e.g., computer, printer)
participating in a computer network that uses the Internet Protocol for communication.
There are two versions of the Internet protocol, and thus two versions of addresses. One of them is the IPv4 address.
IPv4 addresses are represented in dot-decimal notation, which consists of four decimal numbers,
each ranging from 0 to 255 inclusive, separated by dots, e.g., 172.16.254.1.
Given a string, find out if it satisfies the IPv4 address naming rules.
'''
def isIPv4Address(inputString):
    arr = inputString.split(".")
    if len(arr) != 4:
        return False
    for i in range(len(arr)):
        if arr[i] == "":
            return False
        for c in arr[i]:
            # ord(c) > 57 missed characters below '0' (e.g. '!'), which made
            # int() raise below; reject anything that is not a digit.
            if not c.isdigit():
                return False
        val = int(arr[i])
        if val < 0 or val > 255 or str(val) != arr[i]:
            return False
    return True
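# A few quick checks (not in the original file) covering the repaired branch:
assert isIPv4Address("172.16.254.1") == True
assert isIPv4Address("1.2.3.4.5") == False   # too many fields
assert isIPv4Address("01.2.3.4") == False    # leading zero
assert isIPv4Address("1.2.3.!") == False     # non-digit, previously raised
assert isIPv4Address("1.2.3.256") == False   # out of range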
|
[
"kevin.islas.abud@gmail.com"
] |
kevin.islas.abud@gmail.com
|
b1ff178eefe0307fd51681e85df4fbbba93522bc
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_freshening.py
|
4611789aff9058da179cb60091496dbac47b7753
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 230
|
py
|
# class header
class _FRESHENING():
  def __init__(self,):
    self.name = "FRESHENING"
    # The original assigned the bare name `freshen`, which is undefined; a
    # list matching self.basic below is assumed here.
    self.definitions = ['freshen']
    self.parents = []
    self.children = []
    self.properties = []
    self.jsondata = {}
    self.basic = ['freshen']
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
21b460ada54b6896a22076f3c95bc5e47cdb682a
|
0fa92ef71d5233652ee7d6bcfbda848e9147e726
|
/test_one_sentence.py
|
ff2dca8e8aa0fb86dd5529d857c579476043cab8
|
[] |
no_license
|
AhmedNageh08/Arabic_Tweets_Classification
|
b22c31d296e1ea7495a2c9fb9bd1a10b71e3780d
|
129952d129d04cbbd87f606d2a4b4c07b3e93db8
|
refs/heads/main
| 2023-01-28T21:07:22.483398
| 2020-12-12T15:03:29
| 2020-12-12T15:03:29
| 320,846,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 343
|
py
|
# Predict using one tweet
import pickle  # required for pickle.load below; missing from the original
text = ["هي وصلت لدرجادي 😯 لا الموضوع كدا محتاج وقفه 💔😂😂"]
#Import the saved file of the model
pkl_filename = "/content/drive/My Drive/SVC_2.pkl"
with open(pkl_filename, 'rb') as file:
pickle_model = pickle.load(file)
result = pickle_model.predict(text)
print(result)
|
[
"noreply@github.com"
] |
noreply@github.com
|
cba78bc85c93e9030484557e09496d98b0f28c3d
|
d1b9b0c2c434bf6e95b3ae1a29a9ef8353c55ec1
|
/polls/migrations/0017_mas_working.py
|
8e70587032977127a95ac774d6dcaeb399c615a8
|
[] |
no_license
|
felixchuo/difits
|
c7ead1bfad8c1c58460d875ffb899dc9b66044a5
|
c64d05751348e87f0705695bfb20f2478ce417f7
|
refs/heads/master
| 2021-01-10T23:24:24.003392
| 2016-10-11T11:27:59
| 2016-10-11T11:27:59
| 70,586,913
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 866
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('polls', '0016_auto_20160105_1151'),
]
operations = [
migrations.CreateModel(
name='Mas_Working',
fields=[
('masWorkingStep', models.AutoField(serialize=False, primary_key=True)),
('masWorkStepAnswer', models.CharField(max_length=255)),
('masStepNo', models.CharField(max_length=10)),
('masRightFeedback', models.CharField(max_length=255)),
('masWrongFeedback', models.CharField(max_length=255)),
('masSuggestHint', models.CharField(max_length=255)),
('mastery', models.ForeignKey(to='polls.Mastery')),
],
),
]
|
[
"felixchuo@gmail.com"
] |
felixchuo@gmail.com
|
a83bcd9b97839b9506dfe2f82ea1ba573778a0ef
|
a96834fd85cecd390a7d717825421b3762e9bcb3
|
/quiz2.py
|
ffb82c77ab88c069a59dd2901603881a1e55fa04
|
[] |
no_license
|
denyyi/bible_quiz
|
e26c1663cff50aa0ffe4035c690a90f5df457dd2
|
eb50ecb5253e5537de8bab735108eb641a50db4c
|
refs/heads/master
| 2022-09-25T14:56:29.458197
| 2020-06-07T04:05:00
| 2020-06-07T04:05:00
| 270,054,980
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 197
|
py
|
import random
class QA:
def __init__(self, question, correctAnswer, otherAnswers):
self.question = question
self.correctAnswer = correctAnswer
self.otherAnswers = otherAnswers
|
[
"noreply@github.com"
] |
noreply@github.com
|
ba8545b2330ee37673a22bad6042f69b2d78cff5
|
22213458a431ea69d13340c1abc2077b98e95f8e
|
/utils.py
|
e90d66b0af2d1ede4ebca451fb061d60c9f0a40b
|
[] |
no_license
|
galenys/MetaWord2Vec
|
8ef8bcbc68b89d4bdde7c5bd2a9daf034bae1039
|
ef5c1ae89c4b495fac4e49b0260957fe4817bef6
|
refs/heads/master
| 2023-05-11T03:34:31.758605
| 2023-04-27T16:40:17
| 2023-04-27T16:40:17
| 283,776,312
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 844
|
py
|
import numpy as np
def print_lines_in_file(file_name):
    # Context manager so the file handle is closed deterministically.
    with open(file_name) as f:
        num_lines = sum(1 for line in f)
    print(f"Number of lines: {num_lines}")
def get_embeddings():
    embeddings = {}  # avoid shadowing the built-in dict
    with open("data/embedding.txt", "rt") as file:
        text = file.read()
    for entry in text.split("\n"):
        try:
            (word, array) = entry.split(" => ")
            array = list(map(float, array.split(", ")[1:-1]))
            array = np.asarray(array)
            embeddings[word] = array
        except ValueError:
            # Skip malformed lines instead of a bare except that hides bugs.
            pass
    return embeddings
def get_closest_word(vec, embeddings):
closest = ""
min_distance = 1e100
for (key, value) in embeddings.items():
distance = np.linalg.norm(value - vec)
if distance < min_distance:
min_distance = distance
closest = key
return closest
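# Usage sketch (not in the original file; assumes data/embedding.txt exists in
# the "word => [v1, v2, ...]" format parsed above):
#
#   embeddings = get_embeddings()
#   vec = embeddings["king"] - embeddings["man"] + embeddings["woman"]
#   print(get_closest_word(vec, embeddings))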
|
[
"shivbhatia10@gmail.com"
] |
shivbhatia10@gmail.com
|
b31731558780703b63f5a3256cb39d8a1303eea3
|
d9bf42f156a7369c4fb2223b0609ab357263f443
|
/old/kivy-opencv/main.py
|
44a48b1e938ce443966620f240c1736e04e93a69
|
[] |
no_license
|
emersonoliveiradev/kivy-studies
|
e77b2f02f15a303c6d43a9dd1558df166fe3133f
|
a1f7fc1753f8c43fde0c5910129a6d1a9aca3940
|
refs/heads/master
| 2022-07-02T12:00:05.578266
| 2020-05-05T23:25:58
| 2020-05-05T23:25:58
| 261,371,935
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 531
|
py
|
from kivy.app import App
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.boxlayout import BoxLayout
from kivy.clock import Clock
from kivy.uix.label import Label
from kivy.config import Config
Config.set('graphics', 'width', '480')
Config.set('graphics', 'height', '640')
import cv2
class Manager(ScreenManager):
pass
class Menu(Screen):
pass
class MyDetection(Screen):
pass
class MainApp(App):
def build(self):
return Manager()
if __name__ == '__main__':
MainApp().run()
|
[
"emersonhaw@gmail.com"
] |
emersonhaw@gmail.com
|
acdd29dacd20142f02beaa838fffab2ae12bff7c
|
5010e5351315e085fc1a519b91a17381d1a12acd
|
/dashboard/tests/mock_data/generate_mock_data.py
|
1ce7f28c9821f35c7affc114b7286eca90f57a8c
|
[] |
no_license
|
BenCh94/gas_dash
|
cb84f80f4fd3a6a0ba966f2f9bb8248e67d5e507
|
caab7ac095ac3e091d060e6eab69bb746db084fe
|
refs/heads/develop
| 2023-02-17T11:52:21.328885
| 2022-02-02T11:19:41
| 2022-02-02T11:19:41
| 145,233,799
| 0
| 2
| null | 2023-02-08T00:44:51
| 2018-08-18T16:06:07
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,252
|
py
|
import requests as r
import json
import pandas as pd
# # Generating data for use in testing portfolio update methods.
# # Data collected on 13/01/2020
# iex_base = 'https://cloud.iexapis.com/v1'
# # Insert production api key below
# api_token = ''
# # Get 2 years benchmark data
# query = f'/stock/voo/chart/2y?token={api_token}'
# url = iex_base + query
# iex_req = r.get(url)
# data = iex_req.json()
# with open('benchmark_data.json', 'w') as outfile:
# json.dump(data, outfile)
# # Get price charts for stocks
# tickers = 'DIS,TWTR,MTCH'
# stock_query = f'/stock/market/batch?token={api_token}&symbols={tickers}&types=chart&range=5y&chartCloseOnly=true'
# stock_url = iex_base + stock_query
# iex_req_stocks = r.get(stock_url)
# stock_data = iex_req_stocks.json()
# with open('stock_charts.json', 'w') as stock_outfile:
# json.dump(stock_data, stock_outfile)
# with open('benchmark_data.json') as json_file:
# data = json.load(json_file)
# bench_df = pd.DataFrame(data)
# bench_df.to_csv('benchmark.csv')
# with open('stock_charts.json') as json_file:
# stock_data = json.load(json_file)
# for key, value in stock_data.items():
# chart = pd.DataFrame(value['chart'])
# chart.to_csv(f'{key}_chart.csv')
|
[
"bchadwick94@gmail.com"
] |
bchadwick94@gmail.com
|
60a8649088a68aed511d9a485ab1af7a7f87fd62
|
d46a5395d112b99c857b323fc6f1d84891354485
|
/bgmmodel.py
|
cbd3c84d5ed958c6daae14e5787f1b51e1bc0fc5
|
[] |
no_license
|
zh616110538/animerecommend
|
bf669ac8ea42b684c8d634a46032086f29ebd09c
|
93d7f4a43b0d99a7491d4b072d382ab26f04af94
|
refs/heads/master
| 2020-03-28T15:42:48.748299
| 2018-09-29T14:11:36
| 2018-09-29T14:11:36
| 148,618,911
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,307
|
py
|
#-*-coding:utf-8-*-
import pickle
import time
def taglist(l):
    dic = {'AnimeScore': 0}
    # Keys must be ints to match redic[count] below; the original used '0'.
    redic = {0: 'AnimeScore'}
count = 1
for i in l:
for item in i:
for tag in item['tag']:
if int(tag[1]) > 50 and not '20' in tag[0]:
if not tag[0] in dic:
dic[tag[0]] = count
redic[count] = tag[0]
count+=1
return dic,redic
def genanimelist(dic,anilist):
l = []
for bunch in anilist:
for item in bunch:
newtag = [0 for i in range(0,len(dic))]
newtag[0] = float(item['score'])/5
popularity = int(item['popularity'])
for tag in item['tag']:
if tag[0] in dic:
newtag[dic[tag[0]]] = int(tag[1])/popularity*10
item['tag'] = newtag
l.append(item)
return l
l = []
with open('bgm.dat', 'rb') as f:
    try:
        while True:
            x = pickle.load(f)
            l.append(x)
    except EOFError:
        # pickle.load raises EOFError once every bunch has been read.
        pass
dic,redic = taglist(l)
animes = genanimelist(dic, l)  # renamed from 'all' to avoid shadowing the builtin
for i in dic:
    print(i)
# for i in animes:
#     print(i)
#     time.sleep(0.2)
with open('anime.dat','wb') as f:
    pickle.dump(dic,f)
    pickle.dump(redic,f)
    pickle.dump(animes,f)
|
[
""
] | |
7968594fee8934fcf7cce6881d096977fc70e252
|
ac3c90bfdeff033d3877371ce530ac8c02c65694
|
/resnet.py
|
c622d4c68928f5a210737bb734f87e6ed51b89bc
|
[] |
no_license
|
AahilA/videoGameGenre
|
ac071c49d3106aee42e36ee046c735c879d61a9e
|
09edbff69f1ef227b572853d228226e0eccdb3ae
|
refs/heads/main
| 2023-02-19T19:57:20.525128
| 2021-01-18T12:22:06
| 2021-01-18T12:22:06
| 328,041,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,509
|
py
|
import torch
import torch.nn as nn
#Peter figure out padding math
class block(nn.Module):
def __init__(self, in_chan, out_chan, stride=1):
super(block,self).__init__()
        self.conv1 = nn.Conv2d(in_chan, out_chan, kernel_size=3, stride=stride, padding=1, bias=False)
        # One BatchNorm per conv: the original shared a single bn across both
        # convolutions, mixing their running statistics.
        self.bn1 = nn.BatchNorm2d(out_chan)
        self.conv2 = nn.Conv2d(out_chan, out_chan, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_chan)
        self.relu = nn.ReLU()
        self.shortcut = nn.Sequential()
        if stride != 1 or in_chan != out_chan:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_chan, out_chan, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(out_chan)
            )
    def forward(self, x):
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, num_classes=10):
super(ResNet, self).__init__()
self.input_chan = 64
self.conv1 = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=1, bias=False),
nn.BatchNorm2d(64)
)
self.relu = nn.ReLU()
self.max_pool = nn.MaxPool2d(3,2)
#Resnet Layers
self.layer1 = self._make_layer(block, 3, 64, 1)
self.layer2 = self._make_layer(block, 4, 128, 2)
self.layer3 = self._make_layer(block, 6, 256, 2)
self.layer4 = self._make_layer(block, 3, 512, 2)
self.avg_pool = nn.AvgPool2d(8)
self.fc = nn.Linear(512, num_classes)
def _make_layer(self, block, num_blocks, out_chan, stride):
layers = []
layers.append(block(self.input_chan, out_chan, stride))
self.input_chan = out_chan
for i in range(num_blocks - 1):
layers.append(block(self.input_chan,out_chan, 1))
return nn.Sequential(*layers)
#TODO ADD POOLING
def forward(self, x):
x = self.conv1(x)
x = self.relu(x)
#Pooling
x = self.max_pool(x)
#Resnet
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
#Pooling
x = self.avg_pool(x)
x = x.reshape(x.shape[0], -1)
x = self.fc(x)
return x
def testRunning():
net = ResNet(block, 65)
y = net(torch.randn(1, 3, 256, 256))
print(y.size())
print(y)
# testRunning()
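# Parameter-count sanity check (not in the original file; the [3, 4, 6, 3]
# layer layout above matches a ResNet-34-style stack of basic blocks):
#
#   net = ResNet(block, 65)
#   print(sum(p.numel() for p in net.parameters()))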
|
[
"asa97@cornell.edu"
] |
asa97@cornell.edu
|
cec5a465126042fd03b4663dc6672c08cd539861
|
d6589ff7cf647af56938a9598f9e2e674c0ae6b5
|
/ice-20201109/alibabacloud_ice20201109/client.py
|
88953d6b4eec3b55cb3d1cba4ff70b9ae9734e45
|
[
"Apache-2.0"
] |
permissive
|
hazho/alibabacloud-python-sdk
|
55028a0605b1509941269867a043f8408fa8c296
|
cddd32154bb8c12e50772fec55429a9a97f3efd9
|
refs/heads/master
| 2023-07-01T17:51:57.893326
| 2021-08-02T08:55:22
| 2021-08-02T08:55:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85,614
|
py
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from typing import Dict
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_endpoint_util.client import Client as EndpointUtilClient
from alibabacloud_ice20201109 import models as ice20201109_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
self,
config: open_api_models.Config,
):
super().__init__(config)
self._endpoint_rule = 'regional'
self._endpoint_map = {
'ap-northeast-1': 'ice.aliyuncs.com',
'ap-northeast-2-pop': 'ice.aliyuncs.com',
'ap-south-1': 'ice.aliyuncs.com',
'ap-southeast-1': 'ice.aliyuncs.com',
'ap-southeast-2': 'ice.aliyuncs.com',
'ap-southeast-3': 'ice.aliyuncs.com',
'ap-southeast-5': 'ice.aliyuncs.com',
'cn-beijing': 'ice.aliyuncs.com',
'cn-beijing-finance-1': 'ice.aliyuncs.com',
'cn-beijing-finance-pop': 'ice.aliyuncs.com',
'cn-beijing-gov-1': 'ice.aliyuncs.com',
'cn-beijing-nu16-b01': 'ice.aliyuncs.com',
'cn-chengdu': 'ice.aliyuncs.com',
'cn-edge-1': 'ice.aliyuncs.com',
'cn-fujian': 'ice.aliyuncs.com',
'cn-haidian-cm12-c01': 'ice.aliyuncs.com',
'cn-hangzhou': 'ice.aliyuncs.com',
'cn-hangzhou-bj-b01': 'ice.aliyuncs.com',
'cn-hangzhou-finance': 'ice.aliyuncs.com',
'cn-hangzhou-internal-prod-1': 'ice.aliyuncs.com',
'cn-hangzhou-internal-test-1': 'ice.aliyuncs.com',
'cn-hangzhou-internal-test-2': 'ice.aliyuncs.com',
'cn-hangzhou-internal-test-3': 'ice.aliyuncs.com',
'cn-hangzhou-test-306': 'ice.aliyuncs.com',
'cn-hongkong': 'ice.aliyuncs.com',
'cn-hongkong-finance-pop': 'ice.aliyuncs.com',
'cn-huhehaote': 'ice.aliyuncs.com',
'cn-huhehaote-nebula-1': 'ice.aliyuncs.com',
'cn-north-2-gov-1': 'ice.aliyuncs.com',
'cn-qingdao': 'ice.aliyuncs.com',
'cn-qingdao-nebula': 'ice.aliyuncs.com',
'cn-shanghai-et15-b01': 'ice.aliyuncs.com',
'cn-shanghai-et2-b01': 'ice.aliyuncs.com',
'cn-shanghai-finance-1': 'ice.aliyuncs.com',
'cn-shanghai-inner': 'ice.aliyuncs.com',
'cn-shanghai-internal-test-1': 'ice.aliyuncs.com',
'cn-shenzhen': 'ice.aliyuncs.com',
'cn-shenzhen-finance-1': 'ice.aliyuncs.com',
'cn-shenzhen-inner': 'ice.aliyuncs.com',
'cn-shenzhen-st4-d01': 'ice.aliyuncs.com',
'cn-shenzhen-su18-b01': 'ice.aliyuncs.com',
'cn-wuhan': 'ice.aliyuncs.com',
'cn-wulanchabu': 'ice.aliyuncs.com',
'cn-yushanfang': 'ice.aliyuncs.com',
'cn-zhangbei': 'ice.aliyuncs.com',
'cn-zhangbei-na61-b01': 'ice.aliyuncs.com',
'cn-zhangjiakou': 'ice.aliyuncs.com',
'cn-zhangjiakou-na62-a01': 'ice.aliyuncs.com',
'cn-zhengzhou-nebula-1': 'ice.aliyuncs.com',
'eu-central-1': 'ice.aliyuncs.com',
'eu-west-1': 'ice.aliyuncs.com',
'eu-west-1-oxs': 'ice.aliyuncs.com',
'me-east-1': 'ice.aliyuncs.com',
'rus-west-1-pop': 'ice.aliyuncs.com',
'us-east-1': 'ice.aliyuncs.com',
'us-west-1': 'ice.aliyuncs.com'
}
self.check_config(config)
self._endpoint = self.get_endpoint('ice', self._region_id, self._endpoint_rule, self._network, self._suffix, self._endpoint_map, self._endpoint)
def get_endpoint(
self,
product_id: str,
region_id: str,
endpoint_rule: str,
network: str,
suffix: str,
endpoint_map: Dict[str, str],
endpoint: str,
) -> str:
if not UtilClient.empty(endpoint):
return endpoint
if not UtilClient.is_unset(endpoint_map) and not UtilClient.empty(endpoint_map.get(region_id)):
return endpoint_map.get(region_id)
return EndpointUtilClient.get_endpoint_rules(product_id, region_id, endpoint_rule, network, suffix)
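    # Usage sketch (credentials and region are placeholders; not part of the
    # auto-generated file):
    #
    #   config = open_api_models.Config(
    #       access_key_id='<ak>', access_key_secret='<sk>'
    #   )
    #   config.region_id = 'cn-shanghai'
    #   client = Client(config)
    #   resp = client.list_smart_jobs(ice20201109_models.ListSmartJobsRequest())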
def list_smart_jobs_with_options(
self,
request: ice20201109_models.ListSmartJobsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListSmartJobsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.ListSmartJobsResponse(),
self.do_rpcrequest('ListSmartJobs', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def list_smart_jobs_with_options_async(
self,
request: ice20201109_models.ListSmartJobsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListSmartJobsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.ListSmartJobsResponse(),
await self.do_rpcrequest_async('ListSmartJobs', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def list_smart_jobs(
self,
request: ice20201109_models.ListSmartJobsRequest,
) -> ice20201109_models.ListSmartJobsResponse:
runtime = util_models.RuntimeOptions()
return self.list_smart_jobs_with_options(request, runtime)
async def list_smart_jobs_async(
self,
request: ice20201109_models.ListSmartJobsRequest,
) -> ice20201109_models.ListSmartJobsResponse:
runtime = util_models.RuntimeOptions()
return await self.list_smart_jobs_with_options_async(request, runtime)
def describe_related_authorization_status_with_options(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DescribeRelatedAuthorizationStatusResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.DescribeRelatedAuthorizationStatusResponse(),
self.do_rpcrequest('DescribeRelatedAuthorizationStatus', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def describe_related_authorization_status_with_options_async(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DescribeRelatedAuthorizationStatusResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.DescribeRelatedAuthorizationStatusResponse(),
await self.do_rpcrequest_async('DescribeRelatedAuthorizationStatus', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def describe_related_authorization_status(self) -> ice20201109_models.DescribeRelatedAuthorizationStatusResponse:
runtime = util_models.RuntimeOptions()
return self.describe_related_authorization_status_with_options(runtime)
async def describe_related_authorization_status_async(self) -> ice20201109_models.DescribeRelatedAuthorizationStatusResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_related_authorization_status_with_options_async(runtime)
def delete_smart_job_with_options(
self,
request: ice20201109_models.DeleteSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteSmartJobResponse(),
self.do_rpcrequest('DeleteSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def delete_smart_job_with_options_async(
self,
request: ice20201109_models.DeleteSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteSmartJobResponse(),
await self.do_rpcrequest_async('DeleteSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def delete_smart_job(
self,
request: ice20201109_models.DeleteSmartJobRequest,
) -> ice20201109_models.DeleteSmartJobResponse:
runtime = util_models.RuntimeOptions()
return self.delete_smart_job_with_options(request, runtime)
async def delete_smart_job_async(
self,
request: ice20201109_models.DeleteSmartJobRequest,
) -> ice20201109_models.DeleteSmartJobResponse:
runtime = util_models.RuntimeOptions()
return await self.delete_smart_job_with_options_async(request, runtime)
def add_template_with_options(
self,
request: ice20201109_models.AddTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.AddTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.AddTemplateResponse(),
self.do_rpcrequest('AddTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def add_template_with_options_async(
self,
request: ice20201109_models.AddTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.AddTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.AddTemplateResponse(),
await self.do_rpcrequest_async('AddTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def add_template(
self,
request: ice20201109_models.AddTemplateRequest,
) -> ice20201109_models.AddTemplateResponse:
runtime = util_models.RuntimeOptions()
return self.add_template_with_options(request, runtime)
async def add_template_async(
self,
request: ice20201109_models.AddTemplateRequest,
) -> ice20201109_models.AddTemplateResponse:
runtime = util_models.RuntimeOptions()
return await self.add_template_with_options_async(request, runtime)
def update_editing_project_with_options(
self,
request: ice20201109_models.UpdateEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateEditingProjectResponse(),
self.do_rpcrequest('UpdateEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def update_editing_project_with_options_async(
self,
request: ice20201109_models.UpdateEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateEditingProjectResponse(),
await self.do_rpcrequest_async('UpdateEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def update_editing_project(
self,
request: ice20201109_models.UpdateEditingProjectRequest,
) -> ice20201109_models.UpdateEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return self.update_editing_project_with_options(request, runtime)
async def update_editing_project_async(
self,
request: ice20201109_models.UpdateEditingProjectRequest,
) -> ice20201109_models.UpdateEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return await self.update_editing_project_with_options_async(request, runtime)
def list_media_producing_jobs_with_options(
self,
request: ice20201109_models.ListMediaProducingJobsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListMediaProducingJobsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListMediaProducingJobsResponse(),
self.do_rpcrequest('ListMediaProducingJobs', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def list_media_producing_jobs_with_options_async(
self,
request: ice20201109_models.ListMediaProducingJobsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListMediaProducingJobsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListMediaProducingJobsResponse(),
await self.do_rpcrequest_async('ListMediaProducingJobs', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def list_media_producing_jobs(
self,
request: ice20201109_models.ListMediaProducingJobsRequest,
) -> ice20201109_models.ListMediaProducingJobsResponse:
runtime = util_models.RuntimeOptions()
return self.list_media_producing_jobs_with_options(request, runtime)
async def list_media_producing_jobs_async(
self,
request: ice20201109_models.ListMediaProducingJobsRequest,
) -> ice20201109_models.ListMediaProducingJobsResponse:
runtime = util_models.RuntimeOptions()
return await self.list_media_producing_jobs_with_options_async(request, runtime)
def get_editing_project_materials_with_options(
self,
request: ice20201109_models.GetEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetEditingProjectMaterialsResponse(),
self.do_rpcrequest('GetEditingProjectMaterials', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def get_editing_project_materials_with_options_async(
self,
request: ice20201109_models.GetEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetEditingProjectMaterialsResponse(),
await self.do_rpcrequest_async('GetEditingProjectMaterials', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def get_editing_project_materials(
self,
request: ice20201109_models.GetEditingProjectMaterialsRequest,
) -> ice20201109_models.GetEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return self.get_editing_project_materials_with_options(request, runtime)
async def get_editing_project_materials_async(
self,
request: ice20201109_models.GetEditingProjectMaterialsRequest,
) -> ice20201109_models.GetEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return await self.get_editing_project_materials_with_options_async(request, runtime)
def get_default_storage_location_with_options(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetDefaultStorageLocationResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.GetDefaultStorageLocationResponse(),
self.do_rpcrequest('GetDefaultStorageLocation', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def get_default_storage_location_with_options_async(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetDefaultStorageLocationResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.GetDefaultStorageLocationResponse(),
await self.do_rpcrequest_async('GetDefaultStorageLocation', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def get_default_storage_location(self) -> ice20201109_models.GetDefaultStorageLocationResponse:
runtime = util_models.RuntimeOptions()
return self.get_default_storage_location_with_options(runtime)
async def get_default_storage_location_async(self) -> ice20201109_models.GetDefaultStorageLocationResponse:
runtime = util_models.RuntimeOptions()
return await self.get_default_storage_location_with_options_async(runtime)
def delete_media_infos_with_options(
self,
request: ice20201109_models.DeleteMediaInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteMediaInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteMediaInfosResponse(),
self.do_rpcrequest('DeleteMediaInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def delete_media_infos_with_options_async(
self,
request: ice20201109_models.DeleteMediaInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteMediaInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteMediaInfosResponse(),
await self.do_rpcrequest_async('DeleteMediaInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def delete_media_infos(
self,
request: ice20201109_models.DeleteMediaInfosRequest,
) -> ice20201109_models.DeleteMediaInfosResponse:
runtime = util_models.RuntimeOptions()
return self.delete_media_infos_with_options(request, runtime)
async def delete_media_infos_async(
self,
request: ice20201109_models.DeleteMediaInfosRequest,
) -> ice20201109_models.DeleteMediaInfosResponse:
runtime = util_models.RuntimeOptions()
return await self.delete_media_infos_with_options_async(request, runtime)
def set_event_callback_with_options(
self,
request: ice20201109_models.SetEventCallbackRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SetEventCallbackResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SetEventCallbackResponse(),
self.do_rpcrequest('SetEventCallback', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def set_event_callback_with_options_async(
self,
request: ice20201109_models.SetEventCallbackRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SetEventCallbackResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SetEventCallbackResponse(),
await self.do_rpcrequest_async('SetEventCallback', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def set_event_callback(
self,
request: ice20201109_models.SetEventCallbackRequest,
) -> ice20201109_models.SetEventCallbackResponse:
runtime = util_models.RuntimeOptions()
return self.set_event_callback_with_options(request, runtime)
async def set_event_callback_async(
self,
request: ice20201109_models.SetEventCallbackRequest,
) -> ice20201109_models.SetEventCallbackResponse:
runtime = util_models.RuntimeOptions()
return await self.set_event_callback_with_options_async(request, runtime)
def get_template_with_options(
self,
request: ice20201109_models.GetTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetTemplateResponse(),
self.do_rpcrequest('GetTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def get_template_with_options_async(
self,
request: ice20201109_models.GetTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetTemplateResponse(),
await self.do_rpcrequest_async('GetTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def get_template(
self,
request: ice20201109_models.GetTemplateRequest,
) -> ice20201109_models.GetTemplateResponse:
runtime = util_models.RuntimeOptions()
return self.get_template_with_options(request, runtime)
async def get_template_async(
self,
request: ice20201109_models.GetTemplateRequest,
) -> ice20201109_models.GetTemplateResponse:
runtime = util_models.RuntimeOptions()
return await self.get_template_with_options_async(request, runtime)
def register_media_info_with_options(
self,
request: ice20201109_models.RegisterMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.RegisterMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.RegisterMediaInfoResponse(),
self.do_rpcrequest('RegisterMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def register_media_info_with_options_async(
self,
request: ice20201109_models.RegisterMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.RegisterMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.RegisterMediaInfoResponse(),
await self.do_rpcrequest_async('RegisterMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def register_media_info(
self,
request: ice20201109_models.RegisterMediaInfoRequest,
) -> ice20201109_models.RegisterMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return self.register_media_info_with_options(request, runtime)
async def register_media_info_async(
self,
request: ice20201109_models.RegisterMediaInfoRequest,
) -> ice20201109_models.RegisterMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return await self.register_media_info_with_options_async(request, runtime)
def create_editing_project_with_options(
self,
request: ice20201109_models.CreateEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.CreateEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.CreateEditingProjectResponse(),
self.do_rpcrequest('CreateEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def create_editing_project_with_options_async(
self,
request: ice20201109_models.CreateEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.CreateEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.CreateEditingProjectResponse(),
await self.do_rpcrequest_async('CreateEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def create_editing_project(
self,
request: ice20201109_models.CreateEditingProjectRequest,
) -> ice20201109_models.CreateEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return self.create_editing_project_with_options(request, runtime)
async def create_editing_project_async(
self,
request: ice20201109_models.CreateEditingProjectRequest,
) -> ice20201109_models.CreateEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return await self.create_editing_project_with_options_async(request, runtime)
def batch_get_media_infos_with_options(
self,
request: ice20201109_models.BatchGetMediaInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.BatchGetMediaInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.BatchGetMediaInfosResponse(),
self.do_rpcrequest('BatchGetMediaInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def batch_get_media_infos_with_options_async(
self,
request: ice20201109_models.BatchGetMediaInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.BatchGetMediaInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.BatchGetMediaInfosResponse(),
await self.do_rpcrequest_async('BatchGetMediaInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def batch_get_media_infos(
self,
request: ice20201109_models.BatchGetMediaInfosRequest,
) -> ice20201109_models.BatchGetMediaInfosResponse:
runtime = util_models.RuntimeOptions()
return self.batch_get_media_infos_with_options(request, runtime)
async def batch_get_media_infos_async(
self,
request: ice20201109_models.BatchGetMediaInfosRequest,
) -> ice20201109_models.BatchGetMediaInfosResponse:
runtime = util_models.RuntimeOptions()
return await self.batch_get_media_infos_with_options_async(request, runtime)
def set_default_storage_location_with_options(
self,
request: ice20201109_models.SetDefaultStorageLocationRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SetDefaultStorageLocationResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SetDefaultStorageLocationResponse(),
self.do_rpcrequest('SetDefaultStorageLocation', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def set_default_storage_location_with_options_async(
self,
request: ice20201109_models.SetDefaultStorageLocationRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SetDefaultStorageLocationResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SetDefaultStorageLocationResponse(),
await self.do_rpcrequest_async('SetDefaultStorageLocation', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def set_default_storage_location(
self,
request: ice20201109_models.SetDefaultStorageLocationRequest,
) -> ice20201109_models.SetDefaultStorageLocationResponse:
runtime = util_models.RuntimeOptions()
return self.set_default_storage_location_with_options(request, runtime)
async def set_default_storage_location_async(
self,
request: ice20201109_models.SetDefaultStorageLocationRequest,
) -> ice20201109_models.SetDefaultStorageLocationResponse:
runtime = util_models.RuntimeOptions()
return await self.set_default_storage_location_with_options_async(request, runtime)
def update_media_info_with_options(
self,
request: ice20201109_models.UpdateMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateMediaInfoResponse(),
self.do_rpcrequest('UpdateMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def update_media_info_with_options_async(
self,
request: ice20201109_models.UpdateMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateMediaInfoResponse(),
await self.do_rpcrequest_async('UpdateMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def update_media_info(
self,
request: ice20201109_models.UpdateMediaInfoRequest,
) -> ice20201109_models.UpdateMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return self.update_media_info_with_options(request, runtime)
async def update_media_info_async(
self,
request: ice20201109_models.UpdateMediaInfoRequest,
) -> ice20201109_models.UpdateMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return await self.update_media_info_with_options_async(request, runtime)
def get_media_producing_job_with_options(
self,
request: ice20201109_models.GetMediaProducingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetMediaProducingJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetMediaProducingJobResponse(),
self.do_rpcrequest('GetMediaProducingJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def get_media_producing_job_with_options_async(
self,
request: ice20201109_models.GetMediaProducingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetMediaProducingJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetMediaProducingJobResponse(),
await self.do_rpcrequest_async('GetMediaProducingJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def get_media_producing_job(
self,
request: ice20201109_models.GetMediaProducingJobRequest,
) -> ice20201109_models.GetMediaProducingJobResponse:
runtime = util_models.RuntimeOptions()
return self.get_media_producing_job_with_options(request, runtime)
async def get_media_producing_job_async(
self,
request: ice20201109_models.GetMediaProducingJobRequest,
) -> ice20201109_models.GetMediaProducingJobResponse:
runtime = util_models.RuntimeOptions()
return await self.get_media_producing_job_with_options_async(request, runtime)
def describe_ice_product_status_with_options(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DescribeIceProductStatusResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.DescribeIceProductStatusResponse(),
self.do_rpcrequest('DescribeIceProductStatus', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def describe_ice_product_status_with_options_async(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DescribeIceProductStatusResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.DescribeIceProductStatusResponse(),
await self.do_rpcrequest_async('DescribeIceProductStatus', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def describe_ice_product_status(self) -> ice20201109_models.DescribeIceProductStatusResponse:
runtime = util_models.RuntimeOptions()
return self.describe_ice_product_status_with_options(runtime)
async def describe_ice_product_status_async(self) -> ice20201109_models.DescribeIceProductStatusResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_ice_product_status_with_options_async(runtime)
def list_media_basic_infos_with_options(
self,
request: ice20201109_models.ListMediaBasicInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListMediaBasicInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListMediaBasicInfosResponse(),
self.do_rpcrequest('ListMediaBasicInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def list_media_basic_infos_with_options_async(
self,
request: ice20201109_models.ListMediaBasicInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListMediaBasicInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListMediaBasicInfosResponse(),
await self.do_rpcrequest_async('ListMediaBasicInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def list_media_basic_infos(
self,
request: ice20201109_models.ListMediaBasicInfosRequest,
) -> ice20201109_models.ListMediaBasicInfosResponse:
runtime = util_models.RuntimeOptions()
return self.list_media_basic_infos_with_options(request, runtime)
async def list_media_basic_infos_async(
self,
request: ice20201109_models.ListMediaBasicInfosRequest,
) -> ice20201109_models.ListMediaBasicInfosResponse:
runtime = util_models.RuntimeOptions()
return await self.list_media_basic_infos_with_options_async(request, runtime)
def submit_subtitle_produce_job_with_options(
self,
request: ice20201109_models.SubmitSubtitleProduceJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitSubtitleProduceJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitSubtitleProduceJobResponse(),
self.do_rpcrequest('SubmitSubtitleProduceJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_subtitle_produce_job_with_options_async(
self,
request: ice20201109_models.SubmitSubtitleProduceJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitSubtitleProduceJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitSubtitleProduceJobResponse(),
await self.do_rpcrequest_async('SubmitSubtitleProduceJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_subtitle_produce_job(
self,
request: ice20201109_models.SubmitSubtitleProduceJobRequest,
) -> ice20201109_models.SubmitSubtitleProduceJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_subtitle_produce_job_with_options(request, runtime)
async def submit_subtitle_produce_job_async(
self,
request: ice20201109_models.SubmitSubtitleProduceJobRequest,
) -> ice20201109_models.SubmitSubtitleProduceJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_subtitle_produce_job_with_options_async(request, runtime)
def submit_key_word_cut_job_with_options(
self,
request: ice20201109_models.SubmitKeyWordCutJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitKeyWordCutJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.SubmitKeyWordCutJobResponse(),
self.do_rpcrequest('SubmitKeyWordCutJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def submit_key_word_cut_job_with_options_async(
self,
request: ice20201109_models.SubmitKeyWordCutJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitKeyWordCutJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.SubmitKeyWordCutJobResponse(),
await self.do_rpcrequest_async('SubmitKeyWordCutJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def submit_key_word_cut_job(
self,
request: ice20201109_models.SubmitKeyWordCutJobRequest,
) -> ice20201109_models.SubmitKeyWordCutJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_key_word_cut_job_with_options(request, runtime)
async def submit_key_word_cut_job_async(
self,
request: ice20201109_models.SubmitKeyWordCutJobRequest,
) -> ice20201109_models.SubmitKeyWordCutJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_key_word_cut_job_with_options_async(request, runtime)
def add_editing_project_materials_with_options(
self,
request: ice20201109_models.AddEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.AddEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.AddEditingProjectMaterialsResponse(),
self.do_rpcrequest('AddEditingProjectMaterials', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def add_editing_project_materials_with_options_async(
self,
request: ice20201109_models.AddEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.AddEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.AddEditingProjectMaterialsResponse(),
await self.do_rpcrequest_async('AddEditingProjectMaterials', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def add_editing_project_materials(
self,
request: ice20201109_models.AddEditingProjectMaterialsRequest,
) -> ice20201109_models.AddEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return self.add_editing_project_materials_with_options(request, runtime)
async def add_editing_project_materials_async(
self,
request: ice20201109_models.AddEditingProjectMaterialsRequest,
) -> ice20201109_models.AddEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return await self.add_editing_project_materials_with_options_async(request, runtime)
def submit_asrjob_with_options(
self,
request: ice20201109_models.SubmitASRJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitASRJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitASRJobResponse(),
self.do_rpcrequest('SubmitASRJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_asrjob_with_options_async(
self,
request: ice20201109_models.SubmitASRJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitASRJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitASRJobResponse(),
await self.do_rpcrequest_async('SubmitASRJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_asrjob(
self,
request: ice20201109_models.SubmitASRJobRequest,
) -> ice20201109_models.SubmitASRJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_asrjob_with_options(request, runtime)
async def submit_asrjob_async(
self,
request: ice20201109_models.SubmitASRJobRequest,
) -> ice20201109_models.SubmitASRJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_asrjob_with_options_async(request, runtime)
def get_editing_project_with_options(
self,
request: ice20201109_models.GetEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEditingProjectResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetEditingProjectResponse(),
self.do_rpcrequest('GetEditingProject', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def get_editing_project_with_options_async(
self,
request: ice20201109_models.GetEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEditingProjectResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetEditingProjectResponse(),
await self.do_rpcrequest_async('GetEditingProject', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def get_editing_project(
self,
request: ice20201109_models.GetEditingProjectRequest,
) -> ice20201109_models.GetEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return self.get_editing_project_with_options(request, runtime)
async def get_editing_project_async(
self,
request: ice20201109_models.GetEditingProjectRequest,
) -> ice20201109_models.GetEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return await self.get_editing_project_with_options_async(request, runtime)
def list_sys_templates_with_options(
self,
request: ice20201109_models.ListSysTemplatesRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListSysTemplatesResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.ListSysTemplatesResponse(),
self.do_rpcrequest('ListSysTemplates', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def list_sys_templates_with_options_async(
self,
request: ice20201109_models.ListSysTemplatesRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListSysTemplatesResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.ListSysTemplatesResponse(),
await self.do_rpcrequest_async('ListSysTemplates', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def list_sys_templates(
self,
request: ice20201109_models.ListSysTemplatesRequest,
) -> ice20201109_models.ListSysTemplatesResponse:
runtime = util_models.RuntimeOptions()
return self.list_sys_templates_with_options(request, runtime)
async def list_sys_templates_async(
self,
request: ice20201109_models.ListSysTemplatesRequest,
) -> ice20201109_models.ListSysTemplatesResponse:
runtime = util_models.RuntimeOptions()
return await self.list_sys_templates_with_options_async(request, runtime)
def delete_template_with_options(
self,
request: ice20201109_models.DeleteTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.DeleteTemplateResponse(),
self.do_rpcrequest('DeleteTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def delete_template_with_options_async(
self,
request: ice20201109_models.DeleteTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.DeleteTemplateResponse(),
await self.do_rpcrequest_async('DeleteTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def delete_template(
self,
request: ice20201109_models.DeleteTemplateRequest,
) -> ice20201109_models.DeleteTemplateResponse:
runtime = util_models.RuntimeOptions()
return self.delete_template_with_options(request, runtime)
async def delete_template_async(
self,
request: ice20201109_models.DeleteTemplateRequest,
) -> ice20201109_models.DeleteTemplateResponse:
runtime = util_models.RuntimeOptions()
return await self.delete_template_with_options_async(request, runtime)
def submit_irjob_with_options(
self,
request: ice20201109_models.SubmitIRJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitIRJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitIRJobResponse(),
self.do_rpcrequest('SubmitIRJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_irjob_with_options_async(
self,
request: ice20201109_models.SubmitIRJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitIRJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitIRJobResponse(),
await self.do_rpcrequest_async('SubmitIRJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_irjob(
self,
request: ice20201109_models.SubmitIRJobRequest,
) -> ice20201109_models.SubmitIRJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_irjob_with_options(request, runtime)
async def submit_irjob_async(
self,
request: ice20201109_models.SubmitIRJobRequest,
) -> ice20201109_models.SubmitIRJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_irjob_with_options_async(request, runtime)
def delete_editing_project_materials_with_options(
self,
request: ice20201109_models.DeleteEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.DeleteEditingProjectMaterialsResponse(),
self.do_rpcrequest('DeleteEditingProjectMaterials', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def delete_editing_project_materials_with_options_async(
self,
request: ice20201109_models.DeleteEditingProjectMaterialsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteEditingProjectMaterialsResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.DeleteEditingProjectMaterialsResponse(),
await self.do_rpcrequest_async('DeleteEditingProjectMaterials', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def delete_editing_project_materials(
self,
request: ice20201109_models.DeleteEditingProjectMaterialsRequest,
) -> ice20201109_models.DeleteEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return self.delete_editing_project_materials_with_options(request, runtime)
async def delete_editing_project_materials_async(
self,
request: ice20201109_models.DeleteEditingProjectMaterialsRequest,
) -> ice20201109_models.DeleteEditingProjectMaterialsResponse:
runtime = util_models.RuntimeOptions()
return await self.delete_editing_project_materials_with_options_async(request, runtime)
def search_editing_project_with_options(
self,
request: ice20201109_models.SearchEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SearchEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SearchEditingProjectResponse(),
self.do_rpcrequest('SearchEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def search_editing_project_with_options_async(
self,
request: ice20201109_models.SearchEditingProjectRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SearchEditingProjectResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SearchEditingProjectResponse(),
await self.do_rpcrequest_async('SearchEditingProject', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def search_editing_project(
self,
request: ice20201109_models.SearchEditingProjectRequest,
) -> ice20201109_models.SearchEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return self.search_editing_project_with_options(request, runtime)
async def search_editing_project_async(
self,
request: ice20201109_models.SearchEditingProjectRequest,
) -> ice20201109_models.SearchEditingProjectResponse:
runtime = util_models.RuntimeOptions()
return await self.search_editing_project_with_options_async(request, runtime)
def list_templates_with_options(
self,
request: ice20201109_models.ListTemplatesRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListTemplatesResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListTemplatesResponse(),
self.do_rpcrequest('ListTemplates', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def list_templates_with_options_async(
self,
request: ice20201109_models.ListTemplatesRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListTemplatesResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListTemplatesResponse(),
await self.do_rpcrequest_async('ListTemplates', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def list_templates(
self,
request: ice20201109_models.ListTemplatesRequest,
) -> ice20201109_models.ListTemplatesResponse:
runtime = util_models.RuntimeOptions()
return self.list_templates_with_options(request, runtime)
async def list_templates_async(
self,
request: ice20201109_models.ListTemplatesRequest,
) -> ice20201109_models.ListTemplatesResponse:
runtime = util_models.RuntimeOptions()
return await self.list_templates_with_options_async(request, runtime)
def delete_editing_projects_with_options(
self,
request: ice20201109_models.DeleteEditingProjectsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteEditingProjectsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteEditingProjectsResponse(),
self.do_rpcrequest('DeleteEditingProjects', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def delete_editing_projects_with_options_async(
self,
request: ice20201109_models.DeleteEditingProjectsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.DeleteEditingProjectsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.DeleteEditingProjectsResponse(),
await self.do_rpcrequest_async('DeleteEditingProjects', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def delete_editing_projects(
self,
request: ice20201109_models.DeleteEditingProjectsRequest,
) -> ice20201109_models.DeleteEditingProjectsResponse:
runtime = util_models.RuntimeOptions()
return self.delete_editing_projects_with_options(request, runtime)
async def delete_editing_projects_async(
self,
request: ice20201109_models.DeleteEditingProjectsRequest,
) -> ice20201109_models.DeleteEditingProjectsResponse:
runtime = util_models.RuntimeOptions()
return await self.delete_editing_projects_with_options_async(request, runtime)
def get_media_info_with_options(
self,
request: ice20201109_models.GetMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.GetMediaInfoResponse(),
self.do_rpcrequest('GetMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def get_media_info_with_options_async(
self,
request: ice20201109_models.GetMediaInfoRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetMediaInfoResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.GetMediaInfoResponse(),
await self.do_rpcrequest_async('GetMediaInfo', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def get_media_info(
self,
request: ice20201109_models.GetMediaInfoRequest,
) -> ice20201109_models.GetMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return self.get_media_info_with_options(request, runtime)
async def get_media_info_async(
self,
request: ice20201109_models.GetMediaInfoRequest,
) -> ice20201109_models.GetMediaInfoResponse:
runtime = util_models.RuntimeOptions()
return await self.get_media_info_with_options_async(request, runtime)
def submit_smart_job_with_options(
self,
request: ice20201109_models.SubmitSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitSmartJobResponse(),
self.do_rpcrequest('SubmitSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_smart_job_with_options_async(
self,
request: ice20201109_models.SubmitSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitSmartJobResponse(),
await self.do_rpcrequest_async('SubmitSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_smart_job(
self,
request: ice20201109_models.SubmitSmartJobRequest,
) -> ice20201109_models.SubmitSmartJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_smart_job_with_options(request, runtime)
async def submit_smart_job_async(
self,
request: ice20201109_models.SubmitSmartJobRequest,
) -> ice20201109_models.SubmitSmartJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_smart_job_with_options_async(request, runtime)
def submit_delogo_job_with_options(
self,
request: ice20201109_models.SubmitDelogoJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitDelogoJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitDelogoJobResponse(),
self.do_rpcrequest('SubmitDelogoJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_delogo_job_with_options_async(
self,
request: ice20201109_models.SubmitDelogoJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitDelogoJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitDelogoJobResponse(),
await self.do_rpcrequest_async('SubmitDelogoJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_delogo_job(
self,
request: ice20201109_models.SubmitDelogoJobRequest,
) -> ice20201109_models.SubmitDelogoJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_delogo_job_with_options(request, runtime)
async def submit_delogo_job_async(
self,
request: ice20201109_models.SubmitDelogoJobRequest,
) -> ice20201109_models.SubmitDelogoJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_delogo_job_with_options_async(request, runtime)
def update_template_with_options(
self,
request: ice20201109_models.UpdateTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.UpdateTemplateResponse(),
self.do_rpcrequest('UpdateTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def update_template_with_options_async(
self,
request: ice20201109_models.UpdateTemplateRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateTemplateResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.UpdateTemplateResponse(),
await self.do_rpcrequest_async('UpdateTemplate', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def update_template(
self,
request: ice20201109_models.UpdateTemplateRequest,
) -> ice20201109_models.UpdateTemplateResponse:
runtime = util_models.RuntimeOptions()
return self.update_template_with_options(request, runtime)
async def update_template_async(
self,
request: ice20201109_models.UpdateTemplateRequest,
) -> ice20201109_models.UpdateTemplateResponse:
runtime = util_models.RuntimeOptions()
return await self.update_template_with_options_async(request, runtime)
def submit_audio_produce_job_with_options(
self,
request: ice20201109_models.SubmitAudioProduceJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitAudioProduceJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitAudioProduceJobResponse(),
self.do_rpcrequest('SubmitAudioProduceJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_audio_produce_job_with_options_async(
self,
request: ice20201109_models.SubmitAudioProduceJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitAudioProduceJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitAudioProduceJobResponse(),
await self.do_rpcrequest_async('SubmitAudioProduceJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_audio_produce_job(
self,
request: ice20201109_models.SubmitAudioProduceJobRequest,
) -> ice20201109_models.SubmitAudioProduceJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_audio_produce_job_with_options(request, runtime)
async def submit_audio_produce_job_async(
self,
request: ice20201109_models.SubmitAudioProduceJobRequest,
) -> ice20201109_models.SubmitAudioProduceJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_audio_produce_job_with_options_async(request, runtime)
def submit_media_producing_job_with_options(
self,
request: ice20201109_models.SubmitMediaProducingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitMediaProducingJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitMediaProducingJobResponse(),
self.do_rpcrequest('SubmitMediaProducingJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_media_producing_job_with_options_async(
self,
request: ice20201109_models.SubmitMediaProducingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitMediaProducingJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitMediaProducingJobResponse(),
await self.do_rpcrequest_async('SubmitMediaProducingJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_media_producing_job(
self,
request: ice20201109_models.SubmitMediaProducingJobRequest,
) -> ice20201109_models.SubmitMediaProducingJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_media_producing_job_with_options(request, runtime)
async def submit_media_producing_job_async(
self,
request: ice20201109_models.SubmitMediaProducingJobRequest,
) -> ice20201109_models.SubmitMediaProducingJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_media_producing_job_with_options_async(request, runtime)
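    # Usage sketch (comments only, not part of the generated client): media
    # producing is asynchronous, so a typical caller submits a job and then
    # polls get_media_producing_job with the returned job id. Field names such
    # as `job_id` below are assumptions based on the model names, not confirmed
    # by this file.
    #
    #   submit_resp = client.submit_media_producing_job(
    #       ice20201109_models.SubmitMediaProducingJobRequest(...))
    #   job_id = submit_resp.body.job_id
    #   status_resp = client.get_media_producing_job(
    #       ice20201109_models.GetMediaProducingJobRequest(job_id=job_id))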
def update_smart_job_with_options(
self,
request: ice20201109_models.UpdateSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateSmartJobResponse(),
self.do_rpcrequest('UpdateSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def update_smart_job_with_options_async(
self,
request: ice20201109_models.UpdateSmartJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.UpdateSmartJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.UpdateSmartJobResponse(),
await self.do_rpcrequest_async('UpdateSmartJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def update_smart_job(
self,
request: ice20201109_models.UpdateSmartJobRequest,
) -> ice20201109_models.UpdateSmartJobResponse:
runtime = util_models.RuntimeOptions()
return self.update_smart_job_with_options(request, runtime)
async def update_smart_job_async(
self,
request: ice20201109_models.UpdateSmartJobRequest,
) -> ice20201109_models.UpdateSmartJobResponse:
runtime = util_models.RuntimeOptions()
return await self.update_smart_job_with_options_async(request, runtime)
def list_all_public_media_tags_with_options(
self,
request: ice20201109_models.ListAllPublicMediaTagsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListAllPublicMediaTagsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListAllPublicMediaTagsResponse(),
self.do_rpcrequest('ListAllPublicMediaTags', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def list_all_public_media_tags_with_options_async(
self,
request: ice20201109_models.ListAllPublicMediaTagsRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListAllPublicMediaTagsResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListAllPublicMediaTagsResponse(),
await self.do_rpcrequest_async('ListAllPublicMediaTags', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def list_all_public_media_tags(
self,
request: ice20201109_models.ListAllPublicMediaTagsRequest,
) -> ice20201109_models.ListAllPublicMediaTagsResponse:
runtime = util_models.RuntimeOptions()
return self.list_all_public_media_tags_with_options(request, runtime)
async def list_all_public_media_tags_async(
self,
request: ice20201109_models.ListAllPublicMediaTagsRequest,
) -> ice20201109_models.ListAllPublicMediaTagsResponse:
runtime = util_models.RuntimeOptions()
return await self.list_all_public_media_tags_with_options_async(request, runtime)
def submit_matting_job_with_options(
self,
request: ice20201109_models.SubmitMattingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitMattingJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitMattingJobResponse(),
self.do_rpcrequest('SubmitMattingJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_matting_job_with_options_async(
self,
request: ice20201109_models.SubmitMattingJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitMattingJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitMattingJobResponse(),
await self.do_rpcrequest_async('SubmitMattingJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_matting_job(
self,
request: ice20201109_models.SubmitMattingJobRequest,
) -> ice20201109_models.SubmitMattingJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_matting_job_with_options(request, runtime)
async def submit_matting_job_async(
self,
request: ice20201109_models.SubmitMattingJobRequest,
) -> ice20201109_models.SubmitMattingJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_matting_job_with_options_async(request, runtime)
def get_event_callback_with_options(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEventCallbackResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.GetEventCallbackResponse(),
self.do_rpcrequest('GetEventCallback', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def get_event_callback_with_options_async(
self,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetEventCallbackResponse:
req = open_api_models.OpenApiRequest()
return TeaCore.from_map(
ice20201109_models.GetEventCallbackResponse(),
await self.do_rpcrequest_async('GetEventCallback', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def get_event_callback(self) -> ice20201109_models.GetEventCallbackResponse:
runtime = util_models.RuntimeOptions()
return self.get_event_callback_with_options(runtime)
async def get_event_callback_async(self) -> ice20201109_models.GetEventCallbackResponse:
runtime = util_models.RuntimeOptions()
return await self.get_event_callback_with_options_async(runtime)
def list_public_media_basic_infos_with_options(
self,
request: ice20201109_models.ListPublicMediaBasicInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListPublicMediaBasicInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListPublicMediaBasicInfosResponse(),
self.do_rpcrequest('ListPublicMediaBasicInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def list_public_media_basic_infos_with_options_async(
self,
request: ice20201109_models.ListPublicMediaBasicInfosRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.ListPublicMediaBasicInfosResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.ListPublicMediaBasicInfosResponse(),
await self.do_rpcrequest_async('ListPublicMediaBasicInfos', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def list_public_media_basic_infos(
self,
request: ice20201109_models.ListPublicMediaBasicInfosRequest,
) -> ice20201109_models.ListPublicMediaBasicInfosResponse:
runtime = util_models.RuntimeOptions()
return self.list_public_media_basic_infos_with_options(request, runtime)
async def list_public_media_basic_infos_async(
self,
request: ice20201109_models.ListPublicMediaBasicInfosRequest,
) -> ice20201109_models.ListPublicMediaBasicInfosResponse:
runtime = util_models.RuntimeOptions()
return await self.list_public_media_basic_infos_with_options_async(request, runtime)
def submit_cover_job_with_options(
self,
request: ice20201109_models.SubmitCoverJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitCoverJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitCoverJobResponse(),
self.do_rpcrequest('SubmitCoverJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_cover_job_with_options_async(
self,
request: ice20201109_models.SubmitCoverJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitCoverJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitCoverJobResponse(),
await self.do_rpcrequest_async('SubmitCoverJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_cover_job(
self,
request: ice20201109_models.SubmitCoverJobRequest,
) -> ice20201109_models.SubmitCoverJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_cover_job_with_options(request, runtime)
async def submit_cover_job_async(
self,
request: ice20201109_models.SubmitCoverJobRequest,
) -> ice20201109_models.SubmitCoverJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_cover_job_with_options_async(request, runtime)
def get_smart_handle_job_with_options(
self,
request: ice20201109_models.GetSmartHandleJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetSmartHandleJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetSmartHandleJobResponse(),
self.do_rpcrequest('GetSmartHandleJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def get_smart_handle_job_with_options_async(
self,
request: ice20201109_models.GetSmartHandleJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.GetSmartHandleJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.GetSmartHandleJobResponse(),
await self.do_rpcrequest_async('GetSmartHandleJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def get_smart_handle_job(
self,
request: ice20201109_models.GetSmartHandleJobRequest,
) -> ice20201109_models.GetSmartHandleJobResponse:
runtime = util_models.RuntimeOptions()
return self.get_smart_handle_job_with_options(request, runtime)
async def get_smart_handle_job_async(
self,
request: ice20201109_models.GetSmartHandleJobRequest,
) -> ice20201109_models.GetSmartHandleJobResponse:
runtime = util_models.RuntimeOptions()
return await self.get_smart_handle_job_with_options_async(request, runtime)
def submit_h2vjob_with_options(
self,
request: ice20201109_models.SubmitH2VJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitH2VJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitH2VJobResponse(),
self.do_rpcrequest('SubmitH2VJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
async def submit_h2vjob_with_options_async(
self,
request: ice20201109_models.SubmitH2VJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitH2VJobResponse:
UtilClient.validate_model(request)
req = open_api_models.OpenApiRequest(
body=UtilClient.to_map(request)
)
return TeaCore.from_map(
ice20201109_models.SubmitH2VJobResponse(),
await self.do_rpcrequest_async('SubmitH2VJob', '2020-11-09', 'HTTPS', 'POST', 'AK', 'json', req, runtime)
)
def submit_h2vjob(
self,
request: ice20201109_models.SubmitH2VJobRequest,
) -> ice20201109_models.SubmitH2VJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_h2vjob_with_options(request, runtime)
async def submit_h2vjob_async(
self,
request: ice20201109_models.SubmitH2VJobRequest,
) -> ice20201109_models.SubmitH2VJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_h2vjob_with_options_async(request, runtime)
def submit_pptcut_job_with_options(
self,
request: ice20201109_models.SubmitPPTCutJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitPPTCutJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.SubmitPPTCutJobResponse(),
self.do_rpcrequest('SubmitPPTCutJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
async def submit_pptcut_job_with_options_async(
self,
request: ice20201109_models.SubmitPPTCutJobRequest,
runtime: util_models.RuntimeOptions,
) -> ice20201109_models.SubmitPPTCutJobResponse:
UtilClient.validate_model(request)
query = OpenApiUtilClient.query(UtilClient.to_map(request))
req = open_api_models.OpenApiRequest(
query=query
)
return TeaCore.from_map(
ice20201109_models.SubmitPPTCutJobResponse(),
await self.do_rpcrequest_async('SubmitPPTCutJob', '2020-11-09', 'HTTPS', 'GET', 'AK', 'json', req, runtime)
)
def submit_pptcut_job(
self,
request: ice20201109_models.SubmitPPTCutJobRequest,
) -> ice20201109_models.SubmitPPTCutJobResponse:
runtime = util_models.RuntimeOptions()
return self.submit_pptcut_job_with_options(request, runtime)
async def submit_pptcut_job_async(
self,
request: ice20201109_models.SubmitPPTCutJobRequest,
) -> ice20201109_models.SubmitPPTCutJobResponse:
runtime = util_models.RuntimeOptions()
return await self.submit_pptcut_job_with_options_async(request, runtime)
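# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated SDK). The client class name `Client`,
# its import path, the endpoint, and the `media_id` field are assumptions based
# on common Tea/OpenAPI SDK conventions, not confirmed by this file.
# ---------------------------------------------------------------------------
#   import os
#   from alibabacloud_tea_openapi import models as open_api_models
#   from alibabacloud_ice20201109.client import Client
#   from alibabacloud_ice20201109 import models as ice20201109_models
#
#   config = open_api_models.Config(
#       access_key_id=os.environ["ALIBABA_CLOUD_ACCESS_KEY_ID"],
#       access_key_secret=os.environ["ALIBABA_CLOUD_ACCESS_KEY_SECRET"],
#       endpoint="ice.cn-shanghai.aliyuncs.com",
#   )
#   client = Client(config)
#   resp = client.get_media_info(
#       ice20201109_models.GetMediaInfoRequest(media_id="<your-media-id>"))
#   print(resp.body)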
# --- File: ptsemseg/loader/__init__.py (repo: anonymousTSPANet/TSPANet) ---
import json
from ptsemseg.loader.pascal_voc_loader import pascalVOCLoader
from ptsemseg.loader.camvid_loader import camvidLoader
from ptsemseg.loader.ade20k_loader import ADE20KLoader
from ptsemseg.loader.mit_sceneparsing_benchmark_loader import MITSceneParsingBenchmarkLoader
from ptsemseg.loader.cityscapes_loader import cityscapesLoader
from ptsemseg.loader.nyuv2_loader import NYUv2Loader
from ptsemseg.loader.sunrgbd_loader import SUNRGBDLoader
from ptsemseg.loader.mapillary_vistas_loader import mapillaryVistasLoader
from ptsemseg.loader.add_loader import addLoader
def get_loader(name):
"""get_loader
:param name:
"""
return {
"pascal": pascalVOCLoader,
"camvid": camvidLoader,
"ade20k": ADE20KLoader,
"mit_sceneparsing_benchmark": MITSceneParsingBenchmarkLoader,
"cityscapes": cityscapesLoader,
"nyuv2": NYUv2Loader,
"sunrgbd": SUNRGBDLoader,
"vistas": mapillaryVistasLoader,
"add": addLoader,
}[name]
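# Usage sketch (not in the original file): `get_loader` maps a dataset key to a
# loader class, which the caller then instantiates. The constructor arguments
# below are assumptions based on the usual ptsemseg loader signature.
#
#   loader_cls = get_loader("cityscapes")
#   train_set = loader_cls(root="/path/to/cityscapes", split="train", is_transform=True)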
# --- File: 05_TemporalDifference/sarsa.py (repo: gfluz94/Reinforcement_Learning) ---
import numpy as np
class GridWorld():
def __init__(self, width:int, height:int, start:tuple):
self.width = width
self.height = height
self.start = start
self.i = start[0]
self.j = start[1]
def set(self, rewards:dict, actions=["up", "down", "right", "left"], wall=None):
self.actions = actions
self.wall = wall
self.terminal_states = list(rewards.keys())
self.possible_actions = {(i,j):[] for i in range(self.height) for j in range(self.width) if (i,j)!=self.wall}
for (i, j) in self.possible_actions.keys():
if "up" in self.actions and i-1>=0 and (i-1, j)!=self.wall:
self.possible_actions[(i, j)].append("up")
if "left" in self.actions and j-1>=0 and (i, j-1)!=self.wall:
self.possible_actions[(i, j)].append("left")
if "down" in self.actions and i+1<self.height and (i+1, j)!=self.wall:
self.possible_actions[(i, j)].append("down")
if "right" in self.actions and j+1<self.width and (i, j+1)!=self.wall:
self.possible_actions[(i, j)].append("right")
if self.game_over():
raise ValueError("You cannot assign start state as terminal state.")
self.rewards = {}
for i in range(self.height):
for j in range(self.width):
if (i, j) in rewards.keys():
self.rewards[(i, j)] = rewards[(i,j)]
else:
self.rewards[(i, j)] = -0.1
def draw_grid(self):
print()
print("*---"*self.width, end="*\n")
for i in range(self.height):
for j in range(self.width):
if (i,j)==(self.i, self.j):
print("| x ", end="")
elif (i,j)==self.wall:
print("|///", end="")
else:
print("| ", end="")
print("|")
print("*---"*self.width, end="*\n")
def current_state(self):
return (self.i, self.j)
def reset_game(self):
self.i, self.j = self.start
def game_over(self):
return (self.i, self.j) in self.terminal_states
class Robot():
def __init__(self, probability_action=1):
self.probability_action = probability_action
self.state_history = []
def set_Q(self, env):
self.Q = {(i,j):{} for i in range(env.height) for j in range(env.width)}
self.N = {(i,j):{} for i in range(env.height) for j in range(env.width)}
for key in self.Q.keys():
if key==env.wall:
continue
for action in env.possible_actions[key]:
self.Q[key][action] = 0
self.N[key][action] = 1
def get_next_state(self, action, state, env, probability=1):
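        # probability==1 gives deterministic moves; otherwise the action may slip to a perpendicular direction (see below)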
if probability==1:
if action=="up":
return (state[0]-1, state[1])
elif action=="down":
return (state[0]+1, state[1])
elif action=="right":
return (state[0], state[1]+1)
elif action=="left":
return (state[0], state[1]-1)
else:
r = np.random.rand()
residual = 1-probability
if r<residual:
if action=="up" or action=="down":
actions=["right", "left"]
elif action=="right" or action=="left":
actions=["up", "down"]
action = np.random.choice(actions)
i, j = state
if action=="up":
if i-1<0 or (i-1,j)==env.wall:
return (i,j)
return (i-1, j)
elif action=="down":
if i+1>=env.height or (i+1,j)==env.wall:
return (i,j)
return (i+1, j)
elif action=="right":
if j+1>=env.width or (i,j+1)==env.wall:
return (i,j)
return (i, j+1)
elif action=="left":
if j-1<0 or (i,j-1)==env.wall:
return (i,j)
return (i, j-1)
def next_action(self, env, state, epsilon=-1):
current_state = state
possible_actions = env.possible_actions[current_state]
if np.random.rand()<epsilon:
action = np.random.choice(possible_actions)
else:
maxQ = -np.inf
action = None
for next_action in possible_actions:
next_state = self.get_next_state(next_action, current_state, env)
for next_next_action in env.possible_actions[next_state]:
if self.Q[next_state][next_next_action]>maxQ:
maxQ = self.Q[next_state][next_next_action]
action = next_action
return action
def go_through_episode(self, env, epsilon, alpha):
current_position = env.start
self.state_history.append(current_position)
action = self.next_action(env, current_position, epsilon)
gamma = 0.9
while not env.game_over():
next_state = self.get_next_state(action, current_position, env, self.probability_action)
r = env.rewards[next_state]
next_action = self.next_action(env, next_state, epsilon)
new_alpha = alpha/self.N[current_position][action]
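            # SARSA update: Q(s,a) += alpha * (r + gamma*Q(s',a') - Q(s,a)), with alpha scaled by the stored visit count N(s,a)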
self.Q[current_position][action] += new_alpha*(r+gamma*self.Q[next_state][next_action]-self.Q[current_position][action])
env.i, env.j = next_state
current_position = next_state
action = next_action
env.reset_game()
def take_action(self, env):
s = env.current_state()
        action = self.next_action(env, s)
        env.i, env.j = self.get_next_state(action, s, env, self.probability_action)
def reset_state_history(self):
self.state_history = []
def print_learning(self, env):
policy = {}
for i in range(env.height):
for j in range(env.width):
s = (i,j)
if s in env.terminal_states:
best_action = "$"
elif s!=env.wall:
possible_actions = env.possible_actions[s]
maxQ = -np.inf
for action in possible_actions:
next_s = self.get_next_state(action, s, env)
for next_next_action in env.possible_actions[next_s]:
if self.Q[next_s][next_next_action]>maxQ:
maxQ = self.Q[next_s][next_next_action]
best_action = action
policy[s] = best_action
print()
print("*--------"*env.width, end="*\n")
self.V = {}
for i in range(env.height):
for j in range(env.width):
if (i,j)==env.wall:
print("|////////", end="")
else:
if policy[(i,j)]!="$":
self.V[(i,j)] = self.Q[(i,j)][policy[(i,j)]]
if self.Q[(i,j)][policy[(i,j)]]>0:
print(f"| +{self.Q[(i,j)][policy[(i,j)]]:.3f} ", end="")
else:
print(f"| {self.Q[(i,j)][policy[(i,j)]]:.3f} ", end="")
else:
self.V[(i,j)] = 0
print(f"| +0.000 ", end="")
print("|")
print("*--------"*env.width, end="*\n")
def final_policy(self, env):
policy = {}
for i in range(env.height):
for j in range(env.width):
s = (i,j)
if s in env.terminal_states:
best_action = "$"
elif s!=env.wall:
possible_actions = env.possible_actions[s]
maxV = -np.inf
for action in possible_actions:
next_s = self.get_next_state(action, s, env)
if self.V[next_s]>maxV:
maxV = self.V[next_s]
best_action = action[0].upper()
policy[s] = best_action
print()
print("*---"*env.width, end="*\n")
for i in range(env.height):
for j in range(env.width):
if (i,j)==env.wall:
print("|///", end="")
else:
print(f"| {policy[(i,j)]} ", end="")
print("|")
print("*---"*env.width, end="*\n")
def play_game(robot, env, draw=False):
env.reset_game()
while not env.game_over():
if draw:
env.draw_grid()
robot.take_action(env)
if draw:
env.draw_grid()
if __name__ == "__main__":
env = GridWorld(width=4, height=3, start=(2,0))
rewards = {
(0,3): 1,
(1,3): -1
}
actions = ["up", "down", "right", "left"]
env.set(rewards, actions, wall=(1,1))
robot = Robot(probability_action=1)
robot.set_Q(env)
print("\nStarting learning process...")
episodes = 5000
epsilon = 0.5
for i in range(episodes):
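        # Note: epsilon is divided by (i+1) on every episode, so exploration decays extremely quickly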
epsilon /= (i+1)
alpha = 0.5
if (i+1)%100==0:
print(f">> {i+1}/{episodes}")
robot.go_through_episode(env, epsilon, alpha)
print("\nLearning process is finished...")
print("\nLearning:")
robot.print_learning(env)
print("\nFinal Policy:")
robot.final_policy(env)
|
[
"noreply@github.com"
] |
noreply@github.com
|
2de021e9e85df16452f8d08a2931aff200c9ce8a
|
e7a3961e94ffce63f02a3d5bb92b5850005c7955
|
/django_tuts/forms_tuts/models.py
|
4eca909a170d97954cf1c7b64b0d370e80410ad5
|
[] |
no_license
|
sbhusal123/django-collections
|
219c032c97dd7bc2b3c5961f71fb8da5e4826ec1
|
4efed68d29fd1e383d15b303584fc4eb183aff98
|
refs/heads/master
| 2022-11-06T17:32:25.776023
| 2020-06-20T07:58:54
| 2020-06-20T07:58:54
| 273,394,086
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,007
|
py
|
from datetime import datetime
from django.db import models
from django.contrib.auth import get_user_model
User = get_user_model()
class Products(models.Model):
"""Products for sale"""
name = models.CharField(max_length=256, unique=True, null=False, blank=False)
price = models.DecimalField(max_digits=8, decimal_places=2)
def __str__(self):
return f'{self.name}: {self.price}'
class OrderItem(models.Model):
"""Item collection in an order"""
product = models.ForeignKey(Products, on_delete=models.CASCADE)
quantity = models.IntegerField(default=1, blank=False, null=False) # validate such that no less than 3
class Order(models.Model):
"""Order information"""
items = models.ManyToManyField('OrderItem')
date = models.DateTimeField(default=datetime.now, blank=False) # validate such that not yesterday
total = models.DecimalField(max_digits=8, decimal_places=2)
user = models.ForeignKey(User, related_name='orders', on_delete=models.CASCADE)
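# Usage sketch from a shell or view (assumes migrations applied; `user` is an existing User instance):
#   product = Products.objects.create(name="Widget", price="9.99")
#   item = OrderItem.objects.create(product=product, quantity=2)
#   order = Order.objects.create(total="19.98", user=user)
#   order.items.add(item)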
|
[
"suryabhusal11@gmail.com"
] |
suryabhusal11@gmail.com
|
961bfb823a57ed38a13cb9ffb07cf646290d2769
|
8fac8d3ad6cafaf481ba8de8a51a4abab8ef07e6
|
/test/us_extract/client_test.py
|
6663f7fbddbf8196947bd486bf58c3aac9562524
|
[
"Apache-2.0"
] |
permissive
|
IFarhankhan/smartystreets-python-sdk
|
18ae4024853dd0c84cfd357ed501bed3ecf0efb2
|
c724b6ea5d2a37b3334517aed3377cc5f44ee831
|
refs/heads/master
| 2021-06-13T16:54:59.400944
| 2017-04-17T20:55:28
| 2017-04-17T20:55:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,004
|
py
|
import unittest
from smartystreets_python_sdk import URLPrefixSender, Response
from smartystreets_python_sdk.us_extract import Client, Lookup
from smartystreets_python_sdk.exceptions import SmartyException
from smartystreets_python_sdk.us_extract import Result
from test.mocks import *
class TestClient(unittest.TestCase):
def test_sending_body_only_lookup(self):
capturing_sender = RequestCapturingSender()
sender = URLPrefixSender('http://localhost/', capturing_sender)
serializer = FakeSerializer(None)
client = Client(sender, serializer)
expected_payload = 'Hello, World!'
client.send(Lookup('Hello, World!'))
self.assertEqual(expected_payload, capturing_sender.request.payload)
def test_sending_fully_populated_lookup(self):
capturing_sender = RequestCapturingSender()
sender = URLPrefixSender('http://localhost/', capturing_sender)
serializer = FakeSerializer(None)
client = Client(sender, serializer)
lookup = Lookup('1')
lookup.html = True
lookup.aggressive = True
lookup.addresses_have_line_breaks = True
lookup.addresses_per_line = 2
client.send(lookup)
request = capturing_sender.request
self.assertEqual('true', request.parameters['html'])
self.assertEqual('true', request.parameters['aggressive'])
self.assertEqual('true', request.parameters['addr_line_breaks'])
self.assertEqual(2, request.parameters['addr_per_line'])
def test_reject_blank_lookup(self):
capturing_sender = RequestCapturingSender()
sender = URLPrefixSender('http://localhost/', capturing_sender)
serializer = FakeSerializer(None)
client = Client(sender, serializer)
self.assertRaises(SmartyException, client.send, Lookup())
def test_deserialize_called_with_response_body(self):
response = Response('Hello, World!', 0)
sender = MockSender(response)
deserializer = FakeDeserializer({})
client = Client(sender, deserializer)
client.send(Lookup('Hello, World!'))
self.assertEqual(response.payload, deserializer.input)
def test_result_correctly_assigned_to_corresponding_lookup(self):
raw_result = {"meta": {}, "addresses": [{"text": "Hello, World!"}]}
expected_result = Result(raw_result)
lookup = Lookup('Hello, World!')
sender = MockSender(Response('[]', 0))
deserializer = FakeDeserializer(raw_result)
client = Client(sender, deserializer)
client.send(lookup)
self.assertEqual(expected_result.addresses[0].text, lookup.result.addresses[0].text)
def test_content_type_set_correctly(self):
sender = RequestCapturingSender()
serializer = FakeSerializer(None)
client = Client(sender, serializer)
lookup = Lookup("Hello, World!")
client.send(lookup)
self.assertEqual("text/plain", sender.request.content_type)
|
[
"neo@smartystreets.com"
] |
neo@smartystreets.com
|
467159dbc7962cddb1433df59cdc1a26132ca3af
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/adjectives/_waxiest.py
|
ed828779dd11c8bd004322d280f17905dcb1f528
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
py
|
from xai.brain.wordbase.adjectives._waxy import _WAXY
# class header
class _WAXIEST(_WAXY):
def __init__(self,):
_WAXY.__init__(self)
self.name = "WAXIEST"
self.specie = 'adjectives'
self.basic = "waxy"
self.jsondata = {}
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
2d2d19cf3287e502c55a04eaabe7c6cd9cd996ec
|
6bdcd513f0e8d500e56f87d44d4a9ba11c93f272
|
/utilities/wds_from_tfrecords.py
|
479a8b67785b9e042e299fc432c278eb642b55e3
|
[
"MIT"
] |
permissive
|
rom1504/DALLE-datasets
|
4d9416772b73befdb85867781bbab5c6bbff0cec
|
f0b039feb37abec68a81b9425fb71f94a713d9cf
|
refs/heads/main
| 2023-06-17T07:07:12.396964
| 2021-07-05T11:20:02
| 2021-07-05T11:20:02
| 383,932,361
| 2
| 1
|
MIT
| 2021-07-07T21:52:08
| 2021-07-07T21:52:07
| null |
UTF-8
|
Python
| false
| false
| 6,918
|
py
|
import tensorflow as tf
import webdataset as wds
from pathlib import Path
import argparse
import os
import timeit
import hashlib
from io import BytesIO
from PIL import Image
parser = argparse.ArgumentParser("""Generate sharded dataset from tfrecord-files.""")
parser.add_argument("--maxsize", type=float, default=1e9)
parser.add_argument("--maxcount", type=float, default=100000)
parser.add_argument(
"--compression",
dest="compression",
action="store_true",
help="Creates compressed .tar.gz files instead of uncompressed .tar files."
)
parser.add_argument(
"--use_encoder",
dest="use_encoder",
action="store_true",
help="Uses encoder on unknown filetimes (the suffix in the keep_keys argument)."
)
parser.add_argument(
"--keep_keys",
type=str,
default="image.pyd,label.cls",
help="Only keep the columns from the comma separated keys from that argument. The dot separated suffix is the filetype."
)
parser.add_argument(
"--remove_duplicates",
dest="remove_duplicates",
default="image",
help="Remove duplicates from given column name. (e.g. --remove_duplicates image)"
)
parser.add_argument(
"--min_max_size",
dest="min_max_size",
default="192,320",
help="Discards smaller and resizes larger images. (e.g. --min_max_size 256,320)"
)
parser.add_argument(
"--report_every",
type=int,
default="1000",
help="Report every n iterations."
)
parser.add_argument(
"--shards",
default="./shards",
help="directory where shards are written"
)
parser.add_argument(
"--shard_prefix",
default="wds_",
help="prefix of shards' filenames created in the shards-folder"
)
parser.add_argument(
"--data",
default="./tfr",
help="directory path containing tfrecord files",
)
args = parser.parse_args()
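# Parse --keep_keys such as "image.pyd,label.cls" into {"image": "pyd", "label": "cls"} (column name -> file-type suffix)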
KEEP_KEYS = []
if args.keep_keys != '':
KEEP_KEYS = {x.split('.')[0]: x.split('.')[1] for x in args.keep_keys.split(',')}
SIZE = {}
if args.min_max_size != '':
SIZE = {
'min': int(args.min_max_size.split(',')[0]),
'max': int(args.min_max_size.split(',')[1])
}
assert args.maxsize > 10000000
assert args.maxcount < 1000000
assert os.path.isdir(os.path.join(args.data)), '{} does not exist.'.format(args.data)
os.makedirs(Path(args.shards), exist_ok=True)
tfrecord_files = [args.data + '/' + x for x in os.listdir(args.data) if x.split('.')[-1] == 'tfrecord']
total_files = len(tfrecord_files)
###### Feature description matching the tfrecord dataset
###### (edit the fields below to match your own tfrecord schema)
FEATURE_DESCRIPTION = {
    'sampleID': tf.io.FixedLenFeature([], tf.string),
    'image': tf.io.FixedLenFeature([], tf.string),
    'format': tf.io.FixedLenFeature([], tf.string),
    'label': tf.io.FixedLenFeature([], tf.string),
    'height': tf.io.FixedLenFeature([], tf.int64),
    'width': tf.io.FixedLenFeature([], tf.int64),
}
assert len(FEATURE_DESCRIPTION) > 0, 'Please provide the feature description to your tfrecord dataset.'
def wrapper(gen):
while True:
try:
yield next(gen)
except StopIteration:
break
except Exception as e:
print(e)
def _parse_example(example_proto):
example = tf.io.parse_single_example(example_proto, FEATURE_DESCRIPTION)
return example
pattern = os.path.join(args.shards, args.shard_prefix + f"%06d.tar" + (".gz" if args.compression else ''))
count = 0
# Arguments for removing duplicates
duplicate_count = 0
duplicate_md5 = set()
skip_duplicate = False
# Arguments for resizing / discarding images
discard_count = 0
resize_count = 0
skip_sizemismatch_or_corrupt = False
start = timeit.default_timer()
with wds.ShardWriter(pattern, maxsize=int(args.maxsize), maxcount=int(args.maxcount), encoder=args.use_encoder) as sink:
for tfrecord_file in tfrecord_files:
raw_dataset = tf.data.TFRecordDataset(tfrecord_file)
dataset = raw_dataset.map(_parse_example)
for item in wrapper(dataset.as_numpy_iterator()):
ds_key = "%09d" % count
sample = {
"__key__": ds_key,
}
if args.remove_duplicates != '':
valuehash = hashlib.md5(item[args.remove_duplicates]).hexdigest()
if valuehash in duplicate_md5:
duplicate_count += 1
skip_duplicate = True
else:
duplicate_md5.add(valuehash)
if skip_duplicate == False:
### Resize, discard or keep block
if args.min_max_size != '':
if item['width'] < SIZE['min'] and item['height'] < SIZE['min']:
discard_count += 1
skip_sizemismatch_or_corrupt = True
elif item['width'] > SIZE['max'] or item['height'] > SIZE['max']:
# Try opening and resizing image
try:
foo = Image.open(BytesIO(item['image']))
if foo.mode != 'RGB':
foo = foo.convert('RGB')
a = max(SIZE['max']/foo.size[0], SIZE['max']/foo.size[1])
foo = foo.resize((int(foo.size[0] * a), int(foo.size[1] * a)), Image.ANTIALIAS)
# Image to bytes
img_byte_arr = BytesIO()
foo.save(img_byte_arr, format='jpeg', optimize=True, quality=85)
item['image'] = img_byte_arr.getvalue()
except Exception as e:
print(e)
discard_count += 1
skip_sizemismatch_or_corrupt = True
else:
resize_count += 1
if skip_sizemismatch_or_corrupt == False:
#### Writing row to WebDataset file
for key in KEEP_KEYS:
sample[key + '.' + KEEP_KEYS[key] if args.use_encoder else key] = item[key]
sink.write(sample)
#### End writing row to WebDataset file
else:
skip_sizemismatch_or_corrupt = False
else:
skip_duplicate = False
if count % args.report_every == 0:
                print(' {:,}'.format(count), end='\r')
count += 1
stop = timeit.default_timer()
print('#################################################################################')
print('# Finished processing {:,} samples from tfrecord files.'.format(count))
print('# Process took {:.2f} seconds to finish.'.format(stop - start))
if (args.remove_duplicates != ''):
print('# Skipped {:,} duplicates from a total of {:,} items.'.format(duplicate_count, count))
if (args.min_max_size != ''):
print('# Discarded {:,} and resized {:,} images from remaining {:,} non-duplicates.'.format(discard_count, resize_count, count - duplicate_count))
print('# {:,} images remain in the Dataset.'.format(count - (duplicate_count + discard_count)))
print('# The WebDataset files can be found in {}.'.format(args.shards))
print('#################################################################################')
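# Example invocation (paths and values are illustrative):
#   python wds_from_tfrecords.py --data ./tfr --shards ./shards --maxcount 10000 --remove_duplicates image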
|
[
"robvanvolt@gmail.com"
] |
robvanvolt@gmail.com
|
8b9078061e1379d908d88310ba9a22318465690f
|
53c1fecdf3d52bc36086bd33cbede5da8f4d4b0c
|
/delicatewebizen/main/models.py
|
856cb6e59bcc11815e0976b08d586316dc4d68f6
|
[] |
no_license
|
pombredanne/example-app
|
2c5b2560dfc7c7c47e47b2adb57109b24a950211
|
706e0f3f0e15d28c146512fba49b5240fea35be7
|
refs/heads/master
| 2021-01-20T21:49:22.800372
| 2013-01-13T10:40:22
| 2013-01-13T10:40:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,182
|
py
|
from django.db import models
from django.http import HttpResponse
from django.template.response import SimpleTemplateResponse
from favorites.utils import BaseRender
from favorites.utils import ModelRenderMixin
class Link(models.Model, ModelRenderMixin):
title = models.CharField(max_length=255)
url = models.URLField()
class Render(BaseRender):
def full(self):
return self.render('main/link/full.html')
def preview(self):
return self.render('main/link/preview.html')
def inline(self):
return self.render('main/link/inline.html')
def __str__(self):
return self.title
class Poem(models.Model, ModelRenderMixin):
title = models.CharField(max_length=255)
body = models.TextField()
author = models.CharField(max_length=255)
class Render(BaseRender):
def full(self):
return self.render('main/poem/full.html')
def preview(self):
return self.render('main/poem/preview.html')
def inline(self):
return self.render('main/poem/inline.html')
def __str__(self):
return '%s by %s' % (self.title, self.author)
|
[
"amirouche.boubekki@gmail.com"
] |
amirouche.boubekki@gmail.com
|
6335c6a2e3c6af1cc255b9997be3316ef76b68de
|
1f3f3154ba64537b39284c01c73003ca50a29359
|
/与孩子一起学编程/chapter8/codelist 8-6.py
|
d268922abd407f78d8244d83fc9ef5ef3b5646e9
|
[] |
no_license
|
Chen-Isaac/pythonLearning
|
d00ffd473ef2fe0f8aba512c6174bb654115ba84
|
04e829cf3bd975491676f87bcb0dbacfa55ddd6c
|
refs/heads/master
| 2021-09-14T22:16:02.913336
| 2018-03-01T16:07:32
| 2018-03-01T16:07:32
| 114,447,004
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 90
|
py
|
import time
for i in range(10, 0, -1):
    print(i)
    time.sleep(1)
print("BLAST OFF!")
|
[
"cxx7821@gmail.com"
] |
cxx7821@gmail.com
|
f80a4ddfdd85a8b7cd32f06f3995cef5762127e8
|
5f8276fccfee82481545a15b96c39a67b70cdf08
|
/accounts/tests.py
|
83503bb7b7750715f754224ca0ed2e286c386fee
|
[
"MIT"
] |
permissive
|
alaorneto/pratinhas
|
7c27d681e5c0f3605c70b40a2b1728a09d45207a
|
699f9133fe952c767005c0a36d4531def836db8d
|
refs/heads/main
| 2022-08-25T11:53:46.020585
| 2021-10-17T22:14:21
| 2021-10-17T22:14:21
| 185,036,231
| 1
| 0
|
MIT
| 2022-08-11T14:51:09
| 2019-05-05T13:39:08
|
Python
|
UTF-8
|
Python
| false
| false
| 1,287
|
py
|
""" Testes do módulo de autenticação. """
from datetime import datetime
from django.shortcuts import get_object_or_404
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase, APIClient
class UsuarioTestCase(APITestCase):
""" Testes de operações com contas. """
client = None
user = None
username = 'test'
email = 'test@pratinhas.app'
password = 'Test1234!'
def setUp(self):
self.client = APIClient()
self.user = get_user_model().objects.create_user(self.username, self.email, self.password)
payload = {
"username": self.username,
"password": self.password
}
response = self.client.post("/api/token/", payload, format='json')
token = response.data["access"]
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token)
def test_registrar_usuario(self):
usuario = {
"username": 'alaor',
"password": 'Test12#',
"email": "test@test.com",
}
response = self.client.post("/api/accounts/register", usuario, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
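# Typically run through Django's test runner, e.g.: python manage.py test accounts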
|
[
"alaorneto@gmail.com"
] |
alaorneto@gmail.com
|
3daff0ac255348970fbdef54719b9f87a7e3d0e2
|
049122a17f5ca2e2778b9ccc334e31f6b73ef71a
|
/Proj029Pipelines/pipeline_metagenomeassembly.py
|
9bad0c4d6ea3154de593f11a9237b8a3a92a6f56
|
[] |
no_license
|
CGATOxford/proj029
|
55c43412eb0b86330a111e855508ea9dab1bbab1
|
f0a8ea63b4f086e673aa3bf8b7d3b9749261b525
|
refs/heads/master
| 2016-09-15T13:55:41.647489
| 2016-03-09T10:17:26
| 2016-03-09T10:17:26
| 32,919,136
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 64,283
|
py
|
"""
=============================
Metagenome assembly pipeline
=============================
:Author: Nick Ilott
:Release: $Id$
:Date: |today|
:Tags: Python
The metagenome assembly pipeline takes reads from one or more NGS
experiments and assembles them into contigs / scaffolds. Genes present on
contigs are predicted using ORF prediction software.
Overview
========
The pipeline assumes the data derive from multiple tissues/conditions
(:term:`experiment`) with one or more biological and/or technical
replicates (:term:`replicate`). A :term:`replicate` within each
:term:`experiment` is a :term:`track`.
Assembly strategy
-----------------
While there exist many tools for assembling reads from single genomes,
only recently has software been specifically developed (or extended)
to allow for the assembly of metagenomes. The major factor affecting
the ability to assemble a metagenome is the diversity of the sample.
Accurate assembly of long contigs is difficult in the presence of many
species at differential abundances. This is in contrast to single
genome assembly where reads are (or should be) uniformly sampled
across the genome.
This pipeline therefore uses a range of available software for the assembly of metagenomes.
Considerations
--------------
Metagenomics is a young and rapidly developing field. There is, as
yet, no gold standard for assembly. It is likely that the presence of
multiple, highly related species will lead to the assembly of
chimeric contigs i.e. contigs derived from more than one species. It
is generally considered that longer K-mer lengths used in the
construction of the de-bruijn graph (for de-bruijn graph assemblers)
will result in fewer chimeras. Nevertheless, longer k-mers may also
result in more, short contigs being produced as a result of a
neccessity for a greater overlap between nodes in the graph. The
length of k-mer chosen is also dependent on the length of reads that
you are trying to assemble - longer reads means you can use longer
k-mers. Which k-mer to use in the assembly process is therefore
dependent on the data used and the expected complexity of the
sample. We make no effort here to advise on k-mer length.
Usage
=====
See :ref:`PipelineSettingUp` and :ref:`PipelineRunning` on general
information how to use CGAT pipelines.
Configuration
-------------
The pipeline requires a configured :file:`pipeline.ini` file.
The sphinxreport report requires a :file:`conf.py` and
:file:`sphinxreport.ini` file (see :ref:`PipelineDocumenation`). To
start with, use the files supplied with the :ref:`Example` data.
Input
-----
Reads
+++++
Reads are imported by placing files are linking to files in the
:term:`working directory`.
The default file format assumes the following convention:
<sample>-<condition>-<replicate>.<suffix>
``sample`` and ``condition`` make up an :term:`experiment`, while
``replicate`` denotes the :term:`replicate` within an
:term:`experiment`. The ``suffix`` determines the file type. The
following suffixes/file types are possible:
fastq.gz
Single-end reads in fastq format.
fastq.1.gz, fastq.2.gz
Paired-end reads in fastq format. The two fastq files must be sorted by read-pair.
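For example, ``stool-R1-rep1.fastq.1.gz`` and ``stool-R1-rep1.fastq.2.gz``
form one paired-end :term:`track` (the names here are purely illustrative).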
.. note::
Quality scores need to be of the same scale for all input files. Thus it might be
difficult to mix different formats.
Optional inputs
+++++++++++++++
Requirements
------------
On top of the default CGAT setup, the pipeline requires the following
software to be in the path:
+--------------------+-------------------+------------------------------------------------+
|*Program*           |*Version*          |*Purpose*                                       |
+--------------------+-------------------+------------------------------------------------+
|ray meta            |>=2.2.0            |Metagenome assembler                            |
+--------------------+-------------------+------------------------------------------------+
|meta-velvet         |>=1.2.02           |Metagenome assembler                            |
+--------------------+-------------------+------------------------------------------------+
|sga                 |                   |Genome assembler                                |
+--------------------+-------------------+------------------------------------------------+
|SOAPDenovo2         |                   |Genome assembler                                |
+--------------------+-------------------+------------------------------------------------+
|spades              |                   |Genome assembler                                |
+--------------------+-------------------+------------------------------------------------+
|idba_ud             |>=1.1.0            |Metagenome assembler                            |
+--------------------+-------------------+------------------------------------------------+
|MetaGeneMark        |>=1.0.0            |ORF prediction                                  |
+--------------------+-------------------+------------------------------------------------+
|bedtools            |>=2.17.0           |BED interval analysis suite                     |
+--------------------+-------------------+------------------------------------------------+
|bwa                 |>=0.5.9            |Short read alignment algorithm                  |
+--------------------+-------------------+------------------------------------------------+
|bowtie              |>=0.12.7           |Short read alignment algorithm                  |
+--------------------+-------------------+------------------------------------------------+
|bowtie2             |>=2.0.0            |Short read alignment algorithm                  |
+--------------------+-------------------+------------------------------------------------+
|blastn              |>=2.2.25           |Similarity searching algorithm (nucleotides)    |
+--------------------+-------------------+------------------------------------------------+
|blastp              |>=2.2.25           |Similarity searching algorithm (proteins)       |
+--------------------+-------------------+------------------------------------------------+
|hmmer               |>=3                |gene annotation based on HMM models             |
+--------------------+-------------------+------------------------------------------------+
Pipeline output
===============
The main output is the genome assembly - output as a fasta formatted file.
Additional outputs include taxon abundance estimation (metaphlan) and ORF
predictions (MetaGeneMark).
Additional outputs are stored in the database file :file:`csvdb`.
Glossary
========
.. glossary::
Code
====
"""
# load modules
from ruffus import *
import CGAT.Experiment as E
import logging as L
import CGAT.Database as Database
import CGAT.CSV as CSV
import sys
import os
import re
import shutil
import itertools
import math
import glob
import time
import gzip
import collections
import random
import numpy
import sqlite3
import CGAT.GTF as GTF
import CGAT.IOTools as IOTools
import CGAT.IndexedFasta as IndexedFasta
from rpy2.robjects import r as R
import rpy2.robjects as ro
import rpy2.robjects.vectors as rovectors
from rpy2.rinterface import RRuntimeError
import CGATPipelines.PipelineMapping as PipelineMapping
import CGATPipelines.PipelineMetagenomeAssembly as PipelineMetagenomeAssembly
import CGAT.FastaIterator as FastaIterator
import CGAT.Metaphlan as Metaphlan
import CGATPipelines.PipelineMappingQC as PipelineMappingQC
import pysam
import CGAT.Fastq as Fastq
###################################################
###################################################
###################################################
# Pipeline configuration
###################################################
# load options from the config file
import CGATPipelines.Pipeline as P
P.getParameters(
["pipeline.ini"])
PARAMS = P.PARAMS
###################################################################
###################################################################
# Helper functions mapping tracks to conditions, etc
###################################################################
import PipelineTracks
# collect fastq.gz tracks
TRACKS = PipelineTracks.Tracks(PipelineTracks.Sample3).loadFromDirectory(
glob.glob( "*.fastq.gz" ), "(\S+).fastq.gz" ) +\
PipelineTracks.Tracks(PipelineTracks.Sample3).loadFromDirectory(
glob.glob("*.fastq.1.gz"), "(\S+).fastq.1.gz")
ALL = PipelineTracks.Sample3()
EXPERIMENTS = PipelineTracks.Aggregate(TRACKS, labels=("condition", "tissue"))
CONDITIONS = PipelineTracks.Aggregate(TRACKS, labels=("condition", ))
TISSUES = PipelineTracks.Aggregate(TRACKS, labels=("tissue", ))
###################################################################
# Global flags
###################################################################
# AH: added default values for assemblers and coverage_mapper
# to allow import of pipeline script
ASSEMBLERS = P.asList(PARAMS.get("assemblers", ""))
MAPPER = PARAMS.get("coverage_mapper", 'bwa')
BOWTIE = MAPPER == "bowtie"
BOWTIE2 = MAPPER == "bowtie2"
BWA = MAPPER == "bwa"
###################################################################
###################################################################
###################################################################
def connect():
'''connect to database.
This method also attaches to helper databases.
'''
dbh = sqlite3.connect(PARAMS["database"])
return dbh
###################################################################
###################################################################
###################################################################
# Sequence files input
###################################################################
###################################################################
###################################################################
SEQUENCEFILES = ("*.fasta", "*.fasta.gz", "*.fasta.1.gz",
"*.fastq", "*.fastq.gz", "*.fastq.1.gz")
SEQUENCEFILES_REGEX = regex(
r"(\S+).(fasta$|fasta.gz|fasta.1.gz|fastq$|fastq.gz|fastq.1.gz)")
###################################################################
# Should reads be pooled prior to assembly
###################################################################
def pool_out(infiles):
'''
return outfile name dependent on
input pairedness
'''
# AH: patch required when importing pipeline
if len(infiles) == 0:
return ""
out = {"separate": "1",
False: ""}
inf = infiles[0]
paired = PipelineMetagenomeAssembly.PairedData().checkPairs(inf)
if paired:
paired = paired[0]
format = PipelineMetagenomeAssembly.PairedData().getFormat(inf)
outname = "pooled_reads.dir/agg-agg-agg.%s" % format
return outname
###################################################################
###################################################################
###################################################################
@active_if('pool_reads' in PARAMS and PARAMS["pool_reads"])
@follows(mkdir("pooled_reads.dir"))
# bit of a hack
@merge(SEQUENCEFILES, pool_out([x for x in glob.glob("*R*.fast*")
if not x.endswith(".2.gz")
and not x.endswith(".2")]))
def poolReadsAcrossConditions(infiles, outfile):
'''
pool reads across conditions
'''
statement = PipelineMetagenomeAssembly.pool_reads(infiles,
outfile)
P.run()
###################################################################
# Counting reads input
###################################################################
@transform(SEQUENCEFILES,
SEQUENCEFILES_REGEX,
r"\1.nreads")
def countReads(infile, outfile):
'''count number of reads in input files.'''
to_cluster = True
m = PipelineMapping.Counter()
statement = m.build((infile,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@merge(countReads, "reads_summary.load")
def loadReadCounts(infiles, outfile):
'''load read counts into database.'''
to_cluster = False
outf = P.getTempFile()
outf.write("track\ttotal_reads\n")
for infile in infiles:
track = P.snip(infile, ".nreads")
lines = IOTools.openFile(infile).readlines()
nreads = int(lines[0][:-1].split("\t")[1])
outf.write("%s\t%i\n" % (track, nreads))
outf.close()
inname = outf.name
tablename = P.toTable(outfile)
statement = '''python %(scriptsdir)s/csv2db.py -t %(tablename)s --log=%(outfile)s.log
< %(inname)s > %(outfile)s'''
P.run()
os.unlink(outf.name)
###################################################################
###################################################################
###################################################################
# Preprocess reads for IDBA
###################################################################
@active_if("idba" in ASSEMBLERS and PARAMS["pool_reads"])
@follows(mkdir("fasta.dir"))
@transform(poolReadsAcrossConditions, regex("(\S+).fastq.*gz"), r"fasta.dir/\1.fa")
def preprocessIdba(infile, outfile):
'''
preprocess pooled reads for IDBA
'''
# check for second read in the pair
if infile.endswith(".fastq.gz"):
E.info("converting fastq file to fasta file")
outf = open(outfile, "w")
for fastq in Fastq.iterate(IOTools.openFile(infile)):
outf.write("%s\n%s\n" % (">" + fastq.identifier, fastq.seq))
outf.close()
elif infile.endswith(".1.gz"):
read2 = P.snip(infile, ".1.gz") + ".2.gz"
assert os.path.exists(read2), "file does not exist %s" % read2
statement = '''python %(scriptsdir)s/fastqs2fasta.py
-a %(infile)s
-b %(read2)s
--log=%(infile)s.log
> %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@follows(mkdir("fasta.dir"))
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, r"fasta.dir/\1.fa")
def preprocessReads(infile, outfile):
'''
create merged fasta file for use with IDBA
'''
# check for second read in the pair
if infile.endswith(".fastq.gz"):
E.info("converting fastq file to fasta file")
outf = open(outfile, "w")
for fastq in Fastq.iterate(IOTools.openFile(infile)):
outf.write("%s\n%s\n" % (">" + fastq.identifier, fastq.seq))
outf.close()
elif infile.endswith(".1.gz"):
read2 = P.snip(infile, ".1.gz") + ".2.gz"
assert os.path.exists(read2), "file does not exist %s" % read2
log = infile.replace("fastq.", "")
statement = '''python %(scriptsdir)s/fastqs2fasta.py
-a %(infile)s
-b %(read2)s
--log=%(log)s.log
> %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
# RUNNING ASSEMBLY ALGORITHMS
###################################################################
###################################################################
###################################################################
###################################################################
# Have reads been pooled
###################################################################
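# Maps the pool_reads option (1 = pooled input, 0 or "" = per-track input) to (input task, input regex, contig-file suffix).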
SEQUENCE_TARGETS = {
1: (poolReadsAcrossConditions,
regex("(\S+)/(\S+).(fasta$|fasta.gz|fasta.1.gz|fastq$|fastq.gz|fastq.1.gz)"),
"2.contigs.fa"),
0: (SEQUENCEFILES, SEQUENCEFILES_REGEX, "1.contigs.fa"),
"": (SEQUENCEFILES, SEQUENCEFILES_REGEX, "1.contigs.fa")}
###################################################################
###################################################################
###################################################################
# assemble reads with meta-velvet
###################################################################
@active_if("metavelvet" in ASSEMBLERS)
@follows(mkdir("metavelvet.dir"))
@transform(SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][0],
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][1],
r"metavelvet.dir/\%s" %
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][2])
def runMetavelvet(infile, outfile):
'''
run meta-velvet on each track
'''
job_options = " -l mem_free=100G"
statement = PipelineMetagenomeAssembly.Metavelvet().build(infile, PARAMS)
P.run()
###################################################################
###################################################################
###################################################################
@jobs_limit(1, "R")
@transform(runMetavelvet, suffix(".contigs.fa"), ".stats.pdf")
def plotCoverageHistogram(infile, outfile):
'''
plot the coverage over kmers
'''
inf = P.snip(infile, ".contigs.fa") + ".stats.txt"
outf = P.snip(inf, ".txt") + ".pdf"
R('''library(plotrix)''')
R('''data = read.table("%s", header=TRUE)''' % inf)
R('''pdf("%s", height = 7, width = 7 )''' % outf)
R('''weighted.hist(data$short1_cov, data$lgth, breaks=seq(0, 200, by=1))''')
R["dev.off"]()
###################################################################
###################################################################
###################################################################
@transform(runMetavelvet, suffix(".contigs.fa"), ".stats.load")
def loadMetavelvetRawStats(infile, outfile):
'''
load the assembly stats for meta-velvet
'''
inf = P.snip(infile, ".contigs.fa") + ".stats.txt"
P.load(inf, outfile)
###################################################################
###################################################################
###################################################################
@transform(runMetavelvet, suffix(".contigs.fa"), ".summary.tsv")
def buildMetavelvetStats(infile, outfile):
'''
build metavelvet stats:
N50
Number of scaffolds
Total scaffold length
'''
PipelineMetagenomeAssembly.contig_to_stats(infile, outfile, PARAMS)
###################################################################
###################################################################
###################################################################
@transform(buildMetavelvetStats, regex("(\S+).dir/(\S+).tsv"), r"\1.dir/\1-\2.load")
def loadMetavelvetStats(infile, outfile):
'''
load the metavelvet stats
'''
P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
# assemble reads with idba
###################################################################
IDBA_TARGETS = {
    1: (preprocessIdba, regex("(\S+)/(\S+).fa"), "2.contigs.fa"),
    0: (preprocessReads, regex("(\S+)/(\S+).fa"), "2.contigs.fa"),
    "": (preprocessReads, regex("(\S+)/(\S+).fa"), "2.contigs.fa")}
@active_if("idba" in ASSEMBLERS)
@follows(mkdir("idba.dir"))
@transform(IDBA_TARGETS[PARAMS.get("pool_reads", "")][0],
IDBA_TARGETS[PARAMS.get("pool_reads", "")][1],
r"idba.dir/\%s" %
IDBA_TARGETS[PARAMS.get("pool_reads", "")][2])
def runIdba(infile, outfile):
'''
run idba on each track
'''
to_cluster = True
job_options = " -l mem_free=100G"
statement = PipelineMetagenomeAssembly.Idba().build(infile)
P.run()
###################################################################
###################################################################
###################################################################
@transform(runIdba, suffix(".contigs.fa"), ".summary.tsv")
def buildIdbaStats(infile, outfile):
'''
build idba stats:
N50
Number of scaffolds
Total scaffold length
'''
PipelineMetagenomeAssembly.contig_to_stats(infile, outfile, PARAMS)
###################################################################
###################################################################
###################################################################
@transform(buildIdbaStats, regex("(\S+).dir/(\S+).tsv"), r"\1.dir/\1-\2.load")
def loadIdbaStats(infile, outfile):
'''
load the idba stats
'''
P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
@active_if("ray" in ASSEMBLERS)
@follows(mkdir("ray.dir"))
@transform(SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][0],
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][1],
r"ray.dir/\%s" %
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][2])
def runRay(infile, outfile):
'''
run Ray on each track
'''
to_cluster = True
job_options = " -l mem_free=300G h=!andromeda,h=!cgatgpu1,h=!cgatsmp1,h=!gandalf,h=!saruman \
-pe mpi 10 \
-q all.q "
statement = PipelineMetagenomeAssembly.Ray().build(infile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("sga" in ASSEMBLERS)
@follows(mkdir("sga.dir"))
@transform(SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][0],
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][1],
r"sga.dir/\%s" %
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][2])
def runSGA(infile, outfile):
'''
run SGA on each track
'''
to_cluster = True
job_options = " -l mem_free=100G "
statement = PipelineMetagenomeAssembly.SGA().build(infile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("soapdenovo" in ASSEMBLERS)
@follows(mkdir("soapdenovo.dir"))
@transform(SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][0],
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][1],
r"soapdenovo.dir/\%s" % "1.contigs.fa.cfg")
def buildSoapdenovoConfig(infile, outfile):
'''
    build the SoapDenovo2 config file for each track
'''
PipelineMetagenomeAssembly.SoapDenovo2().config(infile, outfile, PARAMS)
###################################################################
###################################################################
###################################################################
@transform(buildSoapdenovoConfig, suffix(".contigs.fa.cfg"), ".contigs.fa")
def runSoapdenovo(infile, outfile):
'''
run soapdenovo
'''
job_options = "-l mem_free=100G"
statement = PipelineMetagenomeAssembly.SoapDenovo2().build(infile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("spades" in ASSEMBLERS)
@follows(mkdir("spades.dir"))
@transform(SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][0],
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][1],
r"spades.dir/\%s" %
SEQUENCE_TARGETS[PARAMS.get("pool_reads", "")][2])
def runSpades(infile, outfile):
'''
run spades on each track
'''
job_options = " -l mem_free=300G"
statement = PipelineMetagenomeAssembly.Spades().build(infile)
P.run()
###################################################################
###################################################################
###################################################################
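# Collect the assembler tasks requested in pipeline.ini so downstream tasks can depend on the chosen assemblers.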
ASSEMBLY_TARGETS = []
assembly_targets = {"metavelvet": runMetavelvet,
"idba": runIdba,
"ray": runRay,
"sga": runSGA,
"soapdenovo": runSoapdenovo,
"spades": runSpades}
for x in ASSEMBLERS:
ASSEMBLY_TARGETS.append(assembly_targets[x])
###################################################################
###################################################################
###################################################################
@transform(ASSEMBLY_TARGETS, suffix(".contigs.fa"), ".filtered.contigs.fa")
def filterContigs(infile, outfile):
'''
filter contigs if specified in .ini file. If not specified
then the pipeline will not remove any but will produce a new
outfile - this is not space efficient and SHOULD BE CHANGED
'''
if not PARAMS["filter"]:
length = 0
else:
length = PARAMS["filter"]
PipelineMetagenomeAssembly.filterContigs(infile, outfile, length)
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".summary.tsv")
def buildContigStats(infile, outfile):
'''
build contig stats:
N50
Number of scaffolds
Total scaffold length
max length
'''
PipelineMetagenomeAssembly.contig_to_stats(infile, outfile, PARAMS)
###################################################################
###################################################################
###################################################################
@transform(buildContigStats, regex("(\S+).dir/(\S+).tsv"), r"\1.dir/\1-\2.load")
def loadContigStats(infile, outfile):
'''
load the contig stats
'''
P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
@split(loadContigStats, "*/contig.summary.tsv")
def buildContigSummary(infiles, outfile):
'''
merge the contig summary statistics
'''
stats = collections.defaultdict(list)
for filepath in infiles:
dirname = os.path.dirname(filepath)
stats[dirname].append(os.path.basename(filepath))
N = PARAMS["scaffold_n"]
# connect to database
dbh = connect()
cc = dbh.cursor()
for dirname in stats.keys():
outfname = os.path.join(dirname, "contig.summary.tsv")
outf = open(outfname, "w")
outf.write(
"track\tnscaffolds\tscaffold_length\tN%i\tmean_length\tmedian_length\tmax_length\n" % N)
for infile in stats[dirname]:
track = P.snip(
infile.split(dirname.split(".dir")[0])[1][1:], ".summary.load")
table = P.toTable(infile)
data = cc.execute("""SELECT nscaffolds
, scaffold_length
, N50
, mean_length
, median_length
, max_length FROM %s""" % table).fetchone()
outf.write("\t".join(
map(str, [track, data[0], data[1], data[2], data[3], data[4], data[5]])) + "\n")
outf.close()
###################################################################
###################################################################
###################################################################
@transform(buildContigSummary, suffix(".tsv"), ".load")
def loadContigSummary(infile, outfile):
'''
load contig summary stats for each assembler
'''
outname = P.snip(os.path.dirname(infile), ".dir") + \
"_" + os.path.basename(infile) + ".load"
P.load(infile, outname)
P.touch(outfile)
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".lengths.tsv")
def buildContigLengths(infile, outfile):
'''
output lengths for each contig in each of the assemblies
'''
PipelineMetagenomeAssembly.build_scaffold_lengths(infile, outfile, PARAMS)
###################################################################
###################################################################
###################################################################
@transform(buildContigLengths, suffix(".lengths.tsv"), ".lengths.load")
def loadContigLengths(infile, outfile):
'''
load contig lengths
'''
outname = P.snip(os.path.dirname(infile), ".dir") + \
"_" + P.snip(os.path.basename(infile), ".tsv") + ".load"
P.load(infile, outname, "--index=scaffold_name")
P.touch(outfile)
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".gc.tsv")
def buildContigGCContent(infile, outfile):
'''
build the GC content for each contig
'''
statement = '''cat %(infile)s
| python %(scriptsdir)s/fasta2table.py
--section=cpg
--log=%(outfile)s.log
> %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(buildContigGCContent, suffix(".gc.tsv"), ".gc.load")
def loadContigGCContent(infile, outfile):
'''
load contig GC content
'''
outname = P.snip(os.path.dirname(infile), ".dir") + \
"_" + P.snip(os.path.basename(infile), ".tsv") + ".load"
P.load(infile, outname, "--index=id")
P.touch(outfile)
###################################################################
###################################################################
###################################################################
# @transform(filterContigs, suffix(".fa"), ".blast")
# def runBlastOnContigs(infile, outfile):
# '''
# run blast on the contigs for downstream taxonomic assignment
# runs a translated blast (x) and outputs blastx format
# or input into MEGAN
# '''
# #to_cluster = False
# db = PARAMS["megan_db"]
# evalue = PARAMS["megan_evalue"]
# statement = '''cat %(infile)s
# | python %(scriptsdir)s/farm.py --split-at-regex="^>(\S+)"
# --local
# --log=%(outfile)s.log
# --chunksize=100 "blastx -db %(db)s -evalue %(evalue)s" > %(outfile)s'''
# P.run()
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".diamond.tsv.gz")
def runDiamondOnContigs(infile, outfile):
'''
    diamond is an ultra-fast equivalent of blastx; here it is run
    on the filtered contigs for downstream taxonomic assignment
'''
temp = P.getTempFilename(".")
outtemp = P.getTempFilename(".")
# this is going to mess up some other users potentially
job_options = "-q pairsdb.q -pe mpi 32 -l mem_free=200G"
db = PARAMS["diamond_db"]
diamond_options = PARAMS["diamond_options"]
statement = '''diamond blastx
--db %(db)s
--query %(infile)s
-v 2
%(diamond_options)s
-o %(outtemp)s &> %(outfile)s.log;
checkpoint;
gzip -c %(outtemp)s > %(outfile)s;
checkpoint;
rm -rf %(temp)s %(temp)s.fastq %(outtemp)s
'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(runDiamondOnContigs, suffix(".diamond.tsv.gz"), ".lca.gz")
def runLCA(infile, outfile):
'''
run the lowest common ancestor algorithm
on the blast output to assign contigs to
taxa - from mtools. Runs with defaults at
the moment
'''
job_options="-l mem_free=25G"
# filtering options
filter_list = P.asList(PARAMS.get("lca_filter"))
if filter_list:
filter_stmt = "grep -v " + " | grep -v ".join(filter_list)
else:
filter_stmt = ""
track = P.snip(outfile, ".lca.gz")
gi2taxid = PARAMS.get("megan_gi2taxid")
outf_tax = P.snip(outfile, ".gz")
options = PARAMS.get("lca_options")
statement = '''lcamapper.sh
-i %(infile)s
-f Detect
%(options)s
-gt %(gi2taxid)s
-o %(outf_tax)s > %(outfile)s.log
; cat %(outf_tax)s
| %(filter_stmt)s
| gzip > %(outfile)s
; checkpoint
; rm -rf %(outf_tax)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(runLCA, suffix(".lca.gz"), ".taxa.gz")
def parseLCA(infile, outfile):
'''
tabulate LCA output into nice format
'''
statement = '''zcat %(infile)s
| python %(scriptsdir)s/lca2table.py
--summarise=individual
--log=%(outfile)s.log
| sed -e 's/order/_order/g'
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@jobs_limit(1, "db")
@transform(parseLCA, suffix(".gz"), ".load")
def loadLCA(infile, outfile):
'''
load LCA results
'''
tablename = P.snip(os.path.dirname(infile), ".dir") + \
"_" + os.path.basename(P.snip(infile, ".gz"))
tablename = P.toTable(tablename + ".load")
statement = '''zcat %(infile)s | python %(scriptsdir)s/csv2db.py
-t %(tablename)s
--index=id
--log=%(outfile)s.log
> %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".tetra")
def buildTetranucleotideFreq(infile, outfile):
'''
calculate the tetranucleotide frequency for
contigs
'''
statement = '''cat %(infile)s | python %(scriptsdir)s/fasta2kmercontent.py
-k 4 --log=%(outfile)s.log > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@follows(loadContigStats,
loadContigSummary,
loadContigLengths,
loadContigGCContent,
loadLCA,
buildTetranucleotideFreq)
def assembly():
pass
###################################################################
###################################################################
###################################################################
# gene finding using MetaGeneMark
###################################################################
###################################################################
###################################################################
@transform(filterContigs, suffix(".fa"), ".genes.tsv")
def findGenesUsingMetaGeneMark(infile, outfile):
'''
Use the MetaGeneMark hmm software to predict genes.
Output is tsv - similar to gff format but with
sequences
'''
to_cluster = True
mparams = PARAMS["metagenemark_model_params"]
statement = '''gmhmmp -a -d -f G -m %(mparams)s -o %(outfile)s %(infile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(findGenesUsingMetaGeneMark, regex("(\S+).tsv"), r"\1.gff.gz")
def parseGenesGff(infile, outfile):
'''
parse the genes file
'''
to_cluster = True
statement = '''cat %(infile)s | python %(scriptsdir)s/formatMetagenemark.py
--format gff
--log=%(outfile)s.log
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(findGenesUsingMetaGeneMark, regex("(\S+).tsv"), r"\1.fasta.gz")
def parseGenesFasta(infile, outfile):
    '''
    parse the gene predictions into nucleotide fasta format
    '''
to_cluster = True
statement = '''cat %(infile)s | python %(scriptsdir)s/formatMetagenemark.py
--format fasta
--log=%(outfile)s.log
| sed 's/DNA /DNA_/g'
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(findGenesUsingMetaGeneMark, regex("(\S+).tsv"), r"\1.aa.gz")
def parseGenesAa(infile, outfile):
    '''
    parse the gene predictions into amino acid fasta format
    '''
to_cluster = True
statement = '''cat %(infile)s | python %(scriptsdir)s/formatMetagenemark.py
--format aa
--log=%(outfile)s.log
| sed 's/Protein /Protein_/g'
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(parseGenesAa, suffix(".aa.gz"), ".essential.hmm.gz")
def assignEssentialGenesToContigs(infile, outfile):
'''
assign essential genes to contigs
'''
dirname = os.path.dirname(infile)
essential = PARAMS["hmmer_hmm"]
tempdir = P.getTempDir(".")
statement = '''zcat %(infile)s > %(tempdir)s/orfs.fa;
hmmsearch --tblout %(tempdir)s/hmm.out --cut_tc
--notextw %(essential)s %(tempdir)s/orfs.fa;
tail -n+4 %(tempdir)s/hmm.out | sed 's/ * / /g' | cut -f1,4 -d " "
| gzip > %(outfile)s'''
P.run()
statement = '''rm -rf %(tempdir)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(assignEssentialGenesToContigs, suffix(".gz"), add_inputs(parseGenesGff), ".contigs.gz")
def postprocessEssentialGeneAssignments(infiles, outfile):
    '''
    add the contig that each ORF is associated with to the
    file
    '''
track = P.snip(infiles[0], ".essential.hmm.gz")
genes, gff = infiles[0], [inf for inf in infiles[1:] if inf.find(track) != -1][0]
protein2contig = {}
    for entry in GTF.iterator(IOTools.openFile(gff)):
        protein2contig["Protein_" + str(entry.gene_id)] = entry.contig
# output contig associated with protein id
outf = IOTools.openFile(outfile, "w")
outf.write("contig\torf\thmm_profile\n")
for line in IOTools.openFile(genes).readlines():
data = line[:-1].split(" ")
protein, profile = data[0], data[1]
outf.write(
"\t".join([protein2contig[protein], protein, profile]) + "\n")
outf.close()
###################################################################
###################################################################
###################################################################
@transform(postprocessEssentialGeneAssignments, suffix(".gz"), ".load")
def loadEssentialGeneAssignments(infile, outfile):
'''
load assignments of essential genes
'''
P.load(infile, outfile, "--index=contig")
###################################################################
###################################################################
###################################################################
@follows(mkdir("genes.dir"))
@transform(parseGenesAa, regex("(\S+).dir/(\S+).aa.gz"), r"genes.dir/\1_\2.blast.result.gz")
def runBlastOnAminoAcidSequences(infile, outfile):
'''
look for homology with known genes
'''
to_cluster = True
db = PARAMS["blastp_db"]
evalue = PARAMS["blastp_evalue"]
if PARAMS["blastp_ungapped"]:
ungapped = "-ungapped"
else:
ungapped = ""
statement = '''zcat %(infile)s
| python %(scriptsdir)s/farm.py --split-at-regex="^>(\S+)"
--log=%(outfile)s.log
--chunksize=1000 "blastp -db %(db)s -evalue %(evalue)s
-outfmt '6 qseqid qlen sseqid sgi sacc slen qstart qend sstart send evalue bitscore score length pident mismatch gaps frames staxids sscinames'"
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(runBlastOnAminoAcidSequences, suffix(".result"), r".result.load")
def loadBlastOnAminoAcidSequences(infile, outfile):
'''
load blastp results
'''
P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
@transform([parseGenesGff, parseGenesFasta, parseGenesAa], regex("(\S+).dir/(\S+).genes.(\S+).gz"), r"\1.dir/\1_\2.genes.\3.tsv.gz")
def buildGeneTables(infile, outfile):
'''
build gene tables
'''
to_cluster = True
if infile.endswith(".gff.gz"):
outf = gzip.open(outfile, "w")
outf.write(
"chr\tsource\tfeature\tstart\tend\tscore\tstrand\tframe\tattributes\n")
for line in gzip.open(infile).readlines():
outf.write(line)
outf.close()
else:
statement = '''zcat %(infile)s | python %(scriptsdir)s/fasta2table.py
-s sequence
--log=%(outfile)s.log | gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@jobs_limit(1, "db")
@transform(buildGeneTables, regex("(\S+)/(\S+).genes.(\S+).tsv.gz"), r"\1/\2.genes.\3.load")
def loadGeneTables(infile, outfile):
'''
    load genes from the MetaGeneMark analysis
'''
    # gff and sequence tables are loaded identically
    P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
@follows(loadEssentialGeneAssignments,
loadBlastOnAminoAcidSequences,
loadGeneTables)
def genes():
pass
###################################################################
###################################################################
###################################################################
# build indices for mapping - this is for coverage analysis
###################################################################
###################################################################
###################################################################
@active_if(BOWTIE)
@transform(filterContigs, suffix(".fa"), ".ebwt")
def buildAssemblyBowtieIndices(infile, outfile):
'''
build bowtie indices
'''
to_cluster = True
outbase = P.snip(infile, ".fa")
directory = os.path.dirname(infile)
statement = '''bowtie-build -f %(infile)s %(outbase)s'''
P.run()
P.touch(outfile)
###################################################################
###################################################################
###################################################################
@active_if(BOWTIE2)
@transform(filterContigs, suffix(".fa"), ".bt2")
def buildAssemblyBowtie2Indices(infile, outfile):
'''
build bowtie indices
'''
to_cluster = True
outbase = P.snip(infile, ".fa")
statement = '''bowtie2-build -f %(infile)s %(outbase)s'''
P.run()
P.touch(outfile)
###################################################################
###################################################################
###################################################################
@active_if(BWA)
@transform(filterContigs, suffix(".fa"), ".fa.bwt")
def buildAssemblyBWAIndices(infile, outfile):
'''
build bwa indices
'''
to_cluster = True
statement = '''bwa index %(infile)s'''
P.run()
P.touch(outfile)
###################################################################
###################################################################
###################################################################
# map
###################################################################
index = {"bowtie": buildAssemblyBowtieIndices,
"bowtie2": buildAssemblyBowtie2Indices, "bwa": buildAssemblyBWAIndices}
INDEX = index[MAPPER]
###################################################################
###################################################################
###################################################################
@active_if("metavelvet" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runMetavelvet), r"metavelvet.dir/\1.filtered.contigs.bam")
def mapReadsAgainstMetavelvetContigs(infiles, outfile):
'''
map reads against metavelvet contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome)
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("idba" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runIdba), r"idba.dir/\1.filtered.contigs.bam")
def mapReadsAgainstIdbaContigs(infiles, outfile):
'''
map reads against idba contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome)
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("ray" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runRay), r"ray.dir/\1.filtered.contigs.bam")
def mapReadsAgainstRayContigs(infiles, outfile):
'''
map reads against Ray contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome) + ".fa"
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("sga" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runSGA), r"sga.dir/\1.filtered.contigs.bam")
def mapReadsAgainstSGAContigs(infiles, outfile):
'''
    map reads against SGA contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome) + ".fa"
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("soapdenovo" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runSoapdenovo), r"soapdenovo.dir/\1.filtered.contigs.bam")
def mapReadsAgainstSoapdenovoContigs(infiles, outfile):
'''
    map reads against soapdenovo contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome) + ".fa"
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
@active_if("spades" in ASSEMBLERS)
@transform(SEQUENCEFILES, SEQUENCEFILES_REGEX, add_inputs(INDEX, runSpades), r"spades.dir/\1.filtered.contigs.bam")
def mapReadsAgainstSpadesContigs(infiles, outfile):
'''
map reads against spades contigs
'''
inf = infiles[0]
to_cluster = True
index_dir = os.path.dirname(outfile)
if "agg" not in infiles[1]:
genome = re.search(
".*R[0-9]*", infiles[0]).group(0) + ".filtered.contigs.fa"
else:
genome = "agg-agg-agg.filtered.contigs.fa"
if infiles[1].endswith(".bt2") or infiles[1].endswith(".ebwt"):
infile, reffile = infiles[0], os.path.join(index_dir, genome) + ".fa"
m = PipelineMapping.Bowtie(
executable=P.substituteParameters(**locals())["bowtie_executable"])
elif infiles[1].endswith("bwt"):
job_options = " -l mem_free=%s" % (PARAMS["bwa_memory"])
bwa_index_dir = index_dir
bwa_mem_options = PARAMS["bwa_mem_options"]
bwa_threads = PARAMS["bwa_threads"]
m = PipelineMapping.BWAMEM(remove_non_unique=True)
statement = m.build((inf,), outfile)
P.run()
###################################################################
###################################################################
###################################################################
ALIGNMENT_TARGETS = []
alignment_targets = {"metavelvet": mapReadsAgainstMetavelvetContigs,
"idba": mapReadsAgainstIdbaContigs,
"ray": mapReadsAgainstRayContigs,
"sga": mapReadsAgainstSGAContigs,
"soapdenovo": mapReadsAgainstSoapdenovoContigs,
"spades": mapReadsAgainstSpadesContigs}
for x in ASSEMBLERS:
ALIGNMENT_TARGETS.append(alignment_targets[x])
###################################################################
###################################################################
###################################################################
@transform(ALIGNMENT_TARGETS, regex("(\S+).dir/(\S+).bam"), r"\1.dir/\1_\2.alignment_stats")
def buildAlignmentStats(infile, outfile):
'''
use bam2stats to get alignment statistics
'''
statement = '''cat %(infile)s | python %(scriptsdir)s/bam2stats.py
--force
--output-filename-pattern=%(outfile)s.%%s
--log=%(outfile)s.log
> %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@jobs_limit(1, "db")
@transform(buildAlignmentStats, suffix("_stats"), "_stats.load")
def loadAlignmentStats(infile, outfile):
'''
load bam2stats results
'''
P.load(infile, outfile)
###################################################################
###################################################################
###################################################################
@transform(ALIGNMENT_TARGETS, regex("(\S+).dir/(\S+).bam"), r"\1.dir/\1_\2.picard_stats")
def buildPicardStats(infile, outfile):
'''build alignment stats using picard.
    Note that picard counts reads, but they are in fact alignments.
'''
if PARAMS["pool_reads"]:
reffile = os.path.join(
os.path.dirname(infile), "agg-agg-agg.filtered.contigs.fa")
else:
reffile = P.snip(infile, ".bam") + ".fa"
PipelineMappingQC.buildPicardAlignmentStats(infile,
outfile,
reffile)
###################################################################
###################################################################
###################################################################
#@jobs_limit( 1, "db" )
@merge(buildPicardStats, "picard_stats.load")
def loadPicardStats(infiles, outfile):
'''merge alignment stats into single tables.'''
PipelineMappingQC.loadPicardAlignmentStats(infiles, outfile)
###################################################################
###################################################################
###################################################################
@follows(*ALIGNMENT_TARGETS)
@transform(ALIGNMENT_TARGETS,
suffix(".bam"),
add_inputs(loadAlignmentStats),
".coverage.gz")
def buildCoverageOverContigs(infiles, outfile):
'''
build histograms of the coverage over each of the contigs
'''
to_cluster = True
bam = infiles[0]
    # genomeCoverageBed does not like some of the
    # output from bwa: bwa reports some reads
    # that map off the end of contigs
    # as having a leftmost position of 0, which
    # genomeCoverageBed rejects. We therefore need
    # temporary bam files containing only mapped reads -
    # this is nasty and needs changing (see the sketch
    # after this function)
tempdir = P.getTempDir(".")
tempname = P.getTempFilename(tempdir) + ".bam"
P.submit("CGATPipelines.PipelineMetagenomeAssembly",
"filterBamOnPos",
infiles = bam,
outfiles = tempname)
# tablename where alignment stats live
tablename = os.path.dirname(
bam)[:-len(".dir")] + "_" + P.snip(os.path.basename(bam), ".bam") + "_alignment_stats"
# hack to convert to table - add .load
tablename = P.toTable(tablename + ".load")
# connect to database
dbh = connect()
cc = dbh.cursor()
# get number of reads aligned from bam2stats
if PARAMS.get("coverage_scale"):
scale_factor = cc.execute("""SELECT counts FROM %s
WHERE category == 'reads_mapped'""" % tablename).fetchone()[0]
scale_factor = 1 / (float(scale_factor) / 1000000)
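        # e.g. 2,000,000 mapped reads gives scale_factor = 1 / 2.0 = 0.5,
        # so coverage is expressed per million mapped reads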
scale_options = "-scale %(scale_factor)f"
else:
scale_options = ""
statement = '''genomeCoverageBed -ibam %(tempname)s %(scale_options)s -d | gzip > %(outfile)s;
rm -rf %(tempdir)s'''
P.run()
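# A plausible sketch of the filterBamOnPos helper submitted above (an
# assumption -- the real implementation lives in
# CGATPipelines.PipelineMetagenomeAssembly and is not shown in this file):
# drop unmapped reads and reads without a valid leftmost position, so that
# genomeCoverageBed does not choke on bwa's off-contig records.
def filterBamOnPosSketch(infiles, outfiles):
    import pysam
    with pysam.AlignmentFile(infiles, "rb") as inbam, \
            pysam.AlignmentFile(outfiles, "wb", template=inbam) as outbam:
        for read in inbam:
            # keep only mapped reads with a sane 0-based start coordinate
            if not read.is_unmapped and read.reference_start >= 0:
                outbam.write(read)
    pysam.index(outfiles)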
###################################################################
###################################################################
###################################################################
@transform(buildCoverageOverContigs, suffix(".gz"), ".stats.gz")
def buildCoverageStats(infile, outfile):
    '''
    build coverage statistics - mean and standard deviation.
    We also bin the contigs here and produce coverage
    matrices; there is no tracking of these outfiles, however.
    Contigs that do not have coverage are not included.
    '''
job_options = " -l mem_free=30G"
to_cluster = True
track = P.snip(infile, ".gz")
statement = '''zcat %(infile)s | python %(scriptsdir)s/coverage2stats.py --log=%(outfile)s.log
--bin --bin-number=500 --output-filename-prefix=%(track)s
| gzip > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@transform(buildCoverageStats, suffix(".gz"), add_inputs(buildContigLengths), ".postprocess.gz")
def postprocessCoverageStats(infiles, outfile):
'''
genomeCoverageBed outputs only non-zero depth. Add a "0" to
contigs that have zero coverage
'''
stats_file = infiles[0]
inf = IOTools.openFile(stats_file)
header = inf.readline()
if PARAMS["pool_reads"]:
        contigs = [x for x in infiles[1:] if x.find(
            os.path.dirname(stats_file)) != -1][0]
else:
contigs = stats_file.replace(".coverage.stats.gz", ".lengths.tsv")
contig2stats = {}
for line in inf.readlines():
data = line[:-1].split("\t")
contig, mean, sd = data[0], data[1], data[2]
contig2stats[contig] = (mean, sd)
inf2 = open(contigs)
header2 = inf2.readline()
outf = gzip.open(outfile, "w")
outf.write(header)
for line in inf2.readlines():
data = line[:-1].split("\t")
contig, length = data[0], data[1]
        if contig in contig2stats:
outf.write("%s\t%s\t%s\n" %
(contig, contig2stats[contig][0], contig2stats[contig][1]))
else:
outf.write("%s\t0\t0\n" % contig)
outf.close()
###################################################################
###################################################################
###################################################################
@jobs_limit(1, "db")
@transform(postprocessCoverageStats, suffix(".postprocess.gz"), ".load")
def loadCoverageStats(infile, outfile):
'''
load coverage stats
'''
tablename = P.toTable(
P.snip(os.path.dirname(infile), ".dir") + "_%s" % os.path.basename(outfile))
statement = '''zcat %(infile)s | python %(scriptsdir)s/csv2db.py
-t %(tablename)s
--index=contig
--log=%(outfile)s.log > %(outfile)s'''
P.run()
###################################################################
###################################################################
###################################################################
@follows(loadCoverageStats)
def coverage():
pass
####################
# full targets
####################
@follows(loadReadCounts,
assembly,
genes,
coverage)
def full():
pass
####################
# report building
####################
@follows(mkdir("report"))
def build_report():
'''build report from scratch.'''
E.info("starting documentation build process from scratch")
P.run_report(clean=True)
@follows(mkdir("report"))
def update_report():
'''update report.'''
E.info("updating documentation")
P.run_report(clean=False)
if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "plot":
pipeline_printout_graph("test.pdf", "pdf", [full], no_key_legend=True,
size=(4, 4),
user_colour_scheme = {"colour_scheme_index": 1})
else:
sys.exit(P.main(sys.argv))
|
[
"nicholas.ilott@dpag.ox.ac.uk"
] |
nicholas.ilott@dpag.ox.ac.uk
|
379340e2144871bd7cb7038d2126d6d8d7bee705
|
f46c8ee2f598c93af0957178e4d580c6eec5c8a6
|
/c.py
|
73980dab7fe6811a11588939f6827290428cda21
|
[] |
no_license
|
soumyarepo/staging
|
c757b342095f7c2da407cee75cc3595f4c0fa49d
|
2db66c51cd9bff4b9cbb6b3bf855a28c1b6c1ac6
|
refs/heads/master
| 2022-06-19T20:04:05.393692
| 2020-05-05T17:57:00
| 2020-05-05T17:57:00
| 261,508,224
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17
|
py
|
print "hello c"
|
[
"ranjan.soumya8055@gmail.com"
] |
ranjan.soumya8055@gmail.com
|
9147ad76a990a71765f45ee71beb970508e99910
|
f92bd466895bda0ee47fbf31375ea5c5a922a1e7
|
/shop/migrations/0002_userid.py
|
f604898644e9e06e1cef70686e38897da142c6de
|
[] |
no_license
|
shenhuawade/tbzhuhost
|
814fbf68c90c16c4f2c75491ef93787f40989d50
|
f1970f8b37a237407f4969d9e8a0b8ed4aa5e104
|
refs/heads/master
| 2020-04-15T19:43:57.785329
| 2019-09-02T13:37:12
| 2019-09-02T13:37:12
| 164,962,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
# Generated by Django 2.1.4 on 2018-12-26 08:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shop', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserId',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('password', models.TextField()),
],
),
]
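# Apply with: python manage.py migrate shop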
|
[
"317909531@qq.com"
] |
317909531@qq.com
|
a1629821fe371a2f36d55c88aa41539b627a4a4c
|
d93ac89636d61b7912f49f4070153d8f274a5fd7
|
/iot/iot/urls.py
|
2b8734b6631490c974ea7142a26f5c2a821a0f17
|
[] |
no_license
|
tyrantqiao/dataPlatform
|
735464e7e31873a67165e7ddd4aafcc9f8e76fed
|
cdc4bb2fda60ff5e8838fb23bcfa4a9180ffa5aa
|
refs/heads/master
| 2022-12-09T22:30:38.143146
| 2020-12-06T05:48:25
| 2020-12-06T05:48:25
| 163,085,069
| 0
| 0
| null | 2022-12-08T01:40:21
| 2018-12-25T13:27:04
|
Python
|
UTF-8
|
Python
| false
| false
| 2,322
|
py
|
"""iot URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import url, include
from django.contrib.auth.models import User
from rest_framework import routers, serializers, viewsets
# qiao: apis for frontend
from api.views import NodesListViewSet,DataListViewSet,SearchDataListViewSet,OrderListViewSet,CommodityListViewSet
from rest_framework.routers import DefaultRouter
from rest_framework.documentation import include_docs_urls
from permission.views import *
# Routers provide an easy way of automatically determining the URL conf.
# qiao: this registers the docs page at :8000/docs
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'nodes', NodesListViewSet)
router.register(r'data', DataListViewSet)
router.register(r'searchData', SearchDataListViewSet)
router.register(r'order', OrderListViewSet)
router.register(r'commodity', CommodityListViewSet)
router.register(r'user', UserViewSet)
router.register(r'email', EmailCodeViewset)
# qiao: add rest framework's login and logout views
# note: Django 2.0 no longer supports the app_name argument here; use include('blog.urls') instead
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
#url(r'^', include('api.urls')),
url(r'^captcha/', include('captcha.urls')),
url(r'^register/', UserRegisterAPIView.as_view()),
url(r'^login/', UserLoginAPIView.as_view()),
url(r'^vertification/', CaptchaAPIView.as_view()),
url(r'^api/', include(router.urls)),
url(r'^permission/', include(router.urls)),
    url(r'docs/', include_docs_urls(title="Backend API")),
url(r'^', include(router.urls)),
]
|
[
"tyrantqiao@icloud.com"
] |
tyrantqiao@icloud.com
|
35333f9be2676a818d19f6e2fb34856cd4c5f170
|
cf80b490187331a3241bc6409863b8fd28926bf0
|
/paint/settings.py
|
44245296b931992f38f56fedc6f07a9b3035827e
|
[] |
no_license
|
insomniac12/Paint
|
4e6928848bf937506668ff43828d33e77a2cf48f
|
f29056752ccfd322a7b02d27836f66e857c82374
|
refs/heads/master
| 2021-01-10T10:22:21.389777
| 2016-01-25T13:37:41
| 2016-01-25T13:37:41
| 50,351,192
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,791
|
py
|
"""
Django settings for paint project.
Generated by 'django-admin startproject' using Django 1.8.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!sx=%f2aepc=73$*7_is_7__!2&69a-+wz#cn6dbkaf)$sbba_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
#'djangoratings',
'mysite',
'rest_framework',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'paint.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['/home/shivangi/shivangi/sh/paint/templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'paint.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# note: the following filesystem path was shadowed by the STATIC_URL
# assignment at the bottom of this file; it looks like STATICFILES_DIRS material
#STATIC_URL='/home/shivangi/sh/paint/templates/static/'
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
[
"shivi.bajpai1@gmail.com"
] |
shivi.bajpai1@gmail.com
|
f6d02001062b5a4ce081620e30210e6aa06ac151
|
ceabddf546c74adfec5b70d6444551415b75f6db
|
/dz_4.py
|
60d5a1aad04e152e6671724305483629f8c5bf2e
|
[] |
no_license
|
andrymachnev/python_lessons_one
|
5767beb359dc6acfbaa1bee3306e4ed92da6177a
|
2447627448ea0b1118f9ba38fe57a3ac9b37d073
|
refs/heads/master
| 2023-08-21T21:07:59.780284
| 2021-10-26T05:35:04
| 2021-10-26T05:35:04
| 417,171,852
| 0
| 0
| null | 2021-10-15T14:05:08
| 2021-10-14T14:54:01
|
Python
|
UTF-8
|
Python
| false
| false
| 3,091
|
py
|
# Check whether the pillow package is installed in the global environment. If so, record its version.
# Install the latest version of pillow if it was not installed before. Take a confirming screenshot.
# Create and activate a virtual environment. Make sure the pillow package is not present in it.
# Take a confirming screenshot.
# Install pillow version 7.1.1 (or another version different from the latest) into the virtual environment.
# Take a confirming screenshot. Deactivate the virtual environment. Take a confirming screenshot.
# Number the screenshots with two-digit numbers, e.g. "01.jpg", "02.jpg".
# If pillow causes problems, another package can be used instead, e.g. requests.
# Write a function currency_rates() that takes a currency code as its argument
# (e.g. USD, EUR, GBP, ...) and returns that currency's exchange rate against the rouble.
# Use the requests library. http://www.cbr.ru/scripts/XML_daily.asp can be used
# as the API. Recommendation: first run the request in an ordinary browser and
# inspect the response body. Can the task be solved using only methods of the
# str class? The function must return a numeric result, e.g. a float.
# Consider: does it make sense to use Decimal instead of float for monetary
# values? Does that complicate the function's code much? If the currency code
# passed as the argument is not present in the response, return None. Can the
# function be made independent of the case in which the argument was passed?
# As an example, print the dollar and euro rates.
# def currency_rates(usd, eur, rub):
# from requests import get, utils
# responce = get('http://www.cbr.ru/scripts/XML_daily.asp')
# encodings = utils.get_encoding_from_headers(responce.headers)
# content = responce.content.decode(encoding=encodings)
# # print(type(responce))
# # print(dir(responce))
# print(content)
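# A minimal sketch of the requested function (assumptions: requests is
# installed, and the CBR daily XML uses <Valute> elements with <CharCode>,
# <Nominal> and <Value> children -- worth confirming in a browser first, as
# the task suggests).
import xml.etree.ElementTree as ET
from decimal import Decimal
import requests
def currency_rates(code):
    """Return the rouble rate for `code` (e.g. 'USD'), or None if absent.
    Decimal is used instead of float to avoid binary rounding on money;
    the lookup is made case-insensitive via upper()."""
    response = requests.get('http://www.cbr.ru/scripts/XML_daily.asp')
    root = ET.fromstring(response.content)
    for valute in root.iter('Valute'):
        if valute.findtext('CharCode') == code.upper():
            nominal = Decimal(valute.findtext('Nominal'))
            # CBR uses a comma as the decimal separator
            value = Decimal(valute.findtext('Value').replace(',', '.'))
            return value / nominal
    return None
if __name__ == '__main__':
    print(currency_rates('usd'), currency_rates('EUR'))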
|
[
"andrey.machnev@lynkage.tu"
] |
andrey.machnev@lynkage.tu
|
b6262d62ecea4989442b3be72000fc2b602b926a
|
ccbad4acfc90110491e4a4ed80e5c0f2f1cc631b
|
/models/__init__.py
|
e62633308a277536bbadf574fc98b18e01726a75
|
[] |
no_license
|
Softinadev/custom_pos
|
f6217bb30a57bef876ea95a5b21cb8842f546d7c
|
77ab324fbdcecf36d69ebee87155fb3f22a20c37
|
refs/heads/main
| 2023-01-31T21:33:56.728061
| 2020-12-20T19:23:39
| 2020-12-20T19:23:39
| 323,147,337
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 100
|
py
|
# -*- coding: utf-8 -*-
from . import hr_employee
from . import pos_config
from . import pos_order
|
[
"noreply@github.com"
] |
noreply@github.com
|
f42060777917f69c2baf86a67e23ede553107892
|
08d66ea553e83894557d88fd91633b27ec8fdad0
|
/populate_database.py
|
ac0135ba71389659cfe118ebc12a1f31762946d3
|
[] |
no_license
|
Michael-Zelenoborsky-Long/databases_spring2020
|
b9f7c924204618233d8f550d3ac27e5c96477f78
|
c1a06eca8828ebc3986be32fa0d9b8ff72167328
|
refs/heads/master
| 2021-04-01T08:07:11.004998
| 2020-04-03T07:37:52
| 2020-04-03T07:37:52
| 248,171,629
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,349
|
py
|
import mysql.connector
import vcf
import os
import datetime
import json
# connection
cnx = mysql.connector.connect(user='long', password='password',
host='127.0.0.1',
database='pea_variant_calling')
cursor = cnx.cursor()
# variant caller run constants
vcf_file_path = "discoRes_k_31_c_3_D_100_P_3_b_0_coherent_for_IGV_edited.vcf"
run_date = datetime.datetime.fromtimestamp(os.path.getmtime(vcf_file_path)) # file modification datetime
phenotype = "wt, mutant"
ref_genome = (1, "frisson_draft")
# open vcf
with open("discoRes_k_31_c_3_D_100_P_3_b_0_coherent_for_IGV_edited.vcf", mode='r') as vcf_file:
vcf_reader = vcf.Reader(vcf_file)
counter = 0
#===== populate genome
#insert_query = "INSERT INTO genome (id, name) VALUES (1, 'frisson_draft')"
#cursor.execute(insert_query)
#emp_no = cursor.lastrowid
#print("Inserted into row # ", emp_no)
#cnx.commit()
#==== populate run info
format_json = json.dumps(vcf_reader.formats)
filter_json = json.dumps(vcf_reader.filters)
insert_query = """INSERT INTO vcaller_run (id, run_date, ref_genome_id, phenotype, format, filter)
VALUES (%s,%s,%s,%s,%s,%s)"""
#cursor.execute(insert_query, (1, run_date, 1, phenotype, format_json, filter_json))
#emp_no = cursor.lastrowid
#print("Inserted into row # ", emp_no)
#cnx.commit()
# populate variants table and contigs
for record in vcf_reader:
# check contigs table
query_contigs = """SELECT * FROM contigs WHERE contigs.name = (%s)"""
cursor.execute(query_contigs, (record.CHROM, ))
query_result = cursor.fetchall()
if len(query_result) < 1:
contig_ins_query = """INSERT INTO contigs (genome_id, name) VALUES (%s, %s)"""
cursor.execute(contig_ins_query, (1, record.CHROM))
cnx.commit()
query_contigs = """SELECT * FROM contigs WHERE contigs.name = (%s)"""
cursor.execute(query_contigs, (record.CHROM, ))
query_result = cursor.fetchall()
contig_id = query_result[0][0]
# check type table
cursor.execute("""SELECT * FROM variant_type WHERE name = %s""", (record.var_type, ))
query_result = cursor.fetchall()
if len(query_result) < 1:
cursor.execute("""INSERT INTO variant_type (name) VALUE (%s)""", (record.var_type, ))
cnx.commit()
cursor.execute("""SELECT * FROM variant_type WHERE variant_type.name = %s""", (record.var_type, ))
query_result = cursor.fetchall()
type_id = query_result[0][0]
# check subtype table
cursor.execute("""SELECT * FROM variant_subtype WHERE name = %s""", (record.var_subtype, ))
query_result = cursor.fetchall()
if len(query_result) < 1:
cursor.execute("""INSERT INTO variant_subtype (name) VALUE (%s)""", (record.var_subtype, ))
cnx.commit()
cursor.execute("""SELECT * FROM variant_subtype WHERE variant_subtype.name = %s""", (record.var_subtype, ))
query_result = cursor.fetchall()
subtype_id = query_result[0][0]
# insert variant
variant_ins_query = """INSERT INTO variants_table_doublelinked (run_id, contig_id, alleles, pos, alt, ref,
alt_alleles_list, qual, var_type_id, var_subtype_id, start, end, affected_start, affected_end, af, ac,
dp, info_dict, samples_dict) VALUE (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
variant_attributes = (1, contig_id, json.dumps(str(record.alleles)), record.POS, str(record.ALT[0]), record.REF,
json.dumps(str(record.ALT)), record.QUAL, type_id, subtype_id, record.start, record.end,
record.affected_start, record.affected_end, record.heterozygosity, record.call_rate,
record.num_called, json.dumps(record.INFO), json.dumps(str(record.samples)))
try:
cursor.execute(variant_ins_query, variant_attributes)
cnx.commit()
except mysql.connector.errors.DataError as error:
print(error)
print("Variant call {} cannot be imported".format(record))
continue
# close connection
cursor.close()
cnx.close()
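# The select/insert/select blocks above repeat one "get or create" pattern.
# A small helper could replace them for the single-name lookup tables
# (variant_type, variant_subtype); this is a sketch, not part of the
# original script, and assumes each table has `id` and `name` columns:
def get_or_create_id(cursor, cnx, table, name):
    """Return the id of `name` in `table`, inserting it first if missing."""
    # table names are interpolated from trusted literals only, never user input
    cursor.execute("SELECT id FROM {} WHERE name = %s".format(table), (name,))
    row = cursor.fetchone()
    if row is not None:
        return row[0]
    cursor.execute("INSERT INTO {} (name) VALUES (%s)".format(table), (name,))
    cnx.commit()
    return cursor.lastrowid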
|
[
"noreply@github.com"
] |
noreply@github.com
|
ec725efbd259fed16c95c5d9ccda48f352ec847f
|
1b14fb21afa97c6d033794dbef2258aea8346d4a
|
/liuWinshenProject0/clean.py
|
1018be1e5c7bc26046a5df17512f59185d882a88
|
[] |
no_license
|
liuwinshen/computer_systems
|
ece4ca49ea50a4654259c9498b7c16f274c04ca7
|
3be562cf0b68046ef6f331183f15dac0bc35d565
|
refs/heads/master
| 2020-03-30T21:23:22.366127
| 2018-10-14T03:29:40
| 2018-10-14T03:29:40
| 151,626,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,271
|
py
|
import sys
import re
def clean(filename, no_comments=None):
'''Takes file as input (format: 'filename.in'), removes whitespace, and
writes to new file. Optional functionality to also remove comments'''
try: # attempts to open file
f = open(filename, 'r')
except IOError:
print('File not found. Please try again with an existing filename.')
else:
with f:
# new file replaces '.in' with '.out' on original filename
            filename = filename[:-2] + 'out'  # slicing avoids rstrip's char-set stripping
new_f = open(filename, 'w')
for line in f:
clean_line = line.replace(' ', '').replace('\n','') # removes whitespace
if no_comments is not None: # removes comments
clean_line = re.sub('(//[\w\W]*)','', clean_line)
                if clean_line != '': # writes to new file if line is not blank
                    new_f.write(clean_line + '\n')
            new_f.close()
def main():
user_input = sys.argv # parses command line arguments
if user_input[-1] == 'no-comments': # determines if no-comments is the last term
clean(user_input[1], user_input[-1])
else:
clean(user_input[1])
if __name__ == '__main__':
main()
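# Example invocation (hypothetical filename): python clean.py Prog.in no-comments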
|
[
"winshen@uchicago.edu"
] |
winshen@uchicago.edu
|
69b765b6c43f5b3185f837bec89da018492bd0ac
|
9ff1058a0500be499fd3de9ec0beccd697d5273c
|
/Shared/Ollinger/pyparserial/parallel/parallelppdev.py
|
eec2104495335dd90f546039d63dbbeb5c306dc7
|
[] |
no_license
|
jrussell9000/NeuroScripts
|
93f53c7d38c1d51fdc0cf39096e0996daee887cf
|
e41558754bd36385f94934333cb39a6500abfd9f
|
refs/heads/master
| 2021-06-09T20:30:59.956137
| 2021-04-08T18:45:39
| 2021-04-08T18:45:39
| 151,635,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,750
|
py
|
#!/usr/bin/env python
# parallel port access using the ppdev driver
import sys
import struct
import fcntl
import os
#----
# Generated by h2py 0.1.1 from <linux/ppdev.h>,
# then cleaned up a bit by Michael P. Ashton and then a gain by chris ;-)
# Changes for Python2.2 support (c) September 2004 Alex.Perry@qm.com
# JMO: This part from /usr/include/asm-generic/ioctl.h
def sizeof(type): return struct.calcsize(type)
def _IOC(dir, type, nr, size): return int((dir << _IOC_DIRSHIFT ) | (type << _IOC_TYPESHIFT ) |\
(nr << _IOC_NRSHIFT ) | (size << _IOC_SIZESHIFT))
def _IO(type, nr): return _IOC(_IOC_NONE, type, nr, 0)
def _IOR(type,nr,size): return _IOC(_IOC_READ, type, nr, sizeof(size))
def _IOW(type,nr,size): return _IOC(_IOC_WRITE, type, nr, sizeof(size))
_IOC_SIZEBITS = 14
_IOC_SIZEMASK = (1 << _IOC_SIZEBITS ) - 1
#_IOC_SIZEMASK = (1L << _IOC_SIZEBITS ) - 1
_IOC_NRSHIFT = 0
_IOC_NRBITS = 8
_IOC_TYPESHIFT = _IOC_NRSHIFT + _IOC_NRBITS
_IOC_TYPEBITS = 8
_IOC_SIZESHIFT = _IOC_TYPESHIFT + _IOC_TYPEBITS
IOCSIZE_MASK = _IOC_SIZEMASK << _IOC_SIZESHIFT
IOCSIZE_SHIFT = _IOC_SIZESHIFT
# Python 2.2 uses a signed int for the ioctl() call, so ...
if sys.version_info < (2, 3):  # JMO 4/8/2009; 'L' suffixes dropped so the file parses under Python 3
    _IOC_WRITE = 1
    _IOC_READ = -2
    _IOC_INOUT = -1
else:
    _IOC_WRITE = 1
    _IOC_READ = 2
    _IOC_INOUT = 3
_IOC_DIRSHIFT = _IOC_SIZESHIFT + _IOC_SIZEBITS
IOC_INOUT = _IOC_INOUT << _IOC_DIRSHIFT
IOC_IN = _IOC_WRITE << _IOC_DIRSHIFT
IOC_OUT = _IOC_READ << _IOC_DIRSHIFT
_IOC_NONE = 0
PP_IOCTL = ord('p')
PPCLAIM = _IO(PP_IOCTL, 0x8b)
PPCLRIRQ = _IOR(PP_IOCTL, 0x93, 'i')
PPDATADIR = _IOW(PP_IOCTL, 0x90, 'i')
PPEXCL = _IO(PP_IOCTL, 0x8f)
PPFCONTROL = _IOW(PP_IOCTL, 0x8e, 'BB')
PPGETFLAGS = _IOR(PP_IOCTL, 0x9a, 'i')
PPGETMODE = _IOR(PP_IOCTL, 0x98, 'i')
PPGETMODES = _IOR(PP_IOCTL, 0x97, 'I')
PPGETPHASE = _IOR(PP_IOCTL, 0x99, 'i')
PPGETTIME = _IOR(PP_IOCTL, 0x95, 'll')
PPNEGOT = _IOW(PP_IOCTL, 0x91, 'i')
PPRCONTROL = _IOR(PP_IOCTL, 0x83, 'B')
PPRDATA = _IOR(PP_IOCTL, 0x85, 'B')
#'OBSOLETE__IOR' undefined in 'PPRECONTROL'
PPRELEASE = _IO(PP_IOCTL, 0x8c)
#'OBSOLETE__IOR' undefined in 'PPRFIFO'
PPRSTATUS = _IOR(PP_IOCTL, 0x81, 'B')
PPSETFLAGS = _IOW(PP_IOCTL, 0x9b, 'i')
PPSETMODE = _IOW(PP_IOCTL, 0x80, 'i')
PPSETPHASE = _IOW(PP_IOCTL, 0x94, 'i')
PPSETTIME = _IOW(PP_IOCTL, 0x96, 'll')
PPWCONTROL = _IOW(PP_IOCTL, 0x84, 'B')
PPWCTLONIRQ = _IOW(PP_IOCTL, 0x92, 'B')
PPWDATA = _IOW(PP_IOCTL, 0x86, 'B')
#'OBSOLETE__IOW' undefined in 'PPWECONTROL'
#'OBSOLETE__IOW' undefined in 'PPWFIFO'
#'OBSOLETE__IOW' undefined in 'PPWSTATUS'
PPYIELD = _IO(PP_IOCTL, 0x8d)
PP_FASTREAD = 1 << 3
PP_FASTWRITE = 1 << 2
PP_W91284PIC = 1 << 4
PP_FLAGMASK = PP_FASTWRITE | PP_FASTREAD | PP_W91284PIC
PP_MAJOR = 99
_ASMI386_IOCTL_H= None
_IOC_DIRBITS = 2
_IOC_DIRMASK = (1 << _IOC_DIRBITS) - 1
_IOC_NRMASK = (1 << _IOC_NRBITS) - 1
_IOC_TYPEMASK = (1 << _IOC_TYPEBITS ) - 1
def _IOC_DIR(nr): return (nr >> _IOC_DIRSHIFT) & _IOC_DIRMASK
def _IOC_NR(nr): return (nr >> _IOC_NRSHIFT) & _IOC_NRMASK
def _IOC_SIZE(nr): return (nr >> _IOC_SIZESHIFT) & _IOC_SIZEMASK
def _IOC_TYPE(nr): return (nr >> _IOC_TYPESHIFT) & _IOC_TYPEMASK
def _IOWR(type, nr, size): return _IOC(_IOC_READ | _IOC_WRITE, type, nr , sizeof(size))
__ELF__ = 1
__i386 = 1
__i386__ = 1
__linux = 1
__linux__ = 1
__unix = 1
__unix__ = 1
i386 = 1
linux = 1
unix = 1
#-------- Constants from <linux/parport.h>
PARPORT_CONTROL_STROBE = 0x1
PARPORT_CONTROL_AUTOFD = 0x2
PARPORT_CONTROL_INIT = 0x4
PARPORT_CONTROL_SELECT = 0x8
PARPORT_STATUS_ERROR = 8
PARPORT_STATUS_SELECT = 0x10
PARPORT_STATUS_PAPEROUT = 0x20
PARPORT_STATUS_ACK = 0x40
PARPORT_STATUS_BUSY = 0x80
IEEE1284_MODE_NIBBLE = 0
IEEE1284_MODE_BYTE = 1
IEEE1284_MODE_COMPAT = 1<<8
IEEE1284_MODE_BECP = 1<<9
IEEE1284_MODE_ECP = 1<<4
IEEE1284_MODE_ECPRLE = IEEE1284_MODE_ECP | (1<<5)
IEEE1284_MODE_ECPSWE = 1<<10
IEEE1284_MODE_EPP = 1<<6
IEEE1284_MODE_EPPSL = 1<<11
IEEE1284_MODE_EPPSWE = 1<<12
IEEE1284_DEVICEID = 1<<2
IEEE1284_EXT_LINK = 1<<14
IEEE1284_ADDR = 1<<13
IEEE1284_DATA = 0
PARPORT_EPP_FAST = 1
PARPORT_W91284PIC = 2
#----
mode_codes = {IEEE1284_MODE_COMPAT: 'compatible', \
IEEE1284_MODE_NIBBLE: 'nibble', \
IEEE1284_MODE_BYTE: 'byte', \
IEEE1284_MODE_EPP: 'EPP', \
IEEE1284_MODE_ECP: 'ECP'} #JMO
code_modes = {'compatible': IEEE1284_MODE_COMPAT, \
'nibble': IEEE1284_MODE_NIBBLE, \
'byte': IEEE1284_MODE_BYTE, \
'epp': IEEE1284_MODE_EPP, \
'ecp': IEEE1284_MODE_ECP} #JMO
class Parallel:
"""Class for controlling the pins on a parallel port
This class provides bit-level access to the pins on a PC parallel
port. It is primarily designed for programs which must control
special circuitry - most often non-IEEE-1284-compliant devices
other than printers - using 'bit-banging' techniques.
The current implementation makes ioctl() calls to the Linux ppdev
driver, using the Python fcntl library. It might be rewritten in
C for extra speed. This particular implementation is written for
Linux; all of the upper-level calls can be ported to Windows as
well.
On Linux, the ppdev device driver, from the Linux 2.4 parallel
port subsystem, is used to control the parallel port hardware.
This driver must be made available from a kernel compile. The
option is called "Support user-space parallel-port drivers". When
using the module, be sure to unload the lp module first: usually
the lp module claims exclusive access to the parallel port, and if
it is loaded, this class will fail to open the parallel port file,
and throw an exception.
The primary source of information about the Linux 2.4 parallel
port subsystem is Tim Waugh's documentation, the source for which
is available in the kernel tree. This document (called,
appropriately enough, "The Linux 2.4 Parallel Port Subsystem"),
thoroughly describes the parallel port drivers and how to use
them.
This class provides a method for each of the ioctls supported by
the ppdev module. The ioctl methods are named, in uppercase, the
same as the ioctls they invoke. The documentation for these
methods was taken directly from the documentation for their
corresponding ioctl, and modified only where necessary.
Unless you have special reason to use the Linux ioctls, you should
use instead the upper-level functions, which are named in
lowerCase fashion and should be portable between Linux and
Windows. This way, any code you write for this class will (or
should) also work with the Windows version of this class.
"""
    def __init__(self, port = '/dev/parport0'):
# if type(port) == type(""):
if isinstance(port, str):
self.device = port
else:
self.device = "/dev/parport%d" % port
# self._fd = os.open(self.device, os.O_RDWR)
self._fd = os.open(self.device, os.O_RDONLY)
# self.PPEXCL() # JMO
self.PPCLAIM() # JMO
self.setDataDir('out')
# self.setData(0)
    def Close(self):
        self.__del__()
    def __del__(self):
        # guard against double release when Close() is followed by GC
        if self._fd is not None:
            self.PPRELEASE()
            os.close(self._fd)
            self._fd = None
def timevalToFloat(self, timeval):
t=struct.unpack('ll', timeval)
return t[0] + (t[1]/1000000.0)
    def floatToTimeval(self, time):
        sec = int(time)
        usec = int((time - sec) * 1000000.0)  # microseconds are the fractional part only
        return struct.pack('ll', sec, usec)
def PPCLAIM(self):
"""
Claims access to the port. As a user-land device driver
writer, you will need to do this before you are able to
actually change the state of the parallel port in any
way. Note that some operations only affect the ppdev driver
and not the port, such as PPSETMODE; they can be performed
while access to the port is not claimed.
"""
fcntl.ioctl(self._fd, PPCLAIM)
def PPEXCL(self):
"""
Instructs the kernel driver to forbid any sharing of the port
with other drivers, i.e. it requests exclusivity. The PPEXCL
command is only valid when the port is not already claimed for
use, and it may mean that the next PPCLAIM ioctl will fail:
some other driver may already have registered itself on that
port.
Most device drivers don't need exclusive access to the
port. It's only provided in case it is really needed, for
example for devices where access to the port is required for
extensive periods of time (many seconds).
Note that the PPEXCL ioctl doesn't actually claim the port
there and then---action is deferred until the PPCLAIM ioctl is
performed.
"""
fcntl.ioctl(self._fd, PPEXCL)
def PPRELEASE(self):
"""
Releases the port. Releasing the port undoes the effect of
claiming the port. It allows other device drivers to talk to
their devices (assuming that there are any).
"""
fcntl.ioctl(self._fd, PPRELEASE)
def PPYIELD(self):
"""
Yields the port to another driver. This ioctl is a kind of
short-hand for releasing the port and immediately reclaiming
it. It gives other drivers a chance to talk to their devices,
but afterwards claims the port back. An example of using this
would be in a user-land printer driver: once a few characters
have been written we could give the port to another device
driver for a while, but if we still have characters to send to
the printer we would want the port back as soon as possible.
It is important not to claim the parallel port for too long,
as other device drivers will have no time to service their
devices. If your device does not allow for parallel port
sharing at all, it is better to claim the parallel port
exclusively (see PPEXCL).
"""
fcntl.ioctl(self._fd, PPYIELD)
def PPNEGOT(self, mode):
"""
Performs IEEE 1284 negotiation into a particular
mode. Briefly, negotiation is the method by which the host and
the peripheral decide on a protocol to use when transferring
data.
An IEEE 1284 compliant device will start out in compatibility
mode, and then the host can negotiate to another mode (such as
ECP).
The 'mode' parameter should be one of the following constants
from PPDEV:
- IEEE1284_MODE_COMPAT
- IEEE1284_MODE_NIBBLE
- IEEE1284_MODE_BYTE
- IEEE1284_MODE_EPP
- IEEE1284_MODE_ECP
The PPNEGOT ioctl actually does two things: it performs the
on-the-wire negotiation, and it sets the behaviour of
subsequent read/write calls so that they use that mode (but
see PPSETMODE).
"""
fcntl.ioctl(self._fd, PPNEGOT, struct.pack('i', mode))
def PPSETMODE(self, mode):
"""
Sets which IEEE 1284 protocol to use for the read and write
calls.
The 'mode' parameter should be one of the following constants
from PPDEV:
- IEEE1284_MODE_COMPAT
- IEEE1284_MODE_NIBBLE
- IEEE1284_MODE_BYTE
- IEEE1284_MODE_EPP
- IEEE1284_MODE_ECP
"""
        # accept either a mode name string (a key of code_modes) or a raw IEEE1284_MODE_* constant
        mode = code_modes.get(mode, mode)
        fcntl.ioctl(self._fd, PPSETMODE, struct.pack('i', mode))
def PPGETMODE(self):
"""
Retrieves the IEEE 1284 mode being used for read and
write. The return value is one of the following constants
from PPDEV:
- IEEE1284_MODE_COMPAT
- IEEE1284_MODE_NIBBLE
- IEEE1284_MODE_BYTE
- IEEE1284_MODE_EPP
- IEEE1284_MODE_ECP
"""
# modes = {256: 'IEEE 1284 compatible', 0: 'IEEE 1284 nibble', 1: 'IEEE 1284 byte', 64: 'IEEE 1284 EPP', 16: 'IEEE 1284 ECP'} #JMO
ret = struct.pack('i', 0)
ret = fcntl.ioctl(self._fd, PPGETMODE, ret)
ret = struct.unpack('i', ret)[0]
# return struct.unpack('i', ret)[0]
return mode_codes.get(ret, 'Unavailable')
def PPGETTIME(self):
"""
Retrieves the time-out value. The read and write calls will
time out if the peripheral doesn't respond quickly enough. The
PPGETTIME ioctl retrieves the length of time that the
peripheral is allowed to have before giving up.
Returns the timeout value in seconds as a floating-point value.
"""
ret = struct.pack('ll', 0, 0)
ret = fcntl.ioctl(self._fd, PPGETTIME, ret)
return self.timevalToFloat(ret)
def PPSETTIME(self, time):
"""
Sets the time-out (see PPGETTIME for more information).
'time' is the new time-out in seconds; floating-point values
are acceptable.
"""
        fcntl.ioctl(self._fd, PPSETTIME, self.floatToTimeval(time))
def PPGETMODES(self):
"""
Retrieves the capabilities of the hardware (i.e. the modes
field of the parport structure).
"""
raise NotImplementedError
def PPSETFLAGS(self):
"""
Sets flags on the ppdev device which can affect future I/O
operations. Available flags are:
- PP_FASTWRITE
- PP_FASTREAD
- PP_W91284PIC
"""
raise NotImplementedError
def PPWCONTROL(self, lines):
"""
Sets the control lines. The 'lines' parameter is a bitwise OR
of the following constants from PPDEV:
- PARPORT_CONTROL_STROBE
- PARPORT_CONTROL_AUTOFD
- PARPORT_CONTROL_INIT
- PARPORT_CONTROL_SELECT
"""
fcntl.ioctl(self._fd, PPWCONTROL, struct.pack('B', lines))
def PPRCONTROL(self):
"""
Returns the last value written to the control register, in the
form of an integer, for which each bit corresponds to a control
line (although some are unused).
This doesn't actually touch the hardware; the last value
written is remembered in software. This is because some
parallel port hardware does not offer read access to the
control register.
The control lines bits are defined by the following constants
from PPDEV:
- PARPORT_CONTROL_STROBE
- PARPORT_CONTROL_AUTOFD
- PARPORT_CONTROL_SELECT
- PARPORT_CONTROL_INIT
"""
ret = struct.pack('B',0)
ret = fcntl.ioctl(self._fd, PPRCONTROL, ret)
return struct.unpack('B', ret)[0]
def PPFCONTROL(self, mask, val):
"""
Frobs the control lines. Since a common operation is to change
one of the control signals while leaving the others alone, it
would be quite inefficient for the user-land driver to have to
use PPRCONTROL, make the change, and then use PPWCONTROL. Of
course, each driver could remember what state the control
lines are supposed to be in (they are never changed by
anything else), but in order to provide PPRCONTROL, ppdev must
remember the state of the control lines anyway.
The PPFCONTROL ioctl is for "frobbing" control lines, and is
like PPWCONTROL but acts on a restricted set of control
lines. The ioctl parameter is a pointer to a struct
ppdev_frob_struct:
struct ppdev_frob_struct {
unsigned char mask;
unsigned char val;
};
The mask and val fields are bitwise ORs of control line names
(such as in PPWCONTROL). The operation performed by PPFCONTROL
is:
new_ctr = (old_ctr & ~mask) | val
In other words, the signals named in mask are set to the
values in val.
"""
fcntl.ioctl(self._fd, PPFCONTROL, struct.pack('BB', mask, val))
def PPRSTATUS(self):
"""
Returns an unsigned char containing bits set for each status
line that is set (for instance, PARPORT_STATUS_BUSY). The
ioctl parameter should be a pointer to an unsigned char.
"""
ret = struct.pack('B',0)
# ret = struct.pack('b', 0)
# print 100,self._fd, type(PPRSTATUS), 'PPRSTATUS: 0x%08x' % PPRSTATUS
# PPRSTATUS = 0x80017081;
# PPRSTATUS = struct.pack('I',PPRSTATUS)
ret = fcntl.ioctl(self._fd, PPRSTATUS, ret)
# print 100,struct.unpack('B', ret)
return struct.unpack('B', ret)[0]
def PPDATADIR(self, out):
"""
Controls the data line drivers. Normally the computer's
parallel port will drive the data lines, but for byte-wide
transfers from the peripheral to the host it is useful to turn
off those drivers and let the peripheral drive the
signals. (If the drivers on the computer's parallel port are
left on when this happens, the port might be damaged.)
This is only needed in conjunction with PPWDATA or PPRDATA.
The 'out' parameter indicates the desired port direction. If
'out' is true or non-zero, the drivers are turned on (forward
direction); otherwise, the drivers are turned off (reverse
direction).
"""
if out:
msg=struct.pack('i',0)
else:
msg=struct.pack('i',1)
fcntl.ioctl(self._fd, PPDATADIR, msg)
def PPWDATA(self, byte):
"""
Sets the data lines (if in forward mode). The ioctl parameter
is a pointer to an unsigned char.
"""
fcntl.ioctl(self._fd, PPWDATA,struct.pack('B',byte))
def PPRDATA(self):
"""
Reads the data lines (if in reverse mode). The ioctl parameter
is a pointer to an unsigned char.
"""
ret=struct.pack('B',0)
ret=fcntl.ioctl(self._fd, PPRDATA,ret)
return struct.unpack('B',ret)[0]
def PPCLRIRQ(self):
"""
Returns the current interrupt count, and clears it. The ppdev
driver keeps a count of interrupts as they are triggered.
"""
ret=struct.pack('i',0)
ret=fcntl.ioctl(self._fd, PPCLRIRQ,ret)
return struct.unpack('i',ret)[0]
def PPWCTLONIRQ(self, lines):
"""
Set a trigger response. Afterwards when an interrupt is
triggered, the interrupt handler will set the control lines as
requested. The ioctl parameter is a pointer to an unsigned
char, which is interpreted in the same way as for PPWCONTROL.
The reason for this ioctl is simply speed. Without this ioctl,
responding to an interrupt would start in the interrupt
handler, switch context to the user-land driver via poll or
select, and then switch context back to the kernel in order to
handle PPWCONTROL. Doing the whole lot in the interrupt
handler is a lot faster.
"""
fcntl.ioctl(self._fd, PPWCTLONIRQ,struct.pack('B',lines))
#data lines
## def data(self):
## """Returns the states of the data bus line drivers (pins 2-9)"""
## return self._data
def setDataDir(self,direction):
"""Activates or deactivates the data bus line drivers (pins 2-9)"""
if direction == 'out':
idir = 1
elif direction == 'in':
idir = 0
else:
raise IOError('Invalid argument to setDataDir: %s' % direction)
self._dataDir = idir
self.PPDATADIR(self._dataDir)
def dataDir(self):
"""Returns true if the data bus line drivers are on (pins 2-9)"""
return self._dataDir
#control lines
## def strobe(self):
## """Returns the state of the nStrobe output (pin 1)"""
## return (self.PPRCONTROL()&PARPORT_CONTROL_STROBE)==0
def setDataStrobe(self, level):
"""Sets the state of the nStrobe output (pin 1)"""
if level:
self.PPFCONTROL(PARPORT_CONTROL_STROBE, 0)
else:
self.PPFCONTROL(PARPORT_CONTROL_STROBE, PARPORT_CONTROL_STROBE)
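        # Note (added commentary): nStrobe, nAutoFd and nSelectIn are
        # inverted by the parallel-port hardware, so driving the pin high
        # means clearing the corresponding control bit; nInit (handled
        # below) is the one control line that is not inverted.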
## def autoFd(self):
## """Returns the state of the nAutoFd output (pin 14)"""
## return (self.PPRCONTROL()&PARPORT_CONTROL_AUTOFD)==0
def setAutoFeed(self, level):
"""Sets the state of the nAutoFd output (pin 14)"""
if level:
self.PPFCONTROL(PARPORT_CONTROL_AUTOFD, 0)
else:
self.PPFCONTROL(PARPORT_CONTROL_AUTOFD, PARPORT_CONTROL_AUTOFD)
## def init(self):
## """Returns the state of the nInit output (pin 16)"""
## return (self.PPRCONTROL()&PARPORT_CONTROL_INIT)!=0
def setInitOut(self, level):
"""Sets the state of the nInit output (pin 16)"""
if level:
self.PPFCONTROL(PARPORT_CONTROL_INIT, PARPORT_CONTROL_INIT)
else:
self.PPFCONTROL(PARPORT_CONTROL_INIT, 0)
## def selectIn(self):
## """Returns the state of the nSelectIn output (pin 17)"""
## return (self.PPRCONTROL()&PARPORT_CONTROL_SELECT)==0
    def setSelect(self, level):
"""Sets the state of the nSelectIn output (pin 17)"""
if level:
self.PPFCONTROL(PARPORT_CONTROL_SELECT, 0)
else:
self.PPFCONTROL(PARPORT_CONTROL_SELECT, PARPORT_CONTROL_SELECT)
    def setData(self, d):
        """Sets the states of the data bus line drivers (pins 2-9)"""
        self._data = d
        return self.PPWDATA(d)
#status lines
def getInError(self):
"""Returns the level on the nFault pin (15)"""
return (self.PPRSTATUS() & PARPORT_STATUS_ERROR) != 0
def getInSelected(self):
"""Returns the level on the Select pin (13)"""
return (self.PPRSTATUS() & PARPORT_STATUS_SELECT) != 0
def getInPaperOut(self):
"""Returns the level on the paperOut pin (12)"""
return (self.PPRSTATUS() & PARPORT_STATUS_PAPEROUT) != 0
def getInAcknowledge(self):
"""Returns the level on the nAck pin (10)"""
return (self.PPRSTATUS() & PARPORT_STATUS_ACK) != 0
def getInBusy(self):
"""Returns the level on the Busy pin (11)"""
return (self.PPRSTATUS() & PARPORT_STATUS_BUSY) != 0
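# --- Usage sketch (added commentary, not in the original file) ---
# A minimal, hypothetical example of driving the wrapper class defined above.
# The class name `ParallelPort` and its constructor are assumptions for
# illustration; the actual class name and the open/claim logic appear earlier
# in this file.
#
#     port = ParallelPort()       # hypothetical: opens and claims /dev/parport0
#     port.setDataDir('out')      # turn on the data line drivers (pins 2-9)
#     port.setData(0x55)          # drive 01010101 onto the data bus
#     port.setDataStrobe(1)       # raise nStrobe (pin 1)
#     while port.getInBusy():     # poll the Busy status line (pin 11)
#         pass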
|
[
"jrussell9000@gmail.com"
] |
jrussell9000@gmail.com
|
fe97a04343d7b6f40ca0fc14e675149c11498d48
|
5a7c2a3b742b1660b90c6bcf03dca03aff11ccab
|
/dj_wiki/wsgi.py
|
b5231f07e35ce397f19ac035ec6a6875a1599e7f
|
[
"MIT"
] |
permissive
|
atlasfoo/djwiki_p1
|
16bbfa47579527ae7b61ae466aac9afea5714be1
|
3d3500d5f4055904aef484cb33d991fdb70e1580
|
refs/heads/master
| 2023-03-28T10:21:55.487728
| 2021-03-29T03:58:38
| 2021-03-29T03:58:38
| 298,748,142
| 0
| 0
|
MIT
| 2021-03-29T03:58:39
| 2020-09-26T05:58:48
|
Python
|
UTF-8
|
Python
| false
| false
| 391
|
py
|
"""
WSGI config for dj_wiki project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj_wiki.settings')
application = get_wsgi_application()
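# Deployment note (added commentary, not in the original file): any WSGI
# server can be pointed at the `application` object above, for example:
#
#     gunicorn dj_wiki.wsgi:application
#
# Django's development server loads the same DJANGO_SETTINGS_MODULE:
#
#     python manage.py runserver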
|
[
"jf_mejiar547@live.com"
] |
jf_mejiar547@live.com
|
6a8b5a661bc5de5d995d5ebc3032fec9c32bb3bd
|
3d4fcc7cbfafc4aaebea8e08d3a084ed0f0d06a1
|
/Programme_1/Creation_donnees/MIDI/schumm-3fMidiSimple.py
|
3b960ac7c7a4d5f017387e01b32ea2cd76707b76
|
[] |
no_license
|
XgLsuLzRMy/Composition-Musicale-par-Reseau-de-Neurones
|
0421d540efe2d9dc522346810f6237c5f24fa3bf
|
518a6485e2ad44e8c7fbae93c94a9dc767454a83
|
refs/heads/master
| 2021-09-03T20:43:01.218089
| 2018-01-11T20:02:00
| 2018-01-11T20:02:00
| 106,448,584
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 117,270
|
py
|
import midi
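# Notes on the data below (added commentary, not in the original file):
# - resolution=480 means 480 ticks per quarter note; each tick= value is a
#   delta time relative to the previous event on the same track.
# - The track uses NoteOn events only: a NoteOnEvent whose second data byte
#   (the velocity) is 0 acts as a note-off for that pitch, which is standard
#   MIDI shorthand.
# - data=[pitch, velocity], with pitch given as a MIDI note number
#   (60 = middle C).
# Assuming the vishnubob/python-midi package, the finished pattern can be
# written to disk with midi.write_midifile("schumm-3f.mid", pattern).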
pattern = midi.Pattern(format=1, resolution=480, tracks=\
[midi.Track(\
[ midi.NoteOnEvent(tick=1860, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 56]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 54]),
midi.NoteOnEvent(tick=420, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 58]),
midi.NoteOnEvent(tick=420, channel=0, data=[78, 43]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[65, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 43]),
midi.NoteOnEvent(tick=400, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 54]),
midi.NoteOnEvent(tick=420, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 58]),
midi.NoteOnEvent(tick=420, channel=0, data=[78, 43]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[65, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=12, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[68, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 51]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[75, 55]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 52]),
midi.NoteOnEvent(tick=12, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 52]),
midi.NoteOnEvent(tick=360, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=30, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=30, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 54]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 54]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=60, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 50]),
midi.NoteOnEvent(tick=12, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 51]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[75, 55]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 52]),
midi.NoteOnEvent(tick=12, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 52]),
midi.NoteOnEvent(tick=360, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=30, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=30, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 54]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 54]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=60, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 45]),
midi.NoteOnEvent(tick=60, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 50]),
midi.NoteOnEvent(tick=12, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=120, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=420, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 56]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 66]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 67]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 63]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 63]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=420, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 57]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 73]),
midi.NoteOnEvent(tick=240, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 60]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[75, 62]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 58]),
midi.NoteOnEvent(tick=12, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=12, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[61, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=12, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=12, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=468, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=420, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 56]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 54]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 66]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 67]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 63]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 63]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 64]),
midi.NoteOnEvent(tick=420, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 57]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 73]),
midi.NoteOnEvent(tick=240, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 60]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[75, 62]),
midi.NoteOnEvent(tick=12, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 58]),
midi.NoteOnEvent(tick=12, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 50]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[60, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=12, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=228, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=12, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=12, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[61, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 44]),
midi.NoteOnEvent(tick=12, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=12, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=468, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=420, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 48]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 45]),
midi.NoteOnEvent(tick=420, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 44]),
midi.NoteOnEvent(tick=420, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 43]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=420, channel=0, data=[78, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 43]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 37]),
midi.NoteOnEvent(tick=120, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 37]),
midi.NoteOnEvent(tick=120, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 33]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 31]),
midi.NoteOnEvent(tick=12, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 31]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 31]),
midi.NoteOnEvent(tick=12, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=108, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 30]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 34]),
midi.NoteOnEvent(tick=400, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[80, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 37]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 37]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 43]),
midi.NoteOnEvent(tick=480, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 39]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 43]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 36]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=420, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 30]),
midi.NoteOnEvent(tick=60, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 32]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[80, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[80, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 34]),
midi.NoteOnEvent(tick=60, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=180, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=180, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 42]),
midi.NoteOnEvent(tick=420, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 38]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 32]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 39]),
midi.NoteOnEvent(tick=420, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 37]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 31]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 31]),
midi.NoteOnEvent(tick=420, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 30]),
midi.NoteOnEvent(tick=60, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 31]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 31]),
midi.NoteOnEvent(tick=420, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 28]),
midi.NoteOnEvent(tick=60, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 26]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 24]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 19]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 18]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(
[ midi.NoteOnEvent(tick=0, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 37]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 37]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[49, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 32]),
midi.NoteOnEvent(tick=183, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 30]),
midi.NoteOnEvent(tick=134, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=39, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=86, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 36]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 34]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[49, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 32]),
midi.NoteOnEvent(tick=190, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 20]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 29]),
midi.NoteOnEvent(tick=148, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 32]),
midi.NoteOnEvent(tick=49, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 30]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 34]),
midi.NoteOnEvent(tick=208, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 24]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=183, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 29]),
midi.NoteOnEvent(tick=75, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[49, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[51, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[63, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[51, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=169, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 28]),
midi.NoteOnEvent(tick=56, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 30]),
midi.NoteOnEvent(tick=92, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 28]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[68, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 23]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 34]),
midi.NoteOnEvent(tick=233, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 24]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 32]),
midi.NoteOnEvent(tick=197, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 25]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 29]),
midi.NoteOnEvent(tick=76, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[49, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 23]),
midi.NoteOnEvent(tick=90, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[51, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[63, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[51, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 22]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[56, 29]),
midi.NoteOnEvent(tick=141, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 28]),
midi.NoteOnEvent(tick=70, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 20]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=91, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[45, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[37, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[37, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[49, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 27]),
midi.NoteOnEvent(tick=169, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[44, 27]),
midi.NoteOnEvent(tick=99, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 32]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 34]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[45, 45]),
midi.NoteOnEvent(tick=120, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[37, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[37, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[49, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 33]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[56, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 29]),
midi.NoteOnEvent(tick=120, channel=0, data=[39, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[44, 27]),
midi.NoteOnEvent(tick=190, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[44, 27]),
midi.NoteOnEvent(tick=173, channel=0, data=[44, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 18]),
midi.NoteOnEvent(tick=180, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[56, 0]),
midi.NoteOnEvent(tick=60, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=65, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 29]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[64, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[53, 30]),
midi.NoteOnEvent(tick=90, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[65, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=90, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[49, 27]),
midi.NoteOnEvent(tick=183, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 27]),
midi.NoteOnEvent(tick=165, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 27]),
midi.NoteOnEvent(tick=162, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 16]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 16]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 25]),
midi.NoteOnEvent(tick=141, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 16]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 25]),
midi.NoteOnEvent(tick=105, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 19]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 16]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[61, 25]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 20]),
midi.NoteOnEvent(tick=120, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[47, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 27]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 23]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 24]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=208, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 27]),
midi.NoteOnEvent(tick=240, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[46, 21]),
midi.NoteOnEvent(tick=90, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[61, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 15]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[46, 21]),
midi.NoteOnEvent(tick=90, channel=0, data=[46, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[61, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 18]),
midi.NoteOnEvent(tick=90, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=150, channel=0, data=[47, 27]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=194, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 21]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 18]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=97, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[53, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 19]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=194, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=194, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 23]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 19]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=104, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 27]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 22]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 18]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=163, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=137, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=80, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[54, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 15]),
midi.NoteOnEvent(tick=120, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=187, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 15]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 18]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 15]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=120, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 21]),
midi.NoteOnEvent(tick=92, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 17]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 18]),
midi.NoteOnEvent(tick=70, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 13]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 10]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 16]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 13]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 10]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 14]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 10]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 9]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 13]),
midi.NoteOnEvent(tick=240, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 10]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 9]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 10]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 9]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 9]),
midi.NoteOnEvent(tick=960, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.EndOfTrackEvent(tick=0, data=[])])])
midi.write_midifile("creationMidi.mid", pattern)
|
[
"jeremy.catelain@insa-rouen.fr"
] |
jeremy.catelain@insa-rouen.fr
|
1b0636fcd3958c905c9dc3f2ad157ad20a18c1c8
|
029aa4fa6217dbb239037dec8f2e64f5b94795d0
|
/Python算法指南/集合、列表、字符串/38_两数之和I_两数之和的应用_背.py
|
9f582ae4669e34cd92b44ea088f6f44037c21e74
|
[] |
no_license
|
tonyyo/algorithm
|
5a3f0bd4395a75703f9ee84b01e42a74283a5de9
|
60dd5281e7ce4dfb603b795aa194a67ff867caf6
|
refs/heads/master
| 2022-12-14T16:04:46.723771
| 2020-09-23T06:59:33
| 2020-09-23T06:59:33
| 270,216,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 857
|
py
|
class Solution(object):
    def twoSum(self, nums, target):
        lookup = {}  # maps each value to the index where it was last seen
        ans = []
        for j in range(len(nums)):
            if target - nums[j] in lookup:
                ans.append([lookup[target - nums[j]], j])
            lookup[nums[j]] = j
        return ans

    def twoSum2(self, nums, target):
        lookup = {}  # hash map
        ans = []  # stores every matching index pair
        for i in range(len(nums)):
            if target - nums[i] in lookup:
                ans.append([lookup[target - nums[i]], i])
            lookup[nums[i]] = i
        return ans

if __name__ == '__main__':
    temp = Solution()
    nums_list = [5, 4, 3, 4, 11]
    target = 8
    print("Input: " + str(nums_list) + " " + str(target))
    print("Output: " + str(temp.twoSum(nums_list, target)))
    print("Output: " + str(temp.twoSum2(nums_list, target)))
|
[
"1325338208@qq.com"
] |
1325338208@qq.com
|
40fa9cbf1c99786c7eb3eeeabb6952fa105e9624
|
a091e824e1bedae467a9ebe15d9c19f3a1659dfb
|
/wifiandpic.py
|
87f4fcb613d41dc9c4077f82586ff92c144673ec
|
[] |
no_license
|
xhx509/Getmatp
|
8ce60872add1d78bc67522f46dd413d4d6b83d24
|
c1d673292bd698ca635f0782363632d1721a5002
|
refs/heads/master
| 2021-04-26T21:55:00.060790
| 2020-09-09T16:05:49
| 2020-09-09T16:05:49
| 124,176,306
| 0
| 1
| null | 2020-03-30T23:50:13
| 2018-03-07T03:54:53
|
Python
|
UTF-8
|
Python
| false
| false
| 24,781
|
py
|
# set of functions used in generating plots in the wheelhouse
# runs in parallel with "getmatp.py", which calls "wxpage.py",
# plots the record, and generates an output file;
# now includes the "getclim" function to post climatology
# updates on
import glob
import ftplib
import shutil
from shutil import copyfile
import pytz
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import datetime
from datetime import datetime as dt
from pylab import *
import pandas as pd
from pandas import *
import time
import os
import numpy as np
from gps import *
# avoid "from time import *" here: it rebinds the name "time" to the time()
# function and shadows the time module used throughout this file
import threading
def parse(datet):
    from datetime import datetime
    dt = datetime.strptime(datet, '%Y-%m-%dT%H:%M:%S')
    return dt

def parse2(datet):
    from datetime import datetime
    dt = datetime.strptime(datet, '%Y-%m-%d %H:%M:%S')
    return dt

def gmt_to_eastern(times_gmt):
    import datetime
    times = []
    eastern = pytz.timezone('US/Eastern')
    gmt = pytz.timezone('Etc/GMT')
    for i in range(len(times_gmt)):
        date = datetime.datetime.strptime(str(times_gmt[i]), '%Y-%m-%d %H:%M:%S')
        date_gmt = gmt.localize(date)
        easterndate = date_gmt.astimezone(eastern)
        times.append(easterndate)
    return times
def dm2dd(lat, lon):
    # converts lat, lon from degrees-decimal-minutes (ddmm.m) to decimal degrees;
    # western longitudes are returned negative regardless of the input sign
    # (the original double-negated negative inputs, flipping their sign back)
    (a, b) = divmod(float(lat), 100.)
    lat_value = int(a) + float(b) / 60.
    (c, d) = divmod(abs(float(lon)), 100.)
    lon_value = int(c) + float(d) / 60.
    return lat_value, -lon_value
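# Quick worked example of the conversion above (hypothetical coordinates):
# dm2dd(4130.5, 7045.2) reads 41 deg 30.5 min N, 70 deg 45.2 min W and returns
# roughly (41.5083, -70.7533): minutes/60 are added to the whole degrees, and
# the longitude sign is flipped so western longitudes come out negative.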
def c2f(c):
    # convert Celsius to Fahrenheit, e.g. c2f(10.0) -> 50.0
    f = c * 1.8 + 32
    return f
def getclim(yrday=str(int(dt.now().strftime('%j'))), var='Bottom_Temperature/BT_'):
    # gets climatology of Bottom_Temperature, Surface_Temperature, Bottom_Salinity, or Surface_Salinity
    # as calculated by Chris Melrose from 30+ years of NEFSC CTD data on the NE Shelf, provided to JiM in May 2018,
    # where "lat1", "lon1", and "yrday" are the position and yearday of interest (defaulting to today)
    # and "var" is the variable of interest (defaulting to Bottom_Temperature)
    # inputdir='/net/data5/jmanning/clim/' # hardcoded directory name where you need to explode the "Data for Manning.zip"
    # assumes an individual file is stored in the "<inputdir>/<var>" directory for each yearday
    inputdir_csv = '/home/pi/Desktop/towifi/'
    inputdir = '/home/pi/clim/'  # hardcoded directory name
    dflat = pd.read_csv(inputdir + 'LatGrid.csv', header=None)
    dflon = pd.read_csv(inputdir + 'LonGrid.csv', header=None)
    lat = np.array(dflat[0])  # gets the first col (35 to 45)
    lon = np.array(dflon.ix[0])  # gets the first row (-75 to -65)
    clim = pd.read_csv(inputdir + var + yrday + '.csv', header=None)  # gets bottom temp for this day of year
    files = glob.glob(inputdir_csv + '*.csv')  # gets all the csv files in the towifi directory
    files.sort(key=os.path.getmtime)
    dfcsv = pd.read_csv(files[-1], sep=',', skiprows=8)
    [lat1, lon1] = dm2dd(float(dfcsv['lat'][0]), float(dfcsv['lon'][0]))
    idlat = np.abs(lat - lat1).argmin()  # finds the nearest lat to input lat1
    idlon = np.abs(lon - lon1).argmin()  # finds the nearest lon to input lon1
    return clim[idlon][idlat]
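# The nearest-grid-point lookup above boils down to an argmin over absolute
# differences; a tiny illustration (values made up, not the climatology grid):
# np.abs(np.array([35., 36., 37.]) - 36.2).argmin() evaluates to 1, so a boat
# at latitude 36.2 is matched to grid row 1 (latitude 36.0).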
def create_pic():
    tit = 'Temperature and Angle'
    if not os.path.exists('/home/pi/Desktop/Pictures'):
        os.makedirs('/home/pi/Desktop/Pictures')
    if not os.path.exists('../uploaded_files'):
        os.makedirs('../uploaded_files')
    n = 0
    try:
        files = []
        files.extend(sorted(glob.glob('/home/pi/Desktop/towifi/*T.txt')))
        if not os.path.exists('../uploaded_files/mypicfile.dat'):
            open('../uploaded_files/mypicfile.dat', 'w').close()
        #print files
        with open('../uploaded_files/mypicfile.dat', 'r') as f:
            content = f.readlines()
        upfiles = [line.rstrip('\n') for line in open('../uploaded_files/mypicfile.dat', 'r')]
        dif_data = list(set(files) - set(upfiles))
        if dif_data == []:
            print 'no new file was found'
            time.sleep(15)
            return  # nothing new to plot; falling through here would index an empty list
        ##################################
        ##################################
        dif_data.sort(key=os.path.getmtime)
        fn = dif_data[-1]
        print 'fn: ' + fn
        if 3 > 2:
            fn2 = fn.split(')')[0] + ')_MA.txt'
            print fn
            print fn2
            if not os.path.exists('/home/pi/Desktop/Pictures/' + fn.split('(')[1].split('_')[0]):
                os.makedirs('/home/pi/Desktop/Pictures/' + fn.split('(')[1].split('_')[0])
            df = pd.read_csv(fn, sep=',', skiprows=0, parse_dates={'datet': [0]}, index_col='datet', date_parser=parse)  # create a new DatetimeIndex
            df2 = pd.read_csv(fn2, sep=',', skiprows=0, parse_dates={'datet': [0]}, index_col='datet', date_parser=parse)
            df['yd'] = df.index.dayofyear + df.index.hour / 24. + df.index.minute / 60. / 24. + df.index.second / 60 / 60. / 24. - 1.0  # creates a yrday0 field
            df2['yd'] = df2.index.dayofyear + df2.index.hour / 24. + df2.index.minute / 60. / 24. + df2.index.second / 60 / 60. / 24. - 1.0
            print len(df2), len(df)
            index_good_start, index_good_end = 0, -1  # sensible defaults if no "good" angle window is found
            try:
                index_good = np.where(abs(df2['Az (g)']) < 2)  # Attention: to actually filter on angle, lower this threshold below 1.
                print index_good[0][3], index_good[0][-3]
                index_good_start = index_good[0][3]
                index_good_end = index_good[0][-3]
            except:
                print "no good data"
            #df.rename(index=str, columns={"Temperature (C)": "Temperature"})  # change name
            meantemp = round(np.mean(df['Temperature (C)'][index_good_start:index_good_end]), 2)
            fig = plt.figure()
            ax1 = fig.add_subplot(211)
            ax2 = fig.add_subplot(212)
            ax2.plot(df2.index, df2['Az (g)'], 'b', label='Angle')
            #ax2.plot(df2.index[index_good_start:index_good_end], df2['Az (g)'][index_good_start:index_good_end], 'red', linewidth=4, label='in the water')
            ax2.legend()
            ax1.plot(df.index, df['Temperature (C)'], 'b')
            #ax1.plot(df.index[index_good_start:index_good_end], df['Temperature (C)'][index_good_start:index_good_end], 'red', linewidth=4, label='in the water')
            ax1.set_ylabel('Temperature (Celsius)')
            ax1.legend(['temp', 'in the water'])
            try:
                if max(df.index) - min(df.index) > Timedelta('0 days 04:00:00'):
                    ax1.xaxis.set_major_locator(dates.HourLocator(interval=(max(df.index) - min(df.index)).seconds / 3600 / 6))  # for hourly plot
                    ax1.xaxis.set_major_formatter(dates.DateFormatter('%D %H:%M'))
                    ax2.xaxis.set_major_formatter(dates.DateFormatter('%D %H:%M'))
                else:
                    ax1.xaxis.set_major_locator(dates.MinuteLocator(interval=(max(df.index) - min(df.index)).seconds / 60 / 6))  # for minute-level plot
                    ax1.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
                    ax2.xaxis.set_major_formatter(dates.DateFormatter('%H:%M'))
            except:
                print 'too little data'
            ax1.text(0.9, 0.15, 'mean temperature in the water=' + str(round(meantemp * 1.8 + 32, 1)) + 'F',
                     verticalalignment='bottom', horizontalalignment='right',
                     transform=ax1.transAxes,
                     color='green', fontsize=15)
            ax1.set_xlabel('')
            ax1.grid()
            ax12 = ax1.twinx()
            ax12.set_title(tit)
            ax12.set_ylabel('Fahrenheit')
            ax12.set_xlabel('')
            ax12.set_ylim(np.nanmin(df['Temperature (C)'].values) * 1.8 + 32, np.nanmax(df['Temperature (C)'].values) * 1.8 + 32)
            ax2 = fig.add_subplot(212)
            ax2.plot(df2.index, df2['Az (g)'].values)
            ax2.invert_yaxis()
            ax2.set_ylabel('Angle')
            ax2.yaxis.set_major_formatter(ScalarFormatter(useOffset=False))
            ax2.grid()
            ax2.set_ylim(-1, 1)
            ax22 = ax2.twinx()
            ax22.set_ylabel('Angle')
            ax22.set_ylim(1, -1)
            ax22.invert_yaxis()
            plt.gcf().autofmt_xdate()
            ax2.set_xlabel('GMT TIME ' + df.index[0].strftime('%m/%d/%Y %H:%M:%S') + ' - ' + df.index[-1].strftime('%m/%d/%Y %H:%M:%S'))
            plt.savefig('/home/pi/Desktop/Pictures/' + fn.split('(')[1].split('_')[0] + '/' + fn.split('(')[0][-2:] + fn.split('(')[1][:-6] + '.png')
            plt.close()
            print 'picture is saved'
            upfiles.extend(dif_data)
            open('../uploaded_files/mypicfile.dat', 'r').close()  # same bookkeeping file as above (with the "../" prefix)
            f = open('../uploaded_files/mypicfile.dat', 'w+')
            [f.writelines(i + '\n') for i in upfiles]
            f.close()
            print 'All Pictures are Generated'
            return
    except:
        print 'something wrong'
        return
def p_create_pic():
    tit = 'Temperature and Depth'
    if not os.path.exists('/home/pi/Desktop/Pictures'):
        os.makedirs('/home/pi/Desktop/Pictures')
    if not os.path.exists('uploaded_files'):
        os.makedirs('uploaded_files')
    n = 0
    if 'r' in open('/home/pi/Desktop/mode.txt').read():
        file = 'control_file.txt'
        mode = 'real'
    else:
        file = 'test_control_file.txt'
        mode = 'test'
    try:
        files = []
        files.extend(sorted(glob.glob('/home/pi/Desktop/towifi/*.csv')))
        if not os.path.exists('uploaded_files/mypicfile.dat'):
            open('uploaded_files/mypicfile.dat', 'w').close()
        with open('uploaded_files/mypicfile.dat', 'r') as f:
            content = f.readlines()
        upfiles = [line.rstrip('\n') for line in open('uploaded_files/mypicfile.dat', 'r')]
        dif_data = list(set(files) - set(upfiles))
        if dif_data == []:
            print 'Standby. When the program detects a probe haul, the machine will reboot and show new data.'
            import time
            time.sleep(14)
            return
        ##################################
        ##################################
        dif_data.sort(key=os.path.getmtime)
        for fn in dif_data:
            fn2 = fn
            if not os.path.exists('/home/pi/Desktop/Pictures/' + fn.split('/')[-1].split('_')[2]):
                os.makedirs('/home/pi/Desktop/Pictures/' + fn.split('/')[-1].split('_')[2])
            df = pd.read_csv(fn, sep=',', skiprows=8, parse_dates={'datet': [1]}, index_col='datet', date_parser=parse2)  # create a new DatetimeIndex
            if mode == 'real':
                df = df.ix[(df['Depth (m)'] > 0.85 * mean(df['Depth (m)']))]
                df = df.ix[3:-2]  # delete this line if you cannot get a plot
                if len(df) > 1000:
                    df = df.ix[5:-5]
                    df = df.iloc[::(len(df) / 960 + 1), :]  # plot at most ~1000 data points
            else:
                if len(df) > 1000:
                    df = df.iloc[::(len(df) / 960 + 1), :]
            df2 = df
            df2['Depth (m)'] = [x * (-0.5468) for x in df2['Depth (m)'].values]
            if len(df2) < 5:
                continue
            meantemp = round(np.mean(df['Temperature (C)']), 2)
            fig = plt.figure()
            ax1 = fig.add_subplot(211)
            ax2 = fig.add_subplot(212)
            time_df2 = gmt_to_eastern(df2.index)
            time_df = gmt_to_eastern(df.index)
            ax1.plot(time_df, df['Temperature (C)'] * 1.8 + 32, 'b')
            ax1.set_ylabel('Temperature (Fahrenheit)')
            ax1.legend(['temp', 'in the water'])
            try:
                # HourLocator here: dates.DateLocator itself takes no "interval"
                # argument, so the original calls always raised into the except
                if max(df.index) - min(df.index) > Timedelta('0 days 04:00:00'):
                    ax1.xaxis.set_major_locator(dates.HourLocator(interval=(max(df.index) - min(df.index)).seconds / 3600 / 12))  # for hourly plot
                    ax2.xaxis.set_major_locator(dates.HourLocator(interval=(max(df.index) - min(df.index)).seconds / 3600 / 12))
                else:
                    ax1.xaxis.set_major_locator(dates.HourLocator(interval=(max(df.index) - min(df.index)).seconds / 3600 / 4))
                    ax2.xaxis.set_major_locator(dates.HourLocator(interval=(max(df.index) - min(df.index)).seconds / 3600 / 4))
            except:
                print ' '
            clim = getclim()  # extracts climatological values at this place and yearday
            if isnan(clim):
                txt = 'mean temperature =' + str(round(c2f(meantemp), 1)) + 'F (No Climatology here.)'
            else:
                txt = 'mean temperature =' + str(round(c2f(meantemp), 1)) + 'F Climatology =' + str(round(c2f(clim), 1)) + 'F'
            ax1.text(0.95, 0.01, txt,
                     verticalalignment='bottom', horizontalalignment='right',
                     transform=ax1.transAxes,
                     color='red', fontsize=14)
            ax1.grid()
            ax12 = ax1.twinx()
            ax12.set_title(tit)
            ax12.set_ylabel('Temperature (Celsius)')
            ax12.set_ylim(np.nanmin(df['Temperature (C)'].values), np.nanmax(df['Temperature (C)'].values) + 0.01)
            ax2.plot(time_df2, df2['Depth (m)'], 'b', label='Depth', color='green')
            ax2.legend()
            ax2.invert_yaxis()
            ax2.set_ylabel('Depth (Fathom)')
            ax2.set_ylim(np.nanmin(df2['Depth (m)'].values) * 1.05, np.nanmax(df2['Depth (m)'].values) * 0.95)
            ax2.yaxis.set_major_formatter(ScalarFormatter(useOffset=False))
            ax2.grid()
            ax22 = ax2.twinx()
            ax22.set_ylabel('Depth (feet)')
            ax22.set_ylim(round(np.nanmax(df2['Depth (m)'].values) * 6 * 0.95, 1), round(np.nanmin(df2['Depth (m)'].values) * 6 * 1.05, 1))
            ax22.invert_yaxis()
            plt.gcf().autofmt_xdate()
            ax2.set_xlabel('TIME ' + time_df[0].astimezone(pytz.timezone('US/Eastern')).strftime('%m/%d/%Y %H:%M:%S') + ' - ' + time_df[-1].astimezone(pytz.timezone('US/Eastern')).strftime('%m/%d/%Y %H:%M:%S'))
            plt.savefig('/home/pi/Desktop/Pictures/' + fn.split('/')[-1].split('_')[2] + '/' + fn.split('/')[-1].split('_')[-1].split('.')[0] + '.png')
            plt.close()
        open('uploaded_files/mypicfile.dat', 'r').close()
        a = open('uploaded_files/mypicfile.dat', 'a+')
        [a.writelines(i + '\n') for i in dif_data]
        a.close()
        print 'New data successfully downloaded. Plot will appear.'
        return
    except:
        print 'the new csv file cannot be plotted, skip it'
        a = open('uploaded_files/mypicfile.dat', 'a+')
        [a.writelines(i + '\n') for i in dif_data]
        a.close()
        return
def wifi():
    if not os.path.exists('../uploaded_files'):
        os.makedirs('../uploaded_files')
    if not os.path.exists('/home/pi/for_update/Desktop'):
        os.makedirs('/home/pi/for_update/Desktop')
    if not os.path.exists('/home/pi/for_update/mat_modules'):
        os.makedirs('/home/pi/for_update/mat_modules')
    if not os.path.exists('../uploaded_files/myfile.dat'):
        open('../uploaded_files/myfile.dat', 'w').close()
    # software updates
    import time
    session1 = ftplib.FTP('66.114.154.52', 'huanxin', '123321')
    session1.cwd("/updates/Desktop")
    files_Desktop = session1.nlst()
    #print files_Desktop
    for a in files_Desktop:
        file = open('/home/pi/for_update/Desktop/' + a, 'wb')
        session1.retrbinary('RETR ' + a, file.write)
        file.close()
        time.sleep(1)
        if os.stat('/home/pi/for_update/Desktop/' + a).st_size >= 4:
            copyfile('/home/pi/for_update/Desktop/' + a, '/home/pi/Desktop/' + a)
    time.sleep(1)
    session1.cwd("/updates/mat_modules")
    files_mat_modules = session1.nlst()
    for b in files_mat_modules:
        file = open('/home/pi/for_update/mat_modules/' + b, 'wb')
        session1.retrbinary('RETR ' + b, file.write)
        file.close()
        time.sleep(1)
        if os.stat('/home/pi/for_update/mat_modules/' + b).st_size >= 4:
            copyfile('/home/pi/for_update/mat_modules/' + b, '/home/pi/Desktop/mat_modules/' + b)
        if os.path.exists("mat_modules/" + b + 'c'):
            os.remove("mat_modules/" + b + 'c')
    session1.quit()
    time.sleep(3)
    if 3 > 2:
        files = []
        files.extend(sorted(glob.glob('/home/pi/Desktop/towifi/*.csv')))
        files.extend(sorted(glob.glob('/home/pi/Desktop/towifi/error*')))
        #print files
        with open('../uploaded_files/myfile.dat') as f:
            content = f.readlines()
        upfiles = [line.rstrip('\n') for line in open('../uploaded_files/myfile.dat')]
        dif_data = list(set(files) - set(upfiles))
        #print dif_data
        if dif_data == []:
            print ''
            time.sleep(1)
            return
        for u in dif_data:
            import time
            session = ftplib.FTP('66.114.154.52', 'huanxin', '123321')
            file = open(u, 'rb')
            session.cwd("/Matdata")
            #session.retrlines('LIST')
            session.storbinary("STOR " + u[24:], open(u, 'r'))  # send the file
            session.quit()  # close the FTP session
            time.sleep(1)
            file.close()
            print u[24:]
            print u[24:] + ' uploaded'
            time.sleep(3)
            f = open('../uploaded_files/myfile.dat', 'a+')
            f.writelines(u + '\n')
            f.close()
        print 'all files are uploaded'
        #os.system('sudo ifdown wlan0')
        time.sleep(1500)
        return
    else:
        #print 'no wifi'
        time.sleep(1)
        return
def judgement(boat_type, ma_file, t_file):
    valid = 'no'
    index_good_start, index_good_end = 0, 0  # defaults so the except branch cannot hit unbound names
    try:
        df = pd.read_csv(t_file, sep=',', skiprows=0, parse_dates={'datet': [0]}, index_col='datet', date_parser=parse)  # create a new DatetimeIndex
        df2 = pd.read_csv(ma_file, sep=',', skiprows=0, parse_dates={'datet': [0]}, index_col='datet', date_parser=parse2)
        index_good = np.where(abs(df2['Az (g)']) < 0.2)
        # keep only the start of the last contiguous run of "good" (near-vertical) samples
        index_better = []
        for e in range(len(index_good[0][:-1])):
            if index_good[0][e + 1] - index_good[0][e] > 1:
                index_better.append(index_good[0][e + 1])
        print index_good, index_better
        if index_better == []:
            index_better = [index_good[0][0]]
        index_good_start = index_better[-1]
        index_good_end = index_good[0][-1] + 1
        print 'index_good_start:' + str(index_good_start) + ' index_good_end:' + str(index_good_end)
        if boat_type == 'fixed':
            if index_good_end - index_good_start < 60:  # 60 samples ~ 120 minutes at 2-minute sampling
                print 'too little data, not in the sea'
                return valid, index_good_start, index_good_end
            else:
                valid = 'yes'
                return valid, index_good_start, index_good_end
        else:
            if index_good_end - index_good_start < 3:  # 3 samples ~ 6 minutes
                print 'too little data, not in the sea'
                return valid, index_good_start, index_good_end
            else:
                valid = 'yes'
                return valid, index_good_start, index_good_end
    except:
        print 'data not in the sea'
        return valid, index_good_start, index_good_end
def judgement2(boat_type, s_file, logger_timerange_lim, logger_pressure_lim):
    valid = 'no'
    index_good_start, index_good_end = 1, 0  # defaults in case read_csv fails below
    try:
        df = pd.read_csv(s_file, sep=',', skiprows=0, parse_dates={'datet': [0]}, index_col='datet', date_parser=parse)  # create a new DatetimeIndex
        index_good_start = 1
        index_good_end = len(df) - 1
        if boat_type != 'mobile':
            index_good = np.where(abs(df['Depth (m)']) > logger_pressure_lim)
            if len(index_good[0]) < logger_timerange_lim:
                print 'too little data, not in the sea'
                return valid, index_good_start, index_good_end
            else:
                valid = 'yes'
                return valid, index_good_start, index_good_end
        else:
            index_good = np.where(abs(df['Depth (m)']) > logger_pressure_lim)  # make sure you change this before use on the real boat
            if len(index_good[0]) < logger_timerange_lim or len(df) > 1440:  # require enough good data, and no more than one day of data in total
                print 'too little data, not in the sea'
                return valid, index_good_start, index_good_end
            else:
                valid = 'yes'
                return valid, index_good[0][0], index_good[0][-1]
    except:
        print 'data not in the sea'
        return valid, index_good_start, index_good_end
def gps_compare(lat, lon, mode):  # check to see if the boat is in the harbor
    harbor_range = 0.5  # box size of latitude and longitude, unit: seconds/10
    if mode == 'test':
        file2 = '/home/pi/Desktop/test_harborlist.txt'
    else:
        file2 = '/home/pi/Desktop/harborlist.txt'
    df2 = pd.read_csv(file2, sep=',')
    indice_lat = [i for i, v in enumerate(abs(np.array(df2['lat']) - lat) < harbor_range) if v]
    indice_lon = [i for i, v in enumerate(abs(np.array(df2['lon']) - lon) < harbor_range) if v]
    # intersect the two index lists; the original zip()-based pairing only matched
    # indices that happened to sit at the same position in both lists
    harbor_point_list = sorted(set(indice_lat) & set(indice_lon))
    return harbor_point_list
|
[
"noreply@github.com"
] |
noreply@github.com
|
bef1548305d046b43c0479fbaa2577e13ac971ed
|
5b432d8ffa09a0d5194ff321ea06f9c83ac26ad8
|
/eval/labels_stats.py
|
d04d9b4fdd8286635b275ec6ec7178eb2dc5dcfb
|
[
"MIT"
] |
permissive
|
CristianViorelPopa/BART-TL-topic-label-generation
|
b234d448f66d62a412e6285db21e3932a23efdb2
|
d2cee2438bb855e426548b39de1ed89c6db08840
|
refs/heads/main
| 2023-04-11T21:52:50.407503
| 2021-05-20T19:24:11
| 2021-05-20T19:24:11
| 360,913,591
| 13
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,336
|
py
|
import sys

import numpy as np


def preprocess_label(label):
    return label.split('(')[0].replace('_', ' ').strip()


def dcg(scores):
    result = 0
    for idx, score in enumerate(scores):
        result += (2 ** score - 1) / np.log2(idx + 2)
    return result


def ndcg(scores, n):
    target_scores = scores[:n]
    perfect_scores = sorted(scores, reverse=True)[:n]
    return dcg(target_scores) / dcg(perfect_scores)
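# Worked example of the two helpers above (scores made up): for
# scores = [3, 2, 3, 0, 1] and n = 3, dcg([3, 2, 3]) ~= 12.39, while the ideal
# ordering gives dcg([3, 3, 2]) ~= 12.92, so ndcg(scores, 3) ~= 0.96.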
def main():
    if len(sys.argv) != 3:
        print("Usage: " + sys.argv[0] + " <annotated labels csv file> <target labels file>")
        exit(0)
    with open(sys.argv[1]) as in_file:
        # ignore the header
        in_file.readline()
        label_scores = [{}]
        current_topic = 0
        while True:
            line = in_file.readline()
            if line.strip() == '':
                break
            # format of the annotated dataset: label, topic id, annotator scores...
            tokens = line.strip().split(',')
            topic_id = int(float(tokens[1]))
            if topic_id > current_topic:
                label_scores.append({})
                current_topic = topic_id
            scores = [int(float(score)) for score in tokens[2:]]
            final_score = sum(scores) / len(scores)
            label_scores[topic_id][preprocess_label(tokens[0])] = final_score
    target_labels = [[preprocess_label(label) for label in line.strip().split(' ')]
                     for line in open(sys.argv[2])]
    target_scores = []
    for topic_idx in range(len(target_labels)):
        target_scores.append([])
        added_labels = []
        for label in target_labels[topic_idx]:
            if label in label_scores[topic_idx] and label not in added_labels:
                target_scores[-1].append(label_scores[topic_idx][label])
                added_labels.append(label)
    target_scores = np.array(target_scores)
    print(np.min([len(scores) for scores in target_scores]))
    print("Top-1 Average Rating: " + str(np.mean([scores[0] for scores in target_scores])))
    print("nDCG-1: " + str(np.mean([ndcg(scores, 1) for scores in target_scores])))
    print("nDCG-3: " + str(np.mean([ndcg(scores, 3) for scores in target_scores])))
    print("nDCG-5: " + str(np.mean([ndcg(scores, 5) for scores in target_scores])))


if __name__ == '__main__':
    main()
|
[
"cristianviorel.popa@crowdstrike.com"
] |
cristianviorel.popa@crowdstrike.com
|
656effb7c39ee26e16f6aa357a759a7907827897
|
5ae383483213c7140dd8a9d886ab532b8ee3c549
|
/ani01/capitals/urls.py
|
37c2031802cef3da18479b73a4347689cac1d770
|
[] |
no_license
|
2001anindita/Techtronics
|
dedb99050e947c47ae0cd67d82256548634783fe
|
335db9217948e8786b0e06bcfa2182ce0a88464b
|
refs/heads/master
| 2023-06-30T18:42:10.600715
| 2021-08-02T08:33:51
| 2021-08-02T08:33:51
| 391,869,132
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 225
|
py
|
from django.urls import path, include
from . import views
from rest_framework import routers

router = routers.DefaultRouter()
router.register('capitals', views.CapitalView)

urlpatterns = [
    path('', include(router.urls)),
]
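# With the registration above, the DefaultRouter generates the standard DRF
# routes (assuming CapitalView is a ModelViewSet): GET/POST /capitals/ for the
# list, and GET/PUT/PATCH/DELETE /capitals/<pk>/ for a single capital.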
|
[
"aninditasardar3@gmail.com"
] |
aninditasardar3@gmail.com
|
99255793d613669f28b1301533a50b4fb60b1ba0
|
872f4bd2426e4e0cf23ef739a185d330429e22ff
|
/flexible_clustering/hnsw_optics_cachefile.py
|
1b0056a2d103558128d411f99a90fbc8a4ffb282
|
[
"BSD-3-Clause"
] |
permissive
|
supertulli/flexible-clustering
|
dd4da8bb784be248176f54d3f885afcd6efb6b32
|
cd3d85eb5e96ab412f00932429e5aec65d397190
|
refs/heads/master
| 2020-09-09T11:51:27.884241
| 2019-11-13T11:14:29
| 2019-11-13T11:14:29
| 221,221,533
| 0
| 0
|
BSD-3-Clause
| 2019-11-12T13:14:35
| 2019-11-12T13:14:35
| null |
UTF-8
|
Python
| false
| false
| 2,270
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 Symantec Corporation. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import argparse
import dbm

from numpy.linalg import norm
from sklearn.datasets import make_blobs

from hnsw_optics import *
from optics import optics
from plot_optics import do_plot

parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('--nitems', type=int, default=100)
parser.add_argument('--minpts', type=int, default=5)
args = parser.parse_args()

data, labels = make_blobs(args.nitems)

def distance(x, y):
    return norm(x - y)

hnsw = hnsw_distances_file(data, distance, args.filename)
with dbm.open(args.filename) as db:
    ordering, rh = optics(args.nitems, args.minpts, db_neighbors(db))
do_plot(data[ordering], rh, args.minpts, labels[ordering])
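# Example invocation (the cache file name is arbitrary):
#   python3 hnsw_optics_cachefile.py distcache.db --nitems 200 --minpts 5
# builds the HNSW distance cache in distcache.db, runs OPTICS over it,
# and plots the resulting reachability ordering.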
|
[
"matteo_dellamico@symantec.com"
] |
matteo_dellamico@symantec.com
|
a0bdc0c9daf60f5836f7c073b077236aa58eb427
|
bb144cbce615e3c55789957bd974f1897a1e96bf
|
/deliverygps/testapp/migrations/0002_auto_20201010_1410.py
|
0e7093f8fd99fb34aae36a0af1d1bd6f5d403902
|
[] |
no_license
|
AvinashProjects/AvinashproGps
|
eacb0afd54c3d33d5b7ba368d98f39b20185a517
|
146845737130089cb3f0443d4cde6fe5b842d8d7
|
refs/heads/master
| 2022-12-28T15:59:50.799457
| 2020-10-10T21:23:17
| 2020-10-10T21:23:17
| 302,992,359
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 584
|
py
|
# Generated by Django 3.0.8 on 2020-10-10 08:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('testapp', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='deliveryboylctn',
            name='l_lat',
            field=models.DecimalField(decimal_places=7, max_digits=9),
        ),
        migrations.AlterField(
            model_name='deliveryboylctn',
            name='l_lon',
            field=models.DecimalField(decimal_places=7, max_digits=9),
        ),
    ]
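# Note on the field definition above: max_digits=9 with decimal_places=7 admits
# values up to +/-99.9999999, and 1e-7 of a degree is roughly 1 cm on the ground,
# which is ample precision for storing GPS latitude/longitude.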
|
[
"pparthibann007@gmail.com"
] |
pparthibann007@gmail.com
|
352cc68fef7a6d186120f09912904aa95ac698e1
|
35d7d68913e3953c2a20658f6813718839fe98bc
|
/data_helpers.py
|
bf1b06005f2d936091fbee60b7cefadd2655d141
|
[] |
no_license
|
nooralahzadeh/Relation_Extraction
|
5fed4c24d4c118a9302ba10a647423043083b94a
|
1d6d948fc8e8611f280cd4af561b1acb3cc4a668
|
refs/heads/master
| 2021-06-13T12:48:58.666598
| 2017-02-18T17:58:13
| 2017-02-18T17:58:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,684
|
py
|
import pandas as pd
from nltk.tokenize import TweetTokenizer
import re
import os
import tensorflow as tf
import numpy as np

file_path = "/home/sahil/ML-bucket/data/train_new.csv"


def read_data(file=file_path):
    col_names = ['System-Id', 'Message', 'drug-offset-start', 'drug-offset-end', 'sideEffect-offset-start',
                 'sideEffect-offset-end', 'WM1', 'WM2', 'relType']
    data_frame = pd.read_csv(file, skipinitialspace=True, usecols=col_names)
    mssg_frame = data_frame['Message'].drop_duplicates()
    tokenizer = TweetTokenizer()
    string = []
    for mssg in mssg_frame:
        tokens = tokenizer.tokenize(mssg)
        for token in tokens:
            if is_word(token):
                string.append(token.lower())
    if not os.path.isfile("words.txt"):
        with open("words.txt", "w") as text_file:
            print(string, file=text_file)
    return data_frame


# TODO use space splitter and then strip the word
# TODO change regex to [a-z0-9].+
def is_word(word):
    for char in word:
        if char.isalpha() or char.isdigit():
            return True
    return False
# def word2id(word):
# word = 'b\'' + word + '\''
# with open("data/vocab.txt") as f:
# for i, line in enumerate(f):
# if line.split()[0] == word:
# return i
# return -1
# def get_word_vector():
# tf.load_op_library(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'word2vec_ops.so'))
# metafile = str(tf.train.get_checkpoint_state("data").model_checkpoint_path) + ".meta"
# sess = tf.Session()
# new_saver = tf.train.import_meta_graph(metafile)
# new_saver.restore(sess, tf.train.latest_checkpoint("data"))
# all_vars = tf.trainable_variables()
# init_op = tf.global_variables_initializer()
# sess.run(init_op)
# yield sess.run(all_vars[3])
def batch_iter(doc, batch_size, num_epochs, shuffle=True):
    """
    Generates a batch iterator for a dataset.
    """
    data = list()
    for item in doc:  # renamed from "iter", which shadowed the builtin
        data.append(item)
    # print("len", len(data))
    data = np.array(data)
    data_size = len(data)
    num_batches_per_epoch = int((len(data) - 1) / batch_size) + 1
    for epoch in range(num_epochs):
        # Shuffle the data at each epoch
        if shuffle:
            shuffle_indices = np.random.permutation(np.arange(data_size))
            shuffled_data = data[shuffle_indices]
        else:
            shuffled_data = data
        for batch_num in range(num_batches_per_epoch):
            start_index = batch_num * batch_size
            end_index = min((batch_num + 1) * batch_size, data_size)
            yield shuffled_data[start_index:end_index]
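# Example of the batch arithmetic above: with 10 samples and batch_size=4,
# num_batches_per_epoch = int((10 - 1) / 4) + 1 = 3, yielding batches of
# sizes 4, 4 and 2 per epoch.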
|
[
"sahil24wadhwa@gmail.com"
] |
sahil24wadhwa@gmail.com
|
e25d87c60c698b281091df6c1e09696e8052e453
|
9eef707da67275fcee438bf5ebb9643b5ea200b4
|
/main_app/models.py
|
7329118caabfd6598047570034ec73f663f06812
|
[] |
no_license
|
atevadd/Rcffutminna
|
640f2a545b9abe4105d0438870e3d01704080c64
|
a989f2a52997121421844d3b62c8d79d3feff5f1
|
refs/heads/master
| 2022-11-17T17:14:57.095951
| 2020-07-15T14:05:01
| 2020-07-15T14:05:01
| 269,304,952
| 0
| 1
| null | 2020-07-14T22:55:39
| 2020-06-04T08:38:32
|
CSS
|
UTF-8
|
Python
| false
| false
| 4,873
|
py
|
from main_app import db, login_manager, UserMixin
from datetime import datetime


@login_manager.user_loader
def admin(id):
    return User.query.get(int(id))


# message model
class Message(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    minister = db.Column(db.String(50))
    title = db.Column(db.String(100), nullable=False)
    tag = db.Column(db.String(100))
    date = db.Column(db.DateTime, default=datetime.utcnow)
    message = db.Column(db.Text, nullable=False)
    audio = db.Column(db.String(100))

    def __str__(self):
        return f"Message('{self.minister}', '{self.title}')"

    @classmethod
    def find_by_title(cls, title: str):
        return cls.query.filter_by(title=title).first()

    @classmethod
    def find_by_id(cls, id: int):
        return cls.query.filter_by(id=id).first()

    @classmethod
    def find_by_minister(cls, minister: str):
        return cls.query.filter_by(minister=minister).first()

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()
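# Example usage of the helpers above (illustrative):
#   msg = Message.find_by_title('welcome')  # first matching row, or None
#   if msg: msg.remove_from_database()      # delete + commit via db.session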
# Announcement model
class Announcement(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    annouce = db.Column(db.String(50))
    image = db.Column(db.String(50), default="default.jpg")
    content = db.Column(db.Text, nullable=False)

    @classmethod
    def find_by_id(cls, id: int):
        return cls.query.filter_by(id=id).first()

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# testimony model
class Testimony(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    testimony = db.Column(db.Text)

    @classmethod
    def find_by_id(cls, id: int):
        return cls.query.filter_by(id=id).first()

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# books model
class Book(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(50), nullable=False)
    author = db.Column(db.String(50), nullable=False)
    name = db.Column(db.String(50), nullable=False)

    def __str__(self):
        return self.title

    @classmethod
    def find_by_id(cls, id: int):
        return cls.query.filter_by(id=id).first()

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# gallery model
class Gallery(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    tag = db.Column(db.String(50), nullable=False)
    image = db.Column(db.String(50), default="default.jpg")
    date = db.Column(db.DateTime, default=datetime.utcnow)

    @classmethod
    def find_by_id(cls, id: int):
        return cls.query.filter_by(id=id).first()

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# ADMIN USER MODEL
class User(db.Model, UserMixin):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(50), nullable=False)
    password = db.Column(db.String(50), nullable=False)

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# ALUMNI MODEL
class Alumni(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    first_name = db.Column(db.String(50))
    last_name = db.Column(db.String(50))
    email = db.Column(db.String(50), unique=True)
    phone_number = db.Column(db.String(20))
    unit = db.Column(db.String(20), default=None)
    role = db.Column(db.String(50))

    def __str__(self):
        return self.first_name

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()


# CONTACT MODEL
class Contact(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    email = db.Column(db.String(100))
    message = db.Column(db.Text)

    def save_to_database(self) -> None:
        db.session.add(self)
        db.session.commit()

    def remove_from_database(self) -> None:
        db.session.delete(self)
        db.session.commit()
|
[
"nnamenearinze@gmail.com"
] |
nnamenearinze@gmail.com
|
9f7a8508050ab744d943a95045c9c3ce3129c548
|
981d6e960db2a1dc8c555364093f0bdbb4709b5a
|
/trainers/sub_trainers/dpm_ms_trainers.py
|
570a0bfc98e58520d93feceb160c970380685355
|
[] |
no_license
|
REA1/scratracer
|
6feb43ad1c04fb9cced5754461c477c4939acb6f
|
3d072145df2735d6d59412dc7242bf6161ae0968
|
refs/heads/master
| 2023-01-31T04:42:14.744019
| 2020-12-13T07:08:16
| 2020-12-13T07:08:16
| 314,434,221
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
from trainers.Trainer import *
from trainers.ModelSelectionTrainer import *


class DPMMSRandomTrainer(ModelSelectionTrainer):
    def __init__(self):
        super().__init__()

    def populate(self, X, y, train_id_list, pool_id_list):
        super().populate(X, y, train_id_list, pool_id_list)
        self.pre_processor = DPMPreProcessor()


class DPMMSCertaintyTrainer(ModelSelectionTrainer):
    def __init__(self):
        super().__init__()

    def populate(self, X, y, train_id_list, pool_id_list):
        super().populate(X, y, train_id_list, pool_id_list)
        self.pre_processor = DPMPreProcessor()
        self.sampler = CertaintySampler()


class DPMMSUncertaintyTrainer(ModelSelectionTrainer):
    def __init__(self):
        super().__init__()

    def populate(self, X, y, train_id_list, pool_id_list):
        super().populate(X, y, train_id_list, pool_id_list)
        self.pre_processor = DPMPreProcessor()
        self.sampler = UncertaintySampler()
|
[
"wwang33@ncsu.edu"
] |
wwang33@ncsu.edu
|
e398266ec2bb991e101560d67092ff805b861f66
|
af4761b401ecec831ff42344a33cc1e85996eb64
|
/freq.py
|
6f1636fff4abfdfd953c32d3ee2924b86d831e62
|
[
"MIT"
] |
permissive
|
rayjustinhuang/BitesofPy
|
70aa0bb8fdbffa87810f00210b4cea78211db8cf
|
e5738f4f685bad4c8fb140cbc057faa441d4b34c
|
refs/heads/master
| 2022-08-15T05:02:19.084123
| 2022-08-09T12:26:42
| 2022-08-09T12:26:42
| 219,509,496
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
from collections import Counter


def freq_digit(num: int) -> int:
    # most_common(1) returns [(digit_char, count)]; take the digit and cast back to int
    return int(Counter(str(num)).most_common(1)[0][0])
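# Example: freq_digit(1122333) returns 3, since '3' occurs most often; on ties,
# most_common keeps first-encountered order (so freq_digit(1221) -> 1 on CPython 3.7+).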
|
[
"rayjustinhuang@gmail.com"
] |
rayjustinhuang@gmail.com
|
5088f6b3f2353635f7d058f8bacc384981913b52
|
5955ea34fd72c719f3cb78fbb3c7e802a2d9109a
|
/MATRIX/Trivial/trivia1.py
|
f67af7e92614c8d5539f7d8a23712859516740da
|
[] |
no_license
|
AndreySperansky/TUITION
|
3c90ac45f11c70dce04008adc1e9f9faad840b90
|
583d3a760d1f622689f6f4f482c905b065d6c732
|
refs/heads/master
| 2022-12-21T21:48:21.936988
| 2020-09-28T23:18:40
| 2020-09-28T23:18:40
| 299,452,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 181
|
py
|
matrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
print(matrix[1])     # the whole second row: [4, 5, 6]
print(matrix[1][1])  # row 1, column 1: 5
print(matrix[2][0])  # row 2, column 0: 7

matrix = [[1, 2, 3],
          [4, 5, 6],
          [7, 8, 9]]
print(matrix[1][1])
|
[
"andrey.speransky@gmail.com"
] |
andrey.speransky@gmail.com
|
2f169cafa3c55f803ef3f1efa2eb0e8d816c6e2a
|
9868ee8339f73de21e5d22be3c774ef8a8bf2e2b
|
/experiment/migrations/0008_trial_anon_user.py
|
3511b2d5d4f6eba5dc955e6155ebb5827c097310
|
[] |
no_license
|
thartbm/OE_WEBAPP
|
a33eb15e09bcf5d0a2ebf2bd65ea1cf04ad955b6
|
a0fba8e7c0fdbb1a29ca8d2c2f51301b2d015243
|
refs/heads/master
| 2021-03-19T13:44:48.422289
| 2018-09-04T03:15:40
| 2018-09-04T03:15:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 455
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('experiment', '0007_auto_20180711_0448'),
    ]

    operations = [
        migrations.AddField(
            model_name='trial',
            name='anon_user',
            field=models.CharField(default='a0', max_length=50),
            preserve_default=False,
        ),
    ]
|
[
"muhamad_hmeed@live.ca"
] |
muhamad_hmeed@live.ca
|
9111eee82037e86525397c7db43781874cc7c4ce
|
f613b1555a58b8a47e922c1bcf4af5310160a48e
|
/tac_follow/ppo.py
|
ca5279686ecee356b9780ca90ccdb272ed477f69
|
[] |
no_license
|
quantumiracle/Store
|
833620dcce08f56ac0a204b89a18dd957e263cd3
|
d452a2307fd1edb83fd8879e6147f9a8f1dc3811
|
refs/heads/master
| 2022-12-26T14:21:57.202458
| 2019-08-01T13:43:24
| 2019-08-01T13:43:24
| 192,982,880
| 1
| 0
| null | 2022-12-08T05:19:08
| 2019-06-20T20:22:52
|
ASP
|
UTF-8
|
Python
| false
| false
| 14,872
|
py
|
"""
pure vector observation based learning: position of tactip and target
task: tactip following the cylinder to reach the ball target
"""
import tensorflow as tf
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten
import numpy as np
import matplotlib.pyplot as plt
import gym, threading, queue
from gym_unity.envs import UnityEnv
import argparse
from PIL import Image
import time
# from env import Reacher
EP_MAX = 10000
EP_LEN = 100
N_WORKER = 3 # parallel workers
GAMMA = 0.9 # reward discount factor
A_LR = 0.0001 # learning rate for actor
C_LR = 0.0002 # learning rate for critic
MIN_BATCH_SIZE = 10 # minimum batch size for updating PPO
UPDATE_STEP = 3 # loop update operation n-steps
EPSILON = 0.2 # for clipping surrogate objective
S_DIM, A_DIM, CHANNEL = 256, 2, 1 # state and action dimension
NUM_PINS = 91 #127
VS_DIM = 6 # dim of vector state
S_DIM_ALL = S_DIM*S_DIM*CHANNEL
env_name = "./tac_follow_new" # Name of the Unity environment binary to launch
# env = UnityEnv(env_name, worker_id=2, use_visual=False)
parser = argparse.ArgumentParser(description='Train or test neural net motor controller.')
parser.add_argument('--train', dest='train', action='store_true', default=False)
parser.add_argument('--test', dest='test', action='store_true', default=False)
args = parser.parse_args()
class PPO(object):
    def __init__(self):
        self.sess = tf.Session()
        self.vs = tf.placeholder(tf.float32, [None, VS_DIM], 'state')
        # self.tfs = tf.placeholder(tf.float32, [None, S_DIM, S_DIM, 1], 'state')
        self.tfs = self.vs
        # critic
        # encoded = self.encoder(self.tfs)  # convolutional encoder
        encoded = self.tfs
        l1 = tf.layers.dense(encoded, 100, tf.nn.relu)
        l2 = tf.layers.dense(l1, 100, tf.nn.relu)
        l3 = tf.layers.dense(l2, 100, tf.nn.relu)
        l4 = tf.layers.dense(l3, 100, tf.nn.relu)
        self.v = tf.layers.dense(l4, 1)
        self.tfdc_r = tf.placeholder(tf.float32, [None, 1], 'discounted_r')
        self.advantage = self.tfdc_r - self.v
        self.closs = tf.reduce_mean(tf.square(self.advantage))
        self.ctrain_op = tf.train.AdamOptimizer(C_LR).minimize(self.closs)
        # actor
        pi, pi_params = self._build_anet('pi', trainable=True)
        oldpi, oldpi_params = self._build_anet('oldpi', trainable=False)
        self.sample_op = tf.squeeze(pi.sample(1), axis=0)  # operation of choosing action
        self.update_oldpi_op = [oldp.assign(p) for p, oldp in zip(pi_params, oldpi_params)]
        self.tfa = tf.placeholder(tf.float32, [None, A_DIM], 'action')
        self.tfadv = tf.placeholder(tf.float32, [None, 1], 'advantage')
        # ratio = tf.exp(pi.log_prob(self.tfa) - oldpi.log_prob(self.tfa))
        ratio = pi.prob(self.tfa) / (oldpi.prob(self.tfa) + 1e-5)
        surr = ratio * self.tfadv  # surrogate loss
        self.aloss = -tf.reduce_mean(tf.minimum(  # clipped surrogate objective
            surr,
            tf.clip_by_value(ratio, 1. - EPSILON, 1. + EPSILON) * self.tfadv))
        self.atrain_op = tf.train.AdamOptimizer(A_LR).minimize(self.aloss)
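        # Numeric illustration of the clipped objective (not from the original code):
        # with EPSILON = 0.2, a probability ratio of 1.5 on a positive-advantage
        # sample is clipped to 1.2, so the policy gains nothing from moving more
        # than 20% away from the old policy on that sample.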
        self.sess.run(tf.global_variables_initializer())
        self.saver = tf.train.Saver()

    # def maxpool2d(x, k=2):
    #     # MaxPool2D wrapper
    #     return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1], padding='SAME')
    def update(self):
        global GLOBAL_UPDATE_COUNTER
        while not COORD.should_stop():
            if GLOBAL_EP < EP_MAX:
                UPDATE_EVENT.wait()  # wait until a batch of data is ready
                self.sess.run(self.update_oldpi_op)  # copy pi to oldpi
                data = [QUEUE.get() for _ in range(QUEUE.qsize())]  # collect data from all workers
                data = np.vstack(data)
                # s, a, r = data[:, :S_DIM_ALL], data[:, S_DIM_ALL: S_DIM_ALL + A_DIM], data[:, -1:]
                # s = s.reshape(-1, S_DIM, S_DIM, CHANNEL)
                s, a, r = data[:, :VS_DIM], data[:, VS_DIM: VS_DIM + A_DIM], data[:, -1:]
                adv = self.sess.run(self.advantage, {self.tfs: s, self.tfdc_r: r})
                # update actor and critic in an n-step update loop
                [self.sess.run(self.atrain_op, {self.tfs: s, self.tfa: a, self.tfadv: adv}) for _ in range(UPDATE_STEP)]
                [self.sess.run(self.ctrain_op, {self.tfs: s, self.tfdc_r: r}) for _ in range(UPDATE_STEP)]
                UPDATE_EVENT.clear()  # updating finished
                GLOBAL_UPDATE_COUNTER = 0  # reset counter
                ROLLING_EVENT.set()  # set roll-out available
    def encoder(self, input):
        model = tf.keras.models.Sequential(name='encoder')
        model.add(Conv2D(filters=8, kernel_size=(2, 2), padding='same', activation='relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Conv2D(filters=4, kernel_size=(2, 2), padding='same', activation='relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        # model.add(Conv2D(filters=2, kernel_size=(2, 2), padding='same', activation='relu'))
        # model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Flatten())
        return model(input)
        # c1 = tf.keras.layers.Conv2D(filters=4, kernel_size=(2,2), padding='same', activation='relu')
        # p1 = self.maxpool2d(c1, k=2)
        # c2 = tf.keras.layers.Conv2D(filters=4, kernel_size=(2,2), padding='same', activation='relu')
        # p2 = self.maxpool2d(c2, k=2)
        # tf.layers.flatten(p2)
    def _build_anet(self, name, trainable):
        with tf.variable_scope(name):
            # encoded = self.encoder(self.tfs)
            # print('latent dim: ', encoded.shape)
            encoded = self.tfs
            l1 = tf.layers.dense(encoded, 200, tf.nn.relu, trainable=trainable)
            l2 = tf.layers.dense(l1, 200, tf.nn.relu, trainable=trainable)
            l3 = tf.layers.dense(l2, 200, tf.nn.relu, trainable=trainable)
            l4 = tf.layers.dense(l3, 200, tf.nn.relu, trainable=trainable)
            action_scale = 0.5
            mu = action_scale * tf.layers.dense(l4, A_DIM, tf.nn.tanh, trainable=trainable)
            sigma = tf.layers.dense(l4, A_DIM, tf.nn.softplus, trainable=trainable)
            sigma += 1e-3  # without this line, a zero sigma may cause NaN actions
            sigma = tf.clip_by_value(sigma, 0, 1)  # for stability
            # print('mu,sig: ', mu, sigma)
            norm_dist = tf.distributions.Normal(loc=mu, scale=sigma)
        params = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=name)
        return norm_dist, params
    def choose_action(self, s):
        s = s[np.newaxis, :]  # np.newaxis adds a batch dim: [] -> [[]]
        a = self.sess.run(self.sample_op, {self.tfs: s})[0]
        return np.clip(a, -360, 360)

    def get_v(self, s):
        if s.ndim < 4:
            s = s[np.newaxis, :]
        return self.sess.run(self.v, {self.tfs: s})[0, 0]

    def save(self, path):
        self.saver.save(self.sess, path)

    def load(self, path):
        self.saver.restore(self.sess, path)
class Worker(object):
    def __init__(self, wid):
        self.wid = wid
        self.env = UnityEnv(env_name, worker_id=wid + 1, use_visual=False, use_both=True)
        # self.env = Reacher(render=True)
        self.ppo = GLOBAL_PPO

    def work(self):
        global GLOBAL_EP, GLOBAL_RUNNING_R, GLOBAL_UPDATE_COUNTER
        step = 0
        while not COORD.should_stop():
            s, info = self.env.reset()
            s6 = s[:6]
            step += 1
            ep_r = 0
            buffer_s, buffer_a, buffer_r = [], [], []
            self.pins_x = []
            self.pins_y = []
            self.pins_z = []
            self.object_x = []
            self.object_y = []
            self.object_z = []
            for t in range(EP_LEN):
                if not ROLLING_EVENT.is_set():  # while global PPO is updating
                    ROLLING_EVENT.wait()  # wait until PPO is updated
                    buffer_s, buffer_a, buffer_r = [], [], []  # clear history buffer, use the new policy to collect data
                a = self.ppo.choose_action(s6)
                s_, r, done, info = self.env.step(a)
                s_6 = s_[:6]
                # print(np.array(s_).shape)
                # plot pins
                pins_x = s[6::3]
                pins_z = s[8::3]
                self.object_x.append(s[0])
                self.object_z.append(s[2])
                self.pins_x.append(pins_x)
                self.pins_z.append(pins_z)
                # print('a: ', a)  # shape: []
                # print('s: ', s_)  # shape: []
                # print('r: ', r)  # shape: scalar
                # print('done: ', done)  # True/False
                buffer_s.append(s6)  # store the 6-dim vector state so the VS_DIM slicing in update() lines up
                buffer_a.append(a)
                buffer_r.append(r)  # (reward normalization was found useful elsewhere but is not applied here)
                s = s_
                s6 = s_6
                ep_r += r
                GLOBAL_UPDATE_COUNTER += 1  # count toward the minimum batch size; no need to wait for other workers
                if t == EP_LEN - 1 or GLOBAL_UPDATE_COUNTER >= MIN_BATCH_SIZE:
                    v_s_ = self.ppo.get_v(s_6)
                    discounted_r = []  # compute discounted reward
                    for r in buffer_r[::-1]:
                        v_s_ = r + GAMMA * v_s_
                        discounted_r.append(v_s_)
                    discounted_r.reverse()
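                    # Worked example of the recursion above (illustrative): with
                    # buffer_r = [1, 1, 1], GAMMA = 0.9 and bootstrap v_s_ = 0,
                    # the backward pass yields [2.71, 1.9, 1.0] after reverse().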
                    bs, ba, br = np.vstack(buffer_s), np.vstack(buffer_a), np.array(discounted_r)[:, np.newaxis]
                    buffer_s, buffer_a, buffer_r = [], [], []
                    QUEUE.put(np.hstack((bs, ba, br)))  # put data in the queue
                    if GLOBAL_UPDATE_COUNTER >= MIN_BATCH_SIZE:
                        ROLLING_EVENT.clear()  # stop collecting data
                        UPDATE_EVENT.set()  # global PPO update
                    if GLOBAL_EP >= EP_MAX:  # stop training
                        COORD.request_stop()
                        break
                    if GLOBAL_EP % 50 == 0 and GLOBAL_EP > 0:
                        self.ppo.save(model_path)
            # reshape_pins_x = np.array(self.pins_x).transpose()
            # reshape_pins_z = np.array(self.pins_z).transpose()
            # plt.clf()
            # for i in range(NUM_PINS):
            #     plt.subplot(411)
            #     plt.plot(np.arange(len(self.pins_x)), reshape_pins_x[i])
            #     plt.title('X-Position')
            #     plt.subplot(412)
            #     plt.plot(np.arange(len(self.pins_z)), reshape_pins_z[i])
            #     plt.title('Y-Position')  # although it's z, to match reality, use y
            #     plt.subplot(413)
            #     plt.plot(np.arange(len(self.pins_x)), (reshape_pins_x[i] - self.object_x) - (reshape_pins_x[i][0] - self.object_x[0]))
            #     plt.title('X-Displacement')
            #     plt.subplot(414)
            #     plt.plot(np.arange(len(self.pins_x)), (reshape_pins_z[i] - self.object_z) - (reshape_pins_z[i][0] - self.object_z[0]))
            #     plt.title('Y-Displacement')
            #     plt.xlabel('Time Step')
            #     plt.ylim(-100, 200)
            # plt.tight_layout()
            # plt.savefig('./ppo_pins.png')
            # record reward changes, plot later
            if len(GLOBAL_RUNNING_R) == 0:
                GLOBAL_RUNNING_R.append(ep_r)
            else:
                GLOBAL_RUNNING_R.append(GLOBAL_RUNNING_R[-1] * 0.99 + ep_r * 0.01)
            GLOBAL_EP += 1
            print('{0:.1f}%'.format(GLOBAL_EP / EP_MAX * 100), '|W%i' % self.wid, '|Ep_r: %.2f' % ep_r)
            if GLOBAL_EP % 30 == 0:  # plot every N episodes; plotting outside the main thread can raise errors
                plt.clf()
                plt.plot(np.arange(GLOBAL_EP), GLOBAL_RUNNING_R)
                plt.xlabel('Episode')
                plt.ylabel('Reward')
                try:
                    plt.savefig('./ppo_multi.png')
                except:
                    print('writing conflict!')
                self.ppo.save(model_path)
if __name__ == '__main__':
    model_path = './model/tac_pins'
    if args.train:
        start_time = time.time()  # a distinct name; assigning to "time" would shadow the time module
        GLOBAL_PPO = PPO()
        UPDATE_EVENT, ROLLING_EVENT = threading.Event(), threading.Event()
        UPDATE_EVENT.clear()  # not updating now
        ROLLING_EVENT.set()  # start to roll out
        workers = [Worker(wid=i + 5) for i in range(N_WORKER)]
        GLOBAL_UPDATE_COUNTER, GLOBAL_EP = 0, 0
        GLOBAL_RUNNING_R = []
        COORD = tf.train.Coordinator()
        QUEUE = queue.Queue()  # workers put data in this queue
        threads = []
        for worker in workers:  # worker threads
            t = threading.Thread(target=worker.work, args=())
            t.daemon = True  # if the main thread is killed, the sub-threads die as well
            t.start()  # training
            threads.append(t)
        # add a PPO updating thread
        threads.append(threading.Thread(target=GLOBAL_PPO.update))
        threads[-1].start()
        COORD.join(threads)
        # plot reward change and test
        # plt.plot(np.arange(len(GLOBAL_RUNNING_R)), GLOBAL_RUNNING_R)
        # plt.xlabel('Episode'); plt.ylabel('Moving reward'); plt.ion(); plt.show()
        # env = gym.make('Pendulum-v0')
        # env = Reacher(render=True)
        # env = UnityEnv(env_name, worker_id=10, use_visual=True, use_both=True)
        # s, info = env.reset()
        # for t in range(100):
        #     s, r, done, info = env.step(GLOBAL_PPO.choose_action(s))
        GLOBAL_PPO.save(model_path)
    if args.test:
        env = UnityEnv(env_name, worker_id=np.random.randint(0, 10), use_visual=False, use_both=True)
        env.reset()
        GLOBAL_PPO = PPO()
        GLOBAL_PPO.load(model_path)
        test_steps = 100
        test_episode = 10
        for _ in range(test_episode):
            s, info = env.reset()
            for t in range(test_steps):
                # env.render()
                a = GLOBAL_PPO.choose_action(s[:6])
                print(a)
                s, r, done, info = env.step(a)
|
[
"zd2418@graphic06.doc.ic.ac.uk"
] |
zd2418@graphic06.doc.ic.ac.uk
|
60d734472901344d763d6e229ca2310ce9e90063
|
714cbe0205a4af7b8386116854c1eb85b63fb74d
|
/base.py
|
c506f9f1f78873528074a0dcd48211ca9c54dd1f
|
[] |
no_license
|
hologerry/lincoln
|
8ff7eb0b1ffe6b3792c8908e9db3adbe59e3ce22
|
e3144d949c7e2e85075a2211c1f49bcf40d5b5b2
|
refs/heads/master
| 2021-02-05T19:10:40.708747
| 2020-03-02T16:24:16
| 2020-03-02T16:24:16
| 243,821,199
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,342
|
py
|
from numpy import ndarray

from utils.np_utils import assert_same_shape


class Operation(object):
    def __init__(self):
        pass

    def forward(self,
                input_: ndarray,
                inference: bool = False) -> ndarray:
        self.input_ = input_
        self.output = self._output(inference)
        return self.output

    def backward(self, output_grad: ndarray) -> ndarray:
        assert_same_shape(self.output, output_grad)
        self.input_grad = self._input_grad(output_grad)
        assert_same_shape(self.input_, self.input_grad)
        return self.input_grad

    def _output(self, inference: bool) -> ndarray:
        raise NotImplementedError()

    def _input_grad(self, output_grad: ndarray) -> ndarray:
        raise NotImplementedError()


class ParamOperation(Operation):
    def __init__(self, param: ndarray):  # __init__ returns None, so no ndarray return annotation
        super().__init__()
        self.param = param

    def backward(self, output_grad: ndarray) -> ndarray:
        assert_same_shape(self.output, output_grad)
        self.input_grad = self._input_grad(output_grad)
        self.param_grad = self._param_grad(output_grad)
        assert_same_shape(self.input_, self.input_grad)
        return self.input_grad

    def _param_grad(self, output_grad: ndarray) -> ndarray:
        raise NotImplementedError()
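# Minimal sketch of a concrete subclass (illustrative, not part of the original file):
#
#     import numpy as np
#
#     class Sigmoid(Operation):
#         def _output(self, inference: bool) -> ndarray:
#             return 1.0 / (1.0 + np.exp(-self.input_))
#
#         def _input_grad(self, output_grad: ndarray) -> ndarray:
#             # d(sigmoid)/dx = sigmoid * (1 - sigmoid), chained with the upstream grad
#             return self.output * (1.0 - self.output) * output_grad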
|
[
"hologerry@gmail.com"
] |
hologerry@gmail.com
|
22b302924e1b1ab6e2a83f7157859da0403a32c5
|
faa981d6822b4b75c339a2841c66889e2cb5c7f1
|
/ictk_djserver/work_report/models.py
|
9c71b5b281bcebe94288e2097a3a9c05e1d9893a
|
[] |
no_license
|
neo1seok/django_server
|
664bc36cb7dd089a903334c1832d36465a8e8449
|
0ab8aec921b69065699702e1d24caaae508218c3
|
refs/heads/master
| 2020-06-18T16:42:48.627723
| 2020-01-08T06:43:55
| 2020-01-08T06:43:55
| 196,368,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,024
|
py
|
from django.db import models
from django.utils import timezone


# Create your models here.
class Post(models.Model):
    author = models.ForeignKey('account.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    def publish(self):
        self.published_date = timezone.now()
        self.save()

    def __str__(self):
        return self.title


class Table2(models.Model):
    # author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    def publish(self):
        self.published_date = timezone.now()
        self.save()

    def __str__(self):
        return self.title
|
[
"neo1seok@gmail.com"
] |
neo1seok@gmail.com
|
797cb6f5b818febbf53825d1a83a277b58244134
|
98ecb8754d90ea6c8099cffbf8e71dcb218672c0
|
/gui/fct_fournie_1.py
|
67bf8cc2fb94dd0224b1cc934678ee1db515201b
|
[] |
no_license
|
djokami/Theatre
|
2e70dfc3784ca8f2bbca1dd04a2d27106daa78b7
|
3a619c3f976f3664bc40e262509caad295118811
|
refs/heads/master
| 2020-05-25T00:19:57.472020
| 2019-05-19T21:56:49
| 2019-05-19T21:56:49
| 187,529,545
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,262
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'fct_fournie_1.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_fct_fournie_1(object):
    def setupUi(self, fct_fournie_1):
        fct_fournie_1.setObjectName("fct_fournie_1")
        fct_fournie_1.resize(561, 601)
        self.verticalLayout = QtWidgets.QVBoxLayout(fct_fournie_1)
        self.verticalLayout.setObjectName("verticalLayout")
        self.table_fct_fournie_1 = QtWidgets.QTableWidget(fct_fournie_1)
        self.table_fct_fournie_1.setObjectName("table_fct_fournie_1")
        self.table_fct_fournie_1.setColumnCount(2)
        self.table_fct_fournie_1.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.table_fct_fournie_1.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.table_fct_fournie_1.setHorizontalHeaderItem(1, item)
        self.table_fct_fournie_1.horizontalHeader().setDefaultSectionSize(200)
        self.table_fct_fournie_1.horizontalHeader().setMinimumSectionSize(50)
        self.table_fct_fournie_1.horizontalHeader().setStretchLastSection(True)
        self.table_fct_fournie_1.verticalHeader().setVisible(False)
        self.verticalLayout.addWidget(self.table_fct_fournie_1)
        self.label_fct_fournie_1 = QtWidgets.QLabel(fct_fournie_1)
        self.label_fct_fournie_1.setText("")
        self.label_fct_fournie_1.setAlignment(QtCore.Qt.AlignCenter)
        self.label_fct_fournie_1.setObjectName("label_fct_fournie_1")
        self.verticalLayout.addWidget(self.label_fct_fournie_1)

        self.retranslateUi(fct_fournie_1)
        QtCore.QMetaObject.connectSlotsByName(fct_fournie_1)

    def retranslateUi(self, fct_fournie_1):
        _translate = QtCore.QCoreApplication.translate
        fct_fournie_1.setWindowTitle(_translate("fct_fournie_1", "Liste des représentations de \"How to be a parisian ?\""))
        item = self.table_fct_fournie_1.horizontalHeaderItem(0)
        item.setText(_translate("fct_fournie_1", "nomSpec"))
        item = self.table_fct_fournie_1.horizontalHeaderItem(1)
        item.setText(_translate("fct_fournie_1", "dateRep"))
|
[
"kamissokoselin@gmail.com"
] |
kamissokoselin@gmail.com
|
fbf7595217ffa37fbf7e7069d36ab4b3baf3169b
|
e75157fa392861377db6869fae459e4eef694a74
|
/backend_python/main.py
|
b43ddd2777aa5b0cdeb0b54053610eaef2b096fb
|
[] |
no_license
|
gpard77/cs496_final
|
422e6ae244f4d7ad090e4bf9f0a13e7a3374a5fd
|
20b66296bba13a9c238c8fd289884a764568192b
|
refs/heads/master
| 2020-06-14T03:55:09.912094
| 2016-12-04T06:42:28
| 2016-12-04T06:42:28
| 75,519,823
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,077
|
py
|
# import webapp2
# from google.appengine.api import oauth
# application = webapp2.WSGIApplication([
# ('/book', 'book.Book'),
# ('/shelf', 'shelf.Shelf')
# ], debug=True)
# # get back details about a specific book - use an id
# application.router.add(webapp2.Route(r'/book/<id:[0-9]+><:/?>', 'book.Book'))
# # this one gets a list of book keys
# application.router.add(webapp2.Route(r'/book/search', 'book.BookSearch'))
# # this one gets a list of shelf keys
# application.router.add(webapp2.Route(r'/shelf', 'shelf.Shelf'))
# # this one allows book additions to a shelf via the PUT method
# application.router.add(webapp2.Route(r'/shelf/<cid:[0-9]+>/book/<bid:[0-9]+><:/?>', 'shelf.ShelfBooks'))
# # this one also returns a list of shelf keys
# application.router.add(webapp2.Route(r'/shelf/search', 'shelf.ShelfSearch'))
# # this one provides details about a specific shelf - use an id
# application.router.add(webapp2.Route(r'/shelf/<id:[0-9]+><:/?>', 'shelf.Shelf'))
import webapp2
from google.appengine.api import oauth
application = webapp2.WSGIApplication([
('/job', 'job.Job'),
('/member', 'member.Member')
], debug=True)
# get back details about a specific job - use an id
application.router.add(webapp2.Route(r'/job/<id:[0-9]+><:/?>', 'job.Job'))
# this one gets a list of job keys
application.router.add(webapp2.Route(r'/job/search', 'job.JobSearch'))
# this one gets a list of member keys
application.router.add(webapp2.Route(r'/member', 'member.Member'))
# this one allows job additions to a member via the PUT method
application.router.add(webapp2.Route(r'/member/<cid:[0-9]+>/job/<bid:[0-9]+><:/?>', 'member.MemberJobs'))
# remove job from member
application.router.add(webapp2.Route(r'/member/<cid:[0-9]+>/job/remove/<bid:[0-9]+><:/?>', 'member.MemberRemoveJob'))
# this one also returns a list of member keys
application.router.add(webapp2.Route(r'/member/search', 'member.MemberSearch'))
# this one provides details about a specific member - use an id
application.router.add(webapp2.Route(r'/member/<id:[0-9]+><:/?>', 'member.Member'))
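# The route table above references handlers by dotted string; each one is a
# webapp2.RequestHandler subclass. A hypothetical sketch of what job.Job
# (defined in job.py, which is not shown here) might look like:
#
#     import webapp2
#
#     class Job(webapp2.RequestHandler):
#         def get(self, *args, **kwargs):
#             self.response.write('job %s' % kwargs.get('id'))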
|
[
"pardg@oregonstate.edu"
] |
pardg@oregonstate.edu
|
e698508e2fe3948862afe6bba9c8f5afb175f8da
|
02609a37df8092cdfcb51fe42ea9935e0024320b
|
/zamiaai/skills/psychology/__init__.py
|
2cc4501305f39168b8553dcf49540cc825e007ad
|
[
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
TexaProject/zamia-ai
|
5a742118060406f1aa271c5ed1b14110e9953544
|
d9e9c6123fdadca3fae55e87ea2b2b32d82bc210
|
refs/heads/master
| 2020-04-14T05:57:51.068798
| 2018-12-24T00:12:22
| 2018-12-24T00:12:22
| 163,673,844
| 1
| 1
|
Apache-2.0
| 2018-12-31T14:08:09
| 2018-12-31T14:08:08
| null |
UTF-8
|
Python
| false
| false
| 826
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016, 2017, 2018 Guenter Bartsch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
DEPENDS = [ 'base', 'dialog' ]
import psychology
import positive
import negative
def get_data(k):
psychology.get_data(k)
positive.get_data(k)
negative.get_data(k)
|
[
"guenter@zamia.org"
] |
guenter@zamia.org
|
d31cdeea85d18f104969c895d5974aa43676e55a
|
2100443dd6cbd55587f7ea8d875fc4bbe70b5423
|
/summertunes/cli/run_mpv.py
|
edd8b96826d73ed84253ed801765b898f3cfb233
|
[
"MIT"
] |
permissive
|
irskep/summertunes
|
af57584cf789e5f216dc24faf6fb51cbe216af1a
|
d79c8f2e77bb97a48685a0ac23e8e4b7a753eb69
|
refs/heads/master
| 2021-05-02T04:44:17.498633
| 2017-06-25T20:59:05
| 2017-06-25T20:59:05
| 76,800,404
| 19
| 1
| null | 2017-01-17T19:47:08
| 2016-12-18T18:49:51
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,486
|
py
|
#!/usr/bin/env python
import json
import logging
import os
import signal
import sys
from multiprocessing import Process, Queue
from subprocess import Popen
log = logging.getLogger(__name__)
def _run_mpv_wrapper(pid_queue, mpv_args):
log.debug(' '.join(mpv_args))
proc = Popen(mpv_args)
pid_queue.put(proc.pid)
try:
proc.communicate()
except SystemExit:
proc.kill()
proc.kill()
except KeyboardInterrupt:
proc.kill()
proc.kill()
def wait_for_processes(pid_queue, procs):
try:
last_proc = None
for proc in procs:
proc.start()
last_proc = proc
if last_proc:
last_proc.join()
except KeyboardInterrupt:
pass
finally:
while not pid_queue.empty():
pid = pid_queue.get()
log.info("Kill %d", pid)
try:
os.kill(pid, signal.SIGTERM)
os.kill(pid, signal.SIGTERM)
except ProcessLookupError:
pass
for p2 in procs:
while p2.is_alive():
p2.terminate()
def run_mpv(websocket_port, socket_path):
pid_queue = Queue()
mpv_cmd = [
sys.executable, '-m', 'summertunes.mpv2websocket',
'--mpv-websocket-port', str(websocket_port),
'--mpv-socket-path', str(socket_path),
]
wait_for_processes(pid_queue, [
Process(target=_run_mpv_wrapper, args=(pid_queue, mpv_cmd))
])
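# Usage sketch (port and socket path are illustrative; assumes mpv and the
# summertunes.mpv2websocket module are available):
#
#     run_mpv(websocket_port=9877, socket_path='/tmp/summertunes-mpv.sock')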
|
[
"sjohnson@hipmunk.com"
] |
sjohnson@hipmunk.com
|
da0e8bd9c5e22a441ccf996635f3cc883e7db8ad
|
ac23a4af430f92868ce1bd0c7571fe78e9ba6ffe
|
/routs/models.py
|
44f5e7fd6b73f989909ee5412fcea7ed5e5b1073
|
[] |
no_license
|
nathaniel-prog/sofproject
|
52501ba050b3cf5dd10911fd283bee323dce491c
|
16fedf099f1e5e63883ea6a1a01965b9a3fd0ba5
|
refs/heads/master
| 2023-02-07T22:11:11.671001
| 2020-12-30T09:23:30
| 2020-12-30T09:23:30
| 294,345,213
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,580
|
py
|
from django.db import models
from django.shortcuts import render
from django.contrib.auth.models import User
from bemember.models import Post
from django.db.models import Q
import json
from routs.insert_geoloc import extract_lat_lng
from django.contrib import messages
import datetime
d= datetime.date.today()
class PostquerySet(models.QuerySet):
def search(self,query=None):
qs=self
if query is not None:
or_lookup= (Q(name__icontains=query)|
Q(address__icontains=query)|
Q(town__icontains=query))
qs=qs.filter(or_lookup).distinct()
return qs
class PostManager(models.Manager):
def get_queryset(self):
return PostquerySet(self.model, using=self._db)
def search(self,query=None):
return self.get_queryset().search(query=query)
class Town(models.Model):
name= models.CharField(max_length=100, unique=True )
image_town = models.FileField(default='hotelsample.jpg', upload_to='images/', null=True)
def __str__(self):
return f' {self.name} '
def giv_lng_latt(self):
        # note: `x == a or b` is always truthy in Python; a membership test is intended
        if self.name in ('bengourion', 'Bengourion'):
            return extract_lat_lng(self.name)
        else:
            print('you are not in bg airport')
class Hotels(models.Model):
name= models.CharField(max_length=300)
rates= models.IntegerField(null= True )
town= models.ForeignKey(Town,on_delete=models.CASCADE)
cost = models.IntegerField( null=False,default=150)
hotel_Main_Img = models.FileField(default='hotelsample.jpg',upload_to='images/', null= True )
def __str__(self):
return f" from {self.town} we have {self.name} "
class Appartement(models.Model):
town= models.ForeignKey(Town, on_delete=models.CASCADE)
address=models.CharField(max_length=200,null=True)
cost = models.IntegerField(null=True, default=150)
pieces= models.IntegerField(null=False, default=3)
surface=models.IntegerField(null=False, default= 90)
mirpeset= models.BooleanField(default=None, null=True)
parking=models.BooleanField(default=None , null=True)
air_conditioner= models.BooleanField(default=True, null=True)
comment= models.TextField(max_length=500)
app_image = models.ImageField(default='hotelsample.jpg', upload_to='images/', null=True)
likes= models.ManyToManyField(User, related_name='Appart_like')
def totalikes(self):
return self.likes.count()
def __str__(self):
return f" {self.address} cost {self.cost}"
|
[
"66939923+nathaniel-prog@users.noreply.github.com"
] |
66939923+nathaniel-prog@users.noreply.github.com
|
6cc63d83e26329cff0d80c500a81303a67f72c10
|
1f4a7e6377d92658838b65fde4437f26901d5601
|
/nets/gazebase.py
|
6c55e92c9bebd6e5118cf15195a2d9c8229e3e5b
|
[] |
no_license
|
qbqandyxq/My_gaze
|
7e7ad1dcdf4d69a53d331dff909cbe5980408a7a
|
bebe9005bffeb564cfae230be08ff2d610b9262b
|
refs/heads/master
| 2021-09-11T20:59:15.310599
| 2018-04-12T09:41:55
| 2018-04-12T09:41:55
| 115,717,806
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,196
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
from collections import namedtuple
import math
import numpy as np
GazeParams = namedtuple('GazeParameters', ['img_shape','labels'])
class GazeNet(object):
default_params = GazeParams(
img_shape=(448, 448),
labels=6,)
def __init__(self, params=None):
if isinstance(params, GazeParams):
self.params = params
else:
self.params = GazeNet.default_params
# ======================================================================= #
def net(self, inputs,
is_training=True,
update_feat_shapes=True,
dropout_keep_prob=0.5,
reuse=None,
scope='gaze_alexnet'):
r = alexnet_v2(inputs,
labels=self.params.labels,
is_training=is_training,
dropout_keep_prob=dropout_keep_prob,
reuse=reuse,
scope=scope)
return r
def arg_scope(self, weight_decay=0.0005, data_format='NHWC'):
return alexnet_v2_arg_scope(weight_decay, data_format=data_format)
def losses(self, logits, glabels, scope='gaze_losses'):
return gaze_losses(logits, glabels, scope=scope)
def alexnet_v2_arg_scope(weight_decay=0.0005,data_format='NHWC'):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
biases_initializer=tf.constant_initializer(0.1),
weights_regularizer=slim.l2_regularizer(weight_decay)):
with slim.arg_scope([slim.conv2d], padding='SAME'):
with slim.arg_scope([slim.max_pool2d], padding='VALID') as arg_sc:
return arg_sc
def alexnet_v2(inputs,
labels=6,
is_training=True,
dropout_keep_prob=0.5,
scope='gaze_alexnet',
reuse=None,
global_pool=False):
with tf.variable_scope(scope, 'gaze_alexnet', [inputs]) as sc:
end_points_collection = sc.original_name_scope + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],):
#outputs_collections=[end_points_collection]):
net = slim.conv2d(inputs, 64, [11, 11], 4, padding='VALID', scope='conv1')
# 110,110,64
net = slim.max_pool2d(net, [3, 3], 2, padding="SAME",scope='pool1')
# 55,55,64
net = slim.conv2d(net, 192, [5, 5], scope='conv2')
# 55,55,192
net = slim.max_pool2d(net, [3, 3], 2, scope='pool2')
# 27,27,192
net = slim.conv2d(net, 384, [3, 3], scope='conv3')
# 27,27,384
net = slim.conv2d(net, 384, [3, 3], scope='conv4')
# 27,27,384
net = slim.conv2d(net, 256, [3, 3], scope='conv5')
# 27,27,256
net = slim.max_pool2d(net, [3, 3], 2, scope='pool5')
# 13, 13, 256
# Use conv2d instead of fully_connected layers.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_initializer=trunc_normal(0.005),
biases_initializer=tf.constant_initializer(0.1)):
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
net_ = slim.conv2d(net, 256, [1, 1], scope='convs_1')
net_ = slim.conv2d(net_, 256, [1, 1], scope='convs_2')
net_ = slim.conv2d(net_, 1, [1, 1], scope='convs_3')
#change the dimension
#net_ = tf.stack
            net = tf.multiply(net, tf.concat([net_] * 256, 3))
            print("==========", net.shape)
            net = tf.reshape(net, [16, -1])  # note: batch size is hard-coded to 16 here
net = slim.fully_connected(net, 4096, scope='fc6')
net = slim.dropout(net, 0.5, scope='dropout6')
net = slim.fully_connected(net, 4096, scope='fc7')
net = slim.dropout(net, 0.5, scope='dropout7')
net = slim.fully_connected(net, 6, activation_fn=None, scope='fc8')
end_points['final8_layer'] = net
return net #, end_points
def gaze_losses(logits, glabels, scope=None):
with tf.name_scope('loss'):
loss = tf.abs(logits - glabels)
cost = tf.reduce_mean(loss)
with tf.name_scope('loss'):
tf.losses.add_loss(cost)
def gaze_arg_scope(weight_decay=0.0005, data_format='NHWC'):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
weights_initializer=tf.contrib.layers.xavier_initializer(),
biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
padding='SAME',
data_format=data_format) as sc:
return sc
alexnet_v2.default_image_size = (448, 448)
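# Minimal forward-pass sketch (assumes TensorFlow 1.x, where tf.contrib.slim
# is available; batch size 16 matches the hard-coded reshape above):
#
#     gaze = GazeNet()
#     images = tf.placeholder(tf.float32, [16, 448, 448, 3])
#     with slim.arg_scope(gaze.arg_scope()):
#         logits = gaze.net(images, is_training=False)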
|
[
"971725806@qq.com"
] |
971725806@qq.com
|
2de31ce63bf56006e3b69bfa7c958f0145752bff
|
555b9f764d9bca5232360979460bc35c2f5ad424
|
/google/ads/google_ads/v1/proto/services/conversion_adjustment_upload_service_pb2_grpc.py
|
6e9635e57ac9663a3ce02788dfd010e22f15e749
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
juanmacugat/google-ads-python
|
b50256163782bc0223bcd8b29f789d74f4cfad05
|
0fc8a7dbf31d9e8e2a4364df93bec5f6b7edd50a
|
refs/heads/master
| 2021-02-18T17:00:22.067673
| 2020-03-05T16:13:57
| 2020-03-05T16:13:57
| 245,215,877
| 1
| 0
|
Apache-2.0
| 2020-03-05T16:39:34
| 2020-03-05T16:39:33
| null |
UTF-8
|
Python
| false
| false
| 2,255
|
py
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v1.proto.services import conversion_adjustment_upload_service_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_conversion__adjustment__upload__service__pb2
class ConversionAdjustmentUploadServiceStub(object):
"""Service to upload conversion adjustments.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.UploadConversionAdjustments = channel.unary_unary(
'/google.ads.googleads.v1.services.ConversionAdjustmentUploadService/UploadConversionAdjustments',
request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_conversion__adjustment__upload__service__pb2.UploadConversionAdjustmentsRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_conversion__adjustment__upload__service__pb2.UploadConversionAdjustmentsResponse.FromString,
)
class ConversionAdjustmentUploadServiceServicer(object):
"""Service to upload conversion adjustments.
"""
def UploadConversionAdjustments(self, request, context):
"""Processes the given conversion adjustments.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ConversionAdjustmentUploadServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'UploadConversionAdjustments': grpc.unary_unary_rpc_method_handler(
servicer.UploadConversionAdjustments,
request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_conversion__adjustment__upload__service__pb2.UploadConversionAdjustmentsRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_conversion__adjustment__upload__service__pb2.UploadConversionAdjustmentsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v1.services.ConversionAdjustmentUploadService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
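# Client-side usage sketch (illustrative only -- a real Google Ads call needs
# a secure, authenticated channel rather than an insecure local one):
#
#     channel = grpc.insecure_channel('localhost:50051')
#     stub = ConversionAdjustmentUploadServiceStub(channel)
#     # response = stub.UploadConversionAdjustments(request)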
|
[
"noreply@github.com"
] |
noreply@github.com
|
e33115a19dd732a6c8ce0df64c67a4d22df9a5b5
|
12796e7a68295a5777690cf916197f553dcfc690
|
/plans/manageiq/rhev.py
|
cce05c9c1157767f73a22cb58d304b2c1cdf57a7
|
[
"Apache-2.0"
] |
permissive
|
jruariveiro/kcli
|
294768c25748f8a2281d9d7b3cad6f6d6dd5d9a9
|
2de467f9c74913b030ca8e2f32c7caad59bf53c1
|
refs/heads/master
| 2020-03-26T05:50:58.301210
| 2018-08-13T12:45:04
| 2018-08-13T12:45:04
| 144,577,929
| 0
| 0
|
Apache-2.0
| 2018-08-13T12:39:19
| 2018-08-13T12:39:19
| null |
UTF-8
|
Python
| false
| false
| 655
|
py
|
#!/usr/bin/python
import json
import requests
user = "admin"
password = "[[ password ]]"
rhevuser = "admin@internal"
rhevpassword = "[[ rhev_password ]]"
rhevhost = "[[ rhev_host ]]"
headers = {'content-type': 'application/json', 'Accept': 'application/json'}
postdata = {
"type": "ManageIQ::Providers::Redhat::InfraManager",
"name": "rhev",
"hostname": rhevhost,
"ipaddress": rhevhost,
"credentials": {
"userid": rhevuser,
"password": rhevpassword
}
}
url = "https://127.0.0.1/api/providers"
r = requests.post(url, verify=False, headers=headers, auth=(user, password), data=json.dumps(postdata))
print(r.json())
|
[
"karimboumedhel@gmail.com"
] |
karimboumedhel@gmail.com
|
8ecbc754fb7b5d7fc3b127f5aba3afc90fec38bd
|
55ceefc747e19cdf853e329dba06723a44a42623
|
/_CodeTopics/LeetCode/401-600/000430/000430.py
|
3d1bb3c077f5343fa55b72a6a1472a0b865025c4
|
[] |
no_license
|
BIAOXYZ/variousCodes
|
6c04f3e257dbf87cbe73c98c72aaa384fc033690
|
ee59b82125f100970c842d5e1245287c484d6649
|
refs/heads/master
| 2023-09-04T10:01:31.998311
| 2023-08-26T19:44:39
| 2023-08-26T19:44:39
| 152,967,312
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,241
|
py
|
"""
# Definition for a Node.
class Node(object):
def __init__(self, val, prev, next, child):
self.val = val
self.prev = prev
self.next = next
self.child = child
"""
class Solution(object):
def flatten(self, head):
"""
:type head: Node
:rtype: Node
"""
if not head:
return head
res = []
def flatten_one_level(curr):
while curr:
nextNode = curr.next
curr.prev = None
curr.next = None
res.append(curr)
if curr.child:
flatten_one_level(curr.child)
curr.child = None
curr = nextNode
flatten_one_level(head)
for i in range(len(res)-1):
res[i].next = res[i+1]
res[i+1].prev = res[i]
res[-1].next = None
return head
"""
https://leetcode-cn.com/submissions/detail/222579276/
26 / 26 个通过测试用例
状态:通过
执行用时: 28 ms
内存消耗: 13.7 MB
执行用时:28 ms, 在所有 Python 提交中击败了57.78%的用户
内存消耗:13.7 MB, 在所有 Python 提交中击败了66.67%的用户
"""
|
[
"noreply@github.com"
] |
noreply@github.com
|
edd405aeb0f0765e654d4fe0abd73b494c5a9491
|
ebb3f1e7af7008dce995eeecae945eedfd5b278a
|
/insert_dates.py
|
e1159d40950cb7779bb34c65efdcbca4041494bf
|
[
"MIT"
] |
permissive
|
fellypesb/project_PET_2020
|
b718b151b664cecd2910ff34ae67ef91235dd3db
|
402e4964b5a23ad6786870f4a0fcb1b9ccc522b1
|
refs/heads/main
| 2023-04-30T19:04:25.640447
| 2021-05-16T14:54:09
| 2021-05-16T14:54:09
| 322,129,305
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,961
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 22 14:59:57 2020
@author: fellypesb
"""
import pandas as pd
df = pd.read_csv('/home/fellypesb/Documents/PET/project_codes/outubro/dataset/cluster_data_hour.csv',
parse_dates=['Data'])
df2 = df.iloc[:,:-2]
day = []
month = []
year = []
for i in df.Data:
day.append(i.day)
month.append(i.month)
year.append(i.year)
df2['Dia'] = day
df2['Mes'] = month
df2['Ano'] = year
df2 = pd.concat([df2,df.iloc[:,-2:]], axis=1)
train = df2[:int(0.8*len(df2)) + 17]
test = df2[int(0.8*len(df2)) + 17:]
# group_train = train.groupby('Local', axis=0)
# group_test = test.groupby('Local', axis=0)
# group_train.count()
# group_test.count()
# train.to_csv('train_cdh2.csv', index=False)
# test.to_csv('test_cdh2.csv', index=False)
df = pd.read_csv('/home/fellypesb/Documents/PET/project_codes/outubro/dataset/cluster_dh_add_imputer_nan.csv',
parse_dates=['Data'])
df2 = df.iloc[:,:-2]
day = []
month = []
year = []
for i in df.Data:
day.append(i.day)
month.append(i.month)
year.append(i.year)
df2['Dia'] = day
df2['Mes'] = month
df2['Ano'] = year
df2 = pd.concat([df2,df.iloc[:,-2:]], axis=1)
train = df2[:int(0.8*len(df2)) + 17]
test = df2[int(0.8*len(df2)) + 17:]
# train.to_csv('train_cdh_nan2.csv', index=False)
# test.to_csv('test_cdh_nan2.csv', index=False)
df = pd.read_csv('/home/fellypesb/Documents/PET/project_codes/outubro/dataset/average_stations.csv',
parse_dates=['Data'])
df2 = df.iloc[:,:-1]
day = []
month = []
year = []
for i in df.Data:
day.append(i.day)
month.append(i.month)
year.append(i.year)
df2['Dia'] = day
df2['Mes'] = month
df2['Ano'] = year
df2 = pd.concat([df2,df.iloc[:,-1]], axis=1)
train = df2[:int(0.8*len(df2)) + 3]
test = df2[int(0.8*len(df2)) + 3:]
# train.to_csv('train_average_stations2.csv')
# test.to_csv('test_average_stations2.csv')
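# The three day/month/year loops above can be collapsed with the vectorized
# .dt accessor (a sketch; assumes the same parsed 'Data' column, untested
# against the original CSVs):
#
#     df2['Dia'] = df.Data.dt.day
#     df2['Mes'] = df.Data.dt.month
#     df2['Ano'] = df.Data.dt.year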
|
[
"fellypecsiqueira@gmail.com"
] |
fellypecsiqueira@gmail.com
|
fc5b935b9f8310e3f6f4113fb0a228bec78d5e73
|
2bb6f9c0a3658acf2bd6b97dcdeace9213723640
|
/namespace/models/list_field.py
|
ee577399fe07b1476580fed945fe7ee0aef6dc04
|
[] |
no_license
|
hugoseabra/mailchimp-service
|
4fee930fd11b30f4c7da3654da2cbb231ca34341
|
0424b90fdc0911b4a6b1d514ba51c88d7d3572b4
|
refs/heads/develop
| 2022-07-20T02:52:06.147740
| 2020-03-17T21:31:37
| 2020-03-17T21:31:37
| 242,433,347
| 0
| 0
| null | 2022-07-06T20:30:14
| 2020-02-23T00:39:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,475
|
py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from core.models import track_data
from core.models.mixins import (
DateTimeManagementMixin,
DeletableModelMixin,
EntityMixin,
UUIDPkMixin,
)
@track_data('namespace_id', 'label', 'tag')
class ListField(UUIDPkMixin,
EntityMixin,
DeletableModelMixin,
DateTimeManagementMixin,
models.Model):
"""
Campo adicionado a uma lista.
"""
class Meta:
verbose_name = _('list field')
verbose_name_plural = _('list fields')
unique_together = (
('namespace_id', 'tag',),
)
FIELD_TYPE_TEXT = 'text'
FIELD_TYPE_NUMBER = 'number'
FIELD_TYPES = (
(FIELD_TYPE_TEXT, _('Text')),
(FIELD_TYPE_NUMBER, _('Number')),
)
namespace = models.ForeignKey(
verbose_name=_('namespace'),
to='namespace.Namespace',
on_delete=models.PROTECT,
null=False,
blank=False,
related_name='fields',
)
field_type = models.CharField(
max_length=6,
verbose_name=_('field type'),
null=False,
blank=False,
choices=FIELD_TYPES,
default=FIELD_TYPE_TEXT,
)
label = models.CharField(
max_length=50,
verbose_name=_('label'),
null=False,
blank=False,
)
tag = models.CharField(
max_length=50,
verbose_name=_('tag'),
null=False,
blank=False,
)
help_text = models.CharField(
max_length=255,
verbose_name=_('help text'),
null=True,
blank=True,
)
active = models.BooleanField(
verbose_name=_('active'),
default=False,
null=False,
blank=False,
help_text=_('If true, it means that the field will be created in list'
' in MailChimp platform.'),
)
def to_sync_data(self):
return {
'name': self.label,
'tag': self.tag,
'type': self.field_type,
'required': False,
'list_id': self.namespace.default_list_id,
'help_text': self.help_text,
}
def __repr__(self):
        return '<ListField pk: {}, label: {}, tag: {}>'.format(
self.pk,
self.label,
self.tag,
)
def __str__(self):
return '{} ({})'.format(self.label, self.tag)
|
[
"hugoseabra19@gmail.com"
] |
hugoseabra19@gmail.com
|
2e852072b13816e1478dfbbc6347f0ce6148e604
|
52bbc4b2d23782984b184efcae5b2c0fb67fef76
|
/flaskDemo/models.py
|
1061da6316aa6820ae02c58e026a991664fc29b4
|
[] |
no_license
|
zhecz/Work_Management_System
|
3f4b871fa5828f65f3a4832fa3d13c8bb0d16984
|
6eea41da4e8f656ce176af1086d72f1a8940f33a
|
refs/heads/master
| 2022-12-08T11:55:58.463241
| 2019-10-28T16:41:18
| 2019-10-28T16:41:18
| 215,641,496
| 0
| 0
| null | 2022-12-08T06:47:09
| 2019-10-16T20:46:14
|
Python
|
UTF-8
|
Python
| false
| false
| 5,391
|
py
|
from datetime import datetime
from flaskDemo import db, login_manager
from flask_login import UserMixin
from functools import partial
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
@login_manager.user_loader
def load_user(user_id):
return employee.query.get(int(user_id))
class role(db.Model, UserMixin):
__tablename__ = 'Role'
__table_args__ = {'extend_existing': False}
roleID = db.Column(db.Integer, primary_key=True)
roleName = db.Column(db.String(25), nullable=False)
#employee = db.relationship("Employee",backref = 'role',lazy = True)
class employee(db.Model, UserMixin):
__tablename__ = 'Employee'
__table_args__ = {'extend_existing': False}
employeeID = db.Column(db.Integer, primary_key=True)
firstName = db.Column(db.String(200), nullable=False)
lastName = db.Column(db.String(200), nullable=False)
username = db.Column(db.String(200),unique=True,nullable = False)
password = db.Column(db.String(70), nullable=False)
phoneNumber = db.Column(db.String(12), nullable=False)
email = db.Column(db.String(100), nullable=False)
roleID = db.Column(db.Integer, db.ForeignKey('Role.roleID'), nullable=False)
def get_id(self):
return (self.employeeID)
class building(db.Model, UserMixin):
__tablename__ = 'Building'
__table_args__ = {'extend_existing': False}
buildingID = db.Column(db.Integer, primary_key=True)
buildingName = db.Column(db.String(200), nullable=False)
buildingAddress = db.Column(db.String(200), nullable=False)
postalCode = db.Column(db.Integer, nullable=False)
numberOfrooms = db.Column(db.Integer, nullable=False)
units=db.relationship('unit',backref='building')
def get_id(self):
return (self.buildingID)
def __str__(self):
return self.buildingName
class unit(db.Model, UserMixin):
__tablename__ = 'Unit'
__table_args__ = {'extend_existing': False}
buildingID = db.Column(db.Integer,db.ForeignKey('Building.buildingID'), nullable=False)
unitID = db.Column(db.Integer,primary_key=True)
unitName = db.Column(db.String(200), nullable=False)
def get_id(self):
return (self.unitID)
class work(db.Model, UserMixin):
__tablename__ = 'Work'
__table_args__ = {'extend_existing': False}
workID = db.Column(db.Integer, primary_key=True, nullable=False)
employeeID = db.Column(db.Integer,db.ForeignKey('Employee.employeeID'), nullable=False)
buildingID = db.Column(db.Integer,db.ForeignKey('Building.buildingID'),nullable = False)
unitID = db.Column(db.Integer, db.ForeignKey('Unit.unitID'),nullable = False)
workType = db.Column(db.String(100),nullable = False)
workOrdernumber = db.Column(db.String,unique=True,nullable = False)
startTimeAuto = db.Column(db.DateTime,nullable = False)
endTimeAuto = db.Column(db.DateTime,nullable = True)
startTimeManual = db.Column(db.DateTime,nullable = False)
endTimeManual = db.Column(db.DateTime,nullable = True)
class maintenance(db.Model, UserMixin):
__tablename__ = 'Maintenance'
__table_args__ = {'extend_existing': False}
mainID = db.Column(db.Integer, primary_key = True,nullable = False)
workID = db.Column(db.Integer,db.ForeignKey('Work.workID'),nullable = False)
maintenanceType = db.Column(db.String(200),nullable = False)
yearOrworkOrder = db.Column(db.String(200),nullable = False)
description = db.Column(db.String(200),nullable = False)
picture = db.Column(db.String(200))
class apartmentrehab(db.Model, UserMixin):
__tablename__ = 'ApartmentRehab'
__table_args__ = {'extend_existing': False}
rehID = db.Column(db.Integer, primary_key = True,nullable = False)
workID = db.Column(db.Integer,db.ForeignKey('Work.workID'),nullable = False)
rehabType = db.Column(db.String(200),nullable = False)
others = db.Column(db.String(200),nullable = True)
description = db.Column(db.String(200),nullable = False)
picture = db.Column(db.String(200))
class others(db.Model, UserMixin):
__tablename__ = 'Others'
__table_args__ = {'extend_existing': False}
othID = db.Column(db.Integer, primary_key = True,nullable = False)
workID = db.Column(db.Integer,db.ForeignKey('Work.workID'),nullable = False)
othersType = db.Column(db.String(200),nullable = False)
others = db.Column(db.String(200),nullable = True)
description = db.Column(db.String(200),nullable = False)
picture = db.Column(db.String(200))
class landscaping(db.Model, UserMixin):
__tablename__ = 'Landscaping'
__table_args__ = {'extend_existing': False}
lanscID = db.Column(db.Integer, primary_key = True,nullable = False)
workID = db.Column(db.Integer,db.ForeignKey('Work.workID'),nullable = False)
landscapingType = db.Column(db.String(200),nullable = False)
description = db.Column(db.String(200),nullable = False)
picture = db.Column(db.String(200))
class pestcontrol(db.Model, UserMixin):
__tablename__ = 'PestControl'
__table_args__ = {'extend_existing': False}
pcID = db.Column(db.Integer, primary_key = True,nullable = False)
workID = db.Column(db.Integer,db.ForeignKey('Work.workID'),nullable = False)
description = db.Column(db.String(200),nullable = False)
picture = db.Column(db.String(200))
|
[
"youcanfindivy@gmail.com"
] |
youcanfindivy@gmail.com
|
835b817d0ee6455037488d80b4e5cc940fc02e0d
|
0e110705bf2859b6a987685280ab5c3dfc1fc89d
|
/jpmoth/jpmoth/spiders/jpmoth_spider.py
|
a4a902ad07fd5f6836872871b475871b9e3ca13d
|
[] |
no_license
|
KitauraHiromi/scrapy
|
ed72ce1bcf7fb9b9f45f3b6660228589b497aae5
|
d4087ea119e9a37362fe03f451a8cb7ff0ae4a03
|
refs/heads/master
| 2021-01-20T17:33:49.851409
| 2016-08-07T03:07:20
| 2016-08-07T03:07:20
| 65,112,704
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 984
|
py
|
import scrapy
from jpmoth.items import jpmothItem
from scrapy.selector import HtmlXPathSelector
import codecs
f = open("jpmoth.txt", "w")
def cvt(str_list):
for i in range(len(str_list)):
str_list[i] = str_list[i].encode("shift-jis")
return str_list
class jpmothSpider(scrapy.Spider):
name = "jpmoth"
allowed_domains = ["jpmoth.org"]
start_urls = ["http://www.jpmoth.org/"]
custom_settings = {
"DOWNLOAD_DELAY": 1,
"DEPTH_LIMIT" : 10,
}
def parse(self, response):
for href in response.xpath("//a/@href"):
url = response.urljoin(href.extract())
yield scrapy.Request(url, callback=self.parse_next_page)
def parse_next_page(self, response):
urls = []
for href in response.xpath("//a/@href"):
#item["title"] = response.xpath("//title/text()").extract()
urls.append(response.urljoin(href.extract()))
#item["desc"] = response.xpath("//p/text()").extract()
for element in urls:
f.write(element)
f.write("\n")
yield{
'urls':urls,
}
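# Usage note (assumes a standard Scrapy project layout): run from the project
# root with `scrapy crawl jpmoth`; crawled URLs accumulate in jpmoth.txt.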
|
[
"kasumiga_eria394@yahoo.co.jp"
] |
kasumiga_eria394@yahoo.co.jp
|
95b8b5da4d3c36487e38a700d023474a9104bc49
|
7c94fef9b1dd109efb9f7851871130b3e0f27b65
|
/.c9/metadata/environment/services/spaceapps/spaceapps/spaceapps/core/utils_ai_collect.py
|
e00df4801b4cdecb47d1a43b8ef3bc12337f25ad
|
[] |
no_license
|
turtlesallthewayup/spaceapps_webapp
|
c2eb14cd1a999bbe8ead32555b4592348881afb8
|
486ed9058c5d73dd47d7e195591c63b301496b5f
|
refs/heads/master
| 2020-08-21T16:11:18.021549
| 2019-11-04T00:04:41
| 2019-11-04T00:04:41
| 216,195,499
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,887
|
py
|
{"filter":false,"title":"utils_ai_collect.py","tooltip":"/services/spaceapps/spaceapps/spaceapps/core/utils_ai_collect.py","undoManager":{"mark":1,"position":1,"stack":[[{"start":{"row":0,"column":0},"end":{"row":60,"column":16},"action":"insert","lines":["import CONST","import numpy as np","import cv2","import os","import time","","# helper functions","def createFolder(directory):"," try:"," if not os.path.exists(directory):"," os.makedirs(directory)"," except OSError:"," print ('Error: Creating directory. ' + directory)"," ","def record_frames(video_name):"," # capture a video"," cap = cv2.VideoCapture(0)"," time.sleep(2)",""," frame_count = 1"," createFolder('./dataset/'+video_name+'/')"," "," print('Keep r pressed to record')"," print('Keep q pressed to exit')"," "," # record frames"," while(frame_count <= CONST.FRAMES_PER_VIDEO):"," # Capture frame-by-frame"," ret, frame = cap.read()"," "," # Display the resulting frame"," cv2.imshow('frame',frame)"," "," if ret==True:"," if cv2.waitKey(33) == ord('r'):"," print ('.', end=\"\")"," #save the frame as an image"," cv2.imwrite('dataset/%s/%d.jpg' % (video_name, frame_count), frame) # save frame as JPEG file"," frame_count += 1 "," elif cv2.waitKey(33) == ord('q'):"," break"," else:"," break "," # Release opencv resources"," cap.release()"," cv2.destroyAllWindows()","","# GETTING THE DATASET "," ","# extract frames from live video","label = input(\"In a word, what am I about to see?\")","record_frames(label)","print ('\\nGot it')","","label = input(\"And now?\")","record_frames(label)","print ('\\nAlright')","","label = input(\"what about the last?\")","record_frames(label)","print ('\\nDone')"],"id":1}],[{"start":{"row":0,"column":0},"end":{"row":0,"column":2},"action":"insert","lines":["# "],"id":2},{"start":{"row":1,"column":0},"end":{"row":1,"column":2},"action":"insert","lines":["# "]},{"start":{"row":2,"column":0},"end":{"row":2,"column":2},"action":"insert","lines":["# "]},{"start":{"row":3,"column":0},"end":{"row":3,"column":2},"action":"insert","lines":["# "]},{"start":{"row":4,"column":0},"end":{"row":4,"column":2},"action":"insert","lines":["# "]},{"start":{"row":6,"column":0},"end":{"row":6,"column":2},"action":"insert","lines":["# "]},{"start":{"row":7,"column":0},"end":{"row":7,"column":2},"action":"insert","lines":["# "]},{"start":{"row":8,"column":0},"end":{"row":8,"column":2},"action":"insert","lines":["# "]},{"start":{"row":9,"column":0},"end":{"row":9,"column":2},"action":"insert","lines":["# "]},{"start":{"row":10,"column":0},"end":{"row":10,"column":2},"action":"insert","lines":["# "]},{"start":{"row":11,"column":0},"end":{"row":11,"column":2},"action":"insert","lines":["# "]},{"start":{"row":12,"column":0},"end":{"row":12,"column":2},"action":"insert","lines":["# "]},{"start":{"row":14,"column":0},"end":{"row":14,"column":2},"action":"insert","lines":["# "]},{"start":{"row":15,"column":0},"end":{"row":15,"column":2},"action":"insert","lines":["# "]},{"start":{"row":16,"column":0},"end":{"row":16,"column":2},"action":"insert","lines":["# "]},{"start":{"row":17,"column":0},"end":{"row":17,"column":2},"action":"insert","lines":["# "]},{"start":{"row":19,"column":0},"end":{"row":19,"column":2},"action":"insert","lines":["# "]},{"start":{"row":20,"column":0},"end":{"row":20,"column":2},"action":"insert","lines":["# "]},{"start":{"row":22,"column":0},"end":{"row":22,"column":2},"action":"insert","lines":["# "]},{"start":{"row":23,"column":0},"end":{"row":23,"column":2},"action":"insert","lines":["# 
"]},{"start":{"row":25,"column":0},"end":{"row":25,"column":2},"action":"insert","lines":["# "]},{"start":{"row":26,"column":0},"end":{"row":26,"column":2},"action":"insert","lines":["# "]},{"start":{"row":27,"column":0},"end":{"row":27,"column":2},"action":"insert","lines":["# "]},{"start":{"row":28,"column":0},"end":{"row":28,"column":2},"action":"insert","lines":["# "]},{"start":{"row":30,"column":0},"end":{"row":30,"column":2},"action":"insert","lines":["# "]},{"start":{"row":31,"column":0},"end":{"row":31,"column":2},"action":"insert","lines":["# "]},{"start":{"row":33,"column":0},"end":{"row":33,"column":2},"action":"insert","lines":["# "]},{"start":{"row":34,"column":0},"end":{"row":34,"column":2},"action":"insert","lines":["# "]},{"start":{"row":35,"column":0},"end":{"row":35,"column":2},"action":"insert","lines":["# "]},{"start":{"row":36,"column":0},"end":{"row":36,"column":2},"action":"insert","lines":["# "]},{"start":{"row":37,"column":0},"end":{"row":37,"column":2},"action":"insert","lines":["# "]},{"start":{"row":38,"column":0},"end":{"row":38,"column":2},"action":"insert","lines":["# "]},{"start":{"row":39,"column":0},"end":{"row":39,"column":2},"action":"insert","lines":["# "]},{"start":{"row":40,"column":0},"end":{"row":40,"column":2},"action":"insert","lines":["# "]},{"start":{"row":41,"column":0},"end":{"row":41,"column":2},"action":"insert","lines":["# "]},{"start":{"row":42,"column":0},"end":{"row":42,"column":2},"action":"insert","lines":["# "]},{"start":{"row":43,"column":0},"end":{"row":43,"column":2},"action":"insert","lines":["# "]},{"start":{"row":44,"column":0},"end":{"row":44,"column":2},"action":"insert","lines":["# "]},{"start":{"row":45,"column":0},"end":{"row":45,"column":2},"action":"insert","lines":["# "]},{"start":{"row":47,"column":0},"end":{"row":47,"column":2},"action":"insert","lines":["# "]},{"start":{"row":49,"column":0},"end":{"row":49,"column":2},"action":"insert","lines":["# "]},{"start":{"row":50,"column":0},"end":{"row":50,"column":2},"action":"insert","lines":["# "]},{"start":{"row":51,"column":0},"end":{"row":51,"column":2},"action":"insert","lines":["# "]},{"start":{"row":52,"column":0},"end":{"row":52,"column":2},"action":"insert","lines":["# "]},{"start":{"row":54,"column":0},"end":{"row":54,"column":2},"action":"insert","lines":["# "]},{"start":{"row":55,"column":0},"end":{"row":55,"column":2},"action":"insert","lines":["# "]},{"start":{"row":56,"column":0},"end":{"row":56,"column":2},"action":"insert","lines":["# "]},{"start":{"row":58,"column":0},"end":{"row":58,"column":2},"action":"insert","lines":["# "]},{"start":{"row":59,"column":0},"end":{"row":59,"column":2},"action":"insert","lines":["# "]},{"start":{"row":60,"column":0},"end":{"row":60,"column":2},"action":"insert","lines":["# "]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":29,"column":8},"end":{"row":29,"column":8},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1571521193556,"hash":"196706fa32081caf0c300b3ee7c2366b841da3d6"}
|
[
"ubuntu@ip-172-31-46-231.us-west-2.compute.internal"
] |
ubuntu@ip-172-31-46-231.us-west-2.compute.internal
|
2c63726f954f6cb0ed0fa254ffeea056e8545a62
|
3481023b43028c5ee9520a8be0978e914bdcb548
|
/manga_py/providers/nightow_net.py
|
86e53cd25348d31c6966eb6c3205f80499332ff9
|
[
"MIT"
] |
permissive
|
manga-py/manga-py
|
18f6818d8efc96c3e69efee7dff3f3d6c773e32a
|
0db97123acab1f2fb99e808b0ba54db08977e5c8
|
refs/heads/stable_1.x
| 2023-08-20T03:04:06.373108
| 2023-04-16T08:28:15
| 2023-04-16T08:28:15
| 98,638,892
| 444
| 56
|
MIT
| 2023-07-27T13:21:40
| 2017-07-28T10:27:43
|
Python
|
UTF-8
|
Python
| false
| false
| 1,285
|
py
|
from urllib.parse import unquote_plus
from manga_py.provider import Provider
from .helpers.std import Std
class NightowNet(Provider, Std):
_name_re = r'manga=(.+?)(?:&.+)?$'
def get_chapter_index(self) -> str:
ch = unquote_plus(self.chapter)
idx = self.re.search(r'chapter=(?:.+?)\+(\d+(?:\.\d+)?)', ch)
if idx:
return '-'.join(idx.group(1).split('.'))
return self.re.search('chapter=(.+?)(?:&.+)?$', ch).group(1)
def get_content(self):
name = self._get_name(self._name_re)
return self.http_get('{}/online/?manga={}'.format(
self.domain,
name
))
def get_manga_name(self) -> str:
return unquote_plus(self._get_name(self._name_re))
def get_chapters(self):
return self._elements('.selector .options a')
def prepare_cookies(self):
self._storage['referer'] = self.domain + '/online/'
def get_files(self):
content = self.http_get(self.chapter)
items = self.re.findall(r'imageArray\[\d+\]\s*=\s*[\'"](.+)[\'"];', content)
n = self.normalize_uri
return [n(i) for i in items]
def get_cover(self) -> str:
pass
def book_meta(self) -> dict:
# todo meta
pass
main = NightowNet
|
[
"sttv-pc@mail.ru"
] |
sttv-pc@mail.ru
|
8a20174c536f7b7a825e2aa4666c5462ebb3d9a5
|
a6106cedc42dcab94ccc4ee6d681372d2246ce5e
|
/python/활용자료/예제/02/ex2-24.py
|
c858e184ca1d9032edc29dc8ef0b5ed1a39a8b11
|
[] |
no_license
|
leemyoungwoo/pybasic
|
a5a4b68d6b3ddd6f07ff84dc8df76da02650196f
|
481075f15613c5d8add9b8c4d523282510d146d2
|
refs/heads/master
| 2022-10-08T19:57:26.073431
| 2020-06-15T06:50:02
| 2020-06-15T06:50:02
| 267,502,565
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
name = '황예린'
age = 18
eyesight = 1.2
a = 'Name : {}'.format(name)
b = 'Age : {} years old'.format(age)
c = 'Eyesight : {}'.format(eyesight)
print(a)
print(b)
print(c)
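# The same output with f-strings (Python 3.6+):
#
#     print(f'Name : {name}')
#     print(f'Age : {age} years old')
#     print(f'Eyesight : {eyesight}')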
|
[
"mwlee2587@gmail.com"
] |
mwlee2587@gmail.com
|
f8977685a94d80cf3dfd33f28317c3f562df7ba3
|
1fccf52e0a694ec03aac55e42795487a69ef1bd4
|
/src/euler_python_package/euler_python/medium/p422.py
|
3c591094a147780090d3295335ca6c74526aab21
|
[
"MIT"
] |
permissive
|
wilsonify/euler
|
3b7e742b520ee3980e54e523a018cd77f7246123
|
5214b776175e6d76a7c6d8915d0e062d189d9b79
|
refs/heads/master
| 2020-05-27T12:15:50.417469
| 2019-09-14T22:42:35
| 2019-09-14T22:42:35
| 188,614,451
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27
|
py
|
def problem422():
pass
|
[
"tom.andrew.wilson@gmail.com"
] |
tom.andrew.wilson@gmail.com
|
914b69a2a74888fec8815e7f2d5180c2fd0fd4d8
|
15c4f7546ddabe8b2f3daeb3c35ebcf651293cd4
|
/env/bin/thresholder.py
|
5544f1cefe45e19923288026cc56d35e21debbc8
|
[] |
no_license
|
mcscope/Lips
|
285fa4ae57b389e2fb19b0aa40d222e96e8513fa
|
90aa39a90e68e8a57f2c8c5b3995539393743fa9
|
refs/heads/master
| 2020-12-30T12:56:01.039063
| 2017-09-13T21:04:21
| 2017-09-13T21:04:21
| 91,373,206
| 0
| 2
| null | 2017-09-13T21:04:22
| 2017-05-15T18:51:19
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 2,014
|
py
|
#!/Users/Christopher/Code/lipsweb/env/bin/python2.7
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates how a 1-bit BitmapImage can be used
# as a dynamically updated overlay
#
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
#
# an image viewer
class UI(tkinter.Frame):
def __init__(self, master, im, value=128):
tkinter.Frame.__init__(self, master)
self.image = im
self.value = value
self.canvas = tkinter.Canvas(self, width=im.size[0], height=im.size[1])
self.backdrop = ImageTk.PhotoImage(im)
self.canvas.create_image(0, 0, image=self.backdrop, anchor=tkinter.NW)
self.canvas.pack()
scale = tkinter.Scale(self, orient=tkinter.HORIZONTAL, from_=0, to=255,
resolution=1, command=self.update_scale,
length=256)
scale.set(value)
scale.bind("<ButtonRelease-1>", self.redraw)
scale.pack()
# uncomment the following line for instant feedback (might
# be too slow on some platforms)
# self.redraw()
def update_scale(self, value):
self.value = float(value)
self.redraw()
def redraw(self, event=None):
# create overlay (note the explicit conversion to mode "1")
im = self.image.point(lambda v, t=self.value: v >= t, "1")
self.overlay = ImageTk.BitmapImage(im, foreground="green")
# update canvas
self.canvas.delete("overlay")
self.canvas.create_image(0, 0, image=self.overlay, anchor=tkinter.NW,
tags="overlay")
# --------------------------------------------------------------------
# main
if len(sys.argv) != 2:
print("Usage: thresholder file")
sys.exit(1)
root = tkinter.Tk()
im = Image.open(sys.argv[1])
if im.mode != "L":
im = im.convert("L")
# im.thumbnail((320,200))
UI(root, im).pack()
root.mainloop()
|
[
"mcscope@gmail.com"
] |
mcscope@gmail.com
|
4ff59cf7d07865a4a752e0689e440e2a5188be29
|
260e33f93cb8aa1799b32966326a72d460f132cf
|
/highaltitudeparabolic/apps/service/videosrv.py
|
7303a1dbe44b0cfbe299891c1aeb41c53d38897b
|
[] |
no_license
|
yuangongping/highaltitudeparabolic_demo
|
c135fdf60e5f1fe03ad13b780dd92e0e7b757e20
|
6090baa5b9c799d61733f01d1b1ee57c078284d3
|
refs/heads/master
| 2021-03-20T10:30:48.436412
| 2020-03-14T03:31:40
| 2020-03-14T03:31:40
| 247,201,149
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,485
|
py
|
from ..model import db
from ..model.video import Video
from flask import abort
from sqlalchemy import and_
from sqlalchemy import func
class VideoSrv(object):
@classmethod
def getAllVideo(cls, page: int, num: int, camera_name: str,
start_date:str, end_date: str):
try:
exp_list = []
if camera_name is not None:
exp_list.append(Video.camera_name == camera_name)
if start_date is not None:
exp_list.append(
func.date_format(Video.start_date, '%Y-%m-%d %H:%i:%S') >= start_date)
if end_date is not None:
exp_list.append(func.date_format(Video.start_date, '%Y-%m-%d %H:%i:%S') < end_date)
order_exp = Video.start_date.desc()
if len(exp_list) > 0:
filter_exp = and_(*exp_list)
pagination = Video.query.filter(filter_exp).order_by(order_exp).paginate(page, num, error_out=False)
else:
pagination = Video.query.order_by(order_exp).paginate(page, num, error_out=False)
return {
'total': pagination.total,
'pages': pagination.pages,
'has_prev': pagination.has_prev,
'has_next': pagination.has_next,
'data': [video.toDict() for video in pagination.items]
}
except Exception as e:
abort(500, "数据出错!")
|
[
"1030617785@qq.com"
] |
1030617785@qq.com
|
86f22aa4eff1568a076d319dbefbca33c085762f
|
f8c26b516e4b07fa5c3c5816afdc03db9c062074
|
/api/src/model/User.py
|
7b898d165b8e587851b0b382081d0ee57bbab7ca
|
[] |
no_license
|
bbortolli/bora-app
|
c116682982d12cf6b17df28cc8212da318b652e7
|
4833645d33216fd5f4c0f2bc9fec81045470f16b
|
refs/heads/master
| 2023-01-18T18:14:33.656195
| 2020-11-23T22:50:27
| 2020-11-23T22:50:27
| 312,370,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 958
|
py
|
from mongoengine import *
import datetime
from src.model.sets import STATES, COUNTRIES, GENDERS
class User(Document):
email = EmailField(required=True, unique=True)
first_name = StringField(max_length=100, required=True)
last_name = StringField(max_length=100, required=True)
password = StringField(max_length=500, required=True)
city = StringField(min_length=2, max_length=100, required=True)
state = StringField(choices=STATES)
country = StringField(choices=COUNTRIES)
document = StringField(min_length=5, max_length=50)
gender = StringField(choices=GENDERS)
    birthday = DateTimeField()  # min_length/max_length are StringField options and do not apply here
friends = ListField(StringField(min_length=24, max_length=24))
groups = ListField(StringField(min_length=24, max_length=24))
events = ListField(StringField(min_length=24, max_length=24))
invites = ListField(StringField(min_length=24, max_length=24))
created_at = DateTimeField(default=datetime.datetime.utcnow)
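# Minimal usage sketch (assumes a reachable MongoDB; values are illustrative
# and the password would be hashed before reaching this layer):
#
#     from mongoengine import connect
#     connect('bora')
#     User(email='ada@example.com', first_name='Ada', last_name='Lovelace',
#          password='<hashed>', city='Recife').save()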
|
[
"brunobortolli@outlook.com"
] |
brunobortolli@outlook.com
|
f75a08f30a2388d8f504e336bfa014b281c6b703
|
7f7bcd5986d410b1b52371a16cc8ddbd5618388a
|
/TradexaApp/Products/urls.py
|
72d14b0379f8c97cf246ed9e153fb6a71d5d54c3
|
[] |
no_license
|
sanjaynandy89/KavachAssignment2
|
c8229a354a724f9af5274a1c2bde588a29ed9e06
|
98cc351aebf761418f95250bd0a3fc4a549464cb
|
refs/heads/master
| 2023-01-05T21:24:23.194167
| 2020-10-20T20:24:28
| 2020-10-20T20:24:28
| 305,824,353
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 195
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('Product/', views.Product,name='Product'),
path("Product/user/Logout", views.Logout,name='Product/user/Logout'),
]
|
[
"sanjaynandy89@gmail.com"
] |
sanjaynandy89@gmail.com
|
73646403792e9179d36f218cdd166e48d3e30b8f
|
a0cb819ec9218a9ad8bdead700250c1078c1f5ab
|
/migrations/versions/7f5ddb45e9a4_.py
|
7d9d6b3a7abb3987831a04b43f0d505d4e5a1e71
|
[] |
no_license
|
airmnb/airmnb-app
|
0ae89267d7c553f089f1bbd81af486cdf8d65513
|
d0e051efe7d046168113a0aa501122526bb308da
|
refs/heads/master
| 2020-03-08T17:54:05.749987
| 2018-08-11T10:13:38
| 2018-08-11T10:13:38
| 128,280,972
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 681
|
py
|
"""empty message
Revision ID: 7f5ddb45e9a4
Revises: 0e5a1b60304a
Create Date: 2018-06-15 00:06:12.979884
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7f5ddb45e9a4'
down_revision = '0e5a1b60304a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('wechat_users', sa.Column('wechat_nick_name', sa.TEXT(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('wechat_users', 'wechat_nick_name')
# ### end Alembic commands ###
|
[
"mr.shaojun@gmail.com"
] |
mr.shaojun@gmail.com
|
592e159a1529338cb3dc8a06795ad65f63d356e6
|
a50dfc4b806b7d46944462be91353a2fa212f170
|
/Maintenance/GetUnusedRedirects.py
|
9bfa255eea0ddac584b2eda5a956324a72f6ff83
|
[] |
no_license
|
The-Foilist/HeadCanon
|
05adab98ca5d60a7ca9ae469dc188f75fed0e977
|
8e426743bc6c56b02d89faf3e0e94be4eb7c9de2
|
refs/heads/main
| 2023-05-31T08:13:56.874377
| 2021-06-20T20:23:42
| 2021-06-20T20:23:42
| 345,531,995
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 954
|
py
|
# Get a list of the names of all articles listed on the wiki's Special:UnusedRedirects page
import requests
import lxml
import bs4
base_url = "http://192.168.1.20:1138" # Base URL of the wiki
out_file_path = "" # Where to put the output file
offset = 0 # Start from this page on the list
limit = 500 # Go by groups of this many pages
url = base_url + "/index.php?title=Special:UnusedRedirects&limit="
while True:  # loop until a page returns no more redirects
source = requests.get(url + str(limit) + "&offset=" + str(offset)).text
soup = bs4.BeautifulSoup(source, "lxml")
chunk = soup.find_all("a", {"class": "mw-redirect"})
    with open(out_file_path + "UnusedRedirects.txt", "a", encoding="utf-8") as f:
        for item in chunk:
            f.write(item["title"] + "\n")
            print(item["title"])
    # no explicit close needed: the 'with' block closes the file
if len(chunk) == 0:
break
else:
offset += limit
|
[
"noreply@github.com"
] |
noreply@github.com
|
c0b1c30bf26bb233b30563baccc2d61e29651d9f
|
789289f1b2a05d70be9eb67af36fdb5633abe855
|
/Profiles/Profile_api/admin.py
|
531553c7d4f2853426e438a881834f4aed6a5847
|
[] |
no_license
|
Emmanuel1993-2607/Profile_API-Django
|
1a49d9086de3276ae46b89f325f3550059a5bbed
|
dc538de823f305026df7e67dec2ed4d135f9fd85
|
refs/heads/master
| 2023-04-11T16:24:04.102058
| 2021-05-17T10:30:14
| 2021-05-17T10:30:14
| 368,534,769
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 114
|
py
|
from django.contrib import admin
from Profiles.Profile_api import models
admin.site.register(models.userProfile)
|
[
"emmanuelorozco1993@gmail.com"
] |
emmanuelorozco1993@gmail.com
|
e59a6463fe020653e052c089c27ff4eaa1a2054f
|
8978b34587fab7db3a35f626e9855fa06b796993
|
/EulerPython/18. Maximum path sum I.py
|
a9eb20d5ca3fdb0293f27f06a150c7a98378c1b1
|
[] |
no_license
|
lukekweston/EulerQuestions
|
1a0701034f33c9268c49fa3345560b4e11a949d4
|
e496a5275627864faf663258b8d243d33007a24c
|
refs/heads/main
| 2023-02-19T14:04:02.522319
| 2021-01-18T23:08:46
| 2021-01-18T23:08:46
| 304,795,377
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,797
|
py
|
inputTriangle = """75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23"""
def formatTriangle(inputTriangle):
triangle = []
for line in inputTriangle.split("\n"):
lineList = []
for n in line.split(" "):
lineList.append(int(n))
triangle.append(lineList)
return triangle
#### recursive brute force method starting from the bottom -- slow
# gets all possible combinations going up
def getPath(triangle, row, path):
#at the top of the triangle
if row == 0:
# print("hello", [0] + path)
return [0] + path
else:
        #can only ever go to index 0 on the next row if in position 0
if path[0] == 0:
return getPath(triangle, row - 1, [0] + path)
        #can only ever go to the last position if already in the last position
elif path[0] == len(triangle[row]):
return getPath(triangle, row - 1, [len(triangle[row]) - 1] + path)
#compare numbers above and work out which path will create the greatest sum
else:
if sumPath(triangle, getPath(triangle, row - 1, [path[0]] + path)) > sumPath(triangle, getPath(triangle, row -1, [path[0] - 1] + path)):
return getPath(triangle, row - 1, [path[0]] + path)
else:
return getPath(triangle, row -1, [path[0] - 1] + path)
def sumPath(triangle, path):
sum = 0
for i in range(len(path)):
sum += triangle[i][path[i]]
return sum
triangle = formatTriangle(inputTriangle)
lastRow = triangle[len(triangle) - 1]
sums = []
#
# #get the max values at every 2nd index of the last row (smaller ones don't ever need to be checked)
# for i in range(0, len(lastRow), 2):
# path = []
# if(i != len(lastRow) -1 and lastRow[i] < lastRow[i] + 1):
# path = getPath(triangle, len(triangle) - 2, [i + 1])
# else:
# path = getPath(triangle, len(triangle) - 2, [i])
#
#
# sums.append(sumPath(triangle,path))
#
# print("max path sum: ", max(sums))
# ###
def fastSum(triangle):
    # bottom-up dynamic programming: each cell becomes its own value plus the
    # larger of its two children; note this mutates the triangle in place
    trianglesSummed = triangle
    for row in range(len(triangle) - 2, -1, -1):
        for i in range(len(triangle[row])):
            if trianglesSummed[row + 1][i] > trianglesSummed[row + 1][i + 1]:
                trianglesSummed[row][i] = trianglesSummed[row + 1][i] + triangle[row][i]
            else:
                trianglesSummed[row][i] = trianglesSummed[row + 1][i + 1] + triangle[row][i]
    return trianglesSummed[0][0]  # the single value left at the apex
print(fastSum(formatTriangle(inputTriangle)))
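# A quick sanity check of the bottom-up rule on a 3-row triangle
# (hypothetical mini input; best path is 3 -> 7 -> 4 = 14):
if __name__ == "__main__":
    assert fastSum(formatTriangle("3\n7 4\n2 4 5")) == 14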
|
[
"luke@weston.net.nz"
] |
luke@weston.net.nz
|
eae92e47c1d4a062e8b1bbd1f5491ed7c0450eb6
|
64ef180b1725d831891ef075557ddcc540c6e42a
|
/init.py
|
f947e2f7251a20b7e823621b1950cb622574ebbc
|
[
"MIT"
] |
permissive
|
nasingfaund/tkinter-gui-application-examples
|
125acc088133020adae0bfc9c752a8e75c780c73
|
7073d163713829b2ff10a331c7f88f845d89b1bc
|
refs/heads/master
| 2023-07-15T06:41:35.861732
| 2021-08-28T14:36:28
| 2021-08-28T14:36:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,703
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import tkinter.messagebox
from tkinter import Button, Label, Tk
from lib.functions import set_window_center
from lib.sqlite_helper import DBHelper
from main import App
class InitWindow(Tk):
    """Initialization window"""
    def __init__(self):
        Tk.__init__(self)
        self.title("Initialize Data")
        set_window_center(self, 300, 180)
        self.resizable(False, False)
        self.win_success = None  # popup window shown on successful initialization
        self.init_page()
    def init_page(self):
        """Load the widgets"""
        btn_1 = Button(self, text="Initialize database", command=self.do_init_db)
        btn_1.pack(expand="yes", padx=10, pady=10, ipadx=5, ipady=5)
    def do_init_db(self):
        """Initialize the database"""
        db_helper = DBHelper()
        db_helper.reset_database()
        db_helper.create_database()
        try:
            tmp = db_helper.insert_user("admin", "admin")  # default user
            tmp2 = db_helper.insert_content_by_username(
                "admin",
                "Hello World !",
                "Source repository: https://github.com/doudoudzj/tkinter-app",
                "github",
            )
            tmp3 = db_helper.get_content_by_username("admin")
            print("Added user admin:", tmp)
            print("Added content:", tmp2)
            print("Queried content:", tmp3)
            self.do_success()
            self.destroy()
        except KeyError as err:
            print(err)
            self.do_failed()
    def do_failed(self):
        """Ask whether to retry"""
        res = tkinter.messagebox.askretrycancel('Notice', 'Initialization failed. Retry?', parent=self)
        if res is True:
            self.do_init_db()
        elif res is False:
            self.destroy()
    def do_success(self):
        """Popup for successful initialization"""
        self.win_success = Tk()
        self.win_success.title("Initialization succeeded")
        set_window_center(self.win_success, 250, 150)
        self.win_success.resizable(False, False)
        msg = Label(self.win_success, text="Initialization succeeded")
        msg.pack(expand="yes", fill="both")
        btn = Button(self.win_success, text="OK", command=self.quit)
        btn.pack(side="right", padx=10, pady=10, ipadx=5, ipady=5)
        btn_open_app = Button(self.win_success, text="Launch app", command=self.open_app)
        btn_open_app.pack(side="right", padx=10, pady=10, ipadx=5, ipady=5)
    def open_app(self):
        """Open the main application"""
        self.quit()
        self.win_success.destroy()
        self.win_success.quit()
        App()
if __name__ == "__main__":
APP_INIT = InitWindow()
APP_INIT.mainloop()
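# Usage note (sketch): running this module directly opens the window above;
# the "Initialize database" button resets the schema and seeds a default
# admin/admin account.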
|
[
"doudoudzj@sina.com"
] |
doudoudzj@sina.com
|
c3fdb042b0b268cd9bc6c223f5973f5b41afa0b2
|
a0ea73e7869f9e3a01377310fe684244375a529f
|
/WTDscript_9_5_2014/classes/classes.py
|
fb3fa10d2af5a37bde1b91995e38202888aa9f57
|
[] |
no_license
|
vnvnp/WTD
|
ed9730e51fef2fc12aba60cc1a22abd211546f3a
|
d238f8b7523706e81486edff3bdc6e9b2f1ccfc8
|
refs/heads/master
| 2020-05-20T05:58:28.120790
| 2014-09-07T04:06:40
| 2014-09-07T04:06:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,095
|
py
|
"""
Class module which contains all ambiguous classes
imports:
Class Element:
    specifically, this class is intended to store the raw, organized pull
from the excel sheets and is the object for each row in the excel spreadsheet.
Class Dprep:
this class handles all data preparation with specific functions
"""
class Plotter(object):
"""The plotter object instantiates a plotting
    instance whereby the user can manage all figures
which they wish to plot. It loads the ALL data
object which houses contents of all of the data
from the WTD"""
def __init__(self, ALL):
self.ALL = ALL
def makepdf(self,name):
"""
this creates a pdf file with the name passed to the make fig function.
Then, it creates a figure on the pdf file. plot properties are specified
by the pltpr function. C and C line plot is generated and plotted. then
the figure and pdf files are saved and closed. The pdf files are treated
as object which are generated and then operated on.
"""
# prepare imports
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
self.pdf_pages = PdfPages(name)
# set up plot properties
self.plt = plt
# save and close the figure
def genfig(self,title):
self.fig = self.plt.figure(figsize=(6.875, 6), dpi=100)
self.plt.title(title)
self.plt.grid()
def svfig(self):
self.pdf_pages.savefig(self.fig)
def clpdf(self):
self.pdf_pages.close()
def pltCandC(self,c1,c2,color,mklabel,label2use):
# initialize C & C lines
x,y = self.CandC(c1, c2)
# plotting action
self.plt.grid()
self.plt.xlabel('At')
self.plt.ylabel('Cp')
self.plt.xscale('log')
if mklabel == True:
self.plt.plot(x,y,color=color,label = label2use)
else:
self.plt.plot(x,y,color=color)
def CandC(self,c1,c2):
x,y = self.AISC_CC(c1,c2)
return x,y
def mklegend(self,*args):
self.plt.grid()
self.plt.xlabel('At')
self.plt.ylabel('Cp')
self.plt.xscale('log')
self.plt.legend(*args,loc=0,prop={'size':6})
    def lgf(self, x1, x2, y1, y2, x):
        """Log-linear interpolation: the straight line in (log10(x), y)
        space through (x1, y1) and (x2, y2), evaluated at x."""
        import math
        b = y2 - (math.log10(x2)/(math.log10(x2)-math.log10(x1)))*(y2-y1)
        y = ((y2-y1)/(math.log10(x2)-math.log10(x1)))*math.log10(x) + b
        return y
    def AISC_CC(self, c1, c2):
        """Piecewise C&C curve: constant c1 up to At = 10, a log-linear
        transition from 10 to 100, then constant c2 out to 300."""
        x1 = list(range(1, 11))
        y1 = [c1]*len(x1)
        x2 = list(range(11, 101))
        y2 = [self.lgf(10., 100., c1, c2, e) for e in x2]
        x3 = list(range(101, 301))
        y3 = [c2]*len(x3)
        x = x1 + x2 + x3
        y = y1 + y2 + y3
        return x, y
def WTD(self,x,y,color,mklabel,label2use):
self.plt.grid()
self.plt.xlabel('At')
self.plt.ylabel('Cp')
self.plt.xscale('log')
if mklabel == True:
self.plt.plot(x,y,'x', color=color, markersize=2., label = label2use)
else:
self.plt.plot(x,y,'x', color=color, markersize=2.)
def WTDfit(self,x,y,color,mklabel,label2use):
import numpy as np
# fit with np.polyfit
m1, m2, b = np.polyfit(x, y, 2)
x1=np.linspace(10,270,1000)
y = [m1*e**2 + m2*e + b for e in x1]
xy=sorted(zip(x1,y))
x = []
y = []
for x1,y1 in xy:
x.append(x1)
y.append(y1)
self.plt.grid()
self.plt.xlabel('At')
self.plt.ylabel('Cp')
self.plt.xscale('log')
if mklabel == True:
self.plt.plot(x,y,'-',color=color,label = label2use)
else:
self.plt.plot(x,y,'-',color=color)
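
# A minimal usage sketch (the coefficient values and file name here are
# illustrative, and `ALL` stands for the data object built elsewhere in
# this project):
def _demo_plotter(ALL):
    """Overlay an AISC C&C design line on one PDF page."""
    p = Plotter(ALL)
    p.makepdf('candc_demo.pdf')                    # open the PdfPages target
    p.genfig('Cp vs At')                           # new gridded figure
    p.pltCandC(-2.0, -1.1, 'k', True, 'AISC C&C')  # plateau values c1, c2
    p.mklegend()
    p.svfig()                                      # write the figure to the pdf
    p.clpdf()                                      # flush and close the pdf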
class ElementSP(object):
    """This class maintains the definition of an element object. Within
    it, there are properties which define its context in the scope of
    the wind tunnel data processing procedure."""
    def __init__(self, WD, PN, GCp, mxmn, zone, AT, alpha, On, RC):
        self.WD = WD
        self.PN = PN
        self.GCp = GCp
        self.mxmn = mxmn
        self.zone = zone
        self.AT = AT
        self.alpha = alpha
        self.On = On
        self.RC = RC


class ElementATP(object):
    """This class maintains the definition of an element object. Within
    it, there are properties which define its context in the scope of
    the wind tunnel data processing procedure."""
    def __init__(self, ATG, WD, mxmn, run, RC, alpha, On, AT, GCp):
        self.ATG = ATG
        self.WD = WD
        self.mxmn = mxmn
        self.run = run
        self.RC = RC
        self.alpha = alpha
        self.On = On
        self.AT = AT
        self.GCp = GCp
class Panel(object):
    '''
    Represents a distinct panel object whose properties depend on the
    wind directionality (WD), the location, and the offset from the
    roof ridge (On).
    '''
    def __init__(self, location, WD, On):
        self.WD = WD
        self.On = On
        self.location = location
        A = 18.33
        if WD == 0:
            # zone 1: interior panels are weighted entirely to w1
            if location in (2, 3, 4, 5, 7, 8, 9, 10, 12, 13, 14, 15):
                self.w1, self.w2, self.w3 = 1., 0., 0.
            # zone 2: edge panels split their 5.5-unit strip by offset On
            if On == 6:
                if location in (1, 6, 11):
                    self.w1, self.w2, self.w3 = 0.5*5.5/A, 2.5*5.5/A, 0.
                if location == 16:
                    self.w1, self.w2, self.w3 = 0., 0.5*5.5/A, 2.5*5.5/A
            if On == 21:
                if location in (1, 6, 11):
                    self.w1, self.w2, self.w3 = 1.75*5.5/A, 1.25*5.5/A, 0.
                if location == 16:
                    self.w1, self.w2, self.w3 = 0., 1.75*5.5/A, 1.25*5.5/A
            if On == 36:
                if location in (1, 6, 11):
                    self.w1, self.w2, self.w3 = 1., 0., 0.
                if location in (16, 17, 18, 19, 20):
                    self.w1, self.w2, self.w3 = 0., 1., 0.
        if WD == 1:
            # zone 1 mirrors the WD == 0 case about the ridge
            if location in (7, 8, 9, 10, 12, 13, 14, 15, 17, 18, 19, 20):
                self.w1, self.w2, self.w3 = 1., 0., 0.
            # zone 2
            if On == 6:
                if location in (6, 11, 16):
                    self.w1, self.w2, self.w3 = 0.5*5.5/A, 2.5*5.5/A, 0.
                if location == 1:
                    self.w1, self.w2, self.w3 = 0., 0.5*5.5/A, 2.5*5.5/A
            if On == 21:
                if location in (6, 11, 16):
                    self.w1, self.w2, self.w3 = 1.75*5.5/A, 1.25*5.5/A, 0.
                if location == 1:
                    self.w1, self.w2, self.w3 = 0., 1.75*5.5/A, 1.25*5.5/A
            if On == 36:
                if location in (6, 11, 16):
                    self.w1, self.w2, self.w3 = 1., 0., 0.
                if location in (1, 2, 3, 4, 5):
                    self.w1, self.w2, self.w3 = 0., 1., 0.
        # locations not covered above are left without w1/w2/w3 attributes
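
# Illustrative check (a sketch; A = 18.33 is the constant defined in
# Panel.__init__): at offset On == 6, an edge panel splits its 5.5-unit
# strip between zones 1 and 2, while an interior panel sits fully in zone 1.
def _demo_panel_weights():
    edge = Panel(1, 0, 6)      # location 1, wind direction 0, offset 6
    interior = Panel(7, 0, 6)  # interior location, same configuration
    return (edge.w1, edge.w2, edge.w3), (interior.w1, interior.w2, interior.w3)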
class Generate_Array(object):
    """
    Takes the panel object and makes an array that is representative of
    a certain config with a specific WD and a specific On. Each panel
    has a w1, w2 and w3 which can be used to build the ATG w1, w2, w3.
    """
    def __init__(self, WDn, On):
        self.array = {}
        for x1 in range(1, 21):
            self.array[x1] = Panel(x1, WDn, On)
class ATGdict(object):
    """
    Provides a dictionary of the geometric areas and their associated
    panel numbers.
    """
    def __init__(self):
        self.ATGd = {
            1: [1, 2, 3, 4, 5],
            2: [6, 7, 8, 9, 10],
            3: [11, 12, 13, 14, 15],
            4: [16, 17, 18, 19, 20],
            5: [6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20],
            6: [11, 12, 13, 14, 15, 16, 17, 18, 19, 20],
            7: [13, 14, 15, 18, 19, 20],
            8: [11, 12, 13, 16, 17, 18],
            9: [1, 2, 3, 4, 16, 17, 18, 19],
            10: [6, 7, 8, 9, 11, 12, 13, 14],
        }
class Findweights(object):
    """
    Builds the arrays for each different WD and On when initialized.
    Then calculates the weights of each ATG and outputs them.
    """
    def __init__(self, ele):
        # w1, w2, w3 values for each individual panel for each of the WD and On possibilities
        self.ele = ele
        self.Array_0_6 = Generate_Array(0, 6)
        self.Array_0_21 = Generate_Array(0, 21)
        self.Array_0_36 = Generate_Array(0, 36)
        self.Array_1_6 = Generate_Array(1, 6)
        self.Array_1_21 = Generate_Array(1, 21)
        self.Array_1_36 = Generate_Array(1, 36)
        # contains my ATG panel numberings for ATGn 1 to 10
        self.myATG = ATGdict()

    def calcweights(self):
        """
        Produces w1n, w2n and w3n, which are the weights associated
        with each ATGn.
        """
        for x1 in range(len(self.ele)):
            o = self.ele[x1]
            w1n, w2n, w3n = self.matcher(o.ATGn, o.WD, o.On)
            o.w1n = w1n
            o.w2n = w2n
            o.w3n = w3n
        return self.ele
    def matcher(self, ATGn, WD, On):
        """
        Housed inside calcweights; this is the actual decision mechanism
        handling the weighting of each panel object.
        """
        # pick the pre-built panel array for this (WD, On) combination
        arrays = {
            (0., 6.): self.Array_0_6,
            (0., 21.): self.Array_0_21,
            (0., 36.): self.Array_0_36,
            (1., 6.): self.Array_1_6,
            (1., 21.): self.Array_1_21,
            (1., 36.): self.Array_1_36,
        }
        array = arrays[(WD, On)].array
        # accumulate the raw weights of every panel in this ATG
        w1c, w2c, w3c = [], [], []
        for panel_num in self.myATG.ATGd[ATGn]:
            w1c.append(array[panel_num].w1)
            w2c.append(array[panel_num].w2)
            w3c.append(array[panel_num].w3)
        w1k, w2k, w3k = sum(w1c), sum(w2c), sum(w3c)
        # normalise so the three zone weights sum to one
        ws = w1k + w2k + w3k
        return w1k/ws, w2k/ws, w3k/ws
class Dprep(object):
    '''
    prep_APT1:
        add ATGn and float everything that should be floated,
        except for the alpha.
    prep_SPT1:
        attach the per-panel weights w1n, w2n and w3n.
    '''
    def prep_APT1(self, obj_ele_dict):
        for x1 in range(len(obj_ele_dict)):
            o = obj_ele_dict[x1]
            # collapse the (AT, ATG) pair onto a single ATGn index 1..10
            if o.AT == 90 and o.ATG in (1, 2, 3, 4):
                o.ATGn = o.ATG
            elif o.AT == 108 and o.ATG in (1, 2):
                o.ATGn = o.ATG + 4
            elif o.AT == 144 and o.ATG in (1, 2):
                o.ATGn = o.ATG + 6
            elif o.AT == 180:
                o.ATGn = 9
            elif o.AT == 270:
                o.ATGn = 10
        return obj_ele_dict
    def prep_SPT1(self, obj_ele_dict):
        # use the weight finder class to obtain the array panel configs
        weightfinder = Findweights(obj_ele_dict)
        arrays = {
            (0.0, 6.0): weightfinder.Array_0_6,
            (0.0, 21.0): weightfinder.Array_0_21,
            (0.0, 36.0): weightfinder.Array_0_36,
            (1.0, 6.0): weightfinder.Array_1_6,
            (1.0, 21.0): weightfinder.Array_1_21,
            (1.0, 36.0): weightfinder.Array_1_36,
        }
        for x1 in range(len(obj_ele_dict)):
            o = obj_ele_dict[x1]
            panel = arrays[(o.WD, o.On)].array[o.PN]
            o.w1n = panel.w1
            o.w2n = panel.w2
            o.w3n = panel.w3
        return obj_ele_dict
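
# A minimal end-to-end sketch (assumption: `elements` is a list of
# ElementATP objects already parsed from the spreadsheets; nothing here
# runs on import):
def _demo_weight_flow(elements):
    """Attach ATGn indices, then normalised tributary weights."""
    elements = Dprep().prep_APT1(elements)  # adds o.ATGn from (AT, ATG)
    finder = Findweights(elements)
    return finder.calcweights()             # adds o.w1n, o.w2n, o.w3n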
[author: evan.prado@gatech.edu]

[file: /tests/test_auth.py | repo: simiyu1/Hello-books-v2 | Python | no license | 4,719 bytes]
import json
import unittest
from tests.helper_tests import InitTests
class UserTests(unittest.TestCase):
def setUp(self):
InitTests.testSetUp(self)
def tearDown(self):
InitTests.testTearDown(self)
def test_can_create_user(self):
self.user = {"email": "juma@ymail.com", "username": "Juma", "password": "pass123"}
resp = self.client.post(self.BASE_URL + 'register', data=json.dumps(
self.user), content_type='application/json')
self.assertEqual(resp.status_code, 201,
msg="Successfully registered")
def test_can_create_user_fail_username_exists(self):
self.user = {"email": "mbiy@gmail", "username": "Mercy Mbiya", 'password': 'pass123'}
resp = self.client.post(self.BASE_URL + 'register', data=json.dumps(
self.user), content_type='application/json')
self.assertEqual(resp.status_code, 409,
msg="Failed, Username or email already exists, Please sign In")
    def test_can_login_user_pass(self):
        self.successuser = {"username": "Mercy Mbiya", "password": "pass123", "email": "mbiya@gmail.com"}
        resp = self.client.post(self.BASE_URL + 'login', data=json.dumps(
            self.successuser), content_type='application/json')
        self.assertEqual(resp.status_code, 200,
                         msg="Successfully logged In")
    def test_can_login_user_fails(self):
        self.successuser = {"username": "Mercy Mbiya", "password": "Badpass123", "email": "mbiya@gmail.com"}
        resp = self.client.post(self.BASE_URL + 'login', data=json.dumps(
            self.successuser), content_type='application/json')
        self.assertEqual(resp.status_code, 401,
                         msg="Login with a wrong password is rejected")
def test_can_logout_user(self):
resp = self.client.post(self.BASE_URL + 'logout', content_type='application/json', headers={'access-token': self.tokenuser})
self.assertEqual(resp.status_code, 200,
msg="Successful you are logged out")
def test_can_logout_user_fail(self):
resp = self.client.post(self.BASE_URL + 'logout', content_type='application/json')
self.assertEqual(resp.status_code, 200,
msg="user unknown")
def test_can_reset_password(self):
self.resetdata = {"username": "Mercy Mbiya", 'password': 'pass123', 'new_password': 'pass456',
'confirm_new_password': 'pass456'}
resp = self.client.post(self.BASE_URL + 'reset', data=json.dumps(
self.resetdata), content_type='application/json')
self.assertEqual(resp.status_code, 200,
msg="Reset success")
def test_can_reset_password_fail(self):
self.resetdata = {"username": "Mercy Mbiya", 'password': 'pass123', 'new_password': 'canadian123',
'confirm_new_password': 'can123'}
resp = self.client.post(self.BASE_URL + 'reset', data=json.dumps(
self.resetdata), content_type='application/json', headers={'access-token': self.tokenuser})
self.assertEqual(resp.status_code, 200,
msg="New Passwords do not match")
def test_can_reset_fields_empty(self):
self.resetdata = {'username': 'Miguna'}
resp = self.client.post(self.BASE_URL + 'reset', data=json.dumps(
self.resetdata), content_type='application/json', headers={'access-token': self.tokenuser})
self.assertEqual(resp.status_code, 400,
msg="Make sure to fill all required fields")
def test_book_not_found(self):
self.book_data = "11"
resp = self.client.post('/api/v1/users/books/' + self.book_data,
content_type='application/json',
headers={'access-token': self.tokenuser})
self.assertEqual(resp.status_code, 404, msg='Book not found')
def test_user_can_borrow_a_book_not_logged_in(self):
data = {"userid": 2}
self.book_data = "4"
# send the data
resp = self.client.post('/api/v1/users/books/' + self.book_data,
content_type='application/json', headers={'Authorization': 'Bearer '})
self.assertEqual(resp.status_code, 401, msg='Token is missing, login to get token')
def test_book_not_exist(self):
self.book_data = "18"
# send the data
resp = self.client.post('/api/v1/users/books/' + self.book_data,
content_type='application/json', headers={'access-token': self.tokenuser})
self.assertEqual(resp.status_code, 404, msg='Item not found')
if __name__ == '__main__':
unittest.main()
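
# To run just this module from the project root (assumed layout with a
# top-level `tests` package providing helper_tests):
#   python -m unittest tests.test_auth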
[author: nabendesimiyu@gmail.com]

[file: /split_data.py | repo: lidaboo/qtim_ROP | Python | no license | 1,145 bytes]
from os.path import basename, join
import numpy as np
from shutil import copy
from common import get_subdirs, find_images, make_sub_dir
def split_data(in_dir, out_dir, n=5):
# Count images in subdirs and verify equal amounts
sub_dirs = get_subdirs(in_dir)
image_lists = {basename(sub_dir): find_images(sub_dir, extensions=['*.gif']) for sub_dir in sub_dirs}
no_imgs = np.asarray([len(x) for x in image_lists.values()])
    try:
        # every class directory must contribute the same number of images
        assert np.all(no_imgs == no_imgs[0])
    except AssertionError:
        print("Number of images in directories '{}' must be equal".format(list(image_lists.keys())))
split_size = int(round(no_imgs[0] / float(n)))
for i in range(0, n):
# Create split directory
split_dir = make_sub_dir(out_dir, 'split_{}'.format(i))
for dir_name, image_list in image_lists.items():
sub_list = image_list[i * split_size:(i * split_size) + split_size]
img_dir = make_sub_dir(split_dir, dir_name)
for img in sub_list:
copy(img, img_dir)
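
# Resulting layout (illustrative; the class directory names are whatever
# subdirectories exist under in_dir):
#   out_dir/split_0/<class_name>/*.gif
#   ...
#   out_dir/split_<n-1>/<class_name>/*.gif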
if __name__ == '__main__':
import sys
split_data(sys.argv[1], sys.argv[2])
[author: jbrown97@mgh.harvard.edu]

[file: /tourney/tournament/management/commands/players.py | repo: jonejone/tourney | Python | BSD-2-Clause/BSD-3-Clause | 1,945 bytes]
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from tourney.tournament.models import Player, Tournament
class Command(BaseCommand):
    args = '<tournament_slug>'
help = 'Lists all players with their options'
option_list = BaseCommand.option_list + (
make_option('--with-options-only',
action='store_true',
dest='options-only',
default=False,
                    help='Only show players that have chosen any options.'),
)
def validate_tournament(self, *args, **options):
# First we must validate tournament
try:
t_slug = args[0]
tournament = Tournament.objects.get(slug=t_slug)
self.tournament = tournament
except IndexError:
raise CommandError('Please enter a tournament slug')
except Tournament.DoesNotExist:
raise CommandError('Tournament slug not found')
def handle(self, *args, **options):
self.validate_tournament(*args, **options)
if options['options-only']:
players = []
for p in self.tournament.tournamentplayer_set.all():
if p.options.count() > 0:
players.append(p)
else:
players = self.tournament.tournamentplayer_set.all()
for player in players:
opts = [opt.name for opt in player.options.all()]
if player.options.count() == 0:
output = '%(player_name)s - %(total_price)s'
else:
output = '%(player_name)s - %(total_price)s - %(options)s'
output_data = {
'player_name': player.player.name,
'options': ', '.join(opts),
'total_price': '%i %s' % (
player.get_player_price(),
self.tournament.currency),
}
self.stdout.write(output % output_data)
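
# Example invocation (hypothetical tournament slug):
#   python manage.py players my-open-2015 --with-options-only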
[author: jone@idev.no]