#!/usr/bin/python3
import string
class Node:
def __init__(self, value, dataCount = None, fileName = None):
self.data = value
self.dataFileName = fileName
self.next = None
self.dataCount = dataCount
#Function points current node to another node
def addNode(self, node2):
self.next = node2
#Getter function for next
def getNext(self):
return self.next
#Getter function for data
def getData(self):
return self.data
#Getter function for dataFileName
def getDataFileName(self):
return self.dataFileName
class PostingsList:
def __init__(self):
self.numNodes = 0
self.indivNodeLengths = []
self.nodes = []
#Function adds a string to the postings list as a branch
    def addTokenToList(self, tokenTuple):
#Creating linked list head
#Text added to Node.data contains punctuation, newline characters, spaces
#Allows text, in its entirety, to be displayed to user if matched
        nodeHead = Node(tokenTuple[0], None, tokenTuple[1])
nodeTail = nodeHead
#Postings have newline characters, spaces, punctuation removed
#Replacing newline characters with spaces
        tokenString = tokenTuple[0]
if '\n' in tokenString:
tokenString = tokenString.replace('\n', ' ')
#Removing punctuation from string
#Lowercasing string
tokenString = tokenString.translate(str.maketrans('', '', string.punctuation)).lower()
wordList = tokenString.split(' ')
#Not adding spaces or empty strings as postings
wordCountDict = {}
for word in wordList:
if (word == ' ') or (word == ''):
continue
cleaned = word.strip()
#Though this is a boolean retrieval matrix
#incrementing counter allows for 'ordering results' functionality to be added later
if cleaned not in wordCountDict:
wordCountDict[cleaned] = 1
else:
wordCountDict[cleaned] += 1
nodeCount = 0
        #Sorting postings, allows for more efficient search when looking for a search term
#Adding each posting to head node in linked list
for word in sorted(wordCountDict.keys()):
tempNode = Node(word, wordCountDict[word])
nodeTail.addNode(tempNode)
nodeTail = tempNode
nodeCount += 1
self.nodes.append(nodeHead)
#Storing some metadata on each branch, allows for further functionality in the future
self.numNodes += 1
self.indivNodeLengths.append(nodeCount)
#Function builds postings list from list of tuples
#Tuples in format: [([Text1,...,TextN], file1),...,([Text1,...,TextN], fileN)]
    def buildPostingsList(self, fileTokenTuples):
        for stringList, fileVal in fileTokenTuples:
for stringVal in stringList:
#Every string has a file associated with it when passed to branch building function
self.addTokenToList((stringVal, fileVal))
#Function searches postings list for an individual search term, ie: 'cat'
#If inverseFlag=False, find all node heads whose postings contain 'cat'
#If inverseFlag=True, find all node heads whose postings do not contain 'cat'
def searchAllBranches(self, term, inverseFlag = False):
headNodesContainingTerm = []
#All search is done on lowercase search terms
term = term.lower()
#Iterating over postings list
for nodeHead in self.nodes:
avoid = False
            #Iterating over postings in this branch; the head holds the raw text, so start at the first posting
            #(the original loop condition skipped the final posting in each branch)
            traversalNode = nodeHead.getNext()
            while traversalNode is not None:
                if traversalNode.getData() == term:
                    #Inverse flag is disabled; if search term is found, include node data and file name info
                    if not inverseFlag:
                        headNodesContainingTerm.append((nodeHead.getData(), nodeHead.getDataFileName()))
                    else:
                        avoid = True
                    break
                else:
                    traversalNode = traversalNode.getNext()
#Inverse flag is true, end of branch was reached, include node data and file name info
            if inverseFlag and not avoid:
headNodesContainingTerm.append((nodeHead.getData(), nodeHead.getDataFileName()))
return headNodesContainingTerm
    #Function inverts list of node head data, finds all node heads that are not in passed-in list
def inverseAllBranches(self, branches):
invertedBranches = []
#Iterating over postings list
for nodeHead in self.nodes:
currentNode = (nodeHead.getData(), nodeHead.getDataFileName())
#Current node is not a node in passed in list, include this node
if currentNode not in branches:
invertedBranches.append(currentNode)
return invertedBranches
#Function applies 'AND' operator to search term or list of node head data
def AND(self, stack1, stack2):
#If stack1 is a string, ie: 'cat', get node head result list for that term
        if not isinstance(stack1, list):
stack1 = self.searchAllBranches(stack1)
#If stack2 is a string, ie: 'cat', get node head result list for that term
        if not isinstance(stack2, list):
stack2 = self.searchAllBranches(stack2)
#stack1 and stack2 might already be lists
#Returns elements found in both stack1 and stack2
return [match for match in stack1 if match in stack2]
#Function applies 'OR' operator to search term or list of node head data
def OR(self, stack1, stack2):
#If stack1 is a string, ie: 'cat', get node head result list for that term
        if not isinstance(stack1, list):
stack1 = self.searchAllBranches(stack1)
#If stack2 is a string, ie: 'cat', get node head result list for that term
        if not isinstance(stack2, list):
stack2 = self.searchAllBranches(stack2)
#stack1 and stack2 might already be lists
#Simply add lists and remove duplicate strings
return list(set(stack1 + stack2))
#Function applies 'NOT' operator to search term or list of node head data
def NOT(self, stack1):
#If stack1 is a string, ie: 'cat'
#Search postings list for individual term, but 'True' flag inverts results
        if not isinstance(stack1, list):
return self.searchAllBranches(stack1, True)
#If stack1 is a list of node head data, find all other node heads not in this list
else:
return self.inverseAllBranches(stack1)
#Function searches for individual search term in postings list, ie: 'cat'
def searchIndivTerm(self, term):
return self.searchAllBranches(term)
#Function uses prefix list of boolean operations and search terms to search postings list
#Returns tuple in format:
#({0 or 1, 0 if search is invalid, 1 if search is valid}, [strings of matching head nodes in postings list])
def searchPostingList(self, parsedQuery):
operatorList = ['not', 'and', 'or']
operandStack = []
operatorStack = []
        #If prefix list is a single search term, ie: ['cat']
        if (len(parsedQuery) == 1) and (parsedQuery[0] not in operatorList):
return (1, self.searchIndivTerm(parsedQuery[0]))
while True:
if len(parsedQuery) == 0:
#When prefix list is empty, operator stack has to be 0, operand stack has to be 1
                if len(operatorStack) != 0 or len(operandStack) != 1:
return (0, None)
else:
return (1, operandStack.pop())
#If prefix element is operand, add to operand stack
if parsedQuery[-1] not in operatorList:
operandStack.append(parsedQuery.pop())
#Prefix element is operator
else:
#Adding to operator stack
operatorStack.append(parsedQuery.pop())
#'NOT' operator operates on single operand
#'OR' & 'AND' operators operate on two operands
if operatorStack[-1] != 'not':
#At least two operands
if len(operandStack) >= 2:
operator = operatorStack.pop()
if operator == 'and':
result = self.AND(operandStack.pop(), operandStack.pop())
elif operator == 'or':
result = self.OR(operandStack.pop(), operandStack.pop())
#Re-adding result to operand stack
operandStack.append(result)
#At least one operand
elif len(operandStack) >= 1:
operatorStack.pop()
result = self.NOT(operandStack.pop())
#Re-adding result to operand stack
operandStack.append(result)
#Function is used for debugging, iterates over postings list
#At each iteration, prints node head data and iterates over each branch, printing postings
def printBranches(self):
for count, node in enumerate(self.nodes):
#Printing node head data
print('ON HEAD NODE:', node.getData(), '\nFROM FILE:', node.getDataFileName())
#Traversing branches
traversalNode = node.getNext()
print('Branch Data,', str(self.indivNodeLengths[count]), 'in total:')
#Printing postings
while traversalNode != None:
print(traversalNode.getData())
traversalNode = traversalNode.getNext()
print('')
#Function prints results of boolean search query for the user
def printResults(self, resultList):
print('\n-----------------------------------------------')
        #Different versions to be grammatically correct
if len(resultList) > 1:
print('FOUND '+str(len(resultList))+' TOTAL RESULTS\n')
elif len(resultList) == 1:
print('FOUND '+str(len(resultList))+' TOTAL RESULT\n')
else:
print('FOUND '+str(len(resultList))+' TOTAL RESULTS')
if len(resultList) != 0:
for count, result in enumerate(resultList):
#Printing text
print('RESULT '+str(count+1)+':\n'+result[0])
#Printing file text was found in
if count+1 != len(resultList):
print('IN FILE:\n'+result[1]+'\n')
#Last element lacks newline character for appearance
else:
print('IN FILE:\n'+result[1])
print('-----------------------------------------------\n')
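#A minimal usage sketch (hypothetical file names/text; assumes only the PostingsList class above):
#   pl = PostingsList()
#   pl.buildPostingsList([(["The cat sat.", "A dog ran!"], "animals.txt")])
#   ok, results = pl.searchPostingList(['and', 'cat', 'sat'])   #prefix query: cat AND sat
#   if ok:
#       pl.printResults(results)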
|
#!/usr/bin/env python
#import modules
try:
    #delays
    import time
    #threads
    import threading
    #serial port and GPIO access, used by readBuff below
    #(assumes pyserial and RPi.GPIO are installed; RPi.GPIO raises RuntimeError without root)
    import serial
    import RPi.GPIO as GPIO
except RuntimeError:
    print "Error importing modules\n"
    print "Try using 'sudo' to run this script\n"
#------------------------------------------------------------------------------
#NOTE: 'ser' (an open serial port) and 'f' (an open log file) are expected to be
#set up elsewhere, e.g. ser = serial.Serial('/dev/ttyAMA0', 9600)
def readBuff():
    try:
        GPIO.wait_for_edge(21, GPIO.FALLING) #SAVE_RDY
    except:
        pass
    time.sleep(0.05) #give inWaiting() time to report the real number of chars/bytes
    nc = ser.inWaiting()
    r = ser.read(nc)
    print r
    print "\n"
    return r
def saveBuff(buff):
f.write(buff)
f.flush()
def convertBuff(buff, nc):
    #TODO: conversion logic is not implemented yet; this is a stub
    for i in range(0, nc):
        m = 0
#------------------------------------------------------------------------------
def main():
print "Welcome to SPQR\n"
tgps = threading.Thread( group=None, target=tGPS, name="Thread GPS", args=(), kwargs={} )
tgps.setDaemon(True)
tgps.start()
while 1:
time.sleep(2.05)
print "main\n"
def tGPS():
while 1:
time.sleep(2.05)
print "tGPS\n"
if __name__ == '__main__':
main()
|
try:
from tuneup.ndimraces import sigopt_versus_shgo_deap
except ImportError:
pass
from tuneup.ndimraces import open_source_race
|
import os
import unittest
import __main__
class PrhTests(unittest.TestCase):
def create_local_ref(self, name):
path_prefix = __main__.get_repo_git_dir() + "/refs/heads/"
self.create_dirs_and_file(name, path_prefix)
def create_remote_ref(self, name):
path_prefix = __main__.get_repo_git_dir() + "/refs/remotes/origin/"
self.create_dirs_and_file(name, path_prefix)
def create_dirs_and_file(self, name, path_prefix):
last_slash_index = name.rfind('/')
if last_slash_index != -1:
try:
os.makedirs(path_prefix + name[0: last_slash_index])
open(path_prefix + name, "w").close()
            except OSError:
                print("error creating local ref")
else:
open(path_prefix + name, "w").close()
def delete_local_ref(self, name):
path_prefix = __main__.get_repo_git_dir() + "/refs/heads/"
last_slash_index = name.rfind('/')
if last_slash_index != -1:
try:
os.remove(path_prefix + name)
os.removedirs(path_prefix + name[0: last_slash_index])
            except OSError as e:
                print(e)
else:
try:
os.remove(path_prefix + name)
            except OSError as e:
                print(e)
def put_ref_in_head(self, ref_name):
with open(__main__.get_repo_git_dir() + "/HEAD", 'w') as f:
f.write("ref: refs/heads/%s" % ref_name)
    def delete_remote_ref(self, name):
        origin_ = __main__.get_repo_git_dir() + "/refs/remotes/origin/"
        path = origin_ + name
        if not os.path.exists(path):
            return
        os.remove(path)
        #For a 'group/name' ref, also clean up any now-empty parent directories
        if '/' in name:
            try:
                os.removedirs(origin_ + name[:name.rfind('/')])
            except OSError:
                pass
def test_config_file_migration(self):
old_config_path = "test1.py"
with open(old_config_path, "w") as f:
f.write('test1_param1 = "test1 Value1 "')
self.assertTrue(os.path.isfile(old_config_path), "old config doesn't exist")
new_config_path = "test1.json"
__main__.migrate_config_file(from_path=old_config_path, to_path=new_config_path)
self.assertFalse(os.path.isfile(old_config_path), "old config didn't get removed after migration")
self.assertTrue(os.path.isfile(new_config_path), "new config doesn't exists after migration")
config_file = __main__.read_from_config_file(file_path=new_config_path)
self.assertEqual(config_file["test1_param1"], 'test1 Value1', "the values didn't carry over to new config")
def test_pr_template_append(self):
pass
def test_multiple_link_in_commit_message(self):
cm, full_url, story_ids = __main__.parse_commit_message(
"this ishttps://www.pivotaltracker.com/story/show/140176051 https://www.pivotaltracker.com/story/show/139604723a test",
[], [])
self.assertEqual(cm, "this is a test")
self.assertEqual(full_url, ["https://www.pivotaltracker.com/story/show/140176051",
"https://www.pivotaltracker.com/story/show/139604723"])
self.assertEqual(story_ids, ["140176051", "139604723"])
def test_verify_parent_in_origin(self):
ref_name = "prh_test_t1"
self.delete_remote_ref(ref_name)
self.create_remote_ref(ref_name)
error = __main__.verify_parent_in_origin(ref_name)
self.assertFalse(error, "failed with ref_name = %s" % ref_name)
ref_name = "prh_test/t1"
self.delete_remote_ref(ref_name)
self.create_remote_ref(ref_name)
error = __main__.verify_parent_in_origin(ref_name)
self.assertFalse(error, "failed with ref_name = %s" % ref_name)
def test_get_head(self):
ref_name = "t1"
self.delete_local_ref(ref_name)
self.create_local_ref(ref_name)
self.put_ref_in_head(ref_name)
res = __main__.get_head()
self.assertEqual(ref_name, res, "failed with a simple ref name")
ref_name = "prh_test/t1"
self.delete_local_ref(ref_name)
self.create_local_ref(ref_name)
self.put_ref_in_head(ref_name)
res = __main__.get_head()
self.assertEqual(ref_name, res, "failed when there is '/' in ref name")
|
#This program flashes an LED connected to GPIO pin 17 on the Raspberry Pi
import gpiozero #Importing LED functions from gpiozero
import time #Import sleep function
ledBlue = gpiozero.LED(17) #Assign control of the pin to the variable; note this references GPIO 17, not the physical pin number
while True: #Loop Always
ledBlue.off() #Start with LED OFF
time.sleep(0.5) #Wait half a second
ledBlue.on() #Turn the LED on
time.sleep(0.5) #Wait another half second
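#Note: gpiozero can express the same loop in one call; a minimal alternative sketch
#(blink() runs in a background thread, so keep the script alive, e.g. with signal.pause()):
#   import signal
#   ledBlue.blink(on_time=0.5, off_time=0.5)
#   signal.pause()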
|
import numpy as np
from scipy.ndimage import zoom
from imu.io import mkdir, segToRgb
from imageio import imsave
def createMipImages(getInputImage, getOutputName, zran, level_ran=range(3), resize_order=1, do_seg=False):
    # need helper function to get image slice from 3D volume or image names
# getInputImage(z): image at slice z
# getOutputName(m, z): filename at mip m and slice z
output_name = getOutputName(0, 0)
root_folder = output_name[:output_name.rfind('/')]
root_folder = root_folder[:root_folder.rfind('/')]
mkdir(root_folder)
for m in level_ran:
output_name = getOutputName(m, 0)
output_folder = output_name[:output_name.rfind('/')]
mkdir(output_folder)
for z in zran:
im = getInputImage(z)
# downsample until first mip level
for i in range(level_ran[0]-1):
im = zoom(im, 0.5, order=resize_order)
for m in level_ran:
if do_seg:
imsave(getOutputName(m, z), segToRgb(im))
else:
imsave(getOutputName(m, z), im)
if m != level_ran[-1]:
im = zoom(im, 0.5, order=resize_order)
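# A minimal usage sketch (hypothetical paths and volume; assumes a 3D numpy array
# 'vol' indexed as vol[z] and that an 'out/mip<m>/' naming scheme is acceptable):
#   vol = np.random.rand(4, 256, 256)
#   createMipImages(
#       getInputImage=lambda z: vol[z],
#       getOutputName=lambda m, z: 'out/mip%d/%04d.png' % (m, z),
#       zran=range(vol.shape[0]))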
|
from flask import Flask
from flask_restful import Api, Resource, reqparse
import pickle
import numpy as np
from PIL import Image
from io import BytesIO
import base64
# Flask variables
app = Flask(__name__)
api = Api(app)
# Load the Logistic Regression model from Notebook #3
pkl_filename = "ModeloLR.pkl"
with open(pkl_filename, 'rb') as file:
model = pickle.load(file)
# Load MNIST model
pkl_mnist = 'mnist_model.pkl'
with open(pkl_mnist, 'rb') as file:
model_mnist = pickle.load(file)
parser = reqparse.RequestParser()
parser.add_argument('petal_length')
parser.add_argument('petal_width')
parser.add_argument('sepal_length')
parser.add_argument('sepal_width')
number_parser = reqparse.RequestParser()
number_parser.add_argument('img')
size = 28, 28
def decode_img(encoded_string):
img = Image.open(BytesIO(base64.b64decode(encoded_string)))
return img
def reshape_image(img):
img.thumbnail(size, Image.ANTIALIAS) # resize
img = np.invert(img.convert('L')).ravel() # Convert to grayscale and set shape as (28,)
return img
class Predict(Resource):
@staticmethod
def post():
        # Parse the request for the model
args = parser.parse_args()
datos = np.fromiter(args.values(), dtype=float)
        # prediction
out = {'Prediccion': int(model.predict([datos])[0])}
return out, 200
def get(self):
args = parser.parse_args()
datos = np.fromiter(args.values(), dtype=float)
        # prediction
out = {'Prediccion': int(model.predict([datos])[0])}
return out, 200
class PredictNumber(Resource):
@staticmethod
def post():
        # Convert the payload into an image
args = number_parser.parse_args()
img = decode_img(args.img)
img.save('server-images/img.jpg')
# Reshape image 28x28
img = reshape_image(img)
        # Cast to a plain int so the value is JSON-serializable (numpy ints are not)
        result = int(model_mnist.predict([img])[0])
        return { 'result': result }, 200
api.add_resource(Predict, '/predict')
api.add_resource(PredictNumber, '/predict-number')
if __name__ == '__main__':
app.run(debug=True, port='1080')
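# A minimal client-side sketch (hypothetical values; assumes the server above is
# running locally on port 1080):
#   import requests
#   r = requests.post('http://localhost:1080/predict',
#                     data={'petal_length': 1.4, 'petal_width': 0.2,
#                           'sepal_length': 5.1, 'sepal_width': 3.5})
#   print(r.json())  # e.g. {'Prediccion': 0}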
|
# Generated by Django 2.1 on 2018-08-09 01:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('exchanges', '0002_auto_20180809_0049'),
]
operations = [
migrations.RenameField(
model_name='exchange',
old_name='icon',
new_name='logo',
),
]
|
class Solution:
# @param A, a list of integer
# @return an integer
def singleNumber(self, A):
result = 0
for a in A:
result ^= a
return result
s = Solution()
print(s.singleNumber([1, 2, 3, 4, 5, 6, 5, 4, 3, 2, 1, 2, 2, 6, 4]))
|
from django import template
from ghostwriter.shepherd.models import AuxServerAddress
register = template.Library()
@register.simple_tag
def get_primary_address(value):
"""Gets the primary IP address for this server."""
primary_address = value.ip_address
aux_addresses = AuxServerAddress.objects.filter(static_server=value)
for address in aux_addresses:
if address.primary:
primary_address = address.ip_address
return primary_address
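# A minimal usage sketch in a template (hypothetical; assumes this module lives in a
# 'templatetags' package of an installed app and is loaded under this file's module name):
#   {% load <this_module_name> %}
#   {% get_primary_address server as primary %}
#   {{ primary }}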
|
from collections import OrderedDict
import threading
import time
import unittest
from smqtk.utils.read_write_lock import \
ContextualReadWriteLock
def wait_for_value(f, timeout):
"""
Wait a specified timeout period of time (seconds) for the given
function to execute successfully.
`f` usually wraps an assertion function.
:param f: Assertion function.
:type f: () -> None
:param timeout: Time out in seconds to wait for convergence.
:type timeout: float
"""
s = time.time()
neq = True
while neq:
try:
f()
# function passed.
neq = False
        except Exception:
# if assertion fails past timeout, actually raise assertion.
if time.time() - s > timeout:
raise
class TestContextualReadWriteLock(unittest.TestCase):
def setUp(self):
self.state = OrderedDict()
def wait_for_state(self, k):
""" Wait forever until a state attribute is True. """
while k not in self.state or not self.state[k]:
pass
# Added asserts
def assertInState(self, k):
""" Assert key in state """
self.assertIn(k, self.state)
def assertLockFree(self, l):
self.assertEqual(l._semlock._get_value(), 1)
def assertLockAcquired(self, l):
self.assertEqual(l._semlock._get_value(), 0)
# Unit Tests
def test_initial_state(self):
# Test expected lock and value states before use.
crwl = ContextualReadWriteLock()
self.assertLockFree(crwl._service_lock)
self.assertLockFree(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
def test_read_context_state(self):
# Test expected state conditions when transitioning into and out of a
# read-lock context.
crwl = ContextualReadWriteLock()
        def t1_func(c):
            with c.read_context():
                self.state['t1_read_acquired'] = True
                self.wait_for_state('t1_release')
            self.state['t1_read_released'] = True
        t1 = threading.Thread(target=t1_func, args=(crwl,))
t1.daemon = True
t1.start()
# Thread should immediately attempt to acquire read lock. We should see
# that it does successfully.
wait_for_value(lambda: self.assertInState('t1_read_acquired'),
1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 1)
# Trigger thread to release context and check state.
self.state['t1_release'] = True
wait_for_value(lambda: self.assertInState('t1_read_released'),
1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockFree(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
def test_write_context_state(self):
# Test expected state conditions when transitioning into and out of a
# write-lock context.
crwl = ContextualReadWriteLock()
def t1_func(c):
with c.write_context():
self.state['t1_write_acquired'] = True
self.wait_for_state('t1_release')
self.state['t1_write_released'] = True
t1 = threading.Thread(target=t1_func, args=(crwl,))
t1.daemon = True
t1.start()
# Thread should immediately attempt to acquire write lock. We should
# see that it does successfully.
wait_for_value(lambda: self.assertInState('t1_write_acquired'),
1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
# Trigger thread to release context and check state.
self.state['t1_release'] = True
wait_for_value(lambda: self.assertInState('t1_write_released'),
1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockFree(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
def test_concurrent_read_then_write(self):
# Test that a thread with a read lock blocks a write lock from entering.
crwl = ContextualReadWriteLock()
# Thread 1 function - Read lock
def t1_func(c):
with c.read_context():
self.state['t1_read_acquired'] = True
self.wait_for_state('t1_release')
self.state['t1_read_released'] = True
# Thread 2 function - Write lock
def t2_func(c):
self.wait_for_state('t2_acquire')
with c.write_context():
self.state['t2_write_acquired'] = True
self.wait_for_state('t2_release')
self.state['t2_write_released'] = True
t1 = threading.Thread(target=t1_func, args=(crwl,))
t2 = threading.Thread(target=t2_func, args=(crwl,))
t1.daemon = t2.daemon = True
t1.start()
t2.start()
# Upon starting threads, t1 should get read lock and t2 should not have
# done anything yet.
wait_for_value(lambda: self.assertInState('t1_read_acquired'), 1.0)
self.assertNotIn('t2_write_acquired', self.state)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 1)
# t2 should attempt to acquire write context but be blocked. We should
# see that the service lock is acquired and that 't2_write_acquired' is
# not set.
self.state['t2_acquire'] = True
wait_for_value(lambda: self.assertLockAcquired(crwl._service_lock), 1.0)
self.assertNotIn('t2_write_acquired', self.state)
self.assertLockAcquired(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 1)
# Releasing t1's read lock should cause t2 to acquire write lock.
self.state['t1_release'] = True
wait_for_value(lambda: self.assertInState('t1_read_released'), 1.0)
wait_for_value(lambda: self.assertInState('t2_write_acquired'), 1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
# t2 should now be able to release the write lock like normal
self.state['t2_release'] = True
wait_for_value(lambda: self.assertInState('t2_write_released'), 1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockFree(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
def test_concurrent_write_then_read(self):
        # Test that a thread with a write lock blocks a read lock from entering.
crwl = ContextualReadWriteLock()
# Thread 1 function - Write lock
def t1_func(c):
with c.write_context():
self.state['t1_write_acquired'] = True
self.wait_for_state('t1_release')
self.state['t1_write_released'] = True
# Thread 2 function - Read lock
def t2_func(c):
self.wait_for_state('t2_acquire')
self.state['t2_read_attempt'] = True
with c.read_context():
self.state['t2_read_acquired'] = True
self.wait_for_state('t2_release')
self.state['t2_read_released'] = True
t1 = threading.Thread(target=t1_func, args=(crwl,))
t2 = threading.Thread(target=t2_func, args=(crwl,))
t1.daemon = t2.daemon = True
t1.start()
t2.start()
# Upon starting threads, t1 should get write lock and t2 should not have
# done anything yet.
wait_for_value(lambda: self.assertInState('t1_write_acquired'), 1.0)
self.assertNotIn('t2_read_acquired', self.state)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
# t2 should attempt to acquire read context but be blocked. We should
# see that the service lock is acquired and that 't2_read_acquired' is
# not set.
self.state['t2_acquire'] = True
wait_for_value(lambda: self.assertLockAcquired(crwl._service_lock), 1.0)
        self.assertNotIn('t2_read_acquired', self.state)
self.assertLockAcquired(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockAcquired(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
# Releasing t1's write lock should cause t2 to acquire read lock.
self.state['t1_release'] = True
wait_for_value(lambda: self.assertInState('t1_write_released'), 1.0)
wait_for_value(lambda: self.assertInState('t2_read_acquired'), 1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockAcquired(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 1)
# t2 should now be able to release the read lock like normal
self.state['t2_release'] = True
wait_for_value(lambda: self.assertInState('t2_read_released'), 1.0)
self.assertLockFree(crwl._service_lock)
self.assertLockFree(crwl._resource_lock)
self.assertLockFree(crwl._reader_count_lock)
self.assertEqual(crwl._reader_count, 0)
|
#!/usr/bin/python
import optparse
import os
import re
import sys
import shutil
base_path = '/storage/tvseries/'
down_path = '/home/todsah/download'
patterns_epi = [
'(?P<series>.*)[sS](?P<season>\d+?)[eE](?P<episode>\d+)',
'(?P<series>.*)(?P<season>\d\d)(?P<episode>\d\d)',
'(?P<series>.*)(?P<season>\d)(?P<episode>\d\d)',
'(?P<series>.*)(?P<season>\d)x(?P<episode>\d\d)',
]
# Handle options
parser = optparse.OptionParser(
usage="%prog [OPTIONS] [download path]",
version="%prog 0.1",
    description='Find downloaded TV series torrent files and normalize/move them.')
parser.add_option("-d", "--dry-run", dest="dryrun", action="store_true", default=False, help="Do not actually do anything.")
(options, args) = parser.parse_args()
if args:
down_path = args.pop(0)
sys.stdout.write('Searching in %s\n' % (down_path))
for fname in [fname for fname in os.listdir(down_path) if os.path.splitext(fname)[1] == '.avi']:
# Try to find info in a fuzzy way.
matches = []
for pattern in patterns_epi:
m = re.match(pattern, fname)
if m:
matches.append(m.groupdict())
# See which matches the best
info = None
best_score = -1
for match in matches:
score = 0
if int(match['episode']) != 0: score += 1
if int(match['season']) != 0: score += 1
if score > 0 and score > best_score:
info = match
best_score = score
if not info:
sys.stderr.write('Couldn\'t find match for: %s\n' % (fname))
continue
# Normalize the series name
info['series'] = info['series'].replace('.', ' ')
info['series'] = info['series'].replace('_', ' ')
info['series'] = ' '.join([s.capitalize() for s in info['series'].split(' ')])
info['series'] = info['series'].strip()
# Normalize season, episode
info['season'] = int(info['season'])
info['episode'] = int(info['episode'])
# Build final paths
season_path = os.path.join(base_path, str(info['series']), 'Season %s' % (str(info['season'])))
file_path = os.path.join(season_path, '%s - S%02i E%02i.avi' % (info['series'], info['season'], info['episode']))
    # Some checks
if not os.path.exists(base_path):
sys.stderr.write('Base path %s does not exist. Aborting.\n' % (base_path))
sys.exit(1)
else:
sys.stdout.write('%s -> %s\n' % (fname, file_path))
        if not options.dryrun:
            try:
                os.makedirs(season_path)
            except OSError as e:
                if e.errno == 17:
                    # Directory already exists
                    pass
                else:
                    raise
shutil.move(os.path.join(down_path, fname), file_path)
|
# this file is to handle the file transactions
# it is the main application level in the server
# our goal is to make the server as light weight as possible
import time
import trans
import meta_puller
import config
import random
import pickle
import os
import time
# the "transaction" is implemented as follows:
# using some trick to give the client some files to handle,
# when the client commit the transaction, the server issue a mv operation
# to "commit" the change in the server in synchronise way
CreateFileLock = {} # map 'string' to arbitrary integer
WriteFileLock = {}
ReadFileLock = {}
trans.Init()
SERVER_LOG_FILE_NAME = config.SERVER_LOG_FILE_NAME
FT_MODE = 0 #0: ok, 1 no commit at all, 2 commit only one
# pulling data from cloud storage
meta_puller.init_config()
meta_puller.pull_meta()
def request_fail_server(server_id):
meta_puller.report_fail(server_id)
return '0'
def request_ok_server(server_id):
meta_puller.ok_server(server_id)
return '0'
def get_all_files(folder_name):
ret = []
all_files = meta_puller.get_all_file_names()
#print 'all files ', all_files
    for file_name in all_files:
        #print 'file_name ', file_name
tmp = meta_puller.get_file_meta_info(file_name)
file_map = {}
file_map['file_name'] = file_name
file_map['size'] = tmp[1]
file_map['is_folder'] = tmp[2]
ret.append(file_map)
#print 'file_name ', file_name, ' Loop Ends', all_files
return ret
def get_all_chunks_of_file(file_name):
''' return representation of the chunks '''
num_ids = meta_puller.get_chunks_id(file_name)
ret = []
for i in range(num_ids):
one_chunk = {}
tmp = meta_puller.get_file_chunk_info(file_name, i)
one_chunk['file_name'] = file_name;
one_chunk['chunk_index'] = i
one_chunk['chunk_id'] = i
one_chunk['server_id_of_chunk'] = []
for server_id,size in tmp:
one_chunk['server_id_of_chunk'].append(server_id)
one_chunk['server_id_of_chunk'].append(size)
ret.append(one_chunk)
return ret
def abort_transaction(key, trans_value):
if trans_value[0] == 'CREATE_FILE':
return abort_create_file(trans_value)
def abort_create_file(trans):
print "ABORT: " + trans[1]
# which server to put the file
def choose_create_target_server(file_name):
#return range(len(meta_puller.SERVERS))
# randomly select
ret = []
for id in range(len(meta_puller.SERVERS)):
if meta_puller.SERVERS[id]['live'] == 0:
continue
ret.append(id)
random.shuffle(ret)
upper = min(len(ret),config.FILE_DUPLICATE_NUM)
ret = ret[0:upper]
if len(ret) < config.FILE_DUPLICATE_NUM:
ret.extend([-10] * (config.FILE_DUPLICATE_NUM - len(ret)))
return ret
def alive_server_ids():
ret = []
for id in range(len(meta_puller.SERVERS)):
if meta_puller.SERVERS[id]['live'] == 0:
continue
ret.append(id)
return ret
# randomly choose servers to fill up to num_total, skipping ones already chosen
def choose_servers(already_chosen, num_total):
    remain = num_total - len(already_chosen)
    if remain <= 0:
        return already_chosen
    alives = alive_server_ids()
    for i in range(len(alives)):
        if alives[i] in already_chosen:
            alives[i] = -1
    ret = already_chosen
    count = 0
    for i in range(remain):
        if count < len(alives) and alives[count] != -1:
            ret.append(alives[count])
        count += 1
    if len(ret) < num_total:
        ret.extend([-1] * (num_total - len(ret)))
    return ret
# where to put the file to write,
# if it already exist, put it in original server,
# else call choose_create_target_server
def choose_write_target_server(file_name, chunks):
ret = []
for chunk in chunks:
if chunk < meta_puller.get_chunks_id(file_name):
original_location = meta_puller.get_file_chunk_info(file_name,chunk)
original_location = [i[0] for i in original_location]
if len(original_location) >= config.FILE_DUPLICATE_NUM:
ret.extend(original_location[0:config.FILE_DUPLICATE_NUM])
else:
ret.extend(choose_servers(original_location,config.FILE_DUPLICATE_NUM))
#remain = config.FILE_DUPLICATE_NUM - len(original_location)
#num_server = meta_puller.get_server_num()
#other_server = random.shuffle(range(num_server))[0:remain]
# for test purpose, I only use 0
#other_server = [0] * remain
#ret.extend(other_server);
else: # add new chunk
# TODO, implement server choosing algorithm
tmp = choose_create_target_server(file_name)
ret.extend(tmp)
#ret.extend([0] * config.FILE_DUPLICATE_NUM)
return ret
# where to read the files
def choose_read_target_server(file_name, chunks):
# currently I only choose the 1st one
ret = []
max_chunk_id = meta_puller.get_chunks_id(file_name)
for chunk in chunks:
if max_chunk_id <= chunk:
ret.append(-1)
continue
tmp = meta_puller.get_file_chunk_info(file_name,chunk)
ret.append(tmp[0][0])
return ret
# all the transactions returns the 0:0:xx: format for the client to understand
def request_create_file(file_name):
#print file_name
    global CreateFileLock
# check the existence of file_name,
# lock, we assume single thread mode in server, so never mind the lock here..
    if CreateFileLock.has_key(file_name):
if config.IGNORE_LOCK == False:
return '-1:File Creation Pending'
if file_name in meta_puller.get_all_file_names():
        return '-2:File already exists!'
    CreateFileLock[file_name] = 0
# create a transaction id
trans_id = trans.get_next_trans_id()
# add the transaction to transaction manager
target_server = choose_create_target_server(file_name)
trans.AddTrans(trans_id,[ \
'CREATE_FILE', \
file_name,
target_server,
time.time() ])
# return the response for client to do their own stuff
# success, trans_id, file_name
tmp = [str(0), str(trans_id), str(len(target_server)) ]
for server in target_server:
tmp.append(str(server))
tmp.append(file_name);
return ':'.join(tmp)
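# Example response built above (hypothetical values): '0:17:3:0:2:5:myfile.txt'
# i.e. success(0) : trans_id(17) : number of target servers(3) : the server ids
# themselves (0, 2, 5) : the file name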
def request_write_file(file_name,chunks, chunk_sizes):
if file_name not in meta_puller.get_all_file_names():
        return '-2:file does not exist'
# first check the readFileLock
if ReadFileLock.has_key(file_name):
for chunk in chunks:
if chunk in ReadFileLock[file_name].keys():
if config.IGNORE_LOCK == False:
return '-1:Read Locked by others'
if WriteFileLock.has_key(file_name):
for chunk in chunks:
if chunk in WriteFileLock[file_name].keys():
if config.IGNORE_LOCK == False:
return '-1:Write Locked by others'
if not WriteFileLock.has_key(file_name):
WriteFileLock[file_name] = {}
for chunk in chunks:
WriteFileLock[file_name][chunk] = 0
# create a transaction id
trans_id = trans.get_next_trans_id()
# add the transaction to transaction manager
target_server = choose_write_target_server(file_name, chunks)
# [chunk_0_server_1, chunk_0_server_2.., chunk_1_server_1]
trans.AddTrans(trans_id,[ \
'WRITE_FILE', \
file_name,
target_server,
chunks,
chunk_sizes,
time.time() ])
# return the response for client to do their own stuff
# success, trans_id, file_name
tmp = [str(0), str(trans_id), str(len(target_server)) ]
for server in target_server:
tmp.append(str(server))
return ':'.join(tmp)
def request_read_file(file_name,chunks):
if file_name not in meta_puller.get_all_file_names():
        return '-2:file does not exist'
if WriteFileLock.has_key(file_name):
for chunk in chunks:
if chunk in WriteFileLock[file_name].keys():
if config.IGNORE_LOCK == False:
return '-1:Write Locked by others'
if not ReadFileLock.has_key(file_name):
ReadFileLock[file_name] = {}
for chunk in chunks:
ReadFileLock[file_name][chunk] = 0
# create a transaction id
trans_id = trans.get_next_trans_id()
# add the transaction to transaction manager
target_server = choose_read_target_server(file_name, chunks)
meta_puller.copy_file_by_renaming(trans_id, file_name, chunks, target_server);
trans.AddTrans(trans_id,[ \
'READ_FILE', \
file_name,
target_server,
chunks,
time.time() ])
# return the response for client to do their own stuff
# success, trans_id, file_name
tmp = [str(0), str(trans_id), str(len(target_server)) ]
for server in target_server:
tmp.append(str(server))
return ':'.join(tmp)
def del_file(file_name):
# will implement the lock mechanism later
if file_name not in meta_puller.get_all_file_names():
        return '-1:no file named ' + file_name
trans_id = trans.get_next_trans_id()
ret_val,ret_msg = meta_puller.del_file(file_name,trans_id)
return str(ret_val) + ":" + ret_msg
# commit the create file
def commit_create_file(trans_id):
if trans.has_key(trans_id) == False or trans.get_key(trans_id) == None:
return '-1:' + 'No such Trans:' + str(trans_id)
# change the file name
#try:
# mysql_api.create_file((trans.get_key(trans_id))[1])
#except Exception as e:
# abort_transaction(trans_id,trans.get_key(trans_id))
# trans.DelTrans(trans_id)
# return '-1:' + str(e)
meta_puller.create_file_by_renaming(trans_id,trans.get_key(trans_id)[1], trans.get_key(trans_id)[2])
file_name = trans.get_key(trans_id)[1]
    del CreateFileLock[file_name]
trans.DelTrans(trans_id)
return '0'
def handle_ft_mode(mode_id):
global FT_MODE
FT_MODE = mode_id
return '0'
def handle_resume():
global FT_MODE,SERVER_LOG_FILE_NAME
logs = pickle.load(open(SERVER_LOG_FILE_NAME,'r'))
# [trans_id,file_name,chunks,chunk_sizes,target_servers,finished_servers,status]
for index in range(len(logs)):
m = logs[index]
trans_id,file_name,chunks,chunk_sizes,target_servers, finished_servers, status = m
if status != 1:
remain = [i for i in target_servers if i not in finished_servers]
meta_puller.update_file_by_renaming(trans_id, file_name, chunks, chunk_sizes, remain)
logs[index][-2] = target_servers
logs[index][-1] = 1
f = open(SERVER_LOG_FILE_NAME,'w')
pickle.dump(logs,f)
f.close()
FT_MODE = 0
return '0'
# commit the write file
# the log file looks like [trans_id,file_name,chunks,chunk_sizes,target_servers,finished_servers,status]
def commit_write_file(trans_id):
global SERVER_LOG_FILE_NAME
if trans.has_key(trans_id) == False or trans.get_key(trans_id) == None:
return '-1:' + 'No such Trans:' + str(trans_id)
if config.SAVE_FAKE_LOG == True and os.path.exists(SERVER_LOG_FILE_NAME) == False:
f = open(SERVER_LOG_FILE_NAME,'w')
pickle.dump([],f)
f.close()
Trans = trans.get_key(trans_id)
# extract the transaction information
file_name = Trans[1]
target_servers = Trans[2]
chunks = Trans[3]
chunk_sizes = Trans[4]
if config.SAVE_FAKE_LOG == True :
f = open(SERVER_LOG_FILE_NAME,'r')
logs = pickle.load(f)
f.close()
logs.append([trans_id,file_name,chunks,chunk_sizes,target_servers,[],0])
f = open(SERVER_LOG_FILE_NAME,'w')
pickle.dump(logs,f)
f.close()
global FT_MODE
if FT_MODE == 0:
a = time.time()
meta_puller.update_file_by_renaming(trans_id, file_name, chunks, chunk_sizes, target_servers)
#print 'handle commit renaming cost ', time.time() - a, ' s'
if config.SAVE_FAKE_LOG == True:
logs[-1][-2] = target_servers
logs[-1][-1] = 1
elif FT_MODE == 1:
pass
else: #2, partial
target_servers = target_servers[0:1]
meta_puller.update_file_by_renaming(trans_id, file_name, chunks, chunk_sizes, target_servers)
if config.SAVE_FAKE_LOG == True:
logs[-1][-2] = target_servers
logs[-1][-1] = 0
if config.SAVE_FAKE_LOG == True:
f = open(SERVER_LOG_FILE_NAME,'w')
pickle.dump(logs,f)
f.close()
for c in chunks:
del WriteFileLock[file_name][c]
trans.DelTrans(trans_id)
meta_puller.pull_meta()
return '0'
def commit_read_file(trans_id):
if trans.has_key(trans_id) == False or trans.get_key(trans_id) == None:
return '-1:' + 'No such Trans:' + str(trans_id)
Trans = trans.get_key(trans_id)
    # extract the transaction information
file_name = Trans[1]
target_servers = Trans[2]
chunks = Trans[3]
# meta_puller.update_file_by_renaming(trans_id, file_name, chunks, chunk_sizes, target_servers)
# just delete something
meta_puller.del_tmp_file_to_read(trans_id, file_name, chunks, target_servers)
for c in chunks:
del ReadFileLock[file_name][c]
trans.DelTrans(trans_id)
return '0'
def handle_commit(transaction_id, msg):
if trans.has_key(transaction_id) == False:
        return '-1:No Transaction, or Trans was deleted'
trans.LockResource()
ret_result = '0:Transaction Succeed'
#try:
command = trans.get_key(transaction_id)[0]
if command == 'CREATE_FILE':
ret_result = commit_create_file(transaction_id)
elif command == 'WRITE_FILE':
ret_result = commit_write_file(transaction_id)
elif command == 'READ_FILE':
ret_result = commit_read_file(transaction_id)
else:
        ret_result = '-1:unknown transaction cmd'
#except Exception as e:
# print 'Exception!!',str(e)
# trans.DelTrans(transaction_id)
# ret_result = '-1:' + str(e)
trans.UnlockResource()
return ret_result
|
import os, re, stat, fnmatch, platform, glob, traceback, shutil
from functools import total_ordering
from conans import ConanFile, CMake, tools, AutoToolsBuildEnvironment, RunEnvironment, python_requires
from conans.errors import ConanInvalidConfiguration, ConanException
from conans.tools import os_info
# if you are using Python < 3, use: from distutils import strtobool
from distutils.util import strtobool
conan_build_helper = python_requires("conan_build_helper/[~=0.0]@conan/stable")
class TestPackageConan(conan_build_helper.CMakePackage):
name = "chromium_base_test_package"
settings = "os", "compiler", "build_type", "arch"
generators = "cmake", "cmake_find_package"
# sets cmake variables required to use clang 10 from conan
def _is_compile_with_llvm_tools_enabled(self):
return self._environ_option("COMPILE_WITH_LLVM_TOOLS", default = 'false')
# installs clang 10 from conan
def _is_llvm_tools_enabled(self):
return self._environ_option("ENABLE_LLVM_TOOLS", default = 'false')
def build_requirements(self):
self.build_requires("cmake_platform_detection/master@conan/stable")
self.build_requires("cmake_build_options/master@conan/stable")
self.build_requires("cmake_helper_utils/master@conan/stable")
# TODO: separate is_lsan
if self.options['chromium_base'].enable_tsan \
or self.options['chromium_base'].enable_msan \
or self.options['chromium_base'].enable_asan \
or self.options['chromium_base'].enable_ubsan:
self.build_requires("cmake_sanitizers/master@conan/stable")
# provides clang-tidy, clang-format, IWYU, scan-build, etc.
if self._is_llvm_tools_enabled():
self.build_requires("llvm_tools/master@conan/stable")
def build(self):
cmake = CMake(self)
cmake.definitions['ENABLE_UBSAN'] = self.options['chromium_base'].enable_ubsan
cmake.definitions['ENABLE_ASAN'] = self.options['chromium_base'].enable_asan
cmake.definitions['ENABLE_MSAN'] = self.options['chromium_base'].enable_msan
cmake.definitions['ENABLE_TSAN'] = self.options['chromium_base'].enable_tsan
self.add_cmake_option(cmake, "COMPILE_WITH_LLVM_TOOLS", self._is_compile_with_llvm_tools_enabled())
cmake.configure()
cmake.build()
def test(self):
if not tools.cross_building(self):
#bin_path = os.path.join("bin", "test_package")
bin_path = os.path.join(self.build_folder, "chromium_base_test_package")
self.run("%s -s" % bin_path, run_environment=True)
|
import chainer
from onnx_chainer.functions.opset_version import support
from onnx_chainer import onnx_helper
@support((1, 6, 7))
def convert_Dropout(func, opset_version, input_names, output_names, context):
if opset_version == 1:
return onnx_helper.make_node(
'Dropout', input_names, output_names,
is_test=0 if chainer.config.train else 1,
ratio=func.dropout_ratio,
consumed_inputs=[1]
),
elif opset_version == 6:
return onnx_helper.make_node(
'Dropout', input_names, output_names,
is_test=0 if chainer.config.train else 1,
ratio=func.dropout_ratio,
),
elif opset_version == 7:
return onnx_helper.make_node(
'Dropout', input_names, output_names,
ratio=func.dropout_ratio,
),
|
from newspaper import Article
import fact_check
#url = "https://politics.theonion.com/trump-insists-he-never-thought-about-firing-mueller-fe-1822461545" # later to be replaced by active tab's url
#url = "http://uspoliticalpost.com/economic_terrorism_exposed/"
#url = "http://empirenews.net/trump-begins-waging-battle-against-the-war-on-new-years-eve/"
url = "https://nypost.com/2018/01/27/cheating-still-rampant-at-disgraced-stuyvesant-school/"
article = Article(url)
article.download()
article.parse()
article.nlp()
def title():
return article.title
def author():
return article.authors
def keywords():
return article.keywords
def summary():
return article.summary
def content():
return article.text
reliability_score = fact_check.return_score(url, author(), content())
print(reliability_score)
# print(title())
# print(author())
# print(keywords())
# print(summary())
|
# -*- coding:utf-8 -*-
'''
Coin change problem:
We have 3 kinds of coins worth 1, 3, and 5 yuan. To pay 9 yuan,
what is the minimum number of coins needed?
'''
import numpy as np
'''
money = [1,3,5]
total = 9
states = np.zeros((total, total+1))
# states[number of coins used][amount]
for i in money:
states[0][i] = 1
for i in range(1,total):
for j in range(total+1):
if states[i-1][j]:
if j + 1 <= total:
states[i][j+1] = 1
if j + 3 <= total:
states[i][j+3] = 1
if j + 5 <= total:
states[i][j+5] = 1
if states[i][total]:
print(states)
print(i+1)
exit()
'''
def coins_change(money, total):
    states = np.zeros((total+1, total+1))
    for i in money:
        states[0][i] = 1
    # the target may already be payable with a single coin
    if states[0][total] == 1:
        return 1
    for i in range(1, total+1):
        for j in range(total+1):
            if states[i-1][j]:
                for value in money:
                    if j+value <= total:
                        states[i][j+value] = 1
        if states[i][total] == 1:
            return i+1
    # amount is unreachable with the given coins
    return -1
money = [1,3,5]
total = 9
print('Minimum number of coins needed:', coins_change(money, total))
|
#!/bin/python3
import sys
from collections import *
def migratoryBirds(n, ar):
c = Counter(ar).most_common(1)
return (c[0][0])
n = int(input().strip())
ar = list(map(int, input().strip().split(' ')))
result = migratoryBirds(n, ar)
print(result)
|
import unittest
from katas.kyu_7.speed_control import gps
class GPSTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(gps(
15, [0.0, 0.19, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25]), 74)
def test_equals_2(self):
self.assertEqual(gps(
15, [0.0, 0.15, 0.42, 0.67, 1.0, 1.15, 1.5, 1.65, 1.8, 2]), 84)
def test_equals_3(self):
self.assertEqual(gps(
20, [0.0, 0.23, 0.46, 0.69, 0.92, 1.15, 1.38, 1.61]), 41)
def test_equals_4(self):
self.assertEqual(gps(12, [
0.0, 0.11, 0.22, 0.33, 0.44, 0.65, 1.08, 1.26, 1.68, 1.89, 2.1,
2.31, 2.52, 3.25
]), 219)
def test_equals_5(self):
self.assertEqual(gps(20, [
0.0, 0.18, 0.36, 0.54, 0.72, 1.05, 1.26, 1.47, 1.92, 2.16, 2.4,
2.64, 2.88, 3.12, 3.36, 3.6, 3.84
]), 81)
def test_equals_6(self):
self.assertEqual(gps(10, []), 0)
|
>>> from tdd1 import *
>>> f(2,3)
5
>>> f(-1,1)
0
>>> fact(5)
120
>>> fact(1)
1
#from my-rb import *
|
import logging
import numpy as np
from copy import deepcopy
from scipy import sparse
"""
@desc: Adds a generator which produces random spikes and
connects it to the excitatory reservoir neurons
"""
def addNoiseGenerator(self):
# Create spike generator
sg = self.nxNet.createSpikeGenProcess(numPorts=self.p.noiseNeurons)
# Create random spikes
randSpikes = np.random.rand(self.p.noiseNeurons, self.p.stepsPerTrial - self.p.noiseOffset)
randSpikes[randSpikes < (1-self.p.noiseSpikeprob)] = 0
randSpikes[randSpikes >= (1-self.p.noiseSpikeprob)] = 1
# Append offset zeros in the beginning and store spikes in global object
self.noiseSpikes = np.insert(randSpikes.astype(int), 0, np.zeros((self.p.noiseOffset, self.p.noiseNeurons)), axis=1)
# Repeat spikes and add spike times to spike generator
for i in range(self.p.noiseNeurons):
# Get spike times from binary spike array
spikesPerTrial = np.where(self.noiseSpikes[i,:])[0]
# Apply the same random spike every trial
totalNoiseSpikes = np.array([ spikesPerTrial + self.p.stepsPerTrial*k + self.p.resetOffset*(k+1) for k in range(self.p.trials) ]).flatten()
# Add spike times to generator
sg.addSpikes(spikeInputPortNodeIds=i, spikeTimes=totalNoiseSpikes.tolist())
# Create mask for noise/reservoir connections
self.noiseMask = self.drawSparseMaskMatrix(self.p.noiseDens, self.p.reservoirExSize, self.p.noiseNeurons, avoidSelf=False)
# Create weights for noise/reservoir connections
randoms = None
if self.p.onlyExcitatory:
        # weights between 0 and +noiseMaxWeight
randoms = np.random.rand(self.p.reservoirExSize, self.p.noiseNeurons)
else:
        # weights between -noiseMaxWeight and +noiseMaxWeight
randoms = ((np.random.rand(self.p.reservoirExSize, self.p.noiseNeurons)*2) - 1)
self.noiseWeights = sparse.csr_matrix(np.round(self.p.noiseMaxWeight*randoms).astype(int))
#sign = np.random.rand(self.p.reservoirExSize, self.p.noiseNeurons)
#sign[sign < 0.5] = -1
#sign[sign >= 0.5] = 1
#self.noiseWeights = self.drawSparseWeightMatrix(self.noiseMask).multiply(sign).tocsr()
#import sys
# Connect noise network to the reservoir network
for i in range(len(self.exReservoirChunks)):
fr, to = i*self.p.neuronsPerCore, (i+1)*self.p.neuronsPerCore
ma = self.noiseMask[fr:to, :].toarray()
we = self.noiseWeights[fr:to, :].toarray()
pm = deepcopy(we)
pm[pm >= 0] = 0
pm[pm < 0] = 1
#print(pm)
#print(we)
#sys.exit()
#sg.connect(self.exReservoirChunks[i], prototype=self.mixedConnProto, connectionMask=ma, weight=we)
sg.connect(self.exReservoirChunks[i],
prototype=[self.exConnProto, self.inConnProto],
prototypeMap=pm,
connectionMask=ma,
weight=we
)
# Log that background noise was added
logging.info('Background noise was added to the network')
"""
@desc: Create input spiking generator to add a constant signal,
the input is connected to a share of the reservoir network,
an excitatory connection prototype is used
"""
def addConstantGenerator(self):
# Create spike generator
sg = self.nxNet.createSpikeGenProcess(numPorts=self.p.constGens)
constSpikes = []
for i in range(self.p.constGens):
# Generate spikes for one training step
constSpikesInd = self.generateInputSignal(self.p.totalSteps, prob=self.p.constSpikeProb, start=0)
# Add all spikes to spike generator
sg.addSpikes(spikeInputPortNodeIds=i, spikeTimes=constSpikesInd)
# Store const input in object
constSpikes.append(constSpikesInd)
self.constSpikes.append(np.array(constSpikes))
# Connect generator to the reservoir network
startNeuron = 0
endNeuron = (self.p.constSize-1)
# Sample mask for constant input
constMask = self.drawSparseMaskMatrix(self.p.constDens, self.p.reservoirExSize, self.p.constGens)
constMask[endNeuron:, :] = 0 # set all mask values behind last neuron of cluster to zero
# Sample weights for constant input
constWeights = self.drawSparseWeightMatrix(constMask)
# Connect generator to the excitatory reservoir network
for i in range(len(self.exReservoirChunks)):
fr, to = i*self.p.neuronsPerCore, (i+1)*self.p.neuronsPerCore
ma = constMask[fr:to, :].toarray()
we = constWeights[fr:to, :].toarray()
sg.connect(self.exReservoirChunks[i], prototype=self.exConnProto, connectionMask=ma, weight=we)
#self.constMasks.append(constMask)
#self.constWeights.append(constWeights)
|
from .boxplot import boxplot
from .distribution import distribution
from .pca import pca
from .permutation_test import permutation_test
from .roc import roc_boot, roc_cv, roc
from .scatter import scatter
from .scatterCI import scatterCI
from .scatter_ellipse import scatter_ellipse
__all__ = ["boxplot", "distribution", "pca", "permutation_test", "roc_boot", "roc_cv", "roc", "scatter", "scatterCI", "scatter_ellipse"]
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import unittest
import environment
import utils
import tablet
# single shard / 2 tablets
shard_0_master = tablet.Tablet()
shard_0_slave = tablet.Tablet()
cert_dir = environment.tmproot + '/certs'
def setUpModule():
try:
environment.topo_server().setup()
logging.debug('Creating certificates')
os.makedirs(cert_dir)
utils.run(environment.binary_args('vttlstest') +
['-root', cert_dir,
'CreateCA'])
utils.run(environment.binary_args('vttlstest') +
['-root', cert_dir,
'CreateSignedCert',
'-common_name', 'Mysql Server',
'-serial', '01',
'server'])
utils.run(environment.binary_args('vttlstest') +
['-root', cert_dir,
'CreateSignedCert',
'-common_name', 'Mysql Client',
'-serial', '02',
'client'])
extra_my_cnf = cert_dir + '/secure.cnf'
fd = open(extra_my_cnf, 'w')
fd.write('ssl-ca=' + cert_dir + '/ca-cert.pem\n')
fd.write('ssl-cert=' + cert_dir + '/server-cert.pem\n')
fd.write('ssl-key=' + cert_dir + '/server-key.pem\n')
fd.close()
setup_procs = [
shard_0_master.init_mysql(extra_my_cnf=extra_my_cnf),
shard_0_slave.init_mysql(extra_my_cnf=extra_my_cnf),
]
utils.wait_procs(setup_procs)
utils.run_vtctl(['CreateKeyspace', 'test_keyspace'])
shard_0_master.init_tablet('replica', 'test_keyspace', '0')
shard_0_slave.init_tablet('replica', 'test_keyspace', '0')
# create databases so vttablet can start behaving normally
shard_0_master.create_db('vt_test_keyspace')
shard_0_slave.create_db('vt_test_keyspace')
except:
tearDownModule()
raise
def tearDownModule():
utils.required_teardown()
if utils.options.skip_teardown:
return
shard_0_master.kill_vttablet()
shard_0_slave.kill_vttablet()
teardown_procs = [
shard_0_master.teardown_mysql(),
shard_0_slave.teardown_mysql(),
]
utils.wait_procs(teardown_procs, raise_on_error=False)
environment.topo_server().teardown()
utils.kill_sub_processes()
utils.remove_tmp_files()
shard_0_master.remove_tree()
shard_0_slave.remove_tree()
class TestSecure(unittest.TestCase):
"""This test makes sure that we can use SSL replication with Vitess.
"""
def test_secure(self):
# start the tablets
shard_0_master.start_vttablet(wait_for_state='NOT_SERVING')
shard_0_slave.start_vttablet(wait_for_state='NOT_SERVING',
repl_extra_flags={
'flags': '2048',
'ssl-ca': cert_dir + '/ca-cert.pem',
'ssl-cert': cert_dir + '/client-cert.pem',
'ssl-key': cert_dir + '/client-key.pem',
})
# Reparent using SSL (this will also check replication works)
utils.run_vtctl(['InitShardMaster', '-force', 'test_keyspace/0',
shard_0_master.tablet_alias], auto_log=True)
if __name__ == '__main__':
utils.main()
|
import time
for i in range(1, 100):
    with open('Sources\\' + str(i) + '.txt') as f:
        print(f.readline())
    time.sleep(1)
|
"""
설탕배달
https://www.acmicpc.net/problem/2839
"""
n = int(input())
cnt = 0
while n>0:
if n % 5==0:
        cnt += n // 5
n = 0
else:
cnt += 1
n -= 3
if n == 0:
print(int(cnt))
else:
print("-1")
|
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
def check(lhs, rhs, at):
if lhs >= rhs:
print('+ Convex at {}'.format(at))
else:
print('- Concave at {}'.format(at))
def f(x):
return -x*x*x
def f_2(x):
return -x*x*x + 2*x*x + 5
def test_set(x, y, p):
for s in range(p+1):
res = (1-s/p)*x + (s/p)*y
print(res)
def test_func(p_1, p_2, n_samples):
    # n_samples is the number of interpolation steps (an int, as passed from main)
    for s in range(n_samples + 1):
        lam = s / n_samples
        lhs = lam*f(p_1) + (1-lam)*f(p_2)
        rhs = f(lam*p_1 + (1-lam)*p_2)
        check(lhs, rhs, s)
def plot_it(x, y, precision):
    # NOTE: plotting is not implemented yet; this is a stub
    y_pts = []
    for s in range(x, y+1):
        y_pts.append(-1)
def main(*args, **kwargs):
p = 50
x = 15
y = -15
test_func(x, y, p)
plot_it(x, y, p)
if __name__ == '__main__':
main()
|
import time
import random
from pylo import Datatype
from pylo import MicroscopeInterface
from pylo import MeasurementVariable
class DummyMicroscope(MicroscopeInterface):
"""This class represents a dummy microscope.
Attributes
----------
record_time : int or None
The record time in seconds or None for random times (between 0 and 1)
"""
def __init__(self, *args, **kwargs) -> None:
"""Create a new camera interface object."""
super().__init__(*args, **kwargs)
self.record_time = None
self._values = {}
self.lorentz_mode = False
self.supports_parallel_measurement_variable_setting = False
self.registerMeasurementVariable(
MeasurementVariable("focus", "Focus", 0, 100, "nm", Datatype.int, 3),
lambda: self._getVal("focus"), lambda x: self._setVal("focus", x)
)
var = self.registerMeasurementVariable(
MeasurementVariable("ol-current", "Objective lens current", 0,
0x800, "hex", Datatype.hex_int),
lambda: self._getVal("ol-current"),
lambda x: self._setVal("ol-current", x)
)
var.default_step_width_value = 0x300
var.default_end_value = 0x600
self.registerMeasurementVariable(
MeasurementVariable("pressure", "Pressure", 51, 3060, "Pa",
Datatype.int, 1/1020, None, "bar", "Atmospheres",
calibrated_format=float),
lambda: self._getVal("pressure"),
lambda x: self._setVal("pressure", x)
)
self.setMeasurementVariableValue("focus", random.randint(0, 100))
self.setMeasurementVariableValue("ol-current", random.randint(0, 0x800))
self.setMeasurementVariableValue("pressure", random.randint(51, 3060))
def _setVal(self, id_, value):
if isinstance(self.record_time, (int, float)):
if self.record_time >= 0:
time.sleep(self.record_time)
else:
time.sleep(random.random())
self._values[id_] = value
def _getVal(self, id_):
if id_ in self._values:
return self._values[id_]
else:
return 0
def setInLorentzMode(self, lorentz_mode):
self.lorentz_mode = lorentz_mode
    def getInLorentzMode(self):
        return self.lorentz_mode
def resetToSafeState(self) -> None:
pass
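# A minimal usage sketch (hypothetical; assumes pylo is installed and the
# interface can be constructed standalone):
#   scope = DummyMicroscope()
#   scope.record_time = 0                              # skip the simulated delays
#   scope.setMeasurementVariableValue("focus", 42)
#   print(scope._getVal("focus"))                      # -> 42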
|
#!/usr/bin/python3
import sys
import os
def convert(password):
password = password.strip('\'')
if not password.startswith("$SHA$"):
print("Invalid Token: Does not begin with $SHA$")
exit(1)
password = password[5:]
salthash = password.split('$')
if len(salthash) != 2:
print("Invalid Token: Could not get salt and/or hash correctly.")
exit(1)
elif len(salthash[0]) != 16 or len(salthash[1]) != 64:
print("Invalid Token: Bad salt/hash lengths.")
exit(1)
return f'{salthash[1].strip()}${salthash[0]}'
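# Example (hypothetical values): an AuthMe token of the form
#   $SHA$<16-char-salt>$<64-char-sha256-digest>
# is rewritten into the order John the Ripper expects:
#   <64-char-sha256-digest>$<16-char-salt>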
if len(sys.argv) < 2:
print("Usage: authme2john.py <single/file> [OUTPUT]")
print("(Does the file exist, or is the hash not single-quoted?)")
exit(1)
password = sys.argv[1]
output = sys.argv[2] if len(sys.argv) == 3 and os.path.isfile(sys.argv[2]) else None
if os.path.isfile(password):
ostream = open(output, 'a') if output is not None else None
with open(password, 'r') as istream:
for line in istream:
if ostream is None:
print(convert(line))
else:
ostream.write(convert(line) + "\n")
if ostream is not None:
ostream.close()
else:
print(convert(password))
|
from .symex import SymexFrontend, TestInfo
from .fuzz import FuzzerFrontend, FuzzFrontendError
|
from tkinter import *
import time
import pymysql as p
class MainFrame():
def __init__(self ,master ,number):
#Create frame
self.cos = self
self.master = master
self.master.iconbitmap("C:/Users/bkwia/Desktop/python/bankGUI/data/bank.ico")
self.master.title("BANK")
self.master.minsize(1000, 600)
self.master.maxsize(1000, 600)
self.number = number
self.fMainB = Frame(self.master, bg="#D2D2D2")
self.fMainB.place(width=1000, height=600)
self.fNav = Frame(self.fMainB, bg="#83AD87")
self.fNav.place(x=0, y=0, width=160, height=600)
self.fUp = Frame(self.fMainB, bg="#E0E0E0")
self.fUp.place(x=160, y=0, width=840, height=90)
self.fContainerBallance = Frame(self.fMainB, bg="#C4C4C4")
self.fContainerBallance.place(x=230, y=114, width=700, height=200)
#Image
self.imageExit = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/exitImage.png")
self.imageSettings = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/settingsImage.png")
self.imageTransfer = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/transferImage.png")
self.imageContact = PhotoImage(file="C:/Users/bkwia/Desktop/python/bankGUI/data/contactsImage.png")
self.imageHistory = PhotoImage(file="C:/Users/bkwia/Desktop/python/bankGUI/data/historyImage.png")
self.imageInfo = PhotoImage(file="C:/Users/bkwia/Desktop/python/bankGUI/data/myAccountImage.png")
#Connect with data base
try:
myBase = p.connect(host="localhost", user="root", db="bank")
except:
self.connectionErrorL = Label(self.fMainB, text="NIE UDAŁO SIĘ POLĄCZYĆ Z BAZĄ DANYCH", font=("K2D", 24, "bold"), bg="#C4C4C4")
self.connectionErrorL.place(x=230, y=114, width=700, height=200)
self.master.after(4000, self.master.destroy)
return
cursor = myBase.cursor()
#checking balance
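#Note: formatting values straight into SQL strings is open to injection;
#pymysql also supports parameterized queries, e.g. (equivalent sketch):
#cursor.execute("SELECT balance FROM data WHERE number=%s", (self.number,))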
cursor.execute("SELECT balance FROM data WHERE number={}".format(self.number))
accountBalanceB = cursor.fetchall()
for ch in accountBalanceB:
for v in ch:
self.accountBalance=v
#Menu GUI
self.transferF = Frame(self.fMainB, bg="#C4C4C4")
self.contactF = Frame(self.fMainB, bg="#C4C4C4")
self.historyF = Frame(self.fMainB, bg="#C4C4C4")
self.infoF = Frame(self.fMainB, bg="#C4C4C4")
self.button5 = Frame(self.fMainB, bg="#C4C4C4")
self.button6 = Frame(self.fMainB, bg="#C4C4C4")
self.button7 = Frame(self.fMainB, bg="#C4C4C4")
self.button8 = Frame(self.fMainB, bg="#C4C4C4")
self.transferL = Label(self.fMainB, image=self.imageTransfer, bg="#C4C4C4")
self.transferL.bind("<Button-1>", lambda event ,choice = "Transfer(self.master, self.number)" : self.choiceMenu(choice))
self.contactL = Label(self.fMainB, image=self.imageContact, bg="#C4C4C4")
self.contactL.bind("<Button-1>", lambda event ,choice = "Contact(self.master, self.number)" : self.choiceMenu(choice))
self.historyL = Label(self.fMainB, image=self.imageHistory, bg="#C4C4C4")
self.historyL.bind("<Button-1>", lambda event ,choice = "History(self.master, self.number)" : self.choiceMenu(choice))
self.infoL = Label(self.fMainB, image=self.imageInfo, bg="#C4C4C4")
self.infoL.bind("<Button-1>", lambda event ,choice = "Info(self.master, self.number)" : self.choiceMenu(choice))
self.transferF.place(x=172, y=322, width=198, height=128)
self.contactF.place(x=378, y=322, width=198, height=128)
self.historyF.place(x=584, y=322, width=198, height=128)
self.infoF.place(x=790, y=322, width=198, height=128)
self.button5.place(x=172, y=464, width=198, height=128)
self.button6.place(x=378, y=464, width=198, height=128)
self.button7.place(x=584, y=464, width=198, height=128)
self.button8.place(x=790, y=464, width=198, height=128)
self.transferL.place(x=203, y=322, height=128)
self.contactL.place(x=419, y=332)
self.historyL.place(x=629, y=329)
self.infoL.place(x=840, y=328)
self.logo = Label(self.fMainB, text="NARODOWY BANK", font=("K2D", 48, "bold"), bg="#E0E0E0" )
self.accountBalanceL = Label(self.fMainB, text="DOSTĘPNE ŚRODKI: {} zł".format(self.accountBalance), font=("K2D", 30, "bold"), bg="#C4C4C4")
self.settingsB = Label(self.fMainB, image=self.imageSettings, bg="#83AD87")
self.exitB = Label(self.fMainB, image=self.imageExit, bg="#83AD87")
self.settingsB.bind("<Button-1>", self.settings)
self.exitB.bind("<Button-1>", self.exitF)
self.logo.place(x=160, y=0, width=840, height=90)
self.accountBalanceL.place(x=230, y=114, width=700, height=200)
self.settingsB.place(x=48, y=14, width=64, height=63)
self.exitB.place(x=48, y=528, width=64, height=50)
self.fMainB.mainloop()
def choiceMenu(self, choice):
#Destroy Menu GUI
self.fMainB.destroy()
self.logo.destroy()
self.accountBalanceL.destroy()
self.exitB.destroy()
self.settingsB.destroy()
self.transferL.destroy()
self.contactL.destroy()
self.historyL.destroy()
self.infoL.destroy()
objectM = eval(choice)
def settings(self, event):
#Create frame
self.fSettings = Toplevel(self.master, bg="#D2D2D2")
self.fSettings.minsize(280, 400)
self.fSettings.maxsize(280, 400)
self.fSettings.title("BANK - USTAWIENIA")
self.footer = Frame(self.fSettings, bg="#83AD87")
self.footer.place(x=0, y=340, width=280, height=60)
self.soon = Label(self.fSettings, text="SOON")
self.soon.place(x=46, y=60)
#Options
self.returnB = Button(self.fSettings, text="POWRÓT", bg="#FFF8F8", font=("K2D", 12, "bold") ,command=lambda: self.fSettings.destroy())
self.returnB.place(x=90, y=350, width=100, height=40)
def exitF(self, event):
self.logo.destroy()
self.accountBalanceL.destroy()
self.exitB.destroy()
self.settingsB.destroy()
self.transferL.destroy()
self.contactL.destroy()
self.historyL.destroy()
self.infoL.destroy()
self.goodbayL = Label(self.fMainB, text="ŻEGNAMY", font=("K2D", 30, "bold"), bg="#C4C4C4")
self.goodbayL.place(x=230, y=114, width=700, height=200)
self.master.after(2000, self.master.destroy)
class Contact(MainFrame):
def __init__(self, master, number):
self.master = master
self.number = number
#Create frame
self.fMainC = Frame(self.master, bg="#D2D2D2")
self.fMainC.place(width=1000, height=600)
self.fNav = Frame(self.fMainC, bg="#83AD87")
self.fNav.place(x=0, y=0, width=160, height=600)
self.fUp = Frame(self.fMainC, bg="#E0E0E0")
self.fUp.place(x=160, y=0, width=840, height=90)
self.logo = Label(self.fMainC, text="NARODOWY BANK", font=("K2D", 48, "bold"), bg="#E0E0E0" )
self.logo.place(x=160, y=0, width=840, height=90)
#Variables
self.contact = []
self.imageReturn = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/backImage.png")
#Connect with data base
try:
myBase = p.connect(host="localhost", user="root", db="bank")
except:
self.connectionErrorL = Label(self.fMainC, text="NIE UDAŁO SIĘ POLĄCZYĆ Z BAZĄ DANYCH", font=("K2D", 24, "bold"), bg="#C4C4C4" )
self.connectionErrorL.place(x=305, y=291, width=550, height=100)
self.master.after(4000, self.master.destroy)
return
self.cursor = myBase.cursor()
try:
#Import contact
self.cursor.execute("SELECT contact FROM `{}` ".format(self.number))
self.contactB = self.cursor.fetchall()
for ch in self.contactB:
for v in ch:
self.contact.append(v)
except:
pass
#Check multiple contact
self.contactTemp = []
self.contactTrue = []
for i in self.contact:
if i in self.contactTemp:
continue
else:
self.contactTrue.append(i)
self.contactTemp.append(i)
#Create scrollbars
self.scrollbarContact = Scrollbar(self.fMainC)
self.scrollbarContact.place(x=840, y=116, width=30, height=450)
self.contactList = Listbox(self.fMainC, bg="#C4C4C4", highlightcolor="#C4C4C4", font=("K2D", 30, "bold") , yscrollcommand = self.scrollbarContact.set )
if self.contactTrue == ['']:
self.contactList.destroy()
self.scrollbarContact.destroy()
self.emptyContact = Label(self.fMainC, text="Brak kontaktów", font=("K2D", 40, "bold"), bg="#C4C4C4" )
self.emptyContact.place(x=305, y=291, width=550, height=100)
else:
#Print contact
for i in range(len(self.contactTrue)):
self.contactList.insert(END, "{}".format(self.contactTrue[i]))
self.contactList.place(x=320, y=116, width=520, height=450)
self.scrollbarContact.config( command = self.contactList.yview )
self.returnB = Label(self.fMainC, image=self.imageReturn, bg="#83AD87")
self.returnB.bind("<Button-1>", self.returnF)
self.returnB.place(x=37, y=515, width=85, height=85)
def returnF(self, event):
self.fMainC.destroy()
try:
self.scrollbarContact.destroy()
self.contactList.destroy()
except:
pass
try:
self.emptyContact.destroy()
except:
pass
self.returnB.destroy()
MainFrame.__init__(self, self.master, self.number)
def returnError(self):
self.fMainC.destroy()
MainFrame.__init__(self, self.master, self.number)
class Info(MainFrame):
def __init__(self, master,number):
self.master = master
self.number = number
self.imageReturn = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/backImage.png")
#Create frame
self.fMainI = Frame(self.master, bg="#D2D2D2")
self.fMainI.place(width=1000, height=600)
self.fNav = Frame(self.fMainI, bg="#83AD87")
self.fNav.place(x=0, y=0, width=160, height=600)
self.fUp = Frame(self.fMainI, bg="#E0E0E0")
self.fUp.place(x=160, y=0, width=840, height=90)
self.logo = Label(self.fMainI, text="NARODOWY BANK", font=("K2D", 48, "bold"), bg="#E0E0E0" )
self.logo.place(x=160, y=0, width=840, height=90)
self.container = Frame(self.fMainI, bg="#C4C4C4")
self.container.place(x=320, y=116, width=520, height=450)
#Connect with data base
try:
myBase = p.connect(host="localhost", user="root", db="bank")
except:
self.connectionErrorL = Label(self.fMainI, text="Nie udało się połączyć z bazą danych", font=("K2D", 24, "bold"), bg="#C4C4C4" )
self.connectionErrorL.place(x=305, y=291, width=550, height=100)
self.master.after(4000, self.master.destroy)
return
cursor = myBase.cursor()
#import login
cursor.execute("SELECT login FROM data WHERE number = {}".format(self.number))
loginT=cursor.fetchall()
for ch in loginT:
for v in ch:
self.login=v
#import password
cursor.execute("SELECT password FROM data WHERE number = {}".format(self.number))
passwordT=cursor.fetchall()
for ch in passwordT:
for v in ch:
self.password=v
#import pin
cursor.execute("SELECT pin FROM data WHERE number = {}".format(self.number))
pinT=cursor.fetchall()
for ch in pinT:
for v in ch:
self.pin=v
#import balance
cursor.execute("SELECT balance FROM data WHERE number = {}".format(self.number))
balanceT=cursor.fetchall()
for ch in balanceT:
for v in ch:
self.balance=v
#import name
cursor.execute("SELECT name FROM data WHERE number = {}".format(self.number))
nameT=cursor.fetchall()
for ch in nameT:
for v in ch:
self.name=v
#import surname
cursor.execute("SELECT surname FROM data WHERE number = {}".format(self.number))
surnameT=cursor.fetchall()
for ch in surnameT:
for v in ch:
self.surname=v
#import dateOfBirth
cursor.execute("SELECT dateOfBirth FROM data WHERE number = {}".format(self.number))
dateOfBirthT=cursor.fetchall()
for ch in dateOfBirthT:
for v in ch:
self.dateOfBirth=v
#import accountNumber
cursor.execute("SELECT accountNumber FROM data WHERE number = {}".format(self.number))
accountNumberT=cursor.fetchall()
for ch in accountNumberT:
for v in ch:
self.accountNumber=v
#Info GUI
self.loginL = Label(self.fMainI, text="LOGIN - {}".format(self.login), font=("K2D", 16, "bold"), bg="#C4C4C4")
self.passwordL = Label(self.fMainI, text="HASŁO - POKAŻ HASŁO", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.passwordL.bind("<Button-1>", self.showPassword)
self.pinL = Label(self.fMainI, text="PIN - POKAŻ PIN", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.pinL.bind("<Button-1>", self.showPIN)
self.balanceL = Label(self.fMainI, text="STAN KONTA - {}".format(self.balance), font=("K2D", 16, "bold"), bg="#C4C4C4")
self.nameL = Label(self.fMainI, text="IMIĘ - {}".format(self.name), font=("K2D", 16, "bold"), bg="#C4C4C4")
self.surnameL = Label(self.fMainI, text="NAZWISKO - {}".format(self.surname), font=("K2D", 16, "bold"), bg="#C4C4C4")
self.dateOfBirthL = Label(self.fMainI, text="DATA URODZENIA - {}".format(self.dateOfBirth), font=("K2D", 16, "bold"), bg="#C4C4C4")
self.accountNumberL = Label(self.fMainI, text="NUMER KONTA - {}".format(self.accountNumber), font=("K2D", 14, "bold"), bg="#C4C4C4")
self.loginL.place(x=320, y=141, width=520, height=50)
self.passwordL.place(x=320, y=191, width=520, height=50)
self.pinL.place(x=320, y=241, width=520, height=50)
self.balanceL.place(x=320, y=291, width=520, height=50)
self.nameL.place(x=320, y=341, width=520, height=50)
self.surnameL.place(x=320, y=391, width=520, height=50)
self.dateOfBirthL.place(x=320, y=441, width=520, height=50)
self.accountNumberL.place(x=320, y=491, width=520, height=50)
self.returnB = Label(self.fMainI, image=self.imageReturn, bg="#83AD87")
self.returnB.bind("<Button-1>", self.returnF)
self.returnB.place(x=37, y=515, width=85, height=85)
def returnF(self, event):
self.fMainI.destroy()
self.loginL.destroy()
self.passwordL.destroy()
self.pinL.destroy()
self.balanceL.destroy()
self.nameL.destroy()
self.surnameL.destroy()
self.dateOfBirthL.destroy()
self.accountNumberL.destroy()
self.returnB.destroy()
MainFrame.__init__(self, self.master, self.number)
def showPassword(self, event):
self.passwordL.config(text="HASŁO - {} ".format(self.password))
def showPIN(self, event):
self.pinL.config(text="PIN - {} ".format(self.pin))
class History (MainFrame):
def __init__ (self, master, number):
self.master = master
self.number = number
#Create frame
self.fMainH = Frame(self.master, bg="#D2D2D2")
self.fMainH.place(width=1000, height=600)
self.fNav = Frame(self.fMainH, bg="#83AD87")
self.fNav.place(x=0, y=0, width=160, height=600)
self.fUp = Frame(self.fMainH, bg="#E0E0E0")
self.fUp.place(x=160, y=0, width=840, height=90)
self.logo = Label(self.fMainH, text="NARODOWY BANK", font=("K2D", 48, "bold"), bg="#E0E0E0" )
self.logo.place(x=160, y=0, width=840, height=90)
#Variables
theDateOutgoing = []
accountNumberOutgoing = []
amountOutgoing = []
commentOutgoing = []
theDateIncoming = []
accountNumberIncoming = []
amountIncoming = []
commentIncoming = []
self.imageReturn = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/backImage.png")
#Connect with data base
try:
myBaseH = p.connect(host="localhost", user="root", db="bank")
except:
self.connectionErrorL = Label(self.fMainH, text="NIE UDAŁO SIĘ POLĄCZYĆ Z BAZĄ DANYCH", font=("K2D", 12, "bold"), bg="#D2D2D2")
self.connectionErrorL.place(x=392, y=560, width=375, height=40)
self.master.after(4000, self.master.destroy)
return
cursor = myBaseH.cursor()
#Import outgoing
try:
#Import date
cursor.execute("SELECT date FROM `{}` WHERE type='outgoing' ".format(self.number))
dateB = cursor.fetchall()
for ch in dateB:
for v in ch:
theDateOutgoing.append(v)
#Import accountNumber
cursor.execute("SELECT accountNumber FROM `{}` WHERE type='outgoing' ".format(self.number))
accountNumberB = cursor.fetchall()
for ch in accountNumberB:
for v in ch:
accountNumberOutgoing.append(v)
#Import amount
cursor.execute("SELECT amount FROM `{}` WHERE type='outgoing' ".format(self.number))
amountB = cursor.fetchall()
for ch in amountB:
for v in ch:
amountOutgoing.append(v)
#Import comment
cursor.execute("SELECT comment FROM `{}` WHERE type='outgoing' ".format(self.number))
commentB = cursor.fetchall()
for ch in commentB:
for v in ch:
commentOutgoing.append(v)
#Import counter
cursor.execute("SELECT COUNT(*) FROM `{}` WHERE type='outgoing' ".format(self.number))
counterB=cursor.fetchall()
for ch in counterB:
for v in ch:
counterOutgoing=v
except:
self.returnError()
return
#Import incoming
try:
#Import date
cursor.execute("SELECT date FROM `{}` WHERE type='incoming' ".format(self.number))
dateB = cursor.fetchall()
for ch in dateB:
for v in ch:
theDateIncoming.append(v)
#Import accountNumber
cursor.execute("SELECT accountNumber FROM `{}` WHERE type='incoming' ".format(self.number))
accountNumberB = cursor.fetchall()
for ch in accountNumberB:
for v in ch:
accountNumberIncoming.append(v)
#Import amount
cursor.execute("SELECT amount FROM `{}` WHERE type='incoming' ".format(self.number))
amountB = cursor.fetchall()
for ch in amountB:
for v in ch:
amountIncoming.append(v)
#Import comment
cursor.execute("SELECT comment FROM `{}` WHERE type='incoming' ".format(self.number))
commentB = cursor.fetchall()
for ch in commentB:
for v in ch:
commentIncoming.append(v)
#Import counter
cursor.execute("SELECT COUNT(*) FROM `{}` WHERE type='incoming' ".format(self.number))
counterB=cursor.fetchall()
for ch in counterB:
for v in ch:
counterIncoming=v
except:
self.returnError()
return
#Create scrollbars
self.outgoingL = Label(self.fMainH, text="WYCHODZĄCE", font=("K2D", 13, "bold"), bg="#D2D2D2" )
self.incomingL = Label(self.fMainH, text="PRZYCHODZĄCE", font=("K2D", 13, "bold"), bg="#D2D2D2" )
self.scrollbarOutgoing = Scrollbar(self.fMainH)
self.scrollbarOutgoing.place(x=501, y=154, width=30, height=400)
self.historyListOutgoing = Listbox(self.fMainH, yscrollcommand = self.scrollbarOutgoing.set, font=("K2D", 11, "bold"), bg="#C4C4C4", highlightcolor="#C4C4C4")
self.scrollbarIncoming = Scrollbar(self.fMainH)
self.scrollbarIncoming.place(x=929, y=154, width=30, height=400)
self.historyListIncoming = Listbox(self.fMainH, yscrollcommand = self.scrollbarIncoming .set, font=("K2D", 11, "bold"), bg="#C4C4C4", highlightcolor="#C4C4C4")
self.outgoingL.place(x=201, y=102, width=300, height=52)
self.incomingL.place(x=629, y=102, width=300, height=52)
#Print history Outgoing
for i in range(counterOutgoing):
self.historyListOutgoing.insert(END, "Przelew nr {}".format(i+1))
self.historyListOutgoing.insert(END, "Data: {}".format(theDateOutgoing[i]))
self.historyListOutgoing.insert(END, "Adres: {}".format(accountNumberOutgoing[i]))
self.historyListOutgoing.insert(END, "Kwota: {} zł".format(amountOutgoing[i]))
self.historyListOutgoing.insert(END, "Komentarz: {}".format(commentOutgoing[i]))
self.historyListOutgoing.insert(END, "---------------------------------------------------------")
self.historyListOutgoing.place(x=201, y=154, width=300, height=400)
self.scrollbarOutgoing.config( command = self.historyListOutgoing.yview )
#Print history Incoming
for i in range(counterIncoming):
self.historyListIncoming.insert(END, "Przelew nr {}".format(i+1))
self.historyListIncoming.insert(END, "Data: {}".format(theDateIncoming[i]))
self.historyListIncoming.insert(END, "Adres: {}".format(accountNumberIncoming[i]))
self.historyListIncoming.insert(END, "Kwota: {} zł".format(amountIncoming[i]))
self.historyListIncoming.insert(END, "Komentarz: {}".format(commentIncoming[i]))
self.historyListIncoming.place(x=629, y=154, width=300, height=400)
self.scrollbarIncoming.config( command = self.historyListIncoming.yview )
self.returnB = Label(self.fMainH, image=self.imageReturn, bg="#83AD87")
self.returnB.bind("<Button-1>", self.returnF)
self.returnB.place(x=37, y=515, width=85, height=85)
def returnF(self, event):
self.fMainH.destroy()
self.scrollbarOutgoing.destroy()
self.historyListOutgoing.destroy()
self.scrollbarIncoming.destroy()
self.historyListIncoming.destroy()
self.returnB.destroy()
MainFrame.__init__(self, self.master, self.number)
def returnError(self):
self.fMainH.destroy()
MainFrame.__init__(self, self.master, self.number)
class Transfer (MainFrame):
def __init__(self, master, number):
self.master = master
self.number = number
self.imageReturn = PhotoImage(file = "C:/Users/bkwia/Desktop/python/bankGUI/data/backImage.png")
try:
self.errorLabel.destroy()
except:
pass
#Create frame
self.fMainT = Frame(master, bg="#D2D2D2")
self.fMainT.place(width=1000, height=600)
self.fNav = Frame(self.fMainT, bg="#83AD87")
self.fNav.place(x=0, y=0, width=160, height=600)
self.fUp = Frame(self.fMainT, bg="#E0E0E0")
self.fUp.place(x=160, y=0, width=840, height=90)
self.logo = Label(self.fMainT, text="NARODOWY BANK", font=("K2D", 48, "bold"), bg="#E0E0E0" )
self.logo.place(x=160, y=0, width=840, height=90)
self.container = Frame(self.fMainT, bg="#C4C4C4")
self.container.place(x=320, y=116, width=520, height=450)
self.date = time.strftime("%d-%m-%Y %H:%M:%S", time.localtime())
#Connect with data base
try:
self.myBaseT = p.connect(host="localhost", user="root", db="bank")
except:
self.connectionErrorL = Label(self.fMainT, text="NIE UDAŁO SIĘ POLĄCZYĆ Z BAZĄ DANYCH", font=("K2D", 24, "bold"), bg="#C4C4C4" )
self.connectionErrorL.place(x=305, y=291, width=550, height=100)
self.master.after(4000, self.master.destroy)
return
self.cursor = self.myBaseT.cursor()
#Contact
self.contactT = []
try:
#Import contact
self.cursor.execute("SELECT contact FROM `{}` ".format(self.number))
self.contactBT = self.cursor.fetchall()
for ch in self.contactBT:
for v in ch:
self.contactT.append(v)
except:
pass
#Check multiple contact
self.contactTempT = []
self.contactTrueT = []
for i in self.contactT:
if i in self.contactTempT:
continue
else:
self.contactTrueT.append(i)
self.contactTempT.append(i)
#checking balance
self.cursor.execute("SELECT balance FROM data WHERE number={}".format(self.number))
accountBalanceB = self.cursor.fetchall()
for ch in accountBalanceB:
for v in ch:
self.accountBalance=v
self.accountBalanceLT = Label(self.fMainT, text="STAN KONTA: {} zł".format(self.accountBalance), font=("K2D", 25, "bold"), bg="#C4C4C4")
self.contactL = Label(self.fMainT, text="Kontakt", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.contactE = Entry(self.fMainT, font=("K2D", 13, "bold"), bg="#FFF8F8")
self.nrOfTL = Label(self.fMainT, text="Numer konta odbiorcy ", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.nrOfTE = Entry(self.fMainT, font=("K2D", 13, "bold"), bg="#FFF8F8")
self.nrOfTE.bind("<Button-1>", self.checkContact)
self.howMuchL = Label(self.fMainT, text="Kwota przelewu", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.howMuchE = Entry(self.fMainT, font=("K2D", 13, "bold"), bg="#FFF8F8")
self.comL = Label(self.fMainT, text="Komentarz do przelewu", font=("K2D", 16, "bold"), bg="#C4C4C4")
self.comE = Entry(self.fMainT, font=("K2D", 13, "bold"), bg="#FFF8F8")
self.confirmB = Button(self.fMainT, text="Wyślij", command = lambda: self.confirmF(), font=("K2D", 16, "bold"), bg="#FFF8F8")
self.accountBalanceLT.place(x=320, y=116, width=520, height=50)
self.contactL.place(x=320, y=192, width=260, height=60)
self.contactE.place(x=580, y=202, width=241, height=50)
self.nrOfTL.place(x=320, y=252, width=260, height=60)
self.nrOfTE.place(x=580, y=262, width=241, height=50)
self.howMuchL.place(x=320, y=312, width=260, height=60)
self.howMuchE.place(x=580, y=322, width=241, height=50)
self.comL.place(x=320, y=372, width=260, height=60)
self.comE.place(x=580, y=382, width=241, height=50)
self.confirmB.place(x=480, y=474, width=200, height=50)
self.returnB = Label(self.fMainT, image=self.imageReturn, bg="#83AD87")
self.returnB.bind("<Button-1>", self.returnF)
self.returnB.place(x=37, y=515, width=85, height=85)
def checkContact(self, event):
if self.contactE.get() in self.contactTrueT:
#import accountNumber:
try:
self.cursor.execute("SELECT accountNumber FROM `{}` WHERE contact = '{}' ".format(self.number, self.contactE.get()))
accountNumberPromptE = self.cursor.fetchall()
for ch in accountNumberPromptE:
for v in ch:
self.accountNumberPrompt = v
except:
pass
self.nrOfTE.delete(0, END)
self.nrOfTE.insert(0, self.accountNumberPrompt)
return "break"
def confirmF(self):
if int(self.howMuchE.get()) > self.accountBalance:
self.errorLabel= Label(self.fMainT, text="Podana kwota jest większa od stanu konta", font=("K2D", 10, "bold"), bg="#C4C4C4")
self.errorLabel.place(x=357, y=432, width=446, height=42)
return
else:
self.cursor.execute("SELECT accountNumber FROM data WHERE accountNumber= '{}' ".format(self.nrOfTE.get()))
self.nrOfTDB = self.cursor.fetchall()
for ch in self.nrOfTDB:
for v in ch:
self.nrOfTDB=v
if self.nrOfTE.get() == self.nrOfTDB:
self.cursor.execute("SELECT balance FROM data WHERE accountNumber = '{}' ".format(self.nrOfTE.get()))
self.balance = self.cursor.fetchall()
for ch in self.balance:
for v in ch:
self.balance=v
self.newBalance = self.balance + float(self.howMuchE.get())
self.cursor.execute("UPDATE data SET balance = {} WHERE accountNumber = '{}' ".format(self.newBalance, self.nrOfTE.get()))
self.myBaseT.commit()
self.cursor.execute("SELECT number FROM data WHERE accountNumber = '{}' ".format(self.nrOfTE.get()))
self.numberTo = self.cursor.fetchall()
for ch in self.numberTo:
for v in ch:
self.numberTo=v
self.cursor.execute("INSERT INTO `{}`(date, accountNumber, amount, comment, type) VALUES ('{}' , '{}' , {} , '{}', '{}')".format(self.numberTo, self.date, self.nrOfTE.get(), float(self.howMuchE.get()), self.comE.get(), "incoming"))
self.myBaseT.commit()
#change balance
self.accountBalance -= int(self.howMuchE.get())
self.cursor.execute("UPDATE data SET balance = {} WHERE number={}".format(self.accountBalance, self.number))
self.myBaseT.commit()
#saving history
self.cursor.execute("INSERT INTO `{}`(date, accountNumber, contact, amount, comment, type) VALUES ('{}' , '{}' ,'{}', {} , '{}', '{}')".format(self.number, self.date, self.nrOfTE.get(),self.contactE.get(), float(self.howMuchE.get()), self.comE.get(), "outgoing"))
self.myBaseT.commit()
self.cursor.close()
self.myBaseT.close()
self.master.after(2000, self.returnF2)
def returnF(self, event):
self.fMainT.destroy()
self.accountBalanceLT.destroy()
self.nrOfTL.destroy()
self.nrOfTE.destroy()
self.contactL.destroy()
self.contactE.destroy()
self.howMuchL.destroy()
self.howMuchE.destroy()
self.comL.destroy()
self.comE.destroy()
self.confirmB.destroy()
self.returnB.destroy()
MainFrame.__init__(self, self.master, self.number)
def returnF2(self):
self.fMainT.destroy()
self.accountBalanceLT.destroy()
self.nrOfTL.destroy()
self.nrOfTE.destroy()
self.contactL.destroy()
self.contactE.destroy()
self.howMuchL.destroy()
self.howMuchE.destroy()
self.comL.destroy()
self.comE.destroy()
self.confirmB.destroy()
self.returnB.destroy()
MainFrame.__init__(self, self.master, self.number)
|
from django.db import models
# Create your models here.
class Data_sets(models.Model):
train_data = models.FileField(upload_to='files')
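# Usage sketch (assumes a standard Django project with MEDIA_ROOT configured;
# names are illustrative):
#     ds = Data_sets(train_data=uploaded_file)
#     ds.save()  # stores the file under MEDIA_ROOT/files/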
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'dkdrive_2.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1107, 728)
MainWindow.setMinimumSize(QtCore.QSize(1107, 728))
MainWindow.setMaximumSize(QtCore.QSize(1107, 728))
MainWindow.setLayoutDirection(QtCore.Qt.RightToLeft)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.func_table = QtWidgets.QTabWidget(self.centralwidget)
self.func_table.setGeometry(QtCore.QRect(0, 0, 1111, 701))
self.func_table.setMinimumSize(QtCore.QSize(0, 0))
self.func_table.setMaximumSize(QtCore.QSize(1111, 701))
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(24)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.func_table.setFont(font)
self.func_table.setLayoutDirection(QtCore.Qt.LeftToRight)
self.func_table.setStyleSheet("font: 24pt \"Agency FB\";")
self.func_table.setObjectName("func_table")
self.my_drive = QtWidgets.QWidget()
self.my_drive.setObjectName("my_drive")
self.current_dir_edit = QtWidgets.QLineEdit(self.my_drive)
self.current_dir_edit.setGeometry(QtCore.QRect(70, 10, 591, 21))
self.current_dir_edit.setStyleSheet("font: 9pt \"微软雅黑\";\n"
"color: rgb(0, 85, 255);")
self.current_dir_edit.setObjectName("current_dir_edit")
self.current_dir_label = QtWidgets.QLabel(self.my_drive)
self.current_dir_label.setGeometry(QtCore.QRect(0, 10, 71, 21))
self.current_dir_label.setStyleSheet("font: 11pt \"Agency FB\";")
self.current_dir_label.setObjectName("current_dir_label")
self.search_edit = QtWidgets.QLineEdit(self.my_drive)
self.search_edit.setGeometry(QtCore.QRect(910, 10, 131, 21))
self.search_edit.setStyleSheet("font: 9pt \"微软雅黑\";\n"
"color: rgb(0, 85, 255);")
self.search_edit.setObjectName("search_edit")
self.search_btn = QtWidgets.QPushButton(self.my_drive)
self.search_btn.setGeometry(QtCore.QRect(1050, 10, 51, 23))
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.search_btn.setFont(font)
self.search_btn.setStyleSheet("font: 9pt \"Agency FB\";")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/my_pic/search_icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.search_btn.setIcon(icon)
self.search_btn.setObjectName("search_btn")
self.return_btn = QtWidgets.QPushButton(self.my_drive)
self.return_btn.setGeometry(QtCore.QRect(670, 10, 51, 23))
self.return_btn.setStyleSheet("font: 9pt \"Agency FB\";")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/my_pic/return_icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.return_btn.setIcon(icon1)
self.return_btn.setObjectName("return_btn")
self.upload_file = QtWidgets.QPushButton(self.my_drive)
self.upload_file.setGeometry(QtCore.QRect(740, 10, 81, 23))
self.upload_file.setStyleSheet("font: 9pt \"Agency FB\";")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/my_pic/upload_icon.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.upload_file.setIcon(icon2)
self.upload_file.setObjectName("upload_file")
self.upload_dir = QtWidgets.QPushButton(self.my_drive)
self.upload_dir.setGeometry(QtCore.QRect(820, 10, 81, 23))
self.upload_dir.setStyleSheet("font: 9pt \"Agency FB\";")
self.upload_dir.setIcon(icon2)
self.upload_dir.setObjectName("upload_dir")
self.func_table.addTab(self.my_drive, "")
self.transfer_widget = QtWidgets.QWidget()
self.transfer_widget.setObjectName("transfer_widget")
self.left_widget = QtWidgets.QListWidget(self.transfer_widget)
self.left_widget.setGeometry(QtCore.QRect(0, 0, 141, 131))
self.left_widget.setObjectName("left_widget")
item = QtWidgets.QListWidgetItem()
self.left_widget.addItem(item)
item = QtWidgets.QListWidgetItem()
self.left_widget.addItem(item)
item = QtWidgets.QListWidgetItem()
self.left_widget.addItem(item)
self.right_widget = QtWidgets.QStackedWidget(self.transfer_widget)
self.right_widget.setGeometry(QtCore.QRect(140, 0, 961, 581))
self.right_widget.setObjectName("right_widget")
self.download_page = QtWidgets.QWidget()
self.download_page.setObjectName("download_page")
self.download_table = QtWidgets.QTableWidget(self.download_page)
self.download_table.setGeometry(QtCore.QRect(0, 0, 941, 561))
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(24)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.download_table.setFont(font)
self.download_table.setObjectName("download_table")
self.download_table.setColumnCount(5)
self.download_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.download_table.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.download_table.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.download_table.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.download_table.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.download_table.setHorizontalHeaderItem(4, item)
self.right_widget.addWidget(self.download_page)
self.upload_page = QtWidgets.QWidget()
self.upload_page.setObjectName("upload_page")
self.upload_table = QtWidgets.QTableWidget(self.upload_page)
self.upload_table.setGeometry(QtCore.QRect(0, 0, 941, 561))
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(24)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.upload_table.setFont(font)
self.upload_table.setObjectName("upload_table")
self.upload_table.setColumnCount(4)
self.upload_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.upload_table.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.upload_table.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.upload_table.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.upload_table.setHorizontalHeaderItem(3, item)
self.right_widget.addWidget(self.upload_page)
self.page_3 = QtWidgets.QWidget()
self.page_3.setObjectName("page_3")
self.finish_table = QtWidgets.QTableWidget(self.page_3)
self.finish_table.setGeometry(QtCore.QRect(0, 0, 941, 561))
font = QtGui.QFont()
font.setFamily("Agency FB")
font.setPointSize(24)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.finish_table.setFont(font)
self.finish_table.setObjectName("finish_table")
self.finish_table.setColumnCount(6)
self.finish_table.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.finish_table.setHorizontalHeaderItem(5, item)
self.right_widget.addWidget(self.page_3)
self.func_table.addTab(self.transfer_widget, "")
self.file_share = QtWidgets.QWidget()
self.file_share.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.file_share.setObjectName("file_share")
self.func_table.addTab(self.file_share, "")
self.others = QtWidgets.QWidget()
self.others.setObjectName("others")
self.func_table.addTab(self.others, "")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1107, 23))
self.menubar.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
self.menubar.setFont(font)
self.menubar.setAutoFillBackground(True)
self.menubar.setObjectName("menubar")
self.menufile = QtWidgets.QMenu(self.menubar)
self.menufile.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.menufile.setLayoutDirection(QtCore.Qt.RightToLeft)
self.menufile.setAutoFillBackground(True)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/my_pic/logo-splash.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.menufile.setIcon(icon3)
self.menufile.setToolTipsVisible(False)
self.menufile.setObjectName("menufile")
self.menuhelp = QtWidgets.QMenu(self.menubar)
self.menuhelp.setObjectName("menuhelp")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.action123 = QtWidgets.QAction(MainWindow)
self.action123.setObjectName("action123")
self.action123123 = QtWidgets.QAction(MainWindow)
self.action123123.setObjectName("action123123")
self.actionguanyu = QtWidgets.QAction(MainWindow)
self.actionguanyu.setObjectName("actionguanyu")
self.menu_file = QtWidgets.QAction(MainWindow)
self.menu_file.setObjectName("menu_file")
self.action2 = QtWidgets.QAction(MainWindow)
self.action2.setObjectName("action2")
self.action2_2 = QtWidgets.QAction(MainWindow)
self.action2_2.setObjectName("action2_2")
self.action3 = QtWidgets.QAction(MainWindow)
self.action3.setObjectName("action3")
self.actionopen = QtWidgets.QAction(MainWindow)
self.actionopen.setShortcutVisibleInContextMenu(False)
self.actionopen.setObjectName("actionopen")
self.actionsave = QtWidgets.QAction(MainWindow)
self.actionsave.setObjectName("actionsave")
self.actioncontect_us = QtWidgets.QAction(MainWindow)
self.actioncontect_us.setObjectName("actioncontect_us")
self.menufile.addAction(self.actionopen)
self.menufile.addAction(self.actionsave)
self.menufile.addSeparator()
self.menuhelp.addAction(self.actioncontect_us)
self.menubar.addAction(self.menuhelp.menuAction())
self.menubar.addAction(self.menufile.menuAction())
self.retranslateUi(MainWindow)
self.func_table.setCurrentIndex(0)
self.right_widget.setCurrentIndex(1)
self.left_widget.currentRowChanged['int'].connect(self.right_widget.setCurrentIndex)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "dkdrive"))
self.current_dir_edit.setText(_translate("MainWindow", "/"))
self.current_dir_label.setText(_translate("MainWindow", "当前目录:"))
self.search_btn.setText(_translate("MainWindow", "搜索"))
self.return_btn.setText(_translate("MainWindow", "返回"))
self.upload_file.setText(_translate("MainWindow", "文件上传"))
self.upload_dir.setText(_translate("MainWindow", "目录上传"))
self.func_table.setTabText(self.func_table.indexOf(self.my_drive), _translate("MainWindow", "我的网盘"))
__sortingEnabled = self.left_widget.isSortingEnabled()
self.left_widget.setSortingEnabled(False)
item = self.left_widget.item(0)
item.setText(_translate("MainWindow", "正在下载"))
item = self.left_widget.item(1)
item.setText(_translate("MainWindow", "正在上传"))
item = self.left_widget.item(2)
item.setText(_translate("MainWindow", "传输完成"))
self.left_widget.setSortingEnabled(__sortingEnabled)
item = self.download_table.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "file_name"))
item = self.download_table.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "loading"))
item = self.download_table.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "delete"))
item = self.download_table.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "stop"))
item = self.download_table.horizontalHeaderItem(4)
item.setText(_translate("MainWindow", "locate"))
item = self.upload_table.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "file_name"))
item = self.upload_table.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "loading"))
item = self.upload_table.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "delete"))
item = self.upload_table.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "stop"))
item = self.finish_table.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "file_name"))
item = self.finish_table.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "loading"))
item = self.finish_table.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "delete"))
item = self.finish_table.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "stop"))
item = self.finish_table.horizontalHeaderItem(4)
item.setText(_translate("MainWindow", "locate"))
item = self.finish_table.horizontalHeaderItem(5)
item.setText(_translate("MainWindow", "time"))
self.func_table.setTabText(self.func_table.indexOf(self.transfer_widget), _translate("MainWindow", "传输列表"))
self.func_table.setTabText(self.func_table.indexOf(self.file_share), _translate("MainWindow", " 好友分享"))
self.func_table.setTabText(self.func_table.indexOf(self.others), _translate("MainWindow", "others"))
self.menufile.setTitle(_translate("MainWindow", "file"))
self.menuhelp.setTitle(_translate("MainWindow", "help"))
self.action123.setText(_translate("MainWindow", "123"))
self.action123123.setText(_translate("MainWindow", "123123"))
self.actionguanyu.setText(_translate("MainWindow", "guanyu"))
self.menu_file.setText(_translate("MainWindow", "1"))
self.action2.setText(_translate("MainWindow", "你好"))
self.action2_2.setText(_translate("MainWindow", "2"))
self.action3.setText(_translate("MainWindow", "3"))
self.actionopen.setText(_translate("MainWindow", "user_name"))
self.actionsave.setText(_translate("MainWindow", "setting"))
self.actioncontect_us.setText(_translate("MainWindow", "handbook"))
import photo_1_rc
|
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
# PKGBUILDer v2.1.5.12
# An AUR helper (and library) in Python 3.
# Copyright © 2011-2012, Kwpolska.
# See /LICENSE for licensing information.
# Names convention: pkg = a package object, pkgname = a package name.
"""
pkgbuilder.pbds
~~~~~~~~~~~~~~~
PKGBUILDer Data Storage.
:Copyright: © 2011-2012, Kwpolska.
:License: BSD (see /LICENSE).
"""
from . import _, __version__
import sys
import os
import logging
import subprocess
### PBDS PB global data storage ###
class PBDS():
"""PKGBUILDer Data Storage."""
# For fancy-schmancy messages stolen from makepkg.
colors = {
'all_off': '\x1b[1;0m',
'bold': '\x1b[1;1m',
'blue': '\x1b[1;1m\x1b[1;34m',
'green': '\x1b[1;1m\x1b[1;32m',
'red': '\x1b[1;1m\x1b[1;31m',
'yellow': '\x1b[1;1m\x1b[1;33m'
}
pacman = False
validate = True
depcheck = True
pkginst = True
protocol = 'https'
categories = ['ERROR', 'none', 'daemons', 'devel', 'editors',
'emulators', 'games', 'gnome', 'i18n', 'kde',
'lib', 'modules', 'multimedia', 'network',
'office', 'science', 'system', 'x11',
'xfce', 'kernels']
# TRANSLATORS: see makepkg.
inttext = _('Aborted by user! Exiting...')
# TRANSLATORS: see pacman.
wrapperinttext = _('Interrupt signal received\n')
### STUFF NOT TO BE CHANGED BY HUMAN BEINGS. EVER.
mp1 = '=='
mp2 = ' '
debug = False
console = None
if os.getenv('PACMAN') is None:
paccommand = 'pacman'
else:
paccommand = os.getenv('PACMAN')
if os.path.exists('/usr/bin/sudo'):
hassudo = True
else:
hassudo = False
uid = os.geteuid()
# Creating the configuration/log stuff...
confhome = os.getenv('XDG_CONFIG_HOME')
if confhome is None:
confhome = os.path.expanduser('~/.config/')
kwdir = os.path.join(confhome, 'kwpolska')
confdir = os.path.join(kwdir, 'pkgbuilder')
if not os.path.exists(confhome):
os.mkdir(confhome)
if not os.path.exists(kwdir):
os.mkdir(kwdir)
if not os.path.exists(confdir):
os.mkdir(confdir)
if not os.path.exists(confdir):
print(' '.join((_('ERROR:'), _('Cannot create the configuration '
'directory.'))))
print(' '.join((_('WARNING:'), _('Logs will not be created.'))))
logging.basicConfig(format='%(asctime)-15s [%(levelname)-7s] '
':%(name)-10s: %(message)s',
filename=os.path.join(confdir, 'pkgbuilder.log'),
level=logging.DEBUG)
log = logging.getLogger('pkgbuilder')
log.info('*** PKGBUILDer v' + __version__)
def sudo(self, *rargs):
"""
Run as root. ``sudo`` if present, ``su -c`` otherwise, nothing if
already running as root.
.. note:: Accepts only one command. `shell=False`, for safety.
``*rargs`` is catching all the arguments. However, in order to make
sure that nothing breaks, it checks if the element is a list or a
tuple. If yes, it is appended to the argument list (Python’s ``+``
operator); if not, it is split on spaces (``.split(' ')``) and
appended to the argument list. Finally, the list is passed to
``subprocess.call``.
"""
args = []
for i in rargs:
if isinstance(i, (list, tuple)):
for j in i:
args.append(j)
else:
for j in i.split(' '):
args.append(j)
if self.uid != 0:
if self.hassudo:
subprocess.call(['sudo'] + args)
else:
subprocess.call(['su', '-c', ' '.join(args)])
else:
subprocess.call(args)
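# e.g., when not running as root and sudo is available,
# self.sudo('pacman -S', ['foo', 'bar']) ends up calling:
# subprocess.call(['sudo', 'pacman', '-S', 'foo', 'bar'])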
def debugmode(self, nochange=False):
"""Print all the logged messages to stderr."""
if not self.debug:
self.console = logging.StreamHandler()
self.console.setLevel(logging.DEBUG)
self.console.setFormatter(logging.Formatter('[%(levelname)-7s] '
':%(name)-10s: %(message)s'))
logging.getLogger('').addHandler(self.console)
self.debug = True
self.mp1 = 'pb'
self.mp2 = 'pb'
elif self.debug and nochange:
pass
else:
logging.getLogger('').removeHandler(self.console)
self.debug = False
self.mp1 = '=='
self.mp2 = ' '
def colorson(self):
"""Colors on."""
self.colors = {
'all_off': '\x1b[1;0m',
'bold': '\x1b[1;1m',
'blue': '\x1b[1;1m\x1b[1;34m',
'green': '\x1b[1;1m\x1b[1;32m',
'red': '\x1b[1;1m\x1b[1;31m',
'yellow': '\x1b[1;1m\x1b[1;33m'
}
def colorsoff(self):
"""Colors off."""
self.colors = {
'all_off': '',
'bold': '',
'blue': '',
'green': '',
'red': '',
'yellow': ''
}
def fancy_msg(self, text):
"""makepkg's msg(). Use for main messages."""
sys.stderr.write(self.colors['green'] + self.mp1 + '>' +
self.colors['all_off'] +
self.colors['bold'] + ' ' + text +
self.colors['all_off'] + '\n')
self.log.info('(auto fancy_msg ) ' + text)
def fancy_msg2(self, text):
"""makepkg's msg2(). Use for sub-messages."""
sys.stderr.write(self.colors['blue'] + self.mp2 + '->' +
self.colors['all_off'] +
self.colors['bold'] + ' ' + text +
self.colors['all_off'] + '\n')
self.log.info('(auto fancy_msg2 ) ' + text)
def fancy_warning(self, text):
"""makepkg's warning(). Use when you have problems."""
sys.stderr.write(self.colors['yellow'] + self.mp1 + '> ' +
_('WARNING:') + self.colors['all_off'] +
self.colors['bold'] + ' ' + text +
self.colors['all_off'] + '\n')
self.log.warning('(auto fancy_warning ) ' + text)
def fancy_warning2(self, text):
"""Like fancy_warning, but looks like a sub-message (fancy_msg2)."""
sys.stderr.write(self.colors['yellow'] + self.mp2 + '->' +
self.colors['all_off'] + self.colors['bold'] + ' ' +
text + self.colors['all_off'] + '\n')
self.log.warning('(auto fancy_warning2) ' + text)
def fancy_error(self, text):
"""makepkg's error(). Use for errors. Quitting is suggested."""
sys.stderr.write(self.colors['red'] + self.mp1 + '> ' + _('ERROR:') +
self.colors['all_off'] + self.colors['bold'] + ' ' +
text + self.colors['all_off'] + '\n')
self.log.error('(auto fancy_error ) ' + text)
def fancy_error2(self, text):
"""Like fancy_error, but looks like a sub-message (fancy_msg2)."""
sys.stderr.write(self.colors['red'] + self.mp2 + '->' +
self.colors['all_off'] + self.colors['bold'] + ' ' +
text + self.colors['all_off'] + '\n')
self.log.error('(auto fancy_error2 ) ' + text)
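# A minimal usage sketch (message helpers only, as defined above):
#     pbds = PBDS()
#     pbds.fancy_msg('synchronizing package databases...')
#     pbds.fancy_msg2('community is up to date')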
|
"""
Student: Karina Jonina - 10543032
Module: B8IT110
Module Name: HDIP PROJECT
Task: Time Series Forecasting of Cryptocurrency
File: This file contains the functions used to run the forecasting workflow
"""
# Importing the necessary libraries
import re
import json
import codecs
import requests
import numpy as np
import pandas as pd
import datetime as dt
from datetime import datetime
import yfinance as yf
import seaborn as sns
import mplfinance as mpf
import matplotlib.pyplot as plt
import mpld3
import plotly.graph_objects as go
import plotly.express as px
import plotly.io as pio
from plotly.subplots import make_subplots
from bs4 import BeautifulSoup
from pylab import rcParams
import statsmodels.api as sm
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from statsmodels.tsa.stattools import pacf, kpss, adfuller
from statsmodels.tsa.arima_model import ARMA
from statsmodels.tsa.seasonal import seasonal_decompose
# display plotly in browser when run in Spyder
pio.renderers.default = 'browser'
## =============================================================================
## Reading in files for ease of use
## =============================================================================
## read the CSV file
#df_cryptolist = pd.read_csv('df_cryptolist.csv')
# =============================================================================
# Getting Yahoo Table
# =============================================================================
# getting the live page
def get_yahoo_table():
global df_cryptolist
headers = {'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'}
url = 'https://finance.yahoo.com/cryptocurrencies/'
# url = 'https://coinmarketcap.com/'
response = requests.get(url , headers = headers)
content = response.content
soup = BeautifulSoup(content, features="html.parser")
pattern = re.compile(r'\s--\sData\s--\s')
script_data = soup.find('script', text = pattern).contents[0]
start = script_data.find("context")-2
json_data = json.loads(script_data[start:-12])
# examining the columns
# columns_json = json_data['context']['dispatcher']['stores']['ScreenerResultsStore']['results']['columns']
# Checking that it works
# print('=====================================')
# print('Columns\' Names')
# print('=====================================')
# print(columns_json)
# this is where the data is
crypto_json = json_data['context']['dispatcher']['stores']['ScreenerResultsStore']['results']['rows']
# # Checking that it works
# print('=====================================')
# print('Printing First JSON - Bitcoin')
# print('=====================================')
# print(crypto_json[0])
df_cryptolist = pd.json_normalize(crypto_json)
# creating a dataset with the right columns and correct column names
df_cryptolist = pd.DataFrame({'Symbol': df_cryptolist['symbol'],
'Name': df_cryptolist['shortName'],
'Price (Intraday)': df_cryptolist['regularMarketPrice.fmt'],
'Change': df_cryptolist['regularMarketChange.fmt'],
'% Change': df_cryptolist['regularMarketChangePercent.fmt'],
'Market Cap': df_cryptolist['marketCap.fmt'],
'Volume in Currency (Since 0:00 UTC)': df_cryptolist['regularMarketVolume.fmt'],
'Volume in Currency (24Hr)': df_cryptolist['volume24Hr.fmt'],
'Total Volume All Currencies (24Hr)': df_cryptolist['volumeAllCurrencies.fmt'],
'Circulating Supply': df_cryptolist['circulatingSupply.fmt']})
# # writing the dataset to csv
# df_cryptolist.to_csv(r"df_cryptolist.csv", index = False)
# =============================================================================
# getting a list from the table
# =============================================================================
cryptolist = []
def get_crypto_df():
# collect each ticker symbol from the first column of the Yahoo table
for crypto in df_cryptolist.iloc[:, 0]:
cryptolist.append(str(crypto))
return cryptolist
# ============================================================================
# Trying to create an error message
# ============================================================================
def please_choose_crypto():
global crypto_name
global insert
while True:
print('============================================================')
print('Top', len(df_cryptolist), 'Cryptocurrencies')
print('============================================================')
print(df_cryptolist[['Symbol','Name','Market Cap']].head(len(df_cryptolist)))
try:
insert = str(input('What cryptocurrency would you like to try out? Please select a symbol: ')).upper()
#found = df_cryptolist[df_cryptolist['Symbol'].str.contains(insert)]
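#fragile: recovers the short name by splitting pandas' printed Series
#layout; a df.loc lookup would be more robust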
crypto_name = str(df_cryptolist[df_cryptolist['Symbol'].str.contains(insert)].iloc[:,1]).split(' ')[4]
except ValueError:
print("Sorry, I didn't understand that.")
continue
if insert not in cryptolist:
print('Sorry. You did not select an available symbol or you misspelled the symbol')
else:
print('============================================================')
print('You have selected: ', insert)
df_new = df_cryptolist.copy()
df_new.set_index("Symbol", inplace=True)
df_new.head()
print('============================================================')
print(df_new.loc[insert])
print('============================================================')
break
# =============================================================================
# Collecting info from Yahoo Finance and creating a dataset for that cryptocurrency
# =============================================================================
def create_df(x):
# =============================================================================
# Creating a new dataset
# =============================================================================
global df
start = "2009-01-01"
end = dt.datetime.now()
short_sma = 50
long_sma = 200
# creating a dataset for selected cryptocurrency
df = yf.download(x, start, end,interval = '1d')
df = pd.DataFrame(df.dropna(), columns = ['Open', 'High','Low','Close', 'Adj Close', 'Volume'])
# Create short SMA on the closing price
df['short_SMA'] = df['Close'].rolling(window = short_sma).mean()
# Create long SMA on the closing price
df['long_SMA'] = df['Close'].rolling(window = long_sma).mean()
# Create daily_return
df['daily_return'] = df['Close'].pct_change(periods=1).mul(100)
# Create monthly_return
df['monthly_return'] = df['Close'].pct_change(periods=30).mul(100)
# Create annual_return
df['annual_return'] = df['Close'].pct_change(periods=365).mul(100)
df['Name'] = crypto_name
# print('============================================================')
# print(crypto_name, '- Full Dataset')
# print('------------------------------------------------------------')
# print(df.head())
# print('------------------------------------------------------------')
# print(crypto_name, 'Full Dataset - Column Names')
# print(df.columns)
print('============================================================')
# preparing data from time series analysis
# eliminating any NAs - in most cryptocurrencies there are 4 days missing
df.index = pd.to_datetime(df.index)
df = df.asfreq('D')
print(crypto_name)
print('NaN count per column:', df.isna().sum())
df = df.bfill()
print('NaN count per column after backfill:', df.isna().sum())
df = df.dropna()
# # write to csv
# df.to_csv(r"df.csv", index = True)
# =============================================================================
# Assigning the target variable
# =============================================================================
def create_y(x):
global y
y = pd.DataFrame(df['Close'], columns = ['Close'])
y.sort_index(inplace = True)
y['Name'] = crypto_name
# examining the pct_change
y['Close Percentage Change'] = y['Close'].pct_change(1)
# Creating a new variable, examining the difference for each observation
y['diff'] = y['Close'].diff()
# logging the target variable due to its large variance
y['log_Close'] = np.log(y['Close'])
# Creating a new variable, examining the difference for each observation
y['log_Close_diff'] = y['log_Close'].diff()
y['Logged Close Percentage Change'] = y['log_Close'].pct_change(1)
# dropping the first na (because there is no difference)
y = y.dropna()
# # write to csv
# y.to_csv(r"y.csv", index = True)
print('============================================================')
print(crypto_name, '- Target Variable')
print('------------------------------------------------------------')
print(y.head())
print('------------------------------------------------------------')
print('Column Names')
print(y.columns)
print('============================================================')
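# =============================================================================
# A minimal driver sketch tying the functions above together (run
# interactively; each call fills in the globals the next one reads):
#     get_yahoo_table()
#     get_crypto_df()
#     please_choose_crypto()
#     create_df(insert)
#     create_y(insert)
# =============================================================================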
# =============================================================================
# Creating a graph examining the price and moving averages
# =============================================================================
def price_sma_volume_chart():
fig = make_subplots(rows=2, cols=1, shared_xaxes=True, subplot_titles=[
'Price and Death Cross of {}'.format(str(crypto_name)),
'Volume of {}'.format(str(crypto_name))])
# Lineplots of price and moving averages
fig.add_trace(go.Scatter(
x = df.index,
y = df['Close'],
name = crypto_name,
mode='lines',
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>" ,
line = dict(color="black")), row = 1, col = 1)
fig.add_trace(go.Scatter(x = df.index,
y = df['short_SMA'],
name = 'Short SMA 50-Day',
mode = 'lines',
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Short (50-Day) Moving Average Price: %{y:$,.2f}<br>",
line = dict(color="red")), row = 1, col = 1)
fig.add_trace(go.Scatter(x = df.index,
y = df['long_SMA'],
name = 'Long SMA 200-Day',
mode = 'lines',
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Long (200-Day) Moving Average Price: %{y:$,.2f}<br>",
line = dict(color="green")), row = 1, col = 1)
# Barplot of volume
fig.add_trace(go.Bar(x = df.index,
y = df['Volume'],
name = 'Volume',
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Volume: %{y:,.}<br>" +
"<extra></extra>",
marker = dict(color="black", opacity = True)), row = 2, col = 1)
# Add titles
fig.update_layout(
title = 'Summary of {}'.format(str(crypto_name)),
title_font_size=30)
fig['layout']['yaxis1']['title']='US Dollars'
fig['layout']['yaxis2']['title']='Volume'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_yaxes(tickprefix = '$', tickformat = ',.', row = 1, col = 1)
    # hide the range-selector time buttons on the volume subplot
fig.update_xaxes(rangeselector= {'visible' :False}, row = 2, col = 1)
#Show
fig.show()
def candlestick_moving_average():
fig = go.Figure()
trace1 = go.Candlestick(
x = df.index,
open = df["Open"],
high = df["High"],
low = df["Low"],
close = df["Close"],
name = crypto_name)
data = [trace1]
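    # build one hidden SMA trace per window (5, 10, ..., 200 days); the slider
    # steps below toggle exactly one of them visible via plotly's 'restyle' method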
for i in range(5, 201, 5):
sma = go.Scatter(
x = df.index,
y = df["Close"].rolling(i).mean(), # Pandas SMA
name = "SMA" + str(i),
line = dict(color = "#3E86AB",width=3),
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Simple Moving Average Price: %{y:$,.2f}<br>",
opacity = 0.7,
visible = False,
)
data.append(sma)
sliders = dict(
# GENERAL
steps = [],
currentvalue = dict(
font = dict(size = 16),
prefix = "Simple Moving Average Step: ",
xanchor = "left",
),
x = 0,
y = 0,
len = 1,
pad = dict(t = 0, b = 0),
yanchor = "bottom",
xanchor = "left",
)
for i in range((200 // 5) + 1):
step = dict(
method = "restyle",
label = str(i * 5),
value = str(i * 5),
args = ["visible", [False] * ((200 // 5) + 1)],
)
step['args'][1][0] = True
step['args'][1][i] = True
sliders["steps"].append(step)
layout = dict(
title = 'Price of {}'.format(str(crypto_name)),
# ANIMATIONS
sliders = [sliders],
xaxis = dict(
rangeselector = dict(
activecolor = "#888888",
bgcolor = "#DDDDDD",
buttons = [
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD"),
]
),
),
yaxis = dict(
tickprefix = "$", tickformat = ',.',
type = "linear",
domain = [0.25, 1],
),
)
fig = go.Figure(data = data, layout = layout)
#
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_layout(showlegend=False)
#Show
fig.show()
# =============================================================================
# Analysing the Histogram and Boxplot for crypto
# =============================================================================
def create_hist_and_box(data):
fig = make_subplots(rows=2, cols=1, shared_xaxes=True,
subplot_titles=['Histogram of {} price'.format(crypto_name),
'Box plot of {} price'.format(crypto_name)],
x_title = 'US Dollars')
# 1.Histogram
fig.add_trace(go.Histogram(x = data, name = 'Histogram', nbinsx = round(len(data) / 20),
# customdata = df['Name'],
# hovertemplate="<b>%{customdata}</b>"
), row=1, col=1)
#2. Boxplot
fig.add_trace(go.Box(x = data, name = 'Boxplot',
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Closing Price: %{x:$,.2f}<br>"+
"<extra></extra>"), row=2, col=1)
fig.update_layout(title = 'Plots of {} price'.format(crypto_name))
fig.update_xaxes(tickprefix = '$', tickformat = ',.')
fig.show()
# creating graph for Close Percentage Change
def create_hist_and_box_pct_change():
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Histogram of {} 1-Day Close Percentage Change'.format(crypto_name),
'Box plot of {} 1-Day Close Percentage Change'.format(crypto_name)],
x_title = '1-Day Close Percentage Change')
# 1.Histogram
fig.add_trace(go.Histogram(x = y['Close Percentage Change'], name = 'Histogram', nbinsx = round(len(y) / 20),
), row=1, col=1)
#2. Boxplot
fig.add_trace(go.Box(x = y['Close Percentage Change'], name = 'Boxplot',
customdata = y['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"1-Day Percentage Change: %{x:.0%}<br>"+
"<extra></extra>"
), row=2, col=1)
fig.update_layout(title = 'Plots of 1-Day Close Percentage Change for {}'.format(crypto_name))
fig['layout']['yaxis1']['title'] = '# of Observations'
fig.update_xaxes(tickformat = '.0%', row = 1, col = 1)
fig.update_xaxes(tickformat = '.0%', row = 2, col = 1)
fig.update_layout(showlegend=False)
fig.show()
def logged_create_hist_and_box_pct_change():
fig = make_subplots(rows=2, cols=1,
subplot_titles=['Logged Closing Price - Histogram of {} 1-Day Close Percentage Change'.format(crypto_name),
'Logged Closing Price - Box plot of {} 1-Day Close Percentage Change'.format(crypto_name)],
                        x_title = 'Logged Price - 1-Day Close Percentage Change')
# 1.Histogram
    fig.add_trace(go.Histogram(x = y['Logged Close Percentage Change'], name = 'Histogram', nbinsx = round(len(y) / 20),
), row=1, col=1)
#2. Boxplot
fig.add_trace(go.Box(x = y['Logged Close Percentage Change'], name = 'Boxplot',
                         customdata = y['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"1-Day Percentage Change: %{x:.0%}<br>"+
"<extra></extra>"), row=2, col=1)
    fig.update_layout(title = 'Logged Closing Price - Plots of 1-Day Close Percentage Change for {}'.format(crypto_name))
fig['layout']['yaxis1']['title'] = '# of Observations'
fig.update_xaxes(tickformat = '.0%', row = 1, col = 1)
fig.update_xaxes(tickformat = '.0%', row = 2, col = 1)
fig.show()
# =============================================================================
# Creating a plot with analysis and rolling mean and standard deviation
# =============================================================================
def test_stationarity(timeseries):
    #Determining rolling statistics
rolmean = timeseries.rolling(window = 365).mean()
rolstd = timeseries.rolling(window = 365).std()
#Plot rolling statistics:
fig = go.Figure()
fig.add_trace(go.Scatter(x = timeseries.index,
y = timeseries,
name = 'Original',
mode='lines',
                  customdata = [crypto_name] * len(timeseries),  # timeseries may be a plain Series, so build customdata explicitly
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>" +
"<extra></extra>",
line = dict(color="blue")))
fig.add_trace(go.Scatter(x = timeseries.index,
y = rolmean,
name = 'Rolling Mean',
mode='lines',
                  customdata = [crypto_name] * len(timeseries),
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Rolling Mean Price: %{y:$,.2f}<br>" +
"<extra></extra>",
line = dict(color="red")))
    fig.add_trace(go.Scatter(x = timeseries.index,
y = rolstd,
name = 'Rolling Std',
mode='lines',
                  customdata = [crypto_name] * len(timeseries),
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Rolling Std: %{y:$,.2f}<br>" +
"<extra></extra>",
line = dict(color="black")))
# Add titles
fig.update_layout(
title = 'Rolling Mean & Standard Deviation of {}'.format(crypto_name),
yaxis_title = 'US Dollars',
yaxis_tickprefix = '$', yaxis_tickformat = ',.')
#Show
fig.show()
# =============================================================================
# Exploring the difference
# =============================================================================
# creating the plot to examine the difference
def diff_plot(data):
fig = go.Figure()
fig.add_trace(go.Scatter(x = data.index,
y = data,
name = str(crypto_name),
mode='lines',
                  customdata = [crypto_name] * len(data),  # data is a Series, so 'Name' is not available on it
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Price Volatility: %{y:$,.2f}<br>"+
"<extra></extra>"))
# Add titles
fig.update_layout(
title = 'Price of {}'.format(crypto_name),
yaxis_title = 'US Dollars',
yaxis_tickprefix = '$', yaxis_tickformat = ',.')
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
#Show
fig.show()
# =============================================================================
# Diff and volume plot
# =============================================================================
def create_diff_volume(data):
fig = make_subplots(rows=2, cols=1, shared_xaxes=False,
                        subplot_titles=['Difference of {} price'.format(crypto_name),
'Volume of {}'.format(crypto_name)])
# 1.Difference
fig.add_trace(go.Scatter(x = data.index,
y = data,
name = str(crypto_name),
mode='lines',
                  customdata = [crypto_name] * len(data),
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Price Volatility: %{y:$,.2f}<br>"+
"<extra></extra>"), row = 1, col =1)
#2. Volume
# Barplot of volume
fig.add_trace(go.Bar(x = df.index,
y = df['Volume'],
name = 'Volume',
# corrects hovertemplate labels!
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Volume: %{y:,.}<br>" +
"<extra></extra>",
marker = dict(color="black", opacity = True)), row = 2, col = 1)
# Add titles
fig.update_layout(
title = 'Price of {}'.format(str(crypto_name)))
fig['layout']['yaxis1']['title']='US Dollars'
fig['layout']['yaxis2']['title']='Volume'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_yaxes(tickprefix = '$', tickformat = ',.', row = 1, col = 1)
    # hide the range-selector time buttons on the volume subplot
fig.update_xaxes(rangeselector= {'visible' :False}, row = 2, col = 1)
#Show
fig.show()
# =============================================================================
# Difference and logged difference plots
# =============================================================================
def create_diff_log_diff():
fig = make_subplots(rows=2, cols=1, shared_xaxes=False,
subplot_titles=['Difference of Closing {} Price'.format(crypto_name),
'Logged Closing {} Price Difference'.format(crypto_name)])
# 1.Difference
fig.add_trace(go.Scatter(x = y.index,
y = y['diff'],
name = str(crypto_name),
mode='lines',
                  customdata = y['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Price Volatility: %{y:$,.2f}<br>"+
"<extra></extra>"), row = 1, col =1)
# 1.Difference of log
fig.add_trace(go.Scatter(x = y.index,
y = y['log_Close_diff'],
name = str(crypto_name),
mode='lines',
                  customdata = y['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Logged Price Difference: %{y:,.2f}<br>"+
"<extra></extra>"), row = 2, col =1)
# Add titles
fig.update_layout(
title = 'Price of {}'.format(str(crypto_name)))
fig['layout']['yaxis1']['title']='US Dollars'
    fig['layout']['yaxis2']['title']='Log Difference'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_yaxes(tickprefix = '$', tickformat = ',.', row = 1, col = 1)
fig.update_xaxes(rangeselector= {'visible':False}, row=2, col=1)
fig.update_xaxes(rangeslider= {'visible':False}, row=2, col=1)
fig.update_layout(showlegend=False)
fig.show()
# =============================================================================
# daily, monthly, annual returns
# =============================================================================
def returns():
fig = make_subplots(rows=4, cols=1, shared_xaxes=False, subplot_titles=[
'Closing Price of {}'.format(str(crypto_name)),
'Daily Return of {}'.format(str(crypto_name)),
'Monthly Return of {}'.format(str(crypto_name)),
'Annual Return of {}'.format(str(crypto_name))])
fig.add_trace(go.Scatter(
x = df.index,
y = df['Close'],
mode='lines',
customdata = df['Name'], name = 'Closing Price',
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>"+
"<extra></extra>"), row = 1, col = 1)
fig.add_trace(go.Scatter(
x = df.index,
y = df['daily_return'],
mode='lines',
customdata = df['Name'], name = 'Daily Return',
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Daily Return: %{y:,.0%}<br>"+
"<extra></extra>"), row = 2, col = 1)
fig.add_trace(go.Scatter(
x = df.index,
y = df['monthly_return'],
mode='lines',
customdata = df['Name'], name = 'Monthly Return',
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Monthly Return: %{y:,.0%}<br>"+
"<extra></extra>"), row = 3, col = 1)
fig.add_trace(go.Scatter(
x = df.index,
y = df['annual_return'],
mode='lines',
customdata = df['Name'], name = 'Annual Return',
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Annual Return: %{y:,.0%}<br>"+
"<extra></extra>"), row = 4, col = 1)
# Add titles
fig.update_layout(
title = 'Price of {}'.format(str(crypto_name)))
fig['layout']['yaxis1']['title']='US Dollars'
fig['layout']['yaxis2']['title']='% Return'
fig['layout']['yaxis3']['title']='% Return'
fig['layout']['yaxis4']['title']='% Return'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible=False)
fig.update_xaxes(rangeslider= {'visible':False}, row=2, col=1)
fig.update_xaxes(rangeslider= {'visible':False}, row=3, col=1)
fig.update_xaxes(rangeslider= {'visible':False}, row=4, col=1)
fig.update_xaxes(rangeselector= {'visible':False}, row=2, col=1)
fig.update_xaxes(rangeselector= {'visible':False}, row=3, col=1)
fig.update_xaxes(rangeselector= {'visible':False}, row=4, col=1)
fig.update_yaxes(tickprefix = '$', tickformat = ',.', row = 1, col = 1)
fig.update_yaxes(tickformat = ',.0%', row = 2, col = 1)
fig.update_yaxes(tickformat = ',.0%', row = 3, col = 1)
fig.update_yaxes(tickformat = ',.0%', row = 4, col = 1)
fig.update_layout(showlegend=False)
#Show
fig.show()
# =============================================================================
# Splitting the data in Training and Test Data
# =============================================================================
def create_train_and_test():
global df_train
global df_test
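    # chronological split - time-series data must not be shuffled, so the test
    # set is the most recent 20% of observations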
# Train data - 80%
df_train = y[:int(0.80*(len(y)))]
print('============================================================')
print('{} Training Set'.format(crypto_name))
print('============================================================')
print(df_train.head())
print('Training set has {} rows and {} columns.'.format(*df_train.shape))
# Test data - 20%
df_test = y[int(0.80*(len(y))):]
print('============================================================')
print('{} Test Set'.format(crypto_name))
print('============================================================')
print(df_test.head())
print('Test set has {} rows and {} columns.'.format(*df_test.shape))
def training_and_test_plot():
# creating a plotly graph for training and test set
trace1 = go.Scatter(
x = df_train.index,
y = df_train['Close'],
customdata = df_train['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>"+
"<extra></extra>",
name = 'Training Set')
trace2 = go.Scatter(
x = df_test.index,
y = df_test['Close'],
name = 'Test Set',
customdata = df_test['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>"+
"<extra></extra>",
yaxis="y1")
data = [trace1, trace2]
fig = go.Figure(data = data)
fig.update_layout({'title': {'text':'Training and Test Set Plot'}},
yaxis_tickprefix = '$', yaxis_tickformat = ',.')
fig.show()
# =============================================================================
# creating important functions for Time Series Analysis
# =============================================================================
def normalise():
# Select first prices
first_price = df['Close'].iloc[0]
# Create normalized
normalized = df['Close'].div(first_price)
# Plot normalized
normalized.plot()
plt.show()
# Dickey Fuller Test
def adfuller_test(data):
dftest = adfuller(data)
dfoutput = pd.Series(dftest[0:4], index=['Test Statistic','p-value','#Lags Used','Number of Observations Used'])
for key,value in dftest[4].items():
dfoutput['Critical Value (%s)'%key] = value
print('============================================================')
print('Results of Dickey-Fuller Test for {}:'.format(crypto_name))
print('============================================================')
print (dfoutput)
if dftest[1]>0.05:
print('Conclude not stationary')
else:
print('Conclude stationary')
def adfuller_test_for_Django(data, crypto_name):
dftest = adfuller(data)
dfoutput = pd.Series(dftest[0:4], index=['Test Statistic','p-value','#Lags Used','Number of Observations Used'])
for key,value in dftest[4].items():
dfoutput['Critical Value (%s)'%key] = value
dfoutput = pd.DataFrame(dfoutput)
dfoutput = dfoutput.reset_index()
dfoutput = dfoutput.rename(columns={'index': crypto_name, '0': 0})
dfoutput1 = pd.DataFrame([['Stationary', np.where(dftest[1]>0.05, 'Conclude not stationary', 'Conclude stationary')]], columns=[crypto_name, 0])
dfoutput = pd.concat([dfoutput,dfoutput1], sort=False).reset_index(drop=True)
print(dfoutput)
# KPSS Test
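# note: KPSS and ADF test opposite null hypotheses - the KPSS null is stationarity,
# the ADF null is a unit root - so the two tests complement each other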
def KPSS_test(data):
result = kpss(data.values, regression='c', lags='auto')
print('============================================================')
print('Results of KPSS Test for {}:'.format(crypto_name))
print('============================================================')
print('\nKPSS Statistic: %f' % result[0])
print('p-value: %f' % result[1])
    print('Critical Values:')
    for key, value in result[3].items():
        print(f'   {key}: {value}')
# seasonal decomposition
def simple_seasonal_decompose(data,number):
rcParams['figure.figsize'] = 10, 8
decomposition = seasonal_decompose(data, model='additive', period=number)
decomposition.plot()
plt.show()
def acf_and_pacf_plots(data):
sns.set_style('darkgrid')
# fig, (ax1, ax2,ax3) = plt.subplots(3,1, figsize = (8,15)) # graphs in a column
fig, (ax1, ax2,ax3) = plt.subplots(1,3, figsize = (20,5)) # graphs in a row
fig.suptitle('ACF and PACF plots of Logged Closing Price Difference for {}'.format(crypto_name), fontsize=16)
ax1.plot(data)
ax1.set_title('Original')
plot_acf(data, lags=40, ax=ax2);
plot_pacf(data, lags=40, ax=ax3);
def rolling_mean_std(timeseries, freq):
    #Determining rolling statistics
rolmean = timeseries.rolling(window=freq).mean()
rolstd = timeseries.rolling(window=freq).std()
#Plot rolling statistics:
orig = plt.plot(timeseries, color='blue',label='Original')
mean = plt.plot(rolmean, color='red', label='Rolling Mean')
std = plt.plot(rolstd, color='black', label = 'Rolling Std')
plt.legend(loc='best')
plt.title('Rolling Mean & Standard Deviation')
plt.show()
# =============================================================================
# Monthly Data - 2511 observations to 82 - Not good
# =============================================================================
## RESAMPLING DATA INTO MONTHLY
#monthly_y = y.copy()
#monthly_y.resample('M').mean().head()
#monthly_y = monthly_y.asfreq('M')
##monthly_y.resample('M').median().head()
#
#
## DIFF - STATIONARY
#simple_seasonal_decompose(monthly_y['diff'], 12)
#acf_and_pacf_plots(monthly_y['diff'])
#KPSS_test(monthly_y['diff'])
#adfuller_test(monthly_y['diff'])
#rolling_mean_std(monthly_y['diff'], 365)
#
#
## LOGGED CLOSE DIFF - STATIONARY
#simple_seasonal_decompose(monthly_y['log_Close_diff'], 12)
#acf_and_pacf_plots(monthly_y['log_Close_diff'])
#KPSS_test(monthly_y['log_Close_diff'])
#adfuller_test(monthly_y['log_Close_diff'])
#rolling_mean_std(monthly_y['log_Close_diff'], 365)
## =============================================================================
## Boxplots of Returns with PLOTLY
## =============================================================================
#def box_year():
# fig = go.Figure()
#
#
# fig.add_trace(go.Box(x = df.index.year, y = df['daily_return'],
# customdata = df['Name'],
# hovertemplate="<b>%{customdata}</b><br><br>" +
# "Date: %{x|%d %b %Y} <br>" +
# "Daily Return: %{y:.0%}<br>"+
# "<extra></extra>"))
#
# fig.update_layout(
# title = 'Daily Returns of {}'.format(crypto_name),
# yaxis_title = '% Change',
# yaxis_tickformat = ',.0%')
# fig.show()
#
#box_year()
# =============================================================================
# Decomposition with PLOTLY PACKAGE!
# =============================================================================
def decomposition(data, period):
decomposition = sm.tsa.seasonal_decompose(data, period=period)
#seasonality
decomp_seasonal = decomposition.seasonal
#trend
decomp_trend = decomposition.trend
#residual
decomp_resid = decomposition.resid
fig = make_subplots(rows=4, cols=1, shared_xaxes=True, subplot_titles=[
'Price of {}'.format(str(crypto_name)),
'Trend values of {}'.format(str(crypto_name)),
'Seasonal values of {}'.format(str(crypto_name)),
'Residual values of {}'.format(str(crypto_name))])
fig.add_trace(go.Scatter(x = df.index,
y = data,
name = crypto_name,
mode='lines'),row = 1, col = 1)
fig.add_trace(go.Scatter(x = df.index,
y = decomp_trend,
name = 'Trend',
mode='lines'),row = 2, col = 1)
fig.add_trace(go.Scatter(x = df.index,
y = decomp_seasonal,
name = 'Seasonality',
mode='lines'),row = 3, col = 1)
fig.add_trace(go.Scatter(x = df.index,
y = decomp_resid,
name = 'Residual',
mode='lines'),row = 4, col = 1)
# Add titles
fig.update_layout(
title = 'Decomposition of {} for {} days'.format(str(crypto_name),period))
fig['layout']['yaxis1']['title']='US Dollars'
fig['layout']['yaxis2']['title']='Trend'
fig['layout']['yaxis3']['title']='Seasonality'
fig['layout']['yaxis4']['title']='Residual'
fig.update_layout(showlegend=False)
fig.show()
# =============================================================================
# Predict Closing Price using FBProphet
# =============================================================================
def predict_prophet():
global df_forecast
global crypto
global df_prophet
crypto = df_train[['Close', 'Name']]
crypto = crypto.reset_index()
crypto = crypto.rename(columns={'Date': 'ds', 'Close': 'y'})
df_prophet = Prophet(changepoint_prior_scale=0.15,yearly_seasonality=True,daily_seasonality=True)
df_prophet.fit(crypto)
df_forecast = df_prophet.make_future_dataframe(periods= len(df_test), freq='D')
df_forecast = df_prophet.predict(df_forecast)
    df_forecast['Name'] = crypto['Name']
    # future rows extend past the training index, so their 'Name' is NaN; fill it
    df_forecast['Name'] = df_forecast['Name'].replace(np.nan, crypto_name)
def predict_prophet_components():
df_prophet.plot_components(df_forecast)
def predict_prophet_plotly():
crypto = df_train[['Close', 'Name']]
crypto = crypto.reset_index()
crypto = crypto.rename(columns={'Date': 'ds', 'Close': 'y'})
df_prophet = Prophet(changepoint_prior_scale=0.15,yearly_seasonality=True,daily_seasonality=True)
df_prophet.fit(crypto)
df_forecast = df_prophet.make_future_dataframe(periods= len(df_test), freq='D')
df_forecast = df_prophet.predict(df_forecast)
df_forecast['Name'] = df_test['Name']
df_forecast['Name'] = df_forecast['Name'].replace(np.nan, crypto_name)
return df_forecast
def prophet_prediction_plot(request, df_forecast, df_train, df_test, crypto_name):
df_train = go.Scatter(
x = df_train.index,
y = df_train['Close'],
customdata = df_train['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>",
name = 'Training Set')
df_test = go.Scatter(
x = df_test.index,
y = df_test['Close'],
name = 'Test Set',
customdata = df_test['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>",
yaxis="y1")
trend = go.Scatter(
name = 'Trend',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Trend: %{y:$,.2f}<br>",
marker=dict(color='red', line=dict(width=3))
)
upper_band = go.Scatter(
name = 'Upper Band',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat_upper']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Upper Band: %{y:$,.2f}<br>",
line= dict(color='#57b88f'),
fill = 'tonexty'
)
lower_band = go.Scatter(
name= 'Lower Band',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat_lower']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Lower Band: %{y:$,.2f}<br>",
line= dict(color='#57b88f')
)
data = [df_train, df_test, trend, lower_band, upper_band]
layout = dict(title='Predicting Closing Price of {} Using FbProphet'.format(crypto_name),
xaxis=dict(title = 'Dates', ticklen=2, zeroline=True))
fig = go.Figure(data = data, layout=layout)
# fig['layout']['yaxis1']['title']='US Dollars'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_yaxes(tickprefix = '$', tickformat = ',.')
fig.update_layout(showlegend=False)
fig.show()
# =============================================================================
# Forecasting Price with Prophet
# =============================================================================
from fbprophet import Prophet
def forecast_prophet():
global df_forecast
global crypto
global df_prophet
crypto = df[['Close', 'Name']]
crypto = crypto.reset_index()
crypto = crypto.rename(columns={'Date': 'ds', 'Close': 'y'})
df_prophet = Prophet(changepoint_prior_scale=0.15,yearly_seasonality=True,daily_seasonality=True)
df_prophet.fit(crypto)
estimated_days=91
df_forecast = df_prophet.make_future_dataframe(periods= estimated_days*2, freq='D')
df_forecast = df_prophet.predict(df_forecast)
    df_forecast['Name'] = crypto['Name']
    # future rows extend past the training index, so their 'Name' is NaN; fill it
    df_forecast['Name'] = df_forecast['Name'].replace(np.nan, crypto_name)
def forecast_prophet_components():
df_prophet.plot_components(df_forecast)
def forecast_prophet_plotly():
crypto = df[['Close', 'Name']]
crypto = crypto.reset_index()
crypto = crypto.rename(columns={'Date': 'ds', 'Close': 'y'})
df_prophet = Prophet(changepoint_prior_scale=0.15,yearly_seasonality=True,daily_seasonality=True)
df_prophet.fit(crypto)
df_forecast = df_prophet.make_future_dataframe(periods= 120, freq='D')
df_forecast = df_prophet.predict(df_forecast)
df_forecast['Name'] = crypto['Name']
df_forecast['Name'] = df_forecast['Name'].replace(np.nan, crypto_name)
actual = go.Scatter(
x = df.index,
y = df['Close'],
customdata = df['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Closing Price: %{y:$,.2f}<br>",
name = 'Actual Price',
marker = dict(line = dict(width=1))
)
trend = go.Scatter(
name = 'Trend',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Trend: %{y:$,.2f}<br>",
marker=dict(color='red', line=dict(width=3))
)
upper_band = go.Scatter(
name = 'Upper Band',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat_upper']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Upper Band: %{y:$,.2f}<br>",
line= dict(color='#57b88f'),
fill = 'tonexty'
)
lower_band = go.Scatter(
name= 'Lower Band',
mode = 'lines',
x = list(df_forecast['ds']),
y = list(df_forecast['yhat_lower']),
customdata = df_forecast['Name'],
hovertemplate="<b>%{customdata}</b><br><br>" +
"Date: %{x|%d %b %Y} <br>" +
"Lower Band: %{y:$,.2f}<br>",
line= dict(color='#57b88f')
)
data = [trend, lower_band, upper_band, actual]
layout = dict(title='Forecasting Closing Price of {} Using FbProphet'.format(crypto_name),
xaxis=dict(title = 'Dates', ticklen=2, zeroline=True))
fig = go.Figure(data = data, layout=layout)
# fig['layout']['yaxis1']['title']='US Dollars'
# X-Axes
fig.update_xaxes(
rangeslider_visible = True,
rangeselector = dict(
buttons = list([
dict(count = 7, step = "day", stepmode = "backward", label = "1W"),
dict(count = 1, step = "month", stepmode = "backward", label = "1M"),
dict(count = 3, step = "month", stepmode = "backward", label = "3M"),
dict(count = 6, step = "month", stepmode = "backward", label = "6M"),
dict(count = 1, step = "year", stepmode = "backward", label = "1Y"),
dict(count = 2, step = "year", stepmode = "backward", label = "2Y"),
dict(count = 5, step = "year", stepmode = "backward", label = "5Y"),
dict(count = 1, step = "all", stepmode = "backward", label = "MAX"),
dict(count = 1, step = "year", stepmode = "todate", label = "YTD")])))
fig.update_layout(xaxis_rangeslider_visible = False)
fig.update_yaxes(tickprefix = '$', tickformat = ',.')
fig.update_layout(showlegend=False)
fig.show()
# =============================================================================
# Getting the Yahoo Table with Beautiful Soup
# =============================================================================
get_yahoo_table()
# =============================================================================
# creating a list from the crypto-table
# =============================================================================
get_crypto_df()
# ============================================================================
# Asking the user for an input
# ============================================================================
please_choose_crypto()
from HDIP_Project_Functions import crypto_name, insert
# =============================================================================
# Collecting info from Yahoo Finance and creating a dataset for that cryptocurrency
# =============================================================================
create_df(insert)
create_y(insert)
from HDIP_Project_Functions import *
# =============================================================================
# Creating a graph examining the price and moving averages
# =============================================================================
price_sma_volume_chart()
candlestick_moving_average()
# =============================================================================
# Analysing the Histogram and Boxplot for crypto
# =============================================================================
create_hist_and_box_pct_change()
logged_create_hist_and_box_pct_change()
# =============================================================================
# Creating a plot with analysis and rolling mean and standard deviation
# =============================================================================
test_stationarity(df['Close'])
test_stationarity(y['Close Percentage Change'])
# =============================================================================
# Splitting the data in Training and Test Data
# =============================================================================
# splitting the data
create_train_and_test()
from HDIP_Project_Functions import *
# creating a plot for the training and test set
training_and_test_plot()
# =============================================================================
# Examining the difference and volume
# =============================================================================
create_diff_volume(y['diff'])
create_diff_log_diff()
# =============================================================================
# Examining CLOSE
# =============================================================================
simple_seasonal_decompose(y['Close'], 365)
acf_and_pacf_plots(y['Close'])
KPSS_test(y['Close'])
adfuller_test(y['Close'])
rolling_mean_std(y['Close'], 365)
# =============================================================================
# Examining LOG CLOSE
# =============================================================================
simple_seasonal_decompose(y['log_Close'], 365)
acf_and_pacf_plots(y['log_Close'])
KPSS_test(y['log_Close'])
adfuller_test(y['log_Close'])
rolling_mean_std(y['log_Close'], 365)
# =============================================================================
# Examining DIFF - STATIONARY
# =============================================================================
simple_seasonal_decompose(y['diff'], 365)
acf_and_pacf_plots(y['diff'])
KPSS_test(y['diff'])
adfuller_test(y['diff'])
rolling_mean_std(y['diff'], 365)
# =============================================================================
# Examining LOG CLOSE DIFF - STATIONARY
# =============================================================================
simple_seasonal_decompose(y['log_Close_diff'], 365)
acf_and_pacf_plots(y['log_Close_diff'])
KPSS_test(y['log_Close_diff'])
adfuller_test(y['log_Close_diff'])
rolling_mean_std(y['log_Close_diff'], 365)
# =============================================================================
# Plotly
# =============================================================================
decomposition(df['Close'], 365)
# =============================================================================
# Predicting and Forecasting the Closing Price with FBProphet
# =============================================================================
# predicting price using FBProphet
predict_prophet()
predict_prophet_components()
predict_prophet_plotly()
#Forecasting price using FBProphet
forecast_prophet()
forecast_prophet_components()
forecast_prophet_plotly()
|
__author__ = "Narwhale"
class ListNode:
def __init__(self,elem):
self.elem = elem
self.next = None
class Solution(object):
def __init__(self,node=None):
self.__head = node
def is_empty(self):
return self.__head == None
def append(self,item):
node = ListNode(item)
if self.is_empty():
self.__head = node
else:
cur = self.__head
while cur.next is not None:
cur = cur.next
cur.next = node
def travel(self):
"""遍历整个链表"""
cur = self.__head
while cur != None:
print(cur.elem,end=" ")
cur = cur.next
def length(self):
"""求长度"""
cur = self.__head
count = 0
while cur is not None:
cur = cur.next
count += 1
return count
# def cross_node(link1,link2):
# """求交叉的节点"""
# length1 = link1.length()
# length2 = link2.length()
# print(link1)
# print(link2)
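# A hedged sketch of the commented-out cross_node above: l1 and l2 never share
# actual node objects, so this assumes "cross node" means the first element
# value present in both lists. Both lists are appended in ascending order,
# which allows an O(n + m) two-pointer walk; the name-mangled attribute
# (_Solution__head) is used because the head is private to Solution.
def cross_node(link1, link2):
    """Return the first node of link1 whose value also appears in link2."""
    cur1 = link1._Solution__head
    cur2 = link2._Solution__head
    while cur1 is not None and cur2 is not None:
        if cur1.elem == cur2.elem:
            return cur1
        if cur1.elem < cur2.elem:
            cur1 = cur1.next
        else:
            cur2 = cur2.next
    return None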
if __name__ == "__main__":
l1 = Solution()
l1.append(1)
l1.append(3)
l1.append(5)
l1.append(7)
l1.append(9)
l1.append(12)
l1.append(13)
l1.travel()
print('\n')
l2 = Solution()
l2.append(2)
l2.append(4)
l2.append(6)
l2.append(8)
l2.append(10)
l2.append(11)
l2.append(12)
l2.append(13)
l2.travel()
print('\n')
    c = cross_node(l1, l2)
    print(c.elem if c is not None else None)
|
#pypharm by Danny Limoges, PharmD
# A construct for managing data for: fill claims, fills, prescriptions, drug data, insurance data and more.
# The goal is to develop easy-to-use interfaces for pharmacy-related APIs
|
"""empty message
Revision ID: 5d4aee209354
Revises: 99cd2a081a3c
Create Date: 2018-11-16 15:15:44.885014
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5d4aee209354'
down_revision = '99cd2a081a3c'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
op.alter_column('dogma_attributes', 'value',
existing_type=sa.Integer(),
type_=sa.Float(),
existing_nullable=True)
# ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
op.alter_column('dogma_attributes', 'value',
existing_type=sa.Float(),
type_=sa.Integer(),
existing_nullable=True)
# ### end Alembic commands ###
|
# -*- coding: utf-8 -*-
# sums the multiples of 5 up to and including a
a = int(input())
i = 1
total = 0
while i <= a:
    if i % 5 == 0:
        total = total + i
    i = i + 1
print(total)
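# equivalent closed form, shown as a hedged alternative: the multiples of 5 up
# to a are 5, 10, ..., 5*k with k = a // 5, so their total is
#   5 * k * (k + 1) // 2
# e.g. a = 17 gives k = 3 and 5 * 3 * 4 // 2 = 30 = 5 + 10 + 15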
|
from selenium.webdriver.common.by import By
# for maintainability we can seperate web objects by page name
class FormPageLocators(object):
FormUrl = 'https://www.seleniumeasy.com/test/basic-first-form-demo.html'
NumberField1 = (By.XPATH, '//*[@id="sum1"]')
NumberField2 = (By.XPATH, '//*[@id="sum2"]')
TotalButton = (By.XPATH, '//*[@id="gettotal"]/button')
class InputFormLocators(object):
InputFormUrl = 'https://www.seleniumeasy.com/test/input-form-demo.html'
first_name = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[1]/div/div/input')
last_name = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[2]/div/div/input')
email = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[3]/div/div/input')
phone_no = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[4]/div/div/input')
address = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[5]/div/div/input')
city = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[6]/div/div/input')
state = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[7]/div/div/select')
zip_code = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[8]/div/div/input')
website_domain_name = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[9]/div/div/input')
radio_button = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[10]/div/div[2]/label/input')
description = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[11]/div/div/textarea')
valid = (By.XPATH, '//*[@id="contact_form"]/fieldset/div[1]/div/i')
invalid = (By.XPATH, '//*[@id="contact_form"]/fieldset/div/div/small')
class TableSearchLocators(object):
TableSearchUrl = 'https://www.seleniumeasy.com/test/table-search-filter-demo.html'
input_filter_field = (By.XPATH, '//*[@id="task-table-filter"]')
input_data_field = (By.XPATH, '/html/body/div[2]/div/div[2]/div[2]/div/table/thead/tr/th[1]/input')
filter_activate_button = (By.XPATH, '/html/body/div[2]/div/div[2]/div[2]/div/div/div/button')
###########################
class TablePaginationLocators(object):
TablePaginationUrl = 'https://www.seleniumeasy.com/test/table-pagination-demo.html'
rows = (By.XPATH, '//*[@id="myTable"]/tr')
page_link1 = (By.XPATH, '//*[@id="myPager"]/li[2]/a')
page_link2 = (By.XPATH, '//*[@id="myPager"]/li[3]/a')
page_link3 = (By.XPATH, '//*[@id="myPager"]/li[4]/a')
prev_link = (By.XPATH, '//*[@id="myPager"]/li[1]/a')
next_link = (By.XPATH, '//*[@id="myPager"]/li[5]/a')
class ProgressBarsLocators(object):
JqueryProgressUrl = 'https://www.seleniumeasy.com/test/jquery-download-progress-bar-demo.html'
BootstrapProgressUrl = 'https://www.seleniumeasy.com/test/bootstrap-download-progress-demo.html'
download_button = (By.XPATH, '//*[@id="downloadButton"]')
progress_label = (By.XPATH, '//*[@id="dialog"]/div[1]')
circle_button = (By.XPATH, '//*[@id="cricle-btn"]')
percent_field = (By.XPATH, '//*[@id="circle"]/div/div[1]')
class AlertsLocators(object):
AlertsUrl = 'https://www.seleniumeasy.com/test/bootstrap-alert-messages-demo.html'
autoclosable_btn_success = (By.XPATH, '//*[@id="autoclosable-btn-success"]')
normal_btn_success = (By.XPATH, '//*[@id="normal-btn-success"]')
autoclosable_btn_warning = (By.XPATH, '//*[@id="autoclosable-btn-warning"]')
normal_btn_warning = (By.XPATH, '//*[@id="normal-btn-warning"]')
autoclosable_btn_danger = (By.XPATH, '//*[@id="autoclosable-btn-danger"]')
normal_btn_danger = (By.XPATH, '//*[@id="normal-btn-danger"]')
autoclosable_btn_info = (By.XPATH, '//*[@id="autoclosable-btn-info"]')
normal_btn_info = (By.XPATH, '//*[@id="normal-btn-info"]')
autoclosable_alert_success = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[1]')
autoclosable_alert_warning = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[3]')
autoclosable_alert_danger = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[5]')
autoclosable_alert_info = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[7]')
normal_alert_success = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[2]')
normal_alert_warning = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[4]')
normal_alert_danger = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[6]')
normal_alert_info = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div[8]')
close_buttons_group = (By.XPATH, '/html/body/div[2]/div/div[2]/div/div[2]/div/button')
class ModalsLocators(object):
ModalUrl = 'https://www.seleniumeasy.com/test/bootstrap-modal-demo.html'
# single modal
launch_modal_button = (By.XPATH, '/html/body/div[2]/div/div[2]/div[1]/div/div/div[2]/a')
single_modal_title = (By.XPATH, '//*[@id="myModal0"]/div/div/div[1]/h4')
save_changes_button = (By.XPATH, '//*[@id="myModal0"]/div/div/div[4]/a[2]')
# multi modals
launch_multi_modal_button1 = (By.XPATH, '/html/body/div[2]/div/div[2]/div[2]/div/div/div[2]/a')
launch_multi_modal_button2 = (By.XPATH, '//*[@id="myModal"]/div/div/div[3]/a')
save_changes_modal2_button = (By.XPATH, '//*[@id="myModal2"]/div/div/div[6]/a[2]')
close_modal2_button = (By.XPATH, '//*[@id="myModal2"]/div/div/div[6]/a[1]')
save_changes_modal1_button = (By.XPATH, '//*[@id="myModal"]/div/div/div[4]/a[2]')
first_modal_title = (By.XPATH, '//*[@id="myModal"]/div/div/div[1]/h4')
second_modal_title = (By.XPATH, '//*[@id="myModal2"]/div/div/div[1]/h4')
class ListBoxLocators(object):
ListBoxUrl = 'https://www.seleniumeasy.com/test/jquery-dual-list-box-demo.html'
list1_selections = (By.XPATH, '//*[@id="pickList"]/div/div[1]/select')
list2_selections = (By.XPATH, '//*[@id="pickList"]/div/div[3]/select')
add_button = (By.XPATH, '//*[@id="pickList"]/div/div[2]/button[1]')
removeAll_button = (By.XPATH, '//*[@id="pickList"]/div/div[2]/button[4]')
addAll_button = (By.XPATH, '//*[@id="pickList"]/div/div[2]/button[2]')
remove_button = (By.XPATH, '//*[@id="pickList"]/div/div[2]/button[3]')
class ListFilterLocators(object):
ListFilterUrl = 'https://www.seleniumeasy.com/test/data-list-filter-demo.html'
input_search = (By.XPATH, '//*[@id="input-search"]')
class LoadingDataLocators(object):
LoadDataUrl = 'https://www.seleniumeasy.com/test/dynamic-data-loading-demo.html'
get_user_button = (By.XPATH, '//*[@id="save"]')
loading_image = (By.XPATH, '//*[@id="loading"]/img')
class DragDropLocators(object):
DragDropUrl = 'https://www.seleniumeasy.com/test/drag-and-drop-demo.html'
item_to_drag = (By.XPATH, '//*[@id="todrag"]/span[1]')
target = (By.XPATH, '//*[@id="mydropzone"]')
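# A minimal usage sketch (assumes selenium and a local chromedriver are
# available; the interaction itself is hypothetical): each locator is a
# (By, selector) tuple, so it unpacks directly into find_element with *.
if __name__ == '__main__':
    from selenium import webdriver
    driver = webdriver.Chrome()
    driver.get(FormPageLocators.FormUrl)
    driver.find_element(*FormPageLocators.NumberField1).send_keys('5')
    driver.quit()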
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
import ijson.backends.python as ijson
from pants.backend.go.util_rules import go_mod
from pants.backend.go.util_rules.cgo import CGoCompilerFlags
from pants.backend.go.util_rules.sdk import GoSdkProcess
from pants.engine.internals.selectors import Get
from pants.engine.process import ProcessResult
from pants.engine.rules import collect_rules, rule
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class GoStdLibPackage:
name: str
import_path: str
pkg_source_path: str
imports: tuple[str, ...]
import_map: FrozenDict[str, str]
# Analysis for when Pants is able to compile the SDK directly.
go_files: tuple[str, ...]
cgo_files: tuple[str, ...]
c_files: tuple[str, ...]
cxx_files: tuple[str, ...]
m_files: tuple[str, ...]
h_files: tuple[str, ...]
f_files: tuple[str, ...]
s_files: tuple[str, ...]
syso_files: tuple[str, ...]
cgo_flags: CGoCompilerFlags
# Embed configuration.
#
    # Note: `EmbedConfig` is not resolved here to avoid issues with trying to build the embed analyzer.
# The `EmbedConfig` will be resolved in `build_pkg_target.py` rules.
embed_patterns: tuple[str, ...]
embed_files: tuple[str, ...]
class GoStdLibPackages(FrozenDict[str, GoStdLibPackage]):
"""A mapping of standard library import paths to an analysis of the package at that import
path."""
@dataclass(frozen=True)
class GoStdLibPackagesRequest:
with_race_detector: bool
cgo_enabled: bool = True
@rule(desc="Analyze Go standard library packages.", level=LogLevel.DEBUG)
async def analyze_go_stdlib_packages(request: GoStdLibPackagesRequest) -> GoStdLibPackages:
maybe_race_arg = ["-race"] if request.with_race_detector else []
list_result = await Get(
ProcessResult,
GoSdkProcess(
# "-find" skips determining dependencies and imports for each package.
command=("list", *maybe_race_arg, "-json", "std"),
env={"CGO_ENABLED": "1" if request.cgo_enabled else "0"},
description="Ask Go for its available import paths",
),
)
stdlib_packages = {}
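    # `go list -json` emits a stream of concatenated JSON objects (one per
    # package), which is why ijson is invoked with multiple_values=True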
for pkg_json in ijson.items(list_result.stdout, "", multiple_values=True):
import_path = pkg_json.get("ImportPath")
pkg_source_path = pkg_json.get("Dir")
if not import_path or not pkg_source_path:
continue
stdlib_packages[import_path] = GoStdLibPackage(
name=pkg_json.get("Name"),
import_path=import_path,
pkg_source_path=pkg_source_path,
imports=tuple(pkg_json.get("Imports", ())),
import_map=FrozenDict(pkg_json.get("ImportMap", {})),
go_files=tuple(pkg_json.get("GoFiles", ())),
cgo_files=tuple(pkg_json.get("CgoFiles", ())),
c_files=tuple(pkg_json.get("CFiles", ())),
cxx_files=tuple(pkg_json.get("CXXFiles", ())),
m_files=tuple(pkg_json.get("MFiles", ())),
h_files=tuple(pkg_json.get("HFiles", ())),
f_files=tuple(pkg_json.get("FFiles", ())),
s_files=tuple(pkg_json.get("SFiles", ())),
syso_files=tuple(pkg_json.get("SysoFiles", ())),
cgo_flags=CGoCompilerFlags(
cflags=tuple(pkg_json.get("CgoCFLAGS", [])),
cppflags=tuple(pkg_json.get("CgoCPPFLAGS", [])),
cxxflags=tuple(pkg_json.get("CgoCXXFLAGS", [])),
fflags=tuple(pkg_json.get("CgoFFLAGS", [])),
ldflags=tuple(pkg_json.get("CgoLDFLAGS", [])),
pkg_config=tuple(pkg_json.get("CgoPkgConfig", [])),
),
embed_patterns=tuple(pkg_json.get("EmbedPatterns", [])),
embed_files=tuple(pkg_json.get("EmbedFiles", [])),
)
return GoStdLibPackages(stdlib_packages)
def rules():
return (
*collect_rules(),
*go_mod.rules(),
)
|
import cv2
import matplotlib.pyplot as plt
img = cv2.imread('bird.png')
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# Lower/upper thresholds for hysteresis: gradients between them are kept only if
# connected to a strong edge. L2gradient=False uses the L1 norm |dx|+|dy|;
# L2gradient=True uses the more accurate L2 norm sqrt(dx^2 + dy^2)
L1 = cv2.Canny(img, 150, 200, L2gradient=False)
L2 = cv2.Canny(img, 150, 200, L2gradient=True)
titles = ['Original Image', 'L1 Norm', 'L2 Norm']
outputs = [img, L1, L2]
for i in range(3):
plt.subplot(1, 3, i + 1)
plt.imshow(outputs[i], cmap='gray')
plt.title(titles[i])
plt.xticks([])
plt.yticks([])
plt.show()
def cameraedges(size):
cap = cv2.VideoCapture(0)
while True:
_, frame = cap.read()
edges = cv2.Canny(frame, size[0], size[1])
cv2.imshow('OG', frame)
cv2.imshow('Edges', edges)
if cv2.waitKey(1) == 27: # ESC to exit
break
cv2.destroyAllWindows()
cap.release()
cameraedges([100, 60])
|
from django.apps import AppConfig
class GtinConfig(AppConfig):
name = 'gtin'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==================================================
# @Time : 2019-06-20 14:40
# @Author : ryuchen
# @File : celery_app.py
# @Desc :
# ==================================================
from celery.bin import worker
from apps.jobs.celery import app
from lib.base.Application import Application
class CeleryApplication(Application):
def __init__(self):
super(CeleryApplication, self).__init__()
def run(self) -> None:
application = worker.worker(app=app)
options = {
'loglevel': 'INFO',
'traceback': True,
}
application.run(**options)
|
from pyTFM.data_analysis import *
# reading the Wildtype data set. Use your own output text file here
# your file may be called out0.txt or similar
file_WT = r"/home/user/Software/example_data_for_pyTFM/clickpoints_tutorial/WT/out.txt"
# reading the parameters and the results, sorted for frames and object ids
parameter_dict_WT, res_dict_WT = read_output_file(file_WT)
# pooling all frames and objects together.
n_frames_WT, values_dict_WT, frame_list_WT = prepare_values(res_dict_WT)
# reading the KO data set. Use your own output text file here
# your file may be called out0.txt or similar
file_KO = r"/home/user/Software/example_data_for_pyTFM/clickpoints_tutorial/KO/out.txt"
parameter_dict_KO, res_dict_KO = read_output_file(file_KO)
n_frames_KO, values_dict_KO, frame_list_KO = prepare_values(res_dict_KO)
# normalizing the strain energy
values_dict_WT["strain energy per area"] = values_dict_WT["strain energy"]/values_dict_WT["area Cell Area"]
values_dict_KO["strain energy per area"] = values_dict_KO["strain energy"]/values_dict_WT["area Cell Area"]
# normalizing the contractility
values_dict_WT["contractility per area"] = values_dict_WT["contractility"]/values_dict_WT["area Cell Area"]
values_dict_KO["contractility per area"] = values_dict_KO["contractility"]/values_dict_WT["area Cell Area"]
# t-test for all value pairs
t_test_dict = t_test(values_dict_WT, values_dict_KO)
lables = ["WT", "KO"] # designations for the two dictionaries that are provided to the box_plots functions
types = ["contractility per area", "strain energy per area"] # name of the quantities that are plotted
ylabels = ["contractility per colony area [N/m²]", "strain energy per colony area [J/m²]"] # custom axes labels
# producing a two box plots comparing the strain energy and the contractility in WT and KO
fig_force = box_plots(values_dict_WT, values_dict_KO, labels, t_test_dict=t_test_dict, types=types,
                      low_ylim=0, ylabels=ylabels, plot_legend=True)
lables = ["WT", "KO"] # designations for the two dictionaries that are provided to the box_plots functions
types = ["mean normal stress Cell Area", "average magnitude line tension"] # name of the quantities that are plotted
ylabels = ["mean normal stress [N/m]", "line tension [N/m]"] #
fig_stress = box_plots(values_dict_WT, values_dict_KO, labels, t_test_dict=t_test_dict, types=types,
                       low_ylim=0, ylabels=ylabels, plot_legend=True)
lables = ["WT", "KO"] # designations for the two dictionaries that are provided to the box_plots functions
# name of the measures that are plotted. Must be length 2 for this case.
types = ["contractility per area", "strain energy per area"]
# plotting value of types[0] vs value of types[1]
fig_force2 = compare_two_values(values_dict_WT, values_dict_KO, types, labels,
                                xlabel="contractility per colony area [N/m²]", ylabel="strain energy per colony area [J/m²]")
# define and output folder for your figures
folder_plots = r"/home/user/Software/example_data_for_pyTFM/clickpoints_tutorial/plots/"
# create the folder, if it doesn't already exist
createFolder(folder_plots)
# saving the three figures that were created beforehand
fig_force.savefig(os.path.join(folder_plots, "forces1.png")) # boxplot comparing measures for force generation
fig_stress.savefig(os.path.join(folder_plots, "fig_stress.png")) # boxplot comparing normal stress and line tension
fig_force2.savefig(os.path.join(folder_plots, "forces2.png")) # plot of strain energy vs contractility
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class PairNet(nn.Module):
def __init__(self, dim_in, dim_out):
super(PairNet, self).__init__()
self.fc1 = nn.Linear(dim_in, 512)
self.fc2 = nn.Linear(512, 256)
self.fc3 = nn.Linear(256, dim_out)
def forward(self, x):
h = F.relu(self.fc1(x))
h = F.relu(self.fc2(h))
out = self.fc3(h)
return out
class TGNN(nn.Module):
def __init__(self, num_atom_feats, num_bond_feats, dim_target):
super(TGNN, self).__init__()
self.num_atom_feats = num_atom_feats
self.num_bond_feats = num_bond_feats
self.atom_net = nn.Linear(self.num_atom_feats, 128)
self.bond_net = nn.Linear(self.num_bond_feats, 128)
self.pair_net = PairNet(2 * 128 + 128, 1024)
self.fc1 = nn.Linear(1024, 512)
self.fc2 = nn.Linear(512, 32)
self.fc3 = nn.Linear(32, dim_target)
def forward(self, pairs, idx_pairs, ref_feats):
naf = self.num_atom_feats
atom_emb1 = F.relu(self.atom_net(pairs[:, :naf]))
atom_emb2 = F.relu(self.atom_net(pairs[:, naf:2*naf]))
bond_emb = F.relu(self.bond_net(pairs[:, 2*naf:]))
h_pair = self.readout(self.pair_net(torch.cat([atom_emb1, atom_emb2, bond_emb], dim=1)), idx_pairs)
# h = F.relu(self.fc1(torch.cat([h_pair, ref_feats], dim=1)))
h = F.relu(self.fc1(h_pair))
h = F.relu(self.fc2(h))
out = self.fc3(h)
return out
def readout(self, x, idx):
h = torch.empty((idx.shape[0], x.shape[1]), dtype=torch.float).cuda()
pos = 0
for i in range(0, idx.shape[0]):
h[i, :] = torch.mean(x[pos:pos+idx[i], :], dim=0)
pos += idx[i]
return h
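# A minimal usage sketch with assumed, hypothetical shapes: each row of `pairs`
# concatenates two atom feature vectors and one bond feature vector, and
# `idx_pairs` holds the number of pairs per molecule so that readout() can
# mean-pool pair embeddings back to one vector per molecule. As written, the
# model needs a CUDA device because readout() allocates directly on the GPU.
if __name__ == '__main__':
    model = TGNN(num_atom_feats=32, num_bond_feats=8, dim_target=1).cuda()
    pairs = torch.rand(10, 2 * 32 + 8).cuda()  # 10 atom pairs in total
    idx_pairs = torch.tensor([4, 6])           # molecule 1: 4 pairs, molecule 2: 6 pairs
    ref_feats = torch.rand(2, 128).cuda()      # accepted but unused by forward() as written
    print(model(pairs, idx_pairs, ref_feats).shape)  # torch.Size([2, 1])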
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
text = input()
print(text)
|
__author__ = 'AmmiNi'
import unittest
import SMSSender
import EmailSender
class NotificationTest(unittest.TestCase):
def test_one_sms(self):
sms_client = SMSSender.SMSSender("test sms", '')
raised = False
try:
sms_client.send_message("+6594681497", "Test message")
except:
raised = True
self.assertNotEqual(raised, True)
def test_one_email(self):
email_client = EmailSender.EmailSender('incidentsinsg@gmail.com', 'incidents', "test")
raised = False
try:
email_client.send_email_to('lisi0010@e.ntu.edu.sg', 'Test email', 'test email')
except:
raised = True
self.assertNotEqual(raised, True)
def test_all_sms(self):
sms_client = SMSSender.SMSSender("test sms", '')
raised = False
try:
sms_client.start()
except:
raised = True
self.assertNotEqual(raised, True)
def test_all_email(self):
email_client = EmailSender.EmailSender('incidentsinsg@gmail.com', 'incidents', "test")
raised = False
try:
email_client.start()
except:
raised = True
self.assertNotEqual(raised, True)
if __name__ == '__main__':
unittest.main()
|
# Generated by Django 2.2.6 on 2020-01-12 18:37
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('work', '0084_auto_20200110_1124'),
]
operations = [
migrations.RenameField(
model_name='historicalsurveyqty',
old_name='approval_status',
new_name='status',
),
migrations.RenameField(
model_name='surveyqty',
old_name='approval_status',
new_name='status',
),
]
|
"""
ZIP UTILS
Tools for working with ZIP files.
Author: Pablo Pizarro R. @ ppizarror.com
License:
The MIT License (MIT)
Copyright 2017 Pablo Pizarro R.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ['Zip']
# Library imports
import os
import zipfile
class Zip(object):
    """
    Class for managing zip files.
    """

    def __init__(self, filename):
        """
        Constructor, creates a zipfile with the given name.
        :param filename: File name
        """
        if not filename.endswith('.zip'):
            filename += '.zip'
        # Create a zipfile object
        self._zip = zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED)
        # List of excluded files
        self._excptfiles = []
        # Path prefix to strip from entry names
        self.ghostpath = ''

    def add_excepted_file(self, filename):
        """
        Adds a file to the exclusion list.
        :param filename: File name
        :type filename: str, list
        :return: None
        """
        if isinstance(filename, list):
            for f in filename:
                self.add_excepted_file(f)
        else:
            self._excptfiles.append(filename)

    def _check_excepted_file(self, filename):
        """
        Tells whether the file is on the exclusion list.
        :param filename: File name
        :type filename: str
        :return: Boolean
        :rtype: bool
        """
        filebasename = os.path.basename(filename)
        for f in self._excptfiles:
            if f in filebasename:
                return True
        return False

    def save(self):
        """
        Saves the zip file.
        :return: None
        """
        self._zip.close()

    def _writefile(self, f, fname):
        """
        Writes a file into the zip.
        :param f: Path of the file on disk
        :param fname: Entry name inside the zip
        :return:
        """
        self._zip.write(f, fname)

    def add_file(self, ufile, ghostpath=None):
        """
        Adds a file to the zip.
        :param ghostpath: Path prefix to strip from the entry name
        :param ufile: File location
        :type ufile: str, list
        :return: None
        """
        if isinstance(ufile, list):
            for f in ufile:
                if ghostpath is None:
                    self._writefile(f, f.replace(self.ghostpath, ''))
                else:
                    self._writefile(f, f.replace(ghostpath, ''))
        else:
            if ghostpath is None:
                self._writefile(ufile, ufile.replace(self.ghostpath, ''))
            else:
                self._writefile(ufile, ufile.replace(ghostpath, ''))

    def add_folder(self, folder):
        """
        Adds a folder (recursively) to the zip file.
        :param folder: Folder
        :type folder: str, list
        :return: None
        """
        if isinstance(folder, list):
            for f in folder:
                self.add_folder(f)
        else:
            for f in os.listdir(folder):
                full_path = os.path.join(folder, f)
                if os.path.isfile(full_path):
                    if not self._check_excepted_file(full_path):
                        self.add_file(full_path)
                elif os.path.isdir(full_path):
                    self.add_folder(full_path)

    def set_ghostpath(self, path):
        """
        Sets the ghost path used to strip a prefix from entry names.
        :param path: Path prefix
        :return:
        """
        self.ghostpath = path
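

# A minimal, self-contained usage example (a sketch; the file names are
# illustrative only):
if __name__ == '__main__':
    import tempfile

    tmp = tempfile.mkdtemp()
    sample = os.path.join(tmp, 'hello.txt')
    with open(sample, 'w') as fh:
        fh.write('hello')
    z = Zip(os.path.join(tmp, 'backup'))   # creates backup.zip
    z.set_ghostpath(tmp + os.sep)          # strip the temp prefix from entry names
    z.add_file(sample)
    z.save()
    print('wrote', os.path.join(tmp, 'backup.zip'))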
|
## Initial conditions cuts
from __future__ import print_function, division
import hdf5_to_dict as io
import sys
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from pylab import *
COMPARE = False
dump_dir = sys.argv[1]
init_file = io.get_dumps_list(dump_dir)[0]
hdr, geom, dump = io.load_all(init_file, extras=False)
N2 = hdr['n2']
r = geom['r'][:, N2//2, 0]
rho = dump['RHO'][:, N2//2, 0]
uu = dump['UU'][:, N2//2, 0]
p = (hdr['gam']-1)*uu
b2 = dump['bsq'][:, N2//2, 0]
beta = dump['beta'][:, N2//2, 0]
gamma = dump['gamma'][:, N2//2, 0]
figname = 'initial-cuts.pdf'
if COMPARE:
tablename = 'initial-cuts.csv'
data=loadtxt('torus_cuts.csv')
#data1=loadtxt('data_d2_x+0.16D+01_n0000.csv',skiprows=1,delimiter=',')
r_=0
rho_=1
p_=2
lfac_=4
b2_=3
def betainv(data):
return data[:,b2_]/2./data[:,p_]
f, all_axes = plt.subplots(2, 3, sharex='col')
((ax1, ax2, ax3), (ax4, ax5, ax6)) = all_axes
f.subplots_adjust(wspace=.5)
f.set_size_inches(10,4)
if COMPARE:
ax1.plot(data[:,r_],data[:,rho_],'r-')
ax2.plot(data[:,r_],data[:,p_],'r-')
ax3.plot(data[:,r_],sqrt(data[:,b2_]),'r-')
ax4.plot(data[:,r_],betainv(data),'r-')
ax5.plot(data[:,r_],data[:,lfac_],'r-')
ax6.plot(data[:,r_],data[:,p_]+data[:,b2_]/2.,'r-')
ax1.plot(r,rho,'b')
ax1.set_ylabel(r'$\rho$')
ax1.set_ylim(1e-8,1)
ax2.plot(r,p,'b')
ax2.set_ylabel(r'$P_{\rm gas}$')
ax2.set_ylim(1e-12,0.2)
ax3.plot(r,sqrt(b2),'b')
ax3.set_ylabel(r'$\sqrt{b_\mu b^\mu}$')
ax3.set_ylim(1.e-4,1.e-2)
ax4.plot(r,1/beta,'b')
ax4.set_ylabel(r'$\beta^{-1}$')
ax4.set_xlabel(r'$r_{\rm KS} [GM/c^2]$')
ax4.set_ylim(1.e-7,1.e-1)
ax5.plot(r,gamma,'b')
ax5.set_ylabel(r'$\Gamma$')
ax5.set_xlabel(r'$r_{\rm KS} [GM/c^2]$')
ax5.set_ylim(0.98,1.25)
ax6.plot(r,(p + b2/2.),'b')
ax6.set_ylabel(r'$P_{\rm gas}+P_{\rm mag}$')
ax6.set_xlabel(r'$r_{\rm KS} [GM/c^2]$')
ax6.set_ylim(1e-12,0.01)
for ax in all_axes.flatten():
ax.grid(True)
ax.set_yscale('log')
ax.set_xlim(2,50)
f.savefig(figname,bbox_inches='tight')
close()
#ascii.write(data[:,[r_,rho_,p_,lfac_,b2_]],tablename,delimiter=',',names=['r','rho','p','lfac','balphabalpha'])
|
from __future__ import division
import caffe
from caffe import tools
import test
from util import Timer
import numpy as np
import os
save_format, snapshot_prefix = test.prepare()
weights = '/home/jonlong/x/caffe/models/VGG/VGG16_full_conv.caffemodel'
caffe.set_device(6)
solver = caffe.SGDSolver('solver.prototxt')
solver.set_snapshot_prefix(snapshot_prefix)
solver.net.set_phase_train()
solver.net.set_mode_gpu()
solver.net.copy_from(weights)
# 1000 outer loops of 200 solver steps = 200k training iterations in total,
# running segmentation tests on PASCAL after every 200 steps.
for _ in range(1000):
    solver.step(200)
    test.seg_tests(solver, save_format, 'pascal')
|
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense
classifier = Sequential()
classifier.add(Conv2D(32, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))
# Step 2 - Pooling
classifier.add(MaxPooling2D(pool_size = (2, 2)))
# Adding a second convolutional layer
classifier.add(Conv2D(32, (3, 3), activation = 'relu'))
classifier.add(MaxPooling2D(pool_size = (2, 2)))
# Step 3 - Flattening
classifier.add(Flatten())
# Step 4 - Full connection
classifier.add(Dense(units = 128, activation = 'relu'))
classifier.add(Dense(units = 15, activation = 'softmax'))
# Compiling the CNN
classifier.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
#classifier.save('mymodel.h5')
from keras.preprocessing.image import ImageDataGenerator
train_datagen = ImageDataGenerator(rescale = 1./255,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = True)
test_datagen = ImageDataGenerator(rescale = 1./255)
training_set = train_datagen.flow_from_directory('D:/MLAI/Hackathon/IBM-project/PlantVillage/train',
target_size = (64, 64),
batch_size = 2,
class_mode = 'categorical')
test_set = test_datagen.flow_from_directory('D:/MLAI/Hackathon/IBM-project/PlantVillage/test',
target_size = (64, 64),
batch_size = 2,
class_mode = 'categorical')
classifier.fit_generator(training_set,
steps_per_epoch =15484 ,
epochs = 5,
validation_data = test_set,
validation_steps = 5154)
import numpy as np
from keras.preprocessing import image
test_image = image.load_img('D:/MLAI/Hackathon/IBM-project/PlantVillage/single_pred/tom dis.jpg', target_size = (64, 64))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
result = classifier.predict(test_image)
# Pick the most probable class; comparing softmax outputs to exactly 1
# (as the original loop did) would usually match nothing and leave the
# index undefined.
cnt = int(np.argmax(result[0]))
index=['Pepper__bell___Bacterial_spot','Pepper__bell___healthy','Potato___Early_blight','Potato___Late_blight','Potato___healthy','Tomato_Bacterial_spot','Tomato_Early_blight','Tomato_Late_blight','Tomato_Leaf_Mold','Tomato_Septoria_leaf_spot','Tomato_Spider_mites_Two_spotted_spider_mite','Tomato__Target_Spot','Tomato__Tomato_YellowLeaf__Curl_Virus','Tomato__Tomato_mosaic_virus','Tomato_healthy']
print(index[cnt])
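# A safer alternative (a sketch): derive the label list from the generator's
# class_indices mapping rather than hardcoding it, so the order always matches
# what flow_from_directory assigned.
# labels = sorted(training_set.class_indices, key=training_set.class_indices.get)
# print(labels[cnt])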
|
import argparse
from FileHandler import File
from ExitHandler import Exit
class Handler:
def __init__(self):
        self.parser = argparse.ArgumentParser()
self.parser.add_argument('-c', '--config', action='store', help='Configuration File to use.')
self.parser.add_argument('--ftp', action='store_true', help='Enable FTP Honeypot.')
self.parser.add_argument('--ssh', action='store_true', help='Enable SSH Honeypot.')
self.parser.add_argument('--telnet', action='store_true', help='Enable Telnet Honeypot.')
self.parser.add_argument('--smtp', action='store_true', help='Enable SMTP Honeypot.')
self.parser.add_argument('--proxy', action='store_true', help='Enable Proxy Honeypot.')
self.parser.add_argument('--ftpport', action='store', help='Port for incoming FTP Connections.')
self.parser.add_argument('--ftpuser', action='store', help='Username for incoming FTP Connections.')
self.parser.add_argument('--ftppass', action='store', help='Password for incoming FTP Connections.')
self.parser.add_argument('--sshport', action='store', help='Port for incoming SSH Connections.')
self.parser.add_argument('--sshuser', action='store', help='Username for incoming SSH Connections.')
self.parser.add_argument('--sshpass', action='store', help='Password for incoming SSH Connections.')
self.parser.add_argument('--telnetport', action='store', help='Port for incoming Telnet Connections.')
self.parser.add_argument('--telnetuser', action='store', help='Username for incoming Telnet Connections.')
self.parser.add_argument('--telnetpass', action='store', help='Password for incoming Telnet Connections.')
self.parser.add_argument('--smtpport', action='store', help='Port for incoming SMTP Connections.')
self.parser.add_argument('--smtpuser', action='store', help='Username for incoming SMTP Connections.')
self.parser.add_argument('--smtppass', action='store', help='Password for incoming SMTP Connections.')
self.parser.add_argument('--proxyport', action='store', help='Port for incoming Proxy Connections.')
self.parser.add_argument('--proxytype', action='store', help='SOCKS4, SOCKS5 or HTTP.')
self.parser.add_argument('--proxyuser', action='store', help='Username for incoming Proxy Connections.')
self.parser.add_argument('--proxypass', action='store', help='Password for incoming Proxy Connections.')
self.args = self.parser.parse_args()
self.check_arguments()
def check_arguments(self):
if self.args.config is not None:
if File.file_exits(self.args.config) is False:
Exit.Exit(reason="Config File does not exist.")
        else:
            pass  # no config file supplied; fall back to the individual CLI flags
|
from math import *
import numpy as np
import matplotlib.pyplot as plt
from qiskit import *
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister, execute
from qiskit.circuit import Parameter
from qiskit.tools.visualization import plot_histogram
from qiskit import Aer, IBMQ
from qiskit.providers.ibmq import least_busy
from qiskit.tools.monitor import job_monitor
from mapping import Qstate_dict
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def LQR(Qcircuit, n_qubits=4):
LQR = Qcircuit
for i in np.arange(n_qubits):
LQR.h(i)
return LQR
def SpinalQ( Qcircuit, n_qubits, XROT_theta, ZROT_psi): ## n_qubits should be length of Rx
num_tats = len(ZROT_psi)
theta = Parameter('theta') ## Note: Need to adjust the SigPro to get freq of num of tatums present
psi = Parameter('psi')
gamma = Parameter('gam')
spine = np.arange(0, n_qubits)
spine = spine[spine > 3]
spine_circuit = Qcircuit
spine_circuit.barrier()
for index, Ugate in enumerate(zip(XROT_theta,ZROT_psi)): ## Rx is the number of tatum onsets in the current beat
if index <= 3:
spine_circuit.cu3(theta, psi, gamma, index, spine[index])
else:
icyc = index - 4
spine_circuit.cu3(theta, psi, gamma, icyc, spine[icyc])
spine_circuit = spine_circuit.bind_parameters({theta: round(Ugate[0], 3)})
spine_circuit = spine_circuit.bind_parameters({psi: round(Ugate[1], 3)})
spine_circuit = spine_circuit.bind_parameters({gamma: 0})
spine_circuit.barrier()
return spine_circuit, num_tats
def qft_dagger(circ, n): ## Make sure to cite this function from the qiskit textbook
"""n-qubit QFTdagger the first n qubits in circ"""
for j in range(n):
for m in range(j):
circ.cu1(-np.pi/float(2**(j-m)), m, j)
circ.h(j)
return circ
def build_circuit(n_qubits, n_bits, Ry_Base):
circ_array = []
for index, circuit in enumerate(Ry_Base):
Rx = Ry_Base[f'Beat{index+1}'][f'Rx {index+1}']
Rz = Ry_Base[f'Beat{index+1}'][f'Rz {index+1}']
ZROT_psi = (Rz / 1000) * 360 #This is the phase rotation
XROT_theta = Rx * 360 #This is the drive amplitude
tqr = QuantumRegister(n_qubits)
tcr1 = ClassicalRegister(n_bits)
tcr2 = ClassicalRegister(n_bits)
Qtest_full = QuantumCircuit( tqr, tcr1, tcr2 )
phase_est = LQR(Qtest_full)
Spine, num_tats = SpinalQ(phase_est, n_qubits, XROT_theta, ZROT_psi)
QuiKo = qft_dagger( Spine, 4); QuiKo.barrier()
## Measurement Stage:
QuiKo.measure( tqr[:4],tcr1[:4] )
QuiKo.measure( tqr[4:],tcr2[:4] )
circ_array.append([QuiKo, num_tats])
return circ_array
##~~~~~Running simulator and Quantum devices~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def simulator_backend(circ_array):
backend = Aer.get_backend("qasm_simulator")
res_array = []
for index, Qcirc in enumerate(circ_array):
simulate = execute(Qcirc, backend=backend, shots=1024).result()
res_array.append(simulate.get_counts())
return res_array
def IBMQ_backend(circ_array):
IBMQ.load_account()
provider = IBMQ.get_provider('ibm-q')
backend = provider.get_backend('ibmq_16_melbourne')
res_array = []
for index, Qcirc in enumerate(circ_array):
job = execute(Qcirc, backend=backend, shots=1024).result()
res_array.append(job.get_counts())
return res_array
##~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def prob_dist(result_dict):
prob_dist = []; pd = []
states = []; st = []
for index, result in enumerate(result_dict):
for element in result:
pd.append(result[element])
st.append(element)
prob_dist.append(pd); pd = []
states.append(st); st = []
return prob_dist, states
def QuiKo_Algorithm(audiosample, simulator=True):
filename = 'Samples/'+audiosample+'.wav'
Ry_Base, tempo = Qstate_dict(filename)
#IBMQ.load_account()
n_qubits = 8; n_bits = 4
circ_array = build_circuit(n_qubits, n_bits, Ry_Base)
circ_array[1][0].draw(output="mpl", filename='MQC2')
num_tats = [circ_array[i][1] for i, val in enumerate(circ_array)]
circ_array = [circ_array[i][0] for i, val in enumerate(circ_array)]
if simulator == True: ## Toss it to the backend
results = simulator_backend(circ_array)
else:
results = IBMQ_backend(circ_array)
for index, result in enumerate(results):
total_counts = sum(results[index].values())
for key in result:
result[key] = result[key] / total_counts
return results, num_tats, tempo
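
# Example invocation (a sketch; 'drum_loop' is an illustrative sample name and
# would need to exist as Samples/drum_loop.wav):
# results, num_tats, tempo = QuiKo_Algorithm('drum_loop', simulator=True)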
|
from sklearn.metrics import confusion_matrix
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
def name_species(predict, error):
    class_result = []
    if error < 0.1:
        error = 0.1
    for i in range(len(predict)):
        # Compare each component of the prediction against the one-hot target,
        # allowing a tolerance of `error` per component. (The original wrote
        # (0 - abs(x)) <= error, which is always true since abs(x) >= 0.)
        if abs(1 - predict[i][0]) <= error and abs(predict[i][1]) <= error and abs(predict[i][2]) <= error:
            class_result.append("setosa")
        elif abs(predict[i][0]) <= error and abs(1 - predict[i][1]) <= error and abs(predict[i][2]) <= error:
            class_result.append("versicolor")
        elif abs(predict[i][0]) <= error and abs(predict[i][1]) <= error and abs(1 - predict[i][2]) <= error:
            class_result.append("virginica")
        else:
            class_result.append("not classified")
    return class_result
def skliearnmatrix(target, predict):
    return confusion_matrix(target, predict, labels=["setosa", "versicolor", "virginica"])
def visualize():
sns.set(style="ticks", color_codes=True)
data = pd.read_csv("iris.csv")
g = sns.pairplot(data, hue="variety")
plt.show()
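
# Example (a sketch with illustrative one-hot-style predictions; real targets
# would come from the dataset's species column):
if __name__ == "__main__":
    preds = [[0.95, 0.02, 0.03], [0.10, 0.85, 0.05]]
    named = name_species(preds, 0.2)
    print(named)  # ['setosa', 'versicolor']
    print(skliearnmatrix(["setosa", "versicolor"], named))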
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import os.path
import platform
import subprocess
from pathlib import Path
from textwrap import dedent
import pytest
from pants.backend.go import target_type_rules
from pants.backend.go.goals import package_binary
from pants.backend.go.goals.package_binary import GoBinaryFieldSet
from pants.backend.go.target_types import GoBinaryTarget, GoModTarget, GoPackageTarget
from pants.backend.go.util_rules import (
assembly,
build_pkg,
build_pkg_target,
first_party_pkg,
go_mod,
import_analysis,
link,
sdk,
third_party_pkg,
)
from pants.backend.go.util_rules.build_opts import GoBuildOptions
from pants.backend.go.util_rules.build_pkg import BuildGoPackageRequest, FallibleBuiltGoPackage
from pants.core.goals.package import BuiltPackage
from pants.engine.addresses import Address
from pants.engine.rules import QueryRule
from pants.engine.target import Target
from pants.testutil.rule_runner import RuleRunner
from pants.util.contextutil import temporary_dir
@pytest.fixture()
def rule_runner() -> RuleRunner:
rule_runner = RuleRunner(
rules=[
*assembly.rules(),
*import_analysis.rules(),
*package_binary.rules(),
*build_pkg.rules(),
*build_pkg_target.rules(),
*first_party_pkg.rules(),
*go_mod.rules(),
*link.rules(),
*target_type_rules.rules(),
*third_party_pkg.rules(),
*sdk.rules(),
QueryRule(BuiltPackage, (GoBinaryFieldSet,)),
QueryRule(FallibleBuiltGoPackage, (BuildGoPackageRequest,)),
],
target_types=[
GoBinaryTarget,
GoModTarget,
GoPackageTarget,
],
)
rule_runner.set_options([], env_inherit={"PATH"})
return rule_runner
def build_package(rule_runner: RuleRunner, binary_target: Target) -> BuiltPackage:
field_set = GoBinaryFieldSet.create(binary_target)
result = rule_runner.request(BuiltPackage, [field_set])
rule_runner.write_digest(result.digest)
return result
def test_build_package_with_assembly(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"go.mod": dedent(
"""\
module example.com/assembly
go 1.17
"""
),
"main.go": dedent(
"""\
package main
import "fmt"
func main() {
fmt.Println(add(1, 2))
}
"""
),
"add_amd64.go": "package main\nfunc add(x, y int64) int64",
"add_arm64.go": "package main\nfunc add(x, y int64) int64",
# Based on https://davidwong.fr/goasm/add.
"add_amd64.s": dedent(
"""\
TEXT ·add(SB),$0-24
MOVQ x+0(FP), BX
MOVQ y+8(FP), BP
ADDQ BP, BX
MOVQ BX, ret+16(FP)
RET
"""
),
# Based on combining https://davidwong.fr/goasm/add and `go tool compile -S` to get
# ARM instructions.
"add_arm64.s": dedent(
"""\
TEXT ·add(SB),$0-24
MOVD x+0(FP), R0
MOVD y+8(FP), R1
ADD R1, R0, R0
MOVD R0, ret+16(FP)
RET
"""
),
"BUILD": dedent(
"""\
go_mod(name="mod")
go_package(name="pkg", sources=["*.go", "*.s"])
go_binary(name="bin")
"""
),
}
)
binary_tgt = rule_runner.get_target(Address("", target_name="bin"))
built_package = build_package(rule_runner, binary_tgt)
assert len(built_package.artifacts) == 1
assert built_package.artifacts[0].relpath == "bin"
result = subprocess.run([os.path.join(rule_runner.build_root, "bin")], stdout=subprocess.PIPE)
assert result.returncode == 0
assert result.stdout == b"3\n"
def test_build_invalid_package(rule_runner: RuleRunner) -> None:
request = BuildGoPackageRequest(
import_path="example.com/assembly",
pkg_name="main",
dir_path="",
build_opts=GoBuildOptions(),
go_files=("add_amd64.go", "add_arm64.go"),
digest=rule_runner.make_snapshot(
{
"add_amd64.go": "package main\nfunc add(x, y int64) int64",
"add_arm64.go": "package main\nfunc add(x, y int64) int64",
"add_amd64.s": "INVALID!!!",
"add_arm64.s": "INVALID!!!",
}
).digest,
s_files=("add_amd64.s", "add_arm64.s"),
direct_dependencies=(),
minimum_go_version=None,
)
result = rule_runner.request(FallibleBuiltGoPackage, [request])
assert result.output is None
assert result.exit_code == 1
assert result.stdout == "add_amd64.s:1: unexpected EOF\nasm: assembly of add_amd64.s failed\n"
def test_build_package_with_prebuilt_object_files(rule_runner: RuleRunner) -> None:
# Compile helper assembly into a prebuilt .syso object file.
machine = platform.uname().machine
if machine == "x86_64":
assembly_text = dedent(
"""\
/* Apple still insists on underscore prefixes for C function names. */
#if defined(__APPLE__)
#define EXT(s) _##s
#else
#define EXT(s) s
#endif
.align 4
.globl EXT(fortytwo)
EXT(fortytwo):
movl $42, %eax
ret
"""
)
elif machine == "arm64":
assembly_text = dedent(
"""\
/* Apple still insists on underscore prefixes for C function names. */
#if defined(__APPLE__)
#define EXT(s) _##s
#else
#define EXT(s) s
#endif
.align 4
.globl EXT(fortytwo)
EXT(fortytwo):
mov x0, #42
ret
"""
)
else:
pytest.skip(f"Unsupported architecture for test: {machine}")
with temporary_dir() as tempdir:
source_path = Path(tempdir) / "fortytwo.S"
source_path.write_text(assembly_text)
output_path = source_path.with_suffix(".o")
subprocess.check_call(["gcc", "-c", "-o", str(output_path), str(source_path)])
object_bytes = output_path.read_bytes()
rule_runner.write_files(
{
"go.mod": dedent(
"""\
module example.com/syso_files
go 1.17
"""
),
"main.go": dedent(
"""\
package main
import "fmt"
func main() {
fmt.Println(value())
}
"""
),
"value.go": dedent(
"""\
package main
// extern int fortytwo();
import "C"
func value() int {
return int(C.fortytwo())
}
"""
),
"value.syso": object_bytes,
"BUILD": dedent(
"""\
go_mod(name="mod")
go_package(name="pkg", sources=["*.go", "*.syso"])
go_binary(name="bin")
"""
),
}
)
binary_tgt = rule_runner.get_target(Address("", target_name="bin"))
built_package = build_package(rule_runner, binary_tgt)
assert len(built_package.artifacts) == 1
assert built_package.artifacts[0].relpath == "bin"
result = subprocess.run([os.path.join(rule_runner.build_root, "bin")], stdout=subprocess.PIPE)
assert result.returncode == 0
assert result.stdout == b"42\n"
def test_build_package_using_api_metadata(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"go.mod": dedent(
"""\
module example.com/assembly
go 1.17
"""
),
"main.go": dedent(
"""\
package main
import "fmt"
const MagicValueToBeUsedByAssembly int = 42
func main() {
fmt.Println(add_magic(10))
}
"""
),
"add_amd64.go": "package main\nfunc add_magic(x int64) int64",
"add_arm64.go": "package main\nfunc add_magic(x int64) int64",
"add_amd64.s": dedent(
"""\
#include "textflag.h" // for NOSPLIT
#include "go_asm.h" // for const_MagicValueToBeUsedByAssembly
TEXT ·add_magic(SB),NOSPLIT,$0
MOVQ x+0(FP), BX
MOVQ $const_MagicValueToBeUsedByAssembly, BP
ADDQ BP, BX
MOVQ BX, ret+8(FP)
RET
"""
),
"add_arm64.s": dedent(
"""\
#include "textflag.h" // for NOSPLIT
#include "go_asm.h" // for const_MagicValueToBeUsedByAssembly
TEXT ·add_magic(SB),NOSPLIT,$0
MOVD x+0(FP), R0
MOVD $const_MagicValueToBeUsedByAssembly, R1
ADD R1, R0, R0
MOVD R0, ret+8(FP)
RET
"""
),
"BUILD": dedent(
"""\
go_mod(name="mod")
go_package(name="pkg", sources=["*.go", "*.s"])
go_binary(name="bin")
"""
),
}
)
binary_tgt = rule_runner.get_target(Address("", target_name="bin"))
built_package = build_package(rule_runner, binary_tgt)
assert len(built_package.artifacts) == 1
assert built_package.artifacts[0].relpath == "bin"
result = subprocess.run([os.path.join(rule_runner.build_root, "bin")], stdout=subprocess.PIPE)
assert result.returncode == 0
assert result.stdout == b"52\n" # should be 10 + the 42 "magic" value
def test_build_package_with_copied_header(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"go.mod": dedent(
"""\
module example.com/assembly
go 1.17
"""
),
"constant_linux.h": dedent(
"""
#define MAGIC_VALUE 42
"""
),
"constant_darwin.h": dedent(
"""
#define MAGIC_VALUE 42
"""
),
"main.go": dedent(
"""\
package main
import "fmt"
func main() {
fmt.Println(add_magic(10))
}
"""
),
"add_amd64.go": "package main\nfunc add_magic(x int64) int64",
"add_arm64.go": "package main\nfunc add_magic(x int64) int64",
"add_amd64.s": dedent(
"""\
#include "textflag.h" // for NOSPLIT
#include "constant_GOOS.h" // for MAGIC_VALUE
TEXT ·add_magic(SB),NOSPLIT,$0
MOVQ x+0(FP), BX
MOVQ $MAGIC_VALUE, BP
ADDQ BP, BX
MOVQ BX, ret+8(FP)
RET
"""
),
"add_arm64.s": dedent(
"""\
#include "textflag.h" // for NOSPLIT
#include "constant_GOOS.h" // for MAGIC_VALUE
TEXT ·add_magic(SB),NOSPLIT,$0
MOVD x+0(FP), R0
MOVD $MAGIC_VALUE, R1
ADD R1, R0, R0
MOVD R0, ret+8(FP)
RET
"""
),
"BUILD": dedent(
"""\
go_mod(name="mod")
go_package(name="pkg", sources=["*.go", "*.s", "*.h"])
go_binary(name="bin")
"""
),
}
)
binary_tgt = rule_runner.get_target(Address("", target_name="bin"))
built_package = build_package(rule_runner, binary_tgt)
assert len(built_package.artifacts) == 1
assert built_package.artifacts[0].relpath == "bin"
result = subprocess.run([os.path.join(rule_runner.build_root, "bin")], stdout=subprocess.PIPE)
assert result.returncode == 0
assert result.stdout == b"52\n" # should be 10 + the 42 "magic" value
|
#!/usr/bin/python
VERSION = 0.1
#---------------------------------
# BAGEL-calc_foldchange: given a matrix of read counts, normalize, filter for low reads, and calculate fold change
# (c) Traver Hart, 10/2015.
# modified 9/2015
# Free to modify and redistribute with attribution
#---------------------------------
helptext = ('\n'
'BAGEL-calc_foldchange.py -i [read count file] -o [output label] -c [control column]\n'
'\n'
' from the Bayesian Analysis of Gene EssentiaLity (BAGEL) suite\n'
' Version ' + str(VERSION) + '\n'
'\n'
' required options:\n'
' -i [read count file] Tab-delimited file of reagents and fold changes. See documentation for format.\n'
' -o [output label] Label for all output files\n'
' -c [control column] Control (T0 or plasmid) column\n'
'\n'
' other options:\n'
' --minreads=N Discard gRNA with T0 counts < N (default 30)\n'
' -h, --help Show this help text\n'
'\n'
' Example:\n'
' BAGEL-calc_foldchange.py -i readcount_file -o experiment_name -c 1\n'
'\n'
' Filters readcount_file for reagents with at least 30 reads in the control sample,\n'
' calculates fold change, and writes [output label].foldchange and [output label].normalized_reads\n'
'\n')
from numpy import * # into current namespace
import sys, getopt
import pandas as pd
#-------------------------------------------#
# SET CONSTANTS; INITIALIZE VARIABLES #
#-------------------------------------------#
MIN_READS = 30
#----------------------------------#
# READ COMMAND LINE ARGUMENTS #
#----------------------------------#
try:
opts, args = getopt.getopt(sys.argv[1:], "hi:o:c:", ["minreads=","help"])
except getopt.GetoptError:
print helptext
sys.exit(2)
for opt, arg in opts:
if opt in ( '-h', '--help'):
print helptext
sys.exit()
elif opt == '-i':
readcountfile = arg
elif opt == '-o':
label = arg
elif opt == '-c':
ctrl_column = eval(arg)
elif opt == '--minreads':
MIN_READS = int(arg)
#----------------------------------------------------------------#
# Import raw read data, normalize, filter for T0 min readcounts #
# Output: [output label].foldchange #
#----------------------------------------------------------------#
if type(ctrl_column) is int:
ctrl_column = [ctrl_column]
reads = pd.read_table(readcountfile, sep='\t', index_col=0)
control_label = reads.columns.values[ctrl_column]
numClones, numColumns = reads.shape
#
# missing gene name = replace
# missing read count = zero count
#
reads[ reads.columns.values[0] ].fillna('NO_GENE_NAME', inplace=True)  # GENE is the first column after the index
reads.fillna(0, inplace=True)
#
# normalize each sample to a fixed total readcount
#
sumReads = reads.ix[:,range(1,numColumns)].sum(0)
normed = pd.DataFrame( index=reads.index.values )
normed['GENE'] = reads.ix[:,0] # first column is gene name
normed = reads.ix[:,range(1,numColumns)] / tile( sumReads, [numClones,1]) * 10000000 # normalize to 10M reads
#
# filter for minimum readcount
#
f = where( (reads.ix[:,ctrl_column ] >= MIN_READS).sum(axis=1) == len(ctrl_column) )[0]
normed = normed.ix[f,:]
#
# calculate fold change
#
foldchange = pd.DataFrame( index=normed.index.values )
foldchange.index.name = 'REAGENT_ID'
foldchange['GENE'] = reads.ix[f,0] # dataframe 'normed' has no GENE column
for i in range( numColumns -1 ):
foldchange[ normed.columns.values[i] ] = log2( (normed.ix[:,normed.columns.values[i] ] + 0.5) \
/ ( normed.ix[:,control_label].sum(axis=1)*1.0/len(control_label) + 0.5 ) )
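#
# For reference, the fold change computed above for reagent i in sample j is:
#   log2( (normed[i,j] + 0.5) / (mean over control columns of normed[i,:] + 0.5) )
# where the 0.5 pseudocount guards against taking log2 of zero.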
#
# we have calculated a foldchange for the control column. Drop it.
#
foldchange.drop( control_label, axis=1, inplace=True)
#
# write normed readcount file
# write foldchange file
#
foldchange_filename = label + '.foldchange'
foldchange.to_csv( foldchange_filename, sep='\t', float_format='%4.3f')
normedreads_filename = label + '.normed_readcount'
normed.to_csv( normedreads_filename, sep='\t', float_format='%3.2f')
|
from keras.models import Model
from keras.layers import Input, Dense, Dropout, Embedding, LSTM, Reshape, concatenate
from keras.utils import plot_model

def define_parinject_model(vocab_size, max_length):
    # feature extractor model
    inputs1 = Input(shape=(4096,))
    fe1 = Dropout(0.5)(inputs1)
    fe2 = Dense(256, activation='relu')(fe1)
    # Tile the 256-d image embedding 34 times (one copy per caption position,
    # so this model assumes max_length == 34).
    fe3 = fe2
    for i in range(33):
        fe3 = concatenate([fe3, fe2], axis=1)
    fe4 = Reshape((34, 256))(fe3)
# sequence model
inputs2 = Input(shape=(max_length,))
se1 = Embedding(vocab_size, 256, mask_zero=False)(inputs2)
se2 = Dropout(0.5)(se1)
#se3=Flatten()(se2)
#encoder
encode1=concatenate([fe4,se2],axis=-1)
#encode2 = Reshape((35,256))(encode1)
se3 = LSTM(256)(encode1)
# decoder model
decoder1 = Dense(256, activation='relu')(se3)
outputs = Dense(vocab_size, activation='softmax')(decoder1)
# tie it together [image, seq] [word]
model = Model(inputs=[inputs1, inputs2], outputs=outputs)
model.compile(loss='categorical_crossentropy', optimizer='adam')
# summarize model
print(model.summary())
plot_model(model, to_file='model.png', show_shapes=True)
return model
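
# Example (a sketch; the vocabulary size is illustrative, and max_length must
# be 34 to match the 34-way tiling of the image embedding above):
# model = define_parinject_model(vocab_size=7579, max_length=34)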
|
import numpy as np
import open3d as o3d
import math
from math import cos, sin, pi
from sklearn.linear_model import LinearRegression
from matplotlib import pyplot as plt
import lineSegmentation as seg
# import sortline as sl
############################## Macro ###############################
# pi = 3.141592653589793238
######################### Define Function ##########################
def get_dy2yaw(input_list):
angle = math.atan2(input_list[1], input_list[0])
if angle<-pi/4:
angle = angle + pi
return angle
def get_angle(input_list):
angle = math.atan2(input_list[1], input_list[0])
if input_list[1]<0:
angle = angle+2*pi
return angle*180/pi
def get_distance(xy1,xy2):
distance = ((xy1[0]-xy2[0])**2 + (xy1[1]-xy2[1])**2)**0.5
return distance
def sortline_co(line):
length = len(line[:][:,0])
linedict = {}
for i in range(0,length):
linedict[line[:][i,0]] = line[:][i,:]
linedict_sorted = sorted(linedict.items())
line_sorted = np.empty([0,2])
length = len(linedict_sorted)
for j in range(0,length):
line_sorted = np.append(line_sorted, [linedict_sorted[j][1]],axis = 0)
return line_sorted
def sortline_angle(line, inner_point):
length = len(line[:][:,0])
linedict = {}
linevectors = line - inner_point
listangle = list(map(get_angle, linevectors))
for i in range(0,length):
#line1dict[xline1[i]] = [xline1[i],yline1[i]]
linedict[listangle[i]] = line[:][i,:]
linedict_sorted = sorted(linedict.items())
listangle = sorted(listangle)
line_sorted = np.empty([0,2])
length = len(linedict_sorted)
for j in range(0,length):
line_sorted = np.append(line_sorted, [linedict_sorted[j][1]],axis = 0)
for i in range(0,length-1):
theta = abs(listangle[i]-listangle[i+1])
if 180 < theta:
move = line_sorted[:i+1]
line_sorted = line_sorted[i+1:]
line_sorted = np.append(line_sorted,move,axis = 0)
return line_sorted
##########################################################################
############################# Main Function ##############################
##########################################################################
def sort_Car(points, center):
####################### Get result #########################
points_sorted = sortline_angle(points, center)
x1, y1 = points_sorted[0][0], points_sorted[0][1]
x2, y2 = points_sorted[1][0], points_sorted[1][1]
x3, y3 = points_sorted[2][0], points_sorted[2][1]
x4, y4 = points_sorted[3][0], points_sorted[3][1]
x1x2 = ((x1-x3)**2+(y1-y3)**2)**0.5
x2x3 = ((x2-x3)**2+(y2-y3)**2)**0.5
ang12 = get_dy2yaw([x1-x2,y1-y2])
ang32 = get_dy2yaw([x3-x2,y3-y2])
w = x1x2
l = x2x3
if x1x2 < x2x3:
w = x2x3
l = x1x2
yaw = ang32
if abs(center[1])<2 and (w<1 or l<3):
temp = w
w = l
l = temp
            # Per-edge point counts (line1_sorted/line2_sorted) are not defined
            # in this version, so fall back to comparing the measured edge lengths.
            if x1x2 >= x2x3:
                yaw = ang12
            else: yaw = ang32
if yaw > pi/4: yaw = yaw - pi/2
ang1 = ang12*180/pi
ang2 = ang32*180/pi
    # if -> Car
    # else -> Not Car but cluster
    flag = True
    #if(62<abs(ang1-ang2)<131.2): flag = True
    if (50 < abs(ang1-ang2) < 131.2): pass
    else: flag = False
    #return None, None, None
    # Rotate by 90 degrees for plotting. The per-edge point sets and 'point'
    # are not defined in this version, so plot the sorted corners and anchor
    # the heading arrow at the cluster center instead.
    rot90 = np.array([[0, -1], [1, 0]])
    points_plot = (rot90 @ points_sorted.T).T
    center_plot = (rot90 @ np.asarray(center).T).T
    # plt.figure()
    plt.plot(points_plot[:, 0], points_plot[:, 1], 'bo', markersize=0.8)
    x, y, u, v = center[0], center[1], cos(yaw), sin(yaw)
    [x, y] = (rot90 @ np.asarray([x, y]).T).T
    [u, v] = (rot90 @ np.asarray([u, v]).T).T
    plt.quiver(x, y, u, v, scale=2, scale_units='inches', color='red')
# plt.show()
return [center[0], center[1], yaw], [w, l], flag
# return [center[0], center[1], yaw, point], [w, l,h], flag
if __name__ == "__main__":
print("Error.. Why sortCar Module execute")
|
import random
import time
print('Shall we play rock, paper, scissors?')
r2 = str(input('Type rock, paper or scissors (lowercase, NO SPACES): '))
r = ['rock', 'paper', 'scissors']
r1 = random.choice(r)
print('JO')
time.sleep(1)
print('KEN')
time.sleep(1)
print('PO')
if r2 == 'rock' and r1 == 'scissors' or r2 == 'scissors' and r1 == 'paper' or r2 == 'paper' and r1 == 'rock':
    print(f'CONGRATULATIONS, you won! I chose {r1} and you chose {r2}\n{r2} beats {r1}')
elif r2 == r1:
    print(f'We played the same thing, it is a draw: you chose {r2} and so did I')
elif r1 == 'rock' and r2 == 'scissors' or r1 == 'scissors' and r2 == 'paper' or r1 == 'paper' and r2 == 'rock':
    print(f'I won! You chose {r2} and I chose {r1}\n{r1} beats {r2}, so I, the machine, beat you xD')
else:
    print('FATAL ERROR, TRY AGAIN')
|
# Create a set called my_fav_numbers with your favorites numbers.
# Add two new numbers to it.
# Remove the last one.
# Create a set called friend_fav_numbers with your friend’s favorites numbers.
# Concatenate my_fav_numbers and friend_fav_numbers to our_fav_numbers.
#task1
# my_fav_numbers = set([9, 3])
# my_fav_numbers.add(8)
# my_fav_numbers.add(4)
# my_fav_numbers.discard(4)  # remove the number we added last
# friend_fav_numbers = set([3, 1])
# our_fav_numbers = my_fav_numbers.union(friend_fav_numbers)
# print(our_fav_numbers)
#task2
#Given a tuple with integers, is it possible to add more integers to the tuple?
#No, because tuples are immutable; we could only build a new tuple, and the original would stay unchanged.
#task3
# fruits = ['apple', 'banana', 'kiwi', 'pear']
# for fruit in fruits:
# print(fruit)
# x = range(21)
# for n in x:
# print(n)
#another way easier
# for i in range(1, 21,):
# print(i)
#task4
# import decimal
# def float_range(start, stop, step):
# while start < stop:
# yield float(start)
# start += decimal.Decimal(step)
# print(list(float_range(0, 20.5, '0.5')))
#need to ask for help
#task5
# Consider this list basket = ["Banana", "Apples", "Oranges", "Blueberries"];
# Remove “Banana” from the list.
# Remove “Blueberries” from the list.
# Put “Kiwi” at the end of the list.
# Add “Apples” at the beginning of the list.
# Count how many apples are in the basket.
# Empty the basket.
# basket = ["Banana", "Apples", "Oranges", "Blueberries"];
# basket.remove("Banana")
# # print(basket)
# addbasket = ["Kiwi"]
# sumbaseket= basket + addbasket
# # print(sumbaseket)
# # print(len(sumbaseket))
# # print(sumbaseket.count("Apples"))
# # sumbaseket.clear
# # print(sumbaseket)
# print(sumbaseket)
# sumbaseket.clear()
# print(sumbaseket)
# task6
# Write a while loop that will keep asking the user for input until the input is the same as your name.
# Name = ''
# while Name != 'Rob':
# Name = input('Guess my name ? ')
# print('You guessed the right name!')
#task7
# Given a list, use a while loop to print out every element which has an even index.
# num=0
# basket = ["Banana", "Apples", "Oranges", "Blueberries"]
# while(num < len(basket)):
# # checking condition
# if num % 2 == 0:
# print(basket[num], end = " ")
# # increment num
# num += 1
# #task8Make a list of the multiples of 3 from 3 to 30. Use a for loop to print the numbers in your list.
# l = [i for i in range(3, 31) if i % 3 == 0]
# print(l)
# for i in range(3, 31,3):
# print(i)
#task9
#Use a for loop to find numbers between 1500 and 2700, which are divisible by 7 and multiples of 5.
# result=[]
# for x in range(1500, 2701):
# if (x%7==0) and (x%5==0):
# result.append(str(x))
# print (','.join(result))
#task10
# Ask the user to type in his/her favorite fruit(s) (one or several fruits).
# Hint : Use the input built in method. Ask the user to separate the fruits with a single space, eg. "apple mango cherry".
# Store the favorite fruit(s) in a list. (How can we ‘convert’ a string of words into a list of words?).
# # Now that we have a list of fruits, ask the user to type in the name of any fruit.
# # If the user’s input is a fruit name existing in the list above, print “You chose one of your favorite fruits! Enjoy!”.
# # If the user’s input is NOT a fruit name existing in the list above, print, “You chose a new fruit. I hope you enjoy it too!”.
# # Bonus: Display the list in a user friendly way : add the word “and” before the last fruit in the list – but only if there are more than 1 favorites!
# Fav = input('type your favorite fruits and separate with space?')
# def Convert(string):
# Fav = list(string.split(" "))
# return Fav
# favlist=(Fav.split(" "))
# print (type(favlist))
# # # Given string
# # print("Given string", Fav)
# # print(type(Fav))
# # # String to list
# # res = Fav.strip('][').split(', ')
# # # Result and its type
# # print("final list", res)
# # print(type(res))
# input1=""
# while input1 != favlist:
# favlist = input("type in any fruit")
# print("you guessed my fav fruit")
#code is broken grrrrr
# Exercise 11: Who Ordered A Pizza ?
# Write a loop that prompts the user to enter a series of pizza toppings until they enter a ‘quit’ value.
# As they enter each topping, print a message saying you’ll add that topping to their pizza.
# Upon exit print all the toppings on the pizza and what the total is (10 + 2.5 for each topping)
# pizza=[]
# pizzaone=[]
# while pizza != "quit":
# pizza=input("topping?")
# pizzaone.append(pizza)
# print( pizza )
# print(len(pizzaone)*2.5+10)
|
# Copyright (c) 2017-2023 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Logging utilities used in `dazl`.
This is an internal API and not meant to be used directly outside of dazl; symbols declared in this
file may change at any time.
"""
from __future__ import annotations
from functools import partial
import logging
import sys
from time import time
from typing import Type, cast
__all__ = ["LOG", "VERBOSE", "configure"]
# A custom logging level below "DEBUG". This is used within dazl for messages that are rarely
# important enough to be printed, even within tests, but still occasionally have value when
# debugging particular blocks of code.
VERBOSE = 5
logging.addLevelName(VERBOSE, "VERBOSE")
# If a custom logger type was set before dazl was loaded, respect it by using it as our logger's
# base class.
# noinspection PyTypeChecker
Logger = logging.getLoggerClass() # type: Type[logging.Logger]
class ExtendedLogger(Logger): # type: ignore
"""
A logger with additional utility logging functions used within dazl.
"""
def verbose(self, msg, *args, **kwargs):
"""
Log a message with level ``VERBOSE`` on this logger.
"""
self.log(VERBOSE, msg, *args, **kwargs)
def verbose_timed(self, msg, *args, **kwargs) -> "TimedLogMessageContext":
"""
Log a message with level ``VERBOSE`` on the logger, additionally annotating the log message
with the time it took to complete the block.
"""
return TimedLogMessageContext(self, VERBOSE, msg, args, kwargs)
def debug_timed(self, msg, *args, **kwargs) -> "TimedLogMessageContext":
"""
Log a message with level ``DEBUG`` on the logger, additionally annotating the log message
with the time it took to complete the block.
"""
return TimedLogMessageContext(self, logging.DEBUG, msg, args, kwargs)
def info_timed(self, msg, *args, **kwargs) -> "TimedLogMessageContext":
"""
Log a message with level ``INFO`` on the logger, additionally annotating the log message
with the time it took to complete the block.
"""
return TimedLogMessageContext(self, logging.INFO, msg, args, kwargs)
# Create our Logger with our special type that has all of our goodies.
logging.setLoggerClass(ExtendedLogger)
LOG = cast(ExtendedLogger, logging.getLogger("dazl"))
# There is a chance that someone instantiated a logger using our name already. If that's the case,
# then it is too late to specify the actual class instance used and `LOG` will not have the
# functions declared on it that we expect. So make sure that we specifically add methods to _our_
# logger.
#
# An alternate implementation would have been to simply assign our custom logging functions directly
# to an instance logger without playing games with `logger.setLoggerClass`; but we don't, because
# mypy/IDEs won't understand what is going on; we'd be using "reflection" instead of giving
# mypy/IDEs a simple class (ExtendedLogger) to key off of.
for field in dir(ExtendedLogger):
if not field.startswith("__") and not hasattr(LOG, field):
setattr(LOG, field, partial(getattr(ExtendedLogger, field), LOG))
# This is essentially a runtime assertion that our logging functions have been defined properly.
LOG.verbose("dazl logging has been loaded.")
# Restore the original Logger type.
logging.setLoggerClass(Logger)
class TimedLogMessageContext:
"""
A simple ContextManager that logs a message at the specified logging level, additionally with
timing information. If an exception is thrown in the block, that exception is instead logged.
"""
__slots__ = "logger", "log_level", "msg", "args", "kwargs", "start"
def __init__(self, logger, log_level, msg, args, kwargs):
self.logger = logger
self.log_level = log_level
self.msg = msg
self.args = args
self.kwargs = kwargs
self.start = None
def __enter__(self):
self.start = time()
def __exit__(self, exc_type, exc_val, exc_tb):
elapsed_ms = (time() - self.start) * 1000.0
if exc_type is not None:
self.logger.exception(
self.msg + " (%0.2f ms)",
*self.args,
elapsed_ms,
exc_info=(exc_type, exc_val, exc_tb),
**self.kwargs,
)
else:
self.logger.log(
self.log_level, self.msg + " (%0.2f ms)", *self.args, elapsed_ms, **self.kwargs
)
did_configure = False
def configure(level=logging.INFO):
"""
Set up a default logger format and stream handler with sensible defaults.
"""
root = logging.getLogger()
global did_configure
if not did_configure:
did_configure = True
else:
root.warning("configure being called more than once!")
return
logging.captureWarnings(True)
root.setLevel(level)
stream_handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter("[%(levelname)7s] %(asctime)s | %(name)-7s | %(message)s")
stream_handler.setFormatter(formatter)
root.addHandler(stream_handler)
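
if __name__ == "__main__":
    # Quick demonstration (a sketch) of the timed-logging context managers.
    configure(VERBOSE)
    with LOG.info_timed("demo block"):
        pass  # elapsed milliseconds are appended to the log message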
|
import unittest
import os
import json
from utils.text import Text
TEST_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_FILES_PATH = os.path.join(TEST_PATH, '../test_source')
TMP_PATH = os.path.join(TEST_PATH, '../tmp_test')
class TextTestCase(unittest.TestCase):
def setUp(self):
self.test_intervention_files = [['c3d9d2a15a76a9fbb591_sentences.txt',
'c3d9d2a15a76a9fbb591_decode.json',
'c3d9d2a15a76a9fbb591_align.json'],
['d96ee006b62213506a07_sentences.txt',
'd96ee006b62213506a07_decode.json',
'd96ee006b62213506a07_align.json'],
['28fd6d0874eecbfdff35_sentences.txt',
'28fd6d0874eecbfdff35_decode.json',
'28fd6d0874eecbfdff35_align.json']]
if not os.path.exists(TMP_PATH):
os.mkdir(TMP_PATH)
    def tearDown(self):
        # Remove temporary outputs synchronously; os.popen('rm ...') returns
        # before the deletion is guaranteed to have happened.
        for name in os.listdir(TMP_PATH):
            os.remove(os.path.join(TMP_PATH, name))
def test_text_aligner(self):
for test_intervention in self.test_intervention_files:
sentence_file = os.path.join(TEST_FILES_PATH, test_intervention[0])
decode_file = os.path.join(TEST_FILES_PATH, test_intervention[1])
alignment_file = os.path.join(TMP_PATH, test_intervention[2])
comparison_alignment_file = os.path.join(TEST_FILES_PATH,
test_intervention[2])
with open(sentence_file) as infile:
sentences = [line.strip() for line in infile.readlines()]
with open(decode_file) as infile:
decode = json.load(infile)
with open(comparison_alignment_file) as infile:
self.comparison_alignment = json.load(infile)
# initialize
self.text_ops = Text(sentences, decode, alignment_file)
self.text_ops.align()
alignment = self.text_ops.align_results
self.assertEqual(alignment, self.comparison_alignment)
|
from common import *
#from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import juggle_axes
from mpl_toolkits.axes_grid1 import make_axes_locatable
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib import colors
from matplotlib.ticker import MaxNLocator
tri_corr = True
class LivePlot:
"""
Live plotting functionality.
"""
def __init__(self,stats,key,E=None,P=None,only=False,**kwargs):
"""
Initialize plots.
- only: (possible) fignums to plot.
"""
if isinstance(only,bool):
only=range(99)
        #else: assume only is a list of fignums
HMM = stats.HMM
config = stats.config
m = HMM.f.m
dt = HMM.t.dt
# Store
self.HMM = HMM
self.stats = stats
self.xx = stats.xx ; xx = stats.xx
self.yy = stats.yy ; yy = stats.yy
# Abbreviate
mu = stats.mu
# Set up prompts
self.is_on = True
self.is_paused = False
print('Initializing liveplotting...')
print('Press <Enter> to toggle live plot OFF/ON.')
print('Press <Space> and then <Enter> to pause.')
#ens_props = {} yields rainbow
ens_props = {'color': 0.7*RGBs['w'],'alpha':0.3}
# For periodic functions
ii,wrap = setup_wrapping(m)
#####################
# Correlation plot
#####################
if 2 in only and m<1001:
GS = {'height_ratios':[4, 1],'hspace':0.09,'top':0.95}
fig_C, (ax_C,ax_AC) = freshfig(2, (5,6), nrows=2, gridspec_kw=GS)
win_title(fig_C, "Correlations")
set_figpos('2311')
if m<=1003:
# Get cov matrix
if E is not None:
C = np.cov(E.T, ddof=1)
else:
assert P is not None
C = P.full if isinstance(P,CovMat) else P
C = C.copy()
# Compute corr from cov
std = sqrt(diag(C))
C /= std[:,None]
C /= std[None,:]
# Mask half
                mask = np.zeros_like(C, dtype=bool)
mask [np.tril_indices_from(mask)] = True
if tri_corr:
C = np.ma.masked_where(mask, C)
# Make colormap. Log-transform cmap, but not internally in matplotlib,
# so as to avoid transforming the colorbar too.
cmap = plt.get_cmap('RdBu')
trfm = colors.SymLogNorm(linthresh=0.2,linscale=0.2,vmin=-1, vmax=1)
cmap = cmap(trfm(linspace(-0.6,0.6,cmap.N)))
cmap = colors.ListedColormap(cmap)
#
VM = 1.0 # abs(np.percentile(C,[1,99])).max()
im_C = ax_C.imshow(C,cmap=cmap,vmin=-VM,vmax=VM)
#
cax = ax_C.figure.colorbar(im_C,ax=ax_C,shrink=0.8)
plt.box(False)
ax_C.set_facecolor('w')
ax_C.grid(False)
ax_C.set_title("State correlation matrix:", y=1.07)
ax_C.xaxis.tick_top()
# ax_AC = inset_axes(ax_C,width="30%",height="60%",loc=3)
ACF = circulant_ACF(C)
AAF = circulant_ACF(C,do_abs=True)
line_AC, = ax_AC.plot(arange(m), ACF, label='Correlation')
line_AA, = ax_AC.plot(arange(m), AAF, label='Abs. corr.')
_ = ax_AC.hlines(0,0,m-1,'k','dotted',lw=1)
# Align ax_AC with ax_C
bb_AC = ax_AC.get_position()
bb_C = ax_C.get_position()
ax_AC.set_position([bb_C.x0, bb_AC.y0, bb_C.width, bb_AC.height])
# Tune plot
ax_AC.set_title("Auto-correlation:")
ax_AC.set_ylabel("Mean value")
ax_AC.set_xlabel("Distance (in state indices)")
ax_AC.set_xticklabels([])
ax_AC.set_yticks([0,1] + list(ax_AC.get_yticks()[[0,-1]]))
ax_AC.set_ylim(top=1)
ax_AC.legend(frameon=True,facecolor='w',
bbox_to_anchor=(1, 1), loc='upper left', borderaxespad=0.02)
self.fig_C = fig_C
self.ax_C = ax_C
self.ax_AC = ax_AC
self.im_C = im_C
self.line_AC = line_AC
self.line_AA = line_AA
self.mask = mask
else:
not_available_text(ax_C)
#####################
# Spectral error plot
#####################
fig_S, ax_S = freshfig(4, (4,3))
win_title(fig_S, "Spectral view")
set_figpos('2311')
ax_S.set_xlabel('Sing. value index')
ax_S.set_yscale('log')
ax_S.set_ylim(bottom=1e-5)
#ax_S.set_ylim([1e-3,1e1])
try:
msft = abs(stats.umisf[0])
sprd = stats.svals[0]
except KeyError:
self.do_spectral_error = False
not_available_text(ax_S, "Spectral stats not being computed")
        else:
            if not np.all(np.isfinite(msft)):
                not_available_text(ax_S, "Spectral stats not finite")
                self.do_spectral_error = False
            else:
                self.do_spectral_error = True
if self.do_spectral_error:
self.line_msft, = ax_S.plot(arange(len(msft)),msft,'k',lw=2,label='Error')
self.line_sprd, = ax_S.plot(arange(len(sprd)),sprd,'b',lw=2,label='Spread',alpha=0.9)
ax_S.get_xaxis().set_major_locator(MaxNLocator(integer=True))
ax_S.legend()
self.ax_S = ax_S
#####################
# Diagnostics
#####################
if 1 in only:
GS = {'left':0.125+0.04,'right':0.9+0.04}
self.fig_pulse, (self.ax_RMS, self.ax_Uni) = freshfig(
1,(5,3.5),nrows=2,sharex=True,gridspec_kw=GS)
win_title(self.fig_pulse,"Scalar diagnostics")
set_figpos('2312')
self.has_checked_presence = False
def lin(a,b):
def f(x):
y = a + b*x
return y
return f
def divN():
try:
N = E.shape[0]
def f(x): return x/N
return f
except AttributeError:
pass
# --------------
# RMS
# --------------
d_RMS = {
'rmse' : dict(c='k', label='Error'),
'rmv' : dict(c='b', label='Spread', alpha=0.6),
}
# --------------
# Plain
# --------------
d_Uni = OrderedDict([
('skew' , dict(c='g', label='Skew')),
('kurt' , dict(c='r', label='Kurt')),
('infl' , dict(c='c', label='(Infl-1)*10',transf=lin(-10,10))),
('N_eff' , dict(c='y', label='N_eff/N' ,transf=divN(), step=True)),
('iters' , dict(c='m', label='Iters/2' ,transf=lin(0,.5), step=True)),
('trHK' , dict(c='k', label='HK')),
('resmpl' , dict(c='k', label='Resampl?')),
])
chrono = HMM.t
chrono.pK = estimate_good_plot_length(xx,chrono,mult=80)
chrono.pKObs = int(chrono.pK / chrono.dkObs)
def raise_field_lvl(dct,fld):
dct[fld] = dct['plt'][fld]
del dct['plt'][fld]
def init_axd(ax,dict_of_dicts):
new = {}
for name in dict_of_dicts:
# Make plt settings a sub-dict
d = {'plt':dict_of_dicts[name]}
# Set default lw
if 'lw' not in d['plt']: d['plt']['lw'] = 2
# Extract from plt-dict 'transf' and 'step' fields
try: raise_field_lvl(d,'transf')
except KeyError: d['transf'] = lambda x: x
try: raise_field_lvl(d,'step')
except KeyError: pass
try: stat = getattr(stats,name) # Check if stat is there.
# Fails e.g. if assess(0) before creating stat.
except AttributeError: continue
try: val0 = stat[0] # Check if stat[0] has been written
# Fails e.g. if store_u==False and k_tmp==None (init)
except KeyError: continue
if isinstance(stat,np.ndarray):
if len(stat) != (chrono.KObs+1): raise TypeError(
"Only len=(KObs+1) ndarrays supported " +
"[use FAU_series for len=(K+1)]")
d['data'] = np.full(chrono.pKObs, nan)
tt_ = chrono.ttObs[arange(chrono.pKObs)]
else:
d['data'] = np.full(chrono.pK, nan)
tt_ = chrono.tt [arange(chrono.pK)]
d['data'][0] = d['transf'](val0)
d['h'], = ax.plot(tt_,d['data'],**d['plt'])
new[name] = d
return new
self.d_RMS = init_axd(self.ax_RMS, d_RMS)
self.ax_RMS.set_ylabel('RMS')
self.d_Uni = init_axd(self.ax_Uni, d_Uni)
            self.ax_Uni.set_ylabel('mean of marginal\n' r'$\sigma$-normalized values',
                fontsize='small', labelpad=0)
self.ax_Uni.set_xlabel('time (t)')
#####################
# Weighted histogram
#####################
if 4 in only and E is not None and stats._has_w:
fig_hw, ax_hw = freshfig(4,(6,3), gridspec_kw={'bottom':.15})
win_title(fig_hw,"Weight histogram")
set_figpos('2321')
ax_hw.set_xscale('log')
            ax_hw.set_xlabel('weight [× N]')
ax_hw.set_ylabel('count')
if len(E)<10001:
hist = ax_hw.hist(stats.w[0])[2]
N = len(E)
xticks = 1/N * 10**arange(-4,log10(N)+1)
xtlbls = array(['$10^{'+ str(int(log10(w*N))) + '}$' for w in xticks])
xtlbls[xticks==1/N] = '1'
ax_hw.set_xticks(xticks)
ax_hw.set_xticklabels(xtlbls)
self.fig_hw = fig_hw
self.ax_hw = ax_hw
self.hist = hist
else:
not_available_text(ax_hw,'Not computed (N > threshold)')
#####################
# User-defined state
#####################
if 9 in only and hasattr(HMM,'liveplotting'):
self.fig_custom, self.custom = HMM.liveplotting(stats,key,E,P)
win_title(self.fig_custom,"Custom plot")
set_figpos('2322')
plot_pause(0.01)
self.prev_k = 0
plot_pause(0.01)
def skip_plotting(self):
"""
Poll user for keypresses.
Decide on toggling pause/step/plot:
"""
open_figns = plt.get_fignums()
if open_figns == []:
return True
if self.is_paused:
# If paused
ch = getch()
# Wait for <space> or <enter>
while ch not in [' ','\r']:
ch = getch()
# If <enter>, turn off pause
if '\r' in ch:
self.is_paused = False
key = poll_input() # =None if <space> was pressed above
if key is not None:
if key == '\n':
# If <enter>
self.is_on = not self.is_on # toggle plotting on/off
elif key == ' \n':
# If <space>+<enter>
self.is_on = True # turn on plotting
self.is_paused = not self.is_paused # toggle pause
print("Press <Space> to step. Press <Enter> to resume.")
return not self.is_on
def update(self,key,E=None,P=None,**kwargs):
"""Update liveplots"""
if self.skip_plotting(): return
k,kObs,f_a_u = key
stats = self.stats
mu = stats.mu
m = self.xx.shape[1]
ii,wrap = setup_wrapping(m)
#####################
# Correlation plot
#####################
if hasattr(self, 'fig_C') and plt.fignum_exists(self.fig_C.number):
plt.figure(self.fig_C.number)
if E is not None:
C = np.cov(E,rowvar=False)
else:
assert P is not None
C = P.full if isinstance(P,CovMat) else P
C = C.copy()
std = sqrt(diag(C))
C /= std[:,None]
C /= std[None,:]
if tri_corr:
C = np.ma.masked_where(self.mask, C)
self.im_C.set_data(C)
# Auto-corr function
ACF = circulant_ACF(C)
AAF = circulant_ACF(C,do_abs=True)
self.line_AC.set_ydata(ACF)
self.line_AA.set_ydata(AAF)
plot_pause(0.01)
#####################
# Spectral error plot
#####################
if hasattr(self, 'fig_S') and plt.fignum_exists(self.fig_S.number) and self.do_spectral_error:
plt.figure(self.fig_S.number)
msft = abs(stats.umisf[k])
sprd = stats.svals[k]
self.line_sprd.set_ydata(sprd)
self.line_msft.set_ydata(msft)
update_ylim(msft, self.ax_S)
plot_pause(0.01)
#####################
# Diagnostics
#####################
if hasattr(self,'fig_pulse') and plt.fignum_exists(self.fig_pulse.number):
plt.figure(self.fig_pulse.number)
chrono = self.HMM.t
# Indices with shift
kkU = arange(chrono.pK) + max(0,k-chrono.pK)
ttU = chrono.tt[kkU]
# Indices for Obs-times
kkA = kkU[0] <= chrono.kkObs
kkA &= chrono.kkObs <= kkU[-1]
def update_axd(ax,dict_of_dicts):
ax.set_xlim(ttU[0], ttU[0] + 1.1*(ttU[-1]-ttU[0]))
for name, d in dict_of_dicts.items():
stat = getattr(stats,name)
if isinstance(stat,np.ndarray):
tt_ = chrono.ttObs[kkA]
d['data'] = stat [kkA]
if d.get('step',False):
# Creat "step"-style graph
d['data'] = d['data'].repeat(2)
tt_ = tt_ .repeat(2)
right = tt_[-1] # use ttU[-1] for continuous extrapolation
tt_ = np.hstack([ttU[0], tt_[0:-2], right])
elif stat.dtype == 'bool':
# Creat "impulse"-style graph
tt_ = tt_ .repeat(3)
d['data'] = d['data'].repeat(3)
tt_ [2::3] = nan
d['data'][::3] = False
else:
tt_ = ttU
if stat.store_u:
d['data'] = stat[kkU]
else: # store .u manually
tmp = stat[k]
if self.prev_k not in [k, k-1]:
# Reset display
d['data'][:] = nan
if k >= chrono.pK:
# Rolling display
d['data'] = roll_n_sub(d['data'], tmp, -1)
else:
# Initial display: append
d['data'][k] = tmp
d['data'] = d['transf'](d['data'])
d['h'].set_data(tt_,d['data'])
def rm_absent(ax,dict_of_dicts):
for name in list(dict_of_dicts):
d = dict_of_dicts[name]
if not np.any(np.isfinite(d['data'])):
d['h'].remove()
del dict_of_dicts[name]
if dict_of_dicts:
ax.legend(loc='upper left')
update_axd(self.ax_RMS,self.d_RMS)
update_axd(self.ax_Uni,self.d_Uni)
#if k%(chrono.pK/5) <= 1:
update_ylim([d['data'] for d in self.d_RMS.values()], self.ax_RMS,
bottom=0, cC=0.2,cE=0.9)
update_ylim([d['data'] for d in self.d_Uni.values()], self.ax_Uni,
Max=4, Min=-4, cC=0.3,cE=0.9)
# Check which diagnostics are present
if (not self.has_checked_presence) and (k>=chrono.kkObs[0]):
rm_absent(self.ax_RMS,self.d_RMS)
rm_absent(self.ax_Uni,self.d_Uni)
self.has_checked_presence = True
plot_pause(0.01)
#####################
# Weight histogram
#####################
if kObs and hasattr(self, 'fig_hw') and plt.fignum_exists(self.fig_hw.number):
plt.figure(self.fig_hw.number)
ax_hw = self.ax_hw
_ = [b.remove() for b in self.hist]
w = stats.w[k]
N = len(w)
wmax = w.max()
bins = exp(linspace(log(1e-5/N), log(1), int(N/20)))
counted = w>bins[0]
nC = np.sum(counted)
nn,_,pp = ax_hw.hist(w[counted], bins=bins, color='b')
self.hist = pp
#thresh = '#(w<$10^{'+ str(int(log10(bins[0]*N))) + '}/N$ )'
ax_hw.set_title('N: {:d}. N_eff: {:.4g}. Not shown: {:d}. '.\
format(N, 1/(w@w), N-nC))
update_ylim([nn], ax_hw, cC=True)
plot_pause(0.01)
#####################
# User-defined state
#####################
if hasattr(self,'fig_custom') and plt.fignum_exists(self.fig_custom.number):
plt.figure(self.fig_custom.number)
self.custom(key,E,P)
plot_pause(0.01)
# Trackers
self.prev_k = k
def plot_pause(duration):
"""
plt.pause is not supported by jupyter notebook.
    Provide a fallback that does work.
stackoverflow.com/q/34486642
"""
try:
plt.pause(duration)
    except Exception:
        fig = plt.gcf()
        fig.canvas.draw()
        time.sleep(duration)
def setup_wrapping(m,periodic=True):
"""
Make periodic indices and a corresponding function
(that works for ensemble input).
"""
if periodic:
ii = np.hstack([-0.5, arange(m), m-0.5])
def wrap(E):
midpoint = (E[[0],...] + E[[-1],...])/2
return ccat(midpoint,E,midpoint)
else:
ii = arange(m)
wrap = lambda x: x
return ii, wrap
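# Minimal usage sketch for setup_wrapping (assuming a 1D periodic field of
# length m; 'state' is a hypothetical state vector, not defined here):
#   ii, wrap = setup_wrapping(40)       # ii has length 42: [-0.5, 0..39, 39.5]
#   plt.plot(ii, wrap(state))           # both endpoints take the midpoint value,
#                                       # visually closing the periodic domain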
def adjust_position(ax,adjust_extent=False,**kwargs):
"""
Adjust values (add) to get_position().
kwarg must be one of 'x0','y0','width','height'.
"""
# Load get_position into d
pos = ax.get_position()
d = OrderedDict()
for key in ['x0','y0','width','height']:
d[key] = getattr(pos,key)
# Make adjustments
for key,item in kwargs.items():
d[key] += item
if adjust_extent:
if key=='x0': d['width'] -= item
if key=='y0': d['height'] -= item
# Set
ax.set_position(d.values())
# TODO: rename "xtrma"
def span(xx,axis=None):
a = xx.min(axis)
b = xx.max(axis)
return a, b
def stretch(a,b,factor=1,int=False):
"""
Stretch distance a-b by factor.
Return a,b.
If int: floor(a) and ceil(b)
"""
c = (a+b)/2
a = c + factor*(a-c)
b = c + factor*(b-c)
if int:
a = floor(a)
b = ceil(b)
return a, b
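# Worked example for stretch: stretch(0, 1, factor=2) keeps the midpoint 0.5
# and doubles each half-width, returning (-0.5, 1.5).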
def update_ylim(data,ax,bottom=None,top=None,Min=-1e20,Max=+1e20,cC=0,cE=1):
"""
    Update ylims intelligently, mainly by computing
the low/high percentiles of the data.
- data: iterable of arrays for computing percentiles.
- bottom/top: override values.
- Max/Min: bounds.
    - cE: expansion (widening) rate ∈ [0,1].
Default: 1, which immediately expands to percentile.
- cC: compression (narrowing) rate ∈ [0,1].
Default: 0, which does not allow compression.
Despite being a little involved,
the cost of this subroutine is typically not substantial.
"""
#
def worth_updating(a,b,curr):
# Note: should depend on cC and cE
d = abs(curr[1]-curr[0])
lower = abs(a-curr[0]) > 0.002*d
upper = abs(b-curr[1]) > 0.002*d
return lower and upper
#
current = ax.get_ylim()
# Find "reasonable" limits (by percentiles), looping over data
maxv = minv = -np.inf # init
for d in data:
d = d[np.isfinite(d)]
if len(d):
minv, maxv = np.maximum([minv, maxv], \
array([-1, 1]) * np.percentile(d,[1,99]))
minv *= -1
minv, maxv = stretch(minv,maxv,1.02)
# Pry apart equal values
if np.isclose(minv,maxv):
maxv += 0.5
minv -= 0.5
# Set rate factor as compress or expand factor.
c0 = cC if minv>current[0] else cE
c1 = cC if maxv<current[1] else cE
# Adjust
minv = np.interp(c0, (0,1), (current[0], minv))
maxv = np.interp(c1, (0,1), (current[1], maxv))
# Bounds
maxv = min(Max,maxv)
minv = max(Min,minv)
# Overrides
if top is not None: maxv = top
if bottom is not None: minv = bottom
# Set (if anything's changed)
#if worth_updating(minv,maxv,current):
#ax.set_ylim(minv,maxv)
ax.set_ylim(minv,maxv)
def set_ilim(ax,i,Min=None,Max=None):
"""Set bounds on axis i."""
    if i == 0: ax.set_xlim(Min,Max)
    if i == 1: ax.set_ylim(Min,Max)
    if i == 2: ax.set_zlim(Min,Max)
def fit_lim(data,zoom=1.0):
Min = data.min()
Max = data.max()
lims = round2sigfig([Min, Max])
lims = inflate_ens(lims,1/zoom)
return lims
def estimate_good_plot_length(xx,chrono=None,mult=100):
"""
Estimate good length for plotting stuff
from the time scale of the system.
Provide sensible fall-backs (better if chrono is supplied).
"""
if xx.ndim == 2:
        # If multi-dim, then average over dims (by ravel).
# But for inhomogeneous variables, it is important
# to subtract the mean first!
xx = xx - mean(xx,axis=0)
xx = xx.ravel(order='F')
try:
K = mult * estimate_corr_length(xx)
except ValueError:
K = 0
    if chrono is not None:
t = chrono
K = int(min(max(K, t.dkObs), t.K))
T = round2sigfig(t.tt[K],2) # Could return T; T>tt[-1]
K = find_1st_ind(t.tt >= T)
if K: return K
else: return t.K
else:
K = int(min(max(K, 1), len(xx)))
T = round2sigfig(K,2)
return K
def get_plot_inds(xx,chrono,K=None,T=None,**kwargs):
"""
Def subset of kk for plotting, from one of
- K
- T
- mult * auto-correlation length of xx
"""
t = chrono
if K is None:
if T: K = find_1st_ind(t.tt >= min(T,t.T))
else: K = estimate_good_plot_length(xx,chrono=t,**kwargs)
plot_kk = t.kk[:K+1]
plot_kkObs = t.kkObs[t.kkObs<=K]
return plot_kk, plot_kkObs
def plot_3D_trajectory(stats,dims=0,**kwargs):
"""
Plot 3D phase-space trajectory.
kwargs forwarded to get_plot_inds().
"""
if is_int(dims):
dims = dims + arange(3)
assert len(dims)==3
xx = stats.xx
mu = stats.mu
chrono = stats.HMM.t
kk,kkA = get_plot_inds(xx,chrono,mult=100,**kwargs)
if mu.store_u:
xx = xx[kk]
mu = mu[kk]
T = chrono.tt[kk[-1]]
else:
xx = xx[kkA]
mu = mu.a[:len(kkA)]
T = chrono.tt[kkA[-1]]
plt.figure(14).clf()
set_figpos('3321 mac')
ax3 = plt.subplot(111, projection='3d')
xx = xx.T[dims]
mu = mu.T[dims]
ax3.plot (*xx ,c='k',label='Truth')
ax3.plot (*mu ,c='b',label='DA estim.')
ax3.scatter(*xx[:, 0],c='g',s=40)
ax3.scatter(*xx[:,-1],c='r',s=40)
ax3.set_title('Phase space trajectory up to t={:<5.2f}'.format(T))
ax3.set_xlabel('dim ' + str(dims[0]))
ax3.set_ylabel('dim ' + str(dims[1]))
ax3.set_zlabel('dim ' + str(dims[2]))
ax3.legend(frameon=False)
ax3.set_facecolor('w')
def plot_time_series(stats,**kwargs):
"""
Plot time series of various statistics.
kwargs forwarded to get_plot_inds().
"""
# Figure, axes
fg = plt.figure(12,figsize=(5,3.5))
fg.clf()
set_figpos('1313 mac')
fg, (ax_e,ax_K) = plt.subplots(2,1,sharex=True,num=12)
# Time
chrono = stats.HMM.t
xx = stats.xx
m = xx.shape[1]
dims = equi_spaced_integers(m, min(m, 10))
kk,kkA = get_plot_inds(xx[:,dims],chrono,mult=80,**kwargs)
tt,ttA = chrono.tt[kk], chrono.tt[kkA]
KA = len(kkA)
# Stats
s = stats
if s.mu.store_u:
tt_ = tt
rmse = s.rmse[kk]
rmv = s.rmv [kk]
else:
tt_ = ttA
rmse = s.rmse.a[:KA]
rmv = s.rmv .a[:KA]
trKH = s.trHK [:KA]
skew = s.skew.a[:KA]
kurt = s.kurt.a[:KA]
ax_e.plot( tt_, rmse,'k',lw=2 ,label='Error')
ax_e.fill_between(tt_, rmv ,alpha=0.7,label='Spread')
ax_e.set_ylim(0, 1.1*max(np.percentile(rmse,99), rmv.max()) )
ax_e.set_ylabel('RMS')
ax_e.legend()
ax_K.plot(ttA, trKH,'k',lw=2,label='HK')
ax_K.plot(ttA, skew,'g',lw=2,label='Skew')
ax_K.plot(ttA, kurt,'r',lw=2,label='Kurt')
ax_K.set_xlabel('time (t)')
ax_K.set_ylabel('mean of marginal\n $\sigma$-normalized values',
fontsize='small', labelpad=0)
ax_K.legend()
def plot_hovmoller(xx,chrono=None,**kwargs):
"""
Plot Hovmöller diagram.
kwargs forwarded to get_plot_inds().
"""
#cm = mpl.colors.ListedColormap(sns.color_palette("BrBG", 256)) # RdBu_r
#cm = plt.get_cmap('BrBG')
fig, ax = freshfig(16,(4,3.5))
set_figpos('3311 mac')
m = xx.shape[1]
    if chrono is not None:
        kk,_ = get_plot_inds(xx,chrono,mult=40,**kwargs)
        tt = chrono.tt[kk]
        ax.set_ylabel('Time (t)')
    else:
        pK = estimate_good_plot_length(xx,mult=40)
        kk = arange(pK)
        tt = kk
        ax.set_ylabel('Time indices (k)')
    plt.contourf(arange(m),tt,xx[kk],25)
plt.colorbar()
ax.set_position([0.125, 0.20, 0.62, 0.70])
ax.set_title("Hovmoller diagram (of 'Truth')")
ax.set_xlabel('Dimension index (i)')
add_endpoint_xtick(ax)
def add_endpoint_xtick(ax):
"""Useful when xlim(right) is e.g. 39 (instead of 40)."""
xF = ax.get_xlim()[1]
ticks = ax.get_xticks()
if ticks[-1] > xF:
ticks = ticks[:-1]
ticks = np.append(ticks, xF)
ax.set_xticks(ticks)
def integer_hist(E,N,centrd=False,weights=None,**kwargs):
"""Histogram for integers."""
ax = plt.gca()
rnge = (-0.5,N+0.5) if centrd else (0,N+1)
ax.hist(E,bins=N+1,range=rnge,normed=1,weights=weights,**kwargs)
ax.set_xlim(rnge)
def not_available_text(ax,txt=None,fs=20):
if txt is None: txt = '[Not available]'
else: txt = '[' + txt + ']'
ax.text(0.5,0.5,txt,
fontsize=fs,
transform=ax.transAxes,
va='center',ha='center',
wrap=True)
def plot_err_components(stats):
"""
Plot components of the error.
Note: it was chosen to plot(ii, mean_in_time(abs(err_i))),
and thus the corresponding spread measure is MAD.
If one chose instead: plot(ii, std_in_time(err_i)),
then the corresponding measure of spread would have been std.
This choice was made in part because (wrt. subplot 2)
the singular values (svals) correspond to rotated MADs,
    and because rms(umisf) seems too convoluted for interpretation.
"""
fgE = plt.figure(15,figsize=(6,6)).clf()
set_figpos('1312 mac')
chrono = stats.HMM.t
m = stats.xx.shape[1]
err = mean( abs(stats.err .a) ,0)
sprd = mean( stats.mad .a ,0)
umsft = mean( abs(stats.umisf.a) ,0)
usprd = mean( stats.svals.a ,0)
ax_r = plt.subplot(311)
ax_r.plot( arange(m), err,'k',lw=2, label='Error')
if m<10**3:
ax_r.fill_between(arange(m),[0]*len(sprd),sprd,alpha=0.7,label='Spread')
else:
ax_r.plot( arange(m), sprd,alpha=0.7,label='Spread')
#ax_r.set_yscale('log')
ax_r.set_title('Element-wise error comparison')
ax_r.set_xlabel('Dimension index (i)')
ax_r.set_ylabel('Time-average (_a) magnitude')
ax_r.set_ylim(bottom=mean(sprd)/10)
ax_r.set_xlim(right=m-1); add_endpoint_xtick(ax_r)
ax_r.get_xaxis().set_major_locator(MaxNLocator(integer=True))
plt.subplots_adjust(hspace=0.55) # OR: [0.125,0.6, 0.78, 0.34]
ax_r.legend()
ax_s = plt.subplot(312)
ax_s.set_xlabel('Principal component index')
ax_s.set_ylabel('Time-average (_a) magnitude')
ax_s.set_title('Spectral error comparison')
has_been_computed = np.any(np.isfinite(umsft))
if has_been_computed:
L = len(umsft)
ax_s.plot( arange(L), umsft,'k',lw=2, label='Error')
ax_s.fill_between(arange(L),[0]*L,usprd,alpha=0.7,label='Spread')
ax_s.set_yscale('log')
ax_s.set_ylim(bottom=1e-4*usprd.sum())
ax_s.set_xlim(right=m-1); add_endpoint_xtick(ax_s)
ax_s.get_xaxis().set_major_locator(MaxNLocator(integer=True))
ax_s.legend()
else:
not_available_text(ax_s)
rmse = stats.rmse.a[chrono.maskObs_BI]
ax_R = plt.subplot(313)
ax_R.hist(rmse,bins=30,normed=0)
    ax_R.set_ylabel('Num. of occurrences (_a)')
ax_R.set_xlabel('RMSE')
ax_R.set_title('Histogram of RMSE values')
def plot_rank_histogram(stats):
chrono = stats.HMM.t
has_been_computed = \
hasattr(stats,'rh') and \
not all(stats.rh.a[-1]==array(np.nan).astype(int))
    def are_uniform(w):
        """Test initial & final weights, not intermediate (for speed)."""
        return (w[0]==1/N).all() and (w[-1]==1/N).all()
fg = plt.figure(13,figsize=(6,3)).clf()
set_figpos('3331 mac')
#
ax_H = plt.subplot(111)
ax_H.set_title('(Average of marginal) rank histogram (_a)')
    ax_H.set_ylabel('Freq. of occurrence\n (of truth in interval n)')
ax_H.set_xlabel('ensemble member index (n)')
ax_H.set_position([0.125,0.15, 0.78, 0.75])
if has_been_computed:
w = stats.w.a [chrono.maskObs_BI]
ranks = stats.rh.a[chrono.maskObs_BI]
m = ranks.shape[1]
N = w.shape[1]
if are_uniform(w):
# Ensemble rank histogram
integer_hist(ranks.ravel(),N)
else:
# Experimental: weighted rank histogram.
# Weight ranks by inverse of particle weight. Why? Coz, with correct
# importance weights, the "expected value" histogram is then flat.
# Potential improvement: interpolate weights between particles.
K = len(w)
w = np.hstack([w, ones((K,1))/N]) # define weights for rank N+1
w = array([ w[arange(K),ranks[arange(K),i]] for i in range(m)])
w = w.T.ravel()
w = np.maximum(w, 1/N/100) # Artificial cap. Reduces variance, but introduces bias.
w = 1/w
integer_hist(ranks.ravel(),N,weights=w)
else:
not_available_text(ax_H)
def adjustable_box_or_forced():
"For set_aspect(), adjustable='box-forced' replaced by 'box' since mpl 2.2.0."
from pkg_resources import parse_version as pv
return 'box-forced' if pv(mpl.__version__) < pv("2.2.0") else 'box'
def show_figs(fignums=None):
"""Move all fig windows to top"""
    if fignums is None:
        fignums = plt.get_fignums()
    try:
        fignums = list(fignums)
    except TypeError:
        fignums = [fignums]
for f in fignums:
plt.figure(f)
fmw = plt.get_current_fig_manager().window
fmw.attributes('-topmost',1) # Bring to front, but
fmw.attributes('-topmost',0) # don't keep in front
def win_title(fig, string, num=True):
"Set window title"
if num:
n = fig.number
string += " [" + str(n) + "]"
fig.canvas.set_window_title(string)
def set_figpos(loc):
"""
Place figure on screen, where 'loc' can be either
NW, E, ...
or
4 digits (as str or int) to define grid m,n,i,j.
"""
#Only works with both:
#- Patrick's monitor setup (Dell with Mac central-below)
#- TkAgg backend. (Previously: Qt4Agg)
if not user_is_patrick or mpl.get_backend() != 'TkAgg':
return
fmw = plt.get_current_fig_manager().window
loc = str(loc)
# Qt4Agg only:
# # Current values
# w_now = fmw.width()
# h_now = fmw.height()
# x_now = fmw.x()
# y_now = fmw.y()
# # Constants
# Dell_w = 2560
# Dell_h = 1440
# Mac_w = 2560
# Mac_h = 1600
# # Why is Mac monitor scaled by 1/2 ?
# Mac_w /= 2
# Mac_h /= 2
# Append the string 'mac' to place on mac monitor.
# if 'mac' in loc:
# x0 = Dell_w/4
# y0 = Dell_h+44
# w0 = Mac_w
# h0 = Mac_h-44
# else:
# x0 = 0
# y0 = 0
# w0 = Dell_w
# h0 = Dell_h
# TkAgg
x0 = 0
y0 = 0
w0 = 1280
h0 = 752
# Def place function with offsets
def place(x,y,w,h):
#fmw.setGeometry(x0+x,y0+y,w,h) # For Qt4Agg
geo = str(int(w)) + 'x' + str(int(h)) + \
'+' + str(int(x)) + '+' + str(int(y))
fmw.geometry(newGeometry=geo) # For TkAgg
if not loc[:4].isnumeric():
if loc.startswith('NW'): loc = '2211'
elif loc.startswith('SW'): loc = '2221'
elif loc.startswith('NE'): loc = '2212'
elif loc.startswith('SE'): loc = '2222'
elif loc.startswith('W' ): loc = '1211'
elif loc.startswith('E' ): loc = '1212'
elif loc.startswith('S' ): loc = '2121'
elif loc.startswith('N' ): loc = '2111'
# Place
m,n,i,j = [int(x) for x in loc[:4]]
assert m>=i>0 and n>=j>0
h0 -= (m-1)*25
yoff = 25*(i-1)
if i>1:
yoff += 25
place((j-1)*w0/n, yoff + (i-1)*h0/m, w0/n, h0/m)
# stackoverflow.com/a/7396313
from matplotlib import transforms as mtransforms
def autoscale_based_on(ax, line_handles):
"Autoscale axis based (only) on line_handles."
ax.dataLim = mtransforms.Bbox.unit()
for iL,lh in enumerate(line_handles):
xy = np.vstack(lh.get_data()).T
ax.dataLim.update_from_data_xy(xy, ignore=(iL==0))
ax.autoscale_view()
from matplotlib.widgets import CheckButtons
import textwrap
def toggle_lines(ax=None,autoscl=True,numbering=False,txtwidth=15,txtsize=None,state=None):
"""
Make checkbuttons to toggle visibility of each line in current plot.
autoscl : Rescale axis limits as required by currently visible lines.
numbering: Add numbering to labels.
txtwidth : Wrap labels to this length.
State of checkboxes can be inquired by
OnOff = [lh.get_visible() for lh in ax.findobj(lambda x: isinstance(x,mpl.lines.Line2D))[::2]]
"""
if ax is None: ax = plt.gca()
if txtsize is None: txtsize = mpl.rcParams['font.size']
# Get lines and their properties
lines = {'handle': list(ax.get_lines())}
for p in ['label','color','visible']:
lines[p] = [plt.getp(x,p) for x in lines['handle']]
# Put into pandas for some reason
lines = pd.DataFrame(lines)
# Rm those that start with _
lines = lines[~lines.label.str.startswith('_')]
# Adjust labels
if numbering: lines['label'] = [str(i)+': '+lbl for i,lbl in enumerate(lines['label'])]
if txtwidth: lines['label'] = [textwrap.fill(lbl,width=txtwidth) for lbl in lines['label']]
# Set state. BUGGY? sometimes causes MPL complaints after clicking boxes
if state is not None:
state = array(state).astype(bool)
lines.visible = state
for i,x in enumerate(state):
lines['handle'][i].set_visible(x)
# Setup buttons
# When there's many, the box-sizing is awful, but difficult to fix.
W = 0.23 * txtwidth/15 * txtsize/10
N = len(lines)
nBreaks = sum(lbl.count('\n') for lbl in lines['label']) # count linebreaks
H = min(1,0.05*(N+nBreaks))
plt.subplots_adjust(left=W+0.12,right=0.97)
rax = plt.axes([0.05, 0.5-H/2, W, H])
check = CheckButtons(rax, lines.label, lines.visible)
# Adjust button style
for i in range(N):
check.rectangles[i].set(lw=0,facecolor=lines.color[i])
check.labels[i].set(color=lines.color[i])
if txtsize: check.labels[i].set(size=txtsize)
# Callback
def toggle_visible(label):
ind = lines.label==label
handle = lines[ind].handle.item()
vs = not lines[ind].visible.item()
handle.set_visible( vs )
lines.loc[ind,'visible'] = vs
if autoscl:
autoscale_based_on(ax,lines[lines.visible].handle)
plt.draw()
check.on_clicked(toggle_visible)
# Return focus
plt.sca(ax)
# Must return (and be received) so as not to expire.
return check
@vectorize0
def toggle_viz(h,prompt=False,legend=False):
"""Toggle visibility of the graphics with handle h."""
# Core functionality: turn on/off
is_viz = not h.get_visible()
h.set_visible(is_viz)
if prompt:
input("Press <Enter> to continue...")
# Legend updating. Basic version: works by
# - setting line's label to actual_label/'_nolegend_' if is_viz/not
# - re-calling legend()
if legend:
if is_viz:
try:
h.set_label(h.actual_label)
except AttributeError:
pass
else:
h.actual_label = h.get_label()
h.set_label('_nolegend_')
# Legend refresh
ax = h.axes
with warnings.catch_warnings():
warnings.simplefilter("error",category=UserWarning)
try:
ax.legend()
except UserWarning:
# If all labels are '_nolabel_' then ax.legend() throws warning,
# and quits before refreshing. => Refresh by creating/rm another legend.
ax.legend('TMP').remove()
plt.pause(0.02)
return is_viz
def freshfig(num=None,figsize=None,*args,**kwargs):
"""
- If the figure does not exist: create figure it.
This allows for figure sizing -- even on Macs.
- Otherwise: clear figure (we avoid closing/opening so as
to keep (potentially manually set) figure positioning.
- The rest is the same as:
>>> fig, ax = suplots()
"""
fig = plt.figure(num=num,figsize=figsize)
fig.clf()
_, ax = plt.subplots(num=fig.number,*args,**kwargs)
return fig, ax
def savefig_n(f=None):
"""
Simplify the exporting of a figure, especially when it's part of a series.
"""
    assert savefig_n.index>=0, "Initialize using savefig_n.index = 1 in your script"
if f is None:
f = inspect.getfile(inspect.stack()[1][0]) # Get __file__ of caller
f = save_dir(f) # Prep save dir
f = f + str(savefig_n.index) + '.pdf' # Compose name
print("Saving fig to:",f) # Print
plt.savefig(f) # Save
savefig_n.index += 1 # Increment index
plt.pause(0.1) # For safety?
savefig_n.index = -1
from matplotlib.gridspec import GridSpec
def axes_with_marginals(n_joint, n_marg,**kwargs):
"""
Create a joint axis along with two marginal axes.
Example:
>>> ax_s, ax_x, ax_y = axes_with_marginals(4, 1)
>>> x, y = np.random.randn(2,500)
>>> ax_s.scatter(x,y)
>>> ax_x.hist(x)
>>> ax_y.hist(y,orientation="horizontal")
"""
N = n_joint + n_marg
# Method 1
#fig, ((ax_s, ax_y), (ax_x, _)) = plt.subplots(2,2,num=plt.gcf().number,
#sharex='col',sharey='row',gridspec_kw={
#'height_ratios':[n_joint,n_marg],
#'width_ratios' :[n_joint,n_marg]})
#_.set_visible(False) # Actually removing would bug the axis ticks etc.
# Method 2
gs = GridSpec(N,N,**kwargs)
fig = plt.gcf()
ax_s = fig.add_subplot(gs[n_marg:N ,0 :n_joint])
ax_x = fig.add_subplot(gs[0 :n_marg,0 :n_joint],sharex=ax_s)
ax_y = fig.add_subplot(gs[n_marg:N ,n_joint:N ],sharey=ax_s)
# Cannot delete ticks coz axis are shared
plt.setp(ax_x.get_xticklabels(), visible=False)
plt.setp(ax_y.get_yticklabels(), visible=False)
return ax_s, ax_x, ax_y
from matplotlib.patches import Ellipse
def cov_ellipse(ax, mu, sigma, **kwargs):
"""
    Draw ellipse corresponding to the (Gaussian) 1-sigma contour of a cov matrix.
Inspired by stackoverflow.com/q/17952171
Example:
>>> ellipse = cov_ellipse(ax, y, R,
>>> facecolor='none', edgecolor='y',lw=4,label='$1\\sigma$')
"""
# Cov --> Width, Height, Theta
vals, vecs = np.linalg.eigh(sigma)
x, y = vecs[:, -1] # x-y components of largest (last) eigenvector
theta = np.degrees(np.arctan2(y, x))
theta = theta % 180
h, w = 2 * np.sqrt(vals.clip(0))
# Get artist
e = Ellipse(mu, w, h, theta, **kwargs)
ax.add_patch(e)
e.set_clip_box(ax.bbox) # why is this necessary?
# Return artist
return e
|
#!/usr/bin/env python
import os
import sys
import time
import pyrax
pyrax.set_setting("identity_type", "rackspace")
creds_file = os.path.expanduser("~/.rackspace_cloud_credentials")
pyrax.set_credential_file(creds_file)
# Prints out all your current servers and their statuses
# Note: lists servers in the DFW and ORD regions
print "Here is a list of your current servers and their status"
csDfw = pyrax.connect_to_cloudservers(region="DFW")
csOrd = pyrax.connect_to_cloudservers(region="ORD")
for dfwServer in csDfw.servers.list():
print "Name:",dfwServer.name
print " ID:",dfwServer.id
print " Region: DFW"
print " Status:",dfwServer.status
for ordServer in csOrd.servers.list():
print "Name:",ordServer.name
print " ID:",ordServer.id
print " Region: ORD"
print " Status:", ordServer.status
print
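# The two region loops above are identical; a sketch of a tighter equivalent,
# assuming only these two regions are of interest:
# for region in ("DFW", "ORD"):
#     cs = pyrax.connect_to_cloudservers(region=region)
#     for server in cs.servers.list():
#         print "Name:", server.name
#         print " ID:", server.id
#         print " Region:", region
#         print " Status:", server.status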
|
# -*- python -*-
from math import trunc
def rSigma( n ):
    # Truncate so float inputs (e.g. 2.5) contribute only their integer part
n = trunc( n )
if n <= 0:
return( 0 )
elif n == 1:
return( 1 )
else:
return( n + rSigma( n - 1 ) )
# Testing
print "rSigma( 5 )", " =", rSigma( 5 ), "## Expect =", 15
print "rSigma( 2.5 )", " =", rSigma( 2.5 ), "## Expect =", 3
print "rSigma( -1 )", " =", rSigma( -1 ), "## Expect =", 0
|
#!/usr/bin/env python
"""
npy_asyncio.py
----------------
TODO: integrate with external runloop/REPL,
eg receiving an array whilst in a live ipython session
* https://ipython.readthedocs.io/en/stable/interactive/autoawait.html
https://docs.python.org/3/library/asyncio-stream.html
Async marks a function that may be interrupted; await is required to call
async functions (aka coroutines) and marks a point where the task can be switched.
* https://blog.jupyter.org/ipython-7-0-async-repl-a35ce050f7f7
::
loop = asyncio.get_event_loop()
loop.run_until_complete(child(10))
https://docs.python.org/3/library/asyncio-dev.html#asyncio-multithreading
https://docs.python.org/3/library/asyncio-dev.html#concurrency-and-multithreading
https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.loop.run_in_executor
Similar to first example from:
* https://www.oreilly.com/library/view/using-asyncio-in/9781492075325/ch04.html
"""
import os, sys, logging, asyncio, numpy as np
from opticks.bin.npy import serialize_with_header, HEADER_BYTES, unpack_prefix
from opticks.bin.npy import npy_deserialize, meta_deserialize
log = logging.getLogger(__name__)
gethost = lambda:os.environ.get("TCP_HOST", "127.0.0.1" )
getport = lambda:int(os.environ.get("TCP_PORT", "15006" ))
getdump = lambda:os.environ.get("DUMP","0")
async def npy_write( writer, arr, meta ):
log.info(f'npy_write:{arr.shape!r}')
if getdump() == "1":
print(arr.shape)
print(arr)
print(meta)
pass
writer.write(serialize_with_header(arr, meta))
await writer.drain()
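# Framing (as implied by unpack_prefix below): each message is a fixed-size
# prefix of HEADER_BYTES holding four sizes (hdr_bytes, arr_bytes, meta_bytes, 0),
# followed by hdr_bytes+arr_bytes of .npy payload, then meta_bytes of metadata.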
async def npy_read( reader):
prefix = await reader.readexactly(HEADER_BYTES)
sizes = unpack_prefix(prefix)
log.info(f"npy_read received prefix {sizes!r}")
hdr_bytes, arr_bytes, meta_bytes, zero = sizes
assert zero == 0
arr_data = await reader.readexactly(hdr_bytes+arr_bytes)
arr = npy_deserialize(arr_data)
meta_data = await reader.readexactly(meta_bytes)
meta = meta_deserialize(meta_data)
if getdump() == "1":
print(arr.shape)
print(arr)
print(meta)
pass
return arr, meta
async def npy_client(arr, meta):
reader, writer = await asyncio.open_connection(gethost(),getport())
await npy_write( writer, arr, meta )
    arr, meta = await npy_read(reader)
log.info('npy_client : close connection')
writer.close()
await writer.wait_closed()
async def handle_npy(reader, writer):
addr = writer.get_extra_info('peername')
log.info(f"handle_npy : received from peer {addr!r}")
    arr, meta = await npy_read(reader)
meta["src"] = sys.argv[0]
arr += 42
await npy_write( writer, arr, meta )
log.info("handle_npy : close connection")
writer.close()
async def npy_server():
server = await asyncio.start_server(handle_npy, gethost(), getport())
addr = server.sockets[0].getsockname()
log.info(f'npy_server : serving on {addr}')
async with server:
await server.serve_forever()
pass
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
if len(sys.argv) > 1 and sys.argv[1] == "s":
asyncio.run(npy_server())
else:
arr = np.zeros((1,6,4), dtype=np.float32)
meta = dict(src=sys.argv[0])
asyncio.run(npy_client(arr, meta))
pass
|
#http://live.amasupercross.com/xml/sx/RaceResults.json?
import urllib2
import twitter
import time
import pandas as pd
import itertools
import random
import config
import json
import re
from xml.etree import cElementTree as ET
import os
import sys
import helpers
top_x_dict = {0 : 10, 10 : 5, 15 : 3}
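# Presumably consumed by helpers.get_top_x: keys are lap thresholds, values are
# how many leading riders get detailed gap info once that lap is reached.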
positions_to_tweet = 22
max_nr_attempts = 190
sleep_time = random.randint(12,16)
exit_count = 0
last_lap = "0"
race_complete = False
race_strings = ['MAIN','HEAT', 'LCQ', 'SEMI', 'LAST CHANCE QUALIFIER', 'MOTO'] #Last Chance Qualifier
url = "http://americanmotocrosslive.com/xml/mx/RaceResultsWeb.xml?"
info_url = "http://americanmotocrosslive.com/xml/mx/Announcements.json"
race_info_url = 'http://americanmotocrosslive.com/xml/sx/RaceData.json?'
points = { 1 : 25, 2 : 22, 3 : 20, 4 : 18, 5 : 16, 6 : 15, 7 : 14, 8 : 13, 9 : 12, 10 : 11,
11 : 10, 12 : 9, 13 : 8, 14 : 7, 15 : 6, 16 : 5, 17 : 4, 18 : 3, 19 : 2, 20 : 1}
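# Overall (OA) standing after moto 2 = moto-1 points + moto-2 points, with ties
# broken by the better moto-2 finish (see the sort in get_OA_tweet below).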
base_dir = os.path.dirname(os.path.abspath(__file__))
tweet_names_on_laps = [1,5,10,15,20]
def get_OA_tweet(riders, class_name):
top_x = len(riders)
d = get_moto_one(class_name)
df = pd.DataFrame(columns=('riderNum', 'Points', 'MotoTwoPos'))
for x in range(top_x):
try:
motoOnePoints = int(d[riders[x].attrib['N']])
except:
motoOnePoints = 0
pass
motoTwoPoints = points.get(x+1,0)
df.loc[x] = [riders[x].attrib['N'], (motoOnePoints + motoTwoPoints), (x + 1)]
df = df.sort_values(by=['Points','MotoTwoPos'], ascending=[False,True])
df = df.reset_index()
oaTweet = 'OA '
for x in range(10):
        oaTweet += '(' + str((x + 1)) + ')' + df.loc[x].riderNum + '__'
return oaTweet[:len(oaTweet) - 2]
def get_moto_one(class_name):
d = {}
#filePath = "home/haffner/lap/" + class_name
file_path = os.path.join(base_dir, class_name)
with open(file_path) as f:
for line in f:
(key, val) = line.split(',')
d[key] = val
return d
def savetop_x(riders, class_name):
top_x = len(riders)
file_path = os.path.join(base_dir, class_name)
    # Use a context manager: avoids shadowing the 'file' builtin and
    # guarantees the handle is closed.
    with open(file_path, "w") as f:
        for x in range(top_x):
            f.write(riders[x].attrib['N'] + ',' + str(points.get(x+1,0)) + '\n')
def getlapTimes():
tweets = []
global last_lap
global positions_to_tweet
global race_complete
try:
data = urllib2.urlopen(info_url, timeout=3)
except urllib2.URLError, e:
return e, None
try:
event_header = json.loads(data.read())
class_name, event_name, event_number = helpers.get_event_info(event_header["S"].upper(),
race_strings)
if not event_name: #checking to see if we care about this event
return "Not Ready", None
except Exception as e:
return e, None
try:
race_data = urllib2.urlopen(url, timeout=3)
except urllib2.URLError, e:
return e, None
try:
tree = ET.parse(race_data)
root = tree.getroot()
riders = root.findall("./B")
#lap = str(riders[0].attrib['L']).strip()
length_of_announcements = len(event_header["B"])
        last_announcement = event_header["B"][length_of_announcements-1]["M"]
        if last_announcement.find("Session Complete") > -1 and not race_complete:
race_complete = True
tweet = "Checkers "
tweets.append(helpers.get_ro_tweet(tweet, riders, positions_to_tweet,3))
tweets.append(tweet + helpers.get_ro(riders, "F", 10))
#if this is moto 1 save the results
if int(event_number) == 1:
savetop_x(riders, class_name)
else:
tweets.append(tweet + get_OA_tweet(riders, class_name))
return 'OK', tweets
        elif last_announcement.find("Session Complete") > -1:
return "Not Ready", None
else:
lap = str(riders[0].attrib['L']).strip()
tweet = 'L' + lap + ' '
if race_complete: race_complete = False
#are we still on the same lap the last time we tweeted
if lap == last_lap:
return "Not Ready", None
#Have we completed a lap yet
gapTest = riders[1].attrib['G']
            if gapTest in ('--.---', '00.000', '-.---', '0.000') or lap == '0':
return 'Not Ready', None
#riders that are currently on the lead lap
ridersOnLeadlap = list(itertools.takewhile(lambda x: x.attrib['G'].find('ap') == -1, riders))
#get how many 'spaced' riders will be tweeted based on the current lap number
top_x = helpers.get_top_x(int(lap))
#Check to see if we have enough riders on the same lead lap to tweet
if len(ridersOnLeadlap) < top_x:
return 'Not Ready', None
            # store lap times for analysis
helpers.store_lap_results(riders, lap, class_name, event_name, event_number)
# Get the time left in race
rd = urllib2.urlopen(race_info_url)
d = json.loads(rd.read())
if d['T']:
time_left = d['T'][3:]
tweet = tweet + time_left + ' '
tweet = helpers.get_ro_tweet(tweet, riders, positions_to_tweet, top_x)
tweets.append(tweet)
if int(lap) in tweet_names_on_laps:
tweets.append('L' + lap + ' ' + helpers.get_ro(riders, "F", 10))
if int(event_number) == 2:
tweets.append('L' + lap + ' ' + get_OA_tweet(riders, class_name))
last_lap = lap
return 'OK', tweets
except Exception as e:
return e, None
if __name__ == '__main__':
if len(sys.argv) < 2:
tweet_this = False
else:
if sys.argv[1].upper() == 'TWEET':
tweet_this = True
else:
tweet_this = False
if tweet_this == True:
        #the necessary twitter authentication
my_auth = twitter.OAuth(config.twitter["token"], config.twitter["token_secret"], config.twitter["consumer_key"], config.twitter["consumer_secret"])
twit = twitter.Twitter(auth=my_auth)
while True:
print "Trying..."
status, tweets = getlapTimes()
#exit()
if status == 'OK':
exit_count = 0
print tweets
if tweet_this == True:
for tweet in tweets:
print 'tweeting - ' + tweet
# Shorten the lap tweet if needed.
if len(tweet) > 140:
tweet = tweet[:137] + '...'
twit.statuses.update(status=tweet[:140]) #lap Times Tweet
else:
exit_count = exit_count + 1
print 'Exit Count is ' + str(exit_count) + ' out of ' + str(max_nr_attempts)
#exit_count keeps track of the number of times that getlapTimes() returns 'Not Ready'
#This will stop the script when it exceeds max_nr_attempts
if exit_count > max_nr_attempts:
exit()
print status
sleep_time = random.randint(12,16)
print 'Sleeping for ' + str(sleep_time) + ' seconds'
time.sleep(sleep_time) #puts the app to sleep for a predetermined amount of time
|
# Copyright (c) 2018 Amdocs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import sys
import traceback
import logging
import urllib2
import uuid
import httplib2
import json
from multicloud_azure.pub.config.config import AAI_SCHEMA_VERSION
from multicloud_azure.pub.config.config import AAI_SERVICE_URL
from multicloud_azure.pub.config.config import AAI_USERNAME
from multicloud_azure.pub.config.config import AAI_PASSWORD
from multicloud_azure.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
from multicloud_azure.pub.config.config import ARIA_SERVER_URL
from multicloud_azure.pub.exceptions import VimDriverAzureException
rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
HTTP_200_OK, HTTP_201_CREATED = '200', '201'
HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '204', '202'
status_ok_list = [HTTP_200_OK, HTTP_201_CREATED,
HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN = '404', '403'
HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '401', '400'
logger = logging.getLogger(__name__)
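# call_req returns a 4-item list [ret_code, body, http_status, resp]:
# ret_code 0 = HTTP status in status_ok_list, 1 = other HTTP status,
# 2 = urllib2.URLError, 3 = any other exception.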
def call_req(base_url, user, passwd, auth_type, resource, method, content='',
headers=None):
callid = str(uuid.uuid1())
# logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
# callid, base_url, user, passwd, auth_type, resource, method, content))
ret = None
resp_status = ''
resp = ""
full_url = ""
try:
full_url = combine_url(base_url, resource)
if headers is None:
headers = {}
headers['content-type'] = 'application/json'
if user:
            headers['Authorization'] = 'Basic ' + \
                ('%s:%s' % (user, passwd)).encode("base64").strip()  # py2 base64 codec appends '\n'
ca_certs = None
for retry_times in range(3):
http = httplib2.Http(
ca_certs=ca_certs,
disable_ssl_certificate_validation=(
auth_type == rest_no_auth))
http.follow_all_redirects = True
try:
logger.debug("request=%s" % full_url)
resp, resp_content = http.request(
full_url, method=method.upper(), body=content,
headers=headers)
resp_status = resp['status']
resp_body = resp_content.decode('UTF-8')
if resp_status in status_ok_list:
ret = [0, resp_body, resp_status, resp]
else:
ret = [1, resp_body, resp_status, resp]
break
except Exception as ex:
if 'httplib.ResponseNotReady' in str(sys.exc_info()):
logger.error(traceback.format_exc())
ret = [1, "Unable to connect to %s" % full_url,
resp_status, resp]
continue
raise ex
except urllib2.URLError as err:
ret = [2, str(err), resp_status, resp]
except Exception:
logger.error(traceback.format_exc())
logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
res_info = str(sys.exc_info())
if 'httplib.ResponseNotReady' in res_info:
res_info = ("The URL[%s] request failed or is not responding." %
full_url)
ret = [3, res_info, resp_status, resp]
# logger.debug("[%s]ret=%s" % (callid, str(ret)))
return ret
def req_by_msb(resource, method, content=''):
base_url = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT)
return call_req(base_url, "", "", rest_no_auth, resource, method, content)
def combine_url(base_url, resource):
full_url = None
if base_url.endswith('/') and resource.startswith('/'):
full_url = base_url[:-1] + resource
elif base_url.endswith('/') and not resource.startswith('/'):
full_url = base_url + resource
elif not base_url.endswith('/') and resource.startswith('/'):
full_url = base_url + resource
else:
full_url = base_url + '/' + resource
return full_url
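# e.g. combine_url('http://h/', '/r'), combine_url('http://h', 'r'), etc. all
# normalize to exactly one joining slash: 'http://h/r'.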
def get_res_from_aai(resource, content=''):
headers = {
'X-FromAppId': 'MultiCloud',
'X-TransactionId': '9001',
'content-type': 'application/json',
'accept': 'application/json'
}
base_url = "%s/%s" % (AAI_SERVICE_URL, AAI_SCHEMA_VERSION)
return call_req(base_url, AAI_USERNAME, AAI_PASSWORD, rest_no_auth,
resource, "GET", content, headers)
class AAIClient(object):
def __init__(self, cloud_owner, cloud_region):
self.base_url = "%s/%s" % (AAI_SERVICE_URL, AAI_SCHEMA_VERSION)
self.username = AAI_USERNAME
self.password = AAI_PASSWORD
self.default_headers = {
'X-FromAppId': 'multicloud-azure',
'X-TransactionId': '9004',
'content-type': 'application/json',
'accept': 'application/json'
}
self.cloud_owner = cloud_owner
self.cloud_region = cloud_region
self._vim_info = None
def get_vim(self, get_all=False):
resource = ("/cloud-infrastructure/cloud-regions/cloud-region"
"/%s/%s" % (self.cloud_owner, self.cloud_region))
if get_all:
resource = "%s?depth=all" % resource
resp = call_req(self.base_url, self.username, self.password,
rest_no_auth, resource, "GET",
headers=self.default_headers)
if resp[0] != 0:
raise VimDriverAzureException(
status_code=404,
content="Failed to query VIM with id (%s_%s) from extsys." % (
self.cloud_owner, self.cloud_region))
return json.loads(resp[1])
def delete_vim(self):
resp = self.get_vim(get_all=True)
logger.debug('Delete cloud region')
resource = ("/cloud-infrastructure/cloud-regions/cloud-region"
"/%s/%s?resource-version=%s" %
(self.cloud_owner, self.cloud_region,
resp['resource-version']))
resp = call_req(self.base_url, self.username, self.password,
rest_no_auth, resource, "DELETE",
headers=self.default_headers)
if resp[0] != 0:
raise VimDriverAzureException(
status_code=400,
content="Failed to delete cloud %s_%s: %s." % (
self.cloud_owner, self.cloud_region, resp[1]))
def update_vim(self, content):
self.add_flavors(content)
def update_identity_url(self):
vim = self.get_vim()
vim['identity-url'] = ("http://%s/api/multicloud/v0/%s_%s/identity/"
"v3" % (MSB_SERVICE_IP, self.cloud_owner,
self.cloud_region))
resource = ("/cloud-infrastructure/cloud-regions/cloud-region"
"/%s/%s" % (self.cloud_owner, self.cloud_region))
logger.debug("Updating identity url %s" % vim)
call_req(self.base_url, self.username, self.password,
rest_no_auth, resource, "PUT",
content=json.dumps(vim),
headers=self.default_headers)
def add_flavors(self, content):
for flavor in content['flavors']:
resource = ("/cloud-infrastructure/cloud-regions/cloud-region/"
"%s/%s/flavors/flavor/%s" % (
self.cloud_owner, self.cloud_region,
flavor['name']))
body = {
'flavor-name': flavor['name'],
'flavor-vcpus': flavor['vcpus'],
'flavor-ram': flavor['ram'],
'flavor-disk': flavor['disk'],
'flavor-selflink': ""
}
# Handle extra specs
if flavor['name'].startswith("onap."):
hpa_capabilities = self._get_hpa_capabilities(
flavor)
body['hpa-capabilities'] = {
'hpa-capability': hpa_capabilities}
logger.debug("Adding flavors to cloud region")
call_req(self.base_url, self.username, self.password,
rest_no_auth, resource, "PUT",
content=json.dumps(body),
headers=self.default_headers)
def _get_hpa_capabilities(self, flavor):
hpa_caps = []
        # Basic capabilities
caps_dict = self._get_hpa_basic_capabilities(flavor)
if len(caps_dict) > 0:
logger.debug("basic_capabilities_info: %s" % caps_dict)
hpa_caps.append(caps_dict)
# storage capabilities
caps_dict = self._get_storage_capabilities(flavor)
if len(caps_dict) > 0:
logger.debug("storage_capabilities_info: %s" % caps_dict)
hpa_caps.append(caps_dict)
# CPU instruction set extension capabilities
caps_dict = self._get_instruction_set_capabilities(
flavor['extra_specs'])
if len(caps_dict) > 0:
logger.debug("instruction_set_capabilities_info: %s" % caps_dict)
hpa_caps.append(caps_dict)
# ovsdpdk capabilities
caps_dict = self._get_ovsdpdk_capabilities()
if len(caps_dict) > 0:
logger.debug("ovsdpdk_capabilities_info: %s" % caps_dict)
hpa_caps.append(caps_dict)
return hpa_caps
def _get_hpa_basic_capabilities(self, flavor):
basic_capability = {}
feature_uuid = uuid.uuid4()
basic_capability['hpa-capability-id'] = str(feature_uuid)
basic_capability['hpa-feature'] = 'basicCapabilities'
basic_capability['architecture'] = 'generic'
basic_capability['hpa-version'] = 'v1'
basic_capability['hpa-feature-attributes'] = []
basic_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'numVirtualCpu',
'hpa-attribute-value': json.dumps(
{'value': str(flavor['vcpus'])})})
basic_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'virtualMemSize',
'hpa-attribute-value': json.dumps({'value': str(
flavor['ram']), 'unit': 'GB'})})
return basic_capability
def _get_storage_capabilities(self, flavor):
storage_capability = {}
feature_uuid = uuid.uuid4()
storage_capability['hpa-capability-id'] = str(feature_uuid)
storage_capability['hpa-feature'] = 'localStorage'
storage_capability['architecture'] = 'generic'
storage_capability['hpa-version'] = 'v1'
storage_capability['hpa-feature-attributes'] = []
storage_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'diskSize',
'hpa-attribute-value': json.dumps({'value': str(
flavor['disk']), 'unit': 'MB'})
})
storage_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'swapMemSize',
'hpa-attribute-value': json.dumps({'value': str(
flavor.get('swap', 0)), 'unit': 'MB'})
})
storage_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'ephemeralDiskSize',
'hpa-attribute-value': json.dumps({'value': str(
flavor.get('OS-FLV-EXT-DATA:ephemeral', 0)), 'unit': 'GB'})
})
return storage_capability
def _get_instruction_set_capabilities(self, extra_specs):
instruction_capability = {}
feature_uuid = uuid.uuid4()
if extra_specs.get('hw:capabilities:cpu_info:features'):
instruction_capability['hpa-capability-id'] = str(feature_uuid)
instruction_capability['hpa-feature'] = 'instructionSetExtensions'
instruction_capability['architecture'] = 'Intel64'
instruction_capability['hpa-version'] = 'v1'
instruction_capability['hpa-feature-attributes'] = []
instruction_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': 'instructionSetExtensions',
'hpa-attribute-value': json.dumps(
{'value': extra_specs[
'hw:capabilities:cpu_info:features']})
})
return instruction_capability
def _get_ovsdpdk_capabilities(self):
ovsdpdk_capability = {}
feature_uuid = uuid.uuid4()
if not self._vim_info:
self._vim_info = self.get_vim(get_all=True)
cloud_extra_info_str = self._vim_info.get('cloud-extra-info')
if not isinstance(cloud_extra_info_str, dict):
try:
cloud_extra_info_str = json.loads(cloud_extra_info_str)
except Exception as ex:
logger.error("Can not convert cloud extra info %s %s" % (
str(ex), cloud_extra_info_str))
return {}
if cloud_extra_info_str:
cloud_dpdk_info = cloud_extra_info_str.get("ovsDpdk")
if cloud_dpdk_info:
ovsdpdk_capability['hpa-capability-id'] = str(feature_uuid)
ovsdpdk_capability['hpa-feature'] = 'ovsDpdk'
ovsdpdk_capability['architecture'] = 'Intel64'
ovsdpdk_capability['hpa-version'] = 'v1'
ovsdpdk_capability['hpa-feature-attributes'] = []
ovsdpdk_capability['hpa-feature-attributes'].append({
'hpa-attribute-key': str(cloud_dpdk_info.get("libname")),
'hpa-attribute-value': json.dumps(
{'value': cloud_dpdk_info.get("libversion")})
})
return ovsdpdk_capability
def call_aria_rest(service_id, workflow_name):
base_url = "%s" % (ARIA_SERVER_URL)
resource = ("/services/%s/executions/%s" % (service_id, workflow_name))
headers = {}
headers['content-type'] = 'text/plain'
return call_req(base_url, "", "", rest_no_auth, resource, "POST",
headers=headers)
|
from taiga.requestmaker import RequestMaker
from taiga.models import UserStoryStatus, UserStoryStatuses
import unittest
from mock import patch
class TestUserStoryStatuses(unittest.TestCase):
@patch('taiga.models.base.ListResource._new_resource')
def test_create_user_story_status(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = UserStoryStatus(rm)
userstory_status = UserStoryStatuses(rm).create(1, 'USS 1')
mock_new_resource.assert_called_with(
payload={'project': 1, 'name': 'USS 1'}
)
|
import os
import json
import requests
import subprocess
from urllib.parse import urlparse
from datetime import datetime
import boto3
from .azul_agent import AzulAgent
from .data_store_agent import DataStoreAgent
from .ingest_agents import IngestUIAgent, IngestApiAgent
from .analysis_agent import AnalysisAgent
from .matrix_agent import MatrixAgent
from .utils import Progress
from .wait_for import WaitFor
MINUTE = 60
JOB_MANAGER_URL = "https://job-manager.caas-prod.broadinstitute.org"
class DatasetRunner:
FASTQ_CONTENT_TYPE = 'application/gzip; dcp-type=data'
def __init__(self, deployment, export_bundles=True):
self.s3_client = boto3.client('s3')
self.deployment = deployment
self.export_bundles = export_bundles
self.ingest_broker = IngestUIAgent(deployment=deployment)
self.ingest_api = IngestApiAgent(deployment=deployment)
self.data_store = DataStoreAgent(deployment=deployment)
self.analysis_agent = None
self.azul_agent = AzulAgent(deployment=deployment)
self.matrix_agent = MatrixAgent(deployment=deployment)
self.dataset = None
self.project_shortname = None
self.submission_id = None
self.submission_envelope = None
self.upload_credentials = None
self.upload_area_uuid = None
self.expected_bundle_count = None
self.primary_uuid_to_secondary_bundle_fqid_map = {}
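        # Maps each primary bundle uuid to the fqid ("<uuid>.<version>") of the
        # results (secondary) bundle derived from it; values stay None until a
        # results bundle is found (see _count_results_bundles).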
self.failure_reason = None
self.analysis_workflow_set = set([])
gcp_credentials_file_for_analysis = os.environ.get('GCP_ACCOUNT_ANALYSIS_INFO')
if gcp_credentials_file_for_analysis:
self.analysis_agent = AnalysisAgent(deployment=deployment,
service_account_key=json.loads(gcp_credentials_file_for_analysis))
@property
def primary_bundle_uuids(self):
return list(self.primary_uuid_to_secondary_bundle_fqid_map.keys())
@property
def secondary_bundle_uuids(self):
return [fqid.split('.')[0] for fqid in self.primary_uuid_to_secondary_bundle_fqid_map.values()]
@property
def secondary_bundle_fqids(self):
return list(self.primary_uuid_to_secondary_bundle_fqid_map.values())
def run(self, dataset_fixture, run_name_prefix="test"):
"""The entrypoint for running the tests.
        Note: we use different logic for scaling tests (tests with a run-name prefix of "scale") and
non-scaling tests (e.g. integration test) during the polling process, to save money and
resources.
1. If it's in the scaling test mode, once the bundles get exported, the runner will poll
the following info altogether:
- primary bundles count
- ongoing analysis workflows count
- successful analysis workflows count
- secondary bundles count
so the progress in DSS and Analysis is tracked simultaneously
        and a single workflow failure won't interfere with the test
2. If the test is not a scaling test, we'd like to poll the following info step by step:
- primary bundles count
- analysis workflows (name, id, status)
- secondary bundles count
so if any of the steps failed, the test will fail early instead of timing out.
"""
self.dataset = dataset_fixture
self.set_project_shortname(run_name_prefix)
self.upload_spreadsheet_and_create_submission()
self.wait_for_ingest_to_process_spreadsheet_files_tab()
self.get_upload_area_credentials()
self.stage_data_files()
self.wait_for_envelope_to_be_validated()
if self.export_bundles:
self.complete_submission()
if run_name_prefix == "scale":
# == Scaling Logic ==
self.wait_for_primary_bundles_analysis_workflows_and_results_bundles()
else:
# == Non-scaling Logic ==
self.wait_for_primary_bundles()
self.wait_for_analysis_workflows()
self.wait_for_secondary_bundles()
self.assert_data_browser_bundles()
if self.deployment != 'prod':
self.retrieve_loom_output_from_matrix_service()
if self.failure_reason:
raise RuntimeError(self.failure_reason)
def set_project_shortname(self, run_name_prefix):
self.project_shortname = "{prefix}/{dataset}/{when}".format(
prefix=run_name_prefix,
dataset=self.dataset.name,
when=datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'))
Progress.report(f"UPDATING SPREADSHEET PROJECT SHORTNAME TO {self.project_shortname}")
self.dataset.update_spreadsheet_project_shortname(self.project_shortname)
def upload_spreadsheet_and_create_submission(self):
spreadsheet_filename = os.path.basename(self.dataset.metadata_spreadsheet_path)
Progress.report(f"CREATING SUBMISSION with {spreadsheet_filename}...")
self.submission_id = self.ingest_broker.upload(self.dataset.metadata_spreadsheet_path)
Progress.report(f" submission ID is {self.submission_id}\n")
self.submission_envelope = self.ingest_api.submission(self.submission_id)
def wait_for_ingest_to_process_spreadsheet_files_tab(self):
file_count = self._how_many_files_do_we_expect()
Progress.report(f"WAIT FOR INGEST TO PROCESS {file_count} SPREADSHEET FILE ROWS...")
        # Note: this only waits for the file count; upload credentials are
        # fetched later in get_upload_area_credentials().
        WaitFor(
            self._submission_files_count
        ).to_return_value(value=file_count)
def _how_many_files_do_we_expect(self):
return self.dataset.count_of_rows_in_spreadsheet_tab('Sequence file')
def _submission_files_count(self):
return len(self.submission_envelope.reload().files())
def get_upload_area_credentials(self):
Progress.report("WAITING FOR STAGING AREA...")
self.upload_credentials = WaitFor(
self._get_upload_area_credentials
).to_return_a_value_other_than(other_than_value=None, timeout_seconds=2 * MINUTE)
Progress.report(" credentials received.\n")
def _get_upload_area_credentials(self):
return self.submission_envelope.reload().upload_credentials()
def stage_data_files(self):
self.upload_area_uuid = urlparse(self.upload_credentials).path.split('/')[1]
self._stage_data_files_using_s3_sync()
def _stage_data_files_using_s3_sync(self):
Progress.report("STAGING FILES using hca cli...")
self.select_upload_area()
self.upload_files()
self.forget_about_upload_area()
def select_upload_area(self):
upload_area_s3_location = f"s3://org-humancellatlas-upload-{self.deployment}/{self.upload_area_uuid}/"
self._run_command(['hca', 'upload', 'select', upload_area_s3_location])
def upload_files(self):
self._run_command(['hca', 'upload', 'files', self.dataset.config['data_files_location']])
def forget_about_upload_area(self):
self._run_command(['hca', 'upload', 'forget', self.upload_area_uuid])
def wait_for_envelope_to_be_validated(self):
Progress.report("WAIT FOR VALIDATION...")
WaitFor(self._envelope_is_valid).to_return_value(value=True)
Progress.report(" envelope is valid.\n")
def _envelope_is_valid(self):
envelope_status = self.submission_envelope.reload().status
Progress.report(f" envelope status is {envelope_status}")
if envelope_status == 'Invalid':
raise Exception("envelope status is Invalid")
return envelope_status in ['Valid', 'Submitted']
def complete_submission(self):
Progress.report("COMPLETING SUBMISSION...")
submit_url = self.submission_envelope.data['_links']['submit']['href']
headers = self.ingest_api.ingest_auth_agent.make_auth_header()
response = requests.put(submit_url, headers=headers)
if response.status_code != requests.codes.accepted:
raise RuntimeError(f"PUT {submit_url} returned {response.status_code}: {response.content}")
Progress.report(" done.\n")
def wait_for_primary_bundles_analysis_workflows_and_results_bundles(self):
"""
We wait for all primary bundles to be created before starting to look for results in non-scaling tests.
It appears that with large submissions, results can start to appear before bundle export is finished,
so we monitor both kinds of bundles simultaneously in scaling tests.
"""
Progress.report("WAITING FOR PRIMARY BUNDLE(s), ANALYSIS WORKFLOWS AND RESULTS BUNDLE(s)...")
self.expected_bundle_count = self.dataset.config["expected_bundle_count"]
WaitFor(
self._count_primary_bundles_analysis_workflows_and_results_bundles
).to_return_value(value=self.expected_bundle_count)
def _count_primary_bundles_analysis_workflows_and_results_bundles(self):
if self._primary_bundle_count() < self.expected_bundle_count:
self._count_primary_bundles()
if self._analysis_workflows_count() < self.expected_bundle_count:
self._batch_count_analysis_workflows_by_project_shortname()
if self._results_bundles_count() < self.expected_bundle_count:
self._count_results_bundles()
Progress.report(" primary bundles: {0}/{1} \n workflows: running: {2}/{3}, \
succeeded: {4}/{5}, failed: {6}/{7} \n results bundles: {8}/{9} ".format(
self._primary_bundle_count(),
self.expected_bundle_count,
self._ongoing_analysis_workflows_count(),
self._primary_bundle_count(),
self._successful_analysis_workflows_count(),
self._primary_bundle_count(),
self._failed_analysis_workflows_count(),
self._primary_bundle_count(),
self._results_bundles_count(),
self._primary_bundle_count()
))
return self._results_bundles_count()
def wait_for_primary_bundles(self):
Progress.report('Waiting for submission to complete...')
WaitFor(self.submission_envelope.check_status).to_return_str('complete')
self.expected_bundle_count = self.dataset.config["expected_bundle_count"]
primary_bundles_count = self._primary_bundle_count()
if primary_bundles_count != self.expected_bundle_count:
raise RuntimeError(f'Expected {self.expected_bundle_count} primary bundles, but only '
f'got {primary_bundles_count}')
def wait_for_analysis_workflows(self):
if not self.analysis_agent:
Progress.report("NO CREDENTIALS PROVIDED FOR ANALYSIS AGENT, SKIPPING WORKFLOW(s) CHECK...")
else:
Progress.report("WAITING FOR ANALYSIS WORKFLOW(s) TO FINISH...")
WaitFor(
self._count_analysis_workflows_and_report
).to_return_value(value=self.expected_bundle_count)
def _count_analysis_workflows_and_report(self):
if self._successful_analysis_workflows_count() < self.expected_bundle_count:
self._count_analysis_workflows()
Progress.report(" successful analysis workflows: {}/{}".format(
self._successful_analysis_workflows_count(),
self.expected_bundle_count
))
return self._successful_analysis_workflows_count()
def _batch_count_analysis_workflows_by_project_shortname(self):
"""This should only be used for the scaling test"""
# TODO: remove the following line once there are no more scalability concerns of the analysis agent
with self.analysis_agent.ignore_logging_msg():
try:
workflows = self.analysis_agent.query_by_project_shortname(project_shortname=self.project_shortname, with_labels=False)
self.analysis_workflow_set.update(workflows)
except requests.exceptions.HTTPError:
Progress.report(" something went wrong when querying workflows, skipping for this time...")
def _count_analysis_workflows(self):
for bundle_uuid in self.submission_envelope.bundles():
# TODO: remove the following line once there are no more scalability concerns of the analysis agent
with self.analysis_agent.ignore_logging_msg():
try:
workflows = self.analysis_agent.query_by_bundle(bundle_uuid=bundle_uuid, with_labels=False)
self.analysis_workflow_set.update(workflows)
# NOTE: this one-bundle-one-workflow mechanism might change in the future
if len(workflows) > 1:
if all(wf.status == "Running" for wf in workflows):
raise Exception(f"Bundle {bundle_uuid} triggered more than one running workflows: {workflows}")
elif len(workflows) == 1:
workflow = workflows[0]
if workflow.status in ('Failed', 'Aborted', 'Aborting'):
raise Exception(f"The status of workflow {workflow.uuid} is: {workflow.status} \n For debugging, you might want to look into: {JOB_MANAGER_URL}/jobs/{workflow.uuid}")
if workflow.status == 'Succeeded':
Progress.report(f" workflow succeeded for bundle {bundle_uuid}: \n {workflow}")
else:
Progress.report(f" Found workflow for bundle {bundle_uuid}: \n {workflow}")
except requests.exceptions.HTTPError:
# Progress.report("ENCOUNTERED AN ERROR FETCHING WORKFLOW INFO, RETRY NEXT TIME...")
continue
def wait_for_secondary_bundles(self):
Progress.report("WAITING FOR RESULTS BUNDLE(s) TO BE CREATED...")
self.expected_bundle_count = self.dataset.config["expected_bundle_count"]
WaitFor(
self._count_secondary_bundles_and_report
).to_return_value(value=self.expected_bundle_count)
def _count_primary_bundles_and_report(self):
if self._primary_bundle_count() < self.expected_bundle_count:
self._count_primary_bundles()
Progress.report(" bundles: primary: {}/{}".format(
self._primary_bundle_count(),
self.expected_bundle_count
))
return self._primary_bundle_count()
def _count_secondary_bundles_and_report(self):
if self._results_bundles_count() < self.expected_bundle_count:
self._count_results_bundles()
Progress.report(" bundles: results: {}/{}".format(
self._results_bundles_count(),
self._primary_bundle_count()
))
return self._results_bundles_count()
def _count_primary_bundles(self):
for bundle_uuid in self.submission_envelope.bundles():
if bundle_uuid not in self.primary_uuid_to_secondary_bundle_fqid_map:
Progress.report(f" found new primary bundle: {bundle_uuid}")
self.primary_uuid_to_secondary_bundle_fqid_map[bundle_uuid] = None
def _primary_bundle_count(self):
self._count_primary_bundles()
return len(self.primary_uuid_to_secondary_bundle_fqid_map)
def _analysis_workflows_count(self):
return len(self.analysis_workflow_set)
def _ongoing_analysis_workflows_count(self):
return len(
list(filter(lambda wf: wf.status in ('Submitted', 'On Hold', 'Running'), self.analysis_workflow_set))
)
def _successful_analysis_workflows_count(self):
return len(
list(filter(lambda wf: wf.status == 'Succeeded', self.analysis_workflow_set))
)
def _failed_analysis_workflows_count(self):
return len(
list(filter(lambda wf: wf.status in ('Failed', 'Aborting', 'Aborted'), self.analysis_workflow_set))
)
def _results_bundles_count(self):
return len(list(v for v in self.primary_uuid_to_secondary_bundle_fqid_map.values() if v))
def _count_results_bundles(self):
for primary_bundle_uuid, secondary_bundle_fqid in self.primary_uuid_to_secondary_bundle_fqid_map.items():
if secondary_bundle_fqid is None:
query = {
"query": {
"match": {
"files.analysis_process_json.input_bundles": primary_bundle_uuid
}
}
}
results = self.data_store.search(query)
if len(results) > 0:
results_bundle_fqid = results[0]['bundle_fqid']
if self.primary_uuid_to_secondary_bundle_fqid_map[primary_bundle_uuid] is None:
Progress.report(f" found new results bundle: {results_bundle_fqid}")
self.primary_uuid_to_secondary_bundle_fqid_map[primary_bundle_uuid] = results_bundle_fqid
@staticmethod
def _run_command(cmd_and_args_list, expected_retcode=0):
retcode = subprocess.call(cmd_and_args_list)
if retcode != 0:
raise Exception(
"Unexpected return code from '{command}', expected {expected_retcode} got {actual_retcode}".format(
command=" ".join(cmd_and_args_list), expected_retcode=expected_retcode, actual_retcode=retcode
)
)
def assert_data_browser_bundles(self):
Progress.report(f"Project shortname: {self.project_shortname}")
WaitFor(
self._assert_data_browser_bundles, self.project_shortname
).to_return_value(value=True)
def _assert_data_browser_bundles(self, project_shortname):
        # Numbers here correspond to references to files for each bundle (AKA contributions). We must wait for all
        # file references to be indexed before we continue to cleanup, in order to avoid potential races.
        # A file reference is a contribution if it appears in the bundle with "indexed": false.
        # For ss2 we have 2 fastqs in the primary bundle, plus the 2 fastqs and 18 other contributions from the
        # secondary; for optimus we have 3 fastqs in the primary bundle plus the secondary-bundle contributions;
        # the expected totals per project are the values below.
        num_contributions_by_project = {'Smart-seq2': 22, 'optimus': 14}
expected_bundle_uuids = set(self.primary_bundle_uuids).union(self.secondary_bundle_uuids)
files = self.azul_agent.get_entities_by_project('files', project_shortname)
bundle_uuids = {bundle['bundleUuid'] for file in files for bundle in file['bundles']}
# We care only about the intersection because other bundles may reference the same files
bundle_uuids.intersection_update(expected_bundle_uuids)
project_shortnames = {project_short_name
for file in files
for project in file['projects']
for project_short_name in project['projectShortname']}
num_contributions = sum(1 for f in files for b in f['bundles'] if b['bundleUuid'] in expected_bundle_uuids)
Progress.report(f"{len(bundle_uuids)}/{len(expected_bundle_uuids)} distinct bundles")
Progress.report(f"{num_contributions}/{num_contributions_by_project[self.dataset.name]}"
f" distinct file contributions")
if num_contributions < num_contributions_by_project[self.dataset.name]:
return False
elif num_contributions > num_contributions_by_project[self.dataset.name]:
raise AssertionError('More contributions than expected were found in the Azul index')
if bundle_uuids < expected_bundle_uuids:
return False
if project_shortnames:
assert project_shortnames == {project_shortname}
else:
return False
return True
def _assert_project_removed_from_azul(self):
results_empty = [len(self.azul_agent.get_entities_by_project(entity, self.project_shortname)) == 0
for entity in ['files', 'projects', 'samples']]
Progress.report("Project removed from index files: {}, projects: {}, samples: {}".format(*results_empty))
return all(results_empty)
def _assert_workflows_are_terminated(self, workflows):
statuses = []
for analysis_workflow in workflows:
workflow = self.analysis_agent.query_by_workflow_uuid(uuid=analysis_workflow.uuid)
statuses.append(workflow.status)
return all([status in ('Aborted', 'Succeeded', 'Failed') for status in statuses])
def cleanup_primary_and_result_bundles(self):
RETAIN_BUNDLES = os.environ.get('RETAIN_BUNDLES')
if RETAIN_BUNDLES:
print("FLAG TO RETAIN BUNDLES IS SET. NO TOMBSTONING WILL OCCUR")
return
for primary_bundle_uuid, secondary_bundle_fqid in self.primary_uuid_to_secondary_bundle_fqid_map.items():
self.data_store.tombstone_bundle(primary_bundle_uuid)
if secondary_bundle_fqid is not None:
secondary_bundle_uuid = secondary_bundle_fqid.split('.')[0]
self.data_store.tombstone_bundle(secondary_bundle_uuid)
Progress.report("WAITING FOR BUNDLES TO BE REMOVED FROM AZUL ")
WaitFor(
self._assert_project_removed_from_azul
).to_return_value(True)
def cleanup_analysis_workflows(self):
ongoing_workflows = [wf for wf in self.analysis_workflow_set if wf.status in ('Submitted', 'On Hold', 'Running')]
for analysis_workflow in ongoing_workflows:
try:
self.analysis_agent.abort_workflow(uuid=analysis_workflow.uuid)
except requests.HTTPError as e:
Progress.report(f"An error occurred: {e}")
Progress.report("WAITING FOR WORKFLOW(S) TO BE ABORTED IN CROMWELL")
WaitFor(
self._assert_workflows_are_terminated, ongoing_workflows
).to_return_value(True)
def retrieve_loom_output_from_matrix_service(self):
WaitFor(
self.matrix_agent.is_matrix_project_indexed, self.project_shortname
).to_return_value(value=True, timeout_seconds=300)
request_id = self.matrix_agent.post_matrix_request(self.secondary_bundle_fqids, "loom")
WaitFor(
self.matrix_agent.get_matrix_request, request_id
).to_return_value(value="Complete", timeout_seconds=600)
|
import argparse
import random
import string
import os
parser = argparse.ArgumentParser(
prog='keygen',
description='Key generator')
parser.add_argument('-q', type=int)
args = parser.parse_args()
def gen_key():
key = ''
alphabet = string.ascii_uppercase + '0123456789'
for block in range(5):
for num_characters in range(5):
key += random.choice(alphabet)
if block != 4:
key += "-"
return key
def write_key(key):
with open(f'/Users/{os.getlogin()}/Desktop/key.txt', 'a') as f:
f.write(f'{key}\n')
f.close()
def run(quantity):
for _ in range(quantity):
write_key(gen_key())
if __name__ == "__main__":
run(args.q)
|
# Simulation 1
import os
import sys
sys.path.insert(1, os.path.join(sys.path[0], '..'))
import agent
def f12(x1, x2):
if (x1, x2) == (0, 0): return 3
elif (x1, x2) == (0, 1): return 2
elif (x1, x2) == (1, 0): return 4
elif (x1, x2) == (1, 1): return 1
else: raise ValueError
def f21(x2, x1):
return f12(x1, x2)
def f13(x1, x3):
if (x1, x3) == (0, 0): return 1
elif (x1, x3) == (0, 1): return 2
elif (x1, x3) == (1, 0): return 2
elif (x1, x3) == (1, 1): return 1
else: raise ValueError
def f31(x3, x1):
return f13(x1, x3)
def f23(x2, x3):
if (x2, x3) == (0, 0): return 2
elif (x2, x3) == (0, 1): return 3
elif (x2, x3) == (1, 0): return 1
elif (x2, x3) == (1, 1): return 3
else: raise ValueError
def f32(x3, x2):
return f23(x2, x3)
def f24(x2, x4):
if (x2, x4) == (0, 0): return 2
elif (x2, x4) == (0, 1): return 1
elif (x2, x4) == (1, 0): return 4
elif (x2, x4) == (1, 1): return 2
else: raise ValueError
def f42(x4, x2):
return f24(x2, x4)
agents_file = "agents-sim-1.txt"
agent1 = agent.Agent(1, [0, 1],
{(1,3): f13,
(1,2): f12},
agents_file)
agent2 = agent.Agent(2, [0, 1],
{(2,4): f24,
(2,1): f21,
(2,3): f23},
agents_file)
agent3 = agent.Agent(3, [0, 1],
{(3, 1): f31,
(3, 2): f32},
agents_file)
agent4 = agent.Agent(4, [0, 1], {(4,2): f42}, agents_file)
# A trick so that this process is allowed to fork.
pid = os.getpid()
children = []
if pid == os.getpid():
childid = os.fork()
children.append(childid)
if childid == 0:
agent2.start()
print 'agent2:', agent2.value
if pid == os.getpid():
childid = os.fork()
children.append(childid)
if childid == 0:
agent3.start()
print 'agent3:', agent3.value
if pid == os.getpid():
childid = os.fork()
children.append(childid)
if childid == 0:
agent4.start()
print 'agent4:', agent4.value
if pid == os.getpid():
agent1.start()
print 'max_util:', agent1.max_util
print 'agent1:', agent1.value
for i in children:
os.wait()
|
# -*- coding: utf-8 -*-
from architect.inventory.client import BaseClient
from celery.utils.log import get_logger
logger = get_logger(__name__)
class ArchitectClient(BaseClient):
def __init__(self, **kwargs):
super(ArchitectClient, self).__init__(**kwargs)
def check_status(self):
return False
def inventory(self, resource=None):
return {}
def class_list(self, resource=None):
return {}
def parameter_list(self, resource=None):
resource_list = {}
return resource_list
|
import os
import sys
import time
import scipy
import matplotlib
import pybedtools
import subprocess
def gfffeature_to_interval(feature):
return pybedtools.create_interval_from_list(feature.tostring().split('\t'))
def chunker(f, n):
"""
Utility function to split iterable `f` into `n` chunks
"""
f = iter(f)
x = []
while 1:
if len(x) < n:
try:
x.append(f.next())
except StopIteration:
if len(x) > 0:
yield tuple(x)
break
else:
yield tuple(x)
x = []
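# Usage sketch for chunker (hedged example, Python 2 iterator protocol as above):
#   list(chunker(iter(range(7)), 3)) == [(0, 1, 2), (3, 4, 5), (6,)]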
def nice_colormap(z, invalid=None):
"""
Dynamically scales the midpoint to the median of the positive values.
Returns a colormap ready for imshow or pcolor or whatever.
"""
norm = matplotlib.colors.Normalize()
z = z.copy()
norm(z)
# Set max to 99th percentile
norm.vmax = scipy.stats.scoreatpercentile(z.ravel(), 99)
zeropoint = norm(0)
# split from zero to max(z) into chunks
dcolor = (1 - zeropoint) / 3
# midpoint of color change is median of positive values.
medpoint = norm(scipy.stats.scoreatpercentile(z[z > 0].ravel(), 50))
# construct the colormap definition
cdict = {
'red': ((0.0, 0.0, 0.0),
(zeropoint, 1.0, 1.0),
(medpoint, 1., 1.0),
(1.0, 1.0, 1.0)),
'green': ((0.0, 0.0, 0.0),
(zeropoint, 1.0, 1.0),
(medpoint, .9, .9),
(1.0, 0.0, 0.0)),
'blue': ((0.0, 0.0, 1.0),
(zeropoint, 1.0, 1.0),
(medpoint, .0, .0),
(1.0, 0.0, 0.0))
}
cmap = matplotlib.colors.LinearSegmentedColormap('my_colormap', cdict, 256)
# NaNs and stuff will be medium gray.
if invalid is not None:
cmap.set_bad(invalid, 1.0)
return norm, cmap
def bam2bigwig(bam, bigwig, genome, scale=1e6, verbose=False):
"""
Uses BEDTools to go from BAM to bedgraph, then bedGraphToBigWig to get the
final bigwig.
"""
if scale is not None:
cmds = ['samtools', 'view', '-F', '0x4', '-c', bam]
p = subprocess.Popen(cmds, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
total_reads = float(stdout)
reads_per_scale = total_reads / scale
if verbose:
sys.stderr.write('%s total reads\n' % total_reads)
sys.stderr.flush()
chromsizes = pybedtools.chromsizes_to_file(pybedtools.chromsizes(genome))
t0 = time.time()
bedgraph = pybedtools.BedTool(bam)\
.genome_coverage(bg=True, g=chromsizes, scale=scale)\
.moveto('bedgraph.bedgraph')
print bedgraph.fn
if verbose:
sys.stderr.write('Completed bedGraph in %.1fs\n' % (time.time() - t0))
sys.stderr.flush()
cmds = ['bedGraphToBigWig', bedgraph.fn, chromsizes, bigwig]
p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if verbose:
sys.stderr.write('Completed bigWig %s\n' % bigwig)
sys.stderr.flush()
def bedgraph2bigwig(bedgraph, bigwig, genome, verbose=False):
"""
Create a bigWig from `bedgraph`.
:param bedgraph: Input filename of bedgraph
:param bigwig: Output filename of bigWig to create
:param genome: String assembly name of genome
:param verbose: Print messages to stderr
"""
chromsizes = pybedtools.chromsizes_to_file(pybedtools.chromsizes(genome))
cmds = ['bedGraphToBigWig', bedgraph, chromsizes, bigwig]
p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if verbose:
sys.stderr.write('Completed bigWig %s\n' % bigwig)
sys.stderr.flush()
def data_dir():
"""
Returns the data directory that contains example files for tests and
documentation.
"""
return os.path.join(os.path.dirname(__file__), 'test', 'data')
def example_filename(fn):
"""
Return a bed file from the pybedtools examples directory. Use
:func:`list_example_files` to see a list of files that are included.
"""
fn = os.path.join(data_dir(), fn)
if not os.path.exists(fn):
raise ValueError("%s does not exist" % fn)
return fn
|
import numpy as np
import matplotlib.pyplot as plt
def drawPlan(plans, stu, time_len, bins, stat_time=10, note=''):
for name, groups in plans.items():
for i in range(groups):
group_arr = np.random.normal(time_len / groups * (i + 1) - time_len / groups / 3,
time_len / groups / 4, int(stu / groups))
# plt.hist(group_arr, int(timeLen / groups))
# plt.show()
if i == 0:
arrival = group_arr
else:
arrival = np.append(arrival, group_arr)
# print(np.random.normal(timeLen / groups * (i + 1) / 2, timeLen / groups / 4, int(students / groups)))
# print(arrival)
plt.title(name + note, fontproperties='SimHei', fontsize=20)
plt.xlabel('时间(分)(统计时长:' + str(stat_time) + '分)', fontproperties='SimHei', fontsize=15)
plt.ylabel('学生(人)', fontproperties='SimHei', fontsize=15)
plt.hist(arrival, int(bins))
plt.savefig(name + '(时长' + str(time_len) + '分)')
# plt.show()
plt.close()
# Original plan (30-minute window)
stu = 1400
timeLen = 30
plans = {'原方案': 1}
bins = 4
drawPlan(plans, stu, timeLen, bins, note='(上学时间段:7:00 - 7:30)')
# New plans (60-minute window)
timeLen = 60
bins = timeLen / 10 * 1.75
plans = {'新方案—1组': 1, '新方案—2组': 2, '新方案—3组': 3, '新方案—4组': 4, '新方案—5组': 5}
drawPlan(plans, stu, timeLen, bins, note='(上学时间段:7:00 - 8:00)')
# New plans (90-minute window)
timeLen = 90
drawPlan(plans, stu, timeLen, bins, note='(上学时间段:7:00 - 8:30)')
|
# This program does the following:
# - opens a list of devices to telnet into
# - saves the list of devices in a list
# - asks for a username and password
# - then loops, logging in to each device and sending a command
# - the output is shown and stored in a file
import getpass
import telnetlib
import time
with open ("device-list.list", "r") as devicelist:
# strip trailing newlines so telnetlib receives clean hostnames
hosts = [line.strip() for line in devicelist if line.strip()]
# with open ("command-list.txt", "r") as commandlist:
# commands = []
# commands = commandlist.readlines()
# print(commands)
user = input("Enter your username: ")
password = getpass.getpass()
for host in hosts:
tn = telnetlib.Telnet(host)
time.sleep(2)
tn.read_until(b"Username: ")
tn.write(user.encode('ascii') + b"\n")
time.sleep(2)
tn.read_until(b"Password: ")
tn.write(password.encode('ascii') + b"\n")
time.sleep(2)
tn.write(b"show ip int br\n")
time.sleep(2)
tn.write(b"exit\n")
time.sleep(2)
lastpost = tn.read_all().decode('ascii')
op = open(host+".txt", "w")
op.write(lastpost)
op.close()
print(lastpost)
tn.close()
|
import base64
from bbbingo import db
class User(db.Document):
username = db.StringField(unique=True)
email = db.StringField()
password = db.StringField()
cards = db.ListField(db.ReferenceField('Card'))
plays = db.ListField(db.ReferenceField('Play'))
class Card(db.Document):
slug = db.StringField(unique=True)
short_id = db.StringField()
name = db.StringField()
owner = db.ReferenceField(User)
category = db.StringField(default='uncategorized')
values = db.ListField(db.StringField(default=''))
free_space = db.BooleanField(default=True)
free_space_text = db.StringField()
# public|loggedin|unlisted|private
privacy = db.StringField(default='public')
playable = db.StringField(default='yes') # yes|owner|no
plays = db.ListField(db.ReferenceField('Play'))
meta = {
'ordering': ['id'],
}
def is_playable(self, user):
if self.is_viewable(user):
if self.playable == 'yes':
return True
if self.playable == 'owner' and self.owner == user:
return True
return False
def is_viewable(self, user):
if self.privacy == 'private' and user != self.owner:
return False
if self.privacy == 'loggedin' and not user:
return False
return True
class Play(db.Document):
slug = db.StringField(unique=True)
short_id = db.StringField()
owner = db.ReferenceField(User)
card = db.ReferenceField(Card)
description = db.StringField()
order = db.ListField(db.IntField())
solution = db.ListField(db.BooleanField())
meta = {
'ordering': ['id'],
}
def short_id(model):
return base64.urlsafe_b64encode(model.id.binary[-3:]).decode()
|
logparser = r'(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2},\d{3})\s+' \
r'(DEBUG|ERROR|INFO)\s+\[(\w+):(\w+):?(\w+)?\]\s+(.+)$'
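# Minimal usage sketch (hedged): the sample line below is hypothetical and only
# illustrates the shape of line this pattern expects; real logs may differ.
import re
sample = '2019-01-01 12:00:00,123 INFO [worker:job:step] task finished'
match = re.match(logparser, sample)
if match:
    timestamp, level, message = match.group(1), match.group(2), match.group(6)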
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.urls import reverse
from django.utils.text import slugify
import uuid, os
from django.contrib.gis.geos import Point
from geopy.geocoders import Nominatim
from django.contrib.gis.db.models import PointField
geolocator = Nominatim(user_agent="extrabooks_app")
def get_user_location(city):
location = geolocator.geocode(city)
longitude = location.longitude
latitude = location.latitude
user_location = Point(longitude,latitude)
return user_location
def user_directory_path(instance, filename):
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid.uuid4().hex[:10], ext)
# return the whole path to the file
return os.path.join(str(instance.id), "avatar", filename)
class CustomUser(AbstractUser):
city = models.CharField(max_length=50)
zip_code = models.CharField( max_length=5, blank = False)
avatar = models.ImageField(upload_to = user_directory_path,
verbose_name= 'profile_pic',
default = 'None/no-img.jpg')
slug = models.SlugField(unique=True)
location = PointField(null=True,blank=True)
def save(self, *args, **kwargs):
self.slug = slugify(self.username)
self.location = get_user_location(self.city)
super(CustomUser, self).save(*args, **kwargs)
def __str__(self) -> str:
return self.username
def get_absolute_url(self):
return reverse('profile', args =[str(self.slug)])
class Contact(models.Model):
from_user = models.ForeignKey(CustomUser, on_delete = models.CASCADE, related_name="from_user")
to_user = models.ForeignKey(CustomUser, on_delete = models.CASCADE, related_name="to_user")
CustomUser.add_to_class(
'following',
models.ManyToManyField(
'self',
through=Contact,
related_name='followers',
symmetrical=False))
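# Usage sketch (hedged, assumes two saved CustomUser instances alice and bob):
#   Contact.objects.create(from_user=alice, to_user=bob)
#   alice.following.all()   # users alice follows (via the Contact through model)
#   bob.followers.all()     # users following bob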
|
global namephone
global flag
import numpy
namephone=numpy.array([[' ']*6]*10)
namephone[0][0]='Tossapon'
namephone[1][0]='Poowanai'
namephone[2][0]='Norasath'
namephone[0][1]='0811323545'
namephone[1][1]='0835411234'
namephone[1][2]='0823354121'
namephone[2][1]='0850550354'
flag=0
def createuser():
return
def run():
x=raw_input('Phonebook - Input your command (h for help):')
function(x)
def insert():
insert=raw_input('Enter a phone number to be inserted:')
i=len(insert)
#print i
if (i==10):
flag=showlist()
insertphonetoolduser(insert)
if flag==1:
x=raw_input('Enter the person id to append the phone number or n for new person:')
y=raw_input('Enter replaced number or n for new phone number :')
addphonenumber(x,y,insert)
print 'insert success'
run()
else:
print 'Cannot insert phone number'
elif(i>10):
print 'Error - the number is too long'
insert=0
run()
elif(i<10):
print 'Error - the number is too short'
insert=0
run()
return
def addphonenumber(x,y,insert):
# if x=='1':
# for i in range(1,6):
# if namephone[0][]
# namephone[0][j]
# elif x=='2':
# elif x=='3':
# elif x=='4':
# elif x=='5':
# showlist()
return
def search(digi=0):
return phone
def allfunc():
print 'i - insert a phone number'
print 'd - delete a phone number'
print 'm - modify a phone number'
print 's - Search'
print 'x - exit program'
def function(func):
if(func=='i'):
insert()
elif(func=='d'):
print 'delete a phone number'
elif(func=='m'):
print 'modify a phone number'
elif(func=='s'):
print 'Search'
elif(func=='h'):
allfunc()
x=''
run()
elif(func=='x'):
print 'exit program'
else:
x=''
run()
return
def insertphonetoolduser(insert):
return
def showlist():
j=1
for k in range(0,10):
for j in range(1,6):
if namephone[k][j] == ' ':
j=1
break
if j==1:
l=k+1
print '[%d]'%l,namephone[k][j-1],j,'-',namephone[k][j]
else:
print ' ',j,'-',namephone[k][j]
j=1
if namephone[k][j]==' ':
break
flag=1
return flag
run()
|
# -*- coding: utf-8 -*-
"""
Created on Sun May 17 14:51:48 2015
@author: Martin Nguyen
"""
from NodeClass import node
from numpy import random
#list of TreatmentTree
# [[IOPreduction],CurrentMedicationType,SideEffect,MedicationCombination]
TreatmentTree = node ([ [511,1381] ,1, [10,109] , [1,0,0,0,0] ],
node([ [589,1407] ,2, [22,258] , [0,1,0,0,0] ],
node([ [294,1231] ,3, [2,12] ,[0,0,1,0,0] ],
node([ [148,559] ,4, [5,17] , [0,0,0,1,0]],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,23,[5,17],[0,0,0,1,1] ])),
node([ [148,559] ,21,[5,17] , [0,0,1,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,22,[5,17],[0,0,1,1,1] ]))),
node([ [294,1231] , 16 , [2,12] , [0,1,0,1,0] ],
node([ [148,559] ,17,[5,17] , [0,1,0,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,20,[5,17],[0,1,0,1,1] ])),
node([ [148,559] ,18,[5,17] , [0,1,1,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,19,[5,17],[0,1,1,1,1] ])))
),
node([ [589,1407] , 6 , [22,258] , [1,1,0,0,0] ],
node([ [294,1231] , 7 , [2,12] , [1,0,1,0,0] ],
node([ [148,559] ,12,[5,17] , [1,0,0,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,15,[5,17],[1,0,0,1,1] ])),
node([ [148,559] ,13,[5,17] , [0,1,0,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,14,[5,17],[1,0,1,1,1] ]))),
node([ [294,1231] , 8 , [2,12] , [1,1,1,0,0] ],
node([ [148,559] ,9,[5,17] , [1,1,0,1,0] ],
node([ [763,1480] ,5, 0 , [0,0,0,0,1]]),
node([ [763,1480] ,11,[5,17],[1,1,0,1,1] ])),
node([ [148,559] ,10,[5,17] , [1,1,1,0,1] ]))
)
)
class TreatmentBlock1(object):
def __init__(self, params,medicalRecords):
self.params = params
self.medicalRecords = medicalRecords
def update (self):
#either no treatment, or treatment with a side effect (TreatmentOverallStatus == 1 out here only means that
#a side effect was incurred)
if self.medicalRecords['CurrentMedicationType'] == 0 or self.medicalRecords['TreatmentOverallStatus'] == 1:
self.medicalRecords['ContinueTreatment'] = True
self.DoctorPrescription()
if self.params['IOPReduction'] < 0.1 and self.medicalRecords['MedicationIntake'] > 1 and self.medicalRecords['ContinueTreatment'] == True:
self.medicalRecords['TreatmentOverallStatus'] =1
self.DoctorPrescription()
if self.medicalRecords['TreatmentOverallStatus'] == 2 and self.medicalRecords['MedicationIntake'] > 1:
self.medicalRecords['ContinueTreatment'] = True
self.DoctorPrescription()
self.SetTimeNextVisit()
def DoctorPrescription(self):
#self.params['IOPReduction'] = 0.12
tracenode = node(0)
tracenode = TreatmentTree
if self.medicalRecords['MedicationPath'][0] == 0:
self.medicalRecords['MedicationPath'][0] = 1
self.operations(TreatmentTree)
self.medicalRecords['TreatmentOverallStatus'] = 0
self.medicalRecords['MedicationIntake'] = 0
else:
i = 1
#this shortcuts CurrentMedicationType == 10, so when the if-else below evaluates, we can be sure it is not
#because a new treatment was just assigned
if self.medicalRecords['CurrentMedicationType'] == 10 and self.params['FirstProgression'] == 1:
self.medicalRecords['ExitCode'] = True
self.ResetMedicationPath()
while i < 5 and self.medicalRecords['MedicationPath'][i] != 0 and self.medicalRecords['CurrentMedicationType'] != 10:
#add condition of i in here
if self.medicalRecords['MedicationPath'][i] == 1:
tracenode = tracenode.left
i = i +1
elif self.medicalRecords['MedicationPath'][i] == 2:
tracenode = tracenode.right
i = i+1
if i < 5 and self.medicalRecords['CurrentMedicationType'] != 10:
self.medicalRecords['MedicationPath'][i] = self.medicalRecords['TreatmentOverallStatus']
if self.medicalRecords['TreatmentOverallStatus'] == 1:
tracenode = tracenode.left
self.operations(tracenode)
if self.medicalRecords['TreatmentOverallStatus'] == 2:
tracenode = tracenode.right
self.operations(tracenode)
#This is the only way to get out of the treatment block
# Here after every medication indication, patients need to
self.medicalRecords['TreatmentOverallStatus'] = 0
self.medicalRecords['MedicationIntake'] = 0
#exit the block if i == 5
if i == 5 and self.params['FirstProgression'] == 1:
self.medicalRecords['ExitCode'] = True
self.ResetMedicationPath()
def operations(self,tracenode):
self.params['IOPReduction'] = random.beta(tracenode.value[0][0],tracenode.value[0][1])
self.medicalRecords['CurrentMedicationType'] = tracenode.value[1]
if self.medicalRecords['CurrentMedicationType'] != 5:
self.params['SideEffect'] = random.beta(tracenode.value[2][0],tracenode.value[2][1])
else:
self.params['SideEffect'] = 0
self.medicalRecords['MedicationCombination'] = tracenode.value[3]
def SetTimeNextVisit(self):
if self.medicalRecords['MedicationIntake'] == 0:
self.params['time_next_visit'] = 3
else:
self.params['time_next_visit'] = 6
def ResetMedicationPath(self):
self.medicalRecords['MedicationPath'][2] = 0
self.medicalRecords['MedicationPath'][3] = 0
self.medicalRecords['MedicationPath'][4] = 0
self.medicalRecords['OnTrabeculectomy'] = False
if self.medicalRecords['MedicationPath'][1] ==1 :
self.medicalRecords['MedicationPath'][1] = 2
self.medicalRecords['CurrentMedicationType'] = 6
else:
self.medicalRecords['MedicationPath'][1] = 0
|
import time
class BubbleSort:
@staticmethod
def bubble_sort(vetor, tempo):
elementos = len(vetor.lista) - 1
vetor.ordenado = False
ordenado = False
j = 0
while not ordenado:
ordenado = True
for i in range(elementos):
if vetor.lista[i] > vetor.lista[i + 1]:
vetor.lista[i], vetor.lista[i + 1] = vetor.lista[i + 1], vetor.lista[i]
ordenado = False
time.sleep(tempo)
vetor.ordenado = True
return vetor.lista
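# Hedged usage sketch: bubble_sort expects an object exposing a `lista` list and
# an `ordenado` flag; the minimal holder below is introduced here for illustration.
class Vetor:
    def __init__(self, lista):
        self.lista = lista
        self.ordenado = False

if __name__ == '__main__':
    v = Vetor([5, 3, 1, 4, 2])
    print(BubbleSort.bubble_sort(v, 0))  # -> [1, 2, 3, 4, 5] with no delay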
|
class Color:
def __init__(self, id, name, rgba):
self.id = id
self.name = name
self.rgba = rgba
# https://minecraft.gamepedia.com/Map_item_format#Map_colors
COLOR_NONE = Color(0, "NONE", (0, 0, 0, 0))
COLOR_GRASS = Color(1, "GRASS", (127, 178, 56, 255))
COLOR_SAND = Color(2, "SAND", (247, 233, 163, 255))
COLOR_WOOL = Color(3, "WOOL", (199, 199, 199, 255))
COLOR_FIRE = Color(4, "FIRE", (255, 0, 0, 255))
COLOR_ICE = Color(5, "ICE", (160, 160, 255, 255))
COLOR_METAL = Color(6, "METAL", (167, 167, 167, 255))
COLOR_PLANT = Color(7, "PLANT", (0, 124, 0, 255))
COLOR_SNOW = Color(8, "SNOW", (255, 255, 255, 255))
COLOR_CLAY = Color(9, "CLAY", (164, 168, 184, 255))
COLOR_DIRT = Color(10, "DIRT", (151, 109, 77, 255))
COLOR_STONE = Color(11, "STONE", (112, 112, 112, 255))
COLOR_WATER = Color(12, "WATER", (64, 64, 255, 255))
COLOR_WOOD = Color(13, "WOOD", (143, 119, 72, 255))
COLOR_QUARTZ = Color(14, "QUARTZ", (255, 252, 245, 255))
COLOR_ORANGE = Color(15, "COLOR_ORANGE", (216, 127, 51, 255))
COLOR_MAGENTA = Color(16, "COLOR_MAGENTA", (178, 76, 216, 255))
COLOR_LIGHT_BLUE = Color(17, "COLOR_LIGHT_BLUE", (102, 153, 216, 255))
COLOR_YELLOW = Color(18, "COLOR_YELLOW", (229, 229, 51, 255))
COLOR_LIGHT_GREEN = Color(19, "COLOR_LIGHT_GREEN", (127, 204, 25, 255))
COLOR_PINK = Color(20, "COLOR_PINK", (242, 127, 165, 255))
COLOR_GRAY = Color(21, "COLOR_GRAY", (76, 76, 76, 255))
COLOR_LIGHT_GRAY = Color(22, "COLOR_LIGHT_GRAY", (153, 153, 153, 255))
COLOR_CYAN = Color(23, "COLOR_CYAN", (76, 127, 153, 255))
COLOR_PURPLE = Color(24, "COLOR_PURPLE", (127, 63, 178, 255))
COLOR_BLUE = Color(25, "COLOR_BLUE", (51, 76, 178, 255))
COLOR_BROWN = Color(26, "COLOR_BROWN", (102, 76, 51, 255))
COLOR_GREEN = Color(27, "COLOR_GREEN", (102, 127, 51, 255))
COLOR_RED = Color(28, "COLOR_RED", (153, 51, 51, 255))
COLOR_BLACK = Color(29, "COLOR_BLACK", (25, 25, 25, 255))
COLOR_GOLD = Color(30, "GOLD", (250, 238, 77, 255))
COLOR_DIAMOND = Color(31, "DIAMOND", (92, 219, 213, 255))
COLOR_LAPIS = Color(32, "LAPIS", (74, 128, 255, 255))
COLOR_EMERALD = Color(33, "EMERALD", (0, 217, 58, 255))
COLOR_PODZOL = Color(34, "PODZOL", (129, 86, 49, 255))
COLOR_NETHER = Color(35, "NETHER", (112, 2, 0, 255))
COLOR_TERRACOTTA_WHITE = Color(36, "TERRACOTTA_WHITE", (209, 177, 161, 255))
COLOR_TERRACOTTA_ORANGE = Color(37, "TERRACOTTA_ORANGE", (159, 82, 36, 255))
COLOR_TERRACOTTA_MAGENTA = Color(38, "TERRACOTTA_MAGENTA", (149, 87, 108, 255))
COLOR_TERRACOTTA_LIGHT_BLUE = Color(39, "TERRACOTTA_LIGHT_BLUE", (112, 108, 138, 255))
COLOR_TERRACOTTA_YELLOW = Color(40, "TERRACOTTA_YELLOW", (186, 133, 36, 255))
COLOR_TERRACOTTA_LIGHT_GREEN = Color(41, "TERRACOTTA_LIGHT_GREEN", (103, 117, 53, 255))
COLOR_TERRACOTTA_PINK = Color(42, "TERRACOTTA_PINK", (160, 77, 78, 255))
COLOR_TERRACOTTA_GRAY = Color(43, "TERRACOTTA_GRAY", (57, 41, 35, 255))
COLOR_TERRACOTTA_LIGHT_GRAY = Color(44, "TERRACOTTA_LIGHT_GRAY", (135, 107, 98, 255))
COLOR_TERRACOTTA_CYAN = Color(45, "TERRACOTTA_CYAN", (87, 92, 92, 255))
COLOR_TERRACOTTA_PURPLE = Color(46, "TERRACOTTA_PURPLE", (122, 73, 88, 255))
COLOR_TERRACOTTA_BLUE = Color(47, "TERRACOTTA_BLUE", (76, 62, 92, 255))
COLOR_TERRACOTTA_BROWN = Color(48, "TERRACOTTA_BROWN", (76, 50, 35, 255))
COLOR_TERRACOTTA_GREEN = Color(49, "TERRACOTTA_GREEN", (76, 82, 42, 255))
COLOR_TERRACOTTA_RED = Color(50, "TERRACOTTA_RED", (142, 60, 46, 255))
COLOR_TERRACOTTA_BLACK = Color(51, "TERRACOTTA_BLACK", (37, 22, 16, 255))
COLOR_CRIMSON_NYLIUM = Color(52, "CRIMSON_NYLIUM", (189, 48, 49, 255))
COLOR_CRIMSON_STEM = Color(53, "CRIMSON_STEM", (148, 63, 97, 255))
COLOR_CRIMSON_HYPHAE = Color(54, "CRIMSON_HYPHAE", (92, 25, 29, 255))
COLOR_WARPED_NYLIUM = Color(55, "WARPED_NYLIUM", (22, 126, 134, 255))
COLOR_WARPED_STEM = Color(56, "WARPED_STEM", (58, 142, 140, 255))
COLOR_WARPED_HYPHAE = Color(57, "WARPED_HYPHAE", (86, 44, 62, 255))
COLOR_WARPED_WART_BLOCK = Color(58, "WARPED_WART_BLOCK", (20, 180, 133, 255))
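# Hedged example: `closest_color` is a helper introduced here for illustration
# (not part of the original palette); it picks the palette entry nearest to an
# RGBA value by squared distance, a common use for map-color tables like this.
def closest_color(rgba, palette):
    # squared Euclidean distance over the RGBA channels
    return min(palette, key=lambda c: sum((a - b) ** 2 for a, b in zip(c.rgba, rgba)))

# e.g. closest_color((250, 240, 80, 255), [COLOR_YELLOW, COLOR_GOLD]).name == 'GOLD'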
|
import json
from urllib import request, parse
import codecs
from tv_trivia import settings
from tv_trivia.models import Show
def get_show_by_id(title_query, year=''):
qry_dict = {'t': title_query, 'apikey': settings.OMDB_API_KEY}
if year:
qry_dict.update({'y': year})
r = request.urlopen("http://www.omdbapi.com/?{}".format(parse.urlencode(qry_dict)))
rdr = codecs.getdecoder('utf-8')
omdb_json = json.loads(r.read().decode('utf-8'))
imdb_id = omdb_json.get('imdbID')
title = omdb_json.get('Title')
s = Show(imdb_id, title)
s.season_qty = int(omdb_json.get('totalSeasons'))
y = omdb_json.get('Year', None)
if y:
s.year_start, s.year_end = y.split('–') if len(y.split('–')) > 1 else (y, '')
return s
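# Usage sketch (hedged): requires a valid OMDB_API_KEY in settings and network
# access; the title below is only an illustration.
#   show = get_show_by_id('The Wire')
#   print(show.season_qty, show.year_start, show.year_end)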
|
"""
A script to convert Bus Monitor Log files to CSV files easily importable to Excel.
Note: only parameters are exported, and read/write appear as different entries at different times.
"""
import sys
import os.path
import re
import collections
__commentLinePattern__ = re.compile(r'^\*[\s]*[\w:\s\.=]*$')
__aliasLinePattern__ = re.compile(r"""^\*[\s]*ALIAS[\s]*: #beginning of ALIAS comment line
[\s](?P<busID>[\d]*)\.(?P<portID>[\d]*)\.(?P<ID>[\d]*)
[\s]*=[\s]* # equality
(?P<name>[\w]*)
[\s]*$""", re.VERBOSE) # end of line
__paramUpdatePattern__ = re.compile(r"""^[\s]*
(?P<time>[\d.]*)
,(WRITE_PARAM|READ_PARAM|WRITE_MSG_UNPACK_PARAM|READ_MSG_UNPACK_PARAM)[\s]*,[\s]*
(?P<paramID>[\d]*),
(?P<value>.*)
$""", re.VERBOSE)
__openPortPattern__ = re.compile(r"""^[\s]*[\d.]*[\s]*,[\s]*OPEN[\s]*,
(?P<busID>[\d]*)
\.
(?P<portID>[\d]*)
[\s]*,[\s]*$""", re.VERBOSE)
__closePortPattern__ = re.compile(r"""^[\s]*[\d.]*[\s]*,[\s]*CLOSE[\s]*,
(?P<busID>[\d]*)
\.
(?P<portID>[\d]*)
[\s]*,[\s]*$""", re.VERBOSE)
__invalid_ID__ = 0xFFFFFFFF
BusAddress = collections.namedtuple('BusAddress', 'busID portID ID')
__invalid_Address__ = BusAddress(busID=__invalid_ID__, portID=__invalid_ID__, ID=__invalid_ID__)
__current_context__ = __invalid_Address__
__aliases__ = dict()
class LogFileParameterData:
"""
This class is:
- A data container for the parameter values read in the log files.
- An iterable object that returns CSV file lines
"""
def __init__(self, aliases):
"""
Constructor
"""
self.clear()
self.aliases = aliases
def clear(self):
self.time = list()  # a list for the time axis
self.data = dict()  # a dictionary of data streams
self.current = 0
def add_new_data_stream(self, address):
"""
Add a new data stream.
If there are existing values for other data streams (at previous times),
blank entries are automatically added to the created data stream.
:param address: address in the bus infrastructure the data stream corresponds to.
"""
if self.get_name_from_bus_address(address) not in self.data:
if len(self.data) > 0:
number_of_missing_entries = max([len(v) for (k, v) in self.data.iteritems()])
self.data[self.get_name_from_bus_address(address)] = [' '] * number_of_missing_entries
else:
self.data[self.get_name_from_bus_address(address)] = list()
def add_data_point(self, time, address, value):
"""
Add a new data point to the collection.
This method:
- Aligns time entries by ensuring that data is created for all the streams (the last
recorded value is automatically inserted).
- Converts addresses to names using the alias dictionary passed at creation.
"""
self.time.append(time)
self.add_new_data_stream(address)
self.data[self.get_name_from_bus_address(address)].append(value)
other_data_streams = {k: v for (k, v) in self.data.iteritems() if self.get_name_from_bus_address(address) != k}
for name, data_stream in other_data_streams.iteritems():
data_stream.append(data_stream[len(data_stream)-1])
def get_name_from_bus_address(self, address):
"""
Given a bus address, return the equivalent alias name.
Note: if no alias entry exists, the returned name is a string corresponding to the bus address.
"""
try:
return self.aliases[address]
except KeyError as e:
return str(address.busID)+'.'+str(address.portID)+'.'+str(address.ID)
except Exception as e:
sys.stderr.write('Could not convert address to name (address is {!s}).\n'.format(address))
sys.exit(2)
def get_data_stream_names_line(self):
"""
Return the data stream names (the first line in the CSV file).
"""
names = ['TIME(s)']
for key in self.data:
names.append(key)
return ','.join(names)
def __iter__(self):
return self
def next(self):
"""
Next iteration.
Return a CSV line for the current time entry and move to the next one.
"""
if self.current >= len(self.time):
raise StopIteration
else:
self.current += 1
csvline = ''
try:
csvline = self.time[self.current-1]+','
csvline += ','.join([str(val[self.current-1]) for (name, val) in self.data.iteritems()])
except Exception as e:
pass
return csvline
__file_data__ = LogFileParameterData(__aliases__)
def print_help():
"""
Print tool usage
"""
print "PURPOSE"
print '\tA script to convert Bus Monitor Log files to CSV files easily importable to Excel.'
print '\tNote: only parameters are exported, and read/write appear as different entries at different times.'
print "USAGE"
print '\tSimply execute passing the log file path as an argument. A CSV file corresponding to the log file will be' +\
'created in the same folder.'
print '\tAlternatively, pass a folder, and all the file with a \'bus.log\' extension will be converted.'
def get_options():
"""
Retrieve options, either from the command line arguments, or opening
dialogs if necessary.
"""
if len(sys.argv) != 2:
sys.stderr.write('Invalid argument.\n')
print_help()
sys.exit(2)
else:
try:
path = str(sys.argv[1])
except Exception as e:
sys.stderr.write('Argument must be a string (path to file or folder) [{!s}].\n'.format(e))
sys.exit(2)
else:
return path
def parse_alias_line(line, line_number):
"""
Update the aliases dictionary from an alias log file line.
:param line: log file line
:param line_number: log file line number
:return: none
"""
matched_groups = __aliasLinePattern__.match(line)
try:
__aliases__[BusAddress(busID=int(matched_groups.group('busID')),
portID=int(matched_groups.group('portID')),
ID=int(matched_groups.group('ID')))] = matched_groups.group('name')
except Exception as e:
sys.stderr.write('Could not import alias at line {!s} [{!s}].\n'.format(line_number, e))
sys.exit(2)
def parse_parameter_update_line(line, line_number):
"""
Add a new data point from a log file parameter read or write entry.
:param line: log file line
:param line_number: log file line number
:return: none
"""
matched_groups = __paramUpdatePattern__.match(line)
try:
address = BusAddress(busID=__current_context__.busID,
portID=__current_context__.portID,
ID=int(matched_groups.group('paramID')))
__file_data__.add_data_point(matched_groups.group('time'),
address,
matched_groups.group('value'))
except Exception as e:
sys.stderr.write('Malformed parameter update at line {!s} [{!s}].\n'.format(line_number, e))
sys.exit(2)
def parse_open_port_line(line, line_number):
"""
Update current context from an OPEN PORT log entry.
:param line: content of the log file line.
:param line_number: log file line number.
:return: None
"""
global __current_context__
matched_groups = __openPortPattern__.match(line)
try:
__current_context__ = BusAddress(busID=int(matched_groups.group('busID')),
portID=int(matched_groups.group('portID')),
ID=__invalid_ID__)
except Exception as e:
sys.stderr.write('Invalid ID at line {!s} [{!s}].\n'.format(line_number, e))
sys.exit(2)
def parse_close_port_line(line, line_number):
"""
Reset the current context following a CLOSE port log entry.
:param line: content of the log file line.
:param line_number: log file line number.
:return:
"""
global __current_context__
matched_groups = __closePortPattern__.match(line)
try:
if int(matched_groups.group('busID')) != __current_context__.busID \
or int(matched_groups.group('portID')) != __current_context__.portID:
sys.stderr.write('Malformed file: the context closed at line {!s} was never opened [{!s}], current context is {!s}.\n'.format(line_number,line,__current_context__))
except Exception as e:
sys.stderr.write('Invalid ID at line {!s} [{!s}].\n'.format(line_number, e))
sys.exit(2)
else:
__current_context__ = __invalid_Address__
# Below is a list that associates a Regex Pattern unique to a log file line type to a dedicated line handler.
__line_handlers__ = [{'pattern': __aliasLinePattern__, 'handler': parse_alias_line},
{'pattern': __paramUpdatePattern__, 'handler': parse_parameter_update_line},
{'pattern': __openPortPattern__, 'handler': parse_open_port_line},
{'pattern': __closePortPattern__, 'handler': parse_close_port_line}]
def convert_log_to_csv(input_file_path):
"""
Convert a bus.log file to a CSV file
"""
global __current_context__
__current_context__ = __invalid_Address__
# clear (rather than rebind) the shared alias dict so __file_data__ keeps its reference
__aliases__.clear()
__file_data__.clear()
print 'Opened {!s}'.format(input_file_path)
#Validate the extension
if input_file_path.split('.')[-2:] != ['bus', 'log']:
sys.stderr.write('File {!s} does not have the expected extension.\n'.format(input_file_path))
return
current_line_number = 0
try:
with open(input_file_path) as file:
for line in file:
current_line_number += 1
for line_type in __line_handlers__:
if line_type['pattern'].match(line):
line_type['handler'](line, current_line_number)
except Exception as e:
sys.stderr.write('Could not parse input file [{!s}].\n'.format(e))
else:
csv_file_path = os.path.splitext(input_file_path)[0] + '.csv'
# Create the CSV file from __file_data__
try:
with open(csv_file_path, 'w') as CSVfile:
CSVfile.write(__file_data__.get_data_stream_names_line() + '\n')
for data in __file_data__:
CSVfile.write(data + '\n')
except Exception as e:
sys.stderr.write('Could not write CSV file [{!s}].\n'.format(e))
print 'created {!s}'.format(csv_file_path)
if __name__ == "__main__":
"""
Entry point
"""
path = get_options()
if os.path.isfile(path):
list_of_file_to_convert = [path]
elif os.path.isdir(path):
list_of_file_to_convert = [os.path.join(path, f) for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))
and f.split('.')[-2:] == ['bus', 'log']]
else:
sys.stderr.write('argument {!s} should be a folder or file path.'.format(path))
sys.exit(2)
# Read the log files and update __file_data__
for input_file_path in list_of_file_to_convert:
convert_log_to_csv(input_file_path)
|
# -*- coding: utf-8 -*-
"""
99: program finished
1 : adds the numbers in two positions and stores the result in a third position.
2 : multiplies the numbers in two positions and stores the result in a third position.
3 : takes an input and stores it at a specific position.
4 : outputs the value at a specific position.
5 : jump-if-true
6 : jump-if-false
7 : less than
8 : equals
9 : adjust the relative base
"""
import copy
correct_op = [1,2,3,4,5,6,7,8,9,99] #supported operations so far
# define macros of directions
UP = 0
DOWN = 1
LEFT = 2
RIGHT = 3
cursor = 0
rela_base = 0
def int_compute(code_list, iter_input):
#cursor = 0
global cursor
global rela_base
op_code = code_list[cursor]%100
output_cnt = 0
output = []
#print('op code is: ', op_code, ' cursor is: ',code_list[cursor], code_list[cursor+1],code_list[cursor+2])
while(op_code in correct_op):
#print('op code is: ', op_code, ' cursor is: ',code_list[cursor], code_list[cursor+1],code_list[cursor+2], code_list[cursor+3])
op_code = code_list[cursor]%100
op_mode = []
op_mode_int = code_list[cursor]//100
#print('op_mode_int: ' +str(op_mode_int))
for i in range(0,3):
op_mode.append(op_mode_int%10)
op_mode_int = op_mode_int//10
#print('op_mode is ', op_mode)
if(op_code == 1):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op1')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op1')
if(op_mode[2] == 0):
code_list[code_list[cursor+3]] = p1 + p2
elif(op_mode[2] == 2):
code_list[rela_base + code_list[cursor+3]] = p1+ p2
else:
print('error getting addr in op1')
cursor += 4
elif(op_code == 2):
#print('curr pos: ', code_list[cursor], code_list[cursor+1], code_list[cursor+2], code_list[cursor+3])
if(op_mode[0] == 0):
#print('curr pos: ', code_list[cursor+1])
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op2')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op2')
if(op_mode[2] == 0):
code_list[code_list[cursor+3]] = p1 * p2
elif(op_mode[2] == 2):
code_list[rela_base + code_list[cursor+3]] = p1 * p2
else:
print('error getting addr in op2')
cursor += 4
elif(op_code == 3):
if (op_mode[0] != 0):
#print('error getting addr in op3')
#print(code_list[cursor])
code_list[rela_base + code_list[cursor+1]] = iter_input
else:
code_list[code_list[cursor+1]] = iter_input
cursor += 2
elif(op_code == 4):
#print('op_mode: ' + str(op_mode))
if(op_mode[0] == 0):
#print("the output value (mode 0): " + str(code_list[code_list[cursor+1]]))
if not output_cnt:
output.append(code_list[code_list[cursor+1]])
output_cnt += 1
else:
output.append(code_list[code_list[cursor+1]])
cursor += 2
return output
elif(op_mode[0] == 2):
#print("the output value (mode 2): " + str(code_list[rela_base + code_list[cursor+1]]))
if not output_cnt:
output.append(code_list[rela_base + code_list[cursor+1]])
output_cnt += 1
else:
output.append(code_list[rela_base + code_list[cursor+1]])
cursor += 2
return output
else:
#print("the output value (mode 1): " + str(code_list[cursor+1]))
if not output_cnt:
output.append(code_list[cursor+1])
output_cnt += 1
else:
output.append(code_list[cursor+1])
cursor += 2
return output
cursor += 2
elif(op_code == 5):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op5')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op5')
if p1:
cursor = p2
else:
cursor += 3
elif(op_code == 6):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op6')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op6')
if not p1:
cursor = p2
else:
cursor += 3
elif(op_code == 7):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op7')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op7')
if(op_mode[2] == 0):
code_list[code_list[cursor+3]] = 1 if p1 < p2 else 0
elif(op_mode[2] == 2):
code_list[rela_base + code_list[cursor+3]] = 1 if p1 < p2 else 0
else:
print('error getting addr in op7')
cursor += 4
elif(op_code == 8):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op8')
if(op_mode[1] == 0):
p2 = code_list[code_list[cursor+2]]
elif(op_mode[1] == 1):
p2 = code_list[cursor+2]
elif(op_mode[1] == 2):
p2 = code_list[rela_base + code_list[cursor+2]]
else:
print('error getting addr in op8')
if(op_mode[2] == 0):
code_list[code_list[cursor+3]] = 1 if p1 == p2 else 0
elif(op_mode[2] == 2):
code_list[rela_base + code_list[cursor+3]] = 1 if p1 == p2 else 0
else:
print('error getting addr in op8')
cursor += 4
elif(op_code == 9):
if(op_mode[0] == 0):
p1 = code_list[code_list[cursor+1]]
elif(op_mode[0] == 1):
p1 = code_list[cursor+1]
elif(op_mode[0] == 2):
p1 = code_list[rela_base + code_list[cursor+1]]
else:
print('error getting addr in op9')
rela_base += p1
cursor += 2
else:
if(op_code == 99):
print("program halt at: " + str(code_list[cursor-1]))
return -1
op_code = code_list[cursor]%100
print('break: error: ', code_list[cursor], ' next value: ', code_list[cursor+1])
def turn_direc(curr_dirc, turn_act):
if(curr_dirc == UP):
next_dirc = RIGHT if turn_act else LEFT
elif(curr_dirc == LEFT):
next_dirc = UP if turn_act else DOWN
elif(curr_dirc == DOWN):
next_dirc = LEFT if turn_act else RIGHT
elif(curr_dirc == RIGHT):
next_dirc = DOWN if turn_act else UP
else:
print('unrecognized direction!')
next_dirc = -1
return next_dirc
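# e.g. turn_direc(UP, 1) -> RIGHT and turn_direc(UP, 0) -> LEFT (1 = turn right)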
if __name__ == "__main__":
f = open("input.txt", "r")
line = f.read()
mem = line.split(',' , line.count(','))
mem = list(map(int, mem))
add = []
for i in range(1000):
add.append(0)
mem.extend(add)
panel_wide = 60
panel_len = 100
panel = []
for i in range(panel_len):
line = []
for j in range(panel_wide):
line.append([0,0])
panel.append(line)
coord_x = 10
coord_y = 50
cur_direction = UP
#the first panel is white (1)
panel[coord_y][coord_x][0] = 1
cur_output = int_compute(mem, panel[coord_y][coord_x][0])
while(cur_output != -1):
panel[coord_y][coord_x][0] = cur_output[0]
panel[coord_y][coord_x][1] = 1
next_direction = turn_direc(cur_direction,cur_output[1])
if next_direction == UP:
coord_y -= 1
elif next_direction == DOWN:
coord_y += 1
elif next_direction == LEFT:
coord_x -= 1
elif next_direction == RIGHT:
coord_x += 1
else:
break
print('x = ', coord_x, ' y = ', coord_y)
cur_output = int_compute(mem, panel[coord_y][coord_x][0])
cur_direction = next_direction
f.close()
all_cnt = 0
for i in range(panel_len):
for j in range(panel_wide):
if panel[i][j][1]:
all_cnt += 1
for i in range(panel_len):
for j in range(panel_wide):
if(panel[i][j][0] == 1):
print('8', end=' ')
else:
print(' ', end = ' ')
print('\n')
|
#!/usr/bin/env python
import traceback
import sys
from regression import scenario
try:
scenario.run()
except Exception, ex:
print "Error in scenario"
tb = tb2 = sys.exc_info()[2]
tb_len = 1
while tb2.tb_next:
tb_len += 1
tb2 = tb2.tb_next
traceback.print_tb(tb, tb_len - 1, sys.stdout)
print ex
|
# Chaos encryption plugin (key-free version)
# by sklonely
# auto-repairing import snippet: start
lestModName = ""
while 1:
try:
import sys
import os
sys.path.append(sys.path[0] + '/mods/')  # add our own mods directory to the Python module search path
# the imports we need go below
from flask import Flask, jsonify, request
from flask_cors import CORS
from HENMAP_chaos_model import Chaos
from AESmod import AEScharp
import random
import json
import time
import threading
import copy
import hashlib
except (ModuleNotFoundError, ImportError): # python import error
err = str(sys.exc_info()[1])[17:-1]
if (lestModName != err):
print("缺少mod: " + err + " 正在嘗試進行安裝")
os.system("pip install " + err)
lestModName = err
else:
print("無法修復import問題 請人工檢查", "mod name: " + err)
sys.exit()
else:
del lestModName
break
# auto-repairing import snippet: end
app = Flask(__name__)
# variables
CORS(app)
@app.route("/")
def hello():
return str("歡迎來到渾沌加解密測試首頁")
# @app.route("/encrypt", methods=['POST'])
@app.route("/encrypt")
def encrypt():
temp_Um = Um
return ("key: " + str(X[0]) + " Um:" + str(temp_Um))
@app.route("/AES_encrypt", methods=['POST'])
def AES_encrypt(data="this is a test message"):
# s = time.time()
# initialize the encryption data
temp_Um = copy.deepcopy(Um)
key = copy.deepcopy(X[0])
aes = AEScharp()
try:
dict1 = json.loads(request.get_data())
data = dict1['data']
use_key = hashlib.sha256(dict1['key'].encode('utf-8')).digest()
use_key = list(use_key)
for i in range(32):
temp_Um[i] = round(temp_Um[i] + use_key[i], 7)
# print(temp_Um)
except:
pass
# encrypt the data
sendData = aes.encrypt(data, key)
global testEN
testEN = sendData
# build the JSON payload
sendData = {'encrypt_text': str(sendData), 'Um': str(temp_Um)}
# e = time.time()
# print(e - s)
return json.dumps(sendData)
@app.route("/AES_decrypt", methods=['POST'])
def decrypt():
# initialize the decryption data
# s = time.time()
try:
# try to parse the request
dict1 = json.loads(request.get_data())
data = dict1['data']
data = eval(data)
temp_Um = dict1['Um']
temp_Um = temp_Um[1:-1].split(", ")
use_key = hashlib.sha256(dict1['key'].encode('utf-8')).digest()
use_key = list(use_key)
# remove the key from Um
for i in range(len(temp_Um)):
use_key[i] = float(use_key[i])
temp_Um[i] = float(temp_Um[i])
temp_Um[i] -= use_key[i]
except:
print("has error")
pass
# print(len(data))
# start synchronization
async_flag = False  # synchronization flag
times = 0  # number of failed synchronization attempts
while async_flag is False:
Y = [random.random(), random.random(), random.random()]
client = Chaos()
chck = 0
for i in range(len(temp_Um) - 1, -1, -1):
Y = client.runSlave(2, Y, temp_Um[i])
if i == 1:
chck = client.createUs(Y)
# check whether synchronization succeeded
if round(temp_Um[0] + chck, 6):
# print(temp_X, Y[0], client.createUs(Y), temp_Um[0] + chck)
async_flag = False
if times > 12:
break
times += 1
print(round(temp_Um[0] + chck, 6))
else:
async_flag = True
# decrypt
aes = AEScharp()
getData = aes.decrypt(data, Y[0])
# build the JSON payload
getData = {'decrypt_text': str(getData), 'flag': str(async_flag)}
# e = time.time()
# print(e - s)
return json.dumps(getData)
def chaos():
# initialization: prepare the Um buffer
sys_chaos = Chaos()
global X, Um
X = [random.random(), random.random(), random.random()]
Um = []
for i in range(32):
Um.append(0)
Um[0] = sys_chaos.createUm(X)
X = sys_chaos.runMaster(0, X)
# enter the loop and run the chaos system
while 1:
for i in range(31, 0, -1):
Um[i] = Um[i - 1]
Um[0] = sys_chaos.createUm(X)
X = sys_chaos.runMaster(1, X)
# print(X[0], Um[0])
time.sleep(0.001)
def show(times=50):
# test function
time.sleep(.2)
x = 0
for i in range(times):
AES_encrypt()
time.sleep(.05)
if json.loads(decrypt())['flag'] == 'True':
x += 1
print(i, True)
else:
print(i, False)
print("成功:", x, "失敗:", times - x, "同步率:", (x / times) * 100, "%")
if __name__ == "__main__":
try:
# start the local chaos system
sys_chaos = threading.Thread(target=chaos)
sys_chaos.setDaemon(True)
sys_chaos.start()
print("SYS_Chaos 初始化完成 進入本機伺服器...")
# show()
# AES_encrypt()
port = int(os.environ.get('PORT', 5000))
app.run("0.0.0.0", port)
except:
print("退出渾沌加密系統")
# host = https://chaos-mod-sever.herokuapp.com/
|
import sys
from keras.preprocessing.image import img_to_array
from keras.preprocessing.image import load_img
from keras.models import load_model
from numpy import vstack, expand_dims
from loadingdata import load_dataset
from matplotlib import pyplot
from keras_contrib.layers.normalization.instancenormalization import InstanceNormalization
def image_to_translate(path):
img = load_img(path)
img = img_to_array(img)
img = expand_dims(img, 0)
img = (img - 127.5) / 127.5
return img
def translate(domain, img, AtoB, BtoA):
if domain == 'm':
generated = AtoB.predict(img)
reconstructed = BtoA.predict(generated)
else:
generated = BtoA.predict(img)
reconstructed = AtoB.predict(generated)
return generated, reconstructed
def save_img(img, generated, reconstructed,path):
images = vstack((img,generated,reconstructed))
titles = ['Input','Generated','Reconstructed']
images = (images + 1) / 2.0
for i in range(len(images)):
pyplot.subplot(1,len(images),i+1)
pyplot.axis('off')
pyplot.imshow(images[i])
pyplot.title(titles[i])
pyplot.savefig(path+'translation.png')
# Generate translation with: python3 generate.py [name of AtoB model] [name of BtoA model] [picture to translate] [m/f]
# Models should be placed in models folder in the repo and image to translate should be placed in translate folder in repo.
path_models = '../../models/'
path_image = '../../translate/'
cust = {'InstanceNormalization': InstanceNormalization}
AtoB = load_model(path_models+sys.argv[1],cust)
BtoA = load_model(path_models+sys.argv[2],cust)
img = image_to_translate(path_image+sys.argv[3])
generated, reconstructed = translate(sys.argv[4],img,AtoB,BtoA)
save_img(img,generated,reconstructed,path_image)
|
'''
Created on Nov 15, 2015
@author: TranBui
'''
from Tkinter import Frame,BOTH,Tk
import datetime
from ChartingCanvas import ChartingCanvas
from StockReader import StockReader
from MatplotCharting import MatplotCharting
class MainFrame(Frame):
'''
Presents our human interface in a TkInter window.
The size for the frame is 700x700
'''
def __init__(self, root, symbol, start, end):
'''
Constructor
'''
Frame.__init__(self, root, background="white")
#Create the stock reader
self.stockReader =StockReader(symbol, start, end)
self.root = root
#Create the canvas. We also pass in the stock reader (data) to the canvas
self.canvas = ChartingCanvas(root, self.stockReader)
self.initUI()
def initUI(self):
self.root.title(self.canvas.chartTitle)
self.pack(fill=BOTH, expand = 0)
def main():
root = Tk()
root.geometry("700x700+300+0")
d1 = datetime.datetime(2015, 1, 1)
d2 = datetime.datetime.today()
mainFrame = MainFrame(root,"AAPL", d1, d2)
#Plotting with matplot
stockReader =StockReader("AAPL", d1, d2)
matplotCharting = MatplotCharting(stockReader)
root.mainloop()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from setuptools import setup, find_packages
with open('VERSION') as version_file:
version = version_file.read().strip()
with open('README.rst') as readme_file:
readme = readme_file.read()
install_requires = [
'boto3>=1.4.0',
]
setup(
name='marquee',
version=version,
description='A simple Python logging formatter and handler for CloudWatch Events',
long_description=readme,
author='Herkermer Sherwood',
author_email='theherk@gmail.com',
url='https://github.com/theherk/marquee',
packages=find_packages(),
platforms=['all'],
install_requires=install_requires,
setup_requires=['pytest-runner'],
tests_require=[
'mock',
'pytest'
]
)
|
from flask import Flask,render_template,request
app = Flask(__name__)
@app.route('/')
def hello():
return render_template("form.html")
@app.route('/form', methods=["POST"])
def fb():
enroll = request.form["enrollment"]
name = request.form["name"]
mail = request.form["mail"]
print(enroll , name, mail)
return render_template("new.html", enroll=enroll , name=name , mail=mail)
if __name__ == '__main__':
app.run(port=6050, debug=True)
|
from dagster import pipeline, execute_pipeline
from zitarice.solids.caloriest_cereals import sort_by_calories
from zitarice.solids.configuring_download_csv import download_csv
@pipeline
def configurable_pipeline():
sort_by_calories(download_csv())
if __name__ == '__main__':
run_config = {
"solids": {
"download_csv": {
"config": {"url": "https://docs.dagster.io/assets/cereal.csv"}
}
}
}
result = execute_pipeline(configurable_pipeline, run_config=run_config)
|
"""
Week 4, Day 2: Interval List Intersections
Given two lists of closed intervals, each list of intervals is pairwise disjoint and in sorted order.
Return the intersection of these two interval lists.
(Formally, a closed interval [a, b] (with a <= b) denotes the set of real numbers x with a <= x <= b. The
intersection of two closed intervals is a set of real numbers that is either empty, or can be represented
as a closed interval. For example, the intersection of [1, 3] and [2, 4] is [2, 3].)
Note 1:
0 <= A.length < 1000
0 <= B.length < 1000
0 <= A[i].start, A[i].end, B[i].start, B[i].end < 10^9
"""
from typing import List
class Solution:
def intervalIntersection(self, A: List[List[int]], B: List[List[int]]) -> List[List[int]]:
a, b, LA, LB, result = 0, 0, len(A), len(B), []
while a < LA and b < LB:
(first_of_a, last_of_a), (first_of_b, last_of_b) = A[a], B[b]
first_of_overlap, last_of_overlap = max(first_of_a, first_of_b), min(last_of_a, last_of_b)
if first_of_overlap <= last_of_overlap:
result.append([first_of_overlap, last_of_overlap])
if last_of_a <= last_of_b:
a += 1
else:
b += 1
return result
if __name__ == '__main__':
o = Solution()
A = [[0, 2], [5, 10], [13, 23], [24, 25]]
B = [[1, 5], [8, 12], [15, 24], [25, 26]]
expected = [[1, 2], [5, 5], [8, 10], [15, 23], [24, 24], [25, 25]]
print('A =', A, '\nB =', B, '\noutput =', o.intervalIntersection(A, B), '\nexpected =', expected, '\n')
# last line of code
|
# Generated by Django 2.2.4 on 2019-09-24 20:28
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('recipes', '0015_auto_20190924_1558'),
]
operations = [
migrations.RenameModel(
old_name='MedicinesPharmacies',
new_name='Good',
),
migrations.AlterField(
model_name='apothecary',
name='user',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Пользователь'),
),
]
|
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import os
import urllib.request
URL = 'https://www.google.co.kr/imghp?hl=ko'
keyword = 'akakubi'
totalCount = 3
driver = webdriver.Chrome(r'H:\chromedriver.exe')
driver.get(url=URL)
driver.implicitly_wait(3)
input_element = driver.find_element_by_name('q')
input_element.send_keys(keyword)
input_element.send_keys(Keys.RETURN)
images = driver.find_elements_by_css_selector('.Q4LuWd')
count = 0
for img in images:
img.click()
time.sleep(0.5)
count += 1
# grab the image URL from the preview element's src attribute
imgURL = driver.find_element_by_xpath('//*[@id="Sva75c"]/div/div/div[3]/div[2]/c-wiz/div/div[1]/div[1]/div[2]/div[1]/a/img').get_attribute('src')
urllib.request.urlretrieve(imgURL, './' + str(keyword) + str(count) + '.jpg')
if count == totalCount:
break
print('done')
|
import itertools
import time
import numpy
import pytest
from helpers import *
from tigger.helpers import product
from tigger.fft import FFT
import tigger.cluda.dtypes as dtypes
from tigger.transformations import scale_param
def pytest_generate_tests(metafunc):
perf_log_shapes = [
(4,), (10,), (13,), # 1D
(4, 4), (7, 7), (10, 10), # 2D
(4, 4, 4), (5, 5, 7), (7, 7, 7)] # 3D
perf_mem_limit = 4 * 2**20
if 'shape_and_axes' in metafunc.funcargnames:
shapes = []
for x in [3, 8, 9, 10, 11, 12, 13, 20]:
shapes.append((2 ** x,))
for x, y in itertools.product([4, 7, 8, 10], [4, 7, 8, 10]):
shapes.append((2 ** x, 2 ** y))
for x, y, z in itertools.product([4, 7, 10], [4, 7, 10], [4, 7, 10]):
shapes.append((2 ** x, 2 ** y, 2 ** z))
batch_sizes = [1, 16, 128, 1024, 4096]
mem_limit = 2 ** 20
vals = []
ids = []
for shape, batch in itertools.product(shapes, batch_sizes):
if product(shape) * batch <= mem_limit:
if batch == 1:
vals.append((shape, None))
else:
vals.append(((batch,) + shape, tuple(range(1, len(shape) + 1))))
ids.append(str(batch) + "x" + str(shape))
metafunc.parametrize('shape_and_axes', vals, ids=ids)
elif 'non2batch_shape_and_axes' in metafunc.funcargnames:
def idgen(shape_and_axes):
shape, axes = shape_and_axes
assert len(axes) == 1
outer_batch = shape[:axes[0]]
inner_batch = shape[axes[0]+1:]
return ((str(outer_batch) + "x") if len(outer_batch) > 0 else "") + \
str(shape[axes[0]]) + "x" + str(inner_batch)
vals = [
((17, 16), (1,)),
((177, 256), (1,)),
((39, 16, 7), (1,)),
((17, 16, 131), (1,)),
((7, 1024, 11), (1,)),
((5, 1024, 57), (1,))]
metafunc.parametrize('non2batch_shape_and_axes', vals, ids=list(map(idgen, vals)))
elif 'non2problem_shape_and_axes' in metafunc.funcargnames:
def idgen(non2problem_shape_and_axes):
shape, axes = non2problem_shape_and_axes
return str(shape) + 'over' + str(axes)
vals = [
((17, 15), (1,)),
((17, 17), (1,)),
((19, 4095), (1,)),
((19, 4097), (1,)),
((39, 31, 7), (1,)),
((39, 33, 7), (1,)),
((3, 255, 7), (1,)),
((3, 257, 7), (1,)),
((17, 200, 131), (0, 1)),
((7, 1000, 11), (1, 2)),
((15, 900, 57), (0, 1, 2))]
metafunc.parametrize('non2problem_shape_and_axes', vals, ids=list(map(idgen, vals)))
elif 'perf_shape_and_axes' in metafunc.funcargnames:
vals = []
ids = []
for log_shape in perf_log_shapes:
shape = tuple(2 ** x for x in log_shape)
batch = perf_mem_limit // (2 ** sum(log_shape))
vals.append(((batch,) + shape, tuple(range(1, len(shape) + 1))))
ids.append(str(batch) + "x" + str(shape))
metafunc.parametrize('perf_shape_and_axes', vals, ids=ids)
elif 'non2problem_perf_shape_and_axes' in metafunc.funcargnames:
vals = []
ids = []
for log_shape in perf_log_shapes:
for modifier in (1, -1):
shape = tuple(2 ** (x - 1) + modifier for x in log_shape)
batch = perf_mem_limit // (2 ** sum(log_shape))
vals.append(((batch,) + shape, tuple(range(1, len(shape) + 1))))
ids.append(str(batch) + "x" + str(shape))
metafunc.parametrize('non2problem_perf_shape_and_axes', vals, ids=ids)
def check_errors(ctx, shape_and_axes):
dtype = numpy.complex64
shape, axes = shape_and_axes
data = get_test_array(shape, dtype)
data_dev = ctx.to_device(data)
res_dev = ctx.empty_like(data_dev)
fft = FFT(ctx).prepare_for(res_dev, data_dev, None, axes=axes)
# forward transform
fft(res_dev, data_dev, -1)
fwd_ref = numpy.fft.fftn(data, axes=axes).astype(dtype)
assert diff_is_negligible(res_dev.get(), fwd_ref)
# inverse transform
fft(res_dev, data_dev, 1)
inv_ref = numpy.fft.ifftn(data, axes=axes).astype(dtype)
assert diff_is_negligible(res_dev.get(), inv_ref)
def test_trivial(some_ctx):
"""
Checks that even if the FFT is trivial (problem size == 1),
the transformations are still attached and executed.
"""
dtype = numpy.complex64
shape = (128, 1, 1, 128)
axes = (1, 2)
param = 4
data = get_test_array(shape, dtype)
data_dev = some_ctx.to_device(data)
res_dev = some_ctx.empty_like(data_dev)
fft = FFT(some_ctx)
fft.connect(scale_param(), 'input', ['input_prime'], ['param'])
fft.prepare_for(res_dev, data_dev, None, param, axes=axes)
fft(res_dev, data_dev, -1, param)
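# With axes == (1, 2) the transformed extents are both 1, so the FFT itself
# is an identity; any deviation from `data * param` would therefore come from
# the attached scale transformation not being executed.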
assert diff_is_negligible(res_dev.get(), data * param)
def test_power_of_2_problem(ctx, shape_and_axes):
check_errors(ctx, shape_and_axes)
def test_non_power_of_2_problem(ctx, non2problem_shape_and_axes):
check_errors(ctx, non2problem_shape_and_axes)
def test_non2batch(ctx, non2batch_shape_and_axes):
"""
Tests that the normal algorithm supports inner and outer batches whose sizes are not powers of 2.
Batches here are the parts of ``shape`` that are not referenced in ``axes``.
"""
dtype = numpy.complex64
shape, axes = non2batch_shape_and_axes
data = get_test_array(shape, dtype)
data_dev = ctx.to_device(data)
res_dev = ctx.empty_like(data_dev)
fft = FFT(ctx).prepare_for(res_dev, data_dev, None, axes=axes)
# forward transform
fft(res_dev, data_dev, -1)
fwd_ref = numpy.fft.fftn(data, axes=axes).astype(dtype)
assert diff_is_negligible(res_dev.get(), fwd_ref)
def check_performance(ctx_and_double, shape_and_axes):
ctx, double = ctx_and_double
shape, axes = shape_and_axes
dtype = numpy.complex128 if double else numpy.complex64
data = get_test_array(shape, dtype)
data_dev = ctx.to_device(data)
res_dev = ctx.empty_like(data_dev)
fft = FFT(ctx).prepare_for(res_dev, data_dev, None, axes=axes)
attempts = 10
t1 = time.time()
for i in range(attempts):
fft(res_dev, data_dev, -1)
ctx.synchronize()
t2 = time.time()
dev_time = (t2 - t1) / attempts
fwd_ref = numpy.fft.fftn(data, axes=axes).astype(dtype)
assert diff_is_negligible(res_dev.get(), fwd_ref)
return dev_time, product(shape) * sum([numpy.log2(shape[a]) for a in axes]) * 5
@pytest.mark.perf
@pytest.mark.returns('GFLOPS')
def test_power_of_2_performance(ctx_and_double, perf_shape_and_axes):
return check_performance(ctx_and_double, perf_shape_and_axes)
@pytest.mark.perf
@pytest.mark.returns('GFLOPS')
def test_non_power_of_2_performance(ctx_and_double, non2problem_perf_shape_and_axes):
return check_performance(ctx_and_double, non2problem_perf_shape_and_axes)
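# A hedged note on the returned operation count: check_performance estimates
# the FFT cost with the conventional 5 * N * log2(N) real-operation model for
# a complex transform (N = total points, log2 summed over the transformed
# axes), so the perf harness can report GFLOPS as ops / time. A minimal
# sketch of the same arithmetic, assuming a (16, 1024) single-axis problem:
#
#     shape, axes = (16, 1024), (1,)
#     ops = 16 * 1024 * (10 * 5)       # product(shape) * log2(1024) * 5
#     gflops = ops / dev_time / 1e9    # dev_time measured as above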
|
"""Module container for functions that are zone-neutral."""
from re import finditer
import numpy as np
import pandas as pd
import re
import os
def file_to_df(file_in):
"""Read .csv file into dataframe."""
working_df = pd.read_csv(file_in, sep=',', encoding='iso-8859-1')
working_df = working_df.drop(['Unnamed: 0'], axis=1)
working_df = working_df[pd.notnull(working_df['text'])]
working_df.set_index('zone_num', inplace=True)
working_df['zone_num'] = working_df.index
return working_df
def test_consec_years(row):
"""Search for consecutive year strings in row values."""
current_zone = row['text']
zone_next = row['zone_next']
test_string = r'.*19\d{1,2}.{0,4}19\d{0,2}\s+.*'
# print_test = r'.{0,60}19\d{1,2}.{0,4}19\d{0,2}\s+.{1,160}'
if re.match(test_string, current_zone) or re.match(test_string, zone_next):
value_out = 1
else:
value_out = 0
return value_out
def update_and_output(zones_small, account_df_out, balance_df_out, stock_bond_df_out, reference_df_out, end_df_out):
"""Update original .csv file and save as new .csv file."""
working_df = file_to_df(zones_small)
account_column = account_df_out[['inc_table']]
balance_column = balance_df_out[['bal_sheet']]
stock_bond_columns = stock_bond_df_out[['stock_rec', 'bonds_rec']]
reference_column = reference_df_out[['ref_on_stocks', 'ref_on_bonds']]
end_search_columns = end_df_out[['provis_rtng', 'comp_name']]
working_df = working_df.join(account_column)
working_df = working_df.join(balance_column)
working_df = working_df.join(stock_bond_columns)
working_df = working_df.join(reference_column)
working_df = working_df.join(end_search_columns)
working_df = working_df[['file_name', 'manual', 'manual_yr', 'fiche', 'fiche_num',
'zone_num', 'CoName', 'CoNum', 'Hist', 'Dir', 'inc_table',
'bal_sheet', 'stock_rec', 'bonds_rec', 'ref_on_stocks',
'ref_on_bonds', 'provis_rtng', 'comp_name', 'text']]
save_name = 'ZoneClassificationsUpdate.csv'
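# NOTE (added): the slice below assumes the working directory path ends in an
# 8-character suffix (e.g. a 7-character folder name plus the path separator),
# so the output lands one level up; adjust if the layout differs.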
out_path = os.getcwd()[:-8] + save_name
working_df.to_csv(out_path, index=False)
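# A hedged usage sketch: test_consec_years reads a 'zone_next' column that is
# not built in this module; presumably it holds the following zone's text.
# Under that assumption it could be prepared and applied like this (the input
# file name and the 'consec_years' output column are hypothetical):
#
#     df = file_to_df('ZoneClassifications.csv')
#     df['zone_next'] = df['text'].shift(-1).fillna('')
#     df['consec_years'] = df.apply(test_consec_years, axis=1)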
|
# Given two binary strings, return their sum (also as a binary string).
#
# The input strings are both non-empty and contain only the characters 1 and 0.
#
# Example 1:
#
# Input: a = "11", b = "1"
# Output: "100"
#
# Example 2:
#
# Input: a = "1010", b = "1011"
# Output: "10101"
# Related Topics: Math, String
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
def addRestNum(self, c: str, tmp: int) -> str:
res = ""
for n in c:
num = int(n) + tmp
if num < 2:
res += str(num)
tmp = 0
else:
res += str(num - 2)
tmp = 1
if tmp == 1:
res += "1"
return res
def addBinary(self, a: str, b: str) -> str:
tmp = 0
a = a[::-1]
b = b[::-1]
res = ""
count = min(len(a), len(b))
for i in range(count):
num = int(a[i]) + int(b[i]) + tmp
if num < 2:
res += str(num)
tmp = 0
else:
res += str(num - 2)
tmp = 1
if count < len(a):
res += self.addRestNum(a[count:], tmp)
elif count < len(b):
res += self.addRestNum(b[count:], tmp)
elif tmp == 1:
res += "1"
return res[::-1]
# leetcode submit region end(Prohibit modification and deletion)
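# A hedged aside (not part of the submitted solution): Python's arbitrary-
# precision integers give a one-line reference implementation, handy for
# cross-checking the digit-by-digit version above:
#
#     def add_binary_ref(a: str, b: str) -> str:
#         return bin(int(a, 2) + int(b, 2))[2:]
#
#     assert add_binary_ref("1010", "1011") == "10101" == Solution().addBinary("1010", "1011")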
|