text stringlengths 8 6.05M |
|---|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 9 03:01:48 2020
@author: lifecell
"""
# Files and Functions: demonstrate writing, reading and appending a text file.

# 'w' truncates/creates the file for writing.
f = open("kipling.txt", 'w')
print(type(f))
f.write('We are now going to start writing into this file using python write function.\n\n')
f.write('If you can keep your head while all about you \nare losing theirs\
and blaming it on you,\n')
f.write('If you can trust yourself when all men doubt you,\n\
But make allowance for their doubting too;\n')
f.write('If you can wait and not be tired by waiting,\n\
Or being lied about, don\'t deal in lies,\n')
f.write('Or being hated, don\'t give way to hating,\n\
And yet don\'t look too good, nor talk too wise:\n')
f.close()

# Read the whole file back in one go.
f = open("kipling.txt", 'r')
print(f.read())
f.close()

# Read again, this time as a list of lines.
f = open("kipling.txt", 'r')
content = f.readlines()  # BUG FIX: was `content = print(...)`, which stored None
print(content)
#print(f.readline())
f.close()

# 'a' appends without truncating.
f = open("kipling.txt", 'a')
f.write('If you can dream - and not make dreams your master;\n\
If you can think - and not make thoughts your aim;\n')
f.close()

# BUG FIX: the original iterated `f.readline` (the bound method object),
# which raises TypeError; iterating the file object yields lines.
with open('kipling.txt', 'r') as f:
    for line in f:
        print(line)
#Functions

def hello():
    """Print a fixed greeting."""
    greeting = "Hello World!!"
    print(greeting)

hello()

def hi(name):
    """Greet *name* with a personalised message."""
    print(f"Hello {name}!!!")

hi("Nahush")

def hi2(name='Nahush'):
    """Greet *name*, defaulting to 'Nahush' when no argument is given."""
    print(f"Hello {name}!!!")

hi2("Manjari")
def FibNum(num=20):
    """Return the first *num* Fibonacci numbers as a list, starting at 0."""
    series = []
    current, following = 0, 1
    for _ in range(num):
        series.append(current)
        current, following = following, current + following
    return series

FibNum(10)
def calcMean(first, *remainder):
    """Return the arithmetic mean of one or more numbers.

    The first value is a required positional argument, so calling with no
    arguments is a TypeError rather than a ZeroDivisionError.
    """
    values = (first,) + remainder
    return sum(values) / len(values)
def fib2(n):
    """Return the n-th Fibonacci number (fib2(0) == 0, fib2(1) == 1).

    Iterative rewrite: the original naive double recursion ran in O(2**n)
    time; this computes the same values in O(n) with O(1) extra space.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
def add_n_prod(n, j):
    """Return the pair (n + j, n * j)."""
    return n + j, n * j

out = add_n_prod(2, 5)
out_sum, out_prod = add_n_prod(2, 5)
|
from django.db import models
class Regulation(models.Model):
    """A regulation document: a human-readable title plus an uploaded file."""
    # Display name of the regulation.
    title = models.CharField(max_length=250)
    # The uploaded document itself.  No upload_to is set, so files land in
    # MEDIA_ROOT directly -- NOTE(review): confirm that is intended.
    media = models.FileField()
|
# Give change using the minimum number of bank notes.  Read the bill amount
# and the payment amount (cents are ignored).  Available notes: 50, 20, 10,
# 5, 2 and 1 reais; none of them is ever out of stock.
conta = int(input('Digite o valor da conta a ser paga: '))
pagamento = int(input('Digite o valor do pagamento efetuado: '))
troco = pagamento - conta
notas = [50, 20, 10, 5, 2, 1]
# Greedy strategy: with this canonical note set, taking as many of the
# largest note as possible is optimal.  divmod replaces the original
# repeated-subtraction loop; the unused `total_notas` counter was removed.
for nota in notas:
    quantidade, troco = divmod(troco, nota)
    for _ in range(quantidade):
        print(f'Uma nota de {nota}')
|
if __name__ == "__main__":
    # BUG FIX: the original bound the controller to `gController` but every
    # call below used `sheetController`, raising NameError.  The name is
    # unified here.  NOTE(review): `spreadsheetId` is still undefined in
    # this chunk -- presumably set elsewhere in the file; confirm.
    sheetController = GoogleSheetController()
    from datetime import datetime

    number = 5
    for i in range(number):
        nowDate = datetime.now()
        nowStr = nowDate.strftime("%Y-%m-%d %H-%M-%S")
        values = [
            ["new song listened", nowStr],
        ]
        responseAppend = sheetController.appendFile(spreadsheetId, range="Sheet1", valueList=values)
        print("append time: ", nowStr)
        print("response of appending: ", responseAppend)
    # NOTE(review): indentation was lost in this chunk; the single read-back
    # is placed after the append loop -- confirm against the original.
    responseGet = sheetController.getFileContent(spreadsheetId, range="Sheet1")
    nowDate = datetime.now()
    nowStr = nowDate.strftime("%Y-%m-%d %H-%M-%S")
    print("get time: ", nowStr)
    print("response of getting: ", responseGet)
|
# -*- coding: utf-8 -*-
import uuid, hashlib, psycopg2
from model.exceptions import *
from model.objectView import ObjectView
"""
datos de la entidad
{
username:''
password:''
user_id:''
id:''
}
"""
class UserPassword:
    ''' cambia la clave de un usuario siempre y cuando se haya generado el hash previo usado getResetPasswordHash '''
    # DAO for the credentials.user_password table plus password resets.
    # All methods take an open psycopg2 connection; commit/rollback is the
    # caller's responsibility except where noted.

    def resetUserPassword(self, con, hash, username, password):
        """Apply a pending password reset identified by (hash, username).

        Raises UserNotFound if no unexecuted reset row matches.
        """
        cur = con.cursor()
        cur.execute('select creds_id, user_id from credentials.password_resets where executed = false and hash = %s and username = %s', (hash, username))
        d = cur.fetchone()
        if d is None:
            raise UserNotFound()
        # Rebuild the credentials record referenced by the reset row.
        newCreds = {
            'id': d[0],
            'user_id': d[1],
            'username': username,
            'password': password
        }
        self.updateUserPassword(con, newCreds)
        # Mark the reset as consumed so the hash cannot be replayed.
        cur.execute('update credentials.password_resets set executed = true where hash = %s and username = %s', (hash, username))

    ''' genera el hash necesario para resetear la clave de un usuario '''
    def getResetPasswordHash(self, con, username):
        """Create and persist a one-time reset hash for *username*.

        Returns the hash.  Raises UserNotFound for unknown users; rolls the
        connection back and re-raises on database errors.
        """
        creds = self.findCredentials(con, username)
        if creds is None:
            raise UserNotFound()
        try:
            # Random UUID + username, SHA1-hashed -> unguessable one-time token.
            hash = hashlib.sha1((str(uuid.uuid4()) + username).encode('utf-8')).hexdigest()
            rreq = (creds['id'], creds['user_id'], creds['username'], hash)
            cur = con.cursor()
            cur.execute('insert into credentials.password_resets (creds_id, user_id, username, hash) values (%s,%s,%s,%s)', rreq)
            return hash
        except psycopg2.DatabaseError as e:
            con.rollback()
            raise e

    """
    {
        user_id:''
        username:''
        password:''
    }
    """
    def createUserPassword(self, con, user):
        """Insert a new user_password row; *user* is a dict shaped as above."""
        try:
            rreq = (str(uuid.uuid4()), user['user_id'], user['username'], user['password'])
            cur = con.cursor()
            cur.execute('insert into credentials.user_password (id,user_id,username,password) values (%s,%s,%s,%s)', rreq)
        except psycopg2.DatabaseError as e:
            raise e

    """
    {
        id: ''
        username: ''
        password: ''
        user_id: ''
    }
    """
    def updateUserPassword(self, con, user):
        """Overwrite the user_password row identified by user['id']."""
        try:
            rreq = (user['user_id'], user['username'], user['password'], user['id'])
            cur = con.cursor()
            cur.execute('update credentials.user_password set user_id = %s, username = %s, password = %s, updated = now() where id = %s', rreq)
        except psycopg2.DatabaseError as e:
            raise e

    """
    {
        username: ''
        password: ''
    }
    """
    def findUserPassword(self, con, credentials):
        """Look up by username AND password; return a dict or None.

        NOTE(review): the password column is compared directly, which
        implies passwords are stored in a directly comparable form --
        confirm they are hashed upstream.
        """
        cred = ObjectView(credentials)
        cur = con.cursor()
        cur.execute('select id, user_id, username from credentials.user_password where username = %s and password = %s', (cred.username, cred.password))
        data = cur.fetchone()
        if data is not None:
            return self.convertToDict(data)
        else:
            return None

    def findCredentials(self, con, username):
        """Look up by username only; return a dict or None."""
        cur = con.cursor()
        cur.execute('select id, user_id, username from credentials.user_password where username = %s', (username,))
        data = cur.fetchone()
        if data is not None:
            return self.convertToDict(data)
        else:
            return None

    ''' transformo a diccionario las respuestas de psycopg2'''
    def convertToDict(self, d):
        """Map an (id, user_id, username) row tuple to a dict."""
        rdata = {
            'id': d[0],
            'user_id': d[1],
            'username': d[2]
        }
        return rdata
|
import random
from enum import Enum  # NOTE(review): imported but unused in this chunk

# Chromatic scale indexed 0-11 starting at C (flats for accidentals).
notes = ("C", "Db", "D", "Eb", "E", "F", "Gb", "G", "Ab", "A", "Bb", "B")

# Chord vocabulary: roman-numeral name -> semitone offsets of the chord
# tones, relative to the key's tonic.
chords = {'I': (0, 4, 7),
          'ii': (2, 5, 9),
          'iii': (4, 7, 11),
          'IV': (5, 9, 0),
          'V': (7, 11, 2),
          'vi': (9, 0, 4),
          #'vii': (11, 2, 5),
          'IV/I': (0, 5, 9),
          'V/I': (0, 7, 11, 2),
          'I/V': (7, 0, 4),
          'VI': (9, 1, 4),
          'iii7b5': (4, 7, 10, 2),
          'II': (2, 6, 9),
          'i6': (0, 3, 7, 9),
          'vi7b5/b3': (0, 9, 3, 7)
          }

# Progression graph: chord -> chords that may follow it.
graph = {'I': ['IV/I', 'V/I'],
         'ii': ['I', 'iii', 'V', 'I/V'],
         'iii': ['I', 'vi', 'IV'],
         'IV': ['I', 'V', 'ii'],
         'V': ['I', 'iii'],
         'vi': ['IV', 'ii'],
         #'vii': ['I'],
         'IV/I': ['I'],
         'V/I': ['I'],
         'I/V': ['I'],
         'VI': ['ii'],
         'iii7b5': ['VI'],
         'II': ['V'],
         'i6': ['II'],
         'vi7b5/b3': ['II']}
def getChord(romanNumeral, key):
    """Transpose the chord's semitone offsets into *key* and name the notes.

    Returns the chord tones as a list of note names from `notes`.
    """
    shift = notes.index(key)
    # Wrapping with % 12 is equivalent to the original add-then-subtract-12
    # correction, since offsets and shift are both in 0..11.
    return [notes[(tone + shift) % 12] for tone in chords[romanNumeral]]
def nextChord(chord):
    """Pick a random successor chord.

    From 'I' any chord except 'I' itself may follow; otherwise the choice
    is restricted to the progression graph's neighbours.
    """
    if chord == 'I':
        candidates = list(chords)[1:]
    else:
        candidates = graph[chord]
    return random.choice(candidates)
def getProgFromChord(start):
    """Random-walk the chord graph from *start* until 'I' is reached.

    Prints the roman-numeral progression and returns the corresponding
    chord-tone tuples.
    """
    prog = [start]
    currentChord = nextChord(start)
    while currentChord != 'I':
        # BUG FIX: the original appended nextChord(currentChord) instead of
        # currentChord itself, so the first chord drawn after *start* was
        # skipped and the walk recorded chords one step ahead.
        prog.append(currentChord)
        currentChord = nextChord(currentChord)
    print(prog)
    return [chords[chord] for chord in prog]
def getProgWithNum(num):
    """Build a progression of *num* chords starting on 'I'.

    Returns the chord-tone tuples for each chord in the progression.
    """
    prog = ['I']
    upcoming = nextChord('I')
    while len(prog) < num:
        prog.append(upcoming)
        upcoming = nextChord(upcoming)
    return [chords[name] for name in prog]
#progression = getProgWithNum(4)
#for chord in progression:
#print(chords[chord])
|
#------------------------------------------------------------------------------
# Copyright 2008-2012 Istituto Nazionale di Fisica Nucleare (INFN)
#
# Licensed under the EUPL, Version 1.1 only (the "Licence").
# You may not use this work except in compliance with the Licence.
# You may obtain a copy of the Licence at:
#
# http://joinup.ec.europa.eu/system/files/EN/EUPL%20v.1.1%20-%20Licence.pdf
#
# Unless required by applicable law or agreed to in
# writing, software distributed under the Licence is
# distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied.
# See the Licence for the specific language governing
# permissions and limitations under the Licence.
#------------------------------------------------------------------------------
"""
Part of WNoDeS framework.
Interface between WNoDeS processes an the batch system LSF
"""
from wnodes.utils import command
from wnodes.utils import batch_system
#import sys
#import commands
#import threading
#try:
# from xml.etree import ElementTree
#except ImportError:
# try:
# from elementtree import ElementTree # pylint: disable-msg=F0401
# except ImportError:
# sys.exit("package ElementTree is missing, exiting.")
class Badmin(command.Command):
    """Builds a `badmin` administrative command for one LSF host."""

    def __init__(self, profile, option, hostname):
        self.__profile__ = profile
        self.__hostname__ = hostname
        self.__option__ = option

    def _get_command(self):
        """Return the shell line: source the LSF profile, then run badmin."""
        pieces = [
            'source %s;' % self.__profile__,
            ' badmin %s' % self.__option__,
            ' -C "WNoDeS admin action" %s' % self.__hostname__,
        ]
        return ''.join(pieces)
class Bjobs(command.Command):
    """Builds a `bjobs -w` command, optionally filtered by user and job id."""
    def __init__(self, profile, jobid='', user=''):
        self.__profile__ = profile
        self.__jobid__ = jobid
        self.__user__ = user
    def _get_command(self):
        # Wide (-w) output; when a job id is given, grep narrows the
        # listing to lines mentioning it.
        cmd = 'source %s; bjobs -w' % self.__profile__
        if self.__user__ != '':
            cmd += ' -u %s' % self.__user__
        if self.__jobid__ != '':
            cmd += ' %s | grep %s' % (self.__jobid__, self.__jobid__)
        return cmd
class Bhost(command.Command):
    """Builds a `bhosts -w` listing, grep-filtered to one hostname."""
    def __init__(self, profile, hostname = ''):
        self.__profile__ = profile
        self.__hostname__ = hostname
    def _get_command(self):
        # With an empty hostname the grep pattern is empty and matches
        # every line, so the full host list is returned.
        cmd = 'source %s; bhosts -w %s | grep %s' % (
            self.__profile__,
            self.__hostname__,
            self.__hostname__,
        )
        return cmd
class Bkill(command.Command):
    """Builds a `bkill` command, optionally run as another user via su."""

    def __init__(self, profile, jobid, user=''):
        self.__profile__ = profile
        self.__jobid__ = jobid
        self.__user__ = user

    def _get_command(self):
        """Return the shell line; with a user, wrap bkill in `su ... -c`."""
        cmd = 'source %s;' % self.__profile__
        # The original tested self.__user__ twice with the same condition;
        # the two identical branches are merged here.
        if self.__user__ != '':
            cmd += ' su -s /bin/bash -l %s -c' % self.__user__
            cmd += ' "bkill %s"' % self.__jobid__
        else:
            cmd += ' bkill %s' % self.__jobid__
        return cmd
class Bmod(command.Command):
    """Builds a `bmod` command that modifies a submitted LSF job."""

    def __init__(self, profile, option, jobid):
        self.__profile__ = profile
        self.__option__ = option
        self.__jobid__ = jobid

    def _get_command(self):
        """Return the shell line: source the profile, then bmod the job."""
        return 'source %s; bmod %s %s' % (
            self.__profile__, self.__option__, self.__jobid__)
class Breserve(command.Command):
    """Builds brsvadd/brsvdel commands for LSF advance reservations."""
    def __init__(self, profile, action, ID, hostname='', option='',
                 user=''):
        self.__profile__ = profile
        self.__hostname__ = hostname
        self.__id_reservation__ = ID
        self.__option__ = option
        self.__user__ = user
        # 'add' or 'del'; any other value raises KeyError in _get_command.
        self.__action__ = action
        self.type_reservation = {'add': 'brsvadd', 'del': 'brsvdel'}
    def _get_command(self):
        cmd = 'source %s;' % self.__profile__
        cmd += ' %s' % self.type_reservation[self.__action__]
        if 'add' == self.__action__:
            # brsvadd needs host, extra options, reservation name and user.
            cmd += ' -m %s %s' % (self.__hostname__, self.__option__)
            cmd += ' -N %s -u %s' % (self.__id_reservation__, self.__user__)
        else:
            # brsvdel only needs the reservation id.
            cmd += ' %s' % self.__id_reservation__
        return cmd
class Brun(command.Command):
    """Builds a `brun` command that forces a job onto a specific host."""

    def __init__(self, profile, hostname, jobid):
        self.__hostname__ = hostname
        self.__profile__ = profile
        self.__jobid__ = jobid

    def _get_command(self):
        """Return the shell line: source the profile, then brun -m host job."""
        return 'source %s; brun -m %s %s' % (
            self.__profile__, self.__hostname__, self.__jobid__)
class Bsub(command.Command):
    """Builds a `bsub < file` submission, optionally as another user."""

    def __init__(self, profile, option, user=''):
        self.__profile__ = profile
        self.__option__ = option
        self.__user__ = user

    def _get_command(self):
        """Return the shell line; with a user, wrap bsub in `su ... -c`."""
        cmd = 'source %s;' % self.__profile__
        # The original tested self.__user__ twice with the same condition;
        # the two identical branches are merged here.
        if self.__user__ != '':
            cmd += ' su -s /bin/bash -l %s -c' % self.__user__
            cmd += ' "bsub < %s"' % self.__option__
        else:
            cmd += ' bsub < %s' % self.__option__
        return cmd
class LsfCommands(batch_system.BatchSystem):
    """ Set of lsf batch system commands"""
    def __init__(self, lsf_profile, expiration=15):
        # Path of the LSF environment profile sourced before every command.
        self.lsf_profile = lsf_profile
        self.expiration = expiration
        self.expiration_period = [0, 0]
        self.b_jobs_output = []
        self.jobs_info = {}
        self.host_list = {}
    def parse_cmd_output(self, my_input):
        # Identity pass-through: raw LSF output is consumed as-is.
        return my_input
    def badmin(self, option, hostname):
        """badmin: run an administrative action against *hostname*."""
        b_admin = Badmin(self.lsf_profile, option, hostname)
        b_admin_output = b_admin.get_output()
        return b_admin_output
    def bjobs(self, jobid='', user=''):
        """bjobs: list jobs, optionally filtered by job id and/or user."""
        b_jobs = Bjobs(self.lsf_profile, jobid=jobid, user=user)
        b_jobs_output = b_jobs.get_output()
        return b_jobs_output
    def bhost(self, hostname = '', fake = False, fake_status = 1):
        """bhosts: list batch hosts.

        NOTE(review): `fake` and `fake_status` are accepted but unused here
        -- presumably part of the BatchSystem interface; confirm.
        """
        b_host = Bhost(self.lsf_profile, hostname = hostname)
        b_host_output = b_host.get_output()
        return b_host_output
    def bkill(self, jobid, user=''):
        """bkill: kill a job, optionally impersonating *user*."""
        b_kill = Bkill(self.lsf_profile, jobid, user=user)
        b_kill_output = b_kill.get_output()
        return b_kill_output
    def bmod(self, option, jobid):
        """bmod: modify a submitted job."""
        b_mod = Bmod(self.lsf_profile, option, jobid)
        b_mod_output = b_mod.get_output()
        return b_mod_output
    def breserve(self, action, ID, hostname='', option='', user=''):
        """breserve: add ('add') or delete ('del') an advance reservation."""
        b_reserve = Breserve(self.lsf_profile, action, ID, hostname=hostname,
                             option=option, user=user)
        b_reserve_output = b_reserve.get_output()
        return b_reserve_output
    def brun(self, hostname, jobid):
        """brun: force-dispatch a job onto *hostname*."""
        b_run = Brun(self.lsf_profile, hostname, jobid)
        b_run_output = b_run.get_output()
        return b_run_output
    def bsub(self, option, user=''):
        """bsub: submit the job script named by *option*."""
        b_sub = Bsub(self.lsf_profile, option, user=user)
        b_sub_output = b_sub.get_output()
        return b_sub_output
|
#!/usr/bin/env python
import json
from datetime import datetime
from collections import defaultdict
import requests
from BeautifulSoup import BeautifulSoup  # BeautifulSoup 3 (Python 2 era API)

SOURCE = 'http://www.bangladeshpost.gov.bd/PostCode.asp'
LIST_PAGE = 'http://www.bangladeshpost.gov.bd/PostCodeList.asp?DivID={}'

def get_codes():
    """
    Fetch all available Bangladesh post codes
    :returns: a nested dictionary of
        {division: district: thana: (suboffice, postcode)}
    """
    # Triple-nested defaultdict: codes[division][district][thana] is
    # auto-created as a list of (post office, code) pairs.
    codes = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    counts = {
        'divisions': 0,
        'districts': 0,
        'thanas': 0,
        'offices': 0,
    }
    # Division ids assumed to be 0..9 -- TODO confirm against the site.
    for i in range(10):
        response = requests.get(LIST_PAGE.format(i))
        if response.status_code != 200:
            raise Exception('Cannot download page')
        soup = BeautifulSoup(response.content)
        code_table = soup.findAll('table')[-1]
        # last table lists all codes
        rows = code_table.findAll('tr')[1:]
        # ignore header row
        # find the parent table that holds division name
        div_table = code_table.parent
        # Five levels up in the markup -- tied to the page's exact layout.
        for j in range(5):
            div_table = div_table.parent
        division = div_table.tr.text.splitlines()[1].strip()
        # second line has the division name
        if not division:
            continue
        counts['divisions'] += 1
        division = codes[division]
        # Previous district/thana are tracked so consecutive rows of the
        # same group are not double-counted.
        pdist, pthana = '', ''
        for row in rows:
            district, thana, po, code = [col.text for col in row.findAll('td')]
            # Data fix for a vandalised district cell on the source site.
            if district == 'IBH WAs Here':
                district = 'Bagerhat'
            if pdist != district:
                pdist = district
                counts['districts'] += 1
            if pthana != thana:
                pthana = thana
                counts['thanas'] += 1
            counts['offices'] += 1
            division[district][thana].append((po, code))
    return codes, counts
if __name__ == '__main__':
    # Scrape everything and emit a single JSON document on stdout.
    data, counts = get_codes()
    codes = {
        'data': data,
        'meta': {
            # UTC timestamp of this scrape, ISO-8601 formatted.
            'updated_at': datetime.utcnow().isoformat(),
            'source': SOURCE,
            'counts': counts,
        },
    }
    print(json.dumps(codes))
|
import urllib.request
import http.cookiejar

url = 'http://www.taobao.com'
# BUG FIX: the original dict used the key 'http' twice, so the first proxy
# was silently discarded; keep one proxy per scheme.
proxy_handle = {'http': 'http://60.205.57.4:9999'}
proxy = urllib.request.ProxyHandler(proxy_handle)
opener = urllib.request.build_opener(proxy)
response = opener.open(url)
# BUG FIX: was `print(response.read(),decode('utf8'))` -- a comma instead of
# a dot looked up `decode` as a (nonexistent) global.
print(response.read().decode('utf8'))

# NOTE(review): `urls` was never defined in the original (NameError); it is
# stubbed here so the retry check can run -- confirm its intended source.
urls = [url]
if url in urls:
    response = urllib.request.urlopen(url)
    if response.status == 200:
        print('更换代理继续访问')

# Cookie demo: capture cookies from a request, save them in Mozilla format,
# then load them back.  (The original created a plain CookieJar and then
# immediately discarded it for a MozillaCookieJar.)
cookie = http.cookiejar.MozillaCookieJar()
handler = urllib.request.HTTPCookieProcessor(cookie)
# BUG FIX: `urllib.request.taobao_opener` does not exist; build_opener is
# the factory that accepts handlers.
opener = urllib.request.build_opener(handler)
# BUG FIX: 'http:www.taobao.com' is a malformed URL (missing '//').
url = 'http://www.taobao.com'
# NOTE(review): `headers` was undefined in the original; the request is
# issued without custom headers until the intended dict is known.
request = urllib.request.Request(url)
response = opener.open(request)
for item in cookie:
    print(item.name, item.value)
filename = 'yl.txt'
cookie.save(filename=filename, ignore_discard=True, ignore_expires=True)
cookie = http.cookiejar.MozillaCookieJar()
cookie.load('yl.txt')
print(cookie)
|
from os import path
import paramiko
from .fuse import UploaderFuse
from utils import log
class UploaderScp(UploaderFuse):
    """Uploads a backup over SFTP, authenticating with ~/.ssh/id_rsa."""

    def __init__(self, src, dest, username, hostname, port=22, dest_file_name=None):
        super(UploaderScp, self).__init__(src, dest)
        self._username = username
        self._hostname = hostname
        self._port = port
        # Optional override for the remote file name; the remote directory
        # always comes from `dest`.
        self._dest_file_name = dest_file_name
        self._client = None

    def _connect(self):
        """Open the SSH connection (key-based auth only)."""
        key_filename = path.expanduser(path.join('~', '.ssh', 'id_rsa'))
        self._client = paramiko.SSHClient()
        self._client.load_system_host_keys()
        self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # Log before connecting so a hung connection is still attributable
        # (the original logged only after connect() returned).
        log.info('Connecting to \'{}:{}\'. As user: \'{}\'. Using key from \'{}\''.format(self._hostname, self._port, self._username,
                                                                                          key_filename))
        self._client.connect(self._hostname, port=self._port, username=self._username, key_filename=key_filename)

    def _disconnect(self):
        """Close the SSH connection if one was opened."""
        if self._client:
            self._client.close()

    def upload(self, after_copy=None):
        """Copy self._src to the remote host; call *after_copy* on success.

        Exceptions are logged rather than propagated; the connection is
        always closed.  NOTE(review): `self._dest` is assumed to be set by
        the UploaderFuse base class -- confirm.
        """
        try:
            log.info('Uploading backup on {}:{}'.format(self._hostname, self._port))
            # BUG FIX: when dest_file_name was falsy, `dest` was never
            # bound and the sftp put below raised NameError.
            if self._dest_file_name:
                dest = path.join(path.dirname(self._dest), self._dest_file_name)
            else:
                dest = self._dest
            self._client.open_sftp().put(self._src, dest)
            if callable(after_copy):
                after_copy(self)
        except Exception as e:
            log.error('Upload failed. Exception: {}'.format(e))
        finally:
            self._disconnect()
|
import pandas as pd
import numpy as np

# 1000 samples of standard-normal noise indexed by consecutive days
# starting 2000-01-01.
ts = pd.Series(np.random.randn(1000), index=pd.date_range('1/1/2000', periods=1000))
# Bare expression: only echoes the Series in a REPL/notebook; it is a
# no-op when the file is run as a script.
ts
|
'''
Varun D N - vdn207@nyu.edu
'''
'''Run this module to get the output of the scores per borough'''

import pandas as pd
import restaurants as res
import specific_functions as specfunc

if __name__ == '__main__':
    '''Main program'''
    restaurants = pd.read_csv("DOHMH_New_York_City_Restaurant_Inspection_Results.csv", low_memory=False)
    restaurants_obj = res.Restaurants(restaurants)

    borough_scores = specfunc.find_scores_by_borough(restaurants_obj)
    for borough in borough_scores.keys():
        # BUG FIX: Python 2 print statement converted to a print() call so
        # the module runs under Python 3.
        print(borough + ": " + str(borough_scores[borough]))

    # Plotting the graphs of entire NYC
    specfunc.plot_grade_improvement(restaurants_obj, 'grade_improvement_nyc.pdf')
    # Plotting the graphs of every borough
    specfunc.plot_grade_improvements_by_borough(restaurants_obj)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 28 09:48:23 2019
@author: Jules
"""
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="CNN",  # Replace with your own username
    version="1.0",
    author="Students' team",
    description="A small example package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
    # BUG FIX: a stray `setuptools.package_index` positional argument
    # followed the keyword arguments here, which is a SyntaxError.
)
#!/usr/bin/env python3
"""Quiver (phase) plot of a hand-tuned closed-loop pendulum controller."""
import frccontrol as fct
import math
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp

# Elevator model parameters (fed to the frccontrol state-space builder).
# Number of motors
num_motors = 2.0
# Elevator carriage mass in kg
m = 6.803886
# Radius of pulley in meters
r = 0.02762679089
# Gear ratio
G = 42.0 / 12.0 * 40.0 / 14.0

sysc = fct.models.elevator(fct.models.MOTOR_CIM, num_motors, m, r, G)
sysd = sysc.sample(0.020)

# generate a quiver plot from this
plt.figure()
xHat1 = []
xHat2 = []
xDot1 = []
xDot2 = []

q = [0.02, 0.4]
# NOTE(review): rebinds the pulley radius `r` above -- confirm intended.
r = [12.0]
Q = np.diag(1.0 / np.square(q))
R = np.diag(1.0 / np.square(r))
# K = fct.lqr(sysc, Q, R) * 2
r = np.array([[5.0], [0.0]])
# Hand-tuned state-feedback gain (the LQR call above is disabled).
K = np.diag([5, 5.0])
print(K)

# Sample the state space (angle x1, angular velocity x2) on a grid and
# record the closed-loop state derivative at each point.
for x1 in np.arange(-math.pi, math.pi, math.pi / 8):
    for x2 in np.arange(-5.0, 5.0, math.pi / 8):
        x_hat = np.array([[x1], [x2]])
        # Error relative to the (pi/2, 0) reference.
        error = np.array([[math.pi / 2.0], [0]]) - x_hat
        feedback = K @ error
        # BUG FIX: the original expression had an unbalanced extra ')',
        # which made this line a SyntaxError.  Dynamics: [theta_dot,
        # gravity term] plus a constant offset, plus the state feedback.
        x_dot = (np.array([[x_hat[1, 0]], [-9.806 / 2.0 * math.sin(x_hat[0, 0])]]) + np.array([[0], [4.903]])) + feedback
        xHat1.append(x_hat[0, 0])
        xHat2.append(x_hat[1, 0])
        xDot1.append(x_dot[0, 0])
        xDot2.append(x_dot[1, 0])

x_hat = np.array([[math.pi / 2.0], [0]])
print("xdot is %s" % np.array([[x_hat[1, 0]], [-9.806 / 2.0 * math.sin(x_hat[0, 0])]]))

q = plt.quiver(xHat1, xHat2, xDot1, xDot2, angles="xy")
plt.scatter(math.pi / 2.0, 0, s=50)
plt.title("Closed loop pendulum phase plot with reference at $(\\frac{\\pi}{2}, 0)$")
plt.xlabel("Angle ($\\theta$)")
plt.ylabel("Angular velocity ($\\omega$)")
plt.show()
|
# Read a string and print it reversed.
s = input('Enter:')
rev = ''.join(reversed(s))
print(rev)
# i pledge my honor that I have abided by the stevens honor system

def square(lst):
    """Square every element of *lst* in place; returns None."""
    for idx, value in enumerate(lst):
        lst[idx] = value * value

# test of function
def main():
    numbers = [1, 2, 3, 4, 5]
    square(numbers)
    print(numbers)

main()
|
import os
import lyric_config
import logging
from gensim.models import word2vec
from pipes import quote
import scipy.io.wavfile as wav
import numpy as np
from config import nn_config
import parse_files
import config.nn_config as nn_config
conf = lyric_config.lyric_conf()

def getdict(train=False):
    """Load (and optionally first train) the word2vec embedding model.

    Returns the model loaded from the binary file configured under
    'emb_binfile'.
    """
    binfilename = conf['emb_binfile']
    if train:
        logging.info("Training Word Embeddings")
        # NOTE(review): the training corpus path was left empty in the
        # original -- it must be filled in before train=True is usable.
        txtfilename = ""
        # BUG FIX: the original format string referenced positional field
        # {3} but supplied only two arguments, raising IndexError.  The
        # embedding size is now an explicit third argument; 200 matches the
        # zero-vector fallback dimension used in word_to_vec().
        cmd = 'third-party/word2vec -train {0} -output {1} -cbow 0 -size {2} -window 5 -negative 0 -hs 1 -sample 1e-3 -threads 12 -binary 1'.format(quote(txtfilename), quote(binfilename), 200)
        os.system(cmd)
    return word2vec.Word2Vec.load_word2vec_format(binfilename, binary=True)
def word_to_vec(filename):
    """Convert a lyrics file into a list of per-line embedding sequences.

    Each non-empty line becomes a list of word vectors; words missing from
    the embedding vocabulary fall back to a 200-dim zero vector.
    """
    model = getdict()
    sequences = []
    # BUG FIX: the file handle was never closed; `with` guarantees it.
    with open(filename) as f:
        for line in f:
            words = line.rstrip().split()
            if not words:
                continue
            curSeq = []
            for word in words:
                try:
                    cur_vect = model[word.lower()]
                except KeyError:
                    # BUG FIX: was a bare `except:` that also swallowed
                    # unrelated errors; only out-of-vocabulary lookups
                    # should fall back to zeros.
                    cur_vect = np.zeros(200)
                curSeq.append(cur_vect)
            sequences.append(curSeq)
    return sequences
def gen_batch(X, sequences, batch_size):
    """Yield (x_batch, padded_sequence_batch) pairs.

    Sequences are sorted by length so each batch is nearly uniform, then
    zero-padded by padding().  Rows of X are sampled at random per batch.
    """
    # BUG FIX: the original unconditionally rebound `batch_size = 5`,
    # silently ignoring the caller's argument.
    sequences = sorted(sequences, key=lambda cur_seq: len(cur_seq))
    num_batch = len(sequences) // batch_size
    for i in range(num_batch):
        cur_batch = sequences[i * batch_size: min((i + 1) * batch_size, len(sequences))]
        x_idx = np.random.choice(X.shape[0], len(cur_batch))
        x_batch = X[x_idx]
        yield x_batch, padding(cur_batch)
def padding(batch):
    """Zero-pad every sequence in *batch* to the length of the last one.

    Assumes the batch is sorted by length, so batch[-1] is the longest.
    Sequences are padded IN PLACE (callers observe the mutation), and the
    result is returned stacked as a (batch, max_len, dim) array.
    """
    target_len = len(batch[-1])
    dim = batch[-1][0].shape[0]
    for seq in batch:
        seq.extend(np.zeros(dim) for _ in range(target_len - len(seq)))
    stacked = np.zeros([len(batch), target_len, dim])
    for row, seq in enumerate(batch):
        for col in range(target_len):
            stacked[row, col, ] = seq[col]
    return stacked
if __name__ == "__main__":
    config = nn_config.get_neural_net_configuration()
    input_directory = config['dataset_directory']
    output_filename = config['model_file']
    freq = config['sampling_frequency']  # sample frequency in Hz
    clip_len = 5  # length of clips for training. Defined in seconds
    # block sizes used for training - this defines the size of our input state
    # BUG FIX: `/` would yield a float under Python 3; `//` preserves the
    # original Python 2 integer-division semantics.
    block_size = freq // 4
    # Used later for zero-padding song sequences
    max_seq_len = int(round((freq * clip_len) / block_size))
    batch_size = 20
    # Step 1 - convert MP3s to WAVs
    new_directory = parse_files.convert_folder_to_wav(input_directory, freq)
    # Step 2 - convert WAVs to frequency domain with mean 0 and standard deviation of 1
    result = parse_files.convert_wav_files_to_nptensor(new_directory, block_size, max_seq_len, output_filename)
    X, x_mean, x_std = result
    lyric_sequence = word_to_vec(quote("./lyrical-net/data/lyrics/taylorswift/backtodecember.txt"))
    for X, y in gen_batch(X, lyric_sequence, batch_size):
        # BUG FIX: Python 2 print statement -> print() call.
        print(X.shape, y.shape)
|
import math

# Read a radius and print the circle's area.  math.pi replaces the
# original hard-coded 3.14 for full floating-point precision.
r = float(input("Enter radius of circle"))
print("Area of circle is : ", (math.pi * r * r))
|
from __future__ import absolute_import, unicode_literals
from django.conf import settings

# Endpoint for the common-services API; falls back to the literal string
# 'default' when the Django setting is absent.
COMMON_SERVICES_ENDPOINT = getattr(settings, 'COMMON_SERVICES_ENDPOINT', 'default')
|
from django.conf.urls import url
from django.views import View  # NOTE(review): imported but unused here
from . import views

urlpatterns = [
    #127.0.0.1:8000/v1/users
    # NOTE(review): the pattern r'/GET/$' has no leading '^' and embeds the
    # HTTP verb in the path -- confirm this matches the intended URLs.
    url(r'/GET/$', views.get_stocks),
    url(r'/total', views.get_total),
]
|
class DialogConfiguration(object):
    """Static confidence thresholds for slot-filling decisions."""

    # Above this score a slot is accepted outright, with no confirmation.
    alpha = 0.85

    # Between this and `alpha` the slot must be confirmed explicitly
    # before being fixed.
    beta = 0.25

    # Sanity check evaluated at class-definition time.
    assert alpha >= beta
|
# Rebinding demo: b is reassigned to a's value, so this prints 1.
a = 1
b = 2
b = a
print(b)
"""
References:
We referred following websites for implementing the A* search for 16 tile problem.
https://gist.github.com/joshuakenzo/1291155
http://stackoverflow.com/questions/4159331/python-speed-up-an-a-star-pathfinding-algorithm
http://web.mit.edu/eranki/www/tutorials/search/
Commands to execute the code:
python solution16.py input.txt
Output will be the moves taken to return to goal state
Few input files to test the code were attached
Approach:
We started programming by considering the input state as the initial state and for every state there are 16 possible branches. L1 - 1st row towards left
L2 - 2nd row towards left ...... We considered each direction of movement as an action. There are 16 actions possible for every board.
We use A* with manhattan distance as heuristic to find the next best action.
We display the moves we take inorder to reach the goal state as our output.
The function used is g(n)+h(n)
g(n) = 1 (for every expansion)
h(n) = manhattan distance (If the tile is present in the corners of the same row or column then we take h as 1
since one slide will be enough to push the tile to its position)
The heuristic function that we took is admissible because it is a manhattan distance. Manhattan distance gives us the lower bound and hence it is admissible.
Example config 1:
1 2 3 4
5 6 7 8
9 11 12 10
13 14 15 16
The above configuration took 3 minutes to execute and the result is
All moves: U3, L3, D3, R3, R3, U3, L3, D3
Example config 2:
1 2 4 3
5 6 7 8
9 11 12 10
13 14 15 16
The above configuration took 22 minutes to execute and the result is
All moves: R2, U2, L1, D2, L2, R1, U1, L1, D1
We assumed that manhattan distance is the best heuristic for this problem.
We used a priority queue to store the node instances that were not visited yet and a list to store the node instances visited
We tried using various data structures but priority queue seemed to work best.
The overall algorithm worked fine but it is taking too long to compute the moves. A better way of using data structures might reduce the time complexity.
We are able to find solution for most complex moves but the time taken is too long.
By storing pre-computed values in a database the time complexity of A* can be improved
"""
import copy
import sys
import math
import time
import bisect
import random
import itertools
import pdb
import Queue as Q
class Board:
    """
    class for storing Board configuration
    """
    def __init__(self, text):
        """ Initializing board with the values from input text"""
        # Fixed 4x4 board holding tiles 1..16.
        self.size = 4
        matrix_size = 16
        values = []
        if text != None:
            for n in text.split(","):
                values.append(int(n))
            # The input must be a permutation of 1..16.  (Python 2: range()
            # returns a list, so comparing against sorted() works.)
            if sorted(values) != range(1, matrix_size + 1):
                print "invalid numbers"
                sys.exit(1)
        else:
            print "enter valid input text"
        self.matrix = [[0 for x in range(4)] for x in range(4)]
        for y in range(4):
            for x in range(4):
                self.matrix[y][x] = values[(y * 4) + x]
        """filling the values for goal state"""
        """storing the indices of the values as key value pairs with value being the location of the key"""
        self.goals = {}
        for x in range(matrix_size):
            # Goal position of tile x+1 as (column, row); `x / 4` relies on
            # Python 2 integer division.
            self.goals[x + 1] = x % 4, x / 4

    def astar(self):
        """A* search over row/column rotations; prints the solution moves."""
        #closedlist = set()
        closedlist = []
        q = Q.PriorityQueue()
        # Root node: current board, no action, zero path cost, no parent.
        q.put(Node(self, None, 0, None))
        visited = 0
        while not q.empty():
            visited += 1
            node = q.get()
            # h() == 0 means every tile is already in its goal position.
            if node.board.h() == 0:
                moves = []
                # Walk parent links back to the root to recover the path.
                while node.parent:
                    moves.append(node.action)
                    node = node.parent
                moves.reverse()
                print "Solution found!"
                print "Number of nodes visited = ", visited
                print "All moves:", ", ".join(str(move) for move in moves)
                break
            else:
                for new_node in node.expand():
                    # Linear membership scans over q.queue and closedlist:
                    # correct but slow (acknowledged in the module docstring).
                    if new_node not in q.queue and new_node not in closedlist:
                        q.put(new_node)
                #closedlist.add(node)
                closedlist.append(node)

    def h(self):
        """Admissible heuristic: Manhattan distance with a rotation shortcut.

        A tile that is 3 away but already in its goal row or column can be
        placed with one rotation, so its contribution drops to 1.
        """
        h = 0
        for y, row in enumerate(self.matrix):
            for x, tile in enumerate(row):
                h1 = math.fabs(x - self.goals[tile][0]) + \
                     math.fabs(y - self.goals[tile][1])
                if h1 == 3 and (x == self.goals[tile][0] or y == self.goals[tile][1]):
                    h1 = 1
                h += h1
        return h

    def apply_action(self, action):
        """Rotate one row (L/R) or one column (U/D) of the board in place.

        Actions are 'L1'..'L4', 'R1'..'R4', 'U1'..'U4', 'D1'..'D4'; the
        digit is the 1-based row/column index.
        """
        if action == 'R1':
            temp = self.matrix[0][3]
            for i in range(3, 0, -1):
                self.matrix[0][i] = self.matrix[0][i - 1]
            self.matrix[0][0] = temp
        elif action == 'R2':
            temp = self.matrix[1][3]
            for i in range(3, 0, -1):
                self.matrix[1][i] = self.matrix[1][i - 1]
            self.matrix[1][0] = temp
        elif(action == 'R3'):
            temp = self.matrix[2][3]
            for i in range(3, 0, -1):
                self.matrix[2][i] = self.matrix[2][i - 1]
            self.matrix[2][0] = temp
        elif(action == 'R4'):
            temp = self.matrix[3][3]
            for i in range(3, 0, -1):
                self.matrix[3][i] = self.matrix[3][i - 1]
            self.matrix[3][0] = temp
        elif(action == 'L1'):
            temp = self.matrix[0][0]
            for i in range(0, 3):
                self.matrix[0][i] = self.matrix[0][i + 1]
            self.matrix[0][3] = temp
        elif(action == 'L2'):
            temp = self.matrix[1][0]
            for i in range(0, 3):
                self.matrix[1][i] = self.matrix[1][i + 1]
            self.matrix[1][3] = temp
        elif(action == 'L3'):
            temp = self.matrix[2][0]
            for i in range(0, 3):
                self.matrix[2][i] = self.matrix[2][i + 1]
            self.matrix[2][3] = temp
        elif(action == 'L4'):
            temp = self.matrix[3][0]
            for i in range(0, 3):
                self.matrix[3][i] = self.matrix[3][i + 1]
            self.matrix[3][3] = temp
        elif(action == 'U1'):
            temp = self.matrix[0][0]
            for i in range(0, 3):
                self.matrix[i][0] = self.matrix[i + 1][0]
            self.matrix[3][0] = temp
        elif(action == 'U2'):
            temp = self.matrix[0][1]
            for i in range(0, 3):
                self.matrix[i][1] = self.matrix[i + 1][1]
            self.matrix[3][1] = temp
        elif(action == 'U3'):
            temp = self.matrix[0][2]
            for i in range(0, 3):
                self.matrix[i][2] = self.matrix[i + 1][2]
            self.matrix[3][2] = temp
        elif(action == 'U4'):
            temp = self.matrix[0][3]
            for i in range(0, 3):
                self.matrix[i][3] = self.matrix[i + 1][3]
            self.matrix[3][3] = temp
        elif(action == 'D1'):
            temp = self.matrix[3][0]
            for i in range(3, 0, -1):
                self.matrix[i][0] = self.matrix[i - 1][0]
            self.matrix[0][0] = temp
        elif(action == 'D2'):
            temp = self.matrix[3][1]
            for i in range(3, 0, -1):
                self.matrix[i][1] = self.matrix[i - 1][1]
            self.matrix[0][1] = temp
        elif(action == 'D3'):
            temp = self.matrix[3][2]
            for i in range(3, 0, -1):
                self.matrix[i][2] = self.matrix[i - 1][2]
            self.matrix[0][2] = temp
        elif(action == 'D4'):
            temp = self.matrix[3][3]
            for i in range(3, 0, -1):
                self.matrix[i][3] = self.matrix[i - 1][3]
            self.matrix[0][3] = temp

    def actions(self):
        """Return all 16 possible actions (each row/column, both directions)."""
        actions = []
        actions.append("L1")
        actions.append("L2")
        actions.append("L3")
        actions.append("L4")
        actions.append("R1")
        actions.append("R2")
        actions.append("R3")
        actions.append("R4")
        actions.append("U1")
        actions.append("U2")
        actions.append("U3")
        actions.append("U4")
        actions.append("D1")
        actions.append("D2")
        actions.append("D3")
        actions.append("D4")
        return actions

    def __str__(self):
        """Render the board as a 4x4 grid of right-aligned tile numbers."""
        grid = "\n".join([" ".join(["{:>2}"] * 4)] * 4)
        values = itertools.chain(*self.matrix)
        return grid.format(*values).replace("None", " ")
class Node:
    """A search node: a board plus the action/cost/parent that produced it."""

    def __init__(self, board, action, cost, parent):
        self.board = board
        self.action = action
        self.cost = cost
        self.parent = parent
        """storing f = g(n)+h(n)"""
        self.f = cost + board.h()

    def expand(self):
        """Return the 16 successor nodes, one per possible board action."""
        nodes = []
        for action in self.board.actions():
            board = copy.deepcopy(self.board)
            board.apply_action(action)
            nodes.append(Node(board, action, self.cost + 1, self))
        return nodes

    def __eq__(self, rhs):
        # Nodes are equal when their board configurations match, no matter
        # how they were reached.
        if isinstance(rhs, Node):
            return self.board.matrix == rhs.board.matrix
        else:
            return rhs == self

    def __hash__(self):
        # BUG FIX: the previous hash mixed in action, cost and parent, so
        # two nodes comparing equal (same board) could hash differently,
        # violating the eq/hash contract for set/dict membership.  Hash
        # only the board configuration, mirroring __eq__.
        return hash(tuple(tuple(row) for row in self.board.matrix))

    def __lt__(self, rhs):
        # PriorityQueue ordering: lower f = g + h is dequeued first.
        return self.f < rhs.f

    def __cmp__(self, other):
        # Python 2 fallback comparison (ignored under Python 3).
        return cmp(self.cost, other.cost)
def main():
    """Read the 4x4 puzzle file named on the command line and solve it.

    Each of the file's 4 lines holds 4 space-separated tokens; they are
    joined into the single comma-separated string that Board expects.
    """
    path = sys.argv[1]
    # BUG FIX: the file handle was never closed (and shadowed the name
    # `file`); a context manager guarantees the close.
    with open(path) as puzzle_file:
        lines = puzzle_file.readlines()
    cells = []
    for line in lines:
        tokens = line.split(" ")
        # The old code split the same line four times; split once instead.
        cells.extend(tokens[i].rstrip() for i in range(4))
    b = Board(",".join(cells))
    b.astar()

if __name__ == "__main__":
    main()
|
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from query.models import Query, StopWord
class Command(BaseCommand):
    """Management command that wipes all data belonging to the guest user."""
    help = 'Deletes all data of the guest User'

    def handle(self, *args, **options):
        """Delete every StopWord and Query owned by the guest account."""
        guest = get_user_model().objects.get(username=settings.GUEST_USERNAME)
        # Same deletion order as before: stop words first, then queries.
        for model in (StopWord, Query):
            model.objects.filter(user=guest).delete()
|
# Generated by Django 2.1.2 on 2018-11-07 05:16
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds persona.vivienda and tightens choices/labels on existing fields."""

    # Must run after the previous auto-generated misPerrisDJ migration.
    dependencies = [
        ('misPerrisDJ', '0011_auto_20181106_2257'),
    ]

    operations = [
        # New housing-type choice field for adopters (blank sentinel first).
        migrations.AddField(
            model_name='persona',
            name='vivienda',
            field=models.CharField(choices=[('', 'Seleccione un tipo de vivienda'), ('casa-patio-grande', 'Casa con patio grande'), ('casa-patio-pequeño', 'Casa con patio pequeño'), ('casa-sin-patio', 'Casa sin patio'), ('departamento', 'Departamento')], default='', max_length=50, verbose_name='Vivienda'),
        ),
        # City becomes a fixed choice list.
        migrations.AlterField(
            model_name='persona',
            name='ciudad',
            field=models.CharField(choices=[('', 'Seleccione una ciudad'), ('santiago', 'Santiago')], default='', max_length=50, verbose_name='Ciudad'),
        ),
        # Birth date now defaults to today and documents the expected format.
        migrations.AlterField(
            model_name='persona',
            name='nacimiento',
            field=models.DateField(default=datetime.date.today, help_text='Formato: dd/mm/aaaa', verbose_name='Fec.Nacimiento'),
        ),
        migrations.AlterField(
            model_name='persona',
            name='nombre',
            field=models.CharField(max_length=25, verbose_name='Nombre'),
        ),
        # Region becomes a fixed choice list.
        migrations.AlterField(
            model_name='persona',
            name='region',
            field=models.CharField(choices=[('', 'Seleccione una región'), ('santiago', 'Región Metropolitana'), ('valparaiso', 'Región de Valparaiso'), ('rancagua', "Región del Libertador Bernardo O'Higgins"), ('maule', 'Región del Maule')], default='', max_length=15, verbose_name='Región'),
        ),
        migrations.AlterField(
            model_name='persona',
            name='rut',
            field=models.CharField(help_text='(ej: 12345678-9)', max_length=10, verbose_name='Rut'),
        ),
        migrations.AlterField(
            model_name='persona',
            name='telefono',
            field=models.CharField(blank=True, max_length=15, null=True, verbose_name='Teléfono'),
        ),
    ]
|
from microbit import *
import random
# Length and height of display
N = 5
# Delay between gem drops
DROP_DELAY = 500

# The gem is a pair of x and y coordinates
gem = [random.randint(0, N - 1), 0]

while True:
    # Show the gem at its current position on a cleared display.
    display.clear()
    display.set_pixel(gem[0], gem[1], 5)
    # Advance the gem one pixel downwards.
    gem[1] += 1
    # Once it would fall off the bottom, respawn at a random column on top.
    if gem[1] >= N:
        gem = [random.randint(0, N - 1), 0]
    sleep(DROP_DELAY)
|
# -*- test-case-name: mimic.test.test_auth -*-
"""
Defines get current customer
"""
from __future__ import absolute_import, division, unicode_literals
import json
from mimic.rest.mimicapp import MimicApp
from mimic.canned_responses import fastly
def text_urldata(urldata):
    """
    Convert ``urldata`` into text.

    :param urldata: a dictionary mapping bytes to lists of bytes.
    :return: a dictionary mapping text to lists of text.
    """
    decoded = {}
    for key, values in urldata.items():
        decoded[key.decode("utf-8")] = [value.decode("utf-8")
                                        for value in values]
    return decoded
class FastlyApi(object):
    """
    Rest endpoints for the mocked Fastly api.

    Every route delegates to the canned-response helper and returns the
    result serialized as JSON.
    """
    app = MimicApp()

    def __init__(self, core):
        """
        :param MimicCore core: The core to which this FastlyApi will be
            communicating.
        """
        self.core = core
        self.services = {}
        self.fastly_response = fastly.FastlyResponse()

    @app.route('/', methods=['GET'])
    def get_health(self, request):
        """Health probe; responds 200 OK."""
        return json.dumps(self.fastly_response.get_health())

    @app.route('/current_customer', methods=['GET'])
    def get_current_customer(self, request):
        """Current customer details (https://docs.fastly.com/api/account#customer_1)."""
        return json.dumps(self.fastly_response.get_current_customer())

    @app.route('/service', methods=['POST'])
    def create_service(self, request):
        """Create a service (https://docs.fastly.com/api/config#service_5)."""
        return json.dumps(
            self.fastly_response.create_service(
                text_urldata(request.args).items()))

    @app.route('/service/<string:service_id>/version', methods=['POST'])
    def create_version(self, request, service_id):
        """Create a service version (https://docs.fastly.com/api/config#version_2)."""
        return json.dumps(self.fastly_response.create_version(service_id))

    @app.route('/service/search', methods=['GET'])
    def get_service_by_name(self, request):
        """Look a service up by name (https://docs.fastly.com/api/config#service_3)."""
        service_name = text_urldata(request.args)['name'][0]
        return json.dumps(
            self.fastly_response.get_service_by_name(service_name))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/domain',
        methods=['POST'])
    def create_domain(self, request, service_id, version_id):
        """Create a domain (https://docs.fastly.com/api/config#domain_4)."""
        return json.dumps(
            self.fastly_response.create_domain(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/domain/'
        'check_all',
        methods=['GET'])
    def check_domains(self, request, service_id, version_id):
        """Check all domains (https://docs.fastly.com/api/config#domain_3)."""
        return json.dumps(
            self.fastly_response.check_domains(service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/backend',
        methods=['POST'])
    def create_backend(self, request, service_id, version_id):
        """Create a backend (https://docs.fastly.com/api/config#backend_2)."""
        return json.dumps(
            self.fastly_response.create_backend(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/condition',
        methods=['POST'])
    def create_condition(self, request, service_id, version_id):
        """Create a condition (https://docs.fastly.com/api/config#condition_3)."""
        return json.dumps(
            self.fastly_response.create_condition(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/cache_settings',
        methods=['POST'])
    def create_cache_settings(self, request, service_id, version_id):
        """Create cache settings (https://docs.fastly.com/api/config#cache_settings_3)."""
        return json.dumps(
            self.fastly_response.create_cache_settings(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/response_object',
        methods=['POST'])
    def create_response_object(self, request, service_id, version_id):
        """Create a response object (https://docs.fastly.com/api/config#response_object_3)."""
        return json.dumps(
            self.fastly_response.create_response_object(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route(
        '/service/<string:service_id>/version/<string:version_id>/settings',
        methods=['PUT'])
    def create_settings(self, request, service_id, version_id):
        """Update version settings (https://docs.fastly.com/api/config#settings_2)."""
        return json.dumps(
            self.fastly_response.create_settings(
                text_urldata(request.args).items(), service_id, version_id))

    @app.route('/service/<string:service_id>/version', methods=['GET'])
    def list_versions(self, request, service_id):
        """List a service's versions (https://docs.fastly.com/api/config#version_3)."""
        return json.dumps(self.fastly_response.list_versions(service_id))

    @app.route('/service/<string:service_id>/version/<string:version_number>/'
               'activate', methods=['PUT'])
    def activate_version(self, request, service_id, version_number):
        """Activate a service version (https://docs.fastly.com/api/config#version_5)."""
        return json.dumps(
            self.fastly_response.activate_version(service_id, version_number))

    @app.route('/service/<string:service_id>/version/<string:version_number>/'
               'deactivate', methods=['PUT'])
    def deactivate_version(self, request, service_id, version_number):
        """Deactivate a service version (https://docs.fastly.com/api/config#version_6)."""
        return json.dumps(
            self.fastly_response.deactivate_version(service_id, version_number))

    @app.route('/service/<string:service_id>', methods=['DELETE'])
    def delete_service(self, request, service_id):
        """Delete a service (https://docs.fastly.com/api/config#service_6)."""
        return json.dumps(self.fastly_response.delete_service(service_id))

    @app.route('/service/<string:service_id>/details', methods=['GET'])
    def get_service_details(self, request, service_id):
        """Fetch service details (https://docs.fastly.com/api/config#service_2)."""
        return json.dumps(self.fastly_response.get_service_details(service_id))
|
import pickle
import os.path
import random as rnd
import numpy as np
import time
import multiprocessing
from decision_theory_functionals import *
from experiment_setup import loadExperimentFile
#CONSTANTS
# Emit verbose progress/debug output when True.
DEBUG = True
#Size of train set (questions per random training split, out of 16 total)
TRAIN_SZ=12
#Num of training sets learned (number of random train/test splits)
NUM_REPEATS = 20
# Sentinel for "worse than any real model score" when maximising.
negInf = float("-Inf")
#Single process training function
#send_end - end of pipe to send data into
#NOTE(review): send_end is unconditionally .send()-ed at the end, so despite
#the default of None this function must be given a multiprocessing Pipe end
#(it is only ever used as a Process target); a direct call would fail.
def trainBestFitModels(training_sets,modelSearchSpace,individualProspectList,choiceData,riskCompOptA,riskCompOptB,send_end=None):
    # Per-training-set outputs, sent back through the pipe at the end.
    bestFitModels = []
    numCorrectSwitches = []
    numCorrectChoices = []
    perIssueSwitchPredict = []   # NOTE(review): never populated below
    perIssueChoicePredict = []   # NOTE(review): never populated below
    listMinSwitchingProbs = []
    for training_set in training_sets:
        #Calculate best fit model for training_set
        # bestFit holds (model params, score); start below any real score.
        bestFit = (None,negInf)
        for model in modelSearchSpace:
            modelScore = 0.0
            params = model
            for question_id in training_set:
                prospects = individualProspectList[question_id]
                if (params == None):
                    # NOTE(review): person_id is not defined in this scope, so
                    # this print would raise NameError if params were None.
                    print "Empty params for person ",person_id," against prospect"
                    continue
                # Value of gamble A and gamble B under the candidate CPT params.
                (valA,valB) = evaluateABGambleCPT(prospects,params)
                #theta
                #simple
                # Score one point when the model's preferred gamble matches the
                # recorded choice (choiceData: 1.0 = chose A, 2.0 = chose B).
                if (valA >= valB and (choiceData[question_id] == 1.0)):
                    #simple
                    modelScore = modelScore+1.0
                    #modelScore = modelScore + math.log(pAoverB)
                if (valB >= valA and (choiceData[question_id] == 2.0)):
                    modelScore = modelScore+1.0
                    #simple
                    #modelScore = modelScore + math.log(1.0-pAoverB)
                switchingValue = 0.0
                #binary only
                assert(len(prospects)==2)
                ((A1,p1),(A2,_)) = prospects[0]
                ((B1,q1),(B2,_)) = prospects[1]
                if (choiceData[question_id] == 1.0):#chose A
                    # Probability that was required to make the rejected gamble
                    # competitive; score the model's switching prediction too.
                    q1_required = riskCompOptA[question_id]
                    switchingValue = evaluateSwitchingValue(prospects[0],((B1,q1_required),(B2,1.0-q1_required)),params)
                elif (choiceData[question_id] == 2.0):#chose B
                    p1_required = riskCompOptB[question_id]
                    switchingValue = evaluateSwitchingValue(prospects[1],((A1,p1_required),(A2,1.0-p1_required)),params)
                #modelScore = modelScore + math.log(logitError(switchingValue,0.0,params[5]*10.0))
                if (switchingValue > 0.0):
                    modelScore = modelScore + 1.0
                """
                #logit
                pAoverB = logitError(valA,valB,params[5])
                if (choiceData[question_id] == 1.0):#chose A
                #choice lossj
                modelScore = modelScore + math.log(pAoverB)
                q1_required = riskCompOptA[question_id]
                switchingValue = evaluateSwitchingValue(prospects[0],((B1,q1_required),(B2,1.0-q1_required)),params)
                elif (choiceData[question_id] == 2.0):#chose A
                #choice loss
                modelScore = modelScore + math.log(1.0 - pAoverB)
                p1_required = riskCompOptB[question_id]
                switchingValue = evaluateSwitchingValue(prospects[1],((A1,p1_required),(A2,1.0-p1_required)),params)
                """
            #Compare to best model (TODO also check with equality)
            if (modelScore >= bestFit[1]):
                bestFit = (model,modelScore)
        #print "best fit model for training set", training_set,"=bestFit",bestFit
        #Now that best model is calculated, try on test data
        params = bestFit[0]
        bestFitModels.append(params)
        correctChoices = 0
        correctSwitch = 0
        minSwitchingProbs = []
        # Evaluate the fitted model on the held-out questions (the 16-question
        # set minus the training split).
        for question_id in range(16):
            if (question_id not in training_set):#test set
                prospects = individualProspectList[question_id]
                (valA,valB) = evaluateABGambleCPT(prospects,params)
                #simple scoring
                if (valA >= valB and (choiceData[question_id] == 1.0)):
                    correctChoices = correctChoices+1
                if (valB >= valA and (choiceData[question_id] == 2.0)):
                    correctChoices = correctChoices+1
                switchingValue = 0.0
                minSwitchingProb = 0.0
                ((A1,p1),(A2,_)) = prospects[0]
                ((B1,q1),(B2,_)) = prospects[1]
                if (choiceData[question_id] == 1.0):#chose A
                    q1_required = riskCompOptA[question_id]
                    switchingValue = evaluateSwitchingValue(prospects[0],((B1,q1_required),(B2,1.0-q1_required)),params)
                    #Calc first prob at which model predicts a switch
                    minSwitchingProb = calcMinSwitchingProb(prospects[0],prospects[1],params)
                    #Record min val under model,val given, min possible value
                    minSwitchingProbs.append([minSwitchingProb,q1_required,q1,question_id])
                elif (choiceData[question_id] == 2.0):#chose B
                    p1_required = riskCompOptB[question_id]
                    switchingValue = evaluateSwitchingValue(prospects[1],((A1,p1_required),(A2,1.0-p1_required)),params)
                    #Calc first prob at which model predicts a switch
                    minSwitchingProb = calcMinSwitchingProb(prospects[1],prospects[0],params)
                    #Record min val under model,val given, min possible value
                    minSwitchingProbs.append([minSwitchingProb,p1_required,p1,question_id])
                #simple scoring
                if (switchingValue > 0.0):
                    correctSwitch = correctSwitch+1
        numCorrectChoices.append(correctChoices)
        numCorrectSwitches.append(correctSwitch)
        listMinSwitchingProbs.append(minSwitchingProbs)
    # if called as subprocess
    send_end.send((bestFitModels,numCorrectChoices,numCorrectSwitches,listMinSwitchingProbs))
#Number of models to save (topk fitting models)
def trainModels(expFileName,inFileName,outFileName):
    """Fit one best CPT model per person on each random train/test split and
    pickle the results (training sets, models, scores) to outFileName.

    Spawns one worker process per individual; each worker runs
    trainBestFitModels and reports back through a multiprocessing Pipe.
    (Python 2 source.)
    """
    #Load data
    (perPersonData,individualProspectList,groupToPeopleMap,groupProspectList,paramSweepRanges) = loadExperimentFile(expFileName)
    print "\n***********\nLoading all data from %s"%inFileName
    pkl_file = open('%s'%inFileName,'rb')
    assert(pkl_file != None)
    # Refuse to clobber an existing results file.
    if (os.path.isfile(outFileName)):
        print "Already exists a file named ",outFileName,",exiting"
        exit()
    perPersonModels = pickle.load(pkl_file)
    topKNum = pickle.load(pkl_file)
    print "Finished loading data"
    pkl_file.close()
    print "Closed file, now fitting models"
    # Use ML style of training/test split
    # =====
    """
    Aggregate all models
    """
    NUM_INDIVIDUALS = len(perPersonData)
    #Params to select from are the aggregated list of top-k models for all individuals
    modelSearchSpace = []
    for person_id in range(NUM_INDIVIDUALS):
        for model in perPersonModels[person_id]:
            #print "Model example",model
            if (model[0] not in modelSearchSpace):
                modelSearchSpace.append([float(param) for param in model[0]])
            #else:
            # print "Duplicate model, not repeating"
    modelSearchSpace = tuple(modelSearchSpace)
    if (DEBUG):
        print "Model search space has ",len(modelSearchSpace)," models total"
        print modelSearchSpace
    # Draw NUM_REPEATS distinct random training splits of TRAIN_SZ questions.
    training_sets = []
    for i in range(NUM_REPEATS):
        train_example = rnd.sample(range(16),TRAIN_SZ)
        while (train_example in training_sets):
            train_example = rnd.sample(range(16),TRAIN_SZ)
            print "Re-finding, got duplicate"
        training_sets.append(train_example)
    """
    Train best model using subset of questions over model search space
    Fitting done with total number of correctly predicted
    """
    allPeopleNumCorrectChoices = []
    allPeopleNumCorrectSwitches = []
    allPeopleMinSwitchingProbs = []
    perPersonBestFitModels = []
    #MULTI PROCESS
    # One worker process per person; results come back through the pipes.
    jobs = []
    pipe_list = []
    for i,person_id in enumerate(range(NUM_INDIVIDUALS)):
        print "Recovering params for person ",i, "out of ", NUM_INDIVIDUALS
        personData = perPersonData[person_id]
        recv_end, send_end = multiprocessing.Pipe(False)
        choiceData= perPersonData[person_id][2]
        riskCompOptA= perPersonData[person_id][3]
        riskCompOptB= perPersonData[person_id][4]
        p = multiprocessing.Process(target=trainBestFitModels,args=(training_sets,modelSearchSpace,individualProspectList,choiceData,riskCompOptA,riskCompOptB,send_end))
        pipe_list.append(recv_end)
        jobs.append(p)
        p.start()
    #Blocking
    start = time.time()
    print "Spawned all threads, waiting for jobs"
    for job in jobs:
        job.join()
    print "All jobs joined, recovering results..."
    trainingResults = [x.recv() for x in pipe_list]
    if (DEBUG):
        print "\n*******\n Training results :",trainingResults
    # Unpack each worker's tuple into the per-person aggregate lists.
    for result in trainingResults:
        (perPersonBestFit,numCorrectChoices,numCorrectSwitches,listMinSwitchProbs) = result
        allPeopleNumCorrectChoices.append(numCorrectChoices)
        allPeopleNumCorrectSwitches.append(numCorrectSwitches)
        allPeopleMinSwitchingProbs.append(listMinSwitchProbs)
        perPersonBestFitModels.append(perPersonBestFit)
    print "Recovered results, total time elapsed: ",(time.time()-start)
    """
    #SINGLE PROCESS
    for person_id in range(NUM_INDIVIDUALS):
    print "Training models for person",person_id
    choiceData= perPersonData[person_id][2]
    riskCompOptA= perPersonData[person_id][3]
    riskCompOptB= perPersonData[person_id][4]
    numCorrectChoices = []
    numCorrectSwitches = []
    listMinSwitchingProbs = []
    (perPersonBestFit,numCorrectChoices,numCorrectSwitches,listMinSwitchProbs) = trainBestFitModels(training_sets,choiceData,riskCompOptA,riskCompOptB)
    #Store as one entry for individual #person_id
    allPeopleNumCorrectChoices.append(numCorrectChoices)
    allPeopleNumCorrectSwitches.append(numCorrectSwitches)
    allPeopleMinSwitchingProbs.append(listMinSwitchingProbs)
    perPersonBestFitModels.append(perPersonBestFit)
    """
    #Save results
    print "\n\nStoring best fit models at %s" % outFileName
    pkl_file = open('%s'%outFileName,'wb')
    assert(pkl_file != None)
    pickle.dump(training_sets,pkl_file)
    pickle.dump(perPersonBestFitModels,pkl_file)
    pickle.dump(allPeopleNumCorrectChoices,pkl_file)
    pickle.dump(allPeopleNumCorrectSwitches,pkl_file)
    pickle.dump(allPeopleMinSwitchingProbs,pkl_file)
    pkl_file.close()
#End train models
|
from lib.commands.lib import assignment_pattern, getAssignmentArgs, extractArgs, logger
from lib.commands.general import delVar, exit_, help_, print_, makeVariable
from lib.commands.errors import BotError
# Human-readable descriptions (shown by the help command) for each built-in.
delVar.about = "Удалить одну или несколько переменных"
exit_.about = "Завершение работы бота "
help_.about = "Получение информации о доступных командах и переменных"
print_.about = "Напечатать значения аргументов"

# Registry mapping a command's lowercase name to its handler function.
availible_commands = {
    "del":delVar,
    "exit":exit_,
    "help":help_,
    "print":print_,
}
def getCommand(text):
    """Parse *text* and return a callable that runs the command for a user id.

    Assignment-shaped input is routed to makeVariable; otherwise the first
    word selects a registered command and the rest becomes its arguments.
    """
    if assignment_pattern.search(text):
        command, command_name = makeVariable, "makeVariable"
        args = getAssignmentArgs(text)
    else:
        command_name, *rest = text.split(" ", 1)
        args = rest[0] if rest else ""
        command = getCommandFromName(command_name)
    args = extractArgs(args)
    logger.debug("Recieved command {} with args {}".format(command_name, args))
    return lambda user_id: command(user_id, *args)
def getCommandFromName(name):
    """Look up a command handler by its (case-insensitive) name.

    Raises BotError when no such command is registered.
    """
    try:
        return availible_commands[name.lower()]
    except KeyError:
        raise BotError("Command '{}' not found".format(name))
|
import numpy as np
from src.shaft_secant_piles import (get_parameters_shaft_secant_piles, plot_shaft,
plot_shaft_3d, check_for_hoop_force, get_area_moment_of_inertia_rect)
# Initial parameters
# Default widget values used when main_secant_piled_shaft() is called without
# an explicit parameters dict (units as labelled in the UI: m, kPa, kN, MPa).
parameters_init = {"project_name": "Sample project", "project_revision": "First issue, rev0", "shaft_name": "Shaft 1", "di": 12.0, "D": 1.2,
                   "n_pieces": 44, "L": 15.0, "v": 0.75, "H_drilling_platform": 0.0, "E": 30.e6,
                   "F_hoop_at_base": 700.0, "gamma_G": 1.35, "f_ck": 10.0, "alpha_cc": 0.7, "gamma_c": 1.5,
                   "check_more": False, "F_hoop": 500.0, "L_hoop": 10.0}
def main_secant_piled_shaft(st, parameters=None):
    """Streamlit page: geometric and plain-concrete resistance checks for a
    secant piled shaft.

    Args:
        st: the streamlit module (injected by the caller).
        parameters: dict of initial widget values; defaults to parameters_init.
    """
    if parameters is None:
        parameters = parameters_init
    st.title('Geometric and plain concrete resistance check for secant piled shaft')
    st.header('Project information')
    project_name = st.text_input('Project', value=parameters['project_name'], key='project_name')
    st.text_input('Revision', value=parameters['project_revision'], key='project_revision')
    st.header('Input parameters')
    col1, col2, col3 = st.columns(3)
    shaft_name = col1.text_input('Shaft identification', value=parameters['shaft_name'], key='shaft_name')
    di = col1.number_input('Shaft inner diameter [m]', value=parameters['di'], format='%.2f', min_value=1.0, max_value=100.0, step=1.0, key='di')
    D = col2.number_input('Pile diameter [m]', value=parameters['D'], format='%.2f', min_value=0.3, max_value=5.0, step=0.1, key='D')
    n_pieces = int(col3.number_input('Numer of piles [-]', value=int(parameters['n_pieces']), format='%i', min_value=4, max_value=1000, step=1, key='n_pieces'))
    L = col2.number_input('Length of shaft [m]', value=parameters['L'], step=1.0,min_value=1.0, max_value=150.0, key='L')
    v = col3.number_input('Drilling verticality [%]', value=parameters['v'], step=0.1, min_value=0.05, max_value=2.0, key='v')
    col1, col2 = st.columns(2)
    H_drilling_platform = col1.number_input('Height of drilling platform above top of piles [m]', value=parameters['H_drilling_platform'], step=1.0, min_value=0.0, max_value=50.0, key='H_drilling_platform')
    col2.write('The initial devivation by free drilling x0 = {:.2f} cm'.format(H_drilling_platform*v))
    # Core ring geometry: spacing, overcuts and effective thicknesses.
    a, t_top, d_top, x0, x, t_eff, d_eff = get_parameters_shaft_secant_piles(di/2, n_pieces, D, L, H_drilling_platform, v, shaft_name=shaft_name, print_results=False)
    st.header('Output parameters for {}'.format(shaft_name))
    col1, col2 = st.columns(2)
    col1.write('C/c spacing at top of shaft a = {:.2f} m'.format(a))
    col1.write('Overcut at top of shaft t = {:.2f} cm'.format(t_top*100))
    col1.write('Effective thickness at top of shaft d = {:.2f} cm'.format(d_top*100))
    col1.write('Deviation at bottom of shaft dx = {:.2f} cm'.format(x*100))
    if t_eff > 0:
        # Chord geometry gives the overlapped wall thickness at the base.
        d_eff = 2*np.sqrt((D/2)*t_eff - (t_eff/2)**2) # overlapped thickness, m
        col2.write('Overcut at bottom of shaft t_eff = {:.2f} cm'.format(t_eff*100))
        col2.write('Effective thickness at bottom of shaft d_eff = {:.2f} cm'.format(d_eff*100))
        with st.expander('Axial and flexural rigidity considering effective thickness at top and bottom of shaft'):
            E = st.number_input("Concrete Young's modulus E [KPa]", value=parameters['E'], format='%.0f', min_value=25.0e6, max_value=35.0e6, step=1.0E6, key='E')
            display_shaft_stiffnesses(d_top, d_eff, E, st)
    else:
        d_eff = np.nan
        col2.warning('PILES DO NOT TOUCH IN BASE OF SHAFT!!')
    st.header('Visualization for {}'.format(shaft_name))
    fig1 = plot_shaft(di/2, n_pieces, D, x0, x, shaft_name)
    st.pyplot(fig1)
    fig2 = plot_shaft_3d(di/2, n_pieces, D, L, x0, x, shaft_name)
    st.pyplot(fig2)
    st.header('Check for hoop stress at base of shaft')
    col1, col2, col3 = st.columns(3)
    F_hoop_at_base = col1.number_input('Hoop force [kN/m]', value=parameters['F_hoop_at_base'], min_value=10.0, max_value=100000.0, step=100.0, key='F_hoop_at_base')
    gamma_G = col2.number_input('gamma_G [-]', value=parameters['gamma_G'], min_value=1.0, max_value=2.0, step=0.05, key='gamma_G')
    f_ck = col3.number_input('f_ck [MPa]', value=parameters['f_ck'], min_value=5.0, max_value=80.0, step=5.0, key='f_ck')
    # BUG FIX: alpha_cc and gamma_c previously hard-coded value=0.7 and
    # value=1.5 instead of reading the `parameters` dict like every other
    # widget on this page.
    alpha_cc = col1.number_input('alpha_cc [-]', value=parameters['alpha_cc'], min_value=0.0, max_value=1.0, step=0.1, key='alpha_cc')
    gamma_c = col2.number_input('gamma_c [-]', value=parameters['gamma_c'], min_value=0.0, max_value=2.0, step=0.1, key='gamma_c')
    sigma_cd, f_cd = check_for_hoop_force(F_hoop_at_base, d_eff, gamma_G, f_ck, alpha_cc, gamma_c)
    if sigma_cd < f_cd:
        st.success('Hoop stress = {0:.2f} MPa < design hoop stress = {1:.2f} MPa: PASSED'.format(sigma_cd, f_cd))
    else:
        st.error('Hoop stress = {0:.2f} MPa > design hoop stress = {1:.2f} MPa: NOT PASSED'.format(sigma_cd, f_cd))
    check_more = st.checkbox('Check for hoop stress at any shaft depth', value=parameters['check_more'], key='check_more')
    if check_more:
        col1, col2 = st.columns(2)
        F_hoop = col1.number_input('Hoop force [kN/m]', value=parameters['F_hoop'], min_value=10.0, max_value=100000.0, step=100.0, key='F_hoop')
        L_hoop = col2.number_input('Depth from top of shaft [m]', value=parameters['L_hoop'], min_value=1.0, max_value=150.0, step=1.0, key='L_hoop')
        # Re-evaluate the geometry at the requested depth instead of full length.
        a, t_top, d_top, x0, x, t_eff, d_eff = get_parameters_shaft_secant_piles(di/2, n_pieces, D, L_hoop, H_drilling_platform, v, shaft_name=shaft_name, print_results=False)
        sigma_cd, f_cd = check_for_hoop_force(F_hoop, d_eff, gamma_G, f_ck, alpha_cc, gamma_c)
        if sigma_cd < f_cd:
            st.success('Hoop stress = {0:.2f} MPa < design hoop stress = {1:.2f} MPa: PASSED'.format(sigma_cd, f_cd))
        else:
            st.error('Hoop stress = {0:.2f} MPa > design hoop stress = {1:.2f} MPa: NOT PASSED'.format(sigma_cd, f_cd))
def display_shaft_stiffnesses(d_top, d_eff, E, st):
    """Write per-metre flexural (EI) and axial (EA) rigidity at the shaft's
    top and bottom, based on the effective wall thicknesses there."""
    EI_top = E * get_area_moment_of_inertia_rect(1.0, d_top)   # [kNm**2/m]
    st.write('EI at top = {0:.2f} [kNm^2/m], EA at top = {1:.2f} [kN/m]'.format(EI_top, E * d_top))
    EI_bottom = E * get_area_moment_of_inertia_rect(1.0, d_eff)  # [kNm**2/m]
    st.write('EI at bottom = {0:.2f} [kNm^2/m], EA at bottom = {1:.2f} [kN/m]'.format(EI_bottom, E * d_eff))
|
## IMPORTS
from paths import GAME_PATH
## DECLARATIONS
# Public API of this module.
__all__ = (
    'get_map_list',
)
## UTILS
def get_map_list():
    """Return the map names listed in the maplist.txt file.

    Blank lines and lines starting with '//' are skipped.

    Raises:
        FileNotFoundError: if maplist.txt does not exist under GAME_PATH.
    """
    maplist = GAME_PATH / 'maplist.txt'
    # BUG FIX: pathlib.Path has no isfile() method (that's os.path); the
    # old call raised AttributeError instead of performing the check.
    if not maplist.is_file():
        raise FileNotFoundError("Missing {}".format('maplist'))
    rs = []
    with open(maplist) as f:
        for line in f:
            line = line.strip()
            # Skip blank lines and '//' comments.
            if not line or line.startswith('//'):
                continue
            rs.append(line)
    return rs
|
#!/usr/bin/env python
#
# This creates a graph of response times for the traceroute/ping command for monitoring
# the desired remote path
#
import socket

# BUG FIX: the stdlib module is `socket`, not `sockets`, and the old
# raw_input call (`raw_input("\n" = "...")`) was a syntax error.
host = input("\nIP address to trace: ")
port = 33434
ttl = 0
# Protocol numbers for the probes (UDP out) and the replies (ICMP in).
icmp = socket.getprotobyname('icmp')
udp = socket.getprotobyname('udp')
# Probe up to 30 hops. NOTE(review): the rest of the loop body is truncated
# in this file; as written, ttl is never advanced past 0.
while ttl != 30:
    s = socket.socket(socket.AF_INET, socket.SOCK_RAW, icmp)
import py3buddy
def initialize():
    """Reset the module-level iBuddy state variables to their defaults.

    Note: heart is the string "False" and ibuddy an empty string, matching
    how the rest of the program reads these globals.
    """
    global ibuddy, wingle, wing, heart, color
    ibuddy = ""
    wingle = "middle"
    wing = "low"
    heart = "False"
    color = "NOCOLOUR"
"""
148. Sort List
Given the head of a linked list, return the list after sorting it in ascending order.
Follow up: Can you sort the linked list in O(n logn) time and O(1) memory (i.e. constant space)?
"""
# Definition for singly-linked list.
class ListNode:
    """A node in a singly linked list."""

    def __init__(self, val=0, next=None):
        self.val, self.next = val, next
class Solution:
    # O(n log n) time, O(n) space: copy the values out, sort, rebuild a list.
    def sortList(self, head: ListNode) -> ListNode:
        values = []
        node = head
        while node:
            values.append(node.val)
            node = node.next
        values.sort()
        sentinel = ListNode(-1)
        tail = sentinel
        for value in values:
            tail.next = ListNode(value)
            tail = tail.next
        return sentinel.next

    # Bubble sort: O(n^2) time, O(1) space — swaps node *values* in place.
    def sortList(self, head: ListNode) -> ListNode:
        length = 0
        node = head
        while node:
            length += 1
            node = node.next
        done = 0
        while done < length:
            node = head
            steps = 0
            while steps < length - done and node.next:
                if node.val > node.next.val:
                    # Swap the values, not the nodes.
                    node.val, node.next.val = node.next.val, node.val
                node = node.next
                steps += 1
            done += 1
        return head

    # Merge sort: O(n log n) time (this definition shadows the two above).
    def sortList(self, head: ListNode) -> ListNode:
        # Base case: empty list or a single node is already sorted.
        if head is None or head.next is None:
            return head
        # Split around the midpoint, sort each half, then merge.
        mid = self.midPoint(head)
        return self.merge(self.sortList(head), self.sortList(mid))

    def midPoint(self, head):
        """Cut the list in half (fast/slow pointers) and return the head of
        the second half."""
        slow = fast = head
        while fast.next and fast.next.next:
            slow, fast = slow.next, fast.next.next
        second = slow.next
        slow.next = None
        return second

    def merge(self, left, right):
        """Merge two sorted lists and return the head of the merged list."""
        sentinel = tail = ListNode(-1)
        while left and right:
            if left.val < right.val:
                tail.next, left = left, left.next
            else:
                tail.next, right = right, right.next
            tail = tail.next
        # Append whichever side still has nodes.
        tail.next = left or right
        return sentinel.next
|
import unittest
from katas.kyu_7.dict_from_two_lists import createDict
class CreateDictTestCase(unittest.TestCase):
    """Tests for createDict, which zips a key list with a value list."""

    def test_equals(self):
        # Extra key 'd' has no matching value and maps to None.
        self.assertEqual(createDict(['a', 'b', 'c', 'd'], [1, 2, 3]),
                         {'a': 1, 'b': 2, 'c': 3, 'd': None})

    def test_equals_2(self):
        # Extra value 4 has no matching key and is dropped.
        self.assertEqual(createDict(['a', 'b', 'c'], [1, 2, 3, 4]),
                         {'a': 1, 'b': 2, 'c': 3})
|
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render, redirect
from .forms import StakeForm, IstakeForm
from .models import Stake, WheelSpin, Selection
import json
@login_required(login_url="/user/login")
def spin(request):
    """Render the instant-spin page; on POST, save the submitted stake for
    the current user (no market attached)."""
    trans_logz = Stake.objects.filter(
        user=request.user, market=None, has_market=False
    ).order_by("-created_at")[:2]
    if request.method == "POST":
        stake_form = IstakeForm(request.POST)
        if stake_form.is_valid():
            new_stake = stake_form.save(commit=False)
            new_stake.user = request.user
            new_stake.save()
    else:
        stake_form = IstakeForm()
    context = {
        "user": request.user,
        "stake_form": stake_form,
        "trans_logz": trans_logz,
        "spins": len(Stake.unspinned(request.user.id)),
    }
    return render(request, "daru_wheel/ispind.html", context)
# @login_required(login_url='/user/login')
# def spin(request):
# stake_form = IstakeForm()
# trans_logz = Stake.objects.filter(
# user=request.user,market=None,has_market=False).order_by('-created_at')[:12]
# if request.method == 'POST':
# data = {}
# data['user'] = request.user
# data['marketselection'] = request.POST.get('marketselection')
# data['amount'] = request.POST.get('amount')
# data['bet_on_real_account'] = request.POST.get("bet_on_real_account")
# stake_form = IstakeForm(data=data)
# if stake_form.is_valid():
# stake_form.save()
# else:
# print(stake_form.errors)
# spins = Stake.unspinned(request.user.id)
# context = {
# 'user': request.user, 'stake_form': stake_form,
# 'trans_logz': trans_logz, 'spins': spins
# }
# return render(request, 'daru_wheel/ispin.html', context)
@login_required(login_url="/user/login")
def daru_spin(request):
    """Render the wheel page; on POST, stake the posted amount against the
    most recent wheel spin (the market)."""
    try:
        # PERF/BUG FIX: the old code pulled every WheelSpin row into Python
        # just to compute max(id); latest("id") resolves it in one query.
        this_wheelspin = WheelSpin.objects.latest("id")
    except WheelSpin.DoesNotExist:
        # No spin exists yet: fall back to a default market row.
        this_wheelspin, _ = WheelSpin.objects.get_or_create(id=1)
    stake_form = StakeForm()
    trans_logz = Stake.objects.filter(user=request.user, has_market=True).order_by(
        "-created_at"
    )[:12]
    if request.method == "POST":
        data = {
            "user": request.user,
            "market": this_wheelspin,
            "marketselection": request.POST.get("marketselection"),
            "amount": request.POST.get("amount"),
            "bet_on_real_account": request.POST.get("bet_on_real_account"),
        }
        stake_form = StakeForm(data=data)
        if stake_form.is_valid():
            stake_form.save()
        else:
            print(stake_form.errors)
    context = {"user": request.user, "stake_form": stake_form, "trans_logz": trans_logz}
    return render(request, "daru_wheel/daru_spin.html", context)
@login_required(login_url="/user/login")
def spine(req):
    """Render the spin page with the user's recent stakes, a fresh form and
    the count of unspun stakes."""
    recent = Stake.objects.filter(user=req.user).order_by("-created_at")[:12]
    tmpl_vars = {
        "trans_logz": recent,
        "form": IstakeForm(),
        "spins": Stake.unspinned(req.user.id),
    }
    return render(req, "daru_wheel/ispin.html", tmpl_vars)
@login_required(login_url="/user/login")
def spin_it(request):
    """AJAX endpoint: persist a stake from the POSTed form data and return
    the saved stake as JSON."""
    if request.method == "POST":
        market_id = request.POST.get("marketselection")
        # TODO(review): selection is still pinned to id=1; switch to
        # market_id once selections are wired up client-side.
        marketselection = Selection.objects.get(id=1)
        amount = request.POST.get("amount")
        # BUG FIX: the old test `== "on" or True` was always True and the
        # model was then created with a hard-coded True anyway; a checked
        # HTML checkbox posts the value "on".
        bet_on_real_account = request.POST.get("bet_on_real_account") == "on"
        stake = Stake(
            marketselection=marketselection,
            amount=amount,
            bet_on_real_account=bet_on_real_account,
            user=request.user,
        )
        stake.save()
        response_data = {
            "created_at": stake.created_at.strftime("%B %d, %Y %I:%M %p"),
            "marketselection": stake.marketselection.name,
            "amount": stake.amount,
            "bet_status": stake.bet_status(),
            "bet_on_real_account": stake.bet_on_real_account,
        }
        return JsonResponse(response_data)
    return JsonResponse({"Error": "Postin Error"})
|
from publish import cli
import shutil
import pathlib
from textwrap import dedent
from pytest import fixture
@fixture
def make_input_directory(tmpdir):
    """Factory fixture: copy a named example tree into <tmpdir>/input."""
    def _copy_example(example):
        destination = pathlib.Path(tmpdir) / "input"
        source = pathlib.Path(__file__).parent / example
        shutil.copytree(source, destination)
        return destination
    return _copy_example
@fixture
def output_directory(tmpdir):
    """Create and return an empty <tmpdir>/output directory."""
    out = pathlib.Path(tmpdir) / "output"
    out.mkdir()
    return out
def test_publish_cli_simple_example(make_input_directory, output_directory):
    """Publishing the simple example emits the expected homework PDF."""
    # given
    input_directory = make_input_directory("example_1")
    # when
    cli([str(input_directory), str(output_directory)])
    # then
    expected = output_directory / "homeworks" / "01-intro" / "homework.pdf"
    assert expected.exists()
def test_publish_cli_with_example_depending_on_week_start_date(
    make_input_directory, output_directory
):
    """--start-of-week-one drives date-dependent publishing of example_8."""
    # given
    input_directory = make_input_directory("example_8")
    # when
    args = [
        str(input_directory),
        str(output_directory),
        "--start-of-week-one",
        "2020-01-04",
        "--ignore-release-time",
    ]
    cli(args)
    # then
    assert (output_directory / "lectures" / "01-intro").exists()
def test_publish_cli_with_example_using_template_vars(
    make_input_directory, output_directory
):
    """--vars feeds an external YAML file of template variables to cli."""
    # given
    input_directory = make_input_directory("example_9")
    vars_file = input_directory / "myvars.yaml"
    with vars_file.open("w") as fileobj:
        fileobj.write(
            dedent(
                """
                name: this is a test
                start_date: 2020-01-01
                """
            )
        )
    # when
    args = [
        str(input_directory),
        str(output_directory),
        "--start-of-week-one",
        "2020-01-04",
        "--ignore-release-time",
        "--vars",
        f"course:{input_directory}/myvars.yaml",
    ]
    cli(args)
    # then
    assert (output_directory / "homeworks" / "01-intro").exists()
|
from django.urls import path
from backend.articles.views import ReadOnlyArticleAPIView
from backend.articles.views import WriteOnlyArticleAPIView

app_name = 'articles'

# todo
# Convert to custom router.
#
# NOTE(review): the read routes share the name "read_only_article" and the
# write routes share "write_only_article"; reverse() on those names resolves
# to the last registered match, so reversing needs explicit args.
urlpatterns = [
    path('list/', ReadOnlyArticleAPIView.as_view(), name="read_only_article"),
    path('retrieve/<int:id>/', ReadOnlyArticleAPIView.as_view(), name="read_only_article"),
    path('create/', WriteOnlyArticleAPIView.as_view(), name="write_only_article"),
    path('update/<int:id>/', WriteOnlyArticleAPIView.as_view(), name="write_only_article"),
    path('destroy/<int:id>/', WriteOnlyArticleAPIView.as_view(), name="write_only_article"),
]
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import json

# Read the model description in one go instead of concatenating it line by
# line (string += in a loop is quadratic in file size).
with open('model_classfier_edit.json') as fileobj:
    data_dict = json.load(fileobj)

one_data = {}
zero_data = {}
for k, v in data_dict["storage"]["weight"].items():
    print(v)
    # NOTE(review): the original tested `v["0"]["v1"] in one_data` for the
    # second branch as well -- clearly a copy/paste slip, fixed to zero_data.
    # The membership test itself is suspect (it checks a weight VALUE against
    # the dict's keys); confirm the intended filter with the file's producer.
    if v["1"]["v1"] in one_data:
        one_data.update({k: v["1"]["v1"]})
    if v["0"]["v1"] in zero_data:
        zero_data.update({k: v["0"]["v1"]})

# sorted() returns a new list; bind the results so the sort is not discarded.
one_sorted = sorted(one_data.items(), key=lambda x: x[1])
zero_sorted = sorted(zero_data.items(), key=lambda x: x[1])
|
'''
Author: Huanxin Xu
Modified from Nick Lowell's version, 2016/12
version 0.0.8
Added the vessel number; got rid of the 't' and 'ma' files; improved plotting;
converted psi to depth in meters in getmap.py and control_file.txt.
For further questions, please contact 508-564-8899 or send email to xhx509@gmail.com
Remember!!! Modify the control file before deploying!!!
'''
import sys
sys.path.insert(1, '/home/pi/Desktop/mat_modules')
import odlw_facade
import odlw
import bluepy.btle as btle
import time
import datetime
import numpy as np
import pandas as pd
import serial
import os
from pandas import read_csv
from pylab import mean, std
import OdlParsing
import glob
import logging
from shutil import copyfile
from li_parse import parse_li
from wifiandpic import p_create_pic,wifi,judgement2,parse
from func_readgps import func_readgps
logging.basicConfig() #enable more verbose logging of errors to console
# Staging directory for files that will later be uploaded over wifi.
if not os.path.exists('/towifi'):
    os.makedirs('/towifi')
CONNECTION_INTERVAL = 30 # Minimum number of seconds between reconnects
# Set to minutes for lab testing. Set to hours/days for field deployment.
LOGGING = False #Enable log file for debugging Bluetooth COM errors. Deletes old log and creates new ble_log.txt for each connection.
#########################################################################################################################################
# Read the deployment settings from the control file.  One setting per line;
# the value is the first space-separated token of each line, in this order:
#   1: time-range limit, 2: pressure limit, 3: transmit flag ('yes'/'no'),
#   4: logger MAC address, 5: boat type, 6: vessel number.
file='control_file.txt'
f1=open(file,'r')
logger_timerange_lim=int(f1.readline().split(' ')[0])
logger_pressure_lim=int(f1.readline().split(' ')[0])
transmit=f1.readline().split(' ')[0]
MAC_FILTER=[f1.readline().split(' ')[0]]
boat_type=f1.readline().split(' ')[0]
vessel_num=f1.readline().split(' ')[0]
f1.close()
# Write the CSV header prepended to uploaded data files; the serial number is
# taken from the last 5 characters of the logger's MAC address.
header_file=open('/home/pi/Desktop/header.csv','w')
header_file.writelines('Probe Type,Lowell\nSerial Number,'+MAC_FILTER[0][-5:]+'\nVessel Number,'+vessel_num+'\nDate Format,YYYY-MM-DD\nTime Format,HH24:MI:SS\nTemperature,C\nDepth,m\n') # create header with logger number
header_file.close()
print MAC_FILTER
scanner = btle.Scanner()
# A dictionary mapping mac address to last communication time. This should ultimately be moved
# to a file or database. This is a lightweight example.
last_connection = {}
index_times=0
func_readgps() # We need to run function readgps twice to make sure we get at least two gps data in the gps file
func_readgps() # We need to run function readgps twice to make sure we get at least two gps data in the gps file
# Main daemon loop (Python 2): scan for the configured logger, download new
# .lid files over BLE, parse them, attach the nearest GPS fix to each sample,
# stage good files for wifi upload, and optionally transmit summary stats
# over a serial link.  Indentation below is reconstructed -- the original
# source arrived with leading whitespace stripped; verify nesting against the
# deployed copy.
while True:
    print('-'),
    try:
        wifi()
    except:
        time.sleep(1)
    # Refresh the GPS fix roughly every 14 passes through the loop.
    index_times=index_times+1
    if index_times>=14:
        index_times=0
        func_readgps()
    scan_list = scanner.scan(6) # Scan for 6 seconds
    # Get rid of everything that isn't a MAT1W
    scan_list =[device for device in scan_list if device.addr in MAC_FILTER]
    # Prefer new connections, then the oldest connection
    oldest_connection = time.time()
    mac = None
    for dev in scan_list:
        if dev.addr not in last_connection:
            mac = dev.addr
            break
        if last_connection[dev.addr] < oldest_connection and \
                time.time() - last_connection[dev.addr] > CONNECTION_INTERVAL:
            mac, oldest_connection = dev.addr, last_connection[dev.addr]
    if not mac:
        continue
    print('')
    print('*************New Connection*************')
    print time.strftime("%c")
    print('Connecting to {}'.format(mac))
    try:
        p = btle.Peripheral(mac) # create a peripheral object. This opens a BLE connection.
    except btle.BTLEException: # There was a problem opening the BLE connection.
        print('Failed to connect to ' + mac)
        continue
    if LOGGING: #Debug Code.Creating a log file that saves ALL coms for the current connection. Sometimes helpful for troubleshooting.
        file_h = open('ble_log.txt', 'w') # This is bad form but temporary
        log_obj = odlw_facade.BLELogger(file_h)
    with p:
        try: # all commands need to be in the try loop. This will catch dropped connections and com errors
            connection = odlw_facade.OdlwFacade(p) # create a facade for easy access to the ODLW
            if LOGGING:
                connection.enable_logging(log_obj)
            time.sleep(1) # add a short delay for unknown, but required reason
            # Stop the logger from collecting data to improve reliability of comms and allow data transfer
            print ('Stopping deployment: ' + connection.stop_deployment())
            time.sleep(2) # delay 2 seconds to allow files to close on SD card
            #Increase the BLE connection speed
            print('Increasing BLE speed.')
            connection.control_command('T,0006,0000,0064') #set latency and timeouts in RN4020 to minimums
            # Make sure the clock is within range, otherwise sync it
            try:
                odlw_time = connection.get_time()
            except ValueError:
                print('ODL-1W returned an invalid time. Clock not checked.')
            else:
                # Is the clock more than a day off?
                if (datetime.datetime.now() - odlw_time).total_seconds() > 10:
                    print('did Syncing time.')
                    connection.sync_time()
            # Filler commands to test querying
            # for i in range(1):
            #     print('Time: ' + str(connection.get_time()))
            #     print('Status: ' + connection.get_status())
            #     print('Firmware: ' + connection.get_firmware_version())
            #     print('Serial Number: ' + connection.get_serial_number())
            # Download any .lis files that aren't found locally
            folder = mac.replace(':', '-').lower() # create a subfolder with the ODL-1W's unique mac address
            serial_num=folder[-2:]
            if not os.path.exists(folder):
                os.makedirs(folder)
            print('Requesting file list')
            files = connection.list_files() # get a list of files on the ODLW
            files.reverse()
            print files #Note: ODL-W has a very limited file system. Microprocessor will become RAM bound if files are over 55-65.
            #The exact number depends on file name length and ???. TBD: add a check for files numbers above 55.
            for name, size in files:
                if not name.endswith('.lid'):
                    continue
                # Check if the file exists and get it's size
                file_path = os.path.join(folder, name)
                local_size = None
                if os.path.isfile(file_path):
                    local_size = os.path.getsize(file_path)
                # Download only if missing or size-mismatched; delete from the
                # logger either way to keep its tiny file system empty.
                if not local_size or local_size != size:
                    with open(file_path, 'wb') as outstream:
                        print('Downloading ' + name)
                        start_time = time.time()
                        connection.get_file(name, size, outstream)
                        end_time = time.time()
                        print('Download of {} complete - {:0.2f} bytes/sec.'.format(name, size/(end_time-start_time)))
                    print('Deleting {}'.format(name))
                    connection.delete_file(name)
                else:
                    print('Deleting {}'.format(name))
                    connection.delete_file(name)
            print('Restarting Logger: ' + connection.start_deployment())
            time.sleep(2) #2 second delay to write header of new data file
            last_connection[mac] = time.time() # keep track of the time of the last communication
            print('Disconnecting')
            # Rename the newest downloaded .lid file to <serial>(<timestamp>).lid
            file_names=glob.glob('/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/*.lid')
            file_names.sort(key=os.path.getmtime)
            file_name=file_names[-1]
            #file_name2=file_names2[-1]
            print file_name
            #file_num =str(max(nums)+1)
            file_num=datetime.datetime.now().strftime("%y%m%d_%H_%M")
            os.rename(file_name,'/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/'+serial_num+'('+file_num+').lid')
            file_name='/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/'+serial_num+'('+file_num+').lid'
            print file_name
            # Example - extract five-minute averaged temperature data from binary file
            print('Extracting five-minute averaged temperature data...')
            try:
                parse_li(file_name) # default out_path
            except:
                print "problems on parsing lid file"
                time.sleep(900)
                print "remove lid file in 100 seconds"
                time.sleep(100)
                os.system('sudo rm /home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/*.lid')
            # Example - extract full resolution temperature data for Aug 4, 2014
            print('Extracting full resolution temperature data ')
            start = datetime.datetime(2011, 8, 1) # create datetime objects for start and end time
            end = datetime.datetime(2030, 8, 3)
            s_file='/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/'+serial_num+'('+file_num+')_S.txt'
            df=pd.read_csv(s_file,sep=',',skiprows=0,parse_dates={'datet':[0]},index_col='datet',date_parser=parse)#creat a new Datetimeindex
            os.rename(s_file,'/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/'+serial_num+str(df.index[-1]).replace(':','')+'S.txt')
            new_file_path='/home/pi/Desktop/towifi/li_'+serial_num+'_'+str(df.index[-1]).replace(':','').replace('-','').replace(' ','_')#folder path store the files to uploaded by wifi
            s_file='/home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/'+serial_num+str(df.index[-1]).replace(':','')+'S.txt'
            if len(df)>1000:
                # More than ~1 day of samples: treat as stale, wipe and reboot.
                os.remove(s_file)
                print 'data is more than one day, delete'
                time.sleep(1800)
                os.system('sudo rm /home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/*.lid')
                os.system('sudo reboot')
            df1=pd.read_csv(s_file,sep=',',skiprows=0,parse_dates={'datet':[0]},index_col='datet',date_parser=parse)
            df1.index.names=['datet(GMT)']
            # Attach the nearest-in-time GPS fix (lat/lon) to every sample row.
            file2=max(glob.glob('/home/pi/Desktop/gps_location/*'))
            column_name=['lat','lon']
            df2=pd.read_csv(file2,sep=',',skiprows=0,parse_dates={'datet':[0]},index_col='datet',date_parser=parse,header=None)
            lat=[]
            lon=[]
            inx=str(min(df2.index,key=lambda d: abs(d-df1.index[0])))
            for i in df1.index:
                try:
                    inx=str(min(df2[inx:].index,key=lambda d: abs(d-i)))
                    lat.append(df2[str(min(df2[inx:].index,key=lambda d: abs(d-i)))][1].values[0])
                    lon.append(df2[str(min(df2[inx:].index,key=lambda d: abs(d-i)))][2].values[0])
                except:
                    print 'gps time is not matching the logger'
                    time.sleep(1000)
                    os.system('sudo reboot')
            df1['lat']=lat
            df1['lon']=lon
            df1=df1[['lat','lon','Temperature (C)','Depth (m)']]
            print 'got the df'
            print 'Parse accomplish'
            # Validate the haul against the configured limits; good files are
            # staged under /towifi with the CSV header prepended.
            try:
                valid='no'
                boat_type='fishing' #boat type ,pick one from 'lobster' or 'fish'
                valid,st_index,end_index=judgement2(boat_type,s_file,logger_timerange_lim,logger_pressure_lim)
                print 'valid is '+valid
                if valid=='yes': #copy good file to 'towifi' floder
                    copyfile(file_name,new_file_path+').lid')
                    df1.to_csv(new_file_path+'_S1.csv')
                    fh=open('/home/pi/Desktop/header.csv','r')
                    content=fh.readlines()
                    file_saved=open(new_file_path+'.csv','w')
                    [file_saved.writelines(i) for i in content]
                    file_saved.close()
                    os.system('cat '+new_file_path+'_S1.csv >> '+new_file_path+'.csv')
                    os.system('rm '+new_file_path+'_S1.csv')
                    print 'file cat finished'
                    #copyfile(s_file,new_file_path+'_S.txt')
                else :
                    os.remove(s_file)
            except:
                print "Cannot copy or find the lid,MA or T file,cause no good data"
            if valid=='yes':
                if transmit=='yes':
                    # Build fixed-width (zero-padded) depth/temperature summary
                    # statistics and transmit them over the serial link.
                    try:
                        #df=pd.read_csv(new_file_path+'_P.txt')
                        df=df.ix[(df['Depth (m)']>0.85*mean(df['Depth (m)']))] # get rid of shallow data
                        dft=df
                        #dft=pd.read_csv(new_file_path+'_T.txt')
                        # Keep only temperatures within 3 standard deviations of the mean.
                        dft=dft.ix[(dft['Temperature (C)']>mean(dft['Temperature (C)'])-3*std(dft['Temperature (C)'])) & (dft['Temperature (C)']<mean(dft['Temperature (C)'])+3*std(dft['Temperature (C)']))]
                        maxtemp=str(int(round(max(dft['Temperature (C)'][st_index:end_index]),2)*100))
                        if len(maxtemp)<4:
                            maxtemp='0'+maxtemp
                        mintemp=str(int(round(min(dft['Temperature (C)'][st_index:end_index]),2)*100))
                        if len(mintemp)<4:
                            mintemp='0'+mintemp
                        meantemp=str(int(round(np.mean(dft['Temperature (C)'][st_index:end_index]),2)*100))
                        if len(meantemp)<3:
                            meantemp='0'+meantemp
                        sdeviatemp=str(int(round(np.std(dft['Temperature (C)'][st_index:end_index]),2)*100))
                        for k in range(4):
                            if len(sdeviatemp)<4:
                                sdeviatemp='0'+sdeviatemp
                        timerange=str(int(end_index-st_index))
                        #time_len=str(int(round((df['yd'][-1]-df['yd'][0]),3)*1000))
                        for k in range(3):
                            if len(timerange)<3:
                                timerange='0'+timerange
                        # meandepth
                        meandepth=str(abs(int(round(mean(df['Depth (m)'].values)))))
                        #print (mean(df['Pressure (psia)'].values)-13.89)/1.457
                        for k in range(3):
                            if len(meandepth)<3:
                                meandepth='0'+meandepth
                        # meantemp
                        meantemp=str(int(round(mean(dft['Temperature (C)'].values),2)*100))
                        if len(meantemp)<4:
                            meantemp='0'+meantemp
                        # rangedepth
                        rangedepth=str(abs(int(round(max(df['Depth (m)'].values)-min(df['Depth (m)'].values)))))
                        #print (max(df['Pressure (psia)'].values)-min(df['Pressure (psia)'].values))/1.457
                        for k in range(3):
                            if len(rangedepth)<3:
                                rangedepth='0'+rangedepth
                        print 'meandepth'+meandepth+'rangedepth'+rangedepth+'timerange'+timerange+'temp'+meantemp+'sdev'+sdeviatemp
                        try:
                            # Wake the transmitter with blank lines, enter
                            # interactive mode ('i'), then send the packet.
                            ports='tty-huanxintrans'
                            ser=serial.Serial('/dev/'+ports, 9600) # in Linux
                            time.sleep(2)
                            ser.writelines('\n')
                            print 111
                            time.sleep(2)
                            ser.writelines('\n')
                            print 222
                            time.sleep(2)
                            ser.writelines('\n')
                            time.sleep(2)
                            ser.writelines('i\n')
                            print 333
                            time.sleep(3)
                            #ser.writelines('ylb00E'+maxtemp+'D'+mintemp+'C'+meantemp+'B'+sdeviatemp+'0000000000000000000000000000000000000000000000\n')
                            ser.writelines('ylb9'+meandepth+rangedepth+timerange+meantemp+sdeviatemp+'\n')
                            time.sleep(2)
                            print '999'+meandepth+rangedepth+timerange+meantemp+sdeviatemp
                            time.sleep(4)
                            #copyfile(ma_file,'/home/pi/Desktop/towifi/'+serial_num+'('+file_num+')_MA.txt')
                            #copyfile(t_file,'/home/pi/Desktop/towifi/'+serial_num+'('+file_num+')_T.txt')
                            print 'good data, copy file to /home/pi/Desktop/towifi/'+serial_num+'('+file_num+')_T.txt'
                        except:
                            print 'transmit error'
                    except:
                        print 'no good data'
            import time
            os.system('sudo rm /home/pi/Desktop/00-1e-c0-3d-7a-'+serial_num+'/*.lid')
            if valid=='yes':
                print 'ok,all set'
                os.system('sudo rm '+file2)
                time.sleep(1)
                #print 'time sleep 1000'
                os.system('sudo reboot')
                #time.sleep(1000)
            else:
                # Bad haul: idle ~25 minutes, refreshing the GPS fix as we go.
                print 'time sleep 1500'
                for l in range (17):
                    time.sleep(89)
                    func_readgps()
        except odlw_facade.Retries as error: # this exception originates in odlw_facade
            print(str(error))
        except btle.BTLEException: # only log time if the try block was successful
            print('Connection lost.')
        except odlw.XModemException as error:
            print(str(error))
|
#In this code we implement Dynamic Programming with memoization
"""
memo_list=[None]*46
def fibo_driver(n):
#define a list
memo_list[0]=0
memo_list[1]=1
for i in range(2,n+1):
memo_list[i] = -1
return (fibo_main(n))
def fibo_main(n):
if(memo_list[n]==-1):
memo_list[n]=fibo_main(n-1)+fibo_main(n-2)
return (memo_list[n])
fibonacchi=fibo_driver(45)
print (fibonacchi) #(1134903170)
"""
#######################################################################
#Dynamic Programming with Memoization
def fibo_dp(n):
    """Return the n-th Fibonacci number (fib(0)=0, fib(1)=1).

    Bottom-up dynamic programming.  The memo table is sized n+1, so any
    n >= 0 works (the original fixed-size table of 46 capped n at 45).

    Raises ValueError for negative n.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    if n < 2:
        return n  # base cases, no table needed
    memo_list = [0] * (n + 1)
    memo_list[1] = 1
    for i in range(2, n + 1):
        memo_list[i] = memo_list[i - 1] + memo_list[i - 2]
    return memo_list[n]

fibonacchi = fibo_dp(45)
print("Fibonacchi = ", fibonacchi)
|
#!/usr/bin/env python
def coroutine(func):
    """Decorator: call a generator function and prime the coroutine.

    Advances the generator to its first `yield` so callers can `.send()`
    into it immediately.
    """
    def start(*args, **kwargs):
        cr = func(*args, **kwargs)
        # next(cr) works on Python 2.6+ AND Python 3; the original
        # cr.next() was Python-2-only.
        next(cr)
        return cr
    return start
|
from random import randint

def go():
    """Prompt for Enter, then print one randomly drawn case for each of
    the four solve phases (CO, EO, CP, EP), in that order."""
    buffer = raw_input("Press Enter to get new algs: ")
    phases = [
        ["1-1", "adj-adj", "dia-dia", "adj-dia", "dia-adj"],
        ["1-1", "L-L", "3-3", "line-line", "L-line", "4-4", "line-L"],
        ["dia-dia", "null-adj", "adj-dia", "dia-dia", "null-dia", "adj-null", "dia-null", "adj-adj"],
        ["cwu-", "ccwu-", "z-", "h-", "-h", "-z", "adj-adj", "oppo-oppo", "-cwu", "-ccwu", "ccwo-oppo", "ccwo-oppo", "adj-"],
    ]
    for options in phases:
        # randint's upper bound is inclusive, hence len - 1.
        print(options[randint(0, len(options) - 1)])

while True:
    go()
|
#
# Module to load tgs_salt data.
#
# Chien-Hsiang Hsu, 2018.07.23
""" Tools to analyze tgs data.
"""
import glob
import os
import csv
from skimage import io
import numpy as np
def load_images(data_set='train_data'):
    """ load_data.

    Parameters
    ----------
    data_set: 'train_data' or 'test_data'.

    Returns
    ----------
    IDs: list of image file base names without extension.
    imgs, masks: numpy array with shape of (n_samples,101,101).
        `masks` is an empty list for 'test_data'.

    Raises
    ----------
    ValueError: if data_set is not one of the two known set names.
    """
    if data_set not in ['train_data','test_data']:
        # The original did a bare `raise` outside an except block, which
        # surfaces as "RuntimeError: No active exception to re-raise";
        # raise a meaningful exception instead.
        raise ValueError('Invalid data_set: {!r}'.format(data_set))
    img_folder = os.path.join('.data',data_set,'images')
    img_files = os.path.join(img_folder,'*.png')
    img_files = glob.glob(img_files)
    imgs = np.stack([io.imread(f,as_gray=True) for f in img_files],axis=0)
    img_base_names = [os.path.basename(f) for f in img_files]
    IDs = [f.split('.')[0] for f in img_base_names]
    # Get masks (training set only).  `is` was the wrong comparison for
    # string equality here; CPython only made it work by interning.
    masks = []
    if data_set == 'train_data':
        mask_folder = os.path.join('.data',data_set,'masks')
        # Read masks in the same order as images
        mask_files = [os.path.join(mask_folder,f) for f in img_base_names]
        masks = np.stack([io.imread(f,as_gray=True) for f in mask_files],axis=0)
    return IDs, imgs, masks
def load_depths():
    """Read .data/depths.csv into a dict mapping image id -> depth string."""
    depths_file = os.path.join('.data', 'depths.csv')
    depths = {}
    with open(depths_file) as f:
        rows = csv.reader(f)
        next(rows)  # skip header
        for row in rows:
            depths[row[0]] = row[1]
    return depths
def load_train_rle_masks():
    """Read .data/train.csv into a dict mapping image id -> RLE mask string."""
    train_rle_file = os.path.join('.data', 'train.csv')
    rle_masks = {}
    with open(train_rle_file) as f:
        rows = csv.reader(f)
        next(rows)  # skip header
        for row in rows:
            rle_masks[row[0]] = row[1]
    return rle_masks
def rle_encode(mask):
    """Run-length encode a binary mask (column-major order, 1-indexed).

    Returns a space-separated string of alternating start positions and
    run lengths, as used by the Kaggle submission format.
    """
    # Pad with zeros on both ends so every run has a start and an end edge.
    padded = np.concatenate([[0], mask.flatten('F'), [0]])
    edges = np.where(padded[1:] != padded[:-1])[0] + 1
    edges[1::2] -= edges[::2]  # convert end positions into run lengths
    return ' '.join(edges.astype(str))
|
class Node(object):
    """Node of single link list."""

    def __init__(self, ele):
        self.ele = ele    # payload
        self.next = None  # link to the following node, or None at the tail


class SingleLinkList(object):
    """Single link list."""

    def __init__(self, node=None):
        self.__head = node

    def is_empty(self):
        """Return True if single link list is empty, or False if not."""
        return self.__head is None

    def length(self):
        """Return the length of single link list."""
        cur = self.__head
        count = 0
        while cur is not None:
            count += 1
            cur = cur.next
        return count

    def travel(self):
        """Ergodic and print the single link list (space-separated, no newline)."""
        cur = self.__head
        while cur is not None:
            print(cur.ele, end=' ')
            cur = cur.next

    def add(self, item):
        """Add the item to the start of single link list."""
        node = Node(item)
        node.next = self.__head
        self.__head = node

    def append(self, item):
        """Append the item to the end of single link list."""
        node = Node(item)
        if self.is_empty():
            self.__head = node
        else:
            cur = self.__head
            while cur.next is not None:
                cur = cur.next
            cur.next = node

    def insert(self, pos, item):
        """Insert the item at position `pos` (clamped to the list's ends)."""
        # Bug fix: pos == 0 must insert at the head; the original fell
        # through to the general case and inserted at position 1.
        if pos <= 0:
            self.add(item)
        elif pos > self.length() - 1:
            self.append(item)
        else:
            prior = self.__head
            count = 0
            while count < pos - 1:
                count += 1
                prior = prior.next
            node = Node(item)
            node.next = prior.next
            prior.next = node

    def remove(self, item):
        """Remove the first node whose element equals item; no-op if absent."""
        if self.__head is None:
            return
        if self.__head.ele == item:
            self.__head = self.__head.next
            return
        prior = self.__head
        # Walk with `prior` one node behind the candidate so the link can be
        # re-pointed.  Guard prior.next BEFORE dereferencing it -- the
        # original read prior.next.ele unconditionally and crashed with
        # AttributeError on a single-node list containing no match.
        while prior.next is not None:
            if prior.next.ele == item:
                prior.next = prior.next.next
                return
            prior = prior.next

    def search(self, item):
        """Return True if item is in the single link list, or False if not."""
        cur = self.__head
        while cur is not None:
            if cur.ele == item:
                return True
            cur = cur.next
        return False
if __name__ == "__main__":
    # Smoke-test drive of SingleLinkList: same operation sequence as the
    # original demo, expressed with loops.
    sll = SingleLinkList()
    print('is sll empty?', sll.is_empty())
    print('length of sll: ', sll.length())
    sll.append(1)
    print('is sll empty?', sll.is_empty())
    print('length of sll: ', sll.length())
    for value in (2, 3, 4, 5, 6):
        sll.append(value)
    sll.add(10)
    for pos, value in ((-2, 100), (5, 200), (10, 300)):
        sll.insert(pos, value)
    sll.travel()
    print('')
    # Remove every element (including the never-inserted 8) and show the
    # list after each removal.
    for value in (6, 5, 4, 3, 8, 10, 2, 1, 300, 200, 100):
        sll.remove(value)
        sll.travel()
        print('')
    print('is sll empty?', sll.is_empty())
    print('length of sll: ', sll.length())
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 23 12:34:42 2021
@author: mtran
"""
import numpy as np
import glob
import os
class GetDataset(object):
    """Static helpers for loading histopathology patch paths and labels."""

    @staticmethod
    def get_dataset(foldername, patients):
        '''
        Get the histopathology data.

        Parameters:
            foldername: root data directory (string is concatenated, so it
                should end with a path separator).
            patients: iterable of patient sub-directory names to include.

        Returns:
            Data: Array
                Array of patches location
            Label: Array
                Array of labels (0, 1) that correspond to data
        '''
        # The methods took no `self` and were only ever usable as
        # class-level calls; @staticmethod makes that explicit and keeps
        # instance-level calls from breaking.
        X = []
        for p in patients:
            X = X + glob.glob(foldername + p + "/*/*.png")
        y = [GetDataset.get_label(x) for x in X]
        return X, y

    @staticmethod
    def get_label(filepath):
        '''
        Get the image label (0 or 1).

        File names look like <id>_<a>_<b>_<c>_classN.png: the label is the
        last character of the fifth underscore-separated field.
        Only called by get_dataset.
        '''
        filename = os.path.basename(filepath)
        patch_name = filename.split(sep = ".")[0]
        class_label = patch_name.split(sep = "_")[4]
        label = int(class_label[-1])
        return label
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 29 14:41:46 2018

@author: Administrator
"""
import matplotlib.pyplot as plt
import numpy as np
from sklearn.neighbors import NearestNeighbors
from random import sample
from numpy.random import uniform
from math import isnan

# Load the test set: one sample per line, "<label>\t<comma-separated floats>".
x_test=[]
y_test = []
for i in range(2):
    print(i)
    # NOTE(review): `file = file = open(...)` is a duplicated assignment
    # (harmless); `np.float` is deprecated/removed in modern NumPy.
    file = file = open("../database/image_small_train/00000%d_0"%i,"r")
    for line in file.readlines():
        line =line.strip().split('\t')
        y_test.append(line[0])
        x_test.append([np.float(raw_11) for raw_11 in line[1].split(',')])
#        print (line.strip().split('\t'))
    file.close()
x_test = np.array(x_test)

# Load the training features in the same layout.
x_train=[]
y_train = []
for i in range(6):
    print(i)
    file = open("../database/54w_256_features/00000%d_0"%i,"r")
    for line in file.readlines():
        line =line.strip().split('\t')
        y_train.append(line[0])
        x_train.append([np.float(raw_11) for raw_11 in line[1].split(',')])
#        print (line.strip().split('\t'))
    file.close()
x_train = np.array(x_train)
# Cluster on the union of train and test features (unsupervised).
x_train = np.append(x_train,x_test,axis=0)

# NOTE(review): sklearn.cross_validation and sklearn.mixture.GMM were removed
# in scikit-learn 0.20; modern code uses sklearn.model_selection and
# GaussianMixture.
from sklearn.cross_validation import train_test_split
## x holds the dataset features, y the labels.
#x_train, x_test, y_train, y_test = train_test_split(x_train, y_train, test_size = 0.3)
#from sklearn.cluster import DBSCAN
#y_pred = DBSCAN(eps = 0.001, min_samples = 50).fit_predict(heights)
from sklearn.mixture import GMM
# Fit a 40-component Gaussian mixture, then assign each test sample a cluster.
GMM_para = GMM(n_components=40,n_iter=200,min_covar = 1e-12).fit(x_train)
y_pred = GMM_para.predict(x_test)
# Count how many samples landed in each cluster.
num = {}
print()
for i in y_pred:
    if i in num.keys():
        num[i] +=1
    else:
        num[i] = 1
print(num,len(num),set(y_pred))
plt.scatter(x_test[:, 110],x_test[:, 220],c=y_pred[:])
plt.show()
# Group predicted cluster ids by true label and dump them to result.txt.
test_calculate = {}
for i in range(len(y_pred)):
    if y_test[i] in test_calculate.keys():
        test_calculate[y_test[i]].append(y_pred[i])
    else:
        test_calculate[y_test[i]] = [y_pred[i]]
file = open('./result.txt',"w")
for k,v in test_calculate.items():
    file.write(k+ ":")
    [file.write(" " + str(v_)) for v_ in v]
    file.write("\n")
file.close()

# Visualise a 10% sample of the clustered test data in 2-D with t-SNE.
from sklearn.manifold import TSNE
tsne = TSNE(n_components=2)
x_train, x_test, y_train, y_test = train_test_split(x_test, y_pred, test_size = 0.1)
X_embedded = tsne.fit_transform(x_test) # reduce the data to 2 dimensions
# NOTE(review): with n_components=2 only columns 0 and 1 exist;
# X_embedded[:, 2] will raise IndexError -- likely meant [:, 1].
plt.scatter(X_embedded[:,0],X_embedded[:, 2],c=y_test)
plt.show()

#plt.hist(heights[1], 100, normed=1, facecolor='g', alpha=0.75)
#plt.title('Heights Of feature')
#plt.show()
#from sklearn.cluster import KMeans
#from sklearn import metrics
#print(np.array([heights]).shape)
#kmeans_model = KMeans(n_clusters=256, random_state=128).fit(np.array([heights]).reshape(-1,1))
#labels = kmeans_model.labels_
#print(metrics.calinski_harabaz_score(np.array([heights]).reshape(-1,1), labels) )
#print(metrics.silhouette_score(np.array([heights]).reshape(-1,1), labels, metric='euclidean'))
|
import json
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseBadRequest
from .models import Tweet
# Create your views here.
def create(request):
    """Create a tweet from POST data; reject texts over 140 characters.

    Returns 400 (Bad Request) when the text is missing or too long.
    """
    text = request.POST.get('text')
    # Guard against a missing field: the original called len(None) and
    # crashed with a 500 instead of returning a client error.
    if text is None or len(text) > 140:
        return HttpResponseBadRequest("Fail")
    Tweet.objects.create(writer=request.user, text=text)
    return HttpResponse("OK")
def list_tweets(request):
    """Return all tweets as a JSON array of {writer, text} objects."""
    rows = Tweet.objects.all().values_list('writer__username', 'text')
    payload = []
    for writer, text in rows:
        payload.append({'writer': writer, 'text': text})
    return HttpResponse(json.dumps(payload))
|
"""docstring for module"""
import re
class EnglishSentences():
    """An English sentence plus its English->French connective mapping."""

    def __init__(self, en_string):
        self.line = en_string          # raw sentence text
        self.connectives_dict = {}     # en connective -> fr connective

    def add_connective(self, en_connective, fr_connective):
        """Adds dictionary entry as corresponding en/fr connectives"""
        self.connectives_dict[en_connective] = fr_connective

    def get_line(self):
        """Returns the text of the current line"""
        return self.line

    def get_connectives(self):
        """Returns connectives_dict"""
        return self.connectives_dict

    def get_connectives_lst(self):
        """Returns the list of connectives marked up as **connective**.

        The pattern is now a raw string; the original non-raw '\\*\\*'
        happened to work but triggers invalid-escape warnings on newer
        Pythons.
        """
        # NOTE(review): the character class includes a literal '?' --
        # confirm whether '[\s?A-Za-z]' was meant to match '?' characters
        # or was a typo for an optional-space construct.
        return re.findall(r'\*\*([\s?A-Za-z]*)\*\*', self.get_line())
class FrenchSentences():
    """Wrapper around a single French sentence string."""

    def __init__(self, fr_string):
        # Keep the raw sentence text exactly as supplied.
        self.line = fr_string

    def get_line(self):
        """Returns the text of the current line"""
        return self.line
|
# -*- coding:utf-8 -*-
import sys
sys.path.append('..')
import lib.Utils as U
import public.getcase
import os
class analyzelog:
    """Scan result .log files and write filtered copies that keep only
    exception blocks: each 'Exception' line plus the follow-up lines that
    carry the same error id."""

    def __init__(self,all_result_path):
        self.all_result_path = all_result_path

    @U.log_flie_function()
    def __log_file(self,all_result_path,extension_name):
        # Collect every file with the given extension under the result tree.
        return public.getcase.get_all_case(all_result_path,extension_name).values()

    def analyze(self,log_file):
        """Write <name>filter.log next to the log dir, keeping only
        exception-related lines from log_file."""
        errorId = 0
        go_on_id = 0
        log_filter_name = os.path.split(log_file)[1].split('.')[0]
        # NOTE: Windows-style path separators, matching the rest of the tool.
        with open(self.all_result_path + '\\log\\{}filter.log'.format(log_filter_name),'w') as s:
            with open(log_file) as f:
                for line in f:
                    if 'Exception' in line:
                        go_on_id =1
                        # Separator before each exception block.
                        # (Bug fix: the original wrote the literal two
                        # characters '/n' instead of a newline '\n'.)
                        s.write('#' + '-' * 40 + '\n')
                        s.write(line)
                        # The error id sits in parentheses on the Exception line.
                        errorId = line.split('(')[1].split(')')[0].strip()
                    elif go_on_id ==1:
                        if errorId in line:
                            s.write(line)
                        else:
                            go_on_id = 0

    def main(self):
        """Filter every .log file found under the result path."""
        for logfile in self.__log_file(self.all_result_path,'.log'):
            self.analyze(logfile)
import pymongo
from tqdm import tqdm
import copy
import os
import pickle
import re
import jieba
import pandas as pd
import numpy as np
def getData(db, save_dir, restore=True, save=True):
    '''
    Collect every document whose illnessType mentions epilepsy ('癫痫').

    db: pymongo database handle; every collection is scanned.
    save_dir: pickle path used as a cache.
    restore: when True and the cache exists, load it instead of scanning.
    save: when True, pickle the collected documents to save_dir.
    return dataset (list of matching documents)
    '''
    dataset = []
    if os.path.exists(save_dir) and restore:
        dataset = load_pkl_data(save_dir)
    else:
        print(f'{save_dir} does not exist')
        # db.stats().objects is the total document count, used to size the
        # progress bar.  NOTE(review): db.eval is deprecated/removed on
        # newer MongoDB servers.
        with tqdm(total=db.eval('db.stats()').get('objects')) as pbar:
            for curCollection in db.collection_names():
                pbar.set_description(f'current collection: {curCollection}')
                for item in db[curCollection].find():
                    if '癫痫' in item.get('illnessType', 'None'):
                        dataset.append(item)
                    pbar.update(1)
    if save:
        save_pkl_data(dataset, save_dir)
    return dataset
def delRepetition(dataset=None, save_dir=None, restore=True, save=True):
    '''
    De-duplicate the dataset by exact question text ('Q').

    Results are cached as a pickle in save_dir (restore/save semantics as
    in getData).
    return dataset_unique
    '''
    if os.path.exists(save_dir) and restore:
        dataset_unique = load_pkl_data(save_dir)
    else:
        questionUnique = {}  # dict used as a seen-set for O(1) lookups
        dataset_unique = []
        repetitionNum = 0
        with tqdm(total=len(dataset)) as pbar:
            pbar.set_description('数据去重')
            for index, item in enumerate(dataset):
                # First occurrence of this question text: keep the sample.
                if questionUnique.get(item['Q'], 0) == 0:
                    questionUnique.setdefault(item['Q'], 1)
                    dataset_unique.append(item)
                else:
                    repetitionNum += 1
                pbar.update(1)
        print(f'有 {repetitionNum} 个样本重复。剩下 {len(dataset_unique)} 个样本')
    if save:
        save_pkl_data(dataset_unique, save_dir)
    return dataset_unique
def text_clear(dataset=None, save_dir=None, restore=True, save=True):
    '''
    Text cleanup over the 'Q', 'Q_detailed' and 'A1' fields:
        (1) delete garbage characters / stray punctuation / extra spaces,
            keeping only an allowed character set
        (2) replace common English punctuation with the Chinese equivalent
        (3) lower-case any Latin letters
        (4) delete boilerplate template phrases from the answer text
    Results are cached as a pickle in save_dir.
    '''
    if os.path.exists(save_dir) and restore:
        dataset = load_pkl_data(save_dir)
    else:
        dataset0 = copy.deepcopy(dataset)  # pristine copy, used below to count changes
        # Negated character class: anything NOT in this keep-list is removed.
        keepChar = u'[^a-zA-Z\u4e00-\u9fa5,.:;''""?|!\^%()-\[\]{}/\`~,。:;‘’”“?|!……%()—【】{}·~~、]'
        usualChinaPunc = [',', '。', ':', ';', '?', '!', '(', ')', '【', '】']
        usualEngPunc = [',', '.', ':', ';', '?', '!', '(', ')', '[', ']']
        with tqdm(total=len(dataset)) as pbar:
            pbar.set_description('文本处理')
            for index, item in enumerate(dataset):
                # (1) drop characters outside the keep-list
                item['Q'] = re.sub(f'{keepChar}', '', item['Q'])
                item['Q_detailed'] = re.sub(f'{keepChar}', '', item['Q_detailed'])
                item['A1'] = re.sub(f'{keepChar}', '', item['A1'])
                # (2) English -> Chinese punctuation, one symbol at a time
                for curEngIndex, curEngPunc in enumerate(usualEngPunc):
                    item['Q'] = re.sub(f'[{curEngPunc}]', f'{usualChinaPunc[curEngIndex]}', item['Q'])
                    item['Q_detailed'] = re.sub(f'[{curEngPunc}]', f'{usualChinaPunc[curEngIndex]}', item['Q_detailed'])
                    item['A1'] = re.sub(f'[{curEngPunc}]', f'{usualChinaPunc[curEngIndex]}', item['A1'])
                # (3) normalize case
                item['Q'] = item['Q'].lower()
                item['Q_detailed'] = item['Q_detailed'].lower()
                item['A1'] = item['A1'].lower()
                # (4) strip templated answer prefixes
                item['A1'] = re.sub('病情分析:|指导意见:|医生建议:', '', item['A1'])
                pbar.update(1)
        # Count how many samples were actually modified, for reporting only.
        subNum = 0
        subSample = []
        for index, item in enumerate(dataset0):
            if item['Q'] != dataset[index]['Q']:
                subNum += 1
                subSample.append({'old': item, 'new': dataset[index]})
                continue
            elif item['Q_detailed'] != dataset[index]['Q_detailed']:
                subNum += 1
                subSample.append({'old': item, 'new': dataset[index]})
                continue
            elif item['A1'] != dataset[index]['A1']:
                subNum += 1
                subSample.append({'old': item, 'new': dataset[index]})
                continue
            else:
                continue
        print(f'在“文本处理”的过程中,样本被处理的数量: {subNum}({subNum / len(dataset):.2%})')
    if save:
        save_pkl_data(dataset, save_dir)
    return dataset
def filterQ(dataset, save_dir, restore=True, save=True):
    '''
    Filter the questions in `dataset`, dropping samples that do not meet
    both conditions:
      * the (word-segmented) question contains at least one keyword cue
      * the question has fewer than 3 sentences
    return dataset_filter
    '''
    if os.path.exists(save_dir) and restore:
        dataset_filter = load_pkl_data(save_dir)
    else:
        dataset_filter = []
        # keyword cues that mark the text as an actual question
        filterKeyWords = ['吗', '什么', '怎么', '哪些', '呢', '怎么办', '如何', '是不是', '为什么',
                          '怎样', '请问', '怎么样', '多少', '怎么回事', '哪里', '好不好', '有没有',
                          '可不可以', '几年', '几天', '哪个', '多久', '是否', '有用吗']
        with tqdm(total=len(dataset)) as pbar:
            pbar.set_description('问题筛选')
            for index, item in enumerate(dataset):
                curQ = item['Q']
                curQ_cut = list(jieba.cut(curQ))  # word segmentation
                # sentence count = splits on end-of-sentence punctuation
                QSentenceNum = re.split('[,。?!……]', curQ)[:-1]
                if len(QSentenceNum) < 3:
                    for curFilterWord in filterKeyWords:
                        if curFilterWord in curQ_cut:
                            dataset_filter.append(item)
                            break
                pbar.update(1)
        subNum = len(dataset_filter)
        print(f'在“问句筛选”的过程中,剩余样本个数: {subNum}({subNum / len(dataset):.2%})')
    if save:
        save_pkl_data(dataset_filter, save_dir)
    return dataset_filter
def statisticChar(dataset):
    '''
    Count the characters of the 'Q', 'Q_detailed' and 'A1' fields of
    every sample and print describe() statistics for each.
    return len_Q, len_Q_detailed, len_A
    '''
    len_Q = []
    len_Q_detailed = []
    len_A = []
    with tqdm(total=len(dataset)) as pbar:
        for index, item in enumerate(dataset):
            curQ = item['Q']
            len_Q.append(len(curQ))
            curQ_detailed = item['Q_detailed']
            len_Q_detailed.append(len(curQ_detailed))
            curA = item['A1']
            len_A.append(len(curA))
            pbar.update(1)
    len_Q = pd.DataFrame(len_Q)
    len_Q_detailed = pd.DataFrame(len_Q_detailed)
    len_A = pd.DataFrame(len_A)
    print('=' * 20)
    print('“Q_”的字量统计情况如下:')
    print(len_Q.describe(percentiles=[0.5, 0.8, 0.9, 0.99, 0.995]))
    print('=' * 20)
    # Fix: len_Q_detailed was computed and returned but its statistics
    # were never reported, unlike its siblings.
    print('=' * 20)
    print('“Q_detailed”的字量统计情况如下:')
    print(len_Q_detailed.describe(percentiles=[0.5, 0.8, 0.9, 0.99, 0.995]))
    print('=' * 20)
    print('=' * 20)
    print('“A”的字量统计情况如下:')
    print(len_A.describe(percentiles=[0.5, 0.8, 0.9, 0.95, 0.97]))
    print('=' * 20)
    return len_Q, len_Q_detailed, len_A
def delToLongSample(dataset, save_dir, maxLen=300, restore=True, save=True):
    '''
    Keep only samples whose question and answer are both at most
    `maxLen` characters long; results are cached like the other steps.
    '''
    dataset_delToLong = []
    if os.path.exists(save_dir) and restore:
        dataset_delToLong = load_pkl_data(save_dir)
    else:
        with tqdm(total=len(dataset)) as pbar:
            for sample in dataset:
                if len(sample['Q']) <= maxLen and len(sample['A1']) <= maxLen:
                    dataset_delToLong.append(sample)
                pbar.update(1)
        print(f'经过“文本长度处理”后,剩余样本个数:{len(dataset_delToLong)}({len(dataset_delToLong) / len(dataset):.2%})')
    if save:
        save_pkl_data(dataset_delToLong, save_dir)
    return dataset_delToLong
def final_check(dataset, save_dir, restore=True, save=True):
    '''
    Final sanity pass: drop samples whose 'Q' or 'A1' field is empty,
    then de-duplicate via delRepetition; results cached in `save_dir`.
    '''
    goodDataset = []
    if os.path.exists(save_dir) and restore:
        goodDataset = load_pkl_data(save_dir)
    else:
        print('最终测试:')
        with tqdm(total=len(dataset)) as pbar:
            pbar.set_description('空值检测')
            for index, item in enumerate(dataset):
                # empty string or empty list both count as missing
                if (item['Q'] == '') or (item['Q'] == []):
                    pbar.update(1)
                    continue
                elif (item['A1'] == '') or (item['A1'] == []):
                    pbar.update(1)
                    continue
                else:
                    goodDataset.append(item)
                    pbar.update(1)
        print(f'删除含有空值的样本个数:{len(dataset) - len(goodDataset)},剩余样本个数:{len(goodDataset)}({len(goodDataset) / len(dataset):.2%})')
        # de-duplicate without touching this step's own cache file
        goodDataset = delRepetition(dataset=goodDataset, save_dir='', restore=False, save=False)
    if save:
        save_pkl_data(goodDataset, save_dir)
    return goodDataset
def writeToDB(dataset, new_db):
    """Insert every sample into the `data` collection of `new_db`
    (a MongoDB database), keeping illness type, question and answer."""
    with tqdm(total=len(dataset)) as pbar:
        for index, item in enumerate(dataset):
            # NOTE(review): Collection.insert() is deprecated in pymongo 3+
            # in favour of insert_one() -- confirm the pymongo version.
            new_db['data'].insert(
                {
                    'illness_type': item['illnessType'],
                    'q': item['Q'],
                    'a': item['A1'],
                }
            )
            pbar.update(1)
def save_pkl_data(data, filename):
    """Pickle `data` and write the bytes to `filename`."""
    data_pkl = pickle.dumps(data)
    # Fix: the f-string previously contained no placeholder, so the
    # actual target path was never shown.
    print(f'save pkl: {filename}')
    with open(filename, 'wb') as fp:
        fp.write(data_pkl)
def load_pkl_data(filename):
    """Read `filename` and return its unpickled contents."""
    # Fix: the f-string previously contained no placeholder, so the
    # actual source path was never shown.
    print(f'load pkl: {filename}')
    with open(filename, 'rb') as fp:
        data_pkl = fp.read()
    return pickle.loads(data_pkl)
def gen_question_answer_csv(db, save_dir, max_num):
    """Export at most `max_num` (question, answer) pairs from the Mongo
    collection `db['data']` into question.csv and answer.csv.  Question
    and answer share the same 1-based id."""
    max_num = int(max_num)
    questions = []
    answers = []
    with tqdm(total=max_num) as pbar:
        for index, record in enumerate(db['data'].find()):
            if index == max_num:
                break
            pair_id = f'{index + 1}'  # id shared by question and its answer
            questions.append([pair_id, record['q']])
            answers.append([pair_id, pair_id, record['a']])
            pbar.update(1)
    # write both CSV files without the DataFrame index column
    questions_csv = pd.DataFrame(questions, columns=['question_id', 'content'])
    answers_csv = pd.DataFrame(answers, columns=['ans_id', 'question_id', 'content'])
    questions_csv.to_csv(save_dir + '/question.csv', index=0)
    answers_csv.to_csv(save_dir + '/answer.csv', index=0)
class Seed():
    """Monotonically increasing seed dispenser: every call returns the
    current seed value and then advances it by one."""

    def __init__(self, init_seed):
        self.seed = init_seed

    def __call__(self):
        current = self.seed
        self.seed = current + 1
        return current
def gen_candidates(max_num, config):
    """Split pair ids 1..max_num into train/dev/test and write the
    candidate files: train_candidates.txt holds (question, positive,
    negative) triples; dev/test files hold one labelled positive plus
    N-1 labelled negatives per question.

    Every np.random call is re-seeded from an incrementing Seed, so the
    generated files are fully reproducible for a given config.seed.
    """
    seed = Seed(config.seed)
    np.random.seed(seed())
    random_indexes = np.random.permutation(np.arange(1, max_num+1, dtype=np.int32))
    # contiguous slices of the shuffled ids form the three splits
    train_indexes = random_indexes[:int(max_num*(1 - config.dev_rate - config.test_rate))]
    dev_indexes = random_indexes[int(max_num*(1 - config.dev_rate - config.test_rate)): int(max_num*(1 - config.test_rate))]
    test_indexes = random_indexes[int(max_num*(1 - config.test_rate)):]

    def get_neg_index(pos_index, max_num, pair_num):
        # draw `pair_num` negatives, retrying until none equals the positive
        while True:
            np.random.seed(seed())
            neg_index = np.random.randint(1, max_num + 1, (pair_num)).tolist()
            if pos_index not in neg_index:
                break
        return neg_index
    # gen train candidates
    train_candidates = []
    with tqdm(total=len(train_indexes)) as pbar:
        pbar.set_description('gen train candidates')
        for index in train_indexes:
            question_id = index
            pos_ans_id = index  # answer id equals question id by construction
            neg_index = get_neg_index(pos_ans_id, max_num, pair_num=config.train_pair_num)
            train_candidates.extend(
                [[f'{question_id}', f'{pos_ans_id}', f'{neg_index[i]}'] for i in range(config.train_pair_num)]
            )
            pbar.update(1)
    print(f'save {config.data_dir}/train_candidates.txt')
    with open(f'{config.data_dir}/train_candidates.txt', 'w', encoding='utf-8') as fp:
        fp.write('question_id,pos_ans_id,neg_ans_id\n')
        for item in train_candidates:
            fp.write(','.join(item)+'\n')
    # gen dev candidates
    dev_candidates = []
    with tqdm(total=len(dev_indexes)) as pbar:
        pbar.set_description('gen dev candidates')
        for index in dev_indexes:
            question_id = index
            ans_id = index
            cnt = 0
            label = 1  # first row per question is the positive answer
            dev_candidates.append([f'{question_id}', f'{ans_id}', f'{cnt}', f'{label}'])
            neg_index = get_neg_index(ans_id, max_num, pair_num=config.dev_pair_num-1)
            dev_candidates.extend(
                [
                    [f'{question_id}', f'{neg_index[i]}', f'{i+1}', '0']
                    for i in range(config.dev_pair_num-1)
                ]
            )
            pbar.update(1)
    print(f'save {config.data_dir}/dev_candidates.txt')
    with open(f'{config.data_dir}/dev_candidates.txt', 'w', encoding='utf-8') as fp:
        fp.write('question_id,ans_id,cnt,label\n')
        for item in dev_candidates:
            fp.write(','.join(item)+'\n')
    # gen test candidates (same layout as dev)
    test_candidates = []
    with tqdm(total=len(test_indexes)) as pbar:
        pbar.set_description('gen test candidates')
        for index in test_indexes:
            question_id = index
            ans_id = index
            cnt = 0
            label = 1
            test_candidates.append([f'{question_id}', f'{ans_id}', f'{cnt}', f'{label}'])
            neg_index = get_neg_index(ans_id, max_num, pair_num=config.test_pair_num - 1)
            test_candidates.extend(
                [
                    [f'{question_id}', f'{neg_index[i]}', f'{i + 1}', '0']
                    for i in range(config.test_pair_num - 1)
                ]
            )
            pbar.update(1)
    print(f'save {config.data_dir}/test_candidates.txt')
    with open(f'{config.data_dir}/test_candidates.txt', 'w', encoding='utf-8') as fp:
        fp.write('question_id,ans_id,cnt,label\n')
        for item in test_candidates:
            fp.write(','.join(item)+'\n')
class Config():
    """Central configuration: MongoDB connections, local data directory
    and train/dev/test split parameters."""
    def __init__(self):
        # NOTE(review): hard-coded internal MongoDB address.
        self.client = pymongo.MongoClient("mongodb://10.23.11.253:27017")
        self.db = self.client['db_familyDoctor_QA_V2']
        self.data_dir = os.path.abspath('.') + '/data'
        if not os.path.exists(self.data_dir):
            os.makedirs(self.data_dir)
        self.new_db = self.client['cmcqa_epilepsy']
        self.seed = 1
        self.dev_rate = 0.1    # fraction of pairs used for the dev split
        self.test_rate = 0.1   # fraction of pairs used for the test split
        self.train_pair_num = 113  # negatives sampled per train question
        self.dev_pair_num = 113    # candidates per dev question (1 pos + rest)
        self.test_pair_num = 113   # candidates per test question
|
from functools import wraps
def check_bluetooth(f):
    """
    Decorator which checks for an active Bluetooth connection.

    The wrapped method runs only when ``self.zei`` (the device handle)
    is truthy; otherwise an error dialog is shown.  Any exception raised
    by the wrapped call is treated as a dropped connection.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        self = args[0]
        if self.zei:
            try:
                f(*args, **kwargs)
            # Fix: a bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; only real errors mean a lost connection.
            except Exception:
                self.the_connection_was_lost()
        else:
            self.display_message_box('showerror', 'No Connection',
                'You need to have an active Bluetooth connection first.')
    return wrapper
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 20 00:17:56 2017
@author: Michael
"""
# TURN OFF AUTHORIZATION OF YOUR FIREBASE PROJECT BEFORE RUNNING THE CODE.
# THE TWO PACKAGE NAMES ARE INCLUDED BELOW
# ALSO THIS WAS DONE IN PYTHON 2.7
from faker import Factory # pip install Faker
from random import randint
from firebase import firebase # pip install python-firebase
from datetime import timedelta
import os
import datetime

# Seeds a Firebase realtime database with fake parking-lot data
# (spots, user accounts, vehicles).  Python 2 only: uses raw_input/input.
faker = Factory.create()
carMake = ["Nissan", "Chevy", "Honda", "Cadillac", "Ferrari",
           "Ford", "Lexus", "Hyundai", "Dodge", "Mitsubishi"]
carModel = ["Maxima", "Cobalt", "Civic", "ATS", "Enzo",
            "F-150", "IS-350", "Accent", "Dart", "Lancer"]
spotTypelist = ["student", "staff", "private", "handicapped"]
index = 0
vehiclesNum = setsNum = 0
userURL = projectName = firebaseDirectory = header = vehicleString = []
lot = spotID = vacancy = spotType = occupant = []
userEmail = userID = firstName = lastName = phone = permitType = expDate = purchaseDate = []
make = model = color = licensePlate = []
authorized = False
projectName = raw_input("Firebase project = example for https://example.firebaseio.com\n"
                        "Please enter the name of your Firebase project: ")
userURL = "https://"
userURL = userURL + projectName
userURL = userURL + ".firebaseio.com"
# NOTE(review): Python 2 input() evaluates the entry; int(raw_input(...))
# would be safer -- confirm before changing.
setsNum = input("Please enter how many sets of data you want to create: ")
firebase = firebase.FirebaseApplication(userURL, None)
for i in range(setsNum):
    # Spots
    lot = faker.state_abbr() # Lot
    firebaseDirectory = "/Spots/"
    firebaseDirectory = firebaseDirectory + lot
    spotID = randint(1, 250) # Spot ID
    vacancy = faker.boolean() # Vacant
    # NOTE(review): this assignment is overwritten in both branches below,
    # so it appears redundant.
    occupant = faker.email()
    if vacancy != True:
        authorized = faker.boolean() # Authorized
        occupant = faker.email() # User Email
    else:
        authorized = False
        occupant = 'none'
    index = randint(0,3)
    spotType = spotTypelist[index]
    firebase.put(firebaseDirectory, spotID, params={'print': 'silent'},
                 data={'vacancy': vacancy,
                       'type': spotType,
                       'occupant': occupant,
                       'authorized': authorized})
    # UserAccounts
    userID = faker.ean8() # User ID
    firebaseDirectory = "/UserAccounts/"
    userEmail = faker.email() # User Email
    firstName = faker.first_name() # First Name
    lastName = faker.last_name() # Last Name
    phone = faker.phone_number() # Phone Number
    # permitInfo
    index = randint(0,3)
    permitType = spotTypelist[index] # Permit Type
    # Purchase Date
    purchaseDate = datetime.datetime.date(faker.date_time_between(start_date="-1y", end_date="now", tzinfo=None))
    # Expiration Date (exactly one year added for permits)
    expDate = purchaseDate + timedelta(days=365)
    firebase.put(firebaseDirectory, userID, params={'print': 'silent'},
                 data={'userEmail': userEmail,
                       'firstName': firstName,
                       'lastName': lastName,
                       'phone': phone,
                       'permit': {
                           'type': permitType,
                           'expDate': expDate,
                           'purchaseDate': purchaseDate}})
    firebaseDirectory = firebaseDirectory + userID + "/vehicles"
    vehiclesNum = randint(1,4)
    for j in range(vehiclesNum, 0, -1):
        # vehicleInfo
        vehicleString = str(j)
        header = "vehicle"
        header = header + vehicleString
        index = randint(0,9)
        make = carMake[index] # Vehicle Make
        index = randint(0,9)
        model = carModel[index] # Vehicle Model
        color = faker.safe_color_name() # Vehicle Color
        licensePlate = faker.ean8() # License Plate
        firebase.put(firebaseDirectory, header, params={'print': 'silent'},
                     data={'make': make,
                           'model': model,
                           'color': color,
                           'licensePlate': licensePlate})
os._exit(0)
|
"""Common local disk utils.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import errno
import logging
import os
import re
import six
from treadmill import exc
from treadmill import fs
from treadmill import lvm
from treadmill import subproc
from treadmill import sysinfo
from treadmill import utils
#: Module logger.
_LOGGER = logging.getLogger(__name__)
#: Name of the Treadmill LVM volume group
TREADMILL_VG = 'treadmill'
#: Name of the Treadmill loopback image file
TREADMILL_IMG = 'treadmill.img'
#: Minimum size for the Treadmill volume group. If we can't use this much, the
#: server node start will fail
TREADMILL_MIN_VG_SIZE = utils.size_to_bytes('100M')
#: Minimum free disk space to leave for the OS
TREADMILL_MIN_RESERVE_SIZE = utils.size_to_bytes('100M')
#: Number of loop devices to initialize
TREADMILL_LOOPDEV_NB = 8
def refresh_vg_status(group):
    """Query LVM for the current volume group status.
    """
    info = lvm.vgdisplay(group=group)
    # LVM reports the extent size in kilobytes; normalise to bytes.
    extent_bytes = utils.size_to_bytes('{kb}k'.format(kb=info['extent_size']))
    return {
        'name': info['name'],
        'extent_size': extent_bytes,
        'extent_free': info['extent_free'],
        'extent_nb': info['extent_nb'],
    }
def init_vg(group, block_dev):
    """Initialize volume group.

    :param group:
        Name of the LVM Volume Group.
    :type group:
        ``str``
    :param block_dev:
        LVM Physical Volume device backing the Volume Group
    :type block_dev:
        ``str``
    """
    try:
        # If the volume group already exists, activating it is all we need.
        lvm.vgactivate(group)
        return
    except subproc.CalledProcessError:
        pass  # volume group missing; create it below
    lvm.pvcreate(device=block_dev)         # physical volume backend
    lvm.vgcreate(group, device=block_dev)  # volume group on top of it
    lvm.vgactivate(group)                  # and bring it online
def init_loopback_devices(loopdevice_numbers):
    """Create and initialize loopback devices."""
    for idx in six.moves.range(0, loopdevice_numbers):
        device = '/dev/loop%s' % idx
        if not os.path.exists(device):
            # loop devices are block devices with major number 7
            subproc.check_call(['mknod', '-m660', device, 'b', '7', str(idx)])
            subproc.check_call(['chown', 'root.disk', device])
def loop_dev_for(filename):
    """Lookup the loop device associated with a given filename.

    :param filename:
        Name of the file
    :type filename:
        ``str``
    :returns:
        Name of the loop device or None if not found
    :raises:
        subproc.CalledProcessError if the file doesn't exist
    """
    filename = os.path.realpath(filename)
    loop_dev = subproc.check_output(
        [
            'losetup',
            '-j',
            filename
        ]
    )
    loop_dev = loop_dev.strip()
    # Fix: the path is interpolated into a regex, so any regex
    # metacharacter in it (., +, (), ...) could mis-match; escape it.
    match = re.match(
        r'^(?P<loop_dev>[^:]+):.*\({fname}\)'.format(fname=re.escape(filename)),
        loop_dev
    )
    if match is not None:
        loop_dev = match.groupdict()['loop_dev']
    else:
        loop_dev = None
    return loop_dev
def create_image(img_name, img_location, img_size):
    """Create a sparse file of the appropriate size.

    A negative/zero `img_size` means "all free space minus that amount".
    Creation is retried because another process may race us on the file.
    """
    fs.mkdir_safe(img_location)
    filename = os.path.join(img_location, img_name)
    retries = 10
    while retries > 0:
        retries -= 1
        # Remove any stale image so the exclusive create below can win.
        try:
            stats = os.stat(filename)
            os.unlink(filename)
            _LOGGER.info('Disk image found and unlinked: %r; stat: %r',
                         filename, stats)
        except OSError as err:
            if err.errno == errno.ENOENT:
                pass
            else:
                raise
        available_size = sysinfo.disk_usage(img_location)
        img_size_bytes = utils.size_to_bytes(img_size)
        if img_size_bytes <= 0:
            real_img_size = available_size.free - abs(img_size_bytes)
        else:
            real_img_size = img_size_bytes
        # refuse to create an image that is too small or starves the OS
        if (real_img_size < TREADMILL_MIN_VG_SIZE or
                available_size.free <
                real_img_size + TREADMILL_MIN_RESERVE_SIZE):
            raise exc.NodeSetupError('Not enough free disk space')
        if fs.create_excl(filename, real_img_size):
            break
    if retries == 0:
        raise exc.NodeSetupError('Something is messing with '
                                 'disk image creation')
def init_block_dev(img_name, img_location, img_size='-2G'):
    """Initialize a block_dev suitable to back the Treadmill Volume Group.

    The physical volume size will be auto-size based on the available capacity
    minus the reserved size.

    :param img_name:
        Name of the file which is going to back the new volume group.
    :type img_name:
        ``str``
    :param img_location:
        Path name to the file which is going to back the new volume group.
    :type img_location:
        ``str``
    :param img_size:
        Size of the image or reserved amount of free filesystem space
        to leave to the OS if negative, in bytes or using a literal
        qualifier (e.g. "2G").
    :type size:
        ``int`` or ``str``
    """
    filename = os.path.join(img_location, img_name)
    # Initialize the OS loopback devices (needed to back the Treadmill
    # volume group by a file)
    init_loopback_devices(TREADMILL_LOOPDEV_NB)
    try:
        loop_dev = loop_dev_for(filename)
    except subproc.CalledProcessError:
        # The file doesn't exist.
        loop_dev = None
    # Assign a loop device (if not already assigned)
    if not loop_dev:
        # Create image
        if not os.path.isfile(filename):
            create_image(img_name, img_location, img_size)
        # Create the loop device ('-f' picks the first free one)
        subproc.check_call(
            [
                'losetup',
                '-f',
                filename
            ]
        )
        loop_dev = loop_dev_for(filename)
    if not loop_dev:
        raise exc.NodeSetupError('Unable to find /dev/loop device')
    _LOGGER.info('Using %r as backing for the physical volume group', loop_dev)
    return loop_dev
def setup_device_lvm(block_dev, vg_name=TREADMILL_VG):
    """Setup the LVM Volume Group based on block device.

    Returns True when the volume group was already active."""
    already_active = activate_vg(vg_name)
    if not already_active:
        _LOGGER.info('Initializing Volume Group')
        init_vg(vg_name, block_dev)
    return already_active
def setup_image_lvm(img_name, img_location, img_size,
                    vg_name=TREADMILL_VG):
    """Setup the LVM Volume Group based on image file.

    Returns True when the volume group was already active."""
    already_active = activate_vg(vg_name)
    if already_active:
        return True
    _LOGGER.info('Initializing Volume Group')
    backing_dev = init_block_dev(
        img_name,
        img_location,
        img_size
    )
    init_vg(vg_name, backing_dev)
    return already_active
def cleanup_device_lvm(block_dev, vg_name=TREADMILL_VG):
    """Clean up lvm env"""
    _LOGGER.info('Destroying Volume Group')
    # remove the volume group first, then its physical volume
    lvm.vgremove(vg_name)
    lvm.pvremove(block_dev)
def cleanup_image_lvm(img_name, img_location, vg_name=TREADMILL_VG):
    """Clean up lvm env"""
    _LOGGER.info('Destroying Volume Group')
    img_file = os.path.join(img_location, img_name)
    lvm.vgremove(vg_name)
    # tear down the physical volume and its loop device, if any
    loop_dev = loop_dev_for(img_file)
    if loop_dev is not None:
        lvm.pvremove(loop_dev)
        loopdetach(loop_dev)
    # finally drop the backing image file itself
    if os.path.isfile(img_file):
        os.unlink(img_file)
def activate_vg(vg_name):
    """Try activating vg; returns True on success, False otherwise."""
    try:
        lvm.vgactivate(group=vg_name)
    except subproc.CalledProcessError:
        return False
    return True
def loopdetach(device):
    """Detach specified loop device"""
    return subproc.check_call(['losetup', '-d', device])
|
class Animal():
    """A pet with a health level (apples) and a happiness level (hearts),
    living in a given habitat."""

    def __init__(self, name, age, habitat, heath_level=0, hapiness_level=0):
        self.name = name
        self.age = age
        self.heath_level = heath_level
        self.hapiness_level = hapiness_level
        self.habitat = habitat

    def display_info(self):
        """Return a one-line Spanish description of the animal."""
        return f'Hola me llamo {self.name}, nivel de salud {self.heath_level} manzanas y nivel de felicidad {self.hapiness_level} corazones, vivo en {self.habitat} '

    def eating(self):
        """Feed the animal: +10 health, +10 happiness; returns self so
        calls can be chained."""
        self.heath_level = self.heath_level + 10
        self.hapiness_level = self.hapiness_level + 10
        return self
|
#!/usr/bin/python
from lcs import LCS

# LCS demo script (Python 2: `print` statements).
# Integer sequences with varying degrees of common subsequence:
a = [3, 1, 2, 4, 9, 5, 10, 6, 8, 7]
b = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
c = [4, 7, 2, 3, 10, 6, 9, 1, 5, 8]
d = [3, 1, 2, 4, 9, 5, 10, 6, 8, 7]
e = [2, 10, 1, 3, 8, 4, 9, 5, 7, 6]
print LCS(a, b, lambda x, y: x == y)
print LCS(a, c, lambda x, y: x == y)
print LCS(a, d, lambda x, y: x == y)
print LCS(a, e, lambda x, y: x == y)
# Word-level LCS of two German test paragraphs:
str1 = """
die einkommen der landwirte
sind fuer die abgeordneten ein buch mit sieben siegeln
um dem abzuhelfen
muessen dringend alle subventionsgesetze verbessert werden
"""
str2 = """
die steuern auf vermoegen und einkommen
sollten nach meinung der abgeordneten
nachdruecklich erhoben werden
dazu muessen die kontrollbefugnisse der finanzbehoerden
dringend verbessert werden
"""
print LCS(str1.split(), str2.split(), lambda s1, s2: s1 == s2)
# LCS over points, compared coordinate-wise via a custom equality:
point1 = [{'x':0,'y':0},{'x':1,'y':1},{'x':2,'y':2},{'x':3,'y':3},{'x':4,'y':4}]
point2 = [{'x':3,'y':3},{'x':2,'y':2},{'x':6,'y':3},{'x':4,'y':4}]
print LCS(point1, point2, lambda a, b: a['x'] == b['x'] and a['y'] == b['y'])
|
import os
import testinfra.utils.ansible_runner
# Hosts under test come from the Molecule-generated Ansible inventory.
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']
).get_hosts('all')
def test_installed_packages(host):
lsof = host.package("lsof")
assert lsof.is_installed
git = host.package("git")
assert git.is_installed
telnet = host.package("telnet")
assert telnet.is_installed
ntp = host.package("ntp")
assert ntp.is_installed
iftop = host.package("iftop")
assert iftop.is_installed
unzip = host.package("unzip")
assert unzip.is_installed
net_tools = host.package("net-tools")
assert net_tools.is_installed
assert host.package("curl").is_installed
|
from django.db import models
from django.contrib.auth.models import User
class MusicLibrary(models.Model):
    """Per-user music library: exactly one per Django user, holding any
    number of playlists."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    playlists = models.ManyToManyField('Playlist', blank=True)

    def __str__(self):
        return self.user.username
class Playlist(models.Model):
    """A named collection of songs; songs may appear in many playlists."""
    name = models.CharField(max_length=50)
    songs = models.ManyToManyField('Song', blank=True)

    def __str__(self):
        return self.name
class Song(models.Model):
    """A single track, referenced by URL; file_name is optional."""
    name = models.CharField(max_length=50)
    file_name = models.CharField(max_length=100, null=True, blank=True)
    music_url = models.URLField()

    def __str__(self):
        return self.name
from keras import backend as K
from segmentation_models.losses import jaccard_loss
from segmentation_models.metrics import iou_score
def iou_metric_binary(t, p):
    """IoU for a single-channel sigmoid prediction, thresholded at 0.5."""
    p = K.cast(p > 0.5, K.floatx())
    return iou_score(t, p)
def iou_metric_forced_binary(t, p):
    """Collapse multi-class output to front/no-front (argmax > 0 means
    'some front') and score IoU on that binary mask."""
    p = K.cast(K.argmax(p, axis=-1) > 0, K.floatx())
    t = K.cast(K.argmax(t, axis=-1) > 0, K.floatx())
    return iou_score(t, p)
def iou_metric_all(t, p):
    """IoU over all 5 classes after one-hot-encoding the argmax prediction."""
    return iou_score(t, K.one_hot(K.argmax(p, axis=-1), 5))
def iou_metric_fronts(t, p):
    """IoU over the 4 front classes only (channel 0 = background excluded)."""
    return iou_score(t[..., 1:], K.one_hot(K.argmax(p, axis=-1), 5)[..., 1:])
def iou_metric_hot(t, p):
    """IoU of the 'hot front' channel (class 1) after one-hot argmax.

    Consistency: pass axis=-1 to K.argmax explicitly, matching
    iou_metric_all/iou_metric_fronts (axis=-1 is K.argmax's default,
    so behaviour is unchanged).
    """
    return iou_score(t[..., 1:2], K.one_hot(K.argmax(p, axis=-1), 5)[..., 1:2])
def iou_metric_cold(t, p):
    """IoU of the 'cold front' channel (class 2) after one-hot argmax.
    axis=-1 made explicit for consistency (it is K.argmax's default)."""
    return iou_score(t[..., 2:3], K.one_hot(K.argmax(p, axis=-1), 5)[..., 2:3])
def iou_metric_stationary(t, p):
    """IoU of the 'stationary front' channel (class 3) after one-hot argmax.
    axis=-1 made explicit for consistency (it is K.argmax's default)."""
    return iou_score(t[..., 3:4], K.one_hot(K.argmax(p, axis=-1), 5)[..., 3:4])
def iou_metric_occlusion(t, p):
    """IoU of the 'occlusion front' channel (class 4) after one-hot argmax.
    axis=-1 made explicit for consistency (it is K.argmax's default)."""
    return iou_score(t[..., 4:5], K.one_hot(K.argmax(p, axis=-1), 5)[..., 4:5])
def weighted_categorical_crossentropy(weights):
    """Build a categorical cross-entropy loss in which each class term
    is scaled by the corresponding entry of `weights`."""
    weight_tensor = K.variable(weights)

    def wcce(y_true, y_pred):
        # clip to keep log() away from 0 and 1 (NaN/Inf protection)
        clipped = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
        weighted = y_true * K.log(clipped) * weight_tensor
        return -K.sum(weighted, -1)
    return wcce
def mixed_loss_gen(class_weights):
    """Combine class-weighted cross-entropy (scaled x5) with Jaccard loss."""
    cross_entropy = weighted_categorical_crossentropy(class_weights)

    def mixed_loss(y_true, y_pred):
        return cross_entropy(y_true, y_pred) * 5 + jaccard_loss(y_true, y_pred)
    return mixed_loss
'''this time it comes to
different kind of movies'''
import os.path
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
# Directory holding per-movie static assets (info.txt, reviews, ...).
STA_DIR = os.path.join(os.path.dirname(__file__), "static")
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
class IndexHandler(tornado.web.RequestHandler):
    '''Render the detail page for one movie: title, aggregated review
    score ("Fresh"/"Rotten") and the individual reviews stored under
    static/<film>/.  (Python 2 code: `print` statements.)'''
    def get(self):
        # which movie to show; defaults to "tmnt"
        __movie_name = self.get_argument('film', 'tmnt')
        print __movie_name
        __movie_path = os.path.join(os.path.dirname(__file__), "static",
                                    __movie_name)
        __movie_info = os.listdir(__movie_path)
        # info.txt: two title lines followed by two review-score lines
        with open(os.path.join(os.path.dirname(__file__), "static",
                               __movie_name, "info.txt"), 'r') as file_:
            __info = file_.readlines()
            __mtitle = [__info[0].strip(), __info[1].strip()]
            __treview = [__info[2].strip(), __info[3].strip()]
            # a score of 60+ counts as "Fresh"
            if int(__treview[0]) >= 60:
                __reicon = 'Fresh'
            else:
                __reicon = 'Rotten'
        # generaloverview.txt: "key:value" lines for the overview table
        with open(os.path.join(os.path.dirname(__file__), "static",
                               __movie_name,
                               "generaloverview.txt"), 'r') as file_:
            __go_views = file_.readlines()
            __go_view = [x.split(':') for x in __go_views]
        # split the review files into two columns for the template
        __per_review1 = []
        __per_review2 = []
        __review_list = [x for x in __movie_info if x.find('review') >= 0]
        length = len(__review_list)
        count = 1
        for item in __review_list:
            # each review file: 4 lines (reviewer, source, quote, date)
            with open(os.path.join(os.path.dirname(__file__), "static",
                                   __movie_name, item), 'r') as file_:
                __review = file_.readlines()
                if count <= (length/2):
                    __per_review1.append([__review[0].strip('\n'),
                                          __review[1].strip('\n'),
                                          __review[2].strip('\n'),
                                          __review[3].strip('\n')])
                else:
                    __per_review2.append([__review[0].strip('\n'),
                                          __review[1].strip('\n'),
                                          __review[2].strip('\n'),
                                          __review[3].strip('\n')])
            count = count+1
        print len(__per_review1), len(__per_review2)
        print __per_review1, __per_review2
        self.render('movie.html',
                    go_view=__go_view,
                    mtitle=__mtitle,
                    treview=__treview,
                    reicon=__reicon,
                    per_review1=__per_review1,
                    per_review2=__per_review2,
                    film=__movie_name,
                    rv_length=length)
if __name__ == '__main__':
    tornado.options.parse_command_line()
    # single-route application; templates/static served from this directory
    APP = tornado.web.Application(
        handlers=[(r'/', IndexHandler)],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
    )
    HTTP_SERVER = tornado.httpserver.HTTPServer(APP)
    HTTP_SERVER.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
|
import spidev
import sys
import RPi.GPIO as GPIO
import time
from threading import Event, Thread, Timer
from influxdb import InfluxDBClient
from multiprocessing import Queue
import datetime
import fcntl
import subprocess
import sys
import random
from dateutil import parser
from dbSetting import *
from struct import *
# --- SPI bus / GPIO setup and module-wide timing state ---
spi_conf = spidev.SpiDev()
spi_conf.open(0, 0)
spi_conf.max_speed_hz = 6250000  # 6.25 MHz SPI clock
spi_conf.mode = 0b00
GPIO.setmode(GPIO.BCM)
# shared state used by the polling loop and the UBX parser below
previousToF = 0
previousOffset = 0
LastFPGATime = 0
offset = 0
Nextoffset = 0
##============FPGA Functions================##
def writePWM(pwm):
    """Write a 16-bit PWM value to FPGA registers 10 and 11.

    NOTE(review): with byteorder='big', pwm[0] is the MOST significant
    byte, so `pwm_high`/`pwm_low` appear swapped here -- confirm against
    the FPGA register map before changing anything.
    """
    pwm = pwm.to_bytes(2, byteorder='big')
    pwm_high = pwm[1]
    pwm_low = pwm[0]
    spi_conf.xfer([10,pwm_low])
    spi_conf.xfer([11,pwm_high])
def writePWMDivider(div):
    """Write a 16-bit PWM clock divider to FPGA registers 12 and 13.

    NOTE(review): same apparent high/low byte swap as in writePWM
    (byteorder='big' puts the MSB at index 0) -- confirm register map.
    """
    div = div.to_bytes(2, byteorder='big')
    div_high = div[1]
    div_low = div[0]
    spi_conf.xfer([12,div_low])
    spi_conf.xfer([13,div_high])
def SequenceDigit():
    """Cycle all nixie digits 0-9 in lock-step (anti cathode-poisoning
    refresh animation), then re-arm itself via a Timer."""
    #Enable Manual Control Nixie
    spi_conf.xfer([0,0x08])
    #Clear Nixie DP
    spi_conf.xfer([8,0x08])
    spi_conf.xfer([9,0x08])
    # NOTE(review): the inner `for x in range(4)` shadows the outer loop
    # variable `x` -- harmless here but confusing.
    for x in range(20):
        for digitno in range(10):
            data = []
            for x in range(4):
                # same digit on both BCD nibbles of each register
                data.append( digitno*16 + digitno)
                pass
            spi_conf.xfer([4,data[0]])
            spi_conf.xfer([5,data[1]])
            spi_conf.xfer([6,data[2]])
            spi_conf.xfer([7,data[3]])
            time.sleep(0.05)
    #Disable Manual Control Nixie
    spi_conf.xfer([14,0x08])
    # NOTE(review): "3600 - sec + min*60" looks like it should be
    # "3600 - sec - min*60" to land on the next hour boundary -- confirm.
    Timer(3600 - int(time.localtime().tm_sec) + int(time.localtime().tm_min) * 60, SequenceDigit, ()).start()
    pass
def randomDigit():
    """Show random digits on all nixie tubes for a few seconds (refresh
    animation), then re-arm itself via a Timer."""
    #Enable Manual Control Nixie
    spi_conf.xfer([0,0x08])
    #Clear Nixie DP
    spi_conf.xfer([8,0x08])
    spi_conf.xfer([9,0x08])
    for x in range(200):
        data = []
        for x in range(4):
            # two independent random BCD digits per register
            data.append( random.randint(0,9)*16 + random.randint(0,9))
            pass
        spi_conf.xfer([4,data[0]])
        spi_conf.xfer([5,data[1]])
        spi_conf.xfer([6,data[2]])
        spi_conf.xfer([7,data[3]])
        time.sleep(0.02)
    #Disable Manual Control Nixie
    spi_conf.xfer([14,0x08])
    # NOTE(review): same next-hour arithmetic question as SequenceDigit.
    Timer(3600 - int(time.localtime().tm_sec) + int(time.localtime().tm_min) * 60, randomDigit, ()).start()
    pass
def writeConf(conf):
    """Write the FPGA configuration register (address 0)."""
    spi_conf.xfer([0,conf])
def writeTime(h,m,s):
    """Set the FPGA clock registers (0x01=seconds, 0x02=minutes,
    0x03=hours), wrapping each component into its valid range first."""
    hours = h % 24
    minutes = m % 60
    seconds = s % 60
    spi_conf.xfer([0x01, seconds])
    spi_conf.xfer([0x02, minutes])
    spi_conf.xfer([0x03, hours])
def readTOF():
    """Read the TDC registers over SPI and return the time-of-flight in
    clock counts: fractional part from the two TDC timestamps normalised
    by the calibration interval, plus the coarse clock count."""
    data = spi_conf.xfer([0x01+128,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
    TDC_TIME1 = data[16]*65536 + data[15]*256 + data[14]
    TDC_TIME2 = data[13]*65536 + data[12]*256 + data[11]
    TDC_CLOCK_COUNT1 = data[10]*65536 + data[9]*256 + data[8]
    TDC_CALIBRATION1 = data[7]*65536 + data[6]*256 + data[5]
    TDC_CALIBRATION2 = data[4]*65536 + data[3]*256 + data[2]
    # calibration counts per reference clock period (9 periods measured)
    calCount = (TDC_CALIBRATION2 - TDC_CALIBRATION1) / 9
    # Fix: removed the unused `normLSB` computation (dead code).
    print(TDC_TIME1,TDC_TIME2,TDC_CLOCK_COUNT1,calCount)
    return (TDC_TIME1-TDC_TIME2)/calCount+TDC_CLOCK_COUNT1
def readFiFo():
    """Return the FIFO element count (16-bit, read register 2)."""
    fifo_count = spi_conf.xfer([128+2,0x00,0x00,0x00])
    return fifo_count[3]*256 + fifo_count[2]
def readTime():
    """Return the FPGA time counter (24-bit, read register 4)."""
    fifo_count = spi_conf.xfer([128+4,0x00,0x00,0x00,0x00])
    return fifo_count[4]*256*256 + fifo_count[3]*256 + fifo_count[2]
def readPPSCourseCounter():
    """Return the 32-bit coarse counter latched on the GPS PPS edge
    (read register 0)."""
    data = spi_conf.xfer([0x00+128,0x00,0x00,0x00,0x00,0x00])
    return data[5]*16777216 + data[4]*65536 + data[3]*256 + data[2]
def readFiFoOverflow():
    """Return the FIFO overflow flag (read register 5)."""
    fifo_count = spi_conf.xfer([128+5,0x00,0x00])
    print(fifo_count)
    return fifo_count[2]
##============FPGA Functions END=============##
##============Upload To InfluxDB================##
# bounded queue between the FPGA/GPS producers and the upload thread
dataQueue = Queue(300)
class uploadDataThread (Thread):
    """Background thread that drains `queue` and writes each batch of
    points to InfluxDB using the connection settings it was given."""

    def __init__(self, ifuser, ifpass, ifdb, ifhost, queue):
        Thread.__init__(self)
        self.ifuser = ifuser
        self.ifpass = ifpass
        self.ifdb = ifdb
        self.ifhost = ifhost
        # `ifport` is not a constructor argument; it comes from the
        # module-level `from dbSetting import *`, as before.
        self.ifport = ifport
        self.queue = queue

    def run(self):
        print("[Upload Thread] Starting")
        # Fix: the client was previously built from the module globals,
        # silently ignoring the values passed to __init__.
        self.ifclient = InfluxDBClient(self.ifhost, self.ifport, self.ifuser,
                                       self.ifpass, self.ifdb,
                                       timeout=2, retries=3)
        while 1:
            val = self.queue.get()
            try:
                self.ifclient.write_points(val)
            except Exception as e:
                print(e)
# start the InfluxDB upload worker (credentials come from dbSetting)
uploadDataThread(ifuser, ifpass, ifdb, ifhost, dataQueue).start()
##============Upload To InfluxDB END=============##

#Setup - Close LED
spi_conf.xfer([0,0x02])
#Setup - Align GPS PPS
spi_conf.xfer([0,0x01])
time.sleep(3)
spi_conf.xfer([14,0x01])
#Setup - Time
# local time = UTC-7; +1 s so the write lands on the upcoming second
timeNow = datetime.datetime.utcnow() + datetime.timedelta(hours=-7) + datetime.timedelta(seconds=1)
writeTime(timeNow.hour,timeNow.minute,timeNow.second)
##============Polling FPGA Function=============##
def readData(dataQueue):
    """Poll the FPGA once: if a new PPS event is pending, read ToF and
    the counters, assemble an InfluxDB point and queue it for upload.
    Retries forever on transient SPI IOErrors; returns after one event
    (or immediately when nothing new is pending)."""
    global previousToF
    global LastFPGATime
    global previousOffset
    global offset
    while 1:
        try:
            FPGATime = readTime()
            if(FPGATime == LastFPGATime):
                return
            # give the FPGA time to latch all registers, then re-read
            time.sleep(0.3)
            FPGATime = readTime()
            print(FPGATime)
            ToF = readTOF() - 5000
            PPSCycle = readPPSCourseCounter()
            # PPS period corrected by this and the previous ToF reading
            PPSDuration = (PPSCycle - ToF + previousToF)
            fifo = readFiFo()
            fifo_overflow_flag = readFiFoOverflow()
            print("%f,%d,%f,%d,%d" % (ToF,PPSCycle,PPSDuration,fifo,fifo_overflow_flag))
            body2 = [
                {
                    "measurement": "FPGA",
                    "time": datetime.datetime.utcnow(),
                    "fields": {
                        "GPS": PPSCycle,
                        "Clock": ToF,
                        "PPS Duration": PPSDuration,
                        "FIFO Count": fifo,
                        "FIFO Overrun": fifo_overflow_flag,
                        "Adjusted Clock": PPSDuration+offset-previousOffset,
                        "Adjusted ToF": ToF+offset
                    }
                }
            ]
            print(body2)
            previousOffset = offset
            previousToF = ToF
            dataQueue.put(body2)
            LastFPGATime = FPGATime
            return
        except IOError:
            # transient SPI error: retry the whole read
            pass
        except Exception as e:
            print(e)
            pass
def call_repeatedly(interval, func, *args):
    """Invoke `func(*args)` every `interval` seconds, aligned to the
    wall clock; returns a zero-argument callable that stops the loop."""
    stopped = Event()
    print("[call_repeatedly] Starting")

    def loop():
        # wait() returns False on timeout and True once `stopped` is set,
        # so the first call happens after at most `interval` seconds.
        while not stopped.wait(interval - time.time() % interval):
            func(*args)

    Thread(target=loop).start()
    return stopped.set
# align the polling loop to the wall clock, then poll the FPGA every 200 ms
time.sleep(1-time.time()%1)
cancel_future_calls = call_repeatedly(0.2, readData, dataQueue)
readData(dataQueue)
##============Polling FPGA Function END==========##
##============Timed Nixie Refresh Animate=============##
#xx:30 min -> Random Digit Animate
#xx:00 min -> Seq Digit Animate
next_switch = 3600 - int(time.localtime().tm_sec) - (int(time.localtime().tm_min)*60)
seq_digit = Timer(next_switch, SequenceDigit, ())
if time.localtime().tm_min>30:
    next_switch_random = (90 - int(time.localtime().tm_min)) * 60 - int(time.localtime().tm_sec)
else:
    next_switch_random = (30 - int(time.localtime().tm_min)) * 60 - int(time.localtime().tm_sec)
randomDigit_t = Timer(next_switch_random, randomDigit, ())
randomDigit_t.start()
seq_digit.start()
##============Timed Nixie Refresh Animate END=============##
##Parsing UBX Package
def Checksum(data):
    """8-bit Fletcher checksum as used by the u-blox UBX protocol.

    Returns CK_B * 256 + CK_A.  Masking per byte is equivalent (mod 256)
    to masking once at the end, so results match the original exactly.
    """
    ck_a = 0
    ck_b = 0
    for byte in data:
        ck_a = (ck_a + byte) & 0xff
        ck_b = (ck_b + ck_a) & 0xff
    return ck_b * 256 + ck_a
flag_UBX = False
# Main loop: parse UBX packets from stdin; on TIM-TP messages, publish
# the quantisation error (qErr) to InfluxDB and update the clock offset.
try:
    while 1:
        data = sys.stdin.buffer.read(1)
        if data == b'\xB5':  # first UBX sync byte
            flag_UBX = True
            SYNC = sys.stdin.buffer.read(1)
            if SYNC != b'\x62':  # second sync byte must follow
                continue
            CLASS = sys.stdin.buffer.read(1)
            ID = sys.stdin.buffer.read(1)
            #print("[GPS Parse]",SYNC,CLASS,ID)
            LENGTH = sys.stdin.buffer.read(2)
            (length,) = unpack('H', LENGTH)
            #print(SYNC,CLASS,ID,LENGTH)
            PAYLOAD = sys.stdin.buffer.read(length)
            CHECKSUM = sys.stdin.buffer.read(2)
            (msgCksum,) = unpack('H',CHECKSUM)
            #print ('{:02x}'.format(msgCksum))
            # checksum covers class, id, length and payload
            DATA = CLASS+ID+LENGTH+PAYLOAD
            print (''.join(format(x, '02x') for x in DATA))
            if CLASS == b'\x0D' and ID == b'\x01': #TIM_TP
                try:
                    (towMS,towSubMS,qErr,week,flags,refInfo) = unpack('IIiHBB', PAYLOAD)
                    print (''.join(format(x, '02x') for x in PAYLOAD))
                    trueCksum = Checksum(DATA)
                    print(towMS,towSubMS,qErr,week,flags,refInfo)
                    if trueCksum != msgCksum:
                        raise Exception(
                            "Calculated checksum 0x{:02x} does not match 0x{:02x}."
                            .format(trueCksum,msgCksum)
                        )
                    qErrStr = str(time.time()) + "," + str(qErr)
                    qErrStr = qErrStr.encode()
                    print(qErrStr)
                    body = [
                        {
                            "measurement": "GPS",
                            "time": datetime.datetime.utcnow(),
                            "fields": {
                                "Next Clock Offset": qErr
                            }
                        }
                    ]
                    print(body)
                    dataQueue.put(body)
                    # the offset takes effect on the NEXT PPS pulse
                    offset = Nextoffset
                    Nextoffset = float(qErr)/100000
                except Exception as e:
                    # bad payload/checksum: publish a zero offset instead
                    print(e)
                    print(CLASS+ID+PAYLOAD)
                    body = [
                        {
                            "measurement": "GPS",
                            "time": datetime.datetime.utcnow(),
                            "fields": {
                                "Next Clock Offset": 0
                            }
                        }
                    ]
                    print(body)
                    dataQueue.put(body)
                    offset = 0
                    pass
except KeyboardInterrupt:
    sys.exit()
import re, json
def validateActionRequest(request):
    """Validate a multipart-form 'action' registration request.

    Expects fields: type, name, language, cloud, in/out (JSON), description,
    timeout, and optionally containerTag. For python actions a 'file' upload
    must be present.

    Returns (True, cleaned_dict) on success, (False, {"error": ...}) otherwise.
    """
    req = request.form.copy().to_dict()
    supportedLanguages = ("python", "docker")
    if not req:
        return (False, {"error": "Not a multipart-form"})
    try:
        if req['type'] != "action":
            return (False, {"error": "Wrong message type"})
        if req['name'] == "":
            return (False, {"error": "Action name needed"})
        if req['language'].lower() not in supportedLanguages:
            return (False, {"error": "Language supported are: " +
                            str(supportedLanguages)})
        if not str(req['cloud']) in ("0", "1", "2"):
            return (False, {"error": "Cloud must have values 0, 1 or 2"})
        req['in/out'] = json.loads(req['in/out'])
        if (type(req['in/out']['in']) != list or
                type(req['in/out']['out']) != list):
            return (False, {"error": "the in/out field must contain the keys in "
                                     "and out with values lists"})
        req['description']   # presence check: raises KeyError if missing
        int(req['timeout'])  # raises ValueError if not an integer
    except KeyError as e:    # fixed: was Python-2-only `except KeyError, e`
        return (False, {"error": "Field '" + str(e) + "' not present"})
    except ValueError:
        return (False, {"error": "Timeout must be an integer, in [ms]"})
    ret = {
        'name': req['name'],
        'description': req['description'],
        'language': req['language'],
        'cloud': str(req['cloud']),
        'timeout': int(req['timeout']),
        'in/out': req['in/out'],
    }
    try:
        ret["contTag"] = req["containerTag"]
        if ret["contTag"] not in ("base", "ffmpeg", "imageProc"):
            return (False, {"error": "'contTag' must be base, ffmpeg or imageProc"})
    except KeyError:
        ret["contTag"] = "base"
    # fixed: compare case-insensitively, consistent with the validation above
    # (previously "Python" passed validation but skipped the file check).
    if req['language'].lower() == "python":
        if 'file' not in request.files:
            return (False, {"error": "No file field!!!"})
        if request.files['file'].filename == '':
            return (False, {"error": "no file selected"})
    # elif req['language'] == "docker":
    #     try:
    #         ret["containerName"] = req["containerName"]
    #     except KeyError as e:
    #         return (False, {"error": "Field '" + str(e) + "' not present. Specify a valid container name"})
    return (True, ret)
def validateNodeRequest(request):
    """Validate a JSON 'node' registration request.

    Required keys: type, name, ip, architecture, setup (bool); when setup is
    true, ssh_user and ssh_password must also be present.

    Returns (True, req) on success, (False, {"error": ...}) otherwise.
    Note: 'type' is popped from req as a side effect.
    """
    req = request.json
    # Raw string avoids deprecated escape handling; NOTE(review): the pattern
    # also accepts a trailing dot ("1.2.3.4.") -- kept to preserve behaviour.
    pattern = re.compile(r'((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}')
    supportedArch = ("arm", "x86")
    if not req:
        return (False, {"error": "Not a JSON"})
    try:
        if req.pop('type') != "node":
            return (False, {"error": "Wrong message type"})
        if req['name'] == "":
            return (False, {"error": "Function name needed"})
        if not pattern.match(req['ip']):
            return (False, {"error": "Invalid IP"})
        if req['architecture'].lower() not in supportedArch:
            return (False, {"error": "Supported architecture are: " +
                            str(supportedArch)})
        if not type(req['setup']) == bool:
            return (False, {"error": "Setup must be a boolean"})
    except KeyError as e:  # fixed: was Python-2-only `except KeyError, e`
        return (False, {"error": "Field '" + str(e) + "' not present"})
    if req['setup']:
        try:
            req['ssh_user']      # presence checks for setup credentials
            req['ssh_password']
        except KeyError as e:
            return (False, {"error": "Field '" + str(e) + "' not present"})
    return (True, req)
def cleanUpNode(req):
    """Strip *req* in place down to the whitelisted node fields; return it."""
    fields = ("name", "ip", "architecture", "role")
    # Iterate over a snapshot: deleting while iterating a live dict view
    # raises RuntimeError on Python 3 (dict changed size during iteration).
    for k in list(req.keys()):
        if k not in fields:
            del req[k]
    return req
def validateSequence(request):
    """Validate a JSON 'sequence' registration request.

    Required keys: type, name, in/out ('in' list; 'out' list or dict),
    sequence, description. Returns (True, req) or (False, {"error": ...}).
    Note: 'type' is popped from req as a side effect.
    """
    req = request.json
    if not req:
        return (False, {"error": "Not a JSON"})
    try:
        if req.pop('type') != "sequence":
            return (False, {"error": "Wrong message type"})
        if req['name'] == "":
            return (False, {"error": "Sequence name needed"})
        if (type(req['in/out']['in']) != list or
                type(req['in/out']['out']) not in (list, dict)):
            return (False, {"error": "the in/out field must contain the keys in "
                                     "and out with values lists"})
        req['sequence']     # presence check
        req['description']  # presence check
    except KeyError as e:   # fixed: was Python-2-only `except KeyError, e`
        return (False, {"error": "Field '" + str(e) + "' not present"})
    return (True, req)
def cleanUpSeq(req):
    """Strip *req* in place down to the whitelisted sequence fields; return it."""
    fields = ("name", "description", "sequence", "in/out")
    # Snapshot the keys: deleting while iterating a live dict view raises
    # RuntimeError on Python 3.
    for k in list(req.keys()):
        if k not in fields:
            del req[k]
    return req
def validateInvoke(request):
    """Validate a JSON invoke request and resolve action classes to memory.

    'default.actionClass' and any per-action override in 'except' must be one
    of small/medium/large; each is annotated with the matching memory size.
    'optimise' defaults to True when absent or not a true/false string.

    Returns (True, req) or (False, {"error": ...}).
    """
    req = request.json
    actionClasses = {"small": 128,
                     "medium": 256,
                     "large": 512}
    if not req:
        return (False, {"error": "Not a JSON"})
    try:
        req["name"]  # presence check
        if type(req['param']) != dict:
            return (False, {"error": "'param' must contain a formatted json"})
        defClass = req['default']['actionClass']
        if (defClass not in actionClasses):
            return (False, {"error": "actionClass must be 'small', 'medium' or 'large'"})
    except KeyError as e:  # fixed: was Python-2-only `except KeyError, e`
        return (False, {"error": "Field '" + str(e) + "' not present"})
    req['default']['memory'] = actionClasses[defClass]
    try:
        # NOTE(review): a malformed entry (missing 'actionClass') wipes ALL
        # overrides via the except below -- preserved original behaviour.
        for e in req["except"]:
            eClass = req["except"][e]['actionClass']
            if (eClass not in actionClasses):
                return (False, {"error": "actionClass must be 'small', 'medium' or 'large'"})
            req["except"][e]['memory'] = actionClasses[eClass]
    except KeyError:
        req["except"] = {}
    try:
        req["optimise"] = req["optimise"].lower()
        if req["optimise"] == "true":
            req["optimise"] = True
        elif req["optimise"] == "false":
            req["optimise"] = False
        else:
            return (False, {"error": "'optimise' must be 'true' or 'false'"})
    except (KeyError, AttributeError):
        # Narrowed from a bare `except:`: missing key or non-string value
        # defaults to True; anything else should surface, not be swallowed.
        req["optimise"] = True
    return (True, req)
def validateAWS(request):
    """Validate AWS credential registration: access keys plus an IAM role ARN.

    Returns (True, req) or (False, {"error": ...}).
    """
    req = request.json
    # Raw string: '\d' and '\-' in a plain string are invalid escapes
    # (DeprecationWarning, then SyntaxError in newer Pythons).
    pattern = re.compile(r"arn:(aws|aws-us-gov):iam::\d{12}:role/?[a-zA-Z_0-9+=,.@\-_/]+")
    if not req:
        return (False, {"error": "Not a JSON"})
    try:
        req["accessKeyID"]     # presence check
        req["secretAccessID"]  # presence check
        if not pattern.match(req["ARN"]):
            return (False, {"error": "ARN incorrect"})
    except KeyError as e:  # fixed: was Python-2-only `except KeyError, e`
        return (False, {"error": "Field '" + str(e) + "' not present"})
    return (True, req)
|
#coding:utf-8
#!/usr/bin/env python
import random
from gclib.utility import is_same_day, currentTime
from gclib.json import json
from gclib.curl import curl
from game.utility.config import config
from game.routine.drop import drop
from cardgame.settings import ARENE_SERVER, SIGLE_SERVER
class tower:
    """Climbing-tower minigame: per-user run state, daily reset, awards and
    ladder ranking. All methods are static and operate on usr.tower, a plain
    dict persisted on the user object.

    NOTE: this module is Python 2 (has_key, gclib imports). Two fixes applied:
    make() used the misspelled key 'tiems' while fail()/dayUpdate() use
    'times' (KeyError for fresh users), and the debug print in beat() is now
    written in a form that parses under both Python 2 and 3 with identical
    Python 2 output.
    """
    @staticmethod
    def make():
        """Create the initial tower state dict for a new user."""
        # fixed: key was 'tiems' (typo); fail() and times() read 'times'.
        return {'times':0, 'last_update':0, 'record':[], 'current':{}, 'floor_score':[], 'floor_point':[], 'max_floor':0, 'max_point':0, 'last_max_floor':0, 'last_max_point':0, 'ladder_position':0, 'ladder_rank_level':0}
    @staticmethod
    def make_data():
        """Create the per-run ('current') data dict."""
        return {'floor':0, 'point':0, 'energy':0, 'score':0, 'strength':0, 'intelligence':0, 'artifice':0}
    @staticmethod
    def getClientData(usr):
        """Build the client-facing tower data dict for *usr* (runs dayUpdate first)."""
        now = currentTime()
        tower.dayUpdate(usr, now)
        gameConf = config.getConfig('game')
        data = {}
        data['tower_floor'] = -1  # -1 signals "no run in progress"
        if usr.tower['current']:
            data['tower_floor'] = usr.tower['current']['floor']
            data['tower_point'] = usr.tower['current']['point']
            data['tower_energy'] = usr.tower['current']['energy']
            data['tower_strength'] = usr.tower['current']['strength']
            data['tower_intelligence'] = usr.tower['current']['intelligence']
            data['tower_artifice'] = usr.tower['current']['artifice']
        else:
            data['tower_point'] = 0
            data['tower_energy'] = 0
            data['tower_strength'] = 0
            data['tower_intelligence'] = 0
            data['tower_artifice'] = 0
        if usr.tower.has_key('ladder_position'):
            data['tower_ladder_position'] = usr.tower['ladder_position']
        else:
            data['tower_ladder_position'] = -1
        # Markup bonus is only offered below a configured ladder position.
        if (not usr.tower.has_key('ladder_position') or usr.tower['ladder_position'] > gameConf['tower_no_markup_ladder_position']) and usr.tower['max_floor']:
            towerMarkupConf = config.getConfig('tower_markup')
            data['tower_markup'] = towerMarkupConf[usr.tower['max_floor']]
        data['tower_max_point'] = usr.tower['max_point']
        data['tower_max_floor'] = usr.tower['max_floor']
        data['tower_last_max_floor'] = usr.tower['last_max_floor']
        #data['tower_last_max_point'] = usr.tower['last_max_point']
        data['tower_times'] = tower.times(usr, gameConf)
        if usr.tower['current'].has_key('enhance'):
            data['tower_enhance'] = usr.tower['current']['enhance']
        else:
            data['tower_enhance'] = []
        return data
    @staticmethod
    def start(usr, markup):
        """Start a tower run, optionally applying a markup bonus."""
        if usr.tower['current']:
            return {'msg':'tower_not_finished'}
        gameConf = config.getConfig('game')
        if gameConf['tower_times'] - len(usr.tower['record']) <= 0:
            return {'msg':'tower_max_times'}
        usr.tower['current'] = tower.make_data()
        # NOTE(review): markup is cast to int here, but do_markup() compares
        # against the strings 'strength'/'intelligence'/'artifice', so those
        # branches can never match -- confirm what the client actually sends.
        markup = int(markup)
        tower.do_markup(usr, markup)
        usr.save()
        data = {}
        data['tower_point'] = usr.tower['current']['point']
        data['tower_energy'] = usr.tower['current']['energy']
        data['tower_strength'] = usr.tower['current']['strength']
        data['tower_intelligence'] = usr.tower['current']['intelligence']
        data['tower_artifice'] = usr.tower['current']['artifice']
        data['tower_times'] = tower.times(usr, gameConf)
        data['tower_floor'] = 0
        return data
    @staticmethod
    def beat(usr, difficulty, star, dp, ehc):
        """Handle a floor victory: score, record tracking, awards and drops.

        difficulty/star scale the points earned; dp indicates a monster drop;
        ehc (-1 = none) is the enhancement chosen on an enhancement floor.
        """
        if not usr.tower['current']:
            return {'msg':'tower_not_start'}
        if usr.tower['current'].has_key('enhance') and (ehc == -1):
            return {'msg':'tower_enhance_required'}
        gameConf = config.getConfig('game')
        towerAwardConf = config.getConfig('tower_award')
        towerMonster = config.getConfig('tower_monster')
        usr.tower['current']['point'] = usr.tower['current']['point'] + star * difficulty
        usr.tower['current']['energy'] = usr.tower['current']['energy'] + star * difficulty
        usr.tower['current']['score'] = usr.tower['current']['score'] + star * difficulty
        usr.tower['current']['floor'] = usr.tower['current']['floor'] + 1
        if usr.tower['max_point'] < usr.tower['current']['point']:
            usr.tower['max_point'] = usr.tower['current']['point']
        # Grow the per-floor best lists up to the current floor.
        while len(usr.tower['floor_score']) < usr.tower['current']['floor']:
            usr.tower['floor_score'].append(0)
        while len(usr.tower['floor_point']) < usr.tower['current']['floor']:
            usr.tower['floor_point'].append(0)
        newPoint = False
        if usr.tower['floor_point'][usr.tower['current']['floor'] - 1] < usr.tower['current']['point']:
            newPoint = True
        newScore = False
        if usr.tower['floor_score'][usr.tower['current']['floor'] - 1] < usr.tower['current']['score']:
            newScore = True
        # fixed: was the Py2 statement `print a, b`; this form produces the
        # same output under Py2 and also parses under Py3.
        print('%s %s' % (usr.tower['floor_score'][usr.tower['current']['floor'] - 1], usr.tower['current']['score']))
        data = {}
        if ehc != -1:
            if not usr.tower['current'].has_key('enhance'):
                return {'msg':'tower_enhance_not_exsit'}
            tower.do_enhance(usr, ehc, gameConf)
        enhance = []
        if usr.tower['current']['floor'] % gameConf['tower_enhance_interval_floor'] == 0:
            enhance = tower.make_enhance_list()
            usr.tower['current']['enhance'] = enhance
        if usr.tower['current']['floor'] % gameConf['tower_award_interval_floor'] == 0:
            towerAwardInfo = towerAwardConf[str(usr.tower['current']['floor'])]
            if newPoint:
                awd = {}
                awd = drop.open(usr, towerAwardInfo[1], awd)
                data = drop.makeData(awd, data,'top_drop')
                usr.tower['floor_point'][usr.tower['current']['floor'] - 1] = usr.tower['current']['point']
            if newScore:
                # NOTE(review): score > 45 awards BOTH the [3] and [2] drops
                # (no elif) -- confirm stacking is intended.
                if usr.tower['current']['score'] > 45:
                    awd = {}
                    awd = drop.open(usr, towerAwardInfo[3], awd)
                    data = drop.makeData(awd, data, 'record_drop')
                if usr.tower['current']['score'] > 30:
                    awd = {}
                    awd = drop.open(usr, towerAwardInfo[2], awd)
                    data = drop.makeData(awd, data, 'record_drop')
                usr.tower['floor_score'][usr.tower['current']['floor'] - 1] = usr.tower['current']['score']
            # Score accumulates between award floors, then resets.
            usr.tower['current']['score'] = 0
        if usr.tower['max_floor'] < usr.tower['current']['floor']:
            usr.tower['max_floor'] = usr.tower['current']['floor']
        if dp:
            awd = {}
            awd = drop.open(usr, towerMonster[usr.tower['current']['floor']]['dropid'], awd)
            data = drop.makeData(awd, data, 'random_drop')
        usr.save()
        if enhance:
            data['tower_enhance'] = enhance
        data['tower_point'] = usr.tower['current']['point']
        data['tower_energy'] = usr.tower['current']['energy']
        data['tower_strength'] = usr.tower['current']['strength']
        data['tower_intelligence'] = usr.tower['current']['intelligence']
        data['tower_artifice'] = usr.tower['current']['artifice']
        data['tower_floor'] = usr.tower['current']['floor']
        data['tower_max_floor'] = usr.tower['max_floor']
        data['tower_max_point'] = usr.tower['max_point']
        return data
    @staticmethod
    def do_enhance(usr, ehc, gameConf):
        """Apply the chosen enhancement (0=strength, 1=intelligence, 2=artifice)
        and deduct its point cost when it appears in the probability table."""
        point = 0
        if ehc == 0:
            usr.tower['current']['strength'] = usr.tower['current']['strength'] + usr.tower['current']['enhance'][0]
            point = usr.tower['current']['enhance'][0]
        elif ehc == 1:
            usr.tower['current']['intelligence'] = usr.tower['current']['intelligence'] + usr.tower['current']['enhance'][1]
            point = usr.tower['current']['enhance'][1]
        elif ehc == 2:
            usr.tower['current']['artifice'] = usr.tower['current']['artifice'] + usr.tower['current']['enhance'][2]
            point = usr.tower['current']['enhance'][2]
        del usr.tower['current']['enhance']
        for item in gameConf['tower_enhance_probability']:
            if item['point'] == point:
                usr.tower['current']['point'] = usr.tower['current']['point'] - point
    @staticmethod
    def do_markup(usr, mkp):
        """Apply the markup bonus chosen at the start of a run."""
        if mkp == 0:
            return
        if usr.tower['max_floor'] == 0:
            return
        towerMarkupConf = config.getConfig('tower_markup')
        markup = towerMarkupConf[usr.tower['max_floor']]
        # NOTE(review): callers pass int(markup); these string comparisons can
        # never be true, and the looked-up `markup` above is unused -- confirm.
        if mkp == 'strength':
            usr.tower['current']['strength'] = usr.tower['current']['strength'] + usr.tower['last_max_floor']
        if mkp == 'intelligence':
            usr.tower['current']['intelligence'] = usr.tower['current']['intelligence'] + usr.tower['last_max_floor']
        if mkp == 'artifice':
            usr.tower['current']['artifice']= usr.tower['current']['artifice'] + usr.tower['last_max_floor']
    @staticmethod
    def fail(usr):
        """End the current run as a failure: archive it and update ladder rank."""
        if not usr.tower['current']:
            return {'msg':'tower_not_start'}
        res = tower.stand(usr)
        if usr.tower['max_floor'] < usr.tower['current']['floor']:
            usr.tower['max_floor'] = usr.tower['current']['floor']
        usr.tower['record'].append(usr.tower['current'])
        usr.tower['current'] = {}
        usr.save()
        if not res.has_key('msg'):
            if usr.tower['ladder_position'] < res['position']:
                usr.tower['ladder_position'] = res['position']
            if usr.tower['ladder_rank_level'] < res['rank_level']:
                usr.tower['ladder_rank_level'] = res['rank_level']
        return {'tower_max_floor': usr.tower['max_floor'], 'tower_max_point':usr.tower['max_point'], 'tower_times': usr.tower['times']}
    @staticmethod
    def make_enhance_list():
        """Build a 3-option enhancement list, guaranteeing a 2-point option."""
        point = []
        for i in range(3):
            point.extend(random.sample([2, 10, 20], 1))
        if 2 not in point:
            idx = random.randint(0, 2)
            point[idx] = 2
        return point
    @staticmethod
    def current_floor(usr):
        """Current floor of the active run, or 0 when no run is in progress."""
        if usr.tower['current']:
            return usr.tower['current']['floor']
        return 0
    @staticmethod
    def dayUpdate(usr, now):
        """Daily reset: archive the active run and clear per-day state."""
        if not is_same_day(usr.tower['last_update'], now):
            if usr.tower['current']:
                usr.tower['record'].append(usr.tower['current'])
            usr.tower['max_floor'] = 0
            floor = 0
            point = 0
            # NOTE(review): floor/point are never updated inside the loop, so
            # last_max_* end up as the LAST record's values rather than the
            # maximum -- confirm whether this is intended.
            for rd in usr.tower['record']:
                if rd['floor'] > floor:
                    usr.tower['last_max_floor'] = rd['floor']
                if rd['point'] > point:
                    usr.tower['last_max_point'] = rd['point']
            usr.tower['current'] = {}
            usr.tower['record'] = []
            usr.tower['times'] = 0
            usr.tower['max_floor'] = 0
            usr.tower['last_update'] = now
            usr.tower['floor_point'] = []
    @staticmethod
    def stand(usr):
        """Submit the current run to the ladder ranking (local or remote server)."""
        if SIGLE_SERVER:
            from arenarank.routine.tower import tower as towerR
            return towerR.stand(str(usr.roleid), usr.name, usr.level, usr.tower['current']['point'], tower.current_floor(usr))
        else:
            return json.loads(curl.url(ARENE_SERVER + '/arena/tower_stand/', None, {'roleid': usr.roleid, 'level': usr.level, 'point': usr.tower['current']['point'], 'name':usr.name, 'floor':tower.current_floor(usr)}))
    @staticmethod
    def show_ladder(usr):
        """Fetch the ladder listing (local or remote server)."""
        if SIGLE_SERVER:
            from arenarank.routine.tower import tower as towerR
            return {'tower_ladder':towerR.show_ladder()}
        else:
            return {'tower_ladder':json.loads(curl.url(ARENE_SERVER + '/arena/tower_show/', None, {'roleid': usr.roleid, 'level': usr.level}))}
    @staticmethod
    def times(usr, gameConf):
        """Remaining tower attempts today (active run counts as consumed)."""
        towerTimes = gameConf['tower_times'] - len(usr.tower['record'])
        if usr.tower['current']:
            towerTimes = towerTimes - 1
        return towerTimes
|
from typing import List, Literal, Optional
from uuid import UUID
from pydantic import BaseModel, Field, validator
from pathlib import Path
from datetime import datetime, timedelta
import pandas as pd
import numpy as np
import json
import uuid
import logging
import os
import orjson
import typing
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import JSONResponse
from fastapi import Depends, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from auth import User, Token, authenticate_user, fake_users_db, create_access_token, get_current_active_user, get_password_hash
from model import Model, get_model, n_features
app = FastAPI()  # ASGI application instance served by uvicorn/hypercorn
logger = logging.getLogger(__name__)  # module-level logger
@app.get("/items/{item_id}")
def read_item(item_id: int, q: Optional[str] = None):
return {"item_id": item_id, "q": q}
class PredictRequest(BaseModel):
    # Request body: a batch of data points, each expected to hold exactly
    # n_features floats (n_features comes from the model module).
    data: List[List[float]]

    @validator("data")
    def check_dimensionality(cls, v):
        """Reject any data point whose length differs from the model's n_features."""
        for point in v:
            if len(point) != n_features:
                raise ValueError(f"Each data point must contain {n_features} features")
        return v
class PredictResponse(BaseModel):
    # Response body: one prediction per input data point.
    data: List[float]
@app.post("/predict/", response_model=PredictResponse)
def predict(input: PredictRequest, model: Model = Depends(get_model), current_user: User = Depends(get_current_active_user)):
"""
Endpoint to predictions from ML model.
Parameters
----------
input : PredictRequest
PredictRequest according to class definition.
Returns
----------
prediction : PredictResponse
PredictResponse according to class definition
"""
try:
X = np.array(input.data)
y_pred = model.predict(X)
prediction = PredictResponse(data=y_pred.tolist())
except:
raise HTTPException(status_code=500, detail=f"Could not estimate with input: {input}")
return prediction
@app.post("/token", response_model=Token)
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
user = authenticate_user(fake_users_db, form_data.username, form_data.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_expires = timedelta(minutes=30)
access_token = create_access_token(
data={"sub": user.username}, expires_delta=access_token_expires
)
return {"access_token": access_token, "token_type": "bearer"}
|
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
import json
import os
from .config import JSON_FILE_URL, LOG_FILE_URL, STORAGE_DIR
from .util_functions import DoProcessing, FindContentForFile, FindContentForAll
@csrf_exempt
def home( request ):
    ''' Returns html page for home page '''
    # NOTE(review): obj is unused here; DoProcessing presumably performs
    # side-effect initialisation -- confirm before removing.
    obj = DoProcessing( JSON_FILE_URL, LOG_FILE_URL, STORAGE_DIR )
    return render( request, 'home.html' )
@csrf_exempt
def invokeCreationOfFiles( request ):
    ''' Invokes creation of the storage directory and files; returns a status page. '''
    obj = DoProcessing( JSON_FILE_URL, LOG_FILE_URL, STORAGE_DIR )
    error = obj.createDirectoryAndFiles()
    if error :
        return render( request, 'error.html', status = 500 )
        # To get a JSON response on Postman or Swagger, comment out the above
        # line and uncomment the below line.
        # return JsonResponse( {'error': 'error'}, status = 500 )
    return render( request, 'invoke.html', status = 200 )
    # To get a JSON response on Postman or Swagger, comment out the above
    # line and uncomment the below line. (Fixed: these notes were bare
    # triple-quoted string STATEMENTS, one unreachable after return.)
    # return JsonResponse( {'Success': 'Success'}, status = 200 )
@csrf_exempt
def getListOfAllFiles( request ):
    ''' Returns html page showing the list of all files present in storage. '''
    obj = FindContentForAll( STORAGE_DIR )
    contentDictList = obj.getContentForAll()
    context = {}
    context[ 'rows' ] = contentDictList
    if not contentDictList :  # idiom: empty list is falsy (was len(...) == 0)
        return render( request, 'empty.html', status = 404 )
        # To get a JSON response on Postman or Swagger, comment out the above
        # line and uncomment the below line.
        # return JsonResponse( context, status = 404 )
    return render( request, 'listoffiles.html', context, status = 200 )
    # To get a JSON response on Postman or Swagger, comment out the above
    # line and uncomment the below line. (Fixed: these notes were bare
    # triple-quoted string STATEMENTS, one unreachable after return.)
    # return JsonResponse( context, status = 200)
@csrf_exempt
def getContentOfFile( request ):
    ''' Returns html page showing the content of a single file. '''
    # Required query params (KeyError -> 500 if absent, as before).
    folderName = request.GET[ 'Folder Name' ]
    fileName = request.GET[ 'File Name' ]
    obj = FindContentForFile( STORAGE_DIR, folderName, fileName )
    ( error, message ) = obj.getFileContent()
    message = message.replace( "\n", "<br>" )
    messageDict = { 'message' : message }
    if error :
        return render( request, 'wrongname.html', status = 400 )
        # To get a JSON response on Postman or Swagger, comment out the above
        # line and uncomment the below line.
        # return JsonResponse( messageDict, status = 400 )
    return render( request, 'message.html', messageDict, status = 200 )
    # To get a JSON response on Postman or Swagger, comment out the above
    # line and uncomment the below line. (Fixed: these notes were bare
    # triple-quoted string STATEMENTS, one unreachable after return.)
    # return JsonResponse( messageDict, status = 200 )
|
from ..lib.Qt import QtCore, QtGui, QtWidgets
import playblast_ui as customUI
from . import playblast_utils
from maya import cmds
import pymel.core as pm
try:
from shiboken import wrapInstance
except:
from shiboken2 import wrapInstance
import maya.OpenMayaUI as omui
reload(playblast_utils)  # Py2 builtin reload: pick up edits while iterating in Maya
reload(customUI)
# Public API of this module.
__all__ = [
    'launch',
]
def maya_main_window():
    """Return Maya's main window wrapped as a QWidget (Py2: pointer cast via long())."""
    main_window_ptr = omui.MQtUtil.mainWindow()
    return wrapInstance(long(main_window_ptr), QtWidgets.QWidget)
class ControlMainWindow(QtWidgets.QDialog):
    """Playblast dialog: mirrors a Playblaster instance's settings in the UI.

    Each widget change is forwarded to the underlying playblast_utils.Playblaster,
    which performs the actual Maya playblast. Python 2 module (print statement,
    long() in the companion helper).
    """
    version = "1.1"
    def __init__(self, parent=None):
        super(ControlMainWindow, self).__init__(parent)
        self.playblaster = playblast_utils.Playblaster()
        self.setWindowFlags(QtCore.Qt.Tool)
        self.ui = customUI.Ui_playblast_dlg()
        self.ui.setupUi(self)
        # Setup GUI values from the Playblaster's current settings.
        title = self.ui.title_lbl.text()
        self.ui.title_lbl.setText("%s V%s" % (title, self.version))
        self.ui.filename_le.setText(self.playblaster.filename)
        self.ui.start_le.setText(str(self.playblaster.start))
        self.ui.end_le.setText(str(self.playblaster.end))
        self.ui.width_le.setText(str(self.playblaster.w))
        self.ui.height_le.setText(str(self.playblaster.h))
        self.ui.hud_chk.setChecked(self.playblaster.hud)
        self.ui.frameHud_chk.setChecked(self.playblaster.hud_frame_chk)
        self.ui.cstmHud_chk.setChecked(self.playblaster.custom_hud_chk)
        self.ui.clearViewport_chk.setChecked(self.playblaster.clean_vp)
        self.ui.green_chk.setChecked(self.playblaster.green)
        self.ui.filename_le.setEnabled(self.playblaster.editname)
        self.ui.overwrite_chk.setChecked(self.playblaster.overwrite)
        self.ui.offscreen_chk.setChecked(self.playblaster.offscreen)
        self.ui.cam_chk.setChecked(self.playblaster.hidecameragates)
        self.set_button_color()
        self.custom_hud_chk()
        self.green_chk()
        # Wire UI signals to their handler methods.
        self.ui.filename_le.textChanged.connect(self.filename)
        self.ui.editName_chk.clicked.connect(self.editname)
        self.ui.overwrite_chk.clicked.connect(self.overwrite)
        self.ui.offscreen_chk.clicked.connect(self.offscreen)
        self.ui.width_le.textChanged.connect(self.width)
        self.ui.height_le.textChanged.connect(self.height)
        self.ui.start_le.textChanged.connect(self.start)
        self.ui.end_le.textChanged.connect(self.end)
        self.ui.hud_chk.clicked.connect(self.hud)
        self.ui.frameHud_chk.clicked.connect(self.frameHud)
        self.ui.cstmHud_chk.clicked.connect(self.custom_hud)
        self.ui.cstmHud_le.textChanged.connect(self.custom_hud_text)
        self.ui.clearViewport_chk.clicked.connect(self.clean_vp)
        self.ui.green_chk.clicked.connect(self.green)
        self.ui.playblast_btn.clicked.connect(self.playblast)
        self.ui.hud_chk.clicked.connect(self.custom_hud_chk)
        self.ui.green_chk.clicked.connect(self.green_chk)
        self.ui.color_btn.clicked.connect(self.set_color)
        self.ui.loadRender_btn.clicked.connect(self.load_render)
        self.ui.reset_btn.clicked.connect(self.reset)
        self.get_custom_hud_text()
    def playblast(self):
        # Persist the custom HUD text in a Maya optionVar, then run the playblast.
        cmds.optionVar(sv=("pbCustomHud", self.ui.cstmHud_le.text()))
        self.playblaster.playblast()
    def green_chk(self):
        # Toggle "green screen" mode: it owns the filename and disables the
        # HUD/viewport-related options while active.
        is_checked = self.ui.green_chk.checkState()
        if not is_checked:
            filename = self.playblaster.pb_filename()
            self.ui.filename_le.setText(filename)
            self.ui.cstmHud_chk.setDisabled(False)
            self.ui.cstmHud_le.setDisabled(False)
            self.ui.cam_chk.setDisabled(False)
            self.ui.frameHud_chk.setDisabled(False)
            self.ui.hud_chk.setDisabled(False)
            self.ui.clearViewport_chk.setDisabled(False)
        else:
            filename = self.playblaster.pb_filename()
            self.ui.filename_le.setText(filename)
            self.ui.cstmHud_chk.setDisabled(True)
            self.ui.cam_chk.setDisabled(True)
            self.ui.cstmHud_le.setDisabled(True)
            self.ui.hud_chk.setDisabled(True)
            self.ui.frameHud_chk.setDisabled(True)
            self.ui.clearViewport_chk.setDisabled(True)
    def offscreen(self):
        # Mirror the offscreen checkbox into the Playblaster flag.
        isChecked = self.ui.offscreen_chk.checkState()
        if isChecked:
            self.playblaster.offscreen = True
        else:
            self.playblaster.offscreen = False
    def custom_hud_chk(self):
        # Custom HUD controls are only available while the main HUD is on.
        is_checked = self.ui.hud_chk.checkState()
        if is_checked:
            self.ui.cstmHud_chk.setEnabled(True)
            self.ui.cstmHud_le.setEnabled(True)
        else:
            self.ui.cstmHud_chk.setDisabled(True)
            self.ui.cstmHud_le.setDisabled(True)
    def filename(self):
        self.playblaster.filename = self.ui.filename_le.text()
    def editname(self):
        # Allow manual filename editing only when the checkbox is ticked.
        is_checked = self.ui.editName_chk.checkState()
        if is_checked:
            self.ui.filename_le.setDisabled(False)
        else:
            self.ui.filename_le.setDisabled(True)
    def load_render(self):
        # Copy the scene's render resolution into the width/height fields.
        w, h = self.playblaster.render_resolution()
        self.ui.width_le.setText(w)
        self.ui.height_le.setText(h)
    def overwrite(self):
        self.playblaster.overwrite = self.ui.overwrite_chk.checkState()
    def height(self):
        self.playblaster.h = int(self.ui.height_le.text())
    def width(self):
        self.playblaster.w = int(self.ui.width_le.text())
    def start(self):
        self.playblaster.start = int(self.ui.start_le.text())
    def end(self):
        self.playblaster.end = int(self.ui.end_le.text())
    def hud(self):
        self.playblaster.hud = self.ui.hud_chk.checkState()
    def frameHud(self):
        self.playblaster.hud_frame_chk = self.ui.frameHud_chk.checkState()
    def custom_hud(self):
        isChecked = self.ui.cstmHud_chk.checkState()
        if isChecked:
            self.playblaster.custom_hud_chk = True
        else:
            self.playblaster.custom_hud_chk = False
        # Py2 debug print -- left as-is (module is Python 2).
        print self.playblaster.custom_hud_chk
    def get_custom_hud_text(self):
        # Restore previously saved custom HUD text from the Maya optionVar.
        if cmds.optionVar(exists="pbCustomHud"):
            self.playblaster.custom_hud_text = cmds.optionVar(q="pbCustomHud")
            self.ui.cstmHud_le.setText(self.playblaster.custom_hud_text)
    def custom_hud_text(self):
        # Typing any custom HUD text implicitly enables the custom HUD.
        self.playblaster.custom_hud_text = self.ui.cstmHud_le.text()
        if self.ui.cstmHud_le.text():
            self.ui.cstmHud_chk.setChecked(True)
        else:
            self.ui.cstmHud_chk.setChecked(False)
    def clean_vp(self):
        self.playblaster.clean_vp = self.ui.clearViewport_chk.checkState()
    def green(self):
        self.playblaster.green = self.ui.green_chk.checkState()
    def set_button_color(self, color=None):
        # This function sets the color on the color picker button
        if not color:
            color = self.playblaster.default_color
        else:
            self.playblaster.default_color = color
        assert len(color) == 3, "You must provide a list of 3 colors"
        # Qt expects it in integer values from 0 to 255
        r, g, b = [c * 255 for c in color]
        self.ui.color_btn.setStyleSheet('background-color: rgba(%s, %s, %s, 1.0);' % (r, g, b))
    def set_color(self):
        color = self.playblaster.default_color
        # Then we provide this to the maya's color editor which gives us back the color the user specified
        color = pm.colorEditor(rgbValue=color)
        # Annoyingly, it gives us back a string instead of a list of numbers.
        # So we split the string, and then convert it to floats
        r, g, b, a = [float(c) for c in color.split()]
        # We then use the r,g,b to set the colors on the light and the button
        color = (r, g, b)
        self.set_button_color(color)
    def reset(self):
        # Forget the saved HUD text and rebuild the dialog from defaults.
        cmds.optionVar(remove="pbCustomHud")
        launch()
#############################################################################################################
def launch():
    # Create (or re-create) the module-level singleton dialog and show it.
    global dialog
    #if dialog is None:
    #    dialog = ControlMainWindow(parent=maya_main_window())
    #dialog.show()
    try:
        if dialog:
            delete()
    except:
        # `dialog` is not defined on the very first launch (NameError) --
        # nothing to delete in that case.
        pass
    dialog = ControlMainWindow(parent=maya_main_window())
    dialog.show()
def delete():
    """Destroy the module-level dialog (if any) and clear the reference."""
    global dialog
    if dialog is not None:
        dialog.deleteLater()
        dialog = None
|
import sys
from pythonlib import ConnectionManager
import pyinotify
import json
import requests
import os
from threading import Thread
def main():
    # CLI: argv[1] = stream name, argv[2:] = output stream variants.
    name = sys.argv[1]
    streamingServerIP = "http://192.168.1.8:2222/"
    streamsOut = sys.argv[2:]
    def createFolders():
        # Register this stream (and its output variants) with the streaming server.
        post = {
            "name": name,
            "streamsOut": streamsOut
        }
        requests.post(streamingServerIP, json=post)
    #######################
    def streamVideo(path):
        # Called per completed video chunk: send it to the FaaS platform for
        # processing, then publish the resulting file ids to the server.
        sys.stdout.write(
            'video complete: {}\n'.format(path)
        )
        sys.stdout.flush()
        cm = ConnectionManager("192.168.1.50")
        param = {
            "videoId": "",
            "inConf": "",
            "outConf": "-f mpegts -vf hue=s=0 ",
            "namePrefix": ""}
        res = cm.invoker.invoke("streamProcess", param, "small",
            {"streamProcess": {"actionClass": "large"}}, filePath=path, paramID="videoId")
        if res[0] != 200:
            # Single retry on a non-200 response.
            res = cm.invoker.invoke("streamProcess", param, "small",
                {"streamProcess": {"actionClass": "large"}}, filePath=path, paramID="videoId")
        print res
        idsOut = json.loads(res[1])["fileIds"]
        filename = os.path.basename(path)
        for stream in idsOut:
            addr = streamingServerIP + name + "/" + stream + "/" + filename
            requests.post(addr, json={"id": idsOut[stream]})
            # Chunk "...002" signals the stream is ready to start playing.
            if filename.split(".")[0].endswith("002"):
                addr = streamingServerIP + name + "/" + stream
                requests.get(addr)
    class VideoComplete(pyinotify.ProcessEvent):
        # Fire off processing on a worker thread for every finished file.
        def process_IN_MOVED_TO(self, event):
            Thread(target=streamVideo, args=(event.pathname,)).start()
        def process_IN_CLOSE_WRITE(self, event):
            Thread(target=streamVideo, args=(event.pathname,)).start()
    createFolders()
    # Watch ./files recursively and block in the notifier loop forever.
    wm = pyinotify.WatchManager()
    notifier = pyinotify.Notifier(wm, VideoComplete())
    mask = pyinotify.ALL_EVENTS
    path = './files'
    wm.add_watch(path, mask, rec=True, auto_add=True)
    notifier.loop()
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
"""
2021/07/31
@author: renfong
"""
import tkinter as tk
import os
from tkinter import filedialog
import data_process as dp
#%% ========================================================
# define window size
root = tk.Tk()
root.geometry("600x300+200+200")  # WxH+Xoffset+Yoffset
root.title("Renfong")
#%% ========================================================
# Create section 1
frame1 = tk.LabelFrame(root,text="Section 1", borderwidth=2)
frame1.pack(fill="x", padx=10)
# select a folder and save a figure in it
def GetFolder():
    # Ask the user for a directory, chdir into it and show it in the entry.
    mainfolder = filedialog.askdirectory()
    os.chdir(mainfolder)
    FolderEntry.insert(0, mainfolder)
GetFolderBtn = tk.Button(frame1, text = "Select folder", command=GetFolder)
GetFolderBtn.grid(row=0, column=0, padx=15, pady=10)
FolderEntry = tk.Entry(frame1, width=75)
FolderEntry.grid(row=0, column=1, padx=5, pady=10, sticky='WE')
def btn1_response():
    # Run data_process.btn1 on the currently selected folder.
    mainfolder = FolderEntry.get()
    dp.btn1(mainfolder)
Btn1 = tk.Button(root, text = "Do Function 1", command=btn1_response)
Btn1.pack(fill="x", padx=20)
#%% ========================================================
# Create section 2
frame2 = tk.LabelFrame(root,text="Section 2", borderwidth=2)
frame2.pack(fill="x", padx=10)
# nth folder
tk.Label(frame2,text="Site : ").grid(pady=5, row=0, column=0)
SiteEntry = tk.Entry(frame2, width=10)
SiteEntry.grid(pady=5, row=0, column=1)
# files
tk.Label(frame2, text="Position : ").grid(pady=2, row=1, column=0)
PosEntry = tk.Entry(frame2, width=10)
PosEntry.grid(pady=2, row=1, column=1)
# ROI
tk.Label(frame2, text="start pixel : ").grid(pady=2, row=2, column=0)
SPEntry = tk.Entry(frame2, width=10)
SPEntry.grid(pady=2, row=2, column=1)
tk.Label(frame2, text="Range : ").grid(pady=2, row=2, column=2)
RNGEntry = tk.Entry(frame2, width=10)
RNGEntry.grid(pady=2, row=2, column=3)
# process
def btn2_response():
    # Collect the form fields and hand them to data_process.btn2.
    site = "SITE"+SiteEntry.get()
    pos = PosEntry.get()
    sp = SPEntry.get()
    rng = RNGEntry.get()
    dp.btn2(site, pos, sp, rng)
Btn2 = tk.Button(root, text= "Do Function 2", command=btn2_response)
Btn2.pack(fill="x", padx=20)
#%% ========================================================
# Create section 3
frame3 = tk.LabelFrame(root,text="Section 3", borderwidth=2)
frame3.pack(fill="both", padx=10, expand="yes")
def btn3_response():
    # Run data_process.btn3 on the selected folder from section 1.
    dp.btn3(FolderEntry.get())
# NOTE(review): this rebinds the name Btn2 from section 2 -- harmless here
# (the widget stays alive via its parent) but confirm it is intentional.
Btn2 = tk.Button(frame3, text= "Do Function 3", command=btn3_response)
Btn2.pack(fill="x", padx=10, pady=10)
#%% ========================================================
root.mainloop()
|
from tkinter import simpledialog, messagebox, Tk
import webbrowser
# Use this function to play a video from the internet
def play_video(url):
    """Open *url* in the system's default web browser."""
    webbrowser.open(url)
# =================== DO NOT MODIFY THE CODE ABOVE ===========================
if __name__ == '__main__':
    window = Tk()
    # Hide the root window — only the dialog boxes should be visible.
    window.withdraw()
    cats = simpledialog.askinteger(title= '', prompt = 'How many cats do you have?')
    # Bug fix: askinteger returns None when the user cancels the dialog;
    # the original then crashed with a TypeError on `cats > 3`.
    if cats is None:
        pass
    elif cats >= 3:
        # Bug fix: the spec below says "3 or more cats" but the original
        # tested `cats > 3`, silently skipping the cats == 3 case.
        messagebox.showinfo(title = '', message = 'You are a crazy cat lady')
    elif cats > 0:
        play_video('https://www.google.com/url?sa=i&url=https%3A%2F%2Fgiphy.com%2Fexplore%2Ftime-to-move&psig=AOvVaw1VUXbehLn9wW56eAszMskE&ust=1618255685148000&source=images&cd=vfe&ved=0CAIQjRxqFwoTCOCf96329u8CFQAAAAAdAAAAABAD')
    elif cats == 0:
        play_video('https://www.google.com/search?q=a+frog+sitting+on+a+bench+like+a+human&rlz=1CARJNJ_enUS842&sxsrf=ALeKk03xNKY-pgQuOWTOVvshKtS5ghIkbw:1618169383531&source=lnms&tbm=isch&sa=X&ved=2ahUKEwiLpqra9vbvAhWbJDQIHZNpBuoQ_AUoAXoECAEQAw&biw=1366&bih=617&safe=active&ssui=on#imgrc=d_HqJ4Jz_VDj9M')
    # TODO 1) Make a new window variable, window = Tk()
    #      2) Hide the window using the window's .withdraw() method
    #      3) Ask the user how many cats they have
    #      4) If they have 3 or more cats, tell them they are a crazy cat lady
    #      5) If they have less than 3 cats AND more than 0 cats, call the
    #         play_video function below to show them a cat video
    #      6) If they have 0 cats, show them a video of A Frog Sitting on a
    #         Bench Like a Human
    pass
|
import operator as op
from dataclasses import dataclass
from typing import Callable, Optional, Union
import common.input_data as input_data
def get_sum_of_equations(equations: list[str], add_precedence: bool = False) -> int:
    """Sum the value of every equation string.

    With add_precedence=True, '+' binds tighter than '*' (puzzle part 2);
    otherwise both operators share equal, left-to-right precedence.
    """
    evaluator = evaluate_equation_with_add_precedence if add_precedence else evaluate_equation
    return sum(map(evaluator, equations))
# A tree node holds either an operator (op.add / op.mul) or an int literal.
Value = Union[Callable, int]

@dataclass
class EquationTree:
    """Binary expression tree: operator nodes have two children, leaves hold ints."""
    value: Value
    left: Optional['EquationTree'] = None
    right: Optional['EquationTree'] = None

    def compute(self) -> int:
        """Recursively evaluate this subtree to an integer."""
        if self.value is op.add or self.value is op.mul:
            assert self.left is not None and self.right is not None
            return self.value(self.left.compute(), self.right.compute())
        assert isinstance(self.value, int)
        return self.value

    def get_rightmost_tree_with_empty_right(self) -> 'EquationTree':
        """Walk down the right spine to the first node with no right child."""
        node = self
        while node.right is not None:
            node = node.right
        return node
def evaluate_equation_with_add_precedence(equation: str) -> int:
    """Evaluate *equation* giving '+' higher precedence than '*'.

    Builds an EquationTree incrementally: '*' is hoisted above the tree
    built so far, while '+' splices itself in below the pending '*' (stealing
    its right operand), which is what gives addition the tighter binding.
    Parentheses are handled by evaluating the sub-tree eagerly and grafting
    its value back as a leaf. Digits are single characters (input has no
    multi-digit numbers).
    """
    tree: Optional[EquationTree] = None
    # Each '(' pushes the partially-built outer tree; ')' pops it back.
    tree_stack: list[Optional[EquationTree]] = []
    for char in equation:
        if char in '0123456789':
            value_tree = EquationTree(int(char))
            if tree is not None:
                # Attach the literal as the operand the rightmost operator
                # is still waiting for.
                tree.get_rightmost_tree_with_empty_right().right = value_tree
            else:
                tree = value_tree
        if char == '+':
            if tree is not None and tree.right is not None:
                # A '*' is pending: take its right operand as our left child
                # so the '+' evaluates first.
                new_tree = EquationTree(op.add, tree.right)
                tree.right = new_tree
            else:
                tree = EquationTree(op.add, tree)
        if char == '*':
            # '*' binds loosest: it becomes the new root above everything so far.
            tree = EquationTree(op.mul, tree)
        if char == '(':
            tree_stack.append(tree)
            tree = None
        if char == ')':
            assert tree is not None
            # Collapse the parenthesised sub-expression to a single leaf value.
            value = tree.compute()
            parent_tree = tree_stack.pop()
            if parent_tree is not None:
                parent_tree.get_rightmost_tree_with_empty_right().right = EquationTree(value)
                tree = parent_tree
            else:
                tree = EquationTree(value)
    assert tree is not None
    return tree.compute()
def evaluate_equation(equation: str) -> int:
    """Evaluate *equation* strictly left to right — '+' and '*' have equal
    precedence. Parentheses are honoured via an explicit save stack; digits
    are single characters. An empty equation evaluates to 1 (the neutral
    starting accumulator).
    """
    total = 1
    pending = op.mul
    saved: list[tuple[int, Callable]] = []
    for token in equation:
        if token in '0123456789':
            total = pending(total, int(token))
        elif token == '(':
            # Remember the outer accumulator/operator, restart fresh inside.
            saved.append((total, pending))
            total, pending = 1, op.mul
        elif token == ')':
            outer_total, outer_op = saved.pop()
            total = outer_op(outer_total, total)
        elif token == '+':
            pending = op.add
        elif token == '*':
            pending = op.mul
    return total
def to_equation(data: str) -> str:
    """Strip every space from an input line so the evaluators see only
    single-character tokens (digits, operators, parentheses)."""
    return ''.join(data.split(' '))
# Puzzle input, one whitespace-stripped equation per line.
EQUATIONS = input_data.read("input/input18.txt", to_equation)

if __name__ == "__main__":
    # Part 1: flat precedence; Part 2: '+' before '*'.
    print(get_sum_of_equations(EQUATIONS))
    print(get_sum_of_equations(EQUATIONS, True))
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
import pandas as pd
import sklearn.pipeline as pipeline
import sklearn.decomposition as decomposition
import sklearn.linear_model as linear_model
import sklearn.model_selection as model_selection
import sklearn.preprocessing as preprocessing
import os
def build_pipeline(portion):
    """Build the Poly(2) -> PCA -> LogisticRegression search pipeline.

    NOTE(review): `portion` is accepted but never used — presumably a
    leftover from the portion-based KernelPCA/Isomap feature union whose
    hyper-parameters sit commented out in main(); confirm before removing.
    """
    return pipeline.Pipeline([('poly', preprocessing.PolynomialFeatures(degree=2)), ('pca', decomposition.PCA()), ('logisticregression', linear_model.LogisticRegression())])
def main():
    """Random-search the pipeline on a fixed train/valid split.

    CSV paths and the t-SNE feature cache come from the PREPARED_TRAINING /
    PREPARED_VALIDATING / STORING environment variables; the last CSV column
    is the target. t-SNE features are concatenated onto the raw features.
    """
    df_train = pd.read_csv(os.getenv('PREPARED_TRAINING'))
    df_valid = pd.read_csv(os.getenv('PREPARED_VALIDATING'))
    feature_cols = list(df_train.columns[:-1])
    target_col = df_train.columns[-1]
    X_train = df_train[feature_cols].values
    y_train = df_train[target_col].values
    X_valid = df_valid[feature_cols].values
    y_valid = df_valid[target_col].values
    # Precomputed t-SNE embeddings, assumed row-aligned with the CSVs — TODO confirm.
    tsne_data = np.load(os.path.join(os.getenv('STORING'), 'tsne.npz'))
    X_train_tsne = tsne_data['X_train']
    X_valid_tsne = tsne_data['X_valid']
    params = {
        # 'featureunion__polynomialfeatures__degree': range(2, 4),
        # 'featureunion__portionkernelpca__n_components': range(2, 102, 2),
        # 'featureunion__portionkernelpca__degree': range(2, 4),
        # 'featureunion__portionkernelpca__kernel': ['cosine', 'rbf'],
        # 'featureunion__portionisomap__n_neighbors': range(1, 11),
        # 'featureunion__portionisomap__n_components': range(2, 102, 2),
        'pca__n_components': list(range(2, 202, 2)),
        'pca__whiten': [True, False],
        'logisticregression__C': [1e-4, 1e-3, 1e-2, 1e-1, 1e-0],
        # NOTE(review): 'l1' needs a liblinear/saga solver; with modern
        # sklearn's default lbfgs those candidates raise — confirm the
        # sklearn version/solver this was tuned against.
        'logisticregression__penalty': ['l1', 'l2']
    }
    # NOTE: this local deliberately shadows the `sklearn.pipeline` module
    # imported at the top of the file.
    pipeline = build_pipeline(portion=0.1)
    X_search = np.concatenate([
        np.concatenate([X_train, X_train_tsne], axis=1),
        np.concatenate([X_valid, X_valid_tsne], axis=1),
    ], axis=0)
    y_search = np.concatenate([y_train, y_valid], axis=0)
    # Single predefined CV fold: the stacked train rows train, valid rows score.
    train_indices = range(0, len(X_train))
    valid_indices = range(len(X_train), len(X_train)+len(X_valid))
    assert(len(train_indices) == len(X_train))
    assert(len(valid_indices) == len(X_valid))
    cv = [(train_indices, valid_indices)]
    search = model_selection.RandomizedSearchCV(pipeline, params, cv=cv, n_iter=100, verbose=2)
    search.fit(X_search, y_search)
    print(search.best_score_)
    print(search.best_params_)

if __name__ == '__main__':
    main()
|
from selenium.common.exceptions import TimeoutException
from selenium.webdriver import ActionChains
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
class BasePage(object):
    """Thin page-object wrapper around a Selenium driver for WhatsApp Web."""

    def __init__(self, driver, base_url='https://web.whatsapp.com/'):
        self.base_url = base_url
        self.driver = driver
        self.suffix_link = 'https://wa.me/'
        self.driver.get(self.base_url)

    def find_element(self, *locator):
        """Locate a single element through the wrapped driver."""
        return self.driver.find_element(*locator)

    def open(self, number):
        """Navigate to the wa.me deep link for *number*."""
        self.driver.get(self.suffix_link + number)

    def get_title(self):
        """Current page title."""
        return self.driver.title

    def get_url(self):
        """Current page URL."""
        return self.driver.current_url

    def hover(self, *locator):
        """Move the mouse pointer over the element matching *locator*."""
        target = self.find_element(*locator)
        ActionChains(self.driver).move_to_element(target).perform()

    def wait_element(self, *locator):
        """Block up to 10 s for *locator* to appear; quit the driver on timeout."""
        try:
            WebDriverWait(self.driver, 10).until(ec.presence_of_element_located(locator))
        except TimeoutException:
            print("\n * ELEMENT NOT FOUND WITHIN GIVEN TIME! --> %s" % (locator[1]))
            self.driver.quit()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-12-05 14:18
from __future__ import unicode_literals
from django.db import migrations, models
import user_input.models
class Migration(migrations.Migration):
    """Add free-text weather fields to DailyUserInputStrong.

    Auto-generated by Django 1.11.2. Values are stored as CharFields; the
    numeric ranges (-20..120 and 0..350) are enforced by the custom
    CharMin/CharMaxValueValidator helpers in user_input.models.
    """

    dependencies = [
        ('user_input', '0040_auto_20171205_1123'),
    ]

    operations = [
        migrations.AddField(
            model_name='dailyuserinputstrong',
            name='temperature_feels_like',
            field=models.CharField(blank=True, max_length=10, validators=[user_input.models.CharMinValueValidator(-20), user_input.models.CharMaxValueValidator(120)]),
        ),
        migrations.AddField(
            model_name='dailyuserinputstrong',
            name='wind',
            field=models.CharField(blank=True, max_length=10, validators=[user_input.models.CharMinValueValidator(0), user_input.models.CharMaxValueValidator(350)]),
        ),
    ]
|
import pandas as pd
import numpy as np
from logging import StreamHandler, DEBUG, Formatter, FileHandler, getLogger
from sklearn.linear_model import ElasticNet, Lasso, BayesianRidge, LassoLarsIC
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.kernel_ridge import KernelRidge
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import RobustScaler
from sklearn.base import BaseEstimator, TransformerMixin, RegressorMixin, clone
from sklearn.model_selection import KFold,cross_val_score, train_test_split
from sklearn.metrics import mean_squared_error, r2_score
import xgboost as xgb
import lightgbm as lgb
from load_data import read_csv
logger = getLogger(__name__)
DIR = 'result_tmp/'
SAMPLE_SUBMIT_FILE = '../input/sample_submission.csv'
# Define Cross validation evaluation
def rmsle_cv(model):
    """Return the per-fold RMSE of *model* on the module-global train/y_train.

    Bug fix: the original called KFold(...).get_n_splits(train.values),
    which returns the plain integer n_folds; passing that int as `cv` made
    cross_val_score fall back to a default, UNSHUFFLED splitter, silently
    discarding shuffle=True and random_state=42. Pass the KFold object
    itself so the intended shuffled, seeded splits are used.
    """
    kf = KFold(n_folds, shuffle=True, random_state=42)
    rmse = np.sqrt(-cross_val_score(model, train.values, y_train, scoring='neg_mean_squared_error', cv=kf, n_jobs=-1))
    return (rmse)
# Evaluation metric: root mean squared error (targets are already in log
# space upstream, making this an RMSLE).
def rmsle(y, y_pred):
    """Root mean squared error between targets *y* and predictions *y_pred*."""
    mse = mean_squared_error(y, y_pred)
    return np.sqrt(mse)
# Define Stacking model
class StackingAverageModels(BaseEstimator, RegressorMixin, TransformerMixin):
    """Two-level stacking ensemble regressor.

    Level 0: every base model is cloned and fit once per KFold split; its
    out-of-fold predictions form one meta-feature column. Level 1: the
    meta-model is fit on those out-of-fold columns, so it never sees a base
    model's in-fold (optimistic) predictions.
    """
    def __init__(self, base_models, meta_model, n_folds=5):
        self.base_models = base_models
        self.meta_model = meta_model
        self.n_folds = n_folds

    # we again fit the data on clones of the original models
    def fit(self, X, y):
        """Fit fold-clones of each base model, then the meta-model.

        X and y are indexed positionally (X[train_index]) — pass numpy
        arrays, not DataFrames.
        """
        # self.base_models_[i] collects the n_folds fitted clones of base model i.
        self.base_models_ = [list() for x in self.base_models]
        self.meta_model_ = clone(self.meta_model)
        kfold = KFold(n_splits=self.n_folds, shuffle=True, random_state=156)
        # Train cloned base models then create out of fold predictions
        # that are needed to train the cloned meta-model
        out_of_fold_predictions = np.zeros((X.shape[0], len(self.base_models)))
        for i, model in enumerate(self.base_models):
            for train_index, holdout_index in kfold.split(X, y):
                instance = clone(model)
                self.base_models_[i].append(instance)
                instance.fit(X[train_index], y[train_index])
                y_pred = instance.predict(X[holdout_index])
                out_of_fold_predictions[holdout_index, i] = y_pred
        # now train the cloned metamodel using the out of fold predictions as new feature
        self.meta_model_.fit(out_of_fold_predictions, y)
        return self

    # Do the predictions of all base models on the test data and use the averaged predictions as
    # meta features for the final predictions which is done by the metamodel
    def predict(self, X):
        """Average each base model's fold-clones, then apply the meta-model."""
        meta_features = np.column_stack([
            np.column_stack([model.predict(X) for model in base_models]).mean(axis=1)
            for base_models in self.base_models_])
        return self.meta_model_.predict(meta_features)
if __name__ == '__main__':
    # Logging: INFO to console, full DEBUG stream appended to DIR/train.py.log.
    log_fmt = Formatter('%(asctime)s %(name)s %(lineno)d [%(levelname)s][%(funcName)s] %(message)s')
    handler = StreamHandler()
    handler.setLevel('INFO')
    handler.setFormatter(log_fmt)
    logger.addHandler(handler)
    handler = FileHandler(DIR + 'train.py.log', 'a')
    handler.setLevel(DEBUG)
    handler.setFormatter(log_fmt)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)
    logger.info('start')
    # Load preprocessed features. SalePrice appears to be log1p-scaled
    # upstream, since predictions are inverted with expm1 below — TODO confirm.
    train = read_csv(DIR + 'train_preprocess.csv')
    test = read_csv(DIR + 'test_preprocess.csv')
    logger.info('Load train data shape:{}'.format(train.shape))
    y_train = train['SalePrice'].values
    train.drop('SalePrice', axis=1, inplace=True)
    test_id = test['Id']
    test.drop('Id', axis=1, inplace=True)
    logger.info('After loading')
    logger.info('train:{}, target: {}'.format(train.shape, y_train.shape))
    logger.info('test:{}, 1d: {}'.format(test.shape, test_id.shape))
    # Cross validation strategy
    n_folds = 5
    # Lasso Regression
    lasso = Lasso(alpha=0.0005, random_state=1)
    #score = rmsle_cv(lasso)
    #logger.info('Lasso score:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # Kernel Ridge Regression
    KRR = KernelRidge(alpha=0.6, kernel='polynomial', degree=2, coef0=2.5)
    #score = rmsle_cv(KRR)
    #logger.info('KernelRidge score:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # Elastic Net regression
    ENet = ElasticNet(alpha=0.0005, l1_ratio=.9,random_state=3)
    #score = rmsle_cv(ENet)
    #logger.info('ElasticNet score:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # Gradient Boosting Regression
    GBoost = GradientBoostingRegressor(n_estimators=3000, learning_rate=0.05, max_depth=4, max_features='sqrt', min_samples_leaf=15,
                                       min_samples_split=10, loss='huber', random_state=5)
    #score = rmsle_cv(GBoost)
    #logger.info('Gradient Boosting score:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # XGBoost
    model_xgb = xgb.XGBRegressor(colsample_bytree=0.4603, gamma=0.0468, learning_rate=0.05, max_depth=3, min_child_weight=1.7817,
                                 n_estimators=2200, reg_alpha=0.4640, reg_lambda=0.8571, subsample=0.5213, silent=1, random_state=7, nthread=-1)
    #score = rmsle_cv(model_xgb)
    #logger.info('XGBoost score:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # LightGBM
    model_lgb = lgb.LGBMRegressor(objective='regression', num_leaves=5, learning_rate=0.05, n_estimators=720, max_bin=55,
                                  bagging_fraction=0.8, bagging_freq=5, feature_fraction=0.2319, feature_fraction_seed=9, bagging_seed=9,
                                  min_data_in_leaf=6,min_sum_hessian_in_leaf=11)
    #score = rmsle_cv(model_xgb)
    #logger.info('LightGBM socre:{:.4f}({:.4f})'.format(score.mean(), score.std()))
    # Stack 3models as base model
    stacked_averaged_models = StackingAverageModels(base_models= (ENet, GBoost, KRR), meta_model=lasso)
    #score = rmsle_cv(stacked_averaged_models)
    #logger.info('Stacking Averaged models score: {:.4f} ({:.4f})'.format(score.mean(), score.std()))
    # Stacked regressor (NOTE: scores below are on the training data, so they
    # are optimistic; CV scores were computed via the commented rmsle_cv calls).
    stacked_averaged_models.fit(train.values, y_train)
    stacked_train_pred = stacked_averaged_models.predict(train.values)
    stacked_pred = np.expm1(stacked_averaged_models.predict(test.values))
    logger.info('Stacking regressor score: {}'.format(rmsle(y_train, stacked_train_pred)))
    # XGBoost
    model_xgb.fit(train, y_train)
    xgb_train_pred = model_xgb.predict(train)
    xgb_pred = np.expm1(model_xgb.predict(test))
    logger.info('XGBoost regressor score: {}'.format(rmsle(y_train, xgb_train_pred)))
    # LightGBM
    model_lgb.fit(train, y_train)
    lgb_train_pred = model_lgb.predict(train)
    lgb_pred = np.expm1(model_lgb.predict(test.values))
    logger.info('LightGBM regressor score: {}'.format(rmsle(y_train, lgb_train_pred)))
    # Ensemble prediction: fixed 70/15/15 blend of the three fitted models.
    ensemble = stacked_pred * 0.7 + xgb_pred * 0.15 + lgb_pred * 0.15
    # Submission
    sub = pd.DataFrame()
    sub['Id'] = test_id
    sub['SalePrice'] = ensemble
    sub.to_csv(DIR + 'submission.csv', index=False)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from sklearn import linear_model
from models.binary_classifier import BinaryClassifier
class LogisticRegression(BinaryClassifier, linear_model.LogisticRegression):
    """scikit-learn's LogisticRegression exposed through the project's
    BinaryClassifier interface; adds no behaviour of its own."""
    pass
|
import unittest
from liveproxy.argparser import ip_address
class TestArgparser(unittest.TestCase):
    """Validation behaviour of liveproxy.argparser.ip_address."""

    def test_can_handle_url(self):
        valid_addresses = ('127.0.0.1', '0.0.0.0')
        for candidate in valid_addresses:
            self.assertTrue(ip_address(candidate))
        invalid_addresses = ('abc', '123abc')
        for candidate in invalid_addresses:
            with self.assertRaises(ValueError):
                ip_address(candidate)
|
from kernel.database import BaseHelper, DB
class User(DB.Model):
    """ORM row for the `users` table."""
    __tablename__ = 'users'
    # NOTE(review): no column is declared primary_key=True — most ORMs
    # refuse to map a table without a primary key; confirm `id` should
    # carry primary_key=True.
    id = DB.Column(DB.Integer(), nullable=False)
    name = DB.Column(DB.String(64), nullable=False, default='')
class UserHelper(BaseHelper):
    """BaseHelper bound to the User model; the class attributes mirror the
    model's columns (semantics defined by BaseHelper — not visible here)."""
    __model__ = User
    id = None
    name = None
|
import pytest
from izigraph import Link, Node
def test_link_can_be_created():
    """Two valid nodes and an int weight produce a Link instance."""
    source = Node('node_1')
    destination = Node('node_2')
    link = Link(source, destination, weight=10)
    assert isinstance(link, Link)
def test_link_raises_value_error_when_created_with_not_valid_node():
    """Every invalid (source, destination) pair must raise ValueError.

    Bug fix: the original stacked four constructor calls inside one
    pytest.raises block; the context manager exits at the FIRST raised
    exception, so the last three cases were never executed. Each case now
    gets its own raises context.
    """
    invalid_pairs = [
        (None, None),
        ('node_1', 'node_2'),
        (123, 456),
        (True, False),
    ]
    for source, destination in invalid_pairs:
        with pytest.raises(ValueError):
            Link(source=source, destination=destination)
def test_link_raises_value_error_when_created_with_unvalid_weight():
    """Every non-numeric weight must raise ValueError.

    Bug fix: the original placed three Link(...) calls inside a single
    pytest.raises block, so only the first ('asd') was ever exercised.
    NOTE(review): the None and True cases were dead code before — confirm
    the library actually rejects them (bool is an int subclass), since
    they are now genuinely asserted.
    """
    node1 = Node('node_1')
    node2 = Node('node_2')
    for bad_weight in ('asd', None, True):
        with pytest.raises(ValueError):
            Link(node1, node2, weight=bad_weight)
def test_link_can_be_created_with_default_weight():
    """Omitting weight falls back to the default and still yields a Link."""
    source = Node('node_1')
    destination = Node('node_2')
    assert isinstance(Link(source, destination), Link)
def test_link_can_return_node():
    """source()/destination() return Node objects with matching labels."""
    source = Node('node_1')
    destination = Node('node_2')
    link = Link(source, destination)
    assert isinstance(link.source(), Node)
    assert isinstance(link.destination(), Node)
    assert link.source().label() == source.label()
    assert link.destination().label() == destination.label()
def test_link_can_return_weight():
    """weight() echoes the value passed at construction time."""
    expected_weight = 12345
    link = Link(Node('node_1'), Node('node_2'), weight=expected_weight)
    assert link.weight() == expected_weight
|
#!flask/bin/python
from flask import Flask
app = Flask(__name__)
@app.route('/operacoesint/')
@app.route('/operacoesint/<int:num1>/<int:num2>')
def escrevanatelaoperacoesint(num1=1,num2=2):
    """Teaching demo of assignment operators rendered as HTML.

    NOTE(review): soma and subtracao are computed from constants (always 3
    and -1) — the URL ints num1/num2 are only echoed back; this appears
    deliberate for demonstrating `+=` / `-=`.
    """
    soma = 1
    soma = soma + 1
    # Alternative form
    soma += 1
    subtracao = 1
    subtracao = subtracao - 1
    # Alternative form
    subtracao -= 1
    return "<h1>Números num1: {}, num2: {}</h1> <br/>" \
           "<h1>Soma {}</h1> <br/> " \
           "<h1>Subtracao {}</h1> <br/> " \
           "<br/>".format(num1,num2,soma, subtracao)
@app.route('/operacoesf/')
@app.route('/operacoesf/<float:num1>/<float:num2>')
def escrevanatelaoperacoesf(num1=7.8,num2=8.3):
    """Float arithmetic demo; Flask's <float:> converter requires a literal
    decimal point in the URL segment.

    NOTE(review): num2 == 0.0 raises ZeroDivisionError (HTTP 500) on the
    division and modulo lines — confirm whether that should be handled.
    """
    soma = num1 + num2
    subtracao = num1 - num2
    multiplicacao = num1 * num2
    divisao = num1 / num2
    modulo = num1 % num2
    return "<h1>Números num1: {}, num2: {}</h1> <br/>" \
           "<h1>Soma {}</h1> <br/> " \
           "<h1>Subtracao {}</h1> <br/> " \
           "<h1>Multiplicacao {}</h1> <br/> " \
           "<h1>Divisao {}</h1> <br/>" \
           "<h1>Modulo, resto da divisao {}</h1> " \
           "<h1>Flutuante {}</h1> " \
           "<br/>".format(num1,num2,soma, subtracao, multiplicacao, divisao, modulo,num1)
@app.route('/operacoes/')
@app.route('/operacoes/<int:num1>/<int:num2>')
# You can add variable sections to a URL by marking them with <variable_name>.
# Your function then receives <variable_name> as a keyword argument.
# Optionally, use a converter to fix the argument type: <converter:variable_name>.
def escrevanatelaoperacoes(num1=10,num2=10):
    """Integer arithmetic demo rendered as HTML.

    NOTE(review): num2 == 0 raises ZeroDivisionError (HTTP 500) on the
    division and modulo lines — confirm whether that should be handled.
    """
    soma = num1 + num2
    subtracao = num1 - num2
    multiplicacao = num1 * num2
    divisao = num1 / num2
    modulo = num1 % num2
    return "<h1>Números num1: {}, num2: {}</h1> <br/>" \
           "<h1>Soma {}</h1> <br/> " \
           "<h1>Subtracao {}</h1> <br/> " \
           "<h1>Multiplicacao {}</h1> <br/> " \
           "<h1>Divisao {}</h1> <br/>" \
           "<h1>Modulo, resto da divisao {}</h1> " \
           "<br/>".format(num1,num2,soma, subtracao, multiplicacao, divisao, modulo)
if __name__ == '__main__':
app.run(debug=True) |
from flask import Flask
app = Flask(__name__)
@app.route("/")
def main():
    """Serve the site root with a static welcome message."""
    return "Welcome to my Flask Page"
|
from django.contrib.messages.views import SuccessMessageMixin
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.views.generic import CreateView
from rest_framework import viewsets
from .forms import MimicManageForm
from .models import Mimic
from .serializers import MimicSerializer
from windows.models import Window
class MimicManageView(SuccessMessageMixin, CreateView):
    """Create a Mimic bound to the Window named by the 'window' URL kwarg."""
    model = Mimic
    template_name = 'mimics/mimic_manage_form.html'
    success_message = "Mimic was added."
    form_class = MimicManageForm

    def get_success_url(self):
        # Return to the owning window's manage page after a successful add.
        return reverse('mimic_manage_window', args=(self.window.slug,))

    def get_initial(self):
        """
        Add window field.
        """
        # Resolves self.window from the URL slug (404 if missing) and
        # pre-fills it on the form. NOTE(review): get_success_url and
        # get_context_data read self.window set here — relies on CreateView
        # always calling get_initial first; confirm for custom flows.
        initial = self.initial.copy()
        self.window = get_object_or_404(Window, slug=self.kwargs.get('window'))
        initial['window'] = self.window.pk
        return initial

    def get_context_data(self, **kwargs):
        # Expose the resolved window object to the template.
        context = super(MimicManageView, self).get_context_data(**kwargs)
        context['window'] = self.window
        return context
class MimicViewSet(viewsets.ModelViewSet):
    """REST CRUD endpoints for Mimic; supports ?window=<id> filtering."""
    queryset = Mimic.objects.all()
    serializer_class = MimicSerializer
    filter_fields = ('window',)
|
"""
Created by Alex Wang
On 2018-06-06
ref: http://www.bogotobogo.com/VideoStreaming/YouTube/youtube-dl-embedding.php
"""
import traceback
import youtube_dl
def video_download_func(url, save_dir):
    """
    download video by url
    :param url: video page URL understood by youtube-dl
    :param save_dir: directory the file lands in (named by the video id)
    :return: True on success, False on failure
    """
    try:
        ydl_opts = {'outtmpl': '{}/%(id)s'.format(save_dir)}
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            ydl.download([url])
    except Exception:
        # Broad catch is deliberate: downloads are best-effort and any
        # youtube-dl failure should not abort the batch.
        print('download failed:', url)
        traceback.print_exc()
        return False
    # Bug fix: the original fell off the end and returned None on success,
    # making the result falsy regardless of outcome. Return True explicitly.
    return True
def download_videos(video_save_dir, video_url_list):
    """Best-effort download of every URL in *video_url_list* into *video_save_dir*."""
    for video_url in video_url_list:
        video_download_func(video_url, video_save_dir)
if __name__ == '__main__':
    # Sample ActivityNet clips; files are saved into video_save_dir named
    # by their YouTube video id (see the outtmpl pattern above).
    video_save_dir = '/Users/alexwang/data/activitynet'
    video_url_list = [
        'https://www.youtube.com/watch?v=sJFgo9H6zNo',
        'https://www.youtube.com/watch?v=V1zhqaGFY2A',
        'https://www.youtube.com/watch?v=JDg--pjY5gg'
    ]
download_videos(video_save_dir, video_url_list) |
from sklearn import tree
from sklearn import neighbors
from sklearn import svm
#height,weight,shoesize
X = [[181, 80, 44], [177, 70, 43], [160, 60, 38], [154, 54, 37], [166, 65, 40],
[190, 90, 47], [175, 64, 39],
[177, 70, 40], [159, 55, 37], [171, 75, 42], [181, 85, 43]]
Y = ['male', 'male', 'female', 'female', 'male', 'male', 'female', 'female',
'female', 'male', 'male']
test_sample = [180,80,40]
#1 Decision Tree Classifier being used here
clf = tree.DecisionTreeClassifier()
clf = clf.fit(X,Y)
prediction = clf.predict(test_sample)
print prediction
#2 k nearest neighbor classification
clf = neighbors.KNeighborsClassifier()
clf = clf.fit(X,Y)
prediction = clf.predict(test_sample)
print prediction
#3 linear SVM
clf = svm.SVC(kernel='linear')
clf = clf.fit(X,Y)
prediction = clf.predict(test_sample)
print prediction |
from django.shortcuts import render, render_to_response
from django.shortcuts import redirect
from django.http import JsonResponse, HttpResponse, HttpResponseRedirect
from django.views.generic import View,TemplateView
from django.views.generic.edit import CreateView, UpdateView, FormView
from core.models import Locations,Ad, Topup, TopupLocationCounter, Transaction
import redis_utils
import core.models as coremodels
import core.forms as coreforms
import django.contrib.auth
from django.contrib.auth.models import User
from sitegate.decorators import redirect_signedin, sitegate_view
import datetime
from django.utils import timezone
import redis_utils
import damadam_utils
import sms_utils
import SMS_MESSAGES
from django.views.generic.list import ListView
def Hello(request, **kwargs):
    """Trivial JSON endpoint; usable as a liveness check."""
    return JsonResponse({'foo':'bar'})
# Create your views here.
def get_ad():
    """Placeholder — ad-selection logic not implemented yet."""
    pass
def is_Agent(user):
    """True when *user* belongs to the 'Agent' group."""
    agent_memberships = user.groups.filter(name='Agent')
    return agent_memberships.exists()
def adApprove(request, pk=None, *args, **kwargs):
    """Approve ad *pk* and grant it a free 5-click live topup (agents only)."""
    if request.user.get_SalesAgent() is not None:
        ad1 = Ad.objects.get(id=pk)
        ad1.approve()
        # Free promotional allotment: status=2 topup with 5 clicks, no payment.
        clicks = 5
        topup = Topup(ad = ad1,money_paid=0, status=2, clicks=clicks,
                      closed_by=request.user.get_SalesAgent(), phone_number=ad1.phone_number)
        topup.save()
        topup.make_it_live()
        # put ad to my own redis.
        # TODO: ADD ad to REDIS.
    return redirect('sales_agent')
def adDelete(request, pk=None, *args, **kwargs):
    """Delete ad *pk*; only permitted for sales agents."""
    agent = request.user.get_SalesAgent()
    if agent is not None:
        doomed_ad = Ad.objects.get(id=pk)
        doomed_ad.delete()
    return redirect('sales_agent')
def adSunset(request, pk=None, *args, **kwargs):
    """Move ad *pk* into the sunset state (status 8); agents only."""
    agent = request.user.get_SalesAgent()
    if agent is not None:
        sunset_ad = Ad.objects.get(id=pk)
        sunset_ad.status = 8
        sunset_ad.save()
    return redirect('sales_agent')
def adClaim(request, pk=None, *args, **kwargs):
    """Let the requesting agent claim ad *pk* (status -> 3) and restart the
    agent's claim-cooldown clock."""
    agent = request.user.get_SalesAgent()
    if agent is not None:
        ad_claimed = Ad.objects.get(id=pk)
        ad_claimed.claimed_by = agent
        ad_claimed.status = 3
        ad_claimed.save()
        # NOTE(review): naive datetime stored here, but Dashboard compares it
        # against timezone.now(); mixing naive/aware datetimes raises under
        # USE_TZ — confirm and standardise on timezone.now().
        agent.last_ad_claim_time = datetime.datetime.now()
        agent.save()
    return redirect('sales_agent')
class PutKhoofia(UpdateView):
    """Agent-dashboard form that edits a Topup and immediately pushes it live."""
    form_class = coreforms.DashboardKhoofiaForm
    model = coremodels.Topup
    template_name = 'form.html'

    def form_valid(self, form):
        # Persist the topup edits, then activate the topup.
        self.object = form.save()
        self.object.make_it_live()
        # (Comment inherited from the Ad views — there is no Location field here.)
        return redirect('sales_agent')
class Dashboard(View):
    """Sales-agent dashboard: buckets ads/topups by status for SalesAgent.html.

    Status codes (as used below): ads 0=unapproved, 1=approved, 2=paused,
    3=claimed, 7=stopped; topups 0=closed, 1=verified, 5=unverified.
    """
    def get(self, request, *args, **kwargs):
        if request.user.get_SalesAgent() is not None:
            unapproved_ads = Ad.objects.filter(status=0).all()
            approved_ads = Ad.objects.filter(status=1).all()
            paused_ads = Ad.objects.filter(status=2).all()
            agent = request.user.get_SalesAgent()
            my_claimed_ads = Ad.objects.filter(status=3, claimed_by=agent).all()
            my_closed_topup = Topup.objects.filter(status=0, closed_by=agent).all()
            unverified_topup = Topup.objects.filter(status=5, closed_by=agent).all()
            verified_topup = Topup.objects.filter(status=1, closed_by=agent).all()
            my_stopped_ads = Ad.objects.filter(status=7, claimed_by=agent).all()
            # for ad in uapproved_ads:
            #     pass
            # Claim cooldown: an agent may claim again only COOLDOWN_TIME
            # seconds after their last claim.
            can_claim = False
            timediff = None
            if agent.last_ad_claim_time is None:
                can_claim = True
            else:
                # NOTE(review): timezone.now() (aware) minus a possibly-naive
                # last_ad_claim_time (set via datetime.now() in adClaim) —
                # confirm USE_TZ settings; also note this module is Python 2
                # (print statement below).
                timediff = timezone.now()- agent.last_ad_claim_time
                print timediff
                # NOTE(review): .seconds ignores the .days component of the
                # timedelta — cooldown wraps every 24h; likely wants
                # total_seconds().
                if timediff.seconds > coremodels.COOLDOWN_TIME:
                    can_claim = True
            data = {'unapproved_ads':unapproved_ads,'approved_ads':approved_ads,
                    'paused_ads':paused_ads,'timediff': timediff, 'can_claim': can_claim,
                    'my_claimed_ads':my_claimed_ads, 'my_closed_topup':my_closed_topup,
                    'my_stopped_ads': my_stopped_ads, 'unverified_topup':unverified_topup,
                    'verified_topup':verified_topup
                    }
            return render_to_response('SalesAgent.html', data)
        elif request.user.is_superuser:
            return redirect('super_user')
        elif request.user.is_authenticated():
            return HttpResponse("u r not admin/agent")
        else:
            return HttpResponse("plz login")
class SalesAgentCreateView(CreateView):
    """Superuser form creating an auth user plus its SalesAgent profile."""
    form_class = coreforms.AgentCreateForm
    template_name = 'form.html'

    def get_context_data(self,**kwargs):
        # NOTE(review): returning an HttpResponse from get_context_data does
        # NOT short-circuit the view for non-superusers — the response object
        # is merely handed to the template as context. Access control belongs
        # in dispatch() or a mixin; flagging rather than changing behaviour.
        if self.request.user.is_superuser:
            return super(SalesAgentCreateView,self).get_context_data(**kwargs)
        else:
            return HttpResponse('BRO dnt hack my website')

    def form_valid(self, form):
        # commit=False so the password can be hashed before the user row is
        # written; saving the raw password would break authentication.
        self.object = form.save(commit=False)
        #BILAL:WHY AM I DOING THIS?
        #BECAUSE otherwise the raw password gets saved and the authenticationdoesnt work.
        self.object.set_password(form.cleaned_data["password"])
        self.object.save()
        #----------
        # Attach the SalesAgent profile carrying location and phone number.
        agent = coremodels.SalesAgent(user=self.object)
        agent.location = form.cleaned_data["location"]
        agent.phone_number = form.cleaned_data['phone_number']
        agent.save()
        return redirect('super_user')
class Superuser(View):
    """Admin dashboard: buckets outgoing SMS and transactions by status.

    Transaction statuses used below: 0=pending payment, 1=pending khoofia,
    2=pending khoofia verification, 4=less/short payment, 5=mismatched.
    """
    def get(self, request, *args, **kwargs):
        # NOTE(review): no superuser check inside the view — confirm the
        # URLconf (or middleware) restricts access to admins.
        pending_sms = coremodels.SMSOutgoing.objects.filter(status=0)
        mismatched_transaction = coremodels.Transaction.objects.filter(status=5)
        pending_payment = coremodels.Transaction.objects.filter(status=0)
        pending_khoofia = coremodels.Transaction.objects.filter(status=1)
        pending_khoofia_verification = coremodels.Transaction.objects.filter(status=2)
        less_transaction = coremodels.Transaction.objects.filter(status=4)
        data = {'pending_sms': pending_sms,'mismatched_transaction':mismatched_transaction,
                'pending_payment':pending_payment, 'pending_khoofia':pending_khoofia,
                'pending_khoofia_verification':pending_khoofia_verification,
                'less_transaction':less_transaction}
        return render_to_response('admin.html', data)
class AdUpdateView(UpdateView):
    """Edit an Ad and rebuild its Locations rows from the submitted form."""
    form_class = coreforms.AdUpdateForm
    model = coremodels.Ad
    template_name = 'form.html'

    def get_initial(self):
        # Pre-select the ad's current locations in the multi-select widget.
        variables = super(AdUpdateView, self).get_initial()
        variables['location'] = self.object.getLocations()
        return variables

    def form_valid(self, form):
        # Persist the edited ad fields.
        self.object = form.save()
        # Replace the ad's Locations rows with the freshly submitted set
        # (same pattern as AdCreateView.form_valid).
        ad_locations = form.cleaned_data['location']
        # Bulk-delete the old rows by id, then recreate one row per location.
        loc_ids = self.object.locations_set.all().values_list('id',flat=True)
        Locations.objects.filter(id__in= loc_ids).delete()
        for loc in ad_locations:
            loc_object = Locations(ad=self.object, location=loc) # create location counters.
            # these are for tracking hits
            loc_object.save()
        return redirect('sales_agent')
def resendSMS(request, pk=None, *args, **kwargs):
    """Re-send pending outgoing SMS *pk*, then return to the admin dashboard."""
    outgoing = coremodels.SMSOutgoing.objects.get(id=pk)
    outgoing.resend()
    return redirect('super_user')
def suspendTopupAd(request, pk=None, *args, **kwargs):
    """Suspend topup *pk*, then return to the admin dashboard."""
    target = coremodels.Topup.objects.get(id=pk)
    target.suspend()
    return redirect('super_user')
def resumeTopupAd(request, pk=None, *args, **kwargs):
    """Request resumption of topup *pk*, then return to the admin dashboard."""
    target = coremodels.Topup.objects.get(id=pk)
    target.resumeReq()
    return redirect('super_user')
def deleteTopupAd(request, pk=None, *args, **kwargs):
    """Request deletion of topup *pk*, then return to the admin dashboard."""
    target = coremodels.Topup.objects.get(id=pk)
    target.deleteReq()
    return redirect('super_user')
def verifyTopupAd(request, pk=None, *args, **kwargs):
    """Mark topup *pk* as verified (status 1), then back to the agent view."""
    target = coremodels.Topup.objects.get(id=pk)
    target.status = 1
    target.save()
    return redirect('sales_agent')
class AllAds(ListView):
    """Unfiltered listing of every Ad, rendered by ad_list.html."""
    model = coremodels.Ad
    template_name = "ad_list.html"
class AdCloseView(UpdateView):
    """Close a deal on an Ad: rewrite its locations, record a Topup plus a
    pending Transaction, then mark the ad closed (status 4)."""
    form_class = coreforms.AdCloseForm
    # fields = ['title', 'description', 'address', 'link_url', 'button_label', 'contact_preference']
    model = coremodels.Ad
    template_name = 'form.html'

    def get_initial(self):
        # Pre-select the ad's current locations in the form.
        variables = super(AdCloseView, self).get_initial()
        variables['location'] = self.object.getLocations()
        return variables

    def form_valid(self, form):
        # Persist the edited ad fields.
        self.object = form.save()
        # Replace the ad's Locations with the freshly submitted set
        # (same pattern as AdCreateView.form_valid).
        ad_locations = form.cleaned_data['location']
        old_locs = self.object.locations_set.all()
        # deleting old locations
        for old_loc in old_locs:
            old_loc.delete()
        # adding new locations
        for loc in ad_locations:
            loc_object = Locations(ad=self.object, location=loc) # create location counters.
            # these are for tracking hits
            loc_object.save()
        #Saving a TopUp
        topup = coremodels.Topup(closed_by = self.request.user.get_SalesAgent(), ad = self.object,
                                 clicks = form.cleaned_data['clicks_promised'],
                                 cnic = form.cleaned_data['cnic'],
                                 money_paid = form.cleaned_data['money_negotiated'],
                                 phone_number = form.cleaned_data['phone_number']
                                 )
        topup.save()
        # A status-0 (pending payment) transaction mirrors the new topup.
        Transaction(status=0, topup=topup, phone_number=form.cleaned_data['phone_number'],
                    cnic=form.cleaned_data['cnic']).save()
        # topuplocationcounters = []
        # for loc in ad_locations:
        #     topuplocationcounters.append(TopupLocationCounter(topup=topup, location= loc))
        # TopupLocationCounter.objects.bulk_create(topuplocationcounters)
        self.object.status = 4
        self.object.save()
        return redirect('sales_agent')
class AdCreateView(CreateView):
    """Public ad-submission form; new ads start unapproved awaiting an agent."""
    form_class = coreforms.AdCreateForm
    template_name = 'form.html'
    # template_name = 'kunden/kunde_update.html'
    success_url = '/'

    def form_valid(self, form):
        self.object = form.save() # create the AD
        # One Locations row per selected location, used for hit tracking.
        ad_locations = form.cleaned_data['location']
        for loc in ad_locations:
            loc_object = Locations(ad=self.object, location=loc) # create location counters.
            # these are for tracking hits
            loc_object.save()
        return HttpResponse("saved!")
@sitegate_view(redirect_to='/dashboard',widget_attrs={'class': 'form-control', 'placeholder': lambda f: f.label}, template='form_bootstrap3') # This also prevents logged in users from accessing our sign in/sign up page.
def entrance(request):
    """Combined sign-in / sign-up page rendered via django-sitegate."""
    return render(request, 'entrance.html', {'title': 'Sign in & Sign up'})
def logout_view(request):
    """Terminate the session and redirect home with a logout flag."""
    django.contrib.auth.logout(request)
    # Redirect to a success page.
    return HttpResponseRedirect("/?logout=successful")
|
from django.contrib import admin
from .models import Sentence
class SentenceAdmin(admin.ModelAdmin):
    """Admin form for Sentence exposing only the two text fields."""
    fields = ['original_text', 'encoding_text']

admin.site.register(Sentence, SentenceAdmin)
# File: admin.py
# Author: Kaiching Chang
# Date: June, 2018
|
from proxies import Foo, Bar
# Smoke-test the imported proxy classes.
Foo().foo()
Bar().bar() |
from .hoplite import Hoplite, Config, utils
import argparse
import sys
import RPi.GPIO as GPIO
from hx711 import HX711
def calibrate(conf_file, index, channel, weight):
    """Calibrate one channel of an HX711 weight sensor against a known test
    weight and store the resulting reference unit in the config file.

    conf_file -- path to the JSON config file
    index     -- sensor index into config['hx'] (stringified int accepted)
    channel   -- 'A' or 'B'
    weight    -- known test weight placed on the sensor

    NOTE(review): relies on the module-global `parsed_args` being set by
    __main__() before this is called. Always exits the process.
    """
    h = Hoplite(debug=parsed_args.debug)
    c = Config(conf_file, debug=parsed_args.debug)
    config = c.config
    try:
        # NOTE(review): an out-of-range index raises IndexError, which is
        # not caught here -- only KeyError/ValueError are handled.
        hx = h.init_hx711(config['hx'][int(index)])
    except (KeyError, ValueError):
        print("Sensor %s not found!" % index)
        sys.exit()
    if channel == 'A':
        cal = h.hx711_cal_chA(hx, weight)
        ch = 'A'
    elif channel == 'B':
        cal = h.hx711_cal_chB(hx, weight)
        ch = 'B'
    else:
        # Unknown channel: clean up and bail out before touching the config.
        print("Sensor %s channel %s not found!" % (index, channel))
        GPIO.cleanup()
        sys.exit()
    try:
        config['hx'][int(index)]['channels'][ch]['refunit'] = cal
        print("Calibration unit %s, saved to config" % cal)
    except KeyError:
        print("Sensor %s channel %s not found!" % (index, channel))
    # NOTE(review): the config is saved even when the KeyError above fired,
    # i.e. when no calibration value could be stored -- confirm intended.
    c.save_config()
    GPIO.cleanup()
    sys.exit()
def tare(conf_file, index=None, channel=None):
    """Tare (zero) HX711 sensors and persist the offsets to the config file.

    conf_file -- path to the JSON config file
    index     -- sensor index into config['hx']; None means all sensors
    channel   -- 'A' or 'B'; None means both channels

    NOTE(review): relies on the module-global `parsed_args` being set by
    __main__() before this is called. Always exits the process.
    """
    h = Hoplite(debug=parsed_args.debug)
    c = Config(conf_file, debug=parsed_args.debug)
    config = c.config
    # one sensor, one or both channels
    if index is not None:
        try:
            hx_conf = config['hx'][int(index)]
            dout = hx_conf['dout']
            pd_sck = hx_conf['pd_sck']
            utils.debug_msg(h, "dout, pd_sck: %s %s" % (dout, pd_sck))
            hx = HX711(dout, pd_sck)
            hx.set_reading_format("MSB", "MSB")
            hx.reset()
        except (KeyError, IndexError):
            print("Sensor at index %s not found!" % ( index ))
            sys.exit()
        if channel not in ('A', 'B', None):
            print("Sensor %s channel %s not found!" % ( index, channel ))
        # BUG FIX: the original used `elif` for channel B, so channel=None
        # ("tare both channels") only ever tared channel A. Two independent
        # `if`s let None fall through to both channels.
        if channel == 'A' or channel is None:
            hx.set_reference_unit_A(1)
            hx.tare_A()
            utils.debug_msg(h, "refunit, offset: %s %s" % (hx.get_reference_unit_A(), hx.get_offset_A()))
            try:
                hx_conf['channels']['A']['offset'] = hx.OFFSET
                print("Sensor %s channel A offset saved as %s" % (index, hx.OFFSET))
            except KeyError:
                print("Sensor %s channel %s not found!" % ( index, channel ))
        if channel == 'B' or channel is None:
            hx.set_reference_unit_B(1)
            hx.tare_B()
            utils.debug_msg(h, "refunit, offset: %s %s" % (hx.get_reference_unit_B(), hx.get_offset_B()))
            try:
                hx_conf['channels']['B']['offset'] = hx.OFFSET_B
                print("Sensor %s channel B offset saved as %s" % (index, hx.OFFSET_B))
            except KeyError:
                print("Sensor %s channel %s not found!" % ( index, channel ))
    # all sensors, all channels
    else:
        for idx, hx_conf in enumerate(config['hx']):
            dout = hx_conf['dout']
            pd_sck = hx_conf['pd_sck']
            hx = HX711(dout, pd_sck)
            utils.debug_msg(h, "dout, pd_sck: %s %s" % (dout, pd_sck))
            hx.set_reading_format("MSB", "MSB")
            hx.reset()
            hx.set_reference_unit_A(1)
            hx.tare_A()
            utils.debug_msg(h, "refunit, offset: %s %s" % (hx.get_reference_unit_A(), hx.get_offset_A()))
            try:
                hx_conf['channels']['A']['offset'] = hx.OFFSET
                print("Sensor %s channel A offset saved as %s" % (str(idx), hx.OFFSET))
            except KeyError:
                # This sensor simply has no channel A configured.
                pass
            hx.set_reference_unit_B(1)
            hx.tare_B()
            utils.debug_msg(h, "refunit, offset: %s %s" % (hx.get_reference_unit_B(), hx.get_offset_B()))
            try:
                hx_conf['channels']['B']['offset'] = hx.OFFSET_B
                print("Sensor %s channel B offset saved as %s" % (str(idx), hx.OFFSET_B))
            except KeyError:
                # This sensor simply has no channel B configured.
                pass
    c.save_config()
    GPIO.cleanup()
    sys.exit()
def __main__():
    """Parse command-line arguments and dispatch to calibration, taring, or
    the normal monitoring loop.

    Sets the module-global `parsed_args` consumed by calibrate() and tare().
    """
    global parsed_args
    parser = argparse.ArgumentParser(
        description="HOPLITE: A kegerator monitoring script for RasPi")
    parser.add_argument('--config',
                        type=str,
                        help='Config file location. Default: ./config.json')
    parser.add_argument('--api',
                        type=str,
                        help='Address where the API should listen. Format is <ip>:<port>. Port is optional. Default is 0.0.0.0:5000 (listen on all IPs at port 5000)')
    parser.add_argument('--cal',
                        type=str,
                        nargs=3,
                        metavar=('INDEX', 'CHAN', 'W'),
                        help='Calibrate a weight sensor using a test weight in grams. Weight sensor index is integer as defined in the config file: first sensor is 0, second is 1, etc. Channel is either \'A\' or \'B\'. Usage: --cal <N> <channel> <test_weight>')
    parser.add_argument('--tare',
                        type=str,
                        nargs='*',
                        metavar=('INDEX', 'CHAN'),
                        help='Tare all sensors. If run without any parameters, tares all sensors configured; otherwise tares the specific channel or sensor given. Make sure the sensor platforms are empty and sitting level before you run this! Usage: --tare [N] [channel]')
    parser.add_argument('--debug',
                        action='store_true',
                        help='Enable debugging messages')
    parsed_args = parser.parse_args()

    config = parsed_args.config if parsed_args.config else "config.json"

    if parsed_args.tare is not None:
        tare_args = parsed_args.tare
        if len(tare_args) > 2:
            raise argparse.ArgumentTypeError('--tare takes up to two arguments')
        if len(tare_args) == 2:
            tare(config, index=tare_args[0], channel=tare_args[1])
        elif len(tare_args) == 1:
            tare(config, index=tare_args[0])
        else:
            tare(config)
    elif parsed_args.cal:
        # nargs=3 guarantees exactly INDEX, CHAN, W.
        calibrate(config, *parsed_args.cal)
    else:
        h = Hoplite(debug=parsed_args.debug)
        h.main(config_file=config, api_listen=parsed_args.api)


if __name__ == "__main__":
    __main__()
|
from django.test import TestCase
# Create your tests here.
from django.contrib.auth import get_user_model
from django.test import SimpleTestCase, TestCase
from django.urls import reverse
class HomePageTests(SimpleTestCase):
    """Smoke tests for the home page: status code, URL name, and template."""

    def test_home_page_status_code(self):
        self.assertEqual(self.client.get('/').status_code, 200)

    def test_view_url_by_name(self):
        self.assertEqual(self.client.get(reverse('home')).status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('home'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'home.html')
class SignupPageTests(TestCase):
    """Tests for the signup page and basic user creation."""

    username = 'newuser'
    email = 'newuser@email.com'

    def test_signup_page_status_code(self):
        self.assertEqual(self.client.get('/users/signup/').status_code, 200)

    def test_view_url_by_name(self):
        self.assertEqual(self.client.get(reverse('signup')).status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('signup'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'signup.html')

    def test_signup_form(self):
        user_model = get_user_model()
        user_model.objects.create_user(self.username, self.email)
        all_users = user_model.objects.all()
        self.assertEqual(all_users.count(), 1)
        self.assertEqual(all_users[0].username, self.username)
        self.assertEqual(all_users[0].email, self.email)
import bcoding,urllib.request,hashlib
import threading,re
from time import sleep
def isFileVaslid(f):
    """Check that *f* names a readable, bdecodable torrent file containing
    the mandatory 'info' and 'announce' top-level keys.

    Returns 1 when valid, 0 otherwise (ints kept for existing callers).
    """
    try:
        # `with` fixes the original's file-handle leak (the file was opened
        # and never closed).
        with open(f, "rb") as fi:
            torrent = bcoding.bdecode(fi)  # raises if not valid bencoding
        # KeyError here means a required top-level field is missing.
        torrent['info']
        torrent['announce']
        return 1
    except Exception:
        # Any failure (missing file, bad encoding, missing keys) => invalid.
        return 0
# Module-level smoke check: validates the bundled sample torrent at import time.
m = isFileVaslid("833A8D93CC94FE1E699779C3C403B062763CE4BE.torrent")
def getTrackers(fi):
    """Return [trackers, filehash] for the torrent file *fi*.

    trackers -- the 'announce' URL plus the first URL of every tier in the
                optional 'announce-list'
    filehash -- hex SHA-1 of the raw torrent file bytes
    """
    # Read the raw bytes once. The original decoded straight from the open
    # file object and then hashed f.read(), which returned b"" because the
    # decoder had already consumed the stream -- so the hash was always the
    # SHA-1 of the empty string. It also leaked the file handle.
    with open(fi, "rb") as f:
        raw = f.read()
    torrent = bcoding.bdecode(raw)
    filehash = hashlib.sha1(raw).hexdigest()
    trackers = [torrent['announce']]
    # 'announce-list' is optional; each entry is a tier (list) of URLs.
    for tier in torrent.get('announce-list', []):
        trackers.append(tier[0])
    # Always return the same [trackers, filehash] shape: the original
    # returned a bare trackers list when 'announce-list' was missing, which
    # broke the info[0]/info[1] unpacking in communicate().
    return [trackers, filehash]
# getTrackers("833A8D93CC94FE1E699779C3C403B062763CE4BE.torrent")
def expansion(trakers, info_hash):
    """Append the announce query string (info_hash, peer_id, port) to every
    tracker URL, mutating the list in place, and return it."""
    query = "?info_hash=" + info_hash + "&peer_id=AZ34343&port=6881"
    for pos, base_url in enumerate(trakers):
        trakers[pos] = base_url + query
    return trakers
# Shared result accumulator filled by connect(): 1 when the tracker response
# matched "fail reason", 0 otherwise (including any connection error).
v = []
def connect(url):
    """Fetch the tracker announce *url* and record the outcome in the
    module-global list `v`: 1 if the body matched "fail reason", else 0.
    """
    try:
        reponse = urllib.request.urlopen(url)
        # Crude pacing between tracker requests.
        sleep(5)
        data = reponse.read()
        # NOTE(review): tracker responses are bencoded bytes; utf-8 decoding
        # can raise here, which the bare except below turns into a 0.
        data = data.decode("utf-8")
        # NOTE(review): re.match anchors at the *start* of the body, and the
        # BitTorrent spec key is "failure reason" -- confirm this pattern
        # actually fires on real tracker error responses, and whether a
        # failure message should really be recorded as 1 (verify() counts
        # 1s as success).
        m = re.match("fail reason",data)
        if m is not None:
            v.append(1)
        else:
            v.append((0))
    except:
        # Bare except: any failure (network, decode, bad URL) counts as 0.
        v.append(0)
def communicate(fi):
    """Query every tracker of torrent file *fi* concurrently; each thread
    records its result in the module-global list `v` (one entry per URL)."""
    info = getTrackers(fi)
    trackers = expansion(info[0], info[1])
    print(trackers[0])
    print(trackers[1])
    threads = []
    for url in trackers:
        # BUG FIX: pass args as a tuple. The original used a set literal
        # ({trackers[i]}), which only worked by accident for a single
        # argument and is unordered by definition.
        threads.append(threading.Thread(target=connect, args=(url,)))
    for t in threads:
        t.start()
    for t in threads:
        t.join()
def verify(file_name):
    """Return True when *file_name* is a valid torrent file and at least one
    tracker result in the module-global list `v` is a 1."""
    if isFileVaslid(file_name) == 0:
        return False
    return any(result == 1 for result in v)
|
'''
Created on Dec 6, 2015
@author: ams889
This module contains the unit test class for assignment 10
'''
import unittest
import random
from functions import *
from userDefinedErrorHandling import *
from restaurant_grades import *
class Test(unittest.TestCase):
    """Unit tests for the restaurant-grades assignment: the variousGrades
    class, its custom errors, and the standalone grading functions."""
    #testing class for the main components of this assignment
    def testingClass(self):
        """Build variousGrades from loaded+cleaned data; check error paths."""
        df=dataLoad()
        df=dataClean(df)
        classInstance1=variousGrades(df)
        #Testing valid input for variousGrades Class
        self.assertIsInstance(variousGrades(df),variousGrades)
        #Testing invalid input
        self.assertRaises(ValueError, classInstance1.test_restaurant_grades, "purple")
        self.assertRaises(CamisError, classInstance1.test_restaurant_grades, 99999999999)
        self.assertRaises(ValueError, classInstance1.boro_grades, "Not a borough")
    def testingFunctions(self):
        """An empty grade list must raise grade_listFormatError."""
        self.assertRaises(grade_listFormatError, test_grades, [])
if __name__ == "__main__":
    unittest.main()
#import pandas as pd
from typing import List, Iterator
from pathlib import Path
# Machine-specific absolute path to the puzzle-input directory.
data_folder = Path('/Users/pietro.pravettoni/developer/adventofcode/data/')
'''
with open(data_folder / 'frequencies.txt') as raw_data:
frequencies = []
lines = raw_data.read().splitlines()
for i in lines:
frequencies.append(int(i))
'''
# Parse one integer frequency change per line of the puzzle input.
with open(data_folder / 'frequencies.txt') as f:
    frequencies = [int(line.strip()) for line in f]
# Part 1 answer would be the plain sum of all changes.
#print(sum(frequencies))
'''
def calibrate_device(f: list, first=0, last=1) -> int:
#first = 0
#last = 1
partial_sum = f[first] + f[last]
next_partial = partial_sum + f[last+1]
while last < len(f):
if partial_sum == next_partial:
print(partial_sum)
return partial_sum
else:
return calibrate_device(f, first+1, last+1)
'''
def all_frequencies(numbers: List[int], start: int = 0) -> Iterator[int]:
    """Yield the running frequency before each change, cycling through
    *numbers* forever (the first value yielded is *start*)."""
    total = start
    while True:
        for delta in numbers:
            yield total
            # Resumes here on the next iteration of the generator.
            total += delta


def first_repeat_frequency(numbers: List[int], start: int = 0) -> int:
    """Return the first running frequency that is reached twice."""
    visited = set()
    stream = all_frequencies(numbers, start)
    while True:
        current = next(stream)
        if current in visited:
            return current
        visited.add(current)
'''
tests = [
[+1, -1],
[+3, +3, +4, -2, -4 ],
[-6, +3, +8, +5, -6 ],
[+7, +7, -2, -7, -4]
]
'''
# test against the examples
assert first_repeat_frequency([1, -1]) == 0
assert first_repeat_frequency([+3, +3, +4, -2, -4 ]) == 10
assert first_repeat_frequency([-6, +3, +8, +5, -6 ]) == 5
assert first_repeat_frequency([+7, +7, -2, -7, -4]) == 14
# Part 2 answer: first frequency reached twice while cycling the input.
print(first_repeat_frequency(frequencies))
|
# -*- coding: utf-8 -*-
from .modules_test import QuestionMethodTests
from .views_test import QuestionIndexDetailTests, QuestionViewTests
|
from config_updated import preprocess
from config_updated import get_count
from config_updated import get_item
from config_updated import get_sugar
from config_updated import top_classifier
import random
from appos import *
def generate_reply_for_items(reply, Dia):
    """Fill reply['messageText'] with the full drinks menu plus a random
    follow-up question from `items_quest`.

    *Dia* is accepted for signature compatibility but not used here.
    """
    menu = ['espresso', 'doppio', 'macchiato', 'ristretto', 'americano', 'cappuccino', 'latte', 'mocha', 'affogato', 'black tea', 'lemon tea', 'green tea', 'oolong tea', 'white tea', 'fermented tea', 'yellow tea']
    drinks_line = 'We provide ' + ",".join(menu).title() + ' as drinks here'
    follow_up = random.choice(items_quest)
    reply['messageText'] = [[drinks_line], [follow_up]]
    return reply
def generate_reply(entity, reply, entity_list_available):
    """Ask the user for the missing slot *entity* ('Num' or 'Items'),
    acknowledging any items already captured in reply['property'].

    Mutates and returns *reply* with 'messageText' set to a list of message
    groups. Relies on num_quest / fillers / items_quest from `appos`.
    """
    if entity == 'Num':
        # Items the user has already named this turn, if any.
        items = [i['Items'] for i in reply['property'] if 'Items' in i]
        if len(items) == 0:
            items = ''
        else:
            items = ",".join(items[0])
        # NOTE(review): the second option below is built with a backslash
        # line continuation *inside* the string literal, so the exact text
        # depends on the source file's whitespace.
        first_reply = ['Choosing ' + items.title() + ' is a good option.', items.title() + '\
is a fine selection', items.title() + ' is a nice choice']
        if items != '':
            reply['messageText'] = [[random.choice(first_reply)], [random.choice(num_quest)]]
        else:
            reply['messageText'] = [[random.choice(fillers)], [random.choice(num_quest)]]
        return reply
    elif entity == 'Items':
        # A diabetes/sugar preference may already have been captured.
        Dia = [i['Dia'] for i in reply['property'] if 'Dia' in i]
        if len(Dia) == 0:
            Dia = ''
        else:
            Dia = Dia[0]
        if Dia != '':
            reply = generate_reply_for_items(reply, Dia)
            return reply
        else:
            reply['messageText'] = [['Sorry We dont have that item here..' ], ['We offer espresso, doppio, macchiato, ristretto, americano, cappuccino, latte, mocha, affogato, black tea, lemon tea, green tea, oolong tea, white tea, fermented tea and yellow tea.'], ['What would you like to have..?']]
            return reply
def build_model(question, kern_medical, symp_list):
    """Main dialogue step: clean the user text, try the AIML-style kernel
    first, then fall back to intent classification plus slot filling.

    question     -- dict with 'messageText' and 'messageSource'
    kern_medical -- kernel object with a .respond() method
    symp_list    -- slot dicts already collected across previous turns

    Returns a response dict with 'property' (slots) and 'messageText'.

    NOTE(review): `i.keys()[0]` below raises TypeError on Python 3 (dict
    views are not indexable) -- this function appears to target Python 2.
    The local name `input` also shadows the builtin.
    """
    input = question['messageText']
    cleaned_user_input = preprocess(question['messageText'])
    question['messageText'] = cleaned_user_input
    kernel_reply = kern_medical.respond(question['messageText'])
    # If the kernel recognized the utterance, answer with it directly.
    if not "Sorry, I didn't get you.." in kernel_reply:
        response = {}
        response['property'] = []
        response['messageText'] = [kernel_reply]
        return response
    response = {}
    response['property'] = []
    response['property'].extend(symp_list)
    t_label = top_classifier(question['messageText'])
    # Label 1: menu request -- reply with the full price list.
    if t_label == 1 and question['messageSource'] == 'messageFromUser':
        response['messageText'] = [['We offer a wide variety of Tea and Coffee..'], ['Espresso : 70Rs', ' Doppio : 60Rs', ' Macchiato : 80Rs', ' Ristretto : 70Rs', ' Americano : 90Rs', ' Cappuccino : 60Rs', ' Latte : 80Rs', ' Mocha : 70Rs', ' Affogato : 90Rs', ' Black Tea : 40Rs', ' Lemon Tea : 40Rs', ' Green Tea : 50Rs', ' Oolong Tea : 60Rs', ' White Tea : 50Rs', ' Fermented Tea : 60Rs', ' Yellow Tea : 70Rs']
        , ['Please Select from the above List']]
        return response
    # Label 2: an order -- extract item/count/sugar slots, then ask for
    # whichever slot is still missing.
    elif t_label == 2 and question['messageSource'] == 'messageFromUser':
        response['property'] = get_item(question['messageText'], response['property'])
        response['property'] = get_count(question['messageText'], response['property'])
        response['property'] = get_sugar(question['messageText'], response['property'])
        entity_list = ['Items', 'Num', 'Dia']
        entity_list_available = [i.keys()[0] for i in response['property']]
        entity_list = [i for i in entity_list if i not in entity_list_available]
        # NOTE(review): the loop variable `i` is reused by the nested
        # comprehensions below; on Python 2 (no comprehension scope) the
        # outer `i` would be clobbered.
        for i in entity_list:
            if i == 'Dia':
                items = [i['Items'] for i in response['property'] if 'Items' in i]
                num = [i['Num'] for i in response['property'] if 'Num' in i]
                if len(items) != 0 and len(num) != 0:
                    items_ = ",".join(items[0])
                    num_ = ",".join(num[0])
                    if len(items[0]) >= 2:
                        response['messageText'] = [['You ordered ' + num_ + ' ' + 'cups of ' + ' ' + items_.title() + ' ' + 'respectively'], [random.choice(fillers)], ['Please tell me.., you want it with or without sugar..?']]
                    else:
                        response['messageText'] = [['You ordered ' + num_ + ' ' + 'cups of ' + ' ' + items_.title()], [random.choice(fillers)], ['Please tell me.., you want it with or without sugar..?']]
                    return response
                else:
                    response['messageText'] = [[random.choice(fillers)], ['With or Without Sugar..?']]
                    return response
            elif i not in entity_list_available:
                response = generate_reply(i, response, entity_list_available)
                return response
            else:
                continue
        return response
    # Label 3: unrecognized -- steer the user back on topic.
    elif t_label == 3 and "Sorry, I didn't get you.." in kernel_reply:
        response['messageText'] = ['Please Ask Something that we can provide / related to coffee shop']
        return response
|
# Exercies 20
print "Exercise 20: Functions and Files\n"
#import module needed from associated package
from sys import argv
#identify arguments needed for the command line
# Usage: python ex20.py <input_file>
script, input_file = argv
# Function to read and print all the contents of argument f, if something were
# specified within the read(), those params would dictate what to read
# Reading large files can do bad things so be specific (example is small file)
def print_all(f):
    """Print the entire remaining contents of file object *f*."""
    print f.read()
# f.seek(offset, from_what) changes the file object's position. The position is
# computed from adding offset to a reference point; the reference point is
# selected by the from_what argument. A from_what value of 0 measures from
# the beginning of the file, 1 uses the current file position, and 2 uses
# the end of the file as the reference point. from_what can be omitted and
# defaults to 0, using the beginning of the file as the reference point.
# See MethodsOfFileObjects.py for more details
def rewind(f):
    """Move the read head of *f* back to the start of the file (byte 0)."""
    f.seek(0)
# Print the value of line_count and then read one line of file object f
def print_a_line(line_count, f):
    """Print *line_count* followed by the next line read from *f*."""
    print line_count, f.readline()
# Open the file and assign it to a variable name so we can take actions on it
# Open the file and assign it to a variable name so we can take actions on it
current_file = open(input_file)
# Print the function print_all using the argument current_file as defined above
print "First let's print the whole file:\n"
print_all(current_file)
#Print the function rewind using the argument current_file as defined above
print "Now let's rewind, kind of like a tape."
rewind(current_file)
# Note:
# The statement x = x + y which is essentially concatenation, adding y to
# whatever x is
# The statement x += y is equivalent to x = operator.iadd(x, y).
# Another way to put it is to say that z = operator.iadd(x, y) is equivalent
# to the compound statement z = x; z += y.
# a = iconcat(a, b) is equivalent to a += b for a and b sequences.
#Since we set seek(0) we are starting at the beginning of the document
print "Let's print three lines:"
current_line = 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
print "Let's print three lines using one line of code:"
current_line = 1
# NOTE(review): despite the banner above, only one line is printed here --
# the "one line of code" variant of the exercise was never completed.
print_a_line(current_line, current_file)
# MacBook-Pro-6:HelloWorld jessicadeluca$ python ex20.py test.txt
# Exercise 20: Functions and Files
#
# First let's print the whole file:
#
# This is a test to see
# if this text can be copied to a new file.
# Let's see if it works.
#
# Hurray!!
# The End.
#
# Now let's rewind, kind of like a tape.
# Let's print three lines:
# 1 This is a test to see
#
# 2 if this text can be copied to a new file.
#
# 3 Let's see if it works.
# q: What is f in the print_all and other functions?
# The f is a variable just like you had in other functions in Exercise 18,
# except this time it's a file. A file in Python is kind of like an old
# tape drive on a mainframe, or maybe a DVD player. It has a "read head,"
# and you can "seek" this read head around the file to positions, then
# work with it there. Each time you do f.seek(0) you're moving to the
# start of the file. Each time you do f.readline() you're reading a line
# from the file, and moving the read head to right after the \n that ends
# that line. This will be explained more as you go on.
# q: Why does seek(0) not set the current_line to 0?
# First, the seek() function is dealing in bytes, not lines. The code seek(0) moves the file to the 0 byte (first byte) in the file. Second, current_line is just a variable and has no real connection to the file at all. We are manually incrementing it.
# q: What is +=?
# You know how in English I can rewrite "it is" as "it's"? Or I can rewrite "you are" as "you're"? In English this is called a contraction, and this is kind of like a contraction for the two operations = and +. That means x = x + y is the same as x += y.
# q: How does readline() know where each line is?
# Inside readline() is code that scans each byte of the file until it finds a \n character, then stops reading the file to return what it found so far. The file f is responsible for maintaining the current position in the file after each readline() call, so that it will keep reading each line.
# q: Why are there empty lines between the lines in the file?
# The readline() function returns the \n that's in the file at the end of that line. Add a , at the end of your print function calls to avoid adding double \n to every line.
|
# Module author tag.
__author__ = 'lora'
from server_connector import UGRacingSQLServerConnector as sq
import time
import random
# Direction of the automatic gear sweep: 0 = shifting up, 1 = shifting down.
flag = 0


def GEAR(GEAR, RPM):
    """Simulate a gear value for the telemetry replay.

    Below 800 RPM the gear drops to 1st; otherwise the gear sweeps up and
    down between 1 and 5, with direction tracked by the module-global `flag`.
    """
    global flag
    if RPM < 800:
        return 1
    if flag == 0:
        GEAR += 1
    if flag == 1:
        GEAR -= 1
    # Reverse direction at the ends of the 1..5 range.
    if GEAR >= 5:
        flag = 1
    if GEAR <= 1:
        flag = 0
    return GEAR
# Direction of the jitter applied by randomize(): 0 = add, 1 = subtract.
flag2 = 0


def randomize(i):
    """Return *i* jittered by a random 1-4 offset, rounded to 3 decimals,
    alternating between adding and subtracting on successive calls.

    BUG FIX: the original set flag2 *after* the return statement, so the
    toggle was unreachable and every call added the offset.
    """
    global flag2
    if flag2 == 0:
        flag2 = 1
        return round(i + random.uniform(1, 4), 3)
    else:
        flag2 = 0
        return round(i - random.uniform(1, 4), 3)
def WHEEL_Temperature(SPEED):
    """Derive a synthetic wheel temperature from vehicle speed."""
    return 2.3 * SPEED
def SPEED(RPM):
    """Derive a synthetic vehicle speed from engine RPM."""
    derived_speed = RPM / 100
    return derived_speed
def BRAKE(RPM_reference, RPM_current):
    """Estimate brake application (0..1) from how far the current RPM
    overshoots the reference RPM; 0 when there is no overshoot."""
    overshoot = RPM_current - RPM_reference
    if overshoot <= 0:
        return 0
    ratio = overshoot / 2000
    # Clamp to 1; otherwise report to 3 decimal places.
    return 1 if ratio > 1 else round(ratio, 3)
def normalise_THROTTLE(value):
    """Scale a small throttle reading (column 4 of a log row) up by a factor
    of 9; readings of 10 or more are left untouched (as the original string).

    Mutates and returns *value*.
    """
    throttle = float(value[4])
    if throttle < 10:
        # Negative readings flip sign so the stored value stays positive.
        value[4] = throttle * (-9 if throttle < 0 else 9)
    return value
def main():
    """Replay engine_log.txt in (near) real time, synthesizing the
    non-engine sensor channels and pushing everything to the telemetry
    server at ~5 samples per second, forever.

    NOTE(review): server credentials are hard-coded below -- they should be
    moved out of source control.
    """
    connection = sq('http://127.0.0.1:8000/telemetry-api/add-values/', 'root', 'Ellsolan', 'Hackathon')
    max = 0
    RPM_current = 0
    RPM_comparison = 0
    f = open('engine_log.txt', 'r')
    # Column index in the tab-separated log line -> telemetry tag name.
    value_position = {2:'RPM', 3:'MAP',4: 'THROTTLE', 5: 'O2', 6: 'MAT', 7: 'Coolant_T', 29:'Battery'}
    non_engine_sensors = {'SPEED': 0, 'GEAR': 0, 'BRAKE' : 0 , 'Wheel_V': 0, 'WHEEL_T': 0, 'FUEL': 0.8}
    WHEEL_Temperature_array = [0,0,0,0]
    wheel_SPEED_array = [0,0,0,0]
    lines = f.readlines()
    counter_BRAKE = 0
    counter_GEAR = 0
    while (True):
        for line in lines:
            start = time.time()
            print 'in for loop'
            counter_GEAR = counter_GEAR + 1
            values = line.split('\t')
            RPM_current = float(values[2])
            values = normalise_THROTTLE(values)
            # Derive the synthetic (non-engine) channels from RPM.
            non_engine_sensors['SPEED'] = SPEED(RPM_current)
            non_engine_sensors['Wheel_V'] = 1.76*non_engine_sensors['SPEED']
            non_engine_sensors['WHEEL_T'] = WHEEL_Temperature(non_engine_sensors['SPEED'])
            # Only update the gear every 10 samples.
            if(counter_GEAR == 10 ):
                counter_GEAR = 0
                non_engine_sensors['GEAR'] = GEAR(non_engine_sensors['GEAR'], RPM_current)
            # Jitter the per-wheel values around the vehicle-level ones.
            for i in range(0,4,1):
                WHEEL_Temperature_array[i] = randomize(non_engine_sensors['WHEEL_T'])
                wheel_SPEED_array[i] = randomize(non_engine_sensors['Wheel_V'])
            counter_BRAKE = counter_BRAKE + 1
            # Every 4th sample, estimate braking from the RPM 4 lines ahead.
            if(counter_BRAKE == 4) :
                counter_BRAKE = 0
                if(float(values[0])<3370):
                    # NOTE(review): lines.index(line) is O(n) per sample and
                    # returns the *first* matching line if duplicates exist.
                    reference_line = lines[lines.index(line)+4].split('\t')
                    RPM_reference = int(reference_line[2])
                    non_engine_sensors['BRAKE']= BRAKE(RPM_reference, RPM_current)
            connection.write_tag('SPEED', float(non_engine_sensors['SPEED']))
            i = 0
            for wheel in wheel_SPEED_array:
                connection.write_tag('WHEEL_V_' + str(i+1), float(wheel_SPEED_array[i]))
                i = i + 1
            i = 0
            # NOTE(review): iterates wheel_SPEED_array but writes the
            # temperature array -- works only because both have 4 entries.
            for wheel in wheel_SPEED_array:
                connection.write_tag('WHEEL_T_' + str(i+1), float(WHEEL_Temperature_array[i]))
                i = i + 1
            connection.write_tag('BRAKE', float(non_engine_sensors['BRAKE']))
            connection.write_tag('GEAR', float(non_engine_sensors['GEAR']))
            connection.write_tag('FUEL', float(non_engine_sensors['FUEL']))
            for i in range(0,len(values), 1):
                if(i in value_position.keys()):
                    connection.write_tag(value_position[i], float(values[i]))
            connection.commit()
            end = time.time()
            # Pace the replay to roughly one sample every 0.2 s.
            while ((end-start) < 0.2):
                time.sleep(0.1)
                end = time.time()
            end = time.time()
            print 'time taken is ' + str(end - start)
main()
|
import dataclasses
import datetime
import hashlib
import json
import logging
import types
from typing import Dict, List
import sbol3
import labop
import labop.data
import uml
from labop.lab_interface import LabInterface
# Module logger -- note it is keyed by __file__ (a path) rather than the
# conventional __name__.
l = logging.getLogger(__file__)
l.setLevel(logging.ERROR)
# Namespace prefix under which the standard LabOP primitives are registered.
PRIMITIVE_BASE_NAMESPACE = "https://bioprotocols.org/labop/primitives/"
def call_behavior_execution_compute_output(self, parameter, sample_format):
    """
    Get parameter value from call behavior execution
    :param self:
    :param parameter: output parameter to define value
    :return: value
    """
    primitive = self.node.lookup().behavior.lookup()
    call = self.call.lookup()
    # Only IN-direction parameter values feed the primitive's computation.
    in_params = [
        pv
        for pv in call.parameter_values
        if pv.parameter.lookup().property_value.direction == uml.PARAMETER_IN
    ]
    return primitive.compute_output(in_params, parameter, sample_format)


# Install as labop.CallBehaviorExecution.compute_output (monkey patch).
labop.CallBehaviorExecution.compute_output = call_behavior_execution_compute_output
def call_behavior_action_compute_output(self, inputs, parameter, sample_format):
    """
    Get parameter value from call behavior action
    :param self:
    :param inputs: token values for object pins
    :param parameter: output parameter to define value
    :return: value
    """
    behavior = self.behavior.lookup()
    resolved_inputs = self.input_parameter_values(inputs=inputs)
    return behavior.compute_output(resolved_inputs, parameter, sample_format)
# Install as uml.CallBehaviorAction.compute_output (monkey patch).
uml.CallBehaviorAction.compute_output = call_behavior_action_compute_output
def call_behavior_action_input_parameter_values(self, inputs=None):
    """
    Get parameter values for all inputs
    :param self:
    :param inputs: incoming activity-edge tokens feeding this action's pins
    :return: list of labop.ParameterValue, one per input pin with a value
    """
    # Get the parameter values from input tokens for input pins
    input_pin_values = {}
    if inputs:
        # Map source-node identity -> literal wrapper of the token value.
        input_pin_values = {
            token.token_source.lookup()
            .node.lookup()
            .identity: uml.literal(token.value, reference=True)
            for token in inputs
            if not token.edge
        }
    # Get Input value pins
    value_pin_values = {
        pin.identity: pin.value for pin in self.inputs if hasattr(pin, "value")
    }
    # Convert References
    # Wrap raw referenced URIs (and re-wrap existing LiteralReferences) so
    # every value below is a uml.LiteralReference.
    value_pin_values = {
        k: (
            uml.LiteralReference(value=self.document.find(v.value))
            if hasattr(v, "value")
            and (
                isinstance(v.value, sbol3.refobj_property.ReferencedURI)
                or isinstance(v, uml.LiteralReference)
            )
            else uml.LiteralReference(value=v)
        )
        for k, v in value_pin_values.items()
    }
    # NOTE(review): value_pin_values is merged second, so a static pin value
    # overrides a token-supplied value for the same identity -- confirm that
    # precedence is intended.
    pin_values = {**input_pin_values, **value_pin_values}  # merge the dicts
    parameter_values = [
        labop.ParameterValue(
            parameter=self.pin_parameter(pin.name).property_value,
            value=pin_values[pin.identity],
        )
        for pin in self.inputs
        if pin.identity in pin_values
    ]
    return parameter_values


# Install as uml.CallBehaviorAction.input_parameter_values (monkey patch).
uml.CallBehaviorAction.input_parameter_values = (
    call_behavior_action_input_parameter_values
)
def resolve_value(v):
    """Resolve *v* to a plain value: dereference a LiteralReference (and
    unwrap a LiteralSpecification's .value); otherwise return v.value."""
    if isinstance(v, uml.LiteralReference):
        target = v.value.lookup()
        if isinstance(target, uml.LiteralSpecification):
            return target.value
        return target
    return v.value
def input_parameter_map(inputs: "List[labop.ParameterValue]"):
    """Collapse a list of ParameterValues into {parameter_name: value}.

    Parameters that appear once map to their single value; parameters that
    appear multiple times map to the list of values, in input order.

    (The original local names `map` and `input` shadowed builtins; the
    annotation is stringified so the module imports without labop loaded.)
    """
    values_by_name: Dict[str, list] = {
        pv.parameter.lookup().property_value.name: [] for pv in inputs
    }
    for pv in inputs:
        name = pv.parameter.lookup().property_value.name
        values_by_name[name].append(pv.value.get_value())
    return {k: (v[0] if len(v) == 1 else v) for k, v in values_by_name.items()}
def empty_container_compute_output(self, inputs, parameter, sample_format):
    """Output handler for the EmptyContainer primitive: return the
    SampleArray for the `samples` output parameter, either passing through a
    caller-supplied array or building one from the container specification.

    Returns None for any other parameter.
    """
    if (
        parameter.name == "samples"
        and parameter.type == "http://bioprotocols.org/labop#SampleArray"
    ):
        input_map = input_parameter_map(inputs)
        if "sample_array" in input_map:
            # Caller supplied an existing SampleArray: pass it through.
            return input_map["sample_array"]
        # Otherwise build one from the container specification. (The
        # original re-checked "sample_array" inside this branch, which was
        # dead code -- the key is known to be absent here.)
        spec = input_map["specification"]
        sample_array = labop.SampleArray.from_container_spec(
            spec, sample_format=sample_format
        )
        sample_array.name = spec.name
        return sample_array
    else:
        return None
def empty_rack_compute_output(self, inputs, parameter, sample_format):
    """Output handler for EmptyRack/EmptyInstrument: build the SampleArray
    for the `slots` output parameter; None for anything else."""
    if (
        parameter.name != "slots"
        or parameter.type != "http://bioprotocols.org/labop#SampleArray"
    ):
        return None
    spec = input_parameter_map(inputs)["specification"]
    return labop.SampleArray.from_container_spec(spec, sample_format=sample_format)
def load_container_on_instrument_compute_output(self, inputs, parameter, sample_format):
    """Output handler for LoadContainerOnInstrument: build the SampleArray
    for the `samples` output parameter; None for anything else."""
    if (
        parameter.name != "samples"
        or parameter.type != "http://bioprotocols.org/labop#SampleArray"
    ):
        return None
    spec = input_parameter_map(inputs)["specification"]
    return labop.SampleArray.from_container_spec(spec, sample_format=sample_format)
def plate_coordinates_compute_output(self, inputs, parameter, sample_format):
    """Output handler for PlateCoordinates: build a SampleMask selecting
    `coordinates` within `source` for the `samples` output parameter;
    None for anything else."""
    if (
        parameter.name == "samples"
        and parameter.type == "http://bioprotocols.org/labop#SampleCollection"
    ):
        args = input_parameter_map(inputs)
        # A mask is a boolean selection layered over the source collection.
        return labop.SampleMask.from_coordinates(
            args["source"], args["coordinates"], sample_format=sample_format
        )
    return None
def get_short_uuid(obj):
    """Return a stable id in [0, 1000) for *obj*.

    The object is canonicalized to compact, key-sorted JSON (dataclasses are
    expanded, datetimes rendered in ISO format) and the MD5 of that text is
    reduced modulo 1000, so equal inputs always produce the same id.
    """

    def _default(thing):
        # Dataclass instances serialize as plain dicts.
        try:
            return dataclasses.asdict(thing)
        except TypeError:
            pass
        if isinstance(thing, datetime.datetime):
            return thing.isoformat(timespec="microseconds")
        raise TypeError(f"object of type {type(thing).__name__} not serializable")

    canonical = json.dumps(
        obj,
        default=_default,
        ensure_ascii=False,
        sort_keys=True,
        indent=None,
        separators=(",", ":"),
    )
    digest = hashlib.md5(canonical.encode("utf-8")).digest()
    return int.from_bytes(digest, "big") % 1000
def measure_absorbance_compute_output(self, inputs, parameter, sample_format):
    """Output handler for MeasureAbsorbance: produce a Dataset holding the
    absorbance measurements plus the metadata describing this call;
    None for any other parameter."""
    if (
        parameter.name != "measurements"
        or parameter.type != "http://bioprotocols.org/labop#Dataset"
    ):
        return None
    input_map = input_parameter_map(inputs)
    samples = input_map["samples"]
    wavelength = input_map["wavelength"]
    measurements = LabInterface.measure_absorbance(
        samples.get_coordinates(sample_format), wavelength.value, sample_format
    )
    # Stable short token keeps the generated data name reproducible.
    token = get_short_uuid(
        [self.identity, parameter.identity, [pv.value.identity for pv in inputs]]
    )
    name = f"{self.display_id}.{parameter.name}.{token}"
    sample_data = labop.SampleData(name=name, from_samples=samples, values=measurements)
    sample_metadata = labop.SampleMetadata.for_primitive(
        self, input_map, samples, sample_format=sample_format
    )
    return labop.Dataset(data=sample_data, metadata=[sample_metadata])
def measure_fluorescence_compute_output(self, inputs, parameter, sample_format):
    """Output handler for MeasureFluorescence: produce a Dataset holding the
    fluorescence measurements plus the metadata describing this call;
    None for any other parameter."""
    if (
        parameter.name != "measurements"
        or parameter.type != "http://bioprotocols.org/labop#Dataset"
    ):
        return None
    input_map = input_parameter_map(inputs)
    samples = input_map["samples"]
    excitation = input_map["excitationWavelength"]
    emission = input_map["emissionWavelength"]
    bandpass = input_map["emissionBandpassWidth"]
    measurements = LabInterface.measure_fluorescence(
        samples.get_coordinates(sample_format),
        excitation.value,
        emission.value,
        bandpass.value,
        sample_format,
    )
    # Stable short token keeps the generated data name reproducible.
    token = get_short_uuid(
        [self.identity, parameter.identity, [pv.value.identity for pv in inputs]]
    )
    name = f"{self.display_id}.{parameter.name}.{token}"
    sample_data = labop.SampleData(name=name, from_samples=samples, values=measurements)
    sample_metadata = labop.SampleMetadata.for_primitive(
        self, input_map, samples, sample_format=sample_format
    )
    return labop.Dataset(data=sample_data, metadata=[sample_metadata])
def join_metadata_compute_output(self, inputs, parameter, sample_format):
    """Output handler for JoinMetadata: wrap a dataset and its linked
    metadata into a new Dataset for the `enhanced_dataset` output parameter;
    None for anything else."""
    if (
        parameter.name != "enhanced_dataset"
        or parameter.type != "http://bioprotocols.org/labop#Dataset"
    ):
        return None
    args = input_parameter_map(inputs)
    return labop.Dataset(dataset=[args["dataset"]], linked_metadata=[args["metadata"]])
def join_datasets_compute_output(self, inputs, parameter, sample_format):
    """Output handler for JoinDatasets: combine several datasets (and any
    optional metadata) into one Dataset for the `joint_dataset` output
    parameter; None for anything else."""
    if (
        parameter.name != "joint_dataset"
        or parameter.type != "http://bioprotocols.org/labop#Dataset"
    ):
        return None
    args = input_parameter_map(inputs)
    # Metadata is optional; treat a missing or falsy entry as "none".
    linked = args["metadata"] if args.get("metadata") else []
    return labop.Dataset(dataset=args["dataset"], linked_metadata=linked)
def excel_metadata_compute_output(self, inputs, parameter, sample_format):
    """Output handler for ExcelMetadata: load SampleMetadata for the given
    samples from an Excel file; None for any other parameter."""
    if (
        parameter.name != "metadata"
        or parameter.type != "http://bioprotocols.org/labop#SampleMetadata"
    ):
        return None
    args = input_parameter_map(inputs)
    # for_samples is expected to describe a data array -- TODO confirm.
    return labop.SampleMetadata.from_excel(
        args["filename"], args["for_samples"], sample_format=sample_format
    )
def compute_metadata_compute_output(self, inputs, parameter, sample_format):
    # Compute-output handler for the ComputeMetadata primitive: builds a
    # labop.SampleMetadata value when the requested output parameter is
    # "metadata". Returns None for any other parameter.
    if (
        parameter.name == "metadata"
        and parameter.type == "http://bioprotocols.org/labop#SampleMetadata"
    ):
        input_map = input_parameter_map(inputs)
        for_samples = input_map["for_samples"]
        samples = for_samples.to_data_array()
        # FIXME(review): `trajectory_graph` and `tick` are not defined in this
        # scope — this line raises NameError if the branch executes. Confirm
        # where they were meant to come from.
        trajectory_graph.metadata(samples, tick)
        # FIXME(review): `filename` is also undefined here (this looks copied
        # from excel_metadata_compute_output) — confirm the intended metadata
        # source for this primitive.
        metadata = labop.SampleMetadata.from_excel(
            filename, for_samples, sample_format=sample_format
        )
        return metadata
# Dispatch table: primitive display name -> function to install as that
# primitive's compute_output() implementation (see
# initialize_primitive_compute_output). Note EmptyInstrument and EmptyRack
# deliberately share the same handler.
primitive_to_output_function = {
    "EmptyContainer": empty_container_compute_output,
    "PlateCoordinates": plate_coordinates_compute_output,
    "MeasureAbsorbance": measure_absorbance_compute_output,
    "MeasureFluorescence": measure_fluorescence_compute_output,
    "EmptyInstrument": empty_rack_compute_output,
    "EmptyRack": empty_rack_compute_output,
    "LoadContainerOnInstrument": load_container_on_instrument_compute_output,
    "JoinMetadata": join_metadata_compute_output,
    "JoinDatasets": join_datasets_compute_output,
    "ExcelMetadata": excel_metadata_compute_output,
}
def initialize_primitive_compute_output(doc: sbol3.Document):
    """Attach specialized compute_output() implementations to primitives.

    For each entry in primitive_to_output_function, look up the primitive
    by name in *doc* and bind the matching handler as its compute_output
    method. Primitives that cannot be resolved (e.g. because their library
    was not imported) are skipped with a warning.

    :param doc: document in which to resolve the primitives
    """
    for name, handler in primitive_to_output_function.items():
        try:
            primitive = labop.get_primitive(doc, name, copy_to_doc=False)
            primitive.compute_output = types.MethodType(handler, primitive)
        except Exception as e:
            # Bug fix: the caught exception was previously dropped from the
            # message, making failures undiagnosable — include it.
            l.warning(
                f"Could not set compute_output() for primitive {name} ({e}), "
                "did you import the correct library?"
            )
def primitive_compute_output(self, inputs, parameter, sample_format):
    """
    Compute the value for parameter given the inputs. This default function will be overridden for specific primitives.
    :param self:
    :param inputs: list of labop.ParameterValue
    :param parameter: Parameter needing value
    :return: value
    """
    if hasattr(parameter, "type") and parameter.type in sbol3.Document._uri_type_map:
        # Generalized handler for output tokens, see #125
        # TODO: This currently assumes the output token is an sbol3.TopLevel
        # Still needs special handling for non-toplevel tokens
        builder_fxn = sbol3.Document._uri_type_map[parameter.type]
        # Construct object with a unique URI
        instance_count = 0
        successful = False
        while not successful:
            try:
                token_id = f"{parameter.name}{instance_count}"
                output_token = builder_fxn(token_id, type_uri=parameter.type)
                if isinstance(output_token, sbol3.TopLevel):
                    self.document.add(output_token)
                else:
                    # Non-TopLevel objects are rebuilt without a display id
                    # and are not added to the document.
                    output_token = builder_fxn(None, type_uri=parameter.type)
                successful = True
            except ValueError:
                # Presumably a URI collision on add — bump the counter and
                # retry with a fresh id. TODO confirm this is the only
                # ValueError source here.
                instance_count += 1
        return output_token
    else:
        l.warning(
            f"No builder found for output Parameter of {parameter.name}. Returning a string literal by default."
        )
        return f"{parameter.name}"


# Install as the default compute_output for every Primitive; specific
# primitives get specialized handlers via initialize_primitive_compute_output.
labop.Primitive.compute_output = primitive_compute_output
# def empty_container_initialize_contents(self, sample_format, geometry='A1:H12'):
# l.warning("Warning: Assuming that the SampleArray is a 96 well microplate!")
# aliquots = get_sample_list(geometry)
# #initial_contents = json.dumps(xr.DataArray(dims=("aliquot", "initial_contents"),
# # coords={"aliquot": aliquots}).to_dict())
# if sample_format == 'xarray':
# initial_contents = json.dumps(xr.DataArray(aliquots, dims=("aliquot")).to_dict())
# elif sample_format == 'json':
# initial_contents = quote(json.dumps({c: None for c in aliquots}))
# else:
# raise Exception(f"Cannot initialize contents of: {self.identity}")
# return initial_contents
# labop.Primitive.initialize_contents = empty_container_initialize_contents
def transfer_out(self, source, target, plan, sample_format):
    # Return the post-transfer contents of the *source* array (as a JSON
    # string) after moving material to *target* according to *plan*.
    if sample_format == "xarray":
        sourceResult, targetResult = self.transfer(source, target, plan, sample_format)
        return json.dumps(sourceResult.to_dict())
    elif sample_format == "json":
        # FIXME(review): `aliquots` is undefined in this scope — this branch
        # raises NameError if taken (looks copied from an initialize_contents
        # helper). Confirm intended behavior for the "json" format.
        contents = quote(json.dumps({c: None for c in aliquots}))
    else:
        raise Exception(f"Cannot initialize contents of: {self.identity}")
    return contents


# Bind as a method on labop.Primitive.
labop.Primitive.transfer_out = transfer_out
def transfer_in(self, source, target, plan, sample_format):
    # Return the post-transfer contents of the *target* array (as a JSON
    # string) after receiving material from *source* according to *plan*.
    if sample_format == "xarray":
        sourceResult, targetResult = self.transfer(source, target, plan, sample_format)
        return json.dumps(targetResult.to_dict())
    elif sample_format == "json":
        # FIXME(review): `aliquots` is undefined in this scope — this branch
        # raises NameError if taken. Confirm intended behavior for "json".
        contents = quote(json.dumps({c: None for c in aliquots}))
    else:
        raise Exception(f"Cannot initialize contents of: {self.identity}")
    return contents


# Bind as a method on labop.Primitive.
labop.Primitive.transfer_in = transfer_in
def transfer(self, source, target, plan, sample_format):
    # Compute the contents of *source* and *target* after executing the
    # transfer *plan*. For "xarray" returns (source_result, target_result).
    if sample_format == "xarray":
        source_contents = source.to_data_array()
        target_contents = target.to_data_array()
        transfer = plan.get_map()
        if (
            source.name in transfer.source_array
            and target.name in transfer.target_array
        ):
            # Rename dims so source and target coordinates don't collide
            # when multiplied against the transfer map.
            source_result = source_contents.rename(
                {"aliquot": "source_aliquot", "array": "source_array"}
            )
            target_result = target_contents.rename(
                {"aliquot": "target_aliquot", "array": "target_array"}
            )
            # Per-component fraction of each source aliquot's contents.
            source_concentration = source_result / source_result.sum(dim="contents")
            amount_transferred = source_concentration * transfer
            # Remove transferred amounts from the source, add to the target.
            source_result = source_result - amount_transferred.sum(
                dim=["target_aliquot", "target_array"]
            )
            target_result = target_result + amount_transferred.sum(
                dim=["source_aliquot", "source_array"]
            )
            return source_result, target_result
        else:
            # Plan does not involve these arrays: contents are unchanged.
            return source_contents, target_contents
    elif sample_format == "json":
        # FIXME(review): `aliquots` is undefined in this scope — this branch
        # raises NameError if taken. Confirm intended behavior for "json".
        contents = quote(json.dumps({c: None for c in aliquots}))
    else:
        raise Exception(f"Cannot initialize contents of: {self.identity}")
    return contents


# Bind as a method on labop.Primitive.
labop.Primitive.transfer = transfer
def declare_primitive(
    document: sbol3.Document,
    library: str,
    primitive_name: str,
    template: labop.Primitive = None,
    inputs: List[Dict] = None,
    outputs: List[Dict] = None,
    description: str = "",
):
    """Look up or create a labop.Primitive in the given library namespace.

    If the primitive already exists in *document* it is returned unchanged;
    otherwise a new one is built from *template*, *inputs*, and *outputs*
    and added to the document. The sbol3 namespace is restored afterwards.

    :param document: document in which to resolve or register the primitive
    :param library: library suffix appended to PRIMITIVE_BASE_NAMESPACE
    :param primitive_name: display name of the primitive
    :param template: optional primitive whose parameters are inherited
    :param inputs: list of dicts with keys "name", "type" and optional
        "optional" and "default_value"
    :param outputs: list of dicts with keys "name" and "type"
    :param description: human-readable description for a new primitive
    :return: the existing or newly created labop.Primitive
    """
    # Bug fix: the defaults were the shared mutable literal {} (and typed as
    # List[Dict]); use None sentinels and normalize below.
    old_ns = sbol3.get_namespace()
    sbol3.set_namespace(PRIMITIVE_BASE_NAMESPACE + library)
    try:
        primitive = labop.get_primitive(name=primitive_name, doc=document)
        if not primitive:
            raise Exception("Need to create the primitive")
    except Exception:
        primitive = labop.Primitive(primitive_name)
        primitive.description = description
        if template:
            primitive.inherit_parameters(template)
        for input_spec in inputs or []:
            optional = input_spec.get("optional", False)
            default_value = input_spec.get("default_value")
            primitive.add_input(
                input_spec["name"],
                input_spec["type"],
                optional=optional,
                # Bug fix: default_value was computed but then ignored —
                # the call previously hard-coded default_value=None.
                default_value=default_value,
            )
        for output_spec in outputs or []:
            primitive.add_output(output_spec["name"], output_spec["type"])
        document.add(primitive)
    sbol3.set_namespace(old_ns)
    return primitive
|
import tensorflow as tf
import numpy as np
from tensorflow.keras.layers import Dense
from tensorflow.keras import Sequential
import warnings

# Silence TF/Keras deprecation noise on the console.
warnings.filterwarnings('ignore')

# The Korean text below documents the Boston Housing columns and states the
# task: predict the house price (MEDV) for the given predict_data row.
'''
CRIM: 범죄율
INDUS: 비소매상업지역 면적 비율
NOX: 일산화질소 농도
RM: 주택당 방 수
LSTAT: 인구 중 하위 계층 비율
B: 인구 중 흑인 비율
PTRATIO: 학생/교사 비율
ZN: 25,000 평방피트를 초과 거주지역 비율
CHAS: 찰스강의 경계에 위치한 경우는 1, 아니면 0
AGE: 1940년 이전에 건축된 주택의 비율
RAD: 방사형 고속도로까지의 거리
DIS: 직업센터의 거리
TAX: 재산세율
MEDV : 주택가격
문제
다음 데이터가 주어졌을 때 주택가격 예측하기
predict_data = [[0.02729 0 7.07 0 0.469 7.185 61.1 4.9671 2 242 17.8 392.83 4.03]]
'''

# Load the CSV (skip the header row); every column is parsed as float32.
data = np.loadtxt("../../../data/BostonHousing.csv", skiprows=1, delimiter=",", dtype=np.float32)
x_data = data[:, :-1]  # 13 feature columns
y_data = data[:, -1:]  # MEDV target, kept 2-D: shape (n, 1)
print(x_data.shape)
print(y_data.shape)

# Fully connected regression network: 13 -> 20 -> 20 -> 1.
# NOTE(review): no activation is specified, so Dense defaults to linear and
# the stacked layers collapse into one linear model — confirm whether
# activation='relu' was intended on the hidden layers.
layer1 = Dense(units=20, input_shape=[13])
layer2 = Dense(units=20, input_shape=[20])
output_layer = Dense(units=1, input_shape=[20])
model = Sequential()
model.add(layer1)
model.add(layer2)
model.add(output_layer)

# Mean squared error + Adam: standard setup for scalar regression.
model.compile(loss='mean_squared_error', optimizer='adam')
history = model.fit(x_data, y_data, epochs=2000)

print(model.predict(x_data))
print(model.evaluate(x_data, y_data))

# Single unseen sample with the same 13-feature layout as x_data.
predict_data = np.array([[0.02729, 0, 7.07, 0, 0.469, 7.185, 61.1, 4.9671, 2, 242, 17.8, 392.83, 4.03]])
print(model.predict(predict_data))
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'showInfo.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
from views.supervisor.residentInfo import Ui_residentInfo
class Ui_ShowInfo(object):
    # Read-only dialog showing a road-repair record. Generated by pyuic5 from
    # showInfo.ui, then hand-extended with a record-unpacking constructor and
    # the "Show Resident" action.
    def __init__(self,*args):
        # Field values displayed in the dialog; all default to None so the
        # dialog can also be opened empty (see the __main__ harness).
        self.idVal = None
        self.locVal = None
        self.startVal = None
        self.endVal = None
        self.residentId = None
        self.priorityVal = None
        self.rawMaterialVal = None
        self.machinesVal = None
        self.statisticsVal = None
        # Accept a single 10-element record (presumably a DB row).
        if len(args) == 1 and len(args[0]) == 10:
            args = args[0]
            self.idVal = str(args[0])
            self.locVal = args[1]
            self.startVal = args[2]
            self.endVal = args[3]
            self.residentId = args[4]
            # NOTE(review): args[5] is skipped here — confirm which column of
            # the 10-element record it holds and that skipping it is intended.
            self.priorityVal = str(args[6])
            self.rawMaterialVal = args[7]
            self.machinesVal = args[8]
            self.statisticsVal = args[9]
    def setupUi(self, ShowInfo):
        # Build the widget tree on the ShowInfo QDialog (pyuic5-generated
        # layout with absolute geometry).
        ShowInfo.setObjectName("ShowInfo")
        ShowInfo.resize(530, 679)
        font = QtGui.QFont()
        font.setPointSize(9)
        ShowInfo.setFont(font)
        self.buttonBox = QtWidgets.QDialogButtonBox(ShowInfo)
        self.buttonBox.setGeometry(QtCore.QRect(410, 630, 91, 32))
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.label = QtWidgets.QLabel(ShowInfo)
        self.label.setGeometry(QtCore.QRect(180, 10, 191, 41))
        font = QtGui.QFont()
        font.setPointSize(13)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.line = QtWidgets.QFrame(ShowInfo)
        self.line.setGeometry(QtCore.QRect(40, 50, 441, 20))
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.label_3 = QtWidgets.QLabel(ShowInfo)
        self.label_3.setGeometry(QtCore.QRect(20, 90, 91, 21))
        self.label_3.setObjectName("label_3")
        # All QLineEdit fields are disabled: the dialog is display-only.
        self.idInput = QtWidgets.QLineEdit(ShowInfo)
        self.idInput.setEnabled(False)
        self.idInput.setGeometry(QtCore.QRect(120, 90, 113, 22))
        self.idInput.setObjectName("idInput")
        self.label_4 = QtWidgets.QLabel(ShowInfo)
        self.label_4.setGeometry(QtCore.QRect(260, 90, 71, 21))
        self.label_4.setObjectName("label_4")
        self.roadInput = QtWidgets.QLineEdit(ShowInfo)
        self.roadInput.setEnabled(False)
        self.roadInput.setGeometry(QtCore.QRect(342, 90, 151, 22))
        self.roadInput.setObjectName("roadInput")
        self.label_5 = QtWidgets.QLabel(ShowInfo)
        self.label_5.setGeometry(QtCore.QRect(30, 140, 91, 21))
        self.label_5.setObjectName("label_5")
        self.startInput = QtWidgets.QLineEdit(ShowInfo)
        self.startInput.setEnabled(False)
        self.startInput.setGeometry(QtCore.QRect(102, 140, 141, 22))
        self.startInput.setObjectName("startInput")
        self.label_6 = QtWidgets.QLabel(ShowInfo)
        self.label_6.setGeometry(QtCore.QRect(260, 140, 91, 21))
        self.label_6.setObjectName("label_6")
        self.endInput = QtWidgets.QLineEdit(ShowInfo)
        self.endInput.setEnabled(False)
        self.endInput.setGeometry(QtCore.QRect(342, 140, 151, 22))
        self.endInput.setObjectName("endInput")
        self.line_2 = QtWidgets.QFrame(ShowInfo)
        self.line_2.setGeometry(QtCore.QRect(120, 220, 281, 16))
        self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.priorityInput = QtWidgets.QLineEdit(ShowInfo)
        self.priorityInput.setEnabled(False)
        self.priorityInput.setGeometry(QtCore.QRect(260, 250, 113, 22))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.priorityInput.setFont(font)
        self.priorityInput.setObjectName("priorityInput")
        self.label_7 = QtWidgets.QLabel(ShowInfo)
        self.label_7.setGeometry(QtCore.QRect(160, 250, 91, 21))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.label_7.setFont(font)
        self.label_7.setObjectName("label_7")
        self.residentBtn = QtWidgets.QPushButton(ShowInfo)
        self.residentBtn.setGeometry(QtCore.QRect(190, 180, 141, 31))
        self.residentBtn.setObjectName("residentBtn")
        self.residentBtn.clicked.connect(self.showResidents)
        # The resident popup only makes sense when a resident id was given.
        if not self.residentId:
            self.residentBtn.setEnabled(False)
        self.label_2 = QtWidgets.QLabel(ShowInfo)
        self.label_2.setGeometry(QtCore.QRect(20, 280, 91, 31))
        self.label_2.setObjectName("label_2")
        self.rawMaterialInput = QtWidgets.QTextBrowser(ShowInfo)
        self.rawMaterialInput.setGeometry(QtCore.QRect(120, 290, 381, 91))
        self.rawMaterialInput.setObjectName("rawMaterialInput")
        self.machinesInput = QtWidgets.QTextBrowser(ShowInfo)
        self.machinesInput.setGeometry(QtCore.QRect(120, 400, 381, 91))
        self.machinesInput.setObjectName("machinesInput")
        self.label_8 = QtWidgets.QLabel(ShowInfo)
        self.label_8.setGeometry(QtCore.QRect(20, 390, 91, 31))
        self.label_8.setObjectName("label_8")
        self.statisticsInput = QtWidgets.QTextBrowser(ShowInfo)
        self.statisticsInput.setGeometry(QtCore.QRect(120, 500, 381, 91))
        self.statisticsInput.setObjectName("statisticsInput")
        self.label_9 = QtWidgets.QLabel(ShowInfo)
        self.label_9.setGeometry(QtCore.QRect(20, 490, 91, 31))
        self.label_9.setObjectName("label_9")
        self.retranslateUi(ShowInfo)
        self.buttonBox.accepted.connect(ShowInfo.accept)
        self.buttonBox.rejected.connect(ShowInfo.reject)
        QtCore.QMetaObject.connectSlotsByName(ShowInfo)
    def retranslateUi(self, ShowInfo):
        # Fill in all user-visible text, including the record values captured
        # in __init__.
        _translate = QtCore.QCoreApplication.translate
        ShowInfo.setWindowTitle(_translate("ShowInfo", "show repair info"))
        self.label.setText(_translate("ShowInfo", "Road Repair Info"))
        self.label_3.setText(_translate("ShowInfo", "Complaint Id:"))
        self.idInput.setText(_translate("ShowInfo", self.idVal))
        self.label_4.setText(_translate("ShowInfo", "Road Loc:"))
        self.roadInput.setText(_translate("ShowInfo", self.locVal))
        self.label_5.setText(_translate("ShowInfo", "StartLoc:"))
        self.startInput.setText(_translate("ShowInfo", self.startVal))
        self.label_6.setText(_translate("ShowInfo", "EndLoc:"))
        self.endInput.setText(_translate("ShowInfo", self.endVal))
        self.priorityInput.setText(_translate("ShowInfo", self.priorityVal))
        self.label_7.setText(_translate("ShowInfo", "Priority:"))
        self.residentBtn.setText(_translate("ShowInfo", "Show Resident"))
        self.label_2.setText(_translate("ShowInfo", "Raw Material:"))
        self.rawMaterialInput.setText(_translate("ShowInfo",self.rawMaterialVal))
        self.label_8.setText(_translate("ShowInfo", "Machines:"))
        self.machinesInput.setText(_translate("ShowInfo",self.machinesVal))
        self.label_9.setText(_translate("ShowInfo", "Statistics:"))
        self.statisticsInput.setText(_translate("ShowInfo",self.statisticsVal))
    def showResidents(self):
        # Open the resident-info dialog for this record's resident id
        # (modal: exec_ blocks until the popup is closed).
        residentInfo = QtWidgets.QDialog()
        ui = Ui_residentInfo(self.residentId)
        ui.setupUi(residentInfo)
        residentInfo.show()
        residentInfo.exec_()
if __name__ == "__main__":
    # Manual test harness: open the dialog standalone with all fields empty.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    ShowInfo = QtWidgets.QDialog()
    ui = Ui_ShowInfo()
    ui.setupUi(ShowInfo)
    ShowInfo.show()
    sys.exit(app.exec_())
|
# Generated by Django 2.2.6 on 2019-11-25 08:50
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the optional Consumer.isInPortal boolean flag.
    dependencies = [
        ('consumers', '0004_auto_20191123_0636'),
    ]
    operations = [
        migrations.AddField(
            model_name='consumer',
            name='isInPortal',
            # blank=True/null=True: the flag may be left unset both in forms
            # and in the database (NULL).
            field=models.BooleanField(blank=True, null=True),
        ),
    ]
|
# Exercise 5-2: conditional tests — string equality and list membership.
print('5-2')
print('\nTest for equality and inequality with strings.')
first_phrase = 'Mark is a beast.'
second_phrase = 'Mark is a savage.'
print(first_phrase == second_phrase)
print(first_phrase != second_phrase)
print('\nTest for whether an item is on a list or not on a list.')
club_members = ['Kevin', 'Mark', 'Tony']
# Pick the message with a conditional expression instead of if/else blocks.
print('Mark is in the club.' if 'Mark' in club_members
      else 'Mark aint in the club.')
print('Jim aint in the club.' if 'Jim' not in club_members
      else 'Jim is in the club.')
print('\r')
# Exercises 5-8 and 5-9: greet each user, with a special greeting for
# 'admin' and a fallback message when the user list is empty.
print('5-8 and 5-9\n')
usernames = []
if not usernames:
    # Empty-list case first (guard clause).
    print('We need to find some users!')
else:
    for name in usernames:
        if name == 'admin':
            print('Welcome back, master.')
        else:
            print('Welcome back, ' + name + '. Thank you for logging in again.')
print('\r')
# 5-10
# current_users = ['markfromjoberg', 'ncmbartlett', 'nakedcranium', 'naomiche', 'therealekevin']
# new_user = input('Please enter your desired username: ')
# new_user = new_user.lower()
# while new_user in current_users:
# print('Sorry that username is taken.')
# new_user = input('Please enter another username: ')
# new_user = new_user.lower()
# current_users.append(new_user)
# if new_user in current_users:
# print('Username successfully registered.')
# print('\r')
# Exercise 5-11: print the ordinal form (1st, 2nd, 3rd, 4th, ...) of 1-9.
print('5-11\n')
nums = list(range(1,10))
# Idiom fix: replace the if/elif chain (including the awkward
# `number in range(4,10)` membership test) with a suffix lookup table;
# every number without an irregular suffix takes 'th'.
_SUFFIXES = {1: 'st', 2: 'nd', 3: 'rd'}
ordinals = [str(number) + _SUFFIXES.get(number, 'th') for number in nums]
for ordinal in ordinals:
    print(ordinal)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.