text stringlengths 8 6.05M |
|---|
# Design & idea by Haider Alghifary www.haider.se
# Coding by <enter everything you want to put in here>
import sys
from PIL import Image
import PIL.ImageOps
import tkinter as tk
from tkinter import messagebox
import os
from os import path

# Hide the root window; this script only shows message boxes.
root = tk.Tk()
root.withdraw()

# The script expects two arguments: the action (argv[1]) and the image
# path (argv[2]).  BUG FIX: the original only checked `len(sys.argv) == 1`,
# so a call with exactly one extra argument crashed on the argv[2] access.
if len(sys.argv) < 3:
    messagebox.showinfo("Information", "Please run the program from the Windows Explorer context menu.")
    sys.exit()

img_full_path = sys.argv[2]
sep_pos = img_full_path.rfind(path.sep)
img_path = img_full_path[:sep_pos]
dot_pos = img_full_path.rfind(".")
img_name = img_full_path[sep_pos + 1 : dot_pos]
img_ext = img_full_path[dot_pos + 1 :].lower()

# Only common raster formats are handled.
if img_ext in ("jpeg", "jpg", "png", "tga"):
    image = Image.open(img_full_path)
    if sys.argv[1] == "invert_image":
        save_img_name = img_path + path.sep + img_name + "_invert." + img_ext
        if path.exists(save_img_name):
            MsgBox = tk.messagebox.askquestion('Warning', 'Are you sure you want to replace the existing image?', icon='warning')
            if MsgBox == 'no':
                sys.exit()
        if image.mode == 'RGBA':
            # ImageOps.invert() does not accept RGBA: invert the RGB part
            # and re-attach the untouched alpha channel.
            r, g, b, a = image.split()
            rgb_image = Image.merge('RGB', (r, g, b))
            inverted_image = PIL.ImageOps.invert(rgb_image)
            r2, g2, b2 = inverted_image.split()
            final_transparent_image = Image.merge('RGBA', (r2, g2, b2, a))
            final_transparent_image.save(save_img_name)
        else:
            # NOTE(review): assumes an invertible mode (RGB/L); palette
            # images ('P') would raise here — confirm expected inputs.
            inverted_image = PIL.ImageOps.invert(image)
            inverted_image.save(save_img_name)
    elif sys.argv[1] == "invert_green_channel":
        save_img_name = img_path + path.sep + img_name + "_invert green." + img_ext
        if path.exists(save_img_name):
            MsgBox = tk.messagebox.askquestion('Warning', 'Are you sure you want to replace the existing image?', icon='warning')
            if MsgBox == 'no':
                sys.exit()
        if image.mode == 'RGBA':
            # Keep R, B and alpha; invert only the green channel.
            r, g, b, a = image.split()
            g = PIL.ImageOps.invert(image.getchannel("G"))
            final_transparent_image = Image.merge('RGBA', (r, g, b, a))
            final_transparent_image.save(save_img_name)
        else:
            r, g, b = image.split()
            g = PIL.ImageOps.invert(image.getchannel("G"))
            final_transparent_image = Image.merge('RGB', (r, g, b))
            final_transparent_image.save(save_img_name)
|
import FWCore.ParameterSet.Config as cms
import os, sys, imp, re
# CMSSW release name, taken from the environment set up by `cmsenv`
# (currently unused below but kept for reference).
CMSSW_VERSION=os.getenv("CMSSW_VERSION")
# The cms.Process that drives this generator-level ntuplizing job.
process = cms.Process("GenPhotonTuplizer")
# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_PostLS1_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedNominalCollision2015_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
# Conditions global tag matching the input MC sample.
process.GlobalTag.globaltag = 'MCRUN2_71_V1::All'
# -1 = process every event in the input files.
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
# Report progress every 100 events instead of every event.
process.MessageLogger.cerr.FwkReport.reportEvery = 100
#load input file
process.source = cms.Source('PoolSource',
    fileNames = cms.untracked.vstring(
    #'root://cms-xrd-global.cern.ch//store/mc/RunIIWinter15GenOnly/GluGluHToGG_M125_13TeV_powheg2_minloHJJ_pythia8/GEN/MCRUN2_71_V1-v2/110000/04485D22-A46A-E711-B25D-0025904E41E4.root'
    'root://cms-xrd-global.cern.ch//store/mc/RunIIWinter15GenOnly/GluGluHToGG_M125_13TeV_powheg2_minloHJJ_pythia8/GEN/MCRUN2_71_V1-v2/110000/1E9D9DAB-616A-E711-B44A-002590E50AFE.root'
    )
)
#process.Out = cms.OutputModule("PoolOutputModule",
#    fileName = cms.untracked.string ("MyOutputFile.root")
#)
#define output file
process.TFileService = cms.Service("TFileService",
    fileName = cms.string("genPhotonNtuple.root"),
    closeFileFast = cms.untracked.bool(True)
)
######################### parameters for the tuplizer ##############################
# Input collections consumed by the GenPhotonTuplizer EDAnalyzer.
process.ntuples = cms.EDAnalyzer('GenPhotonTuplizer',
    genParticles = cms.InputTag("genParticles"),
    genJets = cms.InputTag("ak4GenJets"),
    genInfo = cms.InputTag("generator", "", "GEN"),
    lheInfo = cms.InputTag("externalLHEProducer", "", "LHE")
)
#define path
process.p = cms.Path()
process.p *= process.ntuples
#process.end = cms.EndPath(process.Out)
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from django.core.validators import EmailValidator
#from rest_framework.validators import UniqueValidator
from django.contrib.auth import authenticate
class UserSerializer(serializers.ModelSerializer):
    """Read-only representation of a Django auth User (id, username, email)."""
    #user_pk=serializers.Field(source=User.id)
    class Meta:
        model = User
        fields = ['id', 'username', 'email',]
    #lookup_field='user_pk'
class RegisterSerializer(serializers.ModelSerializer):
    """Creates new User accounts; the password is write-only."""
    # Declared explicitly so the field is required and email-validated.
    email = serializers.EmailField()

    class Meta:
        model = User
        fields = ('id', 'username', 'email', 'password')
        # BUG FIX: the validator must be an *instance* (EmailValidator()),
        # not the class itself — the class was being "called" with the
        # field value as a constructor argument, so no validation ever ran.
        # NOTE(review): DRF applies extra_kwargs only to auto-generated
        # fields; since `email` is declared above, this entry may be
        # ignored — confirm intent.
        extra_kwargs = {
            'password': {'write_only': True},
            'email': {'validators': [EmailValidator()]},
        }

    def validate_email(self, value):
        """Reject addresses that are already registered."""
        if User.objects.filter(email=value).exists():
            raise serializers.ValidationError("email already exists.")
        return value

    def create(self, validated_data):
        """Create the user via create_user() so the password is hashed."""
        user = User.objects.create_user(validated_data['username'], validated_data['email'], validated_data['password'])
        return user
class LoginSerializer(serializers.Serializer):
    """Validates a username/password pair and yields the matching user."""
    username = serializers.CharField()
    password = serializers.CharField()

    def validate(self, data):
        """Return the authenticated, active user or raise a validation error."""
        matched = authenticate(**data)
        if matched is None or not matched.is_active:
            raise serializers.ValidationError("incorrect Credentials")
        return matched
import sys
import PyQt4
from PyQt4 import QtGui, QtOpenGL
from PyQt4.QtGui import QWidget, QHBoxLayout, QColor
from PyQt4.QtOpenGL import QGLWidget
from OpenGL.GL import *
from OpenGL import GLUT
import math
from PyQt4 import Qt
#from text import Text
from quadrilaterals import JRectangle, Text
class Label(JRectangle):
    """A rectangle that displays a text string (a simple OpenGL label)."""

    def __init__(self, text):
        JRectangle.__init__(self)
        self._text = Text(text, 0, 10)
        # Width scales with the number of characters; height is the glyph
        # height plus padding (stroke-font metrics used elsewhere in the file).
        self.setWidth(len(self._text) * 104.76)
        self.setHeight(119.05 + 20)
        self.set_drawable(False)

    def get_text(self):
        """Return the label's current text."""
        return self._text.get_text()

    def set_text(self, newText):
        """Replace the label's text and resize the rectangle to fit it."""
        self._text.set_text(newText)
        # The rectangle's width and height depend on the text itself.
        # BUG FIX: this used `self._name`, an attribute never defined on
        # Label (apparently copied from an "entity" class), so every call
        # to set_text() raised AttributeError.  Size from self._text, as
        # __init__ does.
        self.setWidth(len(self._text) * 104.76)
        self.setHeight(119.05 + 20)

    def get_selected(self, x, y):
        """Return self when (x, y) falls inside the rectangle, else None."""
        result = None
        # NOTE(review): `or True` makes the visibility test a no-op —
        # hidden labels are still selectable; confirm whether that is
        # intended.
        if self._visible or True:
            if result is not None:
                return result
            elif self.isSelected(x, y):
                return self
        return result

    def paint(self, paintHidden=False):
        """Draw the rectangle and its text, honoring visibility."""
        if self.getVisibility() or paintHidden:
            JRectangle.paint(self)
            glMatrixMode(GL_MODELVIEW)
            glPushMatrix()
            # The label position is applied as a translation relative to
            # its origin (the bottom-left corner of the rectangle).
            glTranslatef(self._tx, self._ty, 0)
            self._text.paint()
            glPopMatrix()
|
# -*- coding: utf-8 -*-
'''
Manage DNS records and zones using libcloud
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
.. versionadded:: Carbon
Create and delete DNS records or zones through Libcloud. Libcloud's DNS system supports over 20 DNS
providers including Amazon, Google, GoDaddy, Softlayer
This module uses ``libcloud``, which can be installed via package, or pip.
:configuration:
This module uses a configuration profile for one or multiple DNS providers
.. code-block:: yaml
libcloud_dns:
profile1:
driver: godaddy
key: 2orgk34kgk34g
profile2:
driver: route53
key: blah
secret: blah
Example:
.. code-block:: yaml
webserver:
libcloud_dns.zone_present:
name: mywebsite.com
profile: profile1
libcloud_dns.record_present:
name: www
zone: mywebsite.com
type: A
data: 12.34.32.3
profile: profile1
:depends: apache-libcloud
'''
# Import Python Libs
from __future__ import absolute_import
from distutils.version import LooseVersion as _LooseVersion # pylint: disable=import-error,no-name-in-module
import salt.modules.libcloud_dns as libcloud_dns_module
# Import salt libs
import salt.utils
import logging
log = logging.getLogger(__name__)
# Import third party libs
# Minimum apache-libcloud version this state module supports.
REQUIRED_LIBCLOUD_VERSION = '1.0.0'
try:
    #pylint: disable=unused-import
    import libcloud
    from libcloud.dns.providers import get_driver
    #pylint: enable=unused-import
    # Too-old versions are rejected by faking an ImportError, which
    # flips HAS_LIBCLOUD below and disables the module in __virtual__().
    if hasattr(libcloud, '__version__') and _LooseVersion(libcloud.__version__) < _LooseVersion(REQUIRED_LIBCLOUD_VERSION):
        raise ImportError()
    # Silence libcloud's own logging; salt handles logging itself.
    logging.getLogger('libcloud').setLevel(logging.CRITICAL)
    HAS_LIBCLOUD = True
except ImportError:
    HAS_LIBCLOUD = False
def __virtual__():
    '''
    Only load this state module when a new-enough apache-libcloud is
    importable (checked at import time above).
    '''
    if HAS_LIBCLOUD:
        return True
    return (
        False,
        ('A apache-libcloud library with version at least {0} was not '
         'found').format(REQUIRED_LIBCLOUD_VERSION),
    )
def __init__(opts):
    # Resolve the salt dunders (__salt__ etc.) for this module; `opts`
    # is required by the loader's __init__ contract but unused here.
    salt.utils.compat.pack_dunder(__name__)
def _get_driver(profile):
    '''Instantiate the libcloud DNS driver configured under *profile*.'''
    config = __salt__['config.option']('libcloud_dns')[profile]
    cls = get_driver(config['driver'])
    # Only `key` is mandatory; the remaining settings fall back to the
    # driver's defaults.
    return cls(
        config.get('key'),
        config.get('secret', None),
        config.get('secure', True),
        config.get('host', None),
        config.get('port', None),
    )
def state_result(result, message):
    '''Build the minimal state-return dict used by every state below.'''
    return dict(result=result, comment=message)
def zone_present(domain, type, profile):
    '''
    Ensures a DNS zone is present.

    :param domain: Zone name, i.e. the domain name
    :type domain: ``str``
    :param type: Zone type (master / slave), defaults to master
    :type type: ``str``
    :param profile: The profile key
    :type profile: ``str``
    '''
    zones = libcloud_dns_module.list_zones(profile)
    if not type:
        type = 'master'
    # Idempotent: do nothing when a zone with this domain already exists.
    if any(z.domain == domain for z in zones):
        return state_result(True, "Zone already exists")
    created = libcloud_dns_module.create_zone(domain, profile, type)
    return state_result(created, "Created new zone")
def zone_absent(domain, profile):
    '''
    Ensures a DNS zone is absent.

    :param domain: Zone name, i.e. the domain name
    :type domain: ``str``
    :param profile: The profile key
    :type profile: ``str``
    '''
    zones = libcloud_dns_module.list_zones(profile)
    matches = [z for z in zones if z.domain == domain]
    if not matches:
        return state_result(True, "Zone already absent")
    deleted = libcloud_dns_module.delete_zone(matches[0].id, profile)
    return state_result(deleted, "Deleted zone")
def record_present(name, zone, type, data, profile):
    '''
    Ensures a DNS record is present.

    :param name: Record name without the domain name (e.g. www).
        Pass an empty string ('') to target the base domain.
    :type name: ``str``
    :param zone: Zone (domain name) in which the record lives
    :type zone: ``str``
    :param type: DNS record type (A, AAAA, ...).
    :type type: ``str``
    :param data: Data for the record (depends on the record type).
    :type data: ``str``
    :param profile: The profile key
    :type profile: ``str``
    '''
    zones = libcloud_dns_module.list_zones(profile)
    matching_zone = next((z for z in zones if z.domain == zone), None)
    if matching_zone is None:
        return state_result(False, "Could not locate zone")
    records = libcloud_dns_module.list_records(matching_zone.id, profile)
    # Idempotent: a record counts as existing only when name, type and
    # data all match.
    already_there = any(
        r.name == name and r.type == type and r.data == data
        for r in records
    )
    if already_there:
        return state_result(True, "Record already exists")
    created = libcloud_dns_module.create_record(
        name, matching_zone.id, type, data, profile)
    return state_result(created, "Created new record")
def record_absent(name, zone, type, data, profile):
    '''
    Ensures a DNS record is absent.

    :param name: Record name without the domain name (e.g. www).
        Pass an empty string ('') to target the base domain.
    :type name: ``str``
    :param zone: Zone (domain name) in which the record lives
    :type zone: ``str``
    :param type: DNS record type (A, AAAA, ...).
    :type type: ``str``
    :param data: Data for the record (depends on the record type).
    :type data: ``str``
    :param profile: The profile key
    :type profile: ``str``
    '''
    zones = libcloud_dns_module.list_zones(profile)
    matching_zone = next((z for z in zones if z.domain == zone), None)
    if matching_zone is None:
        return state_result(False, "Zone could not be found")
    records = libcloud_dns_module.list_records(matching_zone.id, profile)
    to_delete = [r for r in records
                 if r.name == name and r.type == type and r.data == data]
    if not to_delete:
        return state_result(True, "Records already absent")
    # Delete every matching record; succeed only if all deletions succeed.
    outcomes = [
        libcloud_dns_module.delete_record(matching_zone.id, r.id, profile)
        for r in to_delete
    ]
    return state_result(all(outcomes), "Removed {0} records".format(len(outcomes)))
|
from test.test_bot import TestBot
def main():
    """Instantiate the test bot and run it."""
    TestBot().start()


if __name__ == '__main__':
    main()
import numpy as np
from readcol import *
def bp_binning( cls, bin_file, verbose=False):
code = ' > bp_binning: '
if verbose:
print code+'Assuming l=0, len(cl)-1; lmax=len(cl)-1'
print code+'Reading bin_file %s' %bin_file
nl = len(cls)
fl,ll = readcol(bin_file, verbose=verbose)
nb = len(ll)
if verbose:
print nb, ll[nb-1], ll[nb]
lmax = np.min( [nl-1,ll[nb]] )
if verbose:
print 'lmax = %s' %lmax
#bok = (ll - lmax) > 0
#bmax = nb-1
#if bok[0] > 0:
# bmax = bok[0]-1
bin_cls = np.zeros( nb )
# for ib=0,bmax do bin_cls[ib] = total( cls[fl[ib]:ll[ib]] ) / (ll[ib]-fl[ib]+1)
for ib in np.arange( nb ):
#bin_cls[ib] = np.sum( cls[fl[ib]:ll[ib]] ) / (ll[ib]-fl[ib]+1)
bnl = np.sum( np.nonzero( cls[fl[ib]:ll[ib]] ) )
bin_cls[ib] = np.sum( cls[fl[ib]:ll[ib]] ) / bnl #(ll[ib]-fl[ib]+1)
if verbose:
print ib, fl[ib], ll[ib], bin_cls[ib]
tnl = np.sum( np.nonzero( bin_cls ) )
return bin_cls[0:tnl]
|
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
import pandas as pd
import numpy as np

tennis_data = pd.read_csv('playtennis.csv')

# Encode each categorical column with the same integer codes as before.
tennis_data.Outlook = tennis_data.Outlook.replace({'Sunny': 0, 'Overcast': 1, 'Rain': 2})
tennis_data.Temperature = tennis_data.Temperature.replace({'Hot': 3, 'Mild': 4, 'Cool': 5})
tennis_data.Humidity = tennis_data.Humidity.replace({'High': 6, 'Normal': 7})
tennis_data.Wind = tennis_data.Wind.replace({'Weak': 8, 'Strong': 9})
tennis_data.PlayTennis = tennis_data.PlayTennis.replace({'No': 10, 'Yes': 11})

# Feature matrix and target vector.
X = np.array(pd.DataFrame(tennis_data, columns=['Outlook', 'Temperature', 'Humidity', 'Wind']))
y = np.array(pd.DataFrame(tennis_data, columns=['PlayTennis']))

# Random train/test split.
X_train, X_test, y_train, y_test = train_test_split(X, y)

# Fit a Gaussian Naive Bayes classifier and predict on the held-out set.
gnb_clf = GaussianNB().fit(X_train, y_train)
gnb_prediction = gnb_clf.predict(X_test)

# Performance evaluation.
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
from sklearn.metrics import f1_score
from sklearn.metrics import accuracy_score

fmeasure = round(f1_score(y_test, gnb_prediction, average='weighted'), 2)
accuracy = round(accuracy_score(y_test, gnb_prediction, normalize=True), 2)

# Single-row summary table.
df_nbclf = pd.DataFrame(columns=['Classifier', 'F-Measure', 'Accuracy'])
df_nbclf.loc[len(df_nbclf)] = ['Naive Bayes', fmeasure, accuracy]
print(df_nbclf)
from sklearn.datasets import make_classification
import numpy as np
import matplotlib.pyplot as plt
# Create an imbalanced three-class sample, weighted 1% / 5% / 94%.
X,y = make_classification(n_samples=5000, n_features=2, n_informative=2,
n_redundant=0, n_repeated=0, n_classes=3,
n_clusters_per_class =1, weights=[0.01, 0.05, 0.94],
class_sep=0.8, random_state=0)
plt.scatter(X[:,0], X[:,1], c=y)
''''simple over sampling'''
from imblearn.over_sampling import RandomOverSampler
ros = RandomOverSampler(sampling_strategy='auto',random_state=0) # newer imblearn API: sampling_strategy replaces the old ratio argument
X_resampled, y_resampled = ros.fit_resample(X,y) # newer versions no longer return the sampled indices (ind)
np.bincount(y) # number of samples in each class
np.bincount(y_resampled) # after oversampling every class has the same count
# The number of X rows grows accordingly.
# Plotting example (needs the sampled indices, unavailable in this version):
'''
num_samples = np.bincount(ind)
plt.scatter(X[:,0], X[:,1], c=y, s=num_samples)
'''
'''simple under sampling'''
from imblearn.under_sampling import RandomUnderSampler
rus = RandomUnderSampler(random_state=0, sampling_strategy='auto')
X_resampled, y_resampled = rus.fit_resample(X,y)
np.bincount(y_resampled) # every class is cut down to the smallest class size
# Plot the undersampled data.
plt.scatter(X_resampled[:,0], X_resampled[:,1], c=y_resampled)
# Explicit per-class target counts instead of 'auto'.
rus = RandomUnderSampler(random_state=0, sampling_strategy={1:64*2,2:64*10})
X_resampled, y_resampled = rus.fit_resample(X,y)
np.bincount(y_resampled)
'''SMOTE'''
from imblearn.over_sampling import SMOTE
sm = SMOTE(k_neighbors=5, random_state=0)
X_resampled, y_resampled = sm.fit_resample(X,y)
np.bincount(y_resampled)
plt.scatter(X_resampled[:,0], X_resampled[:,1], c=y_resampled)
'''ADASYN'''
from imblearn.over_sampling import ADASYN
ada = ADASYN(random_state = 0, n_neighbors=5)
X_resampled, y_resampled = ada.fit_resample(X,y)
np.bincount(y_resampled)
plt.scatter(X_resampled[:,0], X_resampled[:,1], c=y_resampled)
# Looks slightly different from SMOTE; the class boundary tends to be cleaner.
'''NearMiss'''
from imblearn.under_sampling import NearMiss
nm = NearMiss(version=1)
X_resampled, y_resampled = nm.fit_resample(X,y)
np.bincount(y_resampled)
plt.scatter(X_resampled[:,0], X_resampled[:,1], c=y_resampled)
'''one sided'''
from imblearn.under_sampling import OneSidedSelection
oss = OneSidedSelection(random_state = 0, n_neighbors=1, n_seeds_S=1)
X_resampled, y_resampled = oss.fit_resample(X,y)
np.bincount(y_resampled)
'''TomekLink'''
from imblearn.under_sampling import TomekLinks
t1 = TomekLinks(sampling_strategy='all')
# NOTE(review): the original plotting relied on the sampled indices (ind),
# which this imblearn version does not return — hence unused t1.
from sklearn.linear_model import LogisticRegression
clf = LogisticRegression()
clf.fit(X,y)
# Visualize the classifier's decision regions on a coarse 10x10 grid.
xmin, xmax, ymin, ymax = X[:,0].min(), X[:,0].max(), X[:,1].min(), X[:,1].max()
xx, yy = np.meshgrid(np.linspace(xmin-0.5, xmax+0.5, 10), np.linspace(ymin-0.5, ymax+0.5,10))
zz = np.c_[xx.ravel(), yy.ravel()]
zz_pred = clf.predict(zz)
plt.contourf(xx,yy,zz_pred.reshape(xx.shape), alpha=0.7)
plt.scatter(X[:,0],X[:,1],c=y)
|
import collections
import math
import random
import sys
import numpy as np
import pandas as pd
from torch.utils.data import Dataset, DataLoader, WeightedRandomSampler
sys.path.append('../')
from example.util_file import matrix_normalization
import torch
class SingleDataset(Dataset):
    """Dataset over one recording: items are (start, end) column slices
    of a single 2-D data matrix, all sharing one label."""

    def __init__(self, data, time_info, label):
        self.data = data            # full signal matrix (rows x time)
        self.time_info = time_info  # per-item (start, end) ranges
        self.label = label          # label shared by every slice

    def __getitem__(self, item):
        info = self.time_info[item]
        start, end = info[0], info[1]
        segment = matrix_normalization(self.data[:, start:end], (100, -1))
        segment = segment.astype('float32')
        # Prepend a channel axis for the model input.
        segment = segment[np.newaxis, :]
        return segment, self.time_info[item], self.label

    def __len__(self):
        """Number of (start, end) slices."""
        return len(self.time_info)
class DataInfo:
    """
    Per-segment bookkeeping for model training: reads a csv with `id`
    (file path) and `label` columns and exposes (path, label) pairs,
    per-label counts and an LCM-based sampling weight base.
    """

    def mcm(self, num):
        """Return the least common multiple of the integers in *num*.

        BUG FIX: the original used true division (`/`), which turns the
        accumulator into a float and can lose precision for large
        values; integer floor division keeps the result exact.
        """
        minimum = 1
        for i in num:
            i = int(i)
            minimum = i * minimum // math.gcd(i, minimum)
        return int(minimum)

    def __init__(self, path_data):
        data = pd.read_csv(path_data)
        data_path = data['id'].tolist()
        labels = data['label'].tolist()
        self.data = []
        self.count = collections.defaultdict(int)
        for i in range(len(data_path)):
            self.data.append((data_path[i], int(labels[i])))
            self.count[labels[i]] += 1  # per-label totals
        # LCM of the class counts; used to derive integer sampling weights.
        self.weight = self.mcm(self.count.values())
        self.data_length = len(self.data)

    def next_batch_data(self, batch_size):
        """Yield batches of (path, label) pairs, shuffled once up front.

        Note: when `end` runs past the dataset it wraps back to the
        start, so this generator yields full batches indefinitely (the
        trailing partial batch is never yielded) — original behavior.
        """
        N = self.data_length
        start = 0
        end = batch_size
        random.shuffle(self.data)
        while end < N:
            yield self.data[start:end]
            start = end
            end += batch_size
            if end >= N:
                start = 0
                end = batch_size
class MyDataset(Dataset):
    """Dataset over (npy-path, label) pairs; loads and transforms lazily."""

    def __init__(self, data, transform=None, target_transform=None):
        self.data = data
        self.transform_data = transform
        self.target_transform = target_transform

    def __getitem__(self, index):
        path, label = self.data[index]
        sample = np.load(path)
        if self.transform_data:
            sample = self.transform_data(sample)
        sample = sample.astype('float32')
        # Prepend a channel dimension for the network input.
        return sample[np.newaxis, :], label

    def __len__(self):
        return len(self.data)
class MyData:
    """Builds DataLoaders for train/test/validation csv manifests."""

    def __init__(self, path_train=None, path_test=None, path_val=None, batch_size=16):
        """
        :param path_train: path to the training-set csv
        :param path_test: path to the test-set csv
        :param path_val: path to the validation-set csv
        :param batch_size: batch size for every loader
        """
        self.path_train = path_train
        self.path_test = path_test
        self.path_val = path_val
        self.batch_size = batch_size

    def collate_fn(self, data):
        """
        Collate variable-length samples into one zero-padded batch.

        :param data: list of (array, label) pairs in the form torch's
            DataLoader passes them
        :return: (batch tensor, label tensor)
        """
        # Sort by sequence length (last axis), longest first, so the
        # first element defines the padded shape.
        data.sort(key=lambda x: x[0].shape[-1], reverse=True)
        max_shape = data[0][0].shape
        labels = []  # label of each sample
        length = []  # original (unpadded) length of each sample
        for i, (d, label) in enumerate(data):
            reshape = d.shape
            length.append(d.shape[-1])
            if reshape[-1] < max_shape[-1]:
                # Zero-pad the last axis up to the longest sample.
                tmp_d = np.pad(d, ((0, 0), (0, 0), (0, max_shape[-1] - reshape[-1])), 'constant')
                data[i] = tmp_d
            else:
                data[i] = d
            labels.append(label)
        return torch.from_numpy(np.array(data)), torch.tensor(labels)

    def data_loader(self, transform, mode='train'):
        """Return a DataLoader for 'train' or 'test' mode (else None)."""
        dataloader = None
        if mode == 'train':
            # Few-shot learning variants would need extra handling here.
            data_info = DataInfo(self.path_train)
            dataset = MyDataset(data_info.data, transform=transform)
            # Classes are imbalanced, so sample with per-item weights:
            # weight = lcm(class counts) // count(own class).
            weight = [data_info.weight // data_info.count[x[1]] for x in data_info.data]
            sampler = WeightedRandomSampler(weight, len(dataset), replacement=True)
            dataloader = DataLoader(dataset, sampler=sampler, batch_size=self.batch_size)
        elif mode == 'test':  # test
            data_info = DataInfo(self.path_test)
            dataset = MyDataset(data_info.data, transform=transform)
            dataloader = DataLoader(dataset, batch_size=self.batch_size, shuffle=True)
        else:
            pass
        return dataloader

    def next_batch_val_data(self, transform):
        """Yield a DataLoader over one random validation batch."""
        data_info = DataInfo(self.path_val)
        dataset = MyDataset(next(data_info.next_batch_data(self.batch_size)), transform=transform)
        next_batch_data_loader = DataLoader(dataset, batch_size=self.batch_size, shuffle=True)
        yield next_batch_data_loader
|
# Minimal example record; keys/values are Spanish (nombre/apellido/edad
# = first name / last name / age).
person= {
    "nombre": "cercio",
    "apellido": "viloria",
    "edad": 24
}
|
"""
198. House Robber
Medium
You are a professional robber planning to rob houses along a street. Each house has a certain amount of money stashed, the only constraint stopping you from robbing each of them is that adjacent houses have security systems connected and it will automatically contact the police if two adjacent houses were broken into on the same night.
Given an integer array nums representing the amount of money of each house, return the maximum amount of money you can rob tonight without alerting the police.
Example 1:
Input: nums = [1,2,3,1]
Output: 4
Explanation: Rob house 1 (money = 1) and then rob house 3 (money = 3).
Total amount you can rob = 1 + 3 = 4.
"""
from typing import List
class Solution:
    def rob(self, nums: List[int]) -> int:
        """Return the maximum sum over non-adjacent elements of nums.

        Classic house-robber DP in O(n) time / O(1) space: `a` tracks
        the best total whose last take is at an even index, `b` at an
        odd index; each step folds the other track in via max().

        Hygiene fix: removed the stray debug print(nums) that wrote
        every input to stdout.
        """
        a = b = 0
        for i, value in enumerate(nums):
            if i % 2 == 0:
                a = max(a + value, b)
            else:
                b = max(b + value, a)
        return max(a, b)
# Quick manual check against the LeetCode examples.
for nums in ([1, 2, 3, 1], [2, 7, 9, 3, 1]):
    ans = Solution().rob(nums)
    print(ans)
import sqlite3
from textwrap import dedent
from typing import Iterator, Sequence, Type, TypeVar
from uuid import uuid4
from ...query import Clause
from .base import Architect, Engine, SelectStatement
class SQLite3Architect(Architect):
    """DDL helper for the sqlite3 backend: creates tables and columns."""

    def execute(self, query, params=None):
        # Run one statement on a fresh cursor and hand the cursor back so
        # callers can fetch results and close it.
        cursor = self._con.cursor()
        cursor.execute(query, params or [])
        return cursor

    def was_model_created(self, model):
        """Return True when the model's table already exists."""
        table_name = model.get_table_name()
        sql = 'SELECT name FROM sqlite_master WHERE type=? AND name=?;'
        cursor = self.execute(sql, ['table', table_name])
        results = cursor.fetchone()
        cursor.close()
        return results is not None

    def create_model(self, model):
        """Create the model's table: an `id` column, then each field."""
        table_name = model.get_table_name()
        self.execute(f'CREATE TABLE {table_name}(id);')
        for name, field in model.enumerate_fields():
            self.create_field(table_name, field)
        self._con.commit()

    def create_field(self, table_name, field):
        # The id column already exists from create_model(); skip it.
        name = field.name
        if name != 'id':
            self.execute(f'ALTER TABLE {table_name} ADD COLUMN {name};')
        self._con.commit()
class SQLite3Clause(Clause):
    """Query clause rendered as sqlite3 placeholder SQL."""

    # TypeVar so from_clause() can be typed as returning the subclass it
    # is invoked on.
    CLS = TypeVar('SQLite3Clause', bound='SQLite3Clause')

    def __init__(self, field, operator, value):
        super().__init__(field, operator, value)

    @classmethod
    def from_clause(cls: Type[CLS], clause: Clause) -> CLS:
        """Copy a generic Clause into this sqlite-specific subclass."""
        return cls(clause.field, clause.operator, clause.value)

    def to_sql(self):
        # NOTE(review): always renders `=` regardless of self.operator —
        # confirm whether other operators are meant to be supported.
        return f'{self.field} = ?'
class SQLite3Select(SelectStatement):
    """Renders a query plan into a sqlite3 SELECT statement."""

    def iterate_filters(self) -> Iterator[Clause]:
        """Yield every AND-filter as a sqlite-specific clause."""
        return (SQLite3Clause.from_clause(c) for c in self.plan.filters)

    def iterate_includes(self) -> Iterator[Clause]:
        """Yield every OR-include as a sqlite-specific clause."""
        return (SQLite3Clause.from_clause(c) for c in self.plan.includes)

    def _build_where_clause_part(self, clauses: Sequence[Clause],
                                 operator: str) -> str:
        # Render each clause, registering its bind parameter as we go,
        # then join the pieces with the boolean operator.
        rendered = []
        for clause in clauses:
            rendered.append(clause.to_sql())
            self.append_param(clause.value)
        return f' {operator} '.join(rendered)

    def build_where_filters(self):
        """AND-joined SQL for the plan's filters."""
        return self._build_where_clause_part(list(self.iterate_filters()), 'AND')

    def build_where_includes(self):
        """OR-joined SQL for the plan's includes."""
        return self._build_where_clause_part(list(self.iterate_includes()), 'OR')

    def build_where_clause(self):
        """Full WHERE clause: filters OR includes, empty parts skipped."""
        parts = [p for p in (self.build_where_filters(),
                             self.build_where_includes()) if p]
        return 'WHERE ' + ' OR '.join(parts)

    def build_fields(self):
        """Comma-separated projection list."""
        return ', '.join(self.plan.columns)

    def build_from_clause(self):
        """FROM clause, or '' when the plan names no sources."""
        if not self.plan.sources:
            return ''
        return 'FROM ' + ', '.join(self.plan.sources)

    def build_sql(self):
        """Assemble the complete SELECT statement."""
        return dedent(f"""
            SELECT {self.build_fields()}
            {self.build_from_clause()}
            {self.build_where_clause()}
        """).strip()
class SQLite3Engine(Engine):
    """sqlite3-backed storage engine (file `albus.db` or in-memory)."""

    ddl_class = SQLite3Architect

    def __init__(self, in_memory=False):
        # in_memory selects the ':memory:' database instead of albus.db.
        self._in_memory = in_memory
        super().__init__()

    def _get_filename(self):
        """Database filename handed to sqlite3.connect()."""
        if self._in_memory:
            filename = ':memory:'
        else:
            filename = 'albus.db'
        return filename

    def _generate_pk(self, model):
        # Random 32-char hex primary key; `model` is unused here.
        return uuid4().hex

    def connect(self):
        """Open the sqlite3 connection."""
        filename = self._get_filename()
        self._con = sqlite3.connect(filename)

    def cursor(self):
        return self._con.cursor()

    def commit(self):
        self._con.commit()

    def select_query(self, query):
        """Build SQL from a query plan, run it, and return all rows."""
        select = SQLite3Select.from_query(query)
        sql = select.build_sql()
        params = select.params
        cursor = self.cursor()
        cursor.execute(sql, params)
        all_rows = cursor.fetchall()
        cursor.close()
        return all_rows

    def fetch(self, model, pk, fields):
        """Load one row by pk, converting each column via field.from_db()."""
        table = model.get_table_name()
        columns = ', '.join([f.name for f in fields])
        sql = f'SELECT {columns} from {table} WHERE id=?;'
        cursor = self.cursor()
        cursor.execute(sql, [pk])
        row = cursor.fetchone()
        cursor.close()
        values = []
        # NOTE(review): when no row matches, `row` is None and this
        # assert (or the indexing below) fails — confirm expected
        # behavior for a missing pk.
        assert len(row) == len(fields)
        for idx in range(len(fields)):
            current_field = fields[idx]
            current_value = current_field.from_db(row[idx])
            values.append(current_value)
        return values

    def insert(self, model, fields, values):
        """Insert a new row with a generated pk; returns that pk."""
        pk = self._generate_pk(model)
        names = ['id'] + [f.name for f in fields]
        literals = [pk] + [v for v in values]
        columns = ', '.join(names)
        params = ', '.join(['?'] * len(literals))
        table = model.get_table_name()
        dml = f'INSERT INTO {table} ({columns}) VALUES ({params});'
        cursor = self.cursor()
        cursor.execute(dml, literals)
        self.commit()
        cursor.close()
        return pk

    def update(self, model, fields, values):
        # NOTE(review): this DML has no WHERE clause, so it updates every
        # row in the table, and no pk parameter exists to scope it.
        # Confirm whether single-row tables are assumed or whether this
        # is a latent bug.
        table = model.get_table_name()
        assignments_list = []
        for current_field in fields:
            column_name = current_field.name
            current_assignment = f'\n\t{column_name} = ?'
            assignments_list.append(current_assignment)
        assignments = ','.join(assignments_list)
        dml = f'UPDATE {table} SET {assignments};'
        cursor = self.cursor()
        cursor.execute(dml, values)
        self.commit()
        cursor.close()

    def delete(self, model, pk):
        """Delete the row with the given pk."""
        table = model.get_table_name()
        dml = f'DELETE FROM {table} WHERE id=?;'
        cursor = self.cursor()
        cursor.execute(dml, [pk])
        self.commit()
        cursor.close()
|
import json
from datetime import datetime

start_time = datetime.now()

# GI -> taxonomy-id mapping, read from "necessary_taxes.txt"
# (two integers per line: gi tax).
# BUG FIX: the input/output files were opened and never closed; context
# managers now guarantee they are.
taxdict = {}
with open('necessary_taxes.txt') as taxfile:
    for pair in taxfile:
        gi, tax = (int(i) for i in pair.split())
        taxdict[gi] = tax
print('taxdict is completed')

missmatch = 0
# tax-id -> [gis, chain codes, isotype codes]; only these six organisms
# (human, chimp, camel, mouse, etc.) are kept.
mainDict = {9606: [[], [], []], 9596: [[], [], []], 9844: [[], [], []],
            10090: [[], [], []], 9838: [[], [], []], 9986: [[], [], []]}

with open('necessary_names.txt') as namesfile:
    for pair in namesfile:
        ls = pair.split(' ')
        current_gi = int(ls[0])
        # Chain code: 1 = heavy, 2 = light, 0 = unknown.
        if 'heavy' in ls[1] or 'VH' in ls[1] or 'Vh' in ls[1]:
            chain = 1
        elif 'light' in ls[1] or 'VL' in ls[1] or 'Vl' in ls[1]:
            chain = 2
        else:
            chain = 0
        # Isotype code: IgG/IgM/IgA/IgD/IgE -> 1..5, otherwise 0.
        # (Renamed from `type`, which shadowed the builtin.)
        if 'IgG' in ls[1]:
            ig_type = 1
        elif 'IgM' in ls[1]:
            ig_type = 2
        elif 'IgA' in ls[1]:
            ig_type = 3
        elif 'IgD' in ls[1]:
            ig_type = 4
        elif 'IgE' in ls[1]:
            ig_type = 5
        else:
            ig_type = 0
        # Skip GIs with no known taxonomy id, counting them as mismatches.
        if current_gi not in taxdict:
            missmatch += 1
            continue
        taxid = taxdict[current_gi]
        mainDict[taxid][0].append(current_gi)
        mainDict[taxid][1].append(chain)
        mainDict[taxid][2].append(ig_type)

# Dump the filtered mapping and report how many GIs were skipped.
with open('FilterData', 'w+') as filterfile:
    filterfile.write(json.dumps(mainDict))
print(missmatch, datetime.now() - start_time)
input()
|
from datetime import date, timedelta
class DateUtils:
    """Class-level calendar helper: tracks a start date and a date cursor."""

    DAY_NAMES = [
        "monday",
        "tuesday",
        "wednesday",
        "thursday",
        "friday",
        "saturday",
        "sunday"
    ]

    startDate: date    # the date the run started
    currentDate: date  # cursor, advanced by nextDay()
    days: int          # number of nextDay() calls since init()

    @staticmethod
    def init(day: date):
        """Reset cursor, start date and counter to *day*."""
        DateUtils.days = 0
        DateUtils.startDate = day
        DateUtils.currentDate = day

    @staticmethod
    def nextDay():
        """Advance the cursor by one calendar day."""
        DateUtils.days += 1
        DateUtils.currentDate = DateUtils.currentDate + timedelta(days=1)

    @staticmethod
    def weekday() -> int:
        """ISO weekday of the cursor (1 = Monday ... 7 = Sunday)."""
        return DateUtils.currentDate.isoweekday()

    @staticmethod
    def isMonday() -> bool:
        return DateUtils.weekday() == 1

    @staticmethod
    def isTuesday() -> bool:
        return DateUtils.weekday() == 2

    @staticmethod
    def isWednesday() -> bool:
        return DateUtils.weekday() == 3

    @staticmethod
    def isSaturday() -> bool:
        return DateUtils.weekday() == 6

    @staticmethod
    def isSunday() -> bool:
        return DateUtils.weekday() == 7

    @staticmethod
    def dayname() -> str:
        """Lowercase English name of the cursor's weekday."""
        return DateUtils.DAY_NAMES[DateUtils.currentDate.weekday()]

    @staticmethod
    def isGwRewardDay() -> bool:
        """True on Saturdays (same test as isSaturday)."""
        return DateUtils.weekday() == 6

    @staticmethod
    def getDays() -> int:
        """Days elapsed between the start date and the cursor."""
        return (DateUtils.currentDate - DateUtils.startDate).days
|
import functools
import requests as _requests
from .config import get_config
from .log import lg
class LazyRequests(object):
    """Thin wrapper over `requests` that lazily applies configuration
    (proxy) and a browser User-Agent on first use.

    Only the HTTP verbs listed in `allow_methods` are exposed; attribute
    access returns a partial of the matching `requests` function with
    the default kwargs already bound.
    """

    allow_methods = ['get', 'post']
    user_agent = (
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36')

    def __init__(self):
        self.configured = False
        self.config = None

    def _configure(self):
        # Load configuration exactly once, on the first request.
        if not self.configured:
            self.config = get_config()
            self.configured = True

    def __getattr__(self, key):
        if key not in self.allow_methods:
            # BUG FIX: the original format string had one placeholder but
            # two arguments, so the allowed-method list never appeared in
            # the error message.
            raise AttributeError(
                '{} is not an allowed method (allowed: {})'.format(
                    key, self.allow_methods))
        self._configure()
        kwargs = {
            'headers': {
                'User-Agent': self.user_agent,
            },
        }
        proxy = self.config.get('proxy')
        if proxy:
            kwargs['proxies'] = {
                'http': proxy,
                'https': proxy,
            }
            lg.debug('request use proxy: %s', proxy)
        request_func = getattr(_requests, key)
        return functools.partial(request_func, **kwargs)


requests = LazyRequests()
|
__author__ = "Narwhale"
def binary_search(alist, item):
    """Binary search: return the index of *item* in the sorted list, or None if absent."""
    lo, hi = 0, len(alist) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = alist[mid]
        if probe < item:
            lo = mid + 1
        elif probe > item:
            hi = mid - 1
        else:
            return mid
    # Not found: fall through and return None implicitly.
    return
# Demo: searching for an absent value prints None.
numbers = [1, 2, 3, 5, 6, 7]
result = binary_search(numbers, 0)
print(result)
from app import db
# constants
# Default timezone applied to new Admin rows (see Admin.timezone below).
DEFAULT_TZ = "Australia/Brisbane"
# Role levels stored in Admin.role (SmallInteger column).
INACTIVE = 0
ADMIN = 1
SUPERVISOR = 2
# Maps a role level to its human-readable name (used by Admin.get_role).
ROLE = {
INACTIVE: 'inactive',
ADMIN: 'admin',
SUPERVISOR: 'supervisor'
}
# Models interfacing with the database tables
class Admin(db.Model):
    """
    Admin model for any registered user.

    Implements the method contract expected by Flask-Login
    (is_authenticated, is_active, get_id).
    """
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), unique=True)
    email = db.Column(db.String(120), unique=True)
    password = db.Column(db.String(20))
    role = db.Column(db.SmallInteger, default=INACTIVE)
    timezone = db.Column(db.String(80), default=DEFAULT_TZ)

    def __init__(self, name, email, password, role=0):
        """
        Constructor function to create a new user; the role defaults to Inactive (0).
        :param name:
        :param email:
        :param password:
        :param role:
        :return:
        """
        self.name = name
        self.email = email
        self.password = password
        self.role = role

    def is_authenticated(self):
        """
        helper function to check if the user is logged in
        """
        return True

    def get_role(self):
        """
        helper function that returns the role of the queried user
        """
        try:
            return ROLE[self.role]
        except KeyError:
            return "No role assigned"

    def is_active(self):
        """
        helper function that returns true if the user is currently on duty
        """
        return True

    def get_id(self):
        """
        helper function that returns the queried user's id as a string

        BUG FIX: the original used unicode(), which does not exist on
        Python 3 and raised NameError there; str() works on both versions.
        """
        return str(self.id)

    def __repr__(self):
        """
        helper function to represent the query in a human readable form
        """
        return '<Admin %r>' % self.name
class Timer(db.Model):
    """
    Model that represents one discrete work shift, with a start, an end, the moment when the user requests to terminate
    the shift and an associated user, as well as notes the user might leave at the end of the shift.
    """
    id = db.Column(db.Integer, primary_key=True)
    startDate = db.Column(db.DateTime)
    endRequestedDate = db.Column(db.DateTime)
    endDate = db.Column(db.DateTime)
    notes = db.Column(db.Text)
    admin_id = db.Column(db.Integer, db.ForeignKey('admin.id'))
    admin = db.relationship('Admin',
                            backref=db.backref('dates'))

    def __init__(self, startDate, admin=None, endRequestedDate=None, endDate=None):
        """
        Constructor function for the timer model.
        Defaults endRequestedDate, endDate to None because they are to be filled in later.
        Defaults admin to None to help debugging.
        :param startDate:
        :param admin:
        :param endRequestedDate:
        :param endDate:
        :return:
        """
        self.startDate = startDate
        self.admin = admin
        self.endRequestedDate = endRequestedDate
        self.endDate = endDate

    def __repr__(self):
        """
        helper function to represent the query in a human readable form

        BUG FIX: endDate and admin default to None for an open shift, and id is
        None before the row is flushed; the original called .isoformat()/.name
        unconditionally and crashed. Format defensively instead (%r renders
        ints identically to %d, and renders None safely).
        """
        start = self.startDate.isoformat() if self.startDate else None
        end = self.endDate.isoformat() if self.endDate else None
        who = self.admin.name if self.admin else None
        return '<id %r s %r - e %r - %r>' % (self.id, start, end, who)
|
# -*- coding: utf-8 -*-
"""
Combines the existing melody and rhythm processing methods to form complete
melody processing
"""
import rhythm_hmm as rh
import long_rhythm_distance as lrd
import todd_ann as mel
import numpy as np
import midi
import time
from copy import deepcopy
import pdb
import pickle
class TrackDataSet:
    """Bundles the rhythm and melody training data extracted from a list of tracks."""

    def __init__(self, tracks):
        rhythms = []
        melodies = []
        # Extract rhythm and melody per track, preserving the original
        # interleaved extraction order.
        for trk in tracks:
            rhythms.append(rh.makeTrackRhythm(trk))
            melodies.append(mel.makeTrackMelody(trk, 0))
        (self.rhythmSamps, self.rhythmLens) = rh.makeRhythmSamples(rhythms)
        self.rhythmTimesteps = [r.timesteps for r in rhythms]
        self.melodyDS = mel.makeMelodyDataSet(melodies)
class MelodyGenerator:
    """Generates melody bars using a Todd ANN for pitches, an HMM for rhythm,
    and a rhythm-distance model (RDM) for longer-range rhythmic structure."""

    def __init__(self, stateCount, layerSize, barLen, barCount, clusterCount, hmmIters=1000, octave=5):
        self.net = mel.buildToddNetwork(layerSize)
        self.hmm = rh.buildHMM(stateCount, n_iter=hmmIters, tol=0.00001)
        self.rdm = lrd.RhythmDistanceModel(barLen, barCount, clusterCount)
        self.stateCount = stateCount
        self.distTheta = []
        self.barLen = barLen
        self.barCount = barCount
        self.octave = octave

    def train(self, epochs, tracks):
        """Train the network, the HMM (best of 20 random restarts) and the RDM on *tracks*."""
        trackDS = TrackDataSet(tracks)
        mel.trainNetwork(self.net, trackDS.melodyDS, epochs)
        bestHMM = self.hmm
        # BUG FIX: HMM scores are log-likelihoods and usually negative; the
        # old initial value of 0 meant no fitted HMM could ever beat it, so
        # the unfitted initial HMM was kept. Use -inf like trainTimed() does.
        bestScore = -np.inf
        for i in range(20):
            nextHMM = deepcopy(self.hmm)
            nextHMM.fit(trackDS.rhythmSamps, trackDS.rhythmLens)
            nextScore = nextHMM.score(trackDS.rhythmSamps, trackDS.rhythmLens)
            if nextScore > bestScore:
                bestHMM = nextHMM
                bestScore = nextScore
        self.rdm.train(trackDS.rhythmTimesteps)
        self.hmm = bestHMM

    def trainTimed(self, epochs, ds):
        """Like train(), but takes a prebuilt TrackDataSet and prints per-stage timings."""
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for interval timing.
        start = time.perf_counter()
        self.rdm.train(ds.rhythmTimesteps)
        rdm = time.perf_counter()
        print('RDM: {}'.format(rdm - start))
        mel.trainNetwork(self.net, ds.melodyDS, epochs)
        net = time.perf_counter()
        print('Net: {}'.format(net - rdm))
        bestHMM = self.hmm
        bestScore = -np.inf
        bestI = -1
        for i in range(10):
            nextHMM = deepcopy(self.hmm)
            nextHMM.fit(ds.rhythmSamps, ds.rhythmLens)
            nextScore = nextHMM.score(ds.rhythmSamps, ds.rhythmLens)
            if nextScore > bestScore:
                bestHMM = nextHMM
                bestScore = nextScore
                bestI = i
        self.hmm = bestHMM
        hmm = time.perf_counter()
        print('RDM: {}'.format(rdm - start))
        print('Net: {}'.format(net - rdm))
        print('HMM: {}'.format(hmm - net))
        print('Total: {}'.format(hmm - start))

    # Returns the original track + a generated bar
    def generateBar(self, track, rdmLam=4.0):
        """Return *track* extended by one generated bar of rhythm and pitches."""
        # Format data for prediction
        rhythm = rh.makeTrackRhythm(track)
        (rhythmSamps, _) = rh.makeRhythmSamples([rhythm])
        melody = mel.makeTrackMelody(track, 0)
        melodyDS = mel.makeMelodyDataSet([melody])
        # Generate one bar of rhythm, then pitches to fit it
        rhythmOutTS = lrd.generateNextBar(self.rdm, self.hmm, rdmLam, rhythmSamps)
        pitchOutTS = mel.getNextPitches(self.net, melody.pitches[-1], melodyDS,
                                        rhythm.timesteps[-1], rhythmOutTS)
        # Append the generated timesteps/notes to the working rhythm and melody
        for t in range(len(rhythmOutTS)):
            rhythm.addTimestep(rhythmOutTS[t])
            newNote = (rhythmOutTS[t] == 1)  # timestep value 1 == note onset
            melody.addNote(pitchOutTS[t], newNote)
        return makeTrackFromRhythmMelody(rhythm, melody, self.octave)

    # Saves the melody generator's learned characteristics to a file to be loaded later
    def save(self, filename):
        """Pickle this generator to *filename* (reload with loadMelodyGenerator)."""
        # with-block guarantees the handle is closed even if pickling fails
        with open(filename, "wb") as fh:
            pickle.dump(self, fh)
def loadMelodyGenerator(filename):
    """Unpickle a MelodyGenerator from *filename* and re-sort its network modules."""
    with open(filename, 'rb') as fh:
        mg = pickle.load(fh)
    # Force the network to rebuild its module ordering after unpickling.
    mg.net.sorted = False
    mg.net.sortModules()
    return mg
def makeTrackFromRhythmMelody(rhythm, melody, octave):
    """Combine a rhythm and a melody of equal length into a midi.Track.

    Timestep semantics (as used below): 1 starts a note, 2 sustains the
    current note, any other value ends it.
    """
    assert rhythm.length() == melody.length(), "Rhythm and melody must have equal lengths"
    track = midi.Track()
    note_start = 0
    note_pitch = 0
    note_active = False
    steps = rhythm.length()
    for t in range(steps):
        step = rhythm.timesteps[t]
        # Anything that is not a sustain terminates the note in progress.
        if note_active and step != 2:
            track.addNote(midi.Note(note_pitch, note_start, t - note_start))
            note_active = False
        if step == 1:
            note_start = t
            note_pitch = melody.pitches[t] + octave * 12
            note_active = True
    # Flush a note still sounding at the end of the track.
    if note_active:
        track.addNote(midi.Note(note_pitch, note_start, steps - note_start))
    return track
|
#!/bin/env python3
from itertools import count
from logging import debug
from os import access, name
from typing import OrderedDict
from matplotlib.pyplot import table, twinx
from numpy import angle, mat
from numpy.core.fromnumeric import put
from py_trees import behaviour, common
from py_trees.behaviours import Success
from roslaunch.pmon import rl_signal
import rospy
import roslaunch
import rosnode
import actionlib
import moveit_commander
import moveit_msgs.msg
import tf
import py_trees
import argparse
import sys
import time
import math
import py_trees.console as console
import subprocess
from std_msgs.msg import String, Int64MultiArray
from nav_msgs.msg import Odometry
from sensor_msgs.msg import Image
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from moveit_msgs.msg import JointConstraint, Constraints
from nav_msgs.msg import Odometry
from sensor_msgs.msg import LaserScan
from vision_msgs.msg import Detection2DArray
from tf import transformations
from tf.transformations import euler_from_quaternion, quaternion_from_euler
from geometry_msgs.msg import Quaternion, Twist, PoseStamped
from py_trees_test.msg import battery_status
from datetime import datetime
from gazebo_ros_link_attacher.srv import Attach, AttachRequest
from countertop_spray import Countertop
from pick_pipeline import PickPipeline
from name import BoundingBoxes
from fusion_class import fusion
from table_top import TableTop
from sink_detection import SinkDetection
from wall_align import WallAlign
#----------------- Meta data start -----------------#
def description():
    """Return the demo's help-text banner, colourised when the console supports it."""
    lines = [
        "Bathroom cleaning robot demo",
        "The robot finds a way to enter the bathroom",
        "through the door, pick and place the trash in",
        "the dustbin, spray sanitizing liquid on the",
        "basin and countertop, mop the floor, wipe",
        "the basin and countertop and finally return",
        "to the position where it started from.",
        "",
        "EVENTS",
        "",
        " - 1 : Enter bathroom",
        " - 2 : Detect sink",
        " - 3 : Pick and place trash",
        " - 4 : Spray sanitizing liquid",
        " - 5 : Mop the floor",
        " - 6 : Wipe the basin and countertop",
        " - 7 : Return to its start position",
        "",
    ]
    content = "\n".join(lines) + "\n"
    if not py_trees.console.has_colours:
        return content
    banner_line = console.green + "*" * 79 + "\n" + console.reset
    title = console.bold_white + "Artpark Robotics Challenge".center(79) + "\n" + console.reset
    return "\n" + banner_line + title + banner_line + "\n" + content + "\n" + banner_line
def epilog():
    """Return the colourised epilog line, or None when the console has no colours."""
    if not py_trees.console.has_colours:
        return None
    return console.cyan + "And his noodly appendage reached forth to tickle the blessed...\n" + console.reset
def command_line_argument_parser():
    """Build the demo's argparse parser; render/interactive flags are mutually exclusive."""
    parser = argparse.ArgumentParser(
        description=description(),
        epilog=epilog(),
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    mode_group = parser.add_mutually_exclusive_group()
    mode_group.add_argument('-r', '--render', action='store_true', help='render dot tree to file')
    mode_group.add_argument(
        '--render-with-blackboard-variables',
        action='store_true',
        help='render dot tree to file with blackboard variables'
    )
    mode_group.add_argument('-i', '--interactive', action='store_true', help='pause and wait for keypress at each tick')
    return parser
def pre_tick_handler(behaviour_tree):
    """Print a run-count banner before each behaviour-tree tick."""
    banner = "\n--------- Run %s ---------\n" % behaviour_tree.count
    print(banner)
#----------------- Meta data end -----------------#
#----------------- Moveit class start -----------------#
class MoveitSetup:
    """Thin wrapper around a MoveIt MoveGroupCommander for one planning group
    (e.g. "arm" or "gripper"): pose/joint goals, cartesian paths and
    path-constraint helpers."""

    # Constructor
    def __init__(self, x):
        """Initialise MoveIt commander objects for planning group *x*.

        :param x: name of the planning group defined in the SRDF.
        """
        self._planning_group = x
        self._commander = moveit_commander.roscpp_initialize(sys.argv)
        self._robot = moveit_commander.RobotCommander()
        self._scene = moveit_commander.PlanningSceneInterface()
        self._group = moveit_commander.MoveGroupCommander(self._planning_group)
        self._display_trajectory_publisher = rospy.Publisher(
            '/move_group/display_planned_path', moveit_msgs.msg.DisplayTrajectory, queue_size=1)
        self._exectute_trajectory_client = actionlib.SimpleActionClient(
            'execute_trajectory', moveit_msgs.msg.ExecuteTrajectoryAction)
        # Blocks until the execute_trajectory action server is up.
        self._exectute_trajectory_client.wait_for_server()
        self._planning_frame = self._group.get_planning_frame()
        self._eef_link = self._group.get_end_effector_link()
        self._group_names = self._robot.get_group_names()

    # Function to go to specified position
    def go_to_pose(self, arg_pose):
        """Plan and execute (blocking) a move to the given end-effector pose.

        :returns: the planner's success flag from MoveGroupCommander.go().
        """
        self._group.set_pose_target(arg_pose)
        flag_plan = self._group.go(wait=True)  # wait=False for Async Move
        return flag_plan

    # Function to set joint angles
    def set_joint_angles(self, arg_list_joint_angles):
        """Plan and execute (blocking) a move to the given joint configuration.

        :returns: the planner's success flag from MoveGroupCommander.go().
        """
        self._group.set_joint_value_target(arg_list_joint_angles)
        flag_plan = self._group.go(wait=True)
        return flag_plan

    # Function to go to pre defined position
    def go_to_predefined_pose(self, arg_pose_name):
        """Move (best-effort) to a pose named in the SRDF.

        Failures are deliberately swallowed -- callers such as
        SetPredefinedGripperPose retry this in a loop.
        """
        try:
            rospy.sleep(1)
            self._group.set_named_target(arg_pose_name)
            plan = self._group.go()
        except:
            pass

    def cartesian_path(self, waypoints):
        """Plan a cartesian path through *waypoints* (1 mm eef step) and execute it."""
        (plan, fraction) = self._group.compute_cartesian_path(
            waypoints,  # waypoints to follow
            0.001,      # eef_step
            0.0)        # jump_threshold
        self._group.execute(plan, wait=True)

    def cartesian_path2(self, waypoints):
        """Finer-grained cartesian path (0.1 mm eef step) for delicate motions."""
        (plan, fraction) = self._group.compute_cartesian_path(
            waypoints,  # waypoints to follow
            0.0001,     # eef_step
            0.0)        # jump_threshold
        self._group.execute(plan, wait=True)

    def init_stay_up_constraints(self):
        """Constrain the prismatic apbot_joint near 0.7 so the arm stays raised."""
        self.up_constraints = Constraints()
        joint_constraint = JointConstraint()
        self.up_constraints.name = "stay_up"
        joint_constraint.position = 0.7
        joint_constraint.tolerance_above = .1
        joint_constraint.tolerance_below = .1
        joint_constraint.weight = 1
        joint_constraint.joint_name = "apbot_joint"
        self.up_constraints.joint_constraints.append(joint_constraint)
        self._group.set_path_constraints(self.up_constraints)

    def init_wipe_constraints(self):
        """Constrain shoulder rotation and height while wiping."""
        self.wipe_constraints = Constraints()
        rotation_joint_constraint = JointConstraint()
        height_joint_constraint = JointConstraint()
        self.wipe_constraints.name = "wipe"
        rotation_joint_constraint.position = 0
        rotation_joint_constraint.tolerance_above = 1.6
        rotation_joint_constraint.tolerance_below = 1.6
        rotation_joint_constraint.weight = 1
        rotation_joint_constraint.joint_name = "shoulder_pan_joint"
        height_joint_constraint.position = .7
        height_joint_constraint.tolerance_above = .1
        height_joint_constraint.tolerance_below = .1
        height_joint_constraint.weight = 1
        height_joint_constraint.joint_name = "apbot_joint"
        self.wipe_constraints.joint_constraints.append(rotation_joint_constraint)
        self.wipe_constraints.joint_constraints.append(height_joint_constraint)
        self._group.set_path_constraints(self.wipe_constraints)

    def init_spray_constraints(self):
        """Constrain shoulder rotation, height and elbow while spraying."""
        self.spray_constraints = Constraints()
        rotation_joint_constraint = JointConstraint()
        height_joint_constraint = JointConstraint()
        elbow_joint_constraint = JointConstraint()
        self.spray_constraints.name = "spray"
        rotation_joint_constraint.position = 0
        rotation_joint_constraint.tolerance_above = 1.6
        rotation_joint_constraint.tolerance_below = 1.6
        rotation_joint_constraint.weight = 1
        rotation_joint_constraint.joint_name = "shoulder_pan_joint"
        height_joint_constraint.position = .7
        height_joint_constraint.tolerance_above = .1
        height_joint_constraint.tolerance_below = .1
        height_joint_constraint.weight = 1
        height_joint_constraint.joint_name = "apbot_joint"
        elbow_joint_constraint.position = 0
        elbow_joint_constraint.tolerance_above = 0
        # NOTE(review): a negative tolerance_below is suspicious -- MoveIt
        # expects non-negative tolerances; confirm the intended range.
        elbow_joint_constraint.tolerance_below = -3.14
        elbow_joint_constraint.weight = 1
        # BUG FIX: this line previously assigned to
        # height_joint_constraint.joint_name (copy-paste), re-targeting the
        # height constraint to the elbow and leaving this constraint unnamed.
        elbow_joint_constraint.joint_name = "elbow_joint"
        self.spray_constraints.joint_constraints.append(rotation_joint_constraint)
        self.spray_constraints.joint_constraints.append(height_joint_constraint)
        self.spray_constraints.joint_constraints.append(elbow_joint_constraint)
        self._group.set_path_constraints(self.spray_constraints)

    # Destructor
    def __del__(self):
        """Shut down moveit_commander when the wrapper is garbage-collected."""
        moveit_commander.roscpp_shutdown()
#----------------- Moveit class end -----------------#
# Move base goal function
def movebase_client(goal_x, goal_y, quat):
    """Send one move_base goal at (goal_x, goal_y) and block until it finishes.

    :param quat: quaternion whose z/w components carry the planar yaw.
    :returns: the action result, or None (after signalling shutdown) when the
        server produced no result.
    """
    # Action client against the "move_base" server (MoveBaseAction definition)
    mb_client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
    # Wait until the action server is up and listening for goals.
    mb_client.wait_for_server()
    goal = MoveBaseGoal()
    goal.target_pose.header.frame_id = "map"
    goal.target_pose.header.stamp = rospy.Time.now()
    goal.target_pose.pose.position.x = goal_x
    goal.target_pose.pose.position.y = goal_y
    # Planar navigation: only z/w of the quaternion are meaningful here.
    goal.target_pose.pose.orientation.z = quat[2]
    goal.target_pose.pose.orientation.w = quat[3]
    mb_client.send_goal(goal)
    # If no result arrives, assume the server went away and shut down.
    if not mb_client.wait_for_result():
        rospy.logerr("Action server not available!")
        rospy.signal_shutdown("Action server not available!")
    else:
        return mb_client.get_result()
# Move base goal function end
#----------------- Tree nodes start -----------------#
# Initialize reusable variables
class InitializeReusables(py_trees.behaviour.Behaviour):
    # One-shot setup node: records the robot's start pose and constructs the
    # module-level singletons (action client, MoveIt wrappers, detectors)
    # used by every other behaviour in this tree.
    def __init__(self):
        super().__init__(name="InitializeReusables")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key("initializations", access=py_trees.common.Access.WRITE)
        self.blackboard.register_key("start_coordinates", access=py_trees.common.Access.WRITE)
    def get_pose(self, data):
        # Odometry callback: store [x, y, yaw=0] as the start pose, then
        # unsubscribe so only the first message is used.
        self.blackboard.start_coordinates = [data.pose.pose.position.x, data.pose.pose.position.y, 0]
        self.pose_sub.unregister()
    def setup(self):
        # Populates module-level globals consumed by the other tree nodes
        # (GoToGoal uses `client`, SetPredefinedArmPose uses `apbot_arm`, ...).
        # NOTE(review): pose_x/pose_y/door_bb are declared global but never
        # assigned here -- presumably set elsewhere; verify before relying on them.
        global client, apbot_arm, apbot_gripper, ct, bb, duf, markings_bb, door_bb, cf, df, tp, mp, table_top, sink, wall_align, pose_x, pose_y
        global dustbin_bb
        self.pose_sub = rospy.Subscriber('/odom', Odometry, self.get_pose)
        # Move base client
        client = actionlib.SimpleActionClient('move_base',MoveBaseAction)
        client.wait_for_server()
        # Moveit class object
        apbot_arm = MoveitSetup("arm")
        apbot_gripper = MoveitSetup("gripper")
        # Countertop object
        ct = Countertop(apbot_arm)
        # Bounding boxes class for yolo
        bb = BoundingBoxes(trash_marking = True)
        markings_bb = BoundingBoxes(trash_marking=True, item_specific="markings")
        dustbin_bb = BoundingBoxes(item="dustbin", trash_marking=False, isDustbin=True)
        # Fusion class for cans
        cf = fusion(debug="trash")
        # Fusion class for door
        df = fusion(debug="marking")
        # Fusion class for dustbin
        duf = fusion()
        # Pick pipeline class
        tp = PickPipeline()
        # Pick pipeline for markings
        mp = PickPipeline()
        # Table top detection object
        table_top = TableTop()
        # Sink detection object
        sink = SinkDetection()
        # Wall align object
        wall_align = WallAlign()
    def update(self):
        # Setup happens in setup(); ticking this node always succeeds.
        return py_trees.common.Status.SUCCESS
# Send move base goal
class GoToGoal(py_trees.behaviour.Behaviour):
    """Send a move_base goal and report RUNNING until the action succeeds.

    The goal is either given explicitly as [x, y, yaw] or pulled from the
    blackboard when one of the is* flags is set; when several flags are set
    the later checks win (counter < center < door < start).
    """

    def __init__(self, name, goal=None, isStart=False, isCounter=False, isDustbin=False, isCenter=False, isDoor=False):
        """
        BUG FIX: *goal* previously defaulted to a shared mutable list
        ([0, 0, 0]); every instance (and every caller mutating it) shared the
        same object. Default to None and build a fresh list per instance.
        """
        super().__init__(name=name)
        self.goal = [0, 0, 0] if goal is None else goal
        self.isCounter = isCounter
        self.isDustbin = isDustbin
        self.isCenter = isCenter
        self.isDoor = isDoor
        self.isStart = isStart
        self.blackboard = self.attach_blackboard_client(name=self.name)

    def initialise(self):
        self.blackboard.register_key("countertop_coordinates", access=py_trees.common.Access.READ)
        self.blackboard.register_key("door_coordinates", access=py_trees.common.Access.READ)
        self.blackboard.register_key("center_coordinates", access=py_trees.common.Access.READ)
        self.blackboard.register_key("start_coordinates", access=py_trees.common.Access.READ)
        # Flag-selected goals override the explicit one; order defines priority.
        if self.isCounter:
            self.goal = self.blackboard.countertop_coordinates
        if self.isCenter:
            self.goal = self.blackboard.center_coordinates
        if self.isDoor:
            self.goal = self.blackboard.door_coordinates
        if self.isStart:
            self.goal = self.blackboard.start_coordinates
        # Convert euler yaw to quaternion
        quat_angle = quaternion_from_euler(0, 0, self.goal[2])
        # Creates a new goal with the MoveBaseGoal constructor
        self.mb_goal = MoveBaseGoal()
        self.mb_goal.target_pose.header.frame_id = "map"
        self.mb_goal.target_pose.header.stamp = rospy.Time.now()
        # Set goal position
        self.mb_goal.target_pose.pose.position.x = self.goal[0]
        self.mb_goal.target_pose.pose.position.y = self.goal[1]
        # Set goal orientation
        self.mb_goal.target_pose.pose.orientation.x = quat_angle[0]
        self.mb_goal.target_pose.pose.orientation.y = quat_angle[1]
        self.mb_goal.target_pose.pose.orientation.z = quat_angle[2]
        self.mb_goal.target_pose.pose.orientation.w = quat_angle[3]
        # Sends the goal to the action server (module-level client from setup)
        client.send_goal(self.mb_goal)

    def update(self):
        print(console.green + "---------------------------")
        state = client.get_state()
        # 3 == actionlib GoalStatus.SUCCEEDED
        if state==3:
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
# Navigate to the front of the bathroom door
class ReachBathroomDoor(py_trees.behaviour.Behaviour):
    """Launch the external enter.py navigation node and wait until it reports "Done"."""

    def __init__(self):
        super().__init__(name="ReachBathroomDoor")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.complete = False
        self.enter_done_sub = None

    def initialise(self):
        cmd = ["rosrun","apbot_nav","enter.py"]
        self.proc = subprocess.Popen(cmd)
        # BUG FIX: the original subscribed inside update(), creating a brand
        # new /enter_done subscription on every tick and unregistering only
        # the latest one (subscription leak). Subscribe exactly once here.
        self.complete = False
        self.enter_done_sub = rospy.Subscriber('/enter_done', String, self.enter_done_callback)

    def enter_done_callback(self, data):
        # enter.py publishes "Done" when the robot is through the door.
        if data.data == "Done":
            self.complete = True

    def update(self):
        print("\n" +console.green + "---------------------------")
        if self.complete:
            self.enter_done_sub.unregister()
            self.proc.terminate()
            return py_trees.common.Status.SUCCESS
        else:
            return py_trees.common.Status.RUNNING
# Get basic poses
class GetBasicPoses(py_trees.behaviour.Behaviour):
    # Computes two map-frame waypoints from the robot's current footprint:
    # a "center" pose (1.75 m ahead, 0.9 m left, facing +90 deg) and a "door"
    # pose (0.75 m ahead), and writes both to the blackboard.
    def __init__(self):
        super().__init__(name="GetBasicPoses")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key("center_coordinates", access=py_trees.common.Access.WRITE)
        self.blackboard.register_key("door_coordinates", access=py_trees.common.Access.WRITE)
    def update(self):
        # Both poses are first expressed relative to the robot base, then
        # transformed into the map frame below.
        self.center_pose = PoseStamped()
        self.center_pose.header.frame_id = "base_footprint"
        self.center_pose.pose.position.x = 1.75
        self.center_pose.pose.position.y = .9
        self.center_pose.pose.position.z = .6
        quat_center = quaternion_from_euler(0,0,math.pi/2)
        self.center_pose.pose.orientation.x = quat_center[0]
        self.center_pose.pose.orientation.y = quat_center[1]
        self.center_pose.pose.orientation.z = quat_center[2]
        self.center_pose.pose.orientation.w = quat_center[3]
        self.door_pose = PoseStamped()
        self.door_pose.header.frame_id = "base_footprint"
        self.door_pose.pose.position.x = .75
        self.door_pose.pose.position.y = 0
        self.door_pose.pose.position.z = .6
        quat_door = quaternion_from_euler(0,0,0)
        self.door_pose.pose.orientation.x = quat_door[0]
        self.door_pose.pose.orientation.y = quat_door[1]
        self.door_pose.pose.orientation.z = quat_door[2]
        self.door_pose.pose.orientation.w = quat_door[3]
        # NOTE(review): `t` is presumably a module-level tf TransformListener
        # created outside this excerpt -- verify. The bare-except busy loops
        # retry until the map<->base transform becomes available; they spin
        # forever if tf never provides it.
        while True:
            try:
                self.center_in_map = t.transformPose("/map", self.center_pose)
                break
            except:
                continue
        while True:
            try:
                self.door_in_map = t.transformPose("/map", self.door_pose)
                break
            except:
                continue
        # Extract yaw from the transformed orientations; only component [2]
        # (yaw) of the euler triple is stored on the blackboard.
        center_in_map_quat = (self.center_in_map.pose.orientation.x, self.center_in_map.pose.orientation.y, self.center_in_map.pose.orientation.z, self.center_in_map.pose.orientation.w)
        door_in_map_quat = (self.door_in_map.pose.orientation.x, self.door_in_map.pose.orientation.y, self.door_in_map.pose.orientation.z, self.door_in_map.pose.orientation.w)
        center_z = euler_from_quaternion(center_in_map_quat)
        door_z = euler_from_quaternion(door_in_map_quat)
        self.blackboard.center_coordinates = [self.center_in_map.pose.position.x, self.center_in_map.pose.position.y, center_z[2]]
        self.blackboard.door_coordinates = [self.door_in_map.pose.position.x, self.door_in_map.pose.position.y, door_z[2]]
        return py_trees.common.Status.SUCCESS
# Set arm to pre-defined pose
class SetPredefinedArmPose(py_trees.behaviour.Behaviour):
    """Move the arm to a named (SRDF-defined) MoveIt pose, then succeed."""

    def __init__(self, pose_name, name):
        super().__init__(name=name)
        self.pose_name = pose_name
        self.blackboard = self.attach_blackboard_client(name=self.name)

    def update(self):
        message = console.yellow + "Sending arm to " + str(self.pose_name) + " pose"
        print(message)
        apbot_arm.go_to_predefined_pose(self.pose_name)
        rospy.sleep(1)
        return py_trees.common.Status.SUCCESS
# Set gripper to pre-defined pose
class SetPredefinedGripperPose(py_trees.behaviour.Behaviour):
    """Move the gripper to a named MoveIt pose, retrying up to three times."""

    def __init__(self, pose_name, name):
        super().__init__(name=name)
        self.pose_name = pose_name
        self.blackboard = self.attach_blackboard_client(name=self.name)

    def update(self):
        print(console.yellow + "Sending gripper to " + str(self.pose_name) + " pose")
        attempts = 0
        # Best-effort: the gripper command occasionally fails, so retry
        # a fixed number of times, ignoring errors.
        while attempts < 3:
            try:
                apbot_gripper.go_to_predefined_pose(self.pose_name)
            except:
                pass
            attempts += 1
        rospy.sleep(1)
        return py_trees.common.Status.SUCCESS
# Detect countertop
class DetectCountertop(py_trees.behaviour.Behaviour):
    """Run table-top detection and store the (sanity-clamped) result on the blackboard."""

    def __init__(self):
        super().__init__(name="DetectCounter")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="counter_details",access=py_trees.common.Access.WRITE)

    def update(self):
        self.blackboard.counter_details = table_top.start_detection()
        # Replace implausibly small detections with default dimensions.
        if self.blackboard.counter_details["Table Dimension"][1] < .2:
            self.blackboard.counter_details["Table Dimension"][1] = .6
        if self.blackboard.counter_details["Table Dimension"][0] < 1:
            self.blackboard.counter_details["Table Dimension"][0] = 1.2
        return py_trees.common.Status.SUCCESS
# Get countertop co-ordinates
class GetCounterCooridnates(py_trees.behaviour.Behaviour):
    """Compute a nav goal 0.7 m in front of the detected counter and record which side the sink is on."""

    def __init__(self):
        super().__init__(name="GetCounterCooridnates")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="countertop_coordinates",access=py_trees.common.Access.WRITE)
        self.blackboard.register_key(key="sink_side",access=py_trees.common.Access.WRITE)
        self.blackboard.register_key(key="counter_details",access=py_trees.common.Access.READ)

    def initialise(self):
        print("\n" +console.green + "---------------------------")
        print(console.yellow + "Getting countertop co-ordinates...")

    def update(self):
        details = self.blackboard.counter_details
        sink_x = details["Sink Center"].pose.position.x
        table_x = details["Table Center"].pose.position.x
        self.blackboard.sink_side = "Left" if sink_x > table_x else "Right"
        # Retry the transform until tf has the map frame available.
        counter_pose_in_map = None
        while counter_pose_in_map is None:
            try:
                counter_pose_in_map = t.transformPose("/map", details["Table Center"])
            except:
                pass
        quat = (counter_pose_in_map.pose.orientation.x,
                counter_pose_in_map.pose.orientation.y,
                counter_pose_in_map.pose.orientation.z,
                counter_pose_in_map.pose.orientation.w)
        yaw = euler_from_quaternion(quat)[2]
        # Stand 0.7 m back from the counter centre along its facing direction.
        self.blackboard.countertop_coordinates = (
            counter_pose_in_map.pose.position.x - .7*math.cos(yaw),
            counter_pose_in_map.pose.position.y - .7*math.sin(yaw),
            yaw,
        )
        return py_trees.common.Status.SUCCESS
# Detect Sink
class DetectSink(py_trees.behaviour.Behaviour):
    """Run sink detection, but only when this node's side matches the detected sink side."""

    def __init__(self, side):
        super().__init__(name="DetectSink"+side)
        self.side = side
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="sink_side",access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="sink_details",access=py_trees.common.Access.WRITE)

    def update(self):
        print(self.blackboard.sink_side)
        # Not our side: nothing to do, still succeed so the tree proceeds.
        if self.side != self.blackboard.sink_side:
            return py_trees.common.Status.SUCCESS
        apbot_arm.go_to_predefined_pose("sink_detect")
        details = sink.start_sink_detection()
        # "NA" marks a failed detection for downstream consumers (CountertopWipe).
        self.blackboard.sink_details = details if details else "NA"
        return py_trees.common.Status.SUCCESS
# Align robot at counter
class CounterAlign(py_trees.behaviour.Behaviour):
    """Two-stage alignment against the counter: orientation first, then distance."""

    def __init__(self, name):
        super().__init__(name=name)

    def initialise(self):
        self.aligned = False
        self.dist_fixed = False
        self.counter_aligned = False
        wall_align.t0 = rospy.get_time()

    def update(self):
        # Run the stages in order; stay RUNNING until each reports done.
        for flag, step in (("aligned", wall_align.align), ("dist_fixed", wall_align.dist_fix)):
            if not getattr(self, flag):
                setattr(self, flag, step())
                return py_trees.common.Status.RUNNING
        return py_trees.common.Status.SUCCESS
# Go to right side of counter
class GoRight(py_trees.behaviour.Behaviour):
    """Strafe right until the counter centre sits a quarter table-length to the robot's left."""

    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="countertop_coordinates",access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="counter_details",access=py_trees.common.Access.READ)

    def initialise(self):
        print("\n" +console.green + "---------------------------")
        print(console.yellow + "Going right...")
        self.counter_coordinates = self.blackboard.countertop_coordinates
        self.table_length = self.blackboard.counter_details["Table Dimension"][0]

    def update(self):
        target = PoseStamped()
        target.header.frame_id = "/map"
        target.pose.position.x = self.counter_coordinates[0]
        target.pose.position.y = self.counter_coordinates[1]
        # Retry the transform until tf has the base_footprint frame.
        counter_in_base = None
        while counter_in_base is None:
            try:
                counter_in_base = t.transformPose("/base_footprint", target)
            except:
                pass
        # Lateral error relative to the desired quarter-length offset.
        offset = counter_in_base.pose.position.y - self.table_length/4
        vel = Twist()
        if offset < -0.01:
            vel.linear.y = offset*.2  # proportional sideways velocity
            vel_pub.publish(vel)
            return py_trees.common.Status.RUNNING
        vel.linear.y = 0
        vel_pub.publish(vel)
        return py_trees.common.Status.SUCCESS
# Go to left side of counter
class GoLeft(py_trees.behaviour.Behaviour):
    """Strafe left until the counter centre sits a quarter table-length to the robot's right."""

    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="countertop_coordinates",access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="counter_details",access=py_trees.common.Access.READ)

    def initialise(self):
        print("\n" +console.green + "---------------------------")
        print(console.yellow + "Going left...")
        self.counter_coordinates = self.blackboard.countertop_coordinates
        self.table_length = self.blackboard.counter_details["Table Dimension"][0]

    def update(self):
        target = PoseStamped()
        target.header.frame_id = "/map"
        target.pose.position.x = self.counter_coordinates[0]
        target.pose.position.y = self.counter_coordinates[1]
        # Retry the transform until tf has the base_footprint frame.
        counter_in_base = None
        while counter_in_base is None:
            try:
                counter_in_base = t.transformPose("/base_footprint", target)
            except:
                pass
        # Lateral error relative to the desired quarter-length offset.
        offset = counter_in_base.pose.position.y + self.table_length/4
        vel = Twist()
        if offset > 0.01:
            vel.linear.y = offset*.2  # proportional sideways velocity
            vel_pub.publish(vel)
            return py_trees.common.Status.RUNNING
        vel.linear.y = 0
        vel_pub.publish(vel)
        return py_trees.common.Status.SUCCESS
# Spray countertop
class CountertopSpray(py_trees.behaviour.Behaviour):
    """Spray the countertop.

    NOTE(review): ``side`` and the registered ``sink_side`` key are not read
    by update(); spraying always runs ``ct.wipe_right`` — confirm intended.
    """
    def __init__(self, name: str, side):
        super().__init__(name=name)
        self.side = side
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="sink_side", access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="counter_details", access=py_trees.common.Access.READ)
    def initialise(self):
        print("\n" +console.green + "---------------------------")
        print(console.yellow + "Spraying countertop")
    def update(self):
        dims = self.blackboard.counter_details["Table Dimension"]
        ct.wipe_right(dims)
        rospy.sleep(1)
        return py_trees.common.Status.SUCCESS
# Wipe countertop
class CountertopWipe(py_trees.behaviour.Behaviour):
    """Wipe the countertop, cleaning the sink when it is on this side."""
    def __init__(self, name: str, side):
        super().__init__(name=name)
        self.side = side
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="sink_side", access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="counter_details", access=py_trees.common.Access.READ)
        self.blackboard.register_key(key="sink_details", access=py_trees.common.Access.READ)
    def initialise(self):
        print("\n" +console.green + "---------------------------")
        print(console.yellow + "Wiping countertop")
    def update(self):
        dims = self.blackboard.counter_details["Table Dimension"]
        # Sink is on the other side: just spray both halves of the counter.
        if self.blackboard.sink_side != self.side:
            ct.spray_right(dims)
            rospy.sleep(.1)
            ct.spray_left(dims)
            return py_trees.common.Status.SUCCESS
        sink = self.blackboard.sink_details
        # No sink was detected on this side — nothing extra to clean.
        if sink == "NA":
            return py_trees.common.Status.SUCCESS
        center = sink["Sink Center"]
        sink_xy = [center[0], center[1]]
        # Clean the basin first, then the counter strip beside it.
        ct.sink_clean(sink_xy, center[2], sink["Sink Top Dimensions"], sink["Sink Bottom Dimensions"])
        rospy.sleep(1)
        ct.sink_side_clean_right(sink_xy, dims, sink["Sink Top Dimensions"])
        rospy.sleep(1)
        return py_trees.common.Status.SUCCESS
# Attach/Detach trash tray
class JointAttach(py_trees.behaviour.Behaviour):
    """Attach or detach two links of the 'apbot' model via the link-attacher service."""
    def __init__(self, name, link1, link2, attach):
        super().__init__(name=name)
        self.link1 = link1
        self.link2 = link2
        self.attach = attach
    def initialise(self):
        # Build the service request once per activation.
        self.req = AttachRequest()
        self.req.model_name_1 = "apbot"
        self.req.link_name_1 = self.link1
        self.req.model_name_2 = "apbot"
        self.req.link_name_2 = self.link2
        # Same request type for both directions; only the endpoint differs.
        endpoint = '/link_attacher_node/attach' if self.attach else '/link_attacher_node/detach'
        self.joint_service = rospy.ServiceProxy(endpoint, Attach)
        self.joint_service.wait_for_service()
    def update(self):
        self.joint_service.call(self.req)
        return py_trees.common.Status.SUCCESS
# Attach/Detach bot
class BotAttach(py_trees.behaviour.Behaviour):
    """Pin the robot base to (or release it from) the world's front wall."""
    def __init__(self, name, attach):
        super().__init__(name=name)
        self.attach = attach
    def initialise(self):
        # Fixed link pair: world front wall <-> robot base.
        self.req = AttachRequest()
        self.req.model_name_1 = "artpark_world"
        self.req.link_name_1 = "wall_front_1"
        self.req.model_name_2 = "apbot"
        self.req.link_name_2 = "base_footprint"
        endpoint = '/link_attacher_node/attach' if self.attach else '/link_attacher_node/detach'
        self.joint_service = rospy.ServiceProxy(endpoint, Attach)
        self.joint_service.wait_for_service()
    def update(self):
        self.joint_service.call(self.req)
        return py_trees.common.Status.SUCCESS
# Attach/detach trash with tray
class TrashAttach(py_trees.behaviour.Behaviour):
    """Create (or break) the joints holding picked trash to the tray."""
    def __init__(self, name, attach):
        super().__init__(name=name)
        self.attach = attach
    def update(self):
        tp.trash_tray_joints(attach=self.attach)
        rospy.sleep(1)  # let the attacher settle before the next behaviour
        return py_trees.common.Status.SUCCESS
# Reinitialize namy.py var
class ReName(py_trees.behaviour.Behaviour):
    """Reconfigure the shared detector object for dustbin mode."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        # Re-run bb's constructor in place so detection targets the dustbin.
        bb.__init__(item="dustbin", trash_marking=False, isDustbin=True)
        return py_trees.common.Status.SUCCESS
# Start dustbin detection
class StartDustbinDetect(py_trees.behaviour.Behaviour):
    """Switch the dustbin detector on; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        bb.listener(True)
        return py_trees.common.Status.SUCCESS
# Stop dustbin detection
class StopDustbinDetect(py_trees.behaviour.Behaviour):
    """Switch the dustbin detector off; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        bb.listener(False)
        return py_trees.common.Status.SUCCESS
# Set dustbin detected to false
class DustbinFalse(py_trees.behaviour.Behaviour):
    """Reset dustbin detection state.

    Stops the detector, clears the blackboard flag, and publishes an empty
    Detection2DArray on /objects so downstream consumers see no detections.
    """
    def __init__(self):
        super().__init__(name="DustbinDetect")
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="has_dustbin_detected", access=py_trees.common.Access.WRITE)
        # Create the publisher once, up front.  The original created it inside
        # update() and published immediately — a rospy Publisher has no
        # connected subscribers right after construction, so that message was
        # almost certainly dropped.
        self.objects_none_pub = rospy.Publisher('/objects', Detection2DArray, queue_size=10)
    def update(self):
        bb.listener(False)
        self.blackboard.has_dustbin_detected = False
        arr = Detection2DArray()
        arr.detections = []
        self.objects_none_pub.publish(arr)
        return py_trees.common.Status.SUCCESS
# Start dustbin fusion
class StartDustbinFusion(py_trees.behaviour.Behaviour):
    """Turn on dustbin fusion and record the first fused pose, if any."""
    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="has_dustbin_detected", access=py_trees.common.Access.WRITE)
        self.blackboard.register_key(key="dustbin_fusion_coordinates", access=py_trees.common.Access.WRITE)
        self.blackboard.has_dustbin_detected = False
    def update(self):
        duf.listener_dustbin(True)
        rospy.sleep(3)  # give fusion time to accumulate detections
        pose = duf.get_dustbin_pose()
        if pose:
            self.blackboard.has_dustbin_detected = True
            self.blackboard.dustbin_fusion_coordinates = pose
        return py_trees.common.Status.SUCCESS
# Update dustbin fusion
class UpdateDustbinFusion(py_trees.behaviour.Behaviour):
    """Re-query the (already running) dustbin fusion and refresh the blackboard."""
    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="has_dustbin_detected", access=py_trees.common.Access.WRITE)
        self.blackboard.register_key(key="dustbin_fusion_coordinates", access=py_trees.common.Access.WRITE)
    def update(self):
        rospy.sleep(3)  # allow fusion to converge before polling
        pose = duf.get_dustbin_pose()
        if pose:
            self.blackboard.has_dustbin_detected = True
            self.blackboard.dustbin_fusion_coordinates = pose
        return py_trees.common.Status.SUCCESS
# Go to dustbin
class GoToDustbin(py_trees.behaviour.Behaviour):
    """Drive to the fused dustbin pose, then creep forward while clearance remains."""
    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="dustbin_fusion_coordinates", access=py_trees.common.Access.READ)
        self.got_laser = False
    def laser_callback(self, data):
        # One-shot: remember the scan ranges, then drop the subscription.
        self.got_laser = True
        self.laser_data = data.ranges
        self.laser_sub.unregister()
    def update(self):
        # Subscribe ONCE and poll.  The original re-created a rospy.Subscriber
        # on every iteration of a busy-wait loop, leaking subscriptions and
        # spinning the CPU until the first scan arrived.
        if not self.got_laser:
            self.laser_sub = rospy.Subscriber('/scan', LaserScan, self.laser_callback)
            while not self.got_laser:
                rospy.sleep(0.05)
        dbc = self.blackboard.dustbin_fusion_coordinates
        result = movebase_client(dbc[0], dbc[1], dbc[2])
        vel = Twist()
        vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
        if result:
            # If the forward beam (index 180) still reads > 10 cm, nudge
            # forward briefly to close the gap.
            # NOTE(review): publishing right after Publisher construction may
            # race subscriber connection — kept as in the original; verify.
            if self.laser_data[180] > .1:
                vel.linear.x = .05
                vel_pub.publish(vel)
                rospy.sleep(2)
                vel.linear.x = 0
                vel_pub.publish(vel)
        return py_trees.common.Status.SUCCESS
# Stop dustbin fusion
class StopDustbinFusion(py_trees.behaviour.Behaviour):
    """Switch dustbin fusion off; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        duf.listener_dustbin(False)
        return py_trees.common.Status.SUCCESS
# Has dustbin been detected
class HasDustbinDetected(py_trees.behaviour.Behaviour):
    """Condition node: SUCCESS iff the blackboard says a dustbin was detected."""
    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="has_dustbin_detected", access=py_trees.common.Access.READ)
    def update(self):
        detected = self.blackboard.has_dustbin_detected
        return py_trees.common.Status.SUCCESS if detected else py_trees.common.Status.FAILURE
# Turn bot to detect dustbin
class TurnBotDustbin(py_trees.behaviour.Behaviour):
    """Rotate the base in place for ~4 s so the camera can find the dustbin."""
    def __init__(self, name):
        super().__init__(name=name)
        self.blackboard = self.attach_blackboard_client(name=self.name)
        self.blackboard.register_key(key="has_dustbin_detected", access=py_trees.common.Access.READ)
        self.pub_vel = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
    def update(self):
        # Only turn when the dustbin has not been found yet.
        if not self.blackboard.has_dustbin_detected:
            spin = Twist()
            spin.angular.z = -.3
            self.pub_vel.publish(spin)
            rospy.sleep(4)
            spin.angular.z = 0
            self.pub_vel.publish(spin)
        return py_trees.common.Status.SUCCESS
# Start trash detection
class StartDetect(py_trees.behaviour.Behaviour):
    """Switch trash detection on; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        bb.listener(True)
        return py_trees.common.Status.SUCCESS
# Stop trash detection
class StopDetect(py_trees.behaviour.Behaviour):
    """Switch trash detection off; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        bb.listener(False)
        return py_trees.common.Status.SUCCESS
# Start marking detection
class MarkingDetect(py_trees.behaviour.Behaviour):
    """Switch marking detection on; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        markings_bb.listener(True)
        return py_trees.common.Status.SUCCESS
# Stop marking detection
class MarkingDetectStop(py_trees.behaviour.Behaviour):
    """Switch marking detection off; succeeds in a single tick."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        markings_bb.listener(False)
        return py_trees.common.Status.SUCCESS
# Publish check
class PublishCheck(py_trees.behaviour.Behaviour):
    """Publish a detection start/stop token on /check after a settle delay."""
    def __init__(self, name, on_or_off):
        super().__init__(name=name)
        self.on_or_off = on_or_off
        # Latched so a late subscriber still receives the last token.
        self.check_pub = rospy.Publisher("/check", String, queue_size=1, latch=True)
    def update(self):
        # "on" -> wait 3 s then "Detect"; anything else -> wait 4 s then "stop".
        delay, token = (3, "Detect") if self.on_or_off == "on" else (4, "stop")
        rospy.sleep(delay)
        self.check_pub.publish(token)
        return py_trees.common.Status.SUCCESS
# Start fusion
class StartFusion(py_trees.behaviour.Behaviour):
    """Start camera fusion; for markings, reconfigure cf into marking mode first."""
    def __init__(self, name, item="trash"):
        super().__init__(name=name)
        self.item = item
    def update(self):
        if self.item == "marking":
            # Re-run cf's constructor in place so fusion targets markings.
            cf.__init__(debug="marking")
        cf.listener(True)
        return py_trees.common.Status.SUCCESS
# End fusion
class EndFusion(py_trees.behaviour.Behaviour):
    """Stop camera fusion; ``item`` is kept only for API symmetry with StartFusion."""
    def __init__(self, name, item="trash"):
        super().__init__(name=name)
        self.item = item
    def update(self):
        cf.listener(False)
        return py_trees.common.Status.SUCCESS
# Clean Entrance
class CleanEntrance(py_trees.behaviour.Behaviour):
    """Wait for the detection pipeline to signal "stop" on /check, then pick up
    the entrance trash with the arm."""
    def __init__(self, name, entrance_status):
        super().__init__(name=name)
        self.entrance_status = entrance_status
    def check_callback(self, data):
        if data.data == "stop":
            self.check_status = True
    def initialise(self):
        self.check_status = False
        # Tell the trash picker whether this run is the entrance pass.
        tp.entrance = self.entrance_status
    def update(self):
        # Subscribe ONCE and poll.  The original constructed a new
        # rospy.Subscriber on every iteration of a busy-wait loop, leaking
        # subscriptions and spinning the CPU.
        sub = rospy.Subscriber("check", String, self.check_callback, queue_size=10)
        while not self.check_status:
            rospy.sleep(0.05)
        sub.unregister()
        tp.go_to_object(apbot_arm, apbot_gripper)
        return py_trees.common.Status.SUCCESS
# Clean markings
class CleanMarking(py_trees.behaviour.Behaviour):
    """Drive the arm through the detected markings and clean them."""
    def __init__(self, name):
        super().__init__(name=name)
    def check_callback(self, data):
        # Retained for parity with CleanEntrance; update() does not poll it.
        if data.data == "stop":
            self.check_status = True
    def initialise(self):
        self.check_status = False
    def update(self):
        mp.go_to_markings(apbot_arm)
        return py_trees.common.Status.SUCCESS
# Drop Trash
class DropTrash(py_trees.behaviour.Behaviour):
    """Run the arm through the tray-tilt sequence to dump trash, then return home."""
    def __init__(self, name):
        super().__init__(name=name)
    def update(self):
        # Out-and-back pose sequence; the drop pose gets a longer settle time
        # so the trash actually falls out.
        sequence = [
            ("tray_up", .2),
            ("before_drop1", .2),
            ("before_drop2", .2),
            ("drop_trash3", 2),
            ("before_drop2", .2),
            ("before_drop1", .2),
            ("tray_up", .2),
        ]
        for pose, pause in sequence:
            apbot_arm.go_to_predefined_pose(pose)
            rospy.sleep(pause)
        return py_trees.common.Status.SUCCESS
# Exit Door
class ExitDoor(py_trees.behaviour.Behaviour):
    """Reverse the base out through the door for ~3 s, then stop."""
    def __init__(self, name):
        super().__init__(name=name)
        self.vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
    def update(self):
        backup = Twist()
        backup.linear.x = -.35
        self.vel_pub.publish(backup)
        rospy.sleep(3)
        backup.linear.x = 0
        self.vel_pub.publish(backup)
        return py_trees.common.Status.SUCCESS
#----------------- Tree nodes end -----------------#
# Create tree
def create_tree():
# Initial use
init_reusables = InitializeReusables()
tray_base_attach1 = JointAttach(name="base_tray_attach1", link1="trash_tray_1", link2="base_footprint", attach=True)
sponge_tray_attach1 = JointAttach(name="sponge_tray_attach1", link1="sponge", link2="base_footprint", attach=True)
tray_gripper_detach = JointAttach(name="trash_gripper_detach", link1="trash_tray_1", link2="gripper_base_link", attach=False)
reach_door_front = ReachBathroomDoor()
get_basic_poses = GetBasicPoses()
# Door sequence end
# Countertop & sink detection
detect_counter_pose1 = SetPredefinedArmPose("look_left", name="DetectCountertop1")
detect_counter_pose2 = SetPredefinedArmPose("look_left", name="DetectCountertop2")
detect_counter_pose_up = SetPredefinedArmPose("counter_door_detect", name="DetectCountertopUp")
detect_counter = DetectCountertop()
get_counter_coordinates = GetCounterCooridnates()
parallel_counter_detect = py_trees.composites.Parallel(name="ParallelCounterDetect", policy=py_trees.common.ParallelPolicy.SuccessOnOne())
parallel_counter_detect.add_children([detect_counter, get_counter_coordinates])
# Dustbin detect sequence
go_to_center_dustbin = GoToGoal(name="DustbinDetectCenter", isCenter=True)
go_to_dustbin_detect_pose = SetPredefinedArmPose(name="DustbinDetectPose", pose_name="dustbin_detect")
dustbin_false = DustbinFalse()
has_dustbin_detected = HasDustbinDetected(name="HasDustbinDetected")
start_dustbin_detection = StartDustbinDetect(name="StartDustbinDetect")
start_dustbin_fusion = StartDustbinFusion(name="StartDustbinFusion")
publish_check_dustbin = PublishCheck(name="PublishCheckDustbin", on_or_off="on")
stop_dustbin_detection = StopDustbinDetect(name="StopDustbinDetect")
publish_check_dustbin_off = PublishCheck(name="PublishCheckDustbinOff", on_or_off="off")
update_dustbn_fusion = UpdateDustbinFusion(name="UpdateDustbinFusion")
turn_bot_dustbin = TurnBotDustbin(name="TurnBotDustbin")
dustbin_sequence = py_trees.composites.Sequence(name="DustbinSequence")
dustbin_sequence.add_children(children=[
start_dustbin_detection,
start_dustbin_fusion,
# publish_check_dustbin,
stop_dustbin_detection,
# publish_check_dustbin_off,
update_dustbn_fusion,
turn_bot_dustbin,])
dustbin_sequence_failure = py_trees.decorators.SuccessIsFailure(child=dustbin_sequence)
dustbin_selector = py_trees.composites.Selector(name="DustbinSelector")
dustbin_selector.add_children(children=[has_dustbin_detected, dustbin_sequence_failure])
go_to_dustbin = GoToDustbin(name="GoToDustbin")
# Dustbin detect sequence end
# Trash drop
create_trash_joints = TrashAttach(attach=True, name="AttachTrash")
go_to_tray_top1 = SetPredefinedArmPose("tray_top", name="TrayTop1")
gripper_open1 = SetPredefinedGripperPose("gripper_open", name="GripperOpen1")
go_to_tray_grab1 = SetPredefinedArmPose("tray_grab", name="TrayGrab1")
gripper_tray_grab = SetPredefinedGripperPose("close_full", name="GripperTrayGrab")
tray_base_detach = JointAttach(name="tray_base_detach", link1="trash_tray_1", link2="base_footprint", attach=False)
tray_gripper_attach = JointAttach(name="tray_gripper_attach", link1="trash_tray_1", link2="gripper_base_link", attach=True)
go_to_tray_up1 = SetPredefinedArmPose("tray_up", name="TrayUp1")
before_drop1 = SetPredefinedArmPose("before_drop1", name="BeforeDrop1")
drop_trash_pose = SetPredefinedArmPose("drop_trash5", name="DropTrashPose1")
timer_wait = py_trees.behaviours.TickCounter(duration=2)
drop_trash = DropTrash(name="DropTrash")
break_trash_joints = TrashAttach(attach=False, name="DetachTrash")
go_to_tray_up2 = SetPredefinedArmPose("tray_up", name="TrayUp2")
go_to_tray_top2 = SetPredefinedArmPose("tray_top", name="TrayTop2")
go_to_tray_grab2 = SetPredefinedArmPose("tray_grab", name="TrayGrab2")
tray_gripper_detach = JointAttach(name="trash_gripper_detach", link1="trash_tray_1", link2="gripper_base_link", attach=False)
gripper_open2 = SetPredefinedGripperPose("gripper_open", name="OpenGripper2")
tray_base_attach2 = JointAttach(name="base_tray_attach2", link1="trash_tray_1", link2="base_footprint", attach=True)
trash_drop_sequence = py_trees.composites.Sequence(name="TrashDropSequence")
go_to_tray_top3 = SetPredefinedArmPose("tray_top", name="TrayTop3")
trash_drop_sequence.add_children(
[
create_trash_joints,
go_to_tray_top1,
gripper_open1,
go_to_tray_grab1,
gripper_tray_grab,
tray_base_detach,
tray_gripper_attach,
go_to_tray_up1,
before_drop1,
drop_trash_pose,
timer_wait,
break_trash_joints,
go_to_tray_up2,
go_to_tray_grab2,
tray_gripper_detach,
gripper_open2,
tray_base_attach2,
go_to_tray_top3]
)
trash_drop_oneshot = py_trees.idioms.oneshot(
behaviour=trash_drop_sequence,
name="TrashDropOneshot",
policy=common.OneShotPolicy.ON_SUCCESSFUL_COMPLETION
)
# Initializations tree
parllel_init = py_trees.composites.Parallel("ParallelInit", policy=py_trees.common.ParallelPolicy.SuccessOnAll())
parllel_init.add_children([init_reusables, tray_base_attach1, sponge_tray_attach1])
# Initializations tree end
#----- Entrance -----#
# Entrance trash parallel fusion & pick
trash_detect_pose_entrance = SetPredefinedArmPose("trash_detect", name="EntranceDetect")
publish_check_entrance = PublishCheck(name="PublishCheckEntrance", on_or_off="on")
publish_check_entrance_off = PublishCheck(name="PublishCheckEntrance", on_or_off="off")
start_detect_entrance = StartDetect(name="SDEntrance")
start_entrance_fusion = StartFusion(name="EntranceFusion")
clean_entrance = CleanEntrance(name="CleanEntrance", entrance_status=1)
entrance_sequence = py_trees.composites.Sequence("EntranceSequence")
stop_detection_entrance = StopDetect(name="StopDetectionEntrance")
go_to_door = GoToGoal(name="Door", isDoor=True)
entrance_sequence.add_children([start_detect_entrance, start_entrance_fusion, publish_check_entrance, stop_detection_entrance, publish_check_entrance_off, clean_entrance, go_to_door])
# Entrance trash parallel fusion & pick end
#----- Entrance End -----#
#----- Trash detection after entering -----#
bot_attach_trash_detection = BotAttach(attach=True, name="BotAttachTrashDetection")
trash_detect_pose1 = SetPredefinedArmPose("trash_detect", name="TrashDetect1")
trash_detect_far_pose1 = SetPredefinedArmPose("trash_detect_far", name="TrashDetectFar1")
trash_detect_left_pose1 = SetPredefinedArmPose("trash_detect_left", name="TrashDetectLeft1")
trash_detect_right_pose1 = SetPredefinedArmPose("trash_detect_right", name="TrashDetectRight1")
trash_detect_lf_pose1 = SetPredefinedArmPose("trash_detect_lf", name="TrashDetectLf1")
trash_detect_rf_pose1 = SetPredefinedArmPose("trash_detect_rf", name="TrashDetectRf1")
trash_detect_left_far_pose1 = SetPredefinedArmPose("trash_detect_left_far", name="TrashDetectLeftFar1")
trash_detect_right_far_pose1 = SetPredefinedArmPose("trash_detect_right_far", name="TrashDetectRightFar1")
trash_detect_lf_far_pose1 = SetPredefinedArmPose("trash_detect_lf_far", name="TrashDetectLfFar1")
trash_detect_rf_far_pose1 = SetPredefinedArmPose("trash_detect_rf_far", name="TrashDetectRfFar1")
publish_check1 = PublishCheck(name="PublishCheck1", on_or_off="on")
publish_check2 = PublishCheck(name="PublishCheck2", on_or_off="on")
publish_check3 = PublishCheck(name="PublishCheck3", on_or_off="on")
publish_check4 = PublishCheck(name="PublishCheck4", on_or_off="on")
publish_check5 = PublishCheck(name="PublishCheck5", on_or_off="on")
publish_check6 = PublishCheck(name="PublishCheck6", on_or_off="on")
publish_check7 = PublishCheck(name="PublishCheck7", on_or_off="on")
publish_check8 = PublishCheck(name="PublishCheck8", on_or_off="on")
publish_check9 = PublishCheck(name="PublishCheck9", on_or_off="on")
publish_check10 = PublishCheck(name="PublishCheck10", on_or_off="on")
publish_check_off1 = PublishCheck(name="PublishCheck1", on_or_off="off")
publish_check_off2 = PublishCheck(name="PublishCheck2", on_or_off="off")
publish_check_off3 = PublishCheck(name="PublishCheck3", on_or_off="off")
publish_check_off4 = PublishCheck(name="PublishCheck4", on_or_off="off")
publish_check_off5 = PublishCheck(name="PublishCheck5", on_or_off="off")
publish_check_off6 = PublishCheck(name="PublishCheck6", on_or_off="off")
publish_check_off7 = PublishCheck(name="PublishCheck7", on_or_off="off")
publish_check_off8 = PublishCheck(name="PublishCheck8", on_or_off="off")
publish_check_off9 = PublishCheck(name="PublishCheck9", on_or_off="off")
publish_check_off10 = PublishCheck(name="PublishCheck10", on_or_off="off")
start_trash_fusion1 = StartFusion(name="TrashFusion1")
start_trash_fusion2 = StartFusion(name="TrashFusion2")
start_trash_fusion3 = StartFusion(name="TrashFusion3")
start_trash_fusion4 = StartFusion(name="TrashFusion4")
start_trash_fusion5 = StartFusion(name="TrashFusion5")
start_trash_fusion6 = StartFusion(name="TrashFusion6")
start_trash_fusion7 = StartFusion(name="TrashFusion7")
start_trash_fusion8 = StartFusion(name="TrashFusion8")
end_trash_fusion1 = EndFusion(name="EndTrashFusion1")
end_trash_fusion2 = EndFusion(name="EndTrashFusion2")
end_trash_fusion3 = EndFusion(name="EndTrashFusion3")
end_trash_fusion4 = EndFusion(name="EndTrashFusion4")
end_trash_fusion5 = EndFusion(name="EndTrashFusion5")
end_trash_fusion6 = EndFusion(name="EndTrashFusion6")
end_trash_fusion7 = EndFusion(name="EndTrashFusion7")
end_trash_fusion8 = EndFusion(name="EndTrashFusion8")
start_detect1 = StartDetect(name="SD1")
start_detect2 = StartDetect(name="SD2")
start_detect3 = StartDetect(name="SD3")
start_detect4 = StartDetect(name="SD4")
start_detect5 = StartDetect(name="SD5")
start_detect6 = StartDetect(name="SD6")
start_detect7 = StartDetect(name="SD7")
start_detect8 = StartDetect(name="SD8")
start_detect9 = StartDetect(name="SD9")
start_detect10 = StartDetect(name="SD10")
stop_detection1 = StopDetect(name="StopDetection1")
stop_detection2 = StopDetect(name="StopDetection2")
stop_detection3 = StopDetect(name="StopDetection3")
stop_detection4 = StopDetect(name="StopDetection4")
stop_detection5 = StopDetect(name="StopDetection5")
stop_detection6 = StopDetect(name="StopDetection6")
stop_detection7 = StopDetect(name="StopDetection7")
stop_detection8 = StopDetect(name="StopDetection8")
stop_detection9 = StopDetect(name="StopDetection9")
stop_detection10 = StopDetect(name="StopDetection10")
bot_detach_trash_detection = BotAttach(attach=False, name="BotDetachTrashDetection")
trash_detect_sequence = py_trees.composites.Sequence("TrashDetectSequence")
trash_detect_sequence.add_children(
[bot_attach_trash_detection,
trash_detect_left_pose1,
start_detect1,
start_trash_fusion1,
publish_check1,
stop_detection1,
publish_check_off1,
# end_trash_fusion1,
trash_detect_left_far_pose1,
start_detect2,
# start_trash_fusion2,
publish_check2,
stop_detection2,
publish_check_off2,
# end_trash_fusion2,
trash_detect_lf_pose1,
start_detect3,
# start_trash_fusion3,
publish_check3,
stop_detection3,
publish_check_off3,
# end_trash_fusion3,
trash_detect_lf_far_pose1,
start_detect4,
# start_trash_fusion4,
publish_check4,
stop_detection4,
publish_check_off4,
# end_trash_fusion4,
trash_detect_pose1,
start_detect5,
# start_trash_fusion5,
publish_check5,
stop_detection5,
publish_check_off5,
# end_trash_fusion5,
trash_detect_far_pose1,
start_detect6,
# start_trash_fusion6,
publish_check6,
stop_detection6,
publish_check_off6,
# end_trash_fusion6,
# trash_detect_rf_pose1,
# start_detect7,
# publish_check7,
# stop_detection7,
# publish_check_off7,
trash_detect_rf_far_pose1,
start_detect8,
# start_trash_fusion8,
publish_check8,
stop_detection8,
publish_check_off8,
# end_trash_fusion8,
# start_trash_fusion7,
# trash_detect_right_pose1,
# start_detect9,
# publish_check9,
# stop_detection9,
# publish_check_off9,
# trash_detect_right_far_pose1,
# start_detect10,
# publish_check10,
# stop_detection10,
# publish_check_off10,
bot_detach_trash_detection
]
)
end_entrance_fusion = EndFusion(name="EndEntranceFusion")
clear_trash = CleanEntrance(name="ClearTrash1", entrance_status=0)
#----- Trash detection after entering end -----#
# Trash drop sub sequence
# Trash drop sub sequence end
# Countertop spray sequence
go_to_center1 = GoToGoal(name="Center1", isCenter=True)
go_to_countertop = GoToGoal(name='CountertopGoal', isCounter=True)
counter_align1 = CounterAlign(name="Align1")
counter_align2 = CounterAlign(name="Align2")
bot_attach_spray1 = BotAttach(attach=True, name="BotAttachSpray1")
bot_attach_spray2 = BotAttach(attach=True, name="BotAttachSpray2")
bot_detach_spray1 = BotAttach(attach=False, name="BotDetachSpray1")
bot_detach_spray2 = BotAttach(attach=False, name="BotDetachSpray2")
counter_align3 = CounterAlign(name="Align3")
counter_align4 = CounterAlign(name="Align4")
counter_right1 = GoRight(name="CounterRight1")
counter_left1 = GoLeft(name="CounterLeft1")
countertop_spray1 = CountertopSpray("Spray1", side="Right")
countertop_spray2 = CountertopSpray("Spray2", side="Left")
arm_up1 = SetPredefinedArmPose("up", name="ArmUp1")
arm_start1 = SetPredefinedArmPose("start", name="ArmStart1")
detect_sink_left = DetectSink(side="Left")
detect_sink_right = DetectSink(side="Right")
countertop_spray_sequence = py_trees.composites.Sequence("CountertopSpraySequence", children=[
go_to_center1,
go_to_countertop,
counter_align1,
counter_left1,
counter_align2,
bot_attach_spray1,
arm_start1,
arm_up1,
detect_sink_left,
countertop_spray1,
bot_detach_spray1,
counter_align3,
counter_right1,
counter_align4,
bot_attach_spray2,
detect_sink_right,
countertop_spray2,
# bot_detach_spray2
])
# Countertop spray sequence end
# Grab sponge sequence
grab_sponge_sequence = py_trees.composites.Sequence("GrabSpongeSequence")
go_to_sponge_up1 = SetPredefinedArmPose("sponge_grip_up", name="SpongeUp1")
open_gripper_sponge1 = SetPredefinedGripperPose("sponge_open", name="SpongeGripperOpen1")
go_to_sponge1 = SetPredefinedArmPose("sponge_grip", name="SpongePose1")
detach_sponge_base = JointAttach(name="SpongeBaseDetach", link1="sponge", link2="base_footprint", attach=False)
grip_sponge1 = SetPredefinedGripperPose("sponge_grip", name="SpongeGrip1")
attach_sponge_gripper = JointAttach(name="SpongeGripperAttach", link1="sponge", link2="gripper_base_link", attach=True)
go_to_sponge_up2 = SetPredefinedArmPose("sponge_grip_up", name="SpongeUp2")
arm_start2 = SetPredefinedArmPose("start", name="ArmStart2")
grab_sponge_sequence.add_children([
# go_to_center3,
go_to_sponge_up1,
open_gripper_sponge1,
go_to_sponge1,
detach_sponge_base,
grip_sponge1,
attach_sponge_gripper,
go_to_sponge_up2,
# arm_start2,
])
# Grab sponge sequence end
# Place sponge back sequence
place_sponge_back_sequence = py_trees.composites.Sequence("PlaceSpongeBackSequence")
go_to_center3 = GoToGoal(name="Center3", isCenter=True)
go_to_sponge_up3 = SetPredefinedArmPose("sponge_grip_up", name="SpongeUp3")
open_gripper_sponge2 = SetPredefinedGripperPose("sponge_open", name="SpongeGripperOpen2")
go_to_sponge2 = SetPredefinedArmPose("sponge_grip", name="SpongePose2")
attach_sponge_base = JointAttach(name="SpongeBaseAttach", link1="sponge", link2="base_footprint", attach=True)
grip_sponge2 = SetPredefinedGripperPose("sponge_grip", name="SpongeGrip2")
detach_sponge_gripper = JointAttach(name="SpongeGripperDetach", link1="sponge", link2="gripper_base_link", attach=False)
go_to_sponge_up4 = SetPredefinedArmPose("sponge_grip_up", name="SpongeUp4")
arm_start2 = SetPredefinedArmPose("start", name="ArmStart2")
place_sponge_back_sequence.add_children([
go_to_center3,
go_to_sponge_up3,
go_to_sponge2,
open_gripper_sponge2,
detach_sponge_gripper,
attach_sponge_base,
go_to_sponge_up4,
arm_start2,
])
# Place sponge back sequence end
# Countertop wiping sequence
go_to_center2 = GoToGoal(name="Center2", isCenter=True)
go_to_countertop2 = GoToGoal(name='CountertopGoal2', isCounter=True)
counter_align5 = CounterAlign(name="Align5")
counter_align6 = CounterAlign(name="Align6")
bot_attach_wipe1 = BotAttach(attach=True, name="BotAttachWipe1")
bot_attach_wipe2 = BotAttach(attach=True, name="BotAttachWipe2")
bot_detach_wipe1 = BotAttach(attach=False, name="BotDetachWipe1")
bot_detach_wipe2 = BotAttach(attach=False, name="BotDetachWipe2")
counter_align7 = CounterAlign(name="Align7")
counter_align8 = CounterAlign(name="Align8")
counter_right2 = GoRight(name="CounterRight2")
counter_left2 = GoLeft(name="CounterLeft2")
countertop_wipe1 = CountertopWipe("Wipe1", side="Right")
countertop_wipe2 = CountertopWipe("Wipe2", side="Left")
arm_up_wipe1 = SetPredefinedArmPose("up", name="ArmUpWipe1")
arm_start_wipe1 = SetPredefinedArmPose("start", name="ArmStartWipe1")
countertop_wiping_sequence = py_trees.composites.Sequence("CountertopWipeSequence", children=[
# go_to_center2,
# go_to_countertop2,
# counter_align5,
# counter_right2,
# bot_attach_wipe1,
# arm_start_wipe1,
arm_up_wipe1,
countertop_wipe1,
bot_detach_wipe1,
counter_align7,
counter_left2,
counter_align8,
bot_attach_wipe2,
countertop_wipe2,
bot_detach_wipe2
])
# Countertop wiping sequence end
# Marking cleaning sequence
go_to_door_marking = GoToGoal(name="MarkingDoor", isDoor=True)
marking_detect_pose1 = SetPredefinedArmPose("trash_detect", name="MarkingDetect1")
marking_detect_far_pose1 = SetPredefinedArmPose("trash_detect_far", name="MarkingDetectFar1")
marking_detect_left_pose1 = SetPredefinedArmPose("trash_detect_left", name="MarkingDetectLeft1")
marking_detect_right_pose1 = SetPredefinedArmPose("trash_detect_right", name="MarkingDetectRight1")
marking_detect_lf_pose1 = SetPredefinedArmPose("trash_detect_lf", name="MarkingDetectLf1")
marking_detect_rf_pose1 = SetPredefinedArmPose("trash_detect_rf", name="MarkingDetectRf1")
marking_detect_left_far_pose1 = SetPredefinedArmPose("trash_detect_left_far", name="MarkingDetectLeftFar1")
marking_detect_right_far_pose1 = SetPredefinedArmPose("trash_detect_right_far", name="MarkingDetectRightFar1")
marking_detect_lf_far_pose1 = SetPredefinedArmPose("trash_detect_lf_far", name="MarkingDetectLfFar1")
marking_detect_rf_far_pose1 = SetPredefinedArmPose("trash_detect_rf_far", name="MarkingDetectRfFar1")
publish_check11 = PublishCheck(name="PublishCheck11", on_or_off="on")
publish_check12 = PublishCheck(name="PublishCheck12", on_or_off="on")
publish_check13 = PublishCheck(name="PublishCheck13", on_or_off="on")
publish_check14 = PublishCheck(name="PublishCheck14", on_or_off="on")
publish_check15 = PublishCheck(name="PublishCheck15", on_or_off="on")
publish_check16 = PublishCheck(name="PublishCheck16", on_or_off="on")
publish_check17 = PublishCheck(name="PublishCheck17", on_or_off="on")
publish_check18 = PublishCheck(name="PublishCheck18", on_or_off="on")
publish_check19 = PublishCheck(name="PublishCheck19", on_or_off="on")
publish_check20 = PublishCheck(name="PublishCheck20", on_or_off="on")
publish_check_off11 = PublishCheck(name="PublishCheck11", on_or_off="off")
publish_check_off12 = PublishCheck(name="PublishCheck12", on_or_off="off")
publish_check_off13 = PublishCheck(name="PublishCheck13", on_or_off="off")
publish_check_off14 = PublishCheck(name="PublishCheck14", on_or_off="off")
publish_check_off15 = PublishCheck(name="PublishCheck15", on_or_off="off")
publish_check_off16 = PublishCheck(name="PublishCheck16", on_or_off="off")
publish_check_off17 = PublishCheck(name="PublishCheck17", on_or_off="off")
publish_check_off18 = PublishCheck(name="PublishCheck18", on_or_off="off")
publish_check_off19 = PublishCheck(name="PublishCheck19", on_or_off="off")
publish_check_off20 = PublishCheck(name="PublishCheck20", on_or_off="off")
start_marking_fusion1 = StartFusion(name="MarkingFusion1", item="marking")
start_marking_fusion2 = StartFusion(name="MarkingFusion2", item="marking")
start_marking_fusion3 = StartFusion(name="MarkingFusion3", item="marking")
start_marking_fusion4 = StartFusion(name="MarkingFusion4", item="marking")
start_marking_fusion5 = StartFusion(name="MarkingFusion5", item="marking")
start_marking_fusion6 = StartFusion(name="MarkingFusion6", item="marking")
start_marking_fusion7 = StartFusion(name="MarkingFusion7", item="marking")
start_marking_fusion8 = StartFusion(name="MarkingFusion8", item="marking")
end_marking_fusion1 = EndFusion(name="EndMarkingFusion1")
end_marking_fusion2 = EndFusion(name="EndMarkingFusion2")
end_marking_fusion3 = EndFusion(name="EndMarkingFusion3")
end_marking_fusion4 = EndFusion(name="EndMarkingFusion4")
end_marking_fusion5 = EndFusion(name="EndMarkingFusion5")
end_marking_fusion6 = EndFusion(name="EndMarkingFusion6")
end_marking_fusion7 = EndFusion(name="EndMarkingFusion7")
end_marking_fusion8 = EndFusion(name="EndMarkingFusion8")
marking_detect1 = MarkingDetect(name="MD1")
marking_detect2 = MarkingDetect(name="MD2")
marking_detect3 = MarkingDetect(name="MD3")
marking_detect4 = MarkingDetect(name="MD4")
marking_detect5 = MarkingDetect(name="MD5")
marking_detect6 = MarkingDetect(name="MD6")
marking_detect7 = MarkingDetect(name="MD7")
marking_detect8 = MarkingDetect(name="MD8")
marking_detect9 = MarkingDetect(name="MD9")
marking_detect10 = MarkingDetect(name="MD10")
marking_stop1 = MarkingDetectStop(name="MS1")
marking_stop2 = MarkingDetectStop(name="MS2")
marking_stop3 = MarkingDetectStop(name="MS3")
marking_stop4 = MarkingDetectStop(name="MS4")
marking_stop5 = MarkingDetectStop(name="MS5")
marking_stop6 = MarkingDetectStop(name="MS6")
marking_stop7 = MarkingDetectStop(name="MS7")
marking_stop8 = MarkingDetectStop(name="MS8")
marking_stop9 = MarkingDetectStop(name="MS9")
marking_stop10 = MarkingDetectStop(name="MS10")
clean_marking = CleanMarking(name="CleanMarking1")
marking_detect_sequence = py_trees.composites.Sequence("MarkingDetectSequence")
marking_detect_sequence.add_children(
[
marking_detect_left_pose1,
marking_detect1,
start_marking_fusion1,
publish_check11,
marking_stop1,
publish_check_off11,
# end_marking_fusion1,
marking_detect_left_far_pose1,
marking_detect2,
# start_marking_fusion2,
publish_check12,
marking_stop2,
publish_check_off12,
# end_marking_fusion2,
marking_detect_lf_pose1,
marking_detect3,
# start_marking_fusion3,
publish_check13,
marking_stop3,
publish_check_off13,
# end_marking_fusion3,
marking_detect_lf_far_pose1,
marking_detect4,
# start_marking_fusion4,
publish_check14,
marking_stop4,
publish_check_off14,
# end_marking_fusion4,
marking_detect_pose1,
marking_detect5,
# start_marking_fusion5,
publish_check15,
marking_stop5,
publish_check_off15,
# end_marking_fusion5,
marking_detect_far_pose1,
marking_detect6,
# start_marking_fusion6,
publish_check16,
marking_stop6,
publish_check_off16,
# end_marking_fusion6,
# marking_detect_rf_pose1,
# marking_detect7,
# publish_check17,
# marking_stop7,
# publish_check_off17,
marking_detect_rf_far_pose1,
marking_detect8,
# start_marking_fusion8,
publish_check18,
marking_stop8,
publish_check_off18,
# end_marking_fusion8,
# start_marking_fusion7,
# marking_detect_right_pose1,
# marking_detect9,
# publish_check19,
# marking_stop9,
# publish_check_off19,
# marking_detect_right_far_pose1,
# marking_detect10,
# publish_check20,
# marking_stop10,
# publish_check_off20,
]
)
# Marking cleaning sequence end
# Go to start
go_to_door2 = GoToGoal(name="GoToDoor2", isDoor=True)
exit_door = ExitDoor(name="ExitDoor")
go_to_start = GoToGoal(name="GoToStart", isStart=True)
# Go to start end
reIn = ReName(name="ReName")
# Main sequence
main_sequence = py_trees.idioms.pick_up_where_you_left_off(
name="MainSequence",
tasks=[
parllel_init,
reach_door_front,
get_basic_poses,
trash_detect_pose_entrance,
entrance_sequence,
end_entrance_fusion,
# detect_counter_pose1,
# detect_counter_pose_up,
# parallel_counter_detect,
# detect_counter_pose2,
trash_detect_sequence,
clear_trash,
reIn,
dustbin_false,
go_to_center_dustbin,
go_to_dustbin_detect_pose,
dustbin_selector,
go_to_dustbin,
trash_drop_oneshot,
# end_trash_fusion7,
# countertop_spray_sequence,
# grab_sponge_sequence,
# countertop_wiping_sequence,
# place_sponge_back_sequence,
# marking_detect_sequence,
# go_to_door_marking,
clean_marking,
go_to_door2,
exit_door,
go_to_start,
]
)
root = main_sequence
return root
if __name__ == '__main__':
    # Declare global variables
    # NOTE(review): `global` at module scope is a no-op; these lines only
    # document the module-level state shared with the behaviour classes.
    global counter_detected, sink_detected
    global dustbin_bb, apbot_arm, apbot_gripper, client, ct, cf, df, duf, tp, mp, table_top, sink, wall_align, bb, markings_bb, door_bb
    global plume, pose_y, pose_x
    # Initialize global variables
    counter_detected = False
    sink_detected = False
    # NOTE(review): vel_pub and the TransformListener `t` below are created
    # but not referenced in this block — presumably used by the behaviours.
    vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
    # Initialize ros node
    rospy.init_node('artpark_node', anonymous=True)
    t = tf.TransformListener()
    # Start py trees: parse CLI args, build the behaviour tree, and render a
    # dot graph of it for offline inspection.
    args = command_line_argument_parser().parse_args()
    py_trees.logging.level = py_trees.logging.Level.DEBUG
    tree = create_tree()
    print(description())
    py_trees.display.render_dot_tree(tree)
    # Keep the last 100 blackboard activity entries for the snapshot visitor.
    py_trees.blackboard.Blackboard.enable_activity_stream(100)
    behaviour_tree = py_trees.trees.BehaviourTree(tree)
    behaviour_tree.add_pre_tick_handler(pre_tick_handler)
    behaviour_tree.visitors.append(py_trees.visitors.DebugVisitor())
    behaviour_tree.visitors.append(
        py_trees.visitors.DisplaySnapshotVisitor(
            display_blackboard=True,
            display_activity_stream=True)
    )
    behaviour_tree.setup(timeout=15)
    # Tick the tree at ~2 Hz until the user interrupts with Ctrl-C.
    while True:
        try:
            behaviour_tree.tick()
            time.sleep(0.5)
        except KeyboardInterrupt:
            break
    print("\n")
|
# Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from pants.backend.scala.target_types import ScalaArtifactFieldSet
from pants.engine.fs import EMPTY_DIGEST
from pants.engine.rules import Get, collect_rules, rule
from pants.engine.unions import UnionRule
from pants.jvm.compile import (
ClasspathDependenciesRequest,
ClasspathEntry,
ClasspathEntryRequest,
CompileResult,
FallibleClasspathEntries,
FallibleClasspathEntry,
)
class ScalaArtifactClasspathEntryRequest(ClasspathEntryRequest):
    """Classpath request handled by `scala_artifact_classpath` for `ScalaArtifactFieldSet` targets."""
    field_sets = (ScalaArtifactFieldSet,)
@rule
async def scala_artifact_classpath(
    request: ScalaArtifactClasspathEntryRequest,
) -> FallibleClasspathEntry:
    """Build the classpath entry for a scala artifact target.

    The entry carries no digest of its own (EMPTY_DIGEST): the artifact's
    classpath is just the merged classpath entries of its dependencies.
    """
    fallible_entries = await Get(FallibleClasspathEntries, ClasspathDependenciesRequest(request))
    classpath_entries = fallible_entries.if_all_succeeded()
    if classpath_entries is None:
        # At least one dependency failed; propagate the failure upward.
        return FallibleClasspathEntry(
            description=str(request.component),
            result=CompileResult.DEPENDENCY_FAILED,
            output=None,
            exit_code=1,
        )
    return FallibleClasspathEntry(
        description=str(request.component),
        result=CompileResult.SUCCEEDED,
        output=ClasspathEntry(EMPTY_DIGEST, dependencies=classpath_entries),
        exit_code=0,
    )
def rules():
    """Register this module's rules and the classpath-request union member."""
    return [
        *collect_rules(),
        UnionRule(ClasspathEntryRequest, ScalaArtifactClasspathEntryRequest),
    ]
|
# Read n, k and an array of n integers from stdin.
n, k = map(int, input().split())
a = list(map(int, input().split()))
# Indices of all elements strictly greater than k.
index = [i for i in range(n) if a[i] > k]
# Prints n minus the length of the smallest contiguous window covering every
# element > k; prints n when no element exceeds k.  NOTE(review): intent
# inferred from the code only — confirm against the problem statement.
print(n - (index[-1] - index[0] + 1) if index else n)
|
from django.urls import path
from . import views

# URL routes for this app; each name is used for reverse() lookups.
# NOTE(review): paths have no trailing slash — with APPEND_SLASH enabled
# Django will redirect "/about/" style requests; confirm this is intended.
urlpatterns = [
    path('', views.home, name='home'),
    path('post_list', views.post_list, name='post_list'),
    path('about', views.about, name='about'),
    path('contato', views.contato, name='contato'),
]
# -*- coding: utf8 -*-
from scapy.all import sniff, sendp
from scapy.all import Packet
from scapy.all import ShortField, IntField, LongField, BitField
from mininet.log import info
import sys
import struct
import time
from collections import Counter
import fire
import random
import re
import os
packet_counts = Counter()  # (src_ip, dst_ip) -> number of packets seen
packet_queue = []          # payloads of packets addressed to this host
# NOTE(review): `global` at module level is a no-op; Tloss is only created
# when receive() assigns it.  (Original comment: temporary packet-loss-rate
# variable.)
global Tloss # temporary variable for the packet loss rate
class action:
    """Sniff callback holder: records ICMP payloads addressed to `IP`.

    rc_pkt is a caller-owned list that receives the same payloads, so the
    caller of receive() can inspect what arrived.
    """
    def __init__(self, IP, rc_pkt):
        self.ip = IP
        self.rc_pkt = rc_pkt

    def custom_action(self, packet):
        # Flow key: (source IP, destination IP) of the sniffed packet.
        key = tuple([packet[0][1].src, packet[0][1].dst])
        if packet[0][1].dst == self.ip:
            # Write the data into the cache (log) file.
            filename = '/home/shlled/mininet-project-duan/TimeSchedule/Log/RU.txt'
            # filename = '/home/shlled/mininet-project-duan/TimeSchedule/Log/%s.txt' % packet[0][1].dst[7:8]
            f1 = open(filename, "a+")
            packet_queue.append(packet[0][3].load)
            self.rc_pkt.append(packet[0][3].load)
            packet_counts.update([key])
            now = time.time()
            # Only a write to the cache counts as an actual reception; the
            # configured loss rate decides whether this packet "arrived".
            global Tloss
            loss = Tloss
            top = int(100-100*loss)
            key = random.randint(1,100)
            # NOTE(review): range(1, top) excludes `top`, so the effective
            # receive probability is (top-2)/100, not (1-loss) — confirm.
            # Also note this local `key` shadows the flow key above, and
            # `info` shadows the mininet.log import.
            if key in range(1,top):
                info = "receive_time: " + "%.6f" % float(now) + " " + packet[0][3].load
                print("info in action :", info)
                f1.write('Receive Packet #%d: %s ==> %s : %s' % (
                    sum(packet_counts.values()), packet[0][1].src, packet[0][1].dst, info))
            # NOTE(review): placement reconstructed from a whitespace-mangled
            # source — close/flush assumed to run for every matching packet.
            f1.close()
            sys.stdout.flush()
# loss is the packet loss rate (0.0 - 1.0).
def receive(ip, iface, loss, filter="icmp", rc_pkt=None):
    """Sniff `iface` for 3 s and record packets addressed to `ip`.

    ip: destination IP to match.
    iface: interface to sniff on.
    loss: simulated packet-loss rate, stored in the module global Tloss.
    filter: BPF filter expression (default "icmp").
    rc_pkt: optional list that receives the captured payloads.
    """
    # BUG FIX: `filter` was accepted but ignored (sniff always used "icmp"),
    # and the mutable default rc_pkt=[] was shared across calls.
    if rc_pkt is None:
        rc_pkt = []
    global Tloss
    Tloss = loss
    sniff(iface=iface, filter=filter, timeout=3, prn=action(ip, rc_pkt).custom_action)
# After sniff() returns, this shows which packets were received.
def packetQueue():
    """Print the per-flow packet counters and all queued payloads."""
    print(packet_counts)
    print(packet_queue)

# Expose receive() as the command-line entry point.
fire.Fire(receive)
|
class Solution:
    def largestOverlap(self, A: List[List[int]], B: List[List[int]]) -> int:
        """Return the largest number of overlapping 1s over all translations of A onto B.

        For every pair of 1-cells (one from A, one from B) the translation
        vector mapping the A-cell onto the B-cell is counted; the most
        frequent vector gives the best overlap.  O(n**2 * m**2) worst case.
        """
        rows, cols = len(A), len(A[0])
        # Coordinates of every 1 in each matrix.
        ones_a = [(r, c) for r in range(rows) for c in range(cols) if A[r][c] == 1]
        ones_b = [(r, c) for r in range(rows) for c in range(cols) if B[r][c] == 1]
        # Histogram of translation vectors (a_row - b_row, a_col - b_col).
        shift_counts = {}
        for ar, ac in ones_a:
            for br, bc in ones_b:
                shift = (ar - br, ac - bc)
                shift_counts[shift] = shift_counts.get(shift, 0) + 1
        # Best shift count, or 0 when one matrix has no 1s at all.
        return max(shift_counts.values() or [0])
from matplotlib.pyplot import imread, imshow, show
from numpy import copy, average
class lineObject:
    """
    Line object that contains a line within a specific image
    """
    def __init__(self, line):
        # line: ordered list of [y, x] pixel coordinates along the line.
        self.length = len(line)
        self.endPoint1 = line[0]
        self.endPoint2 = line[-1]
        # NOTE(review): this is half the absolute delta between the
        # endpoints, not the midpoint coordinate (which would average the
        # endpoints) — confirm the intended semantics.
        self.midpoint = [abs(self.endPoint1[0] - self.endPoint2[0]) / 2,
                         abs(self.endPoint1[1] - self.endPoint2[1]) / 2]
def Correlation(line, resolution, threshold):
    """
    Given an array of adjacent pixel locations, it will determine
    if the line is straight enough to be considered a line.
    It uses the two endpoints to create the ideal line to which the
    correlation is measured.  The line is split into `resolution`
    segments whose average slope is then compared to the ideal line.
    `threshold` is the variability allowed in the difference between
    these slopes.  Returns True when the line is straight enough,
    otherwise False.
    """
    start = line[0]
    end = line[-1]
    length = len(line)
    dy = end[0] - start[0]
    dx = end[1] - start[1]
    try:
        masterSlope = float(dy) / float(dx)
    except ZeroDivisionError:
        # Vertical line: approximate with rise over total length.
        masterSlope = dy / length
    # BUG FIX: use integer division — the result indexes into `line`, and
    # true division yields a float index (TypeError on Python 3).
    segmentLength = length // resolution
    # Split the line into `resolution` consecutive [start, end] segments.
    segments = []
    startPoint = start
    for i in range(1, resolution + 1):
        endPoint = line[segmentLength * i - 1]
        segments.append([startPoint, endPoint])
        startPoint = endPoint
    # Slope of each segment.
    segmentSlopes = []
    for seg in segments:
        seg_start = seg[0]
        seg_end = seg[1]
        dy = seg_end[0] - seg_start[0]
        dx = seg_end[1] - seg_start[1]
        try:
            slope = dy / float(dx)
        except ZeroDivisionError:
            slope = (dy * resolution / length)
        segmentSlopes.append(slope)
    ave = average(segmentSlopes)
    if (ave < (masterSlope + threshold) and ave > (masterSlope - threshold)):
        return True
    # BUG FIX: previously fell through returning None implicitly; make the
    # negative result explicit.
    return False
def TestGrid(im, x, y):
    """
    Given a bitmap image and a true pixel, search the 8-neighbourhood of
    (x, y) for another true pixel.  Returns (found, position) where
    position is the index 0-8 in row-major order around the centre
    (the centre itself is a placeholder 0), or (False, -1) when nothing
    is found or the pixel sits on the image border (IndexError).
    Note: negative indices wrap around, matching the original behaviour.
    """
    try:
        neighborhood = [
            im[y - 1][x - 1], im[y - 1][x], im[y - 1][x + 1],
            im[y][x - 1],     0,            im[y][x + 1],
            im[y + 1][x - 1], im[y + 1][x], im[y + 1][x + 1],
        ]
    except IndexError:
        return False, -1
    for position, cell in enumerate(neighborhood):
        if cell == 1:
            return True, position
    return False, -1
def TestPossibleLine(im, x, y, minLength, maxLength):
    """
    Given a bitmap image and a true pixel, iteratively call TestGrid to
    find the next pixel in a possible line until TestGrid returns False.
    Then check whether the collected line is long enough (but not too
    long) and straight enough (Correlation with resolution 3, threshold
    5).  Visited pixels are marked 2 while tracing; accepted line pixels
    are re-marked 3.  Returns a lineObject, or the string "notLine".
    """
    linePoints = []
    flag = True
    while(flag):
        flag, index = TestGrid(im, x, y)
        if(flag):
            # Directions on the right/vertical side of the 3x3 grid:
            # 1 = up, 2 = up-right, 5 = right, 7 = down, 8 = down-right.
            if(index == 2):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x + 1
                y = y - 1
            elif(index == 5):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x + 1
            elif(index == 8):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x + 1
                y = y + 1
            elif(index == 1):
                linePoints.append([y, x])
                im[y][x] = 2
                y = y - 1
            elif(index == 7):
                linePoints.append([y, x])
                im[y][x] = 2
                y = y + 1
            # NOTE(review): this second chain (left-side directions
            # 0 = up-left, 3 = left, 6 = down-left) is a separate `if`,
            # not an `elif` of the chain above; since `index` has a single
            # value per iteration only one branch ever fires, but keep the
            # structure as-is — the statement order is load-bearing.
            if(index == 0):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x - 1
                y = y - 1
            elif(index == 3):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x - 1
            elif(index == 6):
                linePoints.append([y, x])
                im[y][x] = 2
                x = x - 1
    print(len(linePoints))
    if(len(linePoints) >= minLength and len(linePoints) <= maxLength and Correlation(linePoints, 3, 5)):
        # Accepted: re-mark the traced pixels as confirmed line pixels.
        for i in linePoints:
            im[i[0]][i[1]] = 3
        return lineObject(linePoints)
    else:
        return "notLine"
def FindLines(im, minLength, maxLength, resolution, threshold):
    """
    Input a canny edge detected image (2-D array) and the minimum/maximum
    length of a line in pixels.  Pixel markings:
    0 = pixel is not a part of a line
    1 = pixel may be a part of a line
    2 = pixel is a part of the line under test
    Returns a list of lineObject instances.
    NOTE(review): `resolution` and `threshold` are accepted but unused —
    TestPossibleLine hard-codes Correlation(..., 3, 5); kept for interface
    compatibility.
    """
    lines = []  # array of line objects
    y, x = im.shape
    # Scan the interior of the image; borders are skipped because TestGrid
    # treats out-of-range access as "no neighbour".
    for j in range(1, y - 1):
        for i in range(1, x - 1):
            if(im[j][i] == 1):
                im[j][i] = 2
                # BUG FIX: TestPossibleLine takes (im, x, y); j is the row
                # (y) and i is the column (x), so the arguments were
                # swapped and the tracer walked a transposed image.
                line = TestPossibleLine(im, i, j, minLength, maxLength)
                if (line != "notLine"):
                    lines.append(line)
    return lines
import numpy as np
class LinearRegressor:
    """Linear regression fitted by incremental (per-sample) gradient descent.

    Data rows are expected to be augmented with a leading 1 for the
    intercept term, so a model with `num_features` inputs uses
    num_features + 1 weights.
    """

    def __init__(self, num_features=2, iterations=100, alpha=0.0001, of=None):
        # of: optional file-like object to which fit() logs its result.
        self.out = of
        self.num_features = num_features + 1  # remember to include the intercept...
        self.alpha = alpha                    # learning rate
        self.iterations = iterations          # maximum number of epochs
        self.weights = np.zeros(self.num_features)

    def predict(self, data):
        """Given an (augmented) feature row, return the predicted value."""
        return np.dot(data, self.weights)

    def fit(self, data, expectations):
        """Fit the weights to `data`/`expectations`, stopping early on convergence."""
        e = 0.000001  # minimum total weight change needed to declare convergence
        for i in range(self.iterations):
            v = self.weights.copy()
            self.fit_incremental_gradient_descent(data, expectations)
            total_adj = sum(abs(wi - vi) for wi, vi in zip(self.weights, v))
            if total_adj < e:
                break
        # BUG FIX: `of` defaults to None, so the unconditional write crashed
        # unless a file was supplied; also the old format string indexed
        # weights[0..2] and broke for num_features != 2.  Output text is
        # unchanged for the default 3-weight case.
        if self.out is not None:
            weight_cols = ", ".join("%0.4f" % w for w in self.weights)
            self.out.write("%0.4f, %d, %s\n" % (self.alpha, self.iterations, weight_cols))

    def fit_incremental_gradient_descent(self, X: np.ndarray, Y: np.ndarray):
        """One epoch of per-sample (stochastic) gradient descent over X/Y."""
        w = self.weights
        h = self.predict  # abbreviation
        n = w.shape[0]
        for i, x_i in enumerate(X):      # for each x_i in X (the training data set)
            for j in range(n):           # for each feature in the instance x_i
                # Note: h(x_i) is recomputed with the partially-updated
                # weights within a sample, as in the original.
                w[j] -= self.adj_weight(Y[i], h(x_i), x_i[j], self.alpha)

    @staticmethod
    def adj_weight(y, hx, xij, a):
        """Return the weight adjustment a * x_ij * (h(x) - y), 0.0 on overflow."""
        try:
            return a * xij * (hx - y)
        except OverflowError:
            return 0.0
|
# 递归
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def sumRootToLeaf(self, root: Optional[TreeNode]) -> int:
        """Sum the binary numbers spelled out by every root-to-leaf path.

        Each node holds a bit (0/1); a path's bits, root first, form one
        binary number.  Implemented as an iterative DFS carrying the bit
        string accumulated so far.
        """
        res = 0
        stack = [(root, str(root.val))]
        while stack:
            node, bits = stack.pop()
            if not node.left and not node.right:
                # Leaf: convert the accumulated bit string from base 2.
                res += int(bits, 2)
            if node.left:
                stack.append((node.left, bits + str(node.left.val)))
            if node.right:
                stack.append((node.right, bits + str(node.right.val)))
        return res
import numpy as np
import math
import matplotlib.pyplot as plt
#plt.switch_backend('Qt4Agg')
# VELOCITY HISTOGRAM
# Load the simulated speed histogram: column 1 = |v| bin centre,
# column 2 = (unnormalised) probability.
fname1='v_hist.txt'
data1 = np.loadtxt(fname1, skiprows=4)
V = data1[:,1]
P = data1[:,2]
T = 1.5 # value set in simulation
# CALCULATE MAXW.-BOLTZM.-DISTR. IN 2 DIMENSIONS !!!
# In 2D the speed distribution is p(v) = (m/T) v exp(-m v^2 / 2T).
v = np.linspace(0.05, 9.95, 50);
m = 1.0 # mass
p = m / T * v * np.exp(-1.0 * (m * v**2)/(2 * T))
### PLOTS
# Both curves are normalised to unit sum so they are directly comparable.
csfont = {'size':20}
hfont = {'size':20}
plt.plot(V,P/np.sum(P),lw=2,color='b',label='MD histo.')
plt.plot(v,p/np.sum(p),lw=2,ls='--',color='r',label='MWB, T=%.4f'%T)
plt.rcParams.update({'font.size': 14})
plt.legend(frameon=False)
plt.xlabel('|v|',**csfont)
plt.ylabel('p(|v|)',**hfont)
plt.title('Abs. Velocity Distr.',**csfont)
plt.savefig("v_hist.png", bbox_inches="tight")
plt.show()
|
import json
import unittest
import pyyoutube.models as models
class CommentModelModelTest(unittest.TestCase):
    """Round-trip the comment model classes against recorded API fixtures."""

    BASE_PATH = "testdata/modeldata/comments/"

    # Fixtures are loaded once at class-creation time.
    with open(BASE_PATH + "comment_snippet.json", "rb") as f:
        SNIPPET_INFO = json.loads(f.read().decode("utf-8"))
    with open(BASE_PATH + "comment_info.json", "rb") as f:
        COMMENT_INFO = json.loads(f.read().decode("utf-8"))
    with open(BASE_PATH + "comment_api_response.json", "rb") as f:
        COMMENT_API_INFO = json.loads(f.read().decode("utf-8"))

    def testCommentSnippet(self) -> None:
        """CommentSnippet.from_dict maps fields and parses timestamps."""
        m = models.CommentSnippet.from_dict(self.SNIPPET_INFO)
        self.assertEqual(m.videoId, "wtLJPvx7-ys")
        self.assertTrue(m.canRate)
        self.assertEqual(m.authorChannelId.value, "UCqPku3cxM-ED3poX8YtGqeg")
        self.assertEqual(
            m.string_to_datetime(m.publishedAt).isoformat(), "2019-03-28T11:33:46+00:00"
        )

    def testComment(self) -> None:
        """Comment.from_dict exposes id and nested snippet fields."""
        m = models.Comment.from_dict(self.COMMENT_INFO)
        self.assertEqual(m.id, "UgwxApqcfzZzF_C5Zqx4AaABAg")
        self.assertEqual(m.snippet.authorDisplayName, "Oeurn Ravuth")
        self.assertEqual(
            m.snippet.string_to_datetime(m.snippet.updatedAt).isoformat(),
            "2019-03-28T11:33:46+00:00",
        )

    def testCommentListResponse(self) -> None:
        """CommentListResponse.from_dict parses the list wrapper and items."""
        m = models.CommentListResponse.from_dict(self.COMMENT_API_INFO)
        self.assertEqual(m.kind, "youtube#commentListResponse")
        self.assertEqual(m.items[0].id, "UgxKREWxIgDrw8w2e_Z4AaABAg")
class CommentThreadModelTest(unittest.TestCase):
    """Round-trip the comment-thread model classes against recorded fixtures."""

    BASE_PATH = "testdata/modeldata/comments/"

    # Fixtures are loaded once at class-creation time.
    with open(BASE_PATH + "comment_thread_snippet.json", "rb") as f:
        SNIPPET_INFO = json.loads(f.read().decode("utf-8"))
    with open(BASE_PATH + "comment_thread_replies.json", "rb") as f:
        REPLIES_INFO = json.loads(f.read().decode("utf-8"))
    with open(BASE_PATH + "comment_thread_info.json", "rb") as f:
        COMMENT_THREAD_INFO = json.loads(f.read().decode("utf-8"))
    with open(BASE_PATH + "comment_thread_api_response.json", "rb") as f:
        COMMENT_THREAD_API_INFO = json.loads(f.read().decode("utf-8"))

    def testCommentThreadSnippet(self) -> None:
        """Snippet parsing exposes the video id and the top-level comment."""
        m = models.CommentThreadSnippet.from_dict(self.SNIPPET_INFO)
        self.assertEqual(m.videoId, "D-lhorsDlUQ")
        self.assertEqual(m.topLevelComment.id, "UgydxWWoeA7F1OdqypJ4AaABAg")
        self.assertEqual(m.topLevelComment.snippet.videoId, "D-lhorsDlUQ")

    def testCommentThreadReplies(self) -> None:
        """Replies parsing yields the nested comment list."""
        m = models.CommentThreadReplies.from_dict(self.REPLIES_INFO)
        self.assertEqual(len(m.comments), 1)
        self.assertEqual(
            m.comments[0].id, "UgydxWWoeA7F1OdqypJ4AaABAg.8wWQ3tdHcFx8xcDheui-qb"
        )
        self.assertEqual(m.comments[0].snippet.videoId, "D-lhorsDlUQ")

    def testCommentThread(self) -> None:
        """A full thread combines snippet and replies."""
        m = models.CommentThread.from_dict(self.COMMENT_THREAD_INFO)
        self.assertEqual(m.id, "UgydxWWoeA7F1OdqypJ4AaABAg")
        self.assertEqual(m.snippet.videoId, "D-lhorsDlUQ")
        self.assertEqual(
            m.replies.comments[0].id,
            "UgydxWWoeA7F1OdqypJ4AaABAg.8wWQ3tdHcFx8xcDheui-qb",
        )

    def testCommentThreadListResponse(self) -> None:
        """The list wrapper parses kind and items."""
        m = models.CommentThreadListResponse.from_dict(self.COMMENT_THREAD_API_INFO)
        self.assertEqual(m.kind, "youtube#commentThreadListResponse")
        self.assertEqual(m.items[0].id, "Ugz097FRhsQy5CVhAjp4AaABAg")
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import pytest
from pants.backend.openapi import dependency_inference
from pants.backend.openapi.goals import tailor
from pants.backend.openapi.goals.tailor import PutativeOpenApiTargetsRequest
from pants.backend.openapi.target_types import (
OpenApiDocumentGeneratorTarget,
OpenApiSourceGeneratorTarget,
)
from pants.core.goals.tailor import AllOwnedSources, PutativeTarget, PutativeTargets
from pants.engine.rules import QueryRule
from pants.testutil.rule_runner import RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
    """RuleRunner wired with the OpenAPI tailor and dependency-inference rules."""
    return RuleRunner(
        rules=[
            *dependency_inference.rules(),
            *tailor.rules(),
            QueryRule(PutativeTargets, (PutativeOpenApiTargetsRequest, AllOwnedSources)),
        ],
        target_types=[OpenApiDocumentGeneratorTarget, OpenApiSourceGeneratorTarget],
    )
def test_find_putative_targets(rule_runner: RuleRunner) -> None:
    """Tailor proposes targets for unowned OpenAPI documents/sources only.

    `src/owned/openapi.json` is already owned by a BUILD file, so only its
    transitively-referenced sources (foobar.json) are proposed; everything
    under src/unowned gets both document and source targets.
    """
    rule_runner.write_files(
        {
            "src/owned/BUILD": "openapi_documents()\n",
            "src/owned/openapi.json": '{"$ref": "foobar.json"}',
            "src/owned/foobar.json": "{}",
            "src/unowned/foobar.json": "{}",
            "src/unowned/openapi.json": '{"$ref": "subdir/foobar.json"}',
            "src/unowned/openapi.yaml": "{}",
            "src/unowned/subdir/foobar.json": '{"$ref": "../foobar.json"}',
        }
    )
    putative_targets = rule_runner.request(
        PutativeTargets,
        [
            PutativeOpenApiTargetsRequest(("src/owned", "src/unowned")),
            AllOwnedSources(["src/owned/openapi.json"]),
        ],
    )
    assert (
        PutativeTargets(
            [
                PutativeTarget.for_target_type(
                    OpenApiDocumentGeneratorTarget,
                    "src/unowned",
                    "openapi",
                    ["openapi.json", "openapi.yaml"],
                ),
                PutativeTarget.for_target_type(
                    OpenApiSourceGeneratorTarget,
                    "src/unowned",
                    "unowned",
                    ["foobar.json", "openapi.json", "openapi.yaml"],
                ),
                PutativeTarget.for_target_type(
                    OpenApiSourceGeneratorTarget,
                    "src/unowned/subdir",
                    "subdir",
                    ["foobar.json"],
                ),
                PutativeTarget.for_target_type(
                    OpenApiSourceGeneratorTarget,
                    "src/owned",
                    "owned",
                    ["foobar.json"],
                ),
            ]
        )
        == putative_targets
    )
def test_find_putative_targets_when_disabled(rule_runner: RuleRunner) -> None:
    """With --no-openapi-tailor-targets no targets are proposed at all."""
    rule_runner.write_files(
        {
            "src/unowned/openapi.json": "{}",
        }
    )
    rule_runner.set_options(["--no-openapi-tailor-targets"])
    putative_targets = rule_runner.request(
        PutativeTargets,
        [
            PutativeOpenApiTargetsRequest(("src/unowned",)),
            AllOwnedSources(),
        ],
    )
    assert PutativeTargets() == putative_targets
|
from django.shortcuts import render
# Create your views here.
from my_views import channelViews
from rest_framework.routers import DefaultRouter

# DefaultRouter auto-generates list/detail URL patterns for the viewset.
router = DefaultRouter()
# NOTE(review): `base_name` was renamed to `basename` in DRF 3.9+ and removed
# later — confirm the installed DRF version supports this keyword.
router.register(r"myusers", channelViews.ChannelViewSet, base_name='user')
urlpatterns = router.urls
# TAI content
def c_peg ():
    """Board cell character for a peg."""
    return "O"
def c_empty ():
    """Board cell character for an empty hole."""
    return "_"
def c_blocked ():
    """Board cell character for a blocked (unplayable) cell."""
    return "X"
def is_empty (e):
    """True when cell `e` is an empty hole."""
    return e == c_empty()
def is_peg (e):
    """True when cell `e` holds a peg."""
    return e == c_peg()
def is_blocked (e):
    """True when cell `e` is blocked."""
    return e == c_blocked()
# TAI pos
# Tuple (l, c): line (row) and column.
def make_pos (l, c):
    """Build a position from line `l` and column `c`."""
    return (l, c)
def pos_l (pos):
    """Line (row) component of a position."""
    return pos[0]
def pos_c (pos):
    """Column component of a position."""
    return pos[1]
# TAI move
# List [p_initial, p_final]: origin and destination positions.
def make_move (i, f):
    """Build a move from initial position `i` to final position `f`."""
    return [i, f]
def move_initial (move):
    """Origin position of a move."""
    return move[0]
def move_final (move):
    """Destination position of a move."""
    return move[1]
""" funcao auxiliar. pega nas linhas e colunas e forma tuplos. coloca-os na lista"""
def create_possible_move(line_current,column_current,line_possible,column_possible):
current = make_pos(line_current,column_current)
possible = make_pos(line_possible,column_possible)
lst = []
lst.append(current)
lst.append(possible)
return lst
b1 = [["_","O","O","O","_"], ["O","_","O","_","O"], ["_","O","_","O","_"],["O","_","O","_","_"],["_","O","_","_","_"]]
def board_moves(board):
    """Return every candidate move on *board* as [origin, destination] pairs.

    A candidate is a peg with an empty cell two steps up, down, left or
    right.  The board is assumed square (bounds use len(board[0]) for both
    axes).  NOTE(review): the cell being jumped over is not checked for a
    peg here — confirm against the game rules.
    """
    line_len = len(board[0])  # row length; board assumed square
    res = []
    # BUG FIX: the original never reset the column counter between rows, so
    # only the first row was ever scanned.
    for l in range(line_len):
        for c in range(line_len):
            if is_peg(board[l][c]):
                if l > 1 and is_empty(board[l - 2][c]):             # top edge guard
                    res.append(create_possible_move(l, c, l - 2, c))
                if l < line_len - 2 and is_empty(board[l + 2][c]):  # bottom edge guard
                    res.append(create_possible_move(l, c, l + 2, c))
                if c > 1 and is_empty(board[l][c - 2]):             # left edge guard
                    res.append(create_possible_move(l, c, l, c - 2))
                if c < line_len - 2 and is_empty(board[l][c + 2]):  # right edge guard
                    res.append(create_possible_move(l, c, l, c + 2))
    # Parenthesized print is valid on both Python 2 and 3 (was `print res`).
    print(res)
    return res

board_moves(b1)
def board_perform_move(board, move):
    """Apply *move* to *board* and return the board.

    The origin cell becomes empty and the destination receives a peg.
    NOTE(review): `res = board` is an alias, so the caller's board is
    mutated in place, and the jumped-over peg is not removed — confirm
    both are intended.
    """
    res = board
    res[pos_l(move_initial(move))][pos_c(move_initial(move))] = c_empty()
    res[pos_l(move_final(move))][pos_c(move_final(move))] = c_peg()
    # Parenthesized print is valid on both Python 2 and 3 (was `print res`).
    print(res)
    return res

board_perform_move(b1, [(0, 2), (0, 0)])
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# Industry choices for Account.industry: (stored value, display label).
INDCHOICES= (
    ('FINANCE','FINANCE'),
    ('HEALTHCARE','HEALTHCARE'),
    ('INSURANCE', 'INSURANCE'),
    ('LEGAL', 'LEGAL'),
    ('MANUFACTURING', 'MANUFACTURING'),
    ('PUBLISHING', 'PUBLISHING'),
    ('REAL ESTATE', 'REAL ESTATE'),
    ('SOFTWARE', 'SOFTWARE'),
)
class Account(models.Model):
    """A customer/company account record."""
    # NOTE(review): second positional arg 'name' is this field's db_column-less
    # *name* override via verbose_name API misuse? CharField's second
    # positional is max_length in older signatures — here it is passed as
    # `name`; confirm this renders as intended in forms.
    name = models.CharField('Name of Account', 'name', max_length=64)
    # NOTE(review): field name `emai` looks like a typo for `email`; renaming
    # would require a migration and updating all references, so it is left
    # as-is and only flagged here.
    emai = models.EmailField(blank = True, null = True)
    phone = models.CharField(max_length=20, blank = True, null = True)
    industry = models.CharField('Industry Type', max_length=255, choices=INDCHOICES, blank=True, null=True)
    website = models.URLField('Website', blank = True, null = True)
    description = models.TextField(blank = True, null = True)
    createdBy = models.ForeignKey(User, related_name='account_created_by', on_delete=models.CASCADE)
    createdAt = models.DateTimeField('Created At', auto_now_add = True)
    isActive = models.BooleanField(default=False)

    def __str__(self):
        """Display accounts by their (required) name."""
        return self.name
class ContactStatus(models.Model):
    """Lookup table of contact statuses."""
    # BUG FIX: the verbose name said 'Contact Source' — it was swapped with
    # the label on ContactSource.
    status = models.CharField('Contact Status', max_length = 20)

    def __str__(self):
        return self.status
class ContactSource(models.Model):
    """Lookup table of contact sources."""
    # BUG FIX: the verbose name said 'Contact Status' — it was swapped with
    # the label on ContactStatus.  NOTE(review): the field is still named
    # `status`; renaming would require a migration, so only the label is fixed.
    status = models.CharField('Contact Source', max_length = 20)

    def __str__(self):
        return self.status
class Contact(models.Model):
    """A person, optionally attached to an Account."""
    first_name = models.CharField('First Name', max_length = 255, blank = True, null = True)
    last_name = models.CharField('Last Name', max_length = 255, blank = True, null = True)
    account = models.ForeignKey(Account, related_name = 'lead_account_contacts', on_delete = models.CASCADE, blank = True, null=True)
    email = models.EmailField()
    phone = models.CharField(max_length=20, blank=True, null=True)
    address = models.TextField(blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    createdBy = models.ForeignKey(User, related_name="contact_created_by", on_delete=models.CASCADE)
    createdAt = models.DateTimeField('Created At', auto_now_add=True)
    isActive = models.BooleanField(default=False)

    def __str__(self):
        # BUG FIX: first_name is nullable, and __str__ returning None raises
        # TypeError; fall back to the (required) email address.
        return self.first_name or self.email
class ActivityStatus(models.Model):
    """Lookup table of activity statuses."""
    status = models.CharField('Activity Status', max_length=20)

    def __str__(self):
        return self.status
class Activity(models.Model):
    """An activity (note/interaction) optionally linked to a Contact."""
    description = models.TextField(blank=True, null=True)
    createdAt = models.DateTimeField('Created At', auto_now_add=True)
    contact = models.ForeignKey(Contact, on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        # NOTE(review): description is nullable — __str__ returning None
        # raises TypeError; consider `return self.description or ""`.
        return self.description
|
"""
TIMIT database.py
Purpose of this script is to extract the files .wav files and .phn files from the various folders and sub-folders in the
TIMIT database and place then in a separate folder. It also creates TextGrid files for each .phn files and thus completes
the data set.
Author: Rishabh Brajabasi
Date: 25th April 2017
"""
import fnmatch
import os
import glob
import csv
from shutil import copyfile
# Flatten the TIMIT test set: copy every .wav file into one folder, encoding
# the original sub-path in the filename ("dr1\fabc\sa1" -> "dr1_fabc_sa1_T1").
matches = []
for root, dirnames, filenames in os.walk('F:\Projects\Active Projects\Project Intern_IITB\\timit\\cd1_timit\\test'):
    for filename in fnmatch.filter(filenames, '*.wav'):
        matches.append(os.path.join(root, filename))
for i in range(len(matches)):
    # start+5 skips past "test\"; [:-4] drops the extension.
    start = matches[i].find('test')
    copyfile(matches[i], 'F:\Projects\Active Projects\Project Intern_IITB\\timit\\TIMIT Database\\' + matches[i][start+5:-4].replace('\\', '_') + '_T1.wav')
# Same for the test-set phoneme label (.phn) files.
matches = []
for root, dirnames, filenames in os.walk('F:\Projects\Active Projects\Project Intern_IITB\\timit\\cd1_timit\\test'):
    for filename in fnmatch.filter(filenames, '*.phn'):
        matches.append(os.path.join(root, filename))
for i in range(len(matches)):
    start = matches[i].find('test')
    copyfile(matches[i], 'F:\Projects\Active Projects\Project Intern_IITB\\timit\\TIMIT Database\\' + matches[i][start+5:-4].replace('\\', '_') + '_T1.phn')
# Training-set .wav files get the "_T2" suffix (start+6 skips "train\").
matches = []
for root, dirnames, filenames in os.walk('F:\Projects\Active Projects\Project Intern_IITB\\timit\\cd1_timit\\train'):
    for filename in fnmatch.filter(filenames, '*.wav'):
        matches.append(os.path.join(root, filename))
for i in range(len(matches)):
    start = matches[i].find('train')
    copyfile(matches[i], 'F:\Projects\Active Projects\Project Intern_IITB\\timit\\TIMIT Database\\' + matches[i][start+6:-4].replace('\\', '_') + '_T2.wav')
# Training-set .phn files.
matches = []
for root, dirnames, filenames in os.walk('F:\Projects\Active Projects\Project Intern_IITB\\timit\\cd1_timit\\train'):
    for filename in fnmatch.filter(filenames, '*.phn'):
        matches.append(os.path.join(root, filename))
for i in range(len(matches)):
    start = matches[i].find('train')
    copyfile(matches[i], 'F:\Projects\Active Projects\Project Intern_IITB\\timit\\TIMIT Database\\' + matches[i][start+6:-4].replace('\\', '_') + '_T2.phn')
only_phn = glob.glob('F:\Projects\Active Projects\Project Intern_IITB\\timit\TIMIT Database\*.phn') # Extract file name of all audio samples
# For every .phn label file: first convert sample offsets to seconds in a CSV,
# then emit a Praat TextGrid with one IntervalTier of phoneme labels.
for phn in only_phn:
    csvFileName = phn[:-3] + str('csv')
    text_file_1 = open(csvFileName, 'w') # Opening CSV file to store results and to create TextGrid
    phone = open(phn, 'r')
    phones = phone.read()
    aa = phones.split('\n')
    # Each .phn line is "<start_sample> <end_sample> <label>"; 0.0000625
    # converts samples to seconds (16 kHz sample rate: 1/16000 = 0.0000625).
    for i in range(len(aa)-1):
        split = aa[i].split(' ')
        text_file_1.write('%06.3f' % (float(split[0])*0.0000625) + "\t" + '%06.3f' % (float(split[1])*0.0000625) + "\t" + split[2] + "\n")
    text_file_1.close()
    TGFileName = csvFileName.split('.')[0] + '.TextGrid' # Setting name of TextGrid file
    fidcsv = open(csvFileName, 'r')
    fidTG = open(TGFileName, 'w')
    reader = csv.reader(fidcsv, delimiter="\t") # Reading data from csv file
    data_tg = list(reader) # Converting read data into python list format
    label_count = len(data_tg) # Finding total number of rows in csv file
    end_time = data_tg[-1][1]
    # Fixed TextGrid header (Praat "ooTextFile" format).
    fidTG.write('File type = "ooTextFile"\n')
    fidTG.write('Object class = "TextGrid"\n')
    fidTG.write('xmin = 0\n')
    fidTG.write('xmax = ' + str(end_time) + '\n')
    fidTG.write('tiers? <exists>\n')
    fidTG.write('size = 1\n')
    fidTG.write('item []:\n')
    fidTG.write('\titem [1]:\n')
    fidTG.write('\t\tclass = "IntervalTier"\n')
    fidTG.write('\t\tname = "Labels"\n')
    fidTG.write('\t\txmin = 0\n')
    fidTG.write('\t\txmax = ' + str(end_time) + '\n')
    fidTG.write('\t\tintervals: size = ' + str(label_count) + '\n');
    # One interval per label row.  NOTE(review): Praat numbers intervals
    # from 1; this writes indices starting at 0 — confirm Praat accepts it.
    for j in range(label_count):
        fidTG.write('\t\tintervals [' + str(j) + ']:\n')
        fidTG.write('\t\t\txmin = ' + str(data_tg[j][0]) + '\n')
        fidTG.write('\t\t\txmax = ' + str(data_tg[j][1]) + '\n')
        fidTG.write('\t\t\ttext = "' + str(data_tg[j][2]) + '"\n')
    fidcsv.close()
    fidTG.close()
|
#!/usr/bin/env python
#-----------------------------------------------------------------------------
import sys
import os
# analyse environment variables
# analyse environment variables
# POST: read the body from stdin (CONTENT_LENGTH bytes); otherwise take the
# query string from REQUEST_URI after the '?'.
queryString=''
if os.environ.get('REQUEST_METHOD','')=='POST':
    remaining=os.environ.get('CONTENT_LENGTH','0')
    if remaining:
        remaining=int(remaining)
    # Loop because stdin.read may return fewer bytes than requested.
    while remaining:
        r=sys.stdin.read(remaining)
        if r:
            queryString+=r
            remaining-=len(r)
        else:
            break
else:
    # NOTE(review): bare except deliberately treats a missing '?' (IndexError)
    # as "no arguments", but it also hides any other error here.
    try:
        queryString=os.environ.get('REQUEST_URI','').split('?',1)[1]
    except:
        pass
# analyse arguments: split "a=1&b&c=2" into (name, value) pairs; a bare name
# gets an empty value.  NOTE(review): values are not URL-decoded.
cgiArgs=[]
for i in queryString.split('&'):
    if i:
        r=i.split('=',1)
        if len(r)==2:
            cgiArgs.append((r[0],r[1]))
        else:
            cgiArgs.append((r[0],''))
# finish header (insert \r before \n) — CGI headers end lines with CRLF and
# are terminated by a blank line before the body.
sys.stdout.write('Content-Type: text/html\r\n')
sys.stdout.write('\r\n')
# then produce content: echo the request environment and parsed arguments
# back as an HTML debugging page.
sys.stdout.write('<!DOCTYPE html>\n')
sys.stdout.write('<html><head>\n')
sys.stdout.write('<meta charset="utf-8">\n')
sys.stdout.write('</head><body>\n')
sys.stdout.write('<h2>Query Properties</h2>\n')
sys.stdout.write('<p>[<a href="/">home</a>]</p>\n')
sys.stdout.write('<hr>\n')
sys.stdout.write('<p>Interesting environment variables:</p>\n')
sys.stdout.write('<p><ul>\n')
for i in ['REQUEST_METHOD','REQUEST_URI','CONTENT_LENGTH']:
    sys.stdout.write('<li>%s=<tt>%s</tt></li>\n'%(i,os.environ.get(i,'')))
sys.stdout.write('</ul></p>\n')
sys.stdout.write('<hr>\n')
# NOTE(review): queryString and argument values are interpolated without HTML
# escaping — a reflected-XSS risk if this page is exposed beyond debugging.
sys.stdout.write('<p>Received arguments: <tt>%s</tt></p>\n'%queryString)
sys.stdout.write('<p><ul>\n')
for i in cgiArgs:
    sys.stdout.write('<li>%s: <tt>%s</tt></li>\n'%i)
sys.stdout.write('</ul></p>\n')
sys.stdout.write('<hr>\n')
sys.stdout.write('</body></html>\n')
#-----------------------------------------------------------------------------
|
__author__ = 'ebirger'
import os
import glob
import sys
files = list()
def walk_files(search_path, depth):
n = 1
extra = '/*'
while n <= int(depth):
path = search_path + (extra * n)
for i in glob.glob(path):
files.append(os.path.join(path, i))
n += 1
so = sorted(files)
separator = '--'
up = u"\u2514"
print os.path.basename(search_path)
for i in so:
intend = i.count(os.path.sep) - search_path.count(os.path.sep)
print intend * ' ' + up.encode('utf-8') + separator + os.path.basename(i)
def main():
    """Entry point: tree-print sys.argv[1] down to sys.argv[2] levels."""
    walk_files(sys.argv[1], sys.argv[2])


if __name__ == '__main__':
    # Validate that both arguments are present before running.
    try:
        arg1 = sys.argv[1]
        arg2 = sys.argv[2]
    except IndexError:
        # Python 3-compatible print call (was a py2 print statement).
        print("Usage: tree.py <PATH> <DEPTH>")
        sys.exit(1)
    main()
|
# basic one time blink: turn the LED off for 1 s, then on for 2 s, then exit.
import RPi.GPIO as GPIO  # import gpio module and refer it as GPIO
import time  # import time module

GPIO.setmode(GPIO.BOARD)  # to use Raspberry Pi board pin numbers
GPIO.setup(11, GPIO.OUT)  # set up pin 11 as GPIO output channel
GPIO.output(11, GPIO.LOW)  # set RPi board pin 11 low. Turn off LED.
time.sleep(1)
GPIO.output(11, GPIO.HIGH)  # set RPi board pin 11 high. Turn on LED.
time.sleep(2)
# NOTE(review): no GPIO.cleanup() here — the pin stays configured after exit.
#==================================================================================
# Use BCM pin numbering system (same one-shot blink as above, on BCM pin 17)
import RPi.GPIO as GPIO
import time

GPIO.setmode(GPIO.BCM)  # to use Raspberry Pi BCM pin numbers
GPIO.setup(17, GPIO.OUT)  # set up pin 17 as GPIO output channel
print('LED OFF')  # print the string
GPIO.output(17, GPIO.LOW)  # set RPi bcm pin 17 low. Turn off LED.
time.sleep(1)
print('LED ON')
GPIO.output(17, GPIO.HIGH)  # set RPi bcm pin 17 high. Turn on LED.
time.sleep(2)
#==================================================================================
# Infinite Loop: blink forever (1 s off / 2 s on); no cleanup on exit.
import RPi.GPIO as GPIO
import time

GPIO.setmode(GPIO.BCM)  # to use Raspberry Pi BCM pin numbers
GPIO.setup(17, GPIO.OUT)  # set up GPIO output channel
while True:  # while loop. Runs infinitely
    GPIO.output(17, False)  # set RPi BCM pin 17 low. Turn off LED.
    time.sleep(1)
    GPIO.output(17, True)  # set RPi BCM pin 17 high. Turn on LED.
    time.sleep(2)
#==================================================================================
# Clean Up: blink forever, and release the GPIO pins when Ctrl+C is pressed.
import RPi.GPIO as GPIO
import time

GPIO.setmode(GPIO.BCM)  # to use Raspberry Pi BCM pin numbers
GPIO.setup(17, GPIO.OUT)  # set up GPIO output channel
print('Hit Ctrl + C to exit')
try:
    while True:
        GPIO.output(17, False)  # set RPi BCM pin 17 low. Turn off LED.
        time.sleep(1)
        GPIO.output(17, True)  # set RPi BCM pin 17 high. Turn on LED.
        time.sleep(2)
except KeyboardInterrupt:  # runs except block when Ctrl + C is pressed
    GPIO.cleanup()  # reset GPIO pins
#==================================================================================
# PWM - Pulse Width Modulation: fade an LED up and down by sweeping the duty cycle.
import RPi.GPIO as GPIO
import time

GPIO.setwarnings(False)  # disable any warnings
GPIO.setmode (GPIO.BCM)
GPIO.setup(17,GPIO.OUT)  # initialize GPIO17 as an output (old comment said GPIO19; the code uses 17)
p = GPIO.PWM(17,100)  # 100Hz frequency
p.start(0)  # start at 0% duty cycle
while True:
    for x in range (50):  # ramp duty cycle 0 -> 49 (brighten)
        p.ChangeDutyCycle(x)
        time.sleep(0.1)
    for x in range (50):  # ramp duty cycle 50 -> 1 (dim)
        p.ChangeDutyCycle(50-x)
        time.sleep(0.1)
#================================================================================
# Multiple LED's: chase pattern across four LEDs on board pins 11, 12, 13, 15.
import RPi.GPIO as GPIO
import time

pins = [11, 12, 13, 15]
GPIO.setmode(GPIO.BOARD)  # to use Raspberry Pi board pin numbers
for pin in pins:
    GPIO.setup(pin, GPIO.OUT)  # Set all pins mode as output
    #GPIO.output(pin, GPIO.HIGH)  # Set all pins to high(+3.3V)

def setup():
    # Runs the chase loop until Ctrl+C, then releases the pins.
    try:
        loop()
    except KeyboardInterrupt:  # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
        destroy()

def loop():
    # Light each LED briefly, sweeping left-to-right then right-to-left.
    # LOW lights the LED and HIGH turns it off (see destroy() below).
    while True:
        for pin in pins:
            GPIO.output(pin, GPIO.LOW)
            time.sleep(0.05)
            GPIO.output(pin, GPIO.HIGH)
        for pin in reversed(pins):
            GPIO.output(pin, GPIO.LOW)
            time.sleep(0.05)
            GPIO.output(pin, GPIO.HIGH)

def destroy():
    for pin in pins:
        GPIO.output(pin, GPIO.HIGH)  # turn off all leds
    GPIO.cleanup()  # Release resource

setup()
#------------------------------------------------------------------------------------------
# Multiple LED's [Refactored]: same chase pattern, with all GPIO work moved
# into setup()/loop()/destroy() and guarded by __main__.
import RPi.GPIO as GPIO
import time

pins = [11, 12, 13, 15]

def setup():
    GPIO.setmode(GPIO.BOARD)  # Numbers GPIOs by physical location
    for pin in pins:
        GPIO.setup(pin, GPIO.OUT)  # Set all pins mode as output
        GPIO.output(pin, GPIO.HIGH)  # Set all pins to high(+3.3V)

def loop():
    # Light each LED briefly, sweeping left-to-right then right-to-left.
    while True:
        for pin in pins:
            GPIO.output(pin, GPIO.LOW)
            time.sleep(0.05)
            GPIO.output(pin, GPIO.HIGH)
        for pin in reversed(pins):
            GPIO.output(pin, GPIO.LOW)
            time.sleep(0.05)
            GPIO.output(pin, GPIO.HIGH)

def destroy():
    for pin in pins:
        GPIO.output(pin, GPIO.HIGH)  # turn off all leds
    GPIO.cleanup()  # Release resource

if __name__ == '__main__':  # Program start from here
    setup()
    try:
        loop()
    except KeyboardInterrupt:  # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
        destroy()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-10 13:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make Election.group_type an optional CharField."""

    dependencies = [("elections", "0018_election_group_type")]

    operations = [
        migrations.AlterField(
            model_name="election",
            name="group_type",
            field=models.CharField(blank=True, max_length=100, null=True),
        )
    ]
|
# Simple menu-driven calculator (prompts in Brazilian Portuguese):
# repeatedly offers sum, product, max, re-entering the two numbers, or exit (5).
opc = 0  # selected menu option; 5 terminates the loop
n1 = int(input('Primeiro valor: '))
n2 = int(input('Segundo valor: '))
while opc != 5:
    print("""\t[ 1 ] somar
[ 2 ] multiplicar
[ 3 ] maior
[ 4 ] novos números
[ 5 ] sair do programa""")
    opc = int(input('>>>>> Qual é a sua opção? '))
    if opc == 1:
        soma = n1 + n2
        print(f'A soma entre {n1} + {n2} é {soma}')
    elif opc == 2:
        mult = n1 * n2
        print(f'O resultado de {n1} x {n2} é {mult}')
    elif opc == 3:
        if n1 > n2:
            maior = n1
        else:
            maior = n2
        print(f'Entre {n1} e {n2}, o maior valor é {maior}')
    elif opc == 4:
        print('Informe os números novamente: ')
        n1 = int(input('Primeiro valor: '))
        n2 = int(input('Segundo valor: '))
    elif opc == 5:
        print('Finalizando...')
    else:
        print('Opção inválida. Tente novamente!')
    print('=-=' * 12)  # separator printed after every iteration
|
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponseForbidden
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.http import urlquote
class LoginRequiredMixin(object):
    """
    A login required mixin for use with class based views. This Class is a light wrapper around the
    `login_required` decorator and hence function parameters are just attributes defined on the class.
    Due to parent class order traversal this mixin must be added as the left most
    mixin of a view.
    The mixin has exactly the same flow as the `login_required` decorator:
    If the user isn't logged in, redirect to settings.LOGIN_URL, passing the current
    absolute path in the query string. Example: /accounts/login/?next=/polls/3/.
    If the user is logged in, execute the view normally. The view code is free to
    assume the user is logged in.
    **Class Settings**
    `redirect_field_name` - defaults to "next"
    `login_url` - the login url of your site
    """
    redirect_field_name = REDIRECT_FIELD_NAME
    login_url = None

    # NOTE(review): the decorator arguments are evaluated once, at class
    # definition time, so overriding redirect_field_name/login_url in a
    # subclass has no effect on the wrapped dispatch — confirm this is intended.
    @method_decorator(login_required(redirect_field_name=redirect_field_name, login_url=login_url))
    def dispatch(self, request, *args, **kwargs):
        return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
class PermissionRequiredMixin(object):
    """
    A view mixin that verifies if the current logged in user has the specified permission
    by wrapping the ``request.user.has_perm(..)`` method.
    If a `get_object()` method is defined either manually or by including another mixin (for example
    ``SingleObjectMixin``) or ``self.object`` is defined then the permission will be tested against
    that specific instance.
    .. NOTE: Testing of a permission against a specific object instance requires an authentication backend
    that supports it. Please see ``django-guardian`` to add object level permissions to your project.
    The mixin does the following:
    If the user isn't logged in, redirect to settings.LOGIN_URL, passing the current
    absolute path in the query string. Example: /accounts/login/?next=/polls/3/.
    If `raise_exception` is set to True then, rather than redirecting to the login
    page, a 403 (Forbidden) response is returned.
    If the user is logged in, and passes the permission check, the view is executed
    normally.
    **Example Usage**
        class FitterEditView(PermissionRequiredMixin, UpdateView):
            ...
            ### PermissionRequiredMixin settings
            permission_required = 'fitters.change_fitter'
            ### other view settings
            context_object_name="fitter"
            queryset = Fitter.objects.all()
            form_class = FitterForm
            ...
    **Class Settings**
    `permission_required` - the permission to check of form "<app_label>.<permission codename>"
    i.e. 'polls.can_vote' for a permission on a model in the polls application.
    `login_url` - the login url of your site
    `redirect_field_name` - defaults to "next"
    `raise_exception` - defaults to False - return a 403 (Forbidden) response if set to True
    """
    ### default class view settings
    login_url = settings.LOGIN_URL
    raise_exception = False
    permission_required = None
    redirect_field_name = REDIRECT_FIELD_NAME

    def dispatch(self, request, *args, **kwargs):
        # Call the parent dispatch first to pre-populate few things before we check for permissions.
        # NOTE(review): this means the wrapped view has already executed when the
        # permission check below fails — only its response is discarded. Confirm intended.
        original_return_value = super(PermissionRequiredMixin, self).dispatch(request, *args, **kwargs)
        # Verify class settings (was `== None`; identity comparison is the idiom).
        if self.permission_required is None or len(self.permission_required.split('.')) != 2:
            raise ImproperlyConfigured("'PermissionRequiredMixin' requires 'permission_required' attribute to be set to '<app_label>.<permission codename>' but is set to '%s' instead" % self.permission_required)
        # Verify permission, against a specific object instance when one is available.
        if hasattr(self, 'object') and self.object is not None:
            has_permission = request.user.has_perm(self.permission_required, self.object)
        elif hasattr(self, 'get_object') and callable(self.get_object):
            has_permission = request.user.has_perm(self.permission_required, self.get_object())
        else:
            has_permission = request.user.has_perm(self.permission_required)
        # User failed the permission check: 403 or redirect to login.
        if not has_permission:
            if self.raise_exception:
                return HttpResponseForbidden()
            path = urlquote(request.get_full_path())
            tup = self.login_url, self.redirect_field_name, path
            return HttpResponseRedirect("%s?%s=%s" % tup)
        # User passed the permission check, so return the result of calling .dispatch().
        return original_return_value
|
# coding: utf-8
# Root directories of the SBU dataset splits used elsewhere in the project.
sbu_training_root = './Datasets/SBU/SBUTrain4KRecoveredSmall'  # training split
sbu_testing_root = './Datasets/SBU/SBU-Test'  # testing split
|
# Competitive-programming task: for each test case, split n into k positive
# parts — (k-3) ones plus three numbers a, b, c summing to the remainder
# n-k+3, chosen by parity/divisibility (presumably to minimise their LCM —
# verify against the original problem statement).
t = int(input())  # number of test cases
while t > 0:
    n,k = map(int,input().split())
    arr = []
    for i in range(k-3):
        arr.append(1)
    a,b,c = 0,0,0
    if (n-k+3) % 2 == 1:
        # odd remainder: two equal halves plus 1
        a = (n-k+3)//2
        b = (n-k+3)//2
        c = 1
    elif (n-k+3) % 2 == 0 and (n-k+3) % 4 != 0:
        # even but not divisible by 4: two equal parts plus 2
        a = (n-k+3-2)//2
        b = (n-k+3-2)//2
        c = 2
    elif (n-k+3) % 4 == 0:
        # divisible by 4: quarter, quarter, half
        a = (n-k+3)// 4
        b = (n-k+3) // 4
        c = (n-k+3)// 2
    arr.append(a)
    arr.append(b)
    arr.append(c)
    print(*arr,sep=" ")
    t = t-1
import sys
from keymaps import *
from phrase_key_travel import *
def main():
    """Parse CLI arguments and run phrase_key_travel.

    Expected argv layout (trailing arguments optional):
    [script, print_flag, phrase, keymap_name, debug_flag]
    """
    n = len(sys.argv)
    # arg 0: print
    if n > 2:
        # NOTE(review): eval() on a command-line argument executes arbitrary
        # code — intended only for literal True/False here; consider
        # ast.literal_eval or an explicit string comparison.
        printbol = eval(sys.argv[1])
    continuation = False
    # arg 1: phrase
    if n > 3:
        arg1 = str(sys.argv[2])
        continuation = True
    # arg 2: map
    if n > 4:
        # NOTE(review): eval() constructs the keymap by name — any callable
        # named on the command line will be executed.
        arg2 = eval(sys.argv[3]+"()")
    else:
        arg2 = DK_keymap()
    # arg 3: debug
    if n == 5:
        arg3 = eval(sys.argv[4])
    else:
        arg3 = False
    # call phrase_key_travel, printing the result (in cm) when requested
    if continuation == True:
        if printbol == False:
            phrase_key_travel(phrase=arg1, dict_map=arg2, db_vec=arg3)
        else:
            print(phrase_key_travel(phrase=arg1, dict_map=arg2, db_vec=arg3),"cm")

if __name__ == "__main__":
    main()
|
from collections import Counter
def solution(X, Y):
    """Return the largest number (as a string) formed from the digits common
    to X and Y (with multiplicity); '0' when only zeros are shared, '-1' when
    nothing is shared."""
    count_x = Counter(X)
    count_y = Counter(Y)
    answer = ''
    for digit in "9876543210":
        if digit not in count_x:
            continue
        shared = min(count_x[digit], count_y[digit])
        if digit == '0' and not answer:
            # No larger digit was shared: shared zeros collapse to "0",
            # and no shared zeros either means no common digit at all.
            return '0' if shared else '-1'
        answer += digit * shared
    return answer or '-1'
# Wrong solution:
# if Y contains no '0', this version can never return '-1', because the '-1'
# case is only handled inside the '0' branch — the final `return answer`
# needs a conditional fallback (as in the corrected version above).
from collections import Counter
def solution(X, Y):
    answer = ''
    CX = Counter(X)
    CY = Counter(Y)
    check = 0  # unused
    for i in "9876543210":
        if i in X:
            if i == '0':
                if answer:
                    answer += i * min(CX[i], CY[i])
                else:
                    if min(CX[i], CY[i]) != 0:
                        return '0'
                    else:
                        return '-1'
            else:
                answer += i * min(CX[i], CY[i])
    # BUG (kept for reference): returns '' instead of '-1' when nothing matched.
    return answer
# Wrong solution
from collections import Counter
def solution(X, Y):
    answer = []
    CX = Counter(X)
    CY = Counter(Y)
    for i in CX:
        if i in CY:
            answer.extend([i * min(CX[i], CY[i])])
    answer.sort(reverse=True)
    # NOTE(review): entries can be '' when min(...) == 0; if every entry is
    # empty the list is still truthy and int('') raises ValueError here.
    return str(int(''.join(answer))) if answer else "-1"
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import pandas as pd
import logging
def fetchRtlr():
    """Fetch all records from the 'salesforce_items' Google Sheet (tab 'Sheet2')
    and package them as a table-description dict.

    Returns a dict with the sheet's row values, column names, and static
    metadata (table name, primary key, column types) — presumably consumed by
    a table-creation/loading step elsewhere; verify against the caller.
    """
    logging.debug("fetchh items")
    scope = ['https://www.googleapis.com/auth/spreadsheets', "https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive"]
    # Service-account credentials file is expected at this relative path.
    creds = ServiceAccountCredentials.from_json_keyfile_name('apps/data/credentials.json',scope)
    client = gspread.authorize(creds)
    logging.debug("time")
    sheet = client.open('salesforce_items').worksheet('Sheet2')
    #row=[sheet.row_count-1,"bulb"]
    logging.debug(sheet.row_count)
    #sheet.insert_row(row,sheet.row_count)
    legislators = sheet.get_all_records()
    df = pd.DataFrame(legislators)
    columns = list(df)  # column names in sheet order
    values = df.values.tolist()
    # NOTE(review): 'types' hard-codes four VARCHAR columns — assumes the sheet
    # has exactly four columns; confirm against the spreadsheet schema.
    resp = {
        "values":values,
        "columns":columns,
        "name": "TB_CUST",
        "pk": ["id"],
        "types": [
            'VARCHAR(45)',
            'VARCHAR(45)',
            'VARCHAR(45)',
            'VARCHAR(45)',
        ]
    }
    #df = pd.DataFrame(legislators)
    return resp
|
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import re
from pants.testutil.pants_integration_test import run_pants
def test_list_all() -> None:
    """`list ::` with the python backend enabled should print multiple targets."""
    pants_run = run_pants(["--backend-packages=pants.backend.python", "list", "::"])
    pants_run.assert_success()
    assert len(pants_run.stdout.strip().split()) > 1
def test_list_none() -> None:
    """A bare `list` (no target specs) succeeds but warns that nothing matched."""
    pants_run = run_pants(["list"])
    pants_run.assert_success()
    assert re.search("WARN.* No targets were matched in", pants_run.stderr)
def test_list_invalid_dir() -> None:
    """Listing a non-existent directory glob must fail with an unmatched-glob error."""
    pants_run = run_pants(["list", "abcde::"])
    pants_run.assert_failure()
    assert "Unmatched glob from CLI arguments:" in pants_run.stderr
def test_list_testproject() -> None:
    """The hello testproject must list exactly this fixed set of targets, in order."""
    pants_run = run_pants(
        [
            "--backend-packages=pants.backend.python",
            "list",
            "testprojects/src/python/hello::",
        ]
    )
    pants_run.assert_success()
    assert pants_run.stdout.strip() == "\n".join(
        [
            "testprojects/src/python/hello:hello",
            "testprojects/src/python/hello:hello-dist",
            "testprojects/src/python/hello:resource",
            "testprojects/src/python/hello/__init__.py",
            "testprojects/src/python/hello/dist_resource.txt:resource",
            "testprojects/src/python/hello/greet:greet",
            "testprojects/src/python/hello/greet:greeting",
            "testprojects/src/python/hello/greet/__init__.py",
            "testprojects/src/python/hello/greet/greet.py",
            "testprojects/src/python/hello/greet/greeting.txt:greeting",
            "testprojects/src/python/hello/main:main",
            "testprojects/src/python/hello/main:lib",
            "testprojects/src/python/hello/main/__init__.py:lib",
            "testprojects/src/python/hello/main/main.py:lib",
        ]
    )
|
import pygame
class Varg:
    """Pygame sprite for a wolf ("varg") that patrols horizontally, walking
    ±200 px around its spawn position and flipping its facing image at each
    turn-around point."""

    def __init__(self, xpos, ypos, width, height):
        # Remember the spawn geometry so reset() can restore it.
        self.startvalues = [xpos, ypos, width, height]
        self.xpos = xpos
        self.ypos = ypos
        self.width = width
        self.height = height
        self.speed = 0.1   # horizontal speed factor, scaled by (1 + dt) per step
        self.bool1 = True  # True -> moving right (right-facing image is drawn)
        self.world_x = 0   # camera/world scroll offset, updated in update()
        self.image = pygame.image.load("img/varg.png")    # right-facing sprite
        self.image2 = pygame.image.load("img/vargL.png")  # left-facing sprite
        self.image_rect = (self.xpos, self.ypos, self.width, self.height)

    def reset(self):
        """Restore position and size captured at construction."""
        self.xpos, self.ypos, self.width, self.height = self.startvalues

    def render(self, screen):
        """Draw the sprite relative to the camera offset, picking the image
        that matches the current travel direction."""
        self.image_rect = (self.xpos - self.world_x, self.ypos, self.width, self.height)
        if self.bool1:
            self.image = pygame.transform.scale(self.image, (self.width, self.height))
            screen.blit(self.image, self.image_rect)
        else:
            self.image2 = pygame.transform.scale(self.image2, (self.width, self.height))
            screen.blit(self.image2, self.image_rect)

    def update(self, world_x, dt):
        """Advance the patrol: walk right until 200 px past spawn, then walk
        left until 200 px before spawn, and repeat."""
        self.world_x = world_x
        if int(self.xpos) <= self.startvalues[0] - 200:
            self.bool1 = True   # reached the left bound -> turn right
            self.move_right(dt)
        elif int(self.xpos) >= self.startvalues[0] + 200:
            self.bool1 = False  # reached the right bound -> turn left
            self.move_left(dt)
        elif self.xpos < self.startvalues[0] + 200 and self.bool1:
            # Between the bounds: keep going in the current direction.
            self.move_right(dt)
        else:
            self.move_left(dt)

    def get_rect(self):
        """Return the last rect used for drawing as (x, y, w, h)."""
        return self.image_rect

    def move_right(self, dt):
        self.xpos += self.speed * (1 + dt)

    def move_left(self, dt):
        self.xpos -= self.speed * (1 + dt)
|
import os
import numpy as np
import bpy
import math
# from photogrammetry_importer.point import Point
from photogrammetry_importer.blender_logging import log_report
from photogrammetry_importer.utils.blender_utils import add_collection
from photogrammetry_importer.initialization import Initializer
from photogrammetry_importer.file_handler.image_file_handler import ImageFileHandler
from photogrammetry_importer.file_handler.meshroom_file_handler import MeshroomFileHandler
from photogrammetry_importer.file_handler.openmvg_json_file_handler import OpenMVGJSONFileHandler
from photogrammetry_importer.file_handler.opensfm_json_file_handler import OpenSfMJSONFileHandler
from photogrammetry_importer.file_handler.colmap_file_handler import ColmapFileHandler
from photogrammetry_importer.file_handler.nvm_file_handler import NVMFileHandler
from photogrammetry_importer.file_handler.open3D_file_handler import Open3DFileHandler
from photogrammetry_importer.file_handler.ply_file_handler import PLYFileHandler
from photogrammetry_importer.file_handler.transformation_file_handler import TransformationFileHandler
from photogrammetry_importer.camera_import_properties import CameraImportProperties
from photogrammetry_importer.point_import_properties import PointImportProperties
from photogrammetry_importer.mesh_import_properties import MeshImportProperties
from photogrammetry_importer.transformation_import_properties import TransformationImportProperties
from photogrammetry_importer.camera import Camera
# Notes:
# http://sinestesia.co/blog/tutorials/using-blenders-filebrowser-with-python/
# Nice blender tutorial
# https://blog.michelanders.nl/2014/07/inheritance-and-mixin-classes-vs_13.html
# - The class that is actually used as operator must inherit from bpy.types.Operator and ImportHelper
# - Properties defined in the parent class, which inherits from bpy.types.Operator and ImportHelper
# are not considered
# https://blender.stackexchange.com/questions/717/is-it-possible-to-print-to-the-report-window-in-the-info-view
# The color depends on the type enum: INFO gets green, WARNING light red, and ERROR dark red
from bpy.props import (CollectionProperty,
StringProperty,
BoolProperty,
EnumProperty,
FloatProperty,
IntProperty
)
from bpy_extras.io_utils import (ImportHelper,
ExportHelper,
axis_conversion)
def get_addon_name():
    """Return the top-level package name of this add-on module."""
    return __name__.partition('.')[0]
def get_default_image_path(reconstruction_fp, image_dp):
    """Derive a default image directory for a reconstruction file.

    None is passed through unchanged; an empty string selects an 'images'
    sibling subdirectory of the reconstruction file when it exists, otherwise
    the reconstruction file's own directory; any other value is returned as-is.
    """
    if image_dp is None:
        return None
    if image_dp != '':
        return image_dp
    same_dp = os.path.dirname(reconstruction_fp)
    sub_dp = os.path.join(same_dp, 'images')
    return sub_dp if os.path.isdir(sub_dp) else same_dp
class ImportColmap(CameraImportProperties, PointImportProperties, MeshImportProperties, bpy.types.Operator):
    """Import a Colmap model (folder with .txt/.bin) or a Colmap workspace folder with dense point clouds and meshes."""
    bl_idname = "import_scene.colmap_model"
    bl_label = "Import Colmap Model Folder"
    bl_options = {'PRESET'}

    directory : StringProperty()
    #filter_folder : BoolProperty(default=True, options={'HIDDEN'})

    def execute(self, context):
        """Parse the selected Colmap folder and add cameras, points and the
        mesh (if any) to a new 'Reconstruction Collection'."""
        path = self.directory
        # Remove trailing slash
        path = os.path.dirname(path)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras, points, mesh_ifp = ColmapFileHandler.parse_colmap_folder(
            path, self.image_dp, self.image_fp_type, self.suppress_distortion_warnings, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        self.report({'INFO'}, 'Mesh file path: ' + str(mesh_ifp))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        self.import_photogrammetry_points(points, reconstruction_collection)
        self.import_photogrammetry_mesh(mesh_ifp, reconstruction_collection)
        self.report({'INFO'}, 'Parse Colmap model folder: Done')
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        # See:
        # https://blender.stackexchange.com/questions/14738/use-filemanager-to-select-directory-instead-of-file/14778
        # https://docs.blender.org/api/current/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        """Draw the camera, point and mesh import options in the sidebar."""
        layout = self.layout
        self.draw_camera_options(layout)
        self.draw_point_options(layout)
        self.draw_mesh_options(layout)
class ImportNVM(CameraImportProperties, PointImportProperties, bpy.types.Operator, ImportHelper):
    """Import a VisualSfM NVM file"""
    bl_idname = "import_scene.nvm"
    bl_label = "Import NVM"
    bl_options = {'PRESET'}

    filepath: StringProperty(
        name="NVM File Path",
        description="File path used for importing the NVM file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.nvm", options={'HIDDEN'})

    def enhance_camera_with_images(self, cameras):
        # Overwrites CameraImportProperties.enhance_camera_with_images()
        # Reads image files to fill in camera image data, falling back to the
        # user-provided default width/height (presumably because NVM files do
        # not carry image sizes — confirm against NVMFileHandler).
        cameras, success = ImageFileHandler.parse_camera_image_files(
            cameras, self.default_width, self.default_height, self)
        return cameras, success

    def execute(self, context):
        """Parse the NVM file and add cameras and points to a new collection."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras, points = NVMFileHandler.parse_nvm_file(
            path, self.image_dp, self.image_fp_type, self.suppress_distortion_warnings, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        self.import_photogrammetry_points(points, reconstruction_collection)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        # Also expose image size / principal point options for this format.
        layout = self.layout
        self.draw_camera_options(layout, draw_size_and_pp=True)
        self.draw_point_options(layout)
class ImportOpenMVG(CameraImportProperties, PointImportProperties, bpy.types.Operator, ImportHelper):
    """Import an OpenMVG JSON file"""
    bl_idname = "import_scene.openmvg_json"
    bl_label = "Import OpenMVG JSON"
    bl_options = {'PRESET'}

    filepath: StringProperty(
        name="OpenMVG JSON File Path",
        description="File path used for importing the OpenMVG JSON file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.json", options={'HIDDEN'})

    def execute(self, context):
        """Parse the OpenMVG JSON file and add cameras and points to a new collection."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras, points = OpenMVGJSONFileHandler.parse_openmvg_file(
            path, self.image_dp, self.image_fp_type, self.suppress_distortion_warnings, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        self.import_photogrammetry_points(points, reconstruction_collection)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        layout = self.layout
        self.draw_camera_options(layout)
        self.draw_point_options(layout)
class ImportOpenSfM(CameraImportProperties, PointImportProperties, bpy.types.Operator, ImportHelper):
    """Import an OpenSfM JSON file"""
    bl_idname = "import_scene.opensfm_json"
    bl_label = "Import OpenSfM JSON"
    bl_options = {'PRESET'}

    filepath: StringProperty(
        name="OpenSfM JSON File Path",
        description="File path used for importing the OpenSfM JSON file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.json", options={'HIDDEN'})
    # An OpenSfM file may contain several reconstructions; this index selects one.
    reconstruction_number: IntProperty(
        name="Reconstruction Number",
        description = "If the input file contains multiple reconstructions, use this property to select the desired reconstruction.",
        default=0)

    def execute(self, context):
        """Parse the OpenSfM JSON file and add cameras and points to a new collection."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras, points = OpenSfMJSONFileHandler.parse_opensfm_file(
            path, self.image_dp, self.image_fp_type, self.suppress_distortion_warnings, self.reconstruction_number, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        self.import_photogrammetry_points(points, reconstruction_collection)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        layout = self.layout
        layout.prop(self, "reconstruction_number")
        self.draw_camera_options(layout)
        self.draw_point_options(layout)
class ImportMeshroom(CameraImportProperties, PointImportProperties, MeshImportProperties, bpy.types.Operator, ImportHelper):
    """Import a Meshroom MG/SfM/JSON file"""
    bl_idname = "import_scene.meshroom_sfm_json"
    bl_label = "Import Meshroom SfM/JSON/MG"
    bl_options = {'PRESET'}

    filepath: StringProperty(
        name="Meshroom JSON File Path",
        description="File path used for importing the Meshroom SfM/JSON/MG file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.sfm;*.json;*.mg", options={'HIDDEN'})

    # Structure From Motion Node
    sfm_node_items = [
        ("AUTOMATIC", "AUTOMATIC", "", 1),
        ("ConvertSfMFormatNode", "ConvertSfMFormatNode", "", 2),
        ("StructureFromMotionNode", "StructureFromMotionNode", "", 3)
    ]
    sfm_node_type: EnumProperty(
        name="Structure From Motion Node Type",
        description = "Use this property to select the node with the structure from motion results to import.",
        items=sfm_node_items)
    sfm_node_number: IntProperty(
        name="ConvertSfMFormat Node Number",
        description = "Use this property to select the desired node." +
        "By default the node with the highest number is imported.",
        default=-1)

    # Mesh Node
    mesh_node_items = [
        ("AUTOMATIC", "AUTOMATIC", "", 1),
        ("Texturing", "Texturing", "", 2),
        ("MeshFiltering", "MeshFiltering", "", 3),
        ("Meshing", "Meshing", "", 4)
    ]
    mesh_node_type: EnumProperty(
        name="Mesh Node Type",
        description = "Use this property to select the node with the mesh results to import.",
        items=mesh_node_items)
    mesh_node_number: IntProperty(
        name="Mesh Node Number",
        description = "Use this property to select the desired node." +
        "By default the node with the highest number is imported.",
        default=-1)

    def execute(self, context):
        """Parse the Meshroom file/graph and add cameras, points and the mesh
        (if any) to a new 'Reconstruction Collection'."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras, points, mesh_fp = MeshroomFileHandler.parse_meshroom_file(
            path, self.image_dp, self.image_fp_type, self.suppress_distortion_warnings,
            self.sfm_node_type, self.sfm_node_number, self.mesh_node_type, self.mesh_node_number, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        self.import_photogrammetry_points(points, reconstruction_collection)
        self.import_photogrammetry_mesh(mesh_fp, reconstruction_collection)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        # Node-selection options first, then the generic import options.
        layout = self.layout
        node_box = layout.box()
        node_box.prop(self, "sfm_node_type")
        node_box.prop(self, "sfm_node_number")
        node_box.prop(self, "mesh_node_type")
        node_box.prop(self, "mesh_node_number")
        self.draw_camera_options(layout)
        self.draw_point_options(layout)
        self.draw_mesh_options(layout)
class ImportOpen3D(CameraImportProperties, PointImportProperties, bpy.types.Operator, ImportHelper):
    """Import an Open3D LOG/JSON file"""
    bl_idname = "import_scene.open3d_log_json"
    bl_label = "Import Open3D LOG/JSON"
    bl_options = {'PRESET'}

    filepath: StringProperty(
        name="Open3D LOG/JSON File Path",
        description="File path used for importing the Open3D LOG/JSON file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.log;*.json", options={'HIDDEN'})

    def enhance_camera_with_intrinsics(self, cameras):
        """Ensure every camera has intrinsics: keep file-provided intrinsics
        (.json) or, if any are missing, build a calibration matrix from the
        user-supplied focal length and principal point (.log).

        Returns (cameras, success); success is False when the user supplied
        no focal length although one is required.
        """
        intrinsic_missing = False
        for cam in cameras:
            if not cam.has_intrinsics():
                intrinsic_missing = True
                break
        if not intrinsic_missing:
            self.report({'INFO'}, 'Using intrinsics from file (.json).')
            return cameras, True
        else:
            self.report({'INFO'}, 'Using intrinsics from user options, since not present in the reconstruction file (.log).')
            if math.isnan(self.default_focal_length):
                self.report({'ERROR'}, 'User must provide the focal length using the import options.')
                return [], False
            if math.isnan(self.default_pp_x) or math.isnan(self.default_pp_y):
                self.report({'WARNING'}, 'Setting the principal point to the image center.')
            for cam in cameras:
                if math.isnan(self.default_pp_x) or math.isnan(self.default_pp_y):
                    assert cam.width is not None  # If no images are provided, the user must provide a default principal point
                    assert cam.height is not None  # If no images are provided, the user must provide a default principal point
                    default_cx = cam.width / 2.0
                    default_cy = cam.height / 2.0
                else:
                    default_cx = self.default_pp_x
                    default_cy = self.default_pp_y
                intrinsics = Camera.compute_calibration_mat(
                    focal_length=self.default_focal_length, cx=default_cx, cy=default_cy)
                cam.set_calibration_mat(intrinsics)
            return cameras, True

    def enhance_camera_with_images(self, cameras):
        # Overwrites CameraImportProperties.enhance_camera_with_images()
        # Reads image files to fill in camera image data, with user defaults as fallback.
        cameras, success = ImageFileHandler.parse_camera_image_files(
            cameras, self.default_width, self.default_height, self)
        return cameras, success

    def execute(self, context):
        """Parse the Open3D file and add the cameras to a new collection
        (this format contributes no points)."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        self.image_dp = get_default_image_path(
            path, self.image_dp)
        self.report({'INFO'}, 'image_dp: ' + str(self.image_dp))
        cameras = Open3DFileHandler.parse_open3d_file(
            path, self.image_dp, self.image_fp_type, self)
        self.report({'INFO'}, 'Number cameras: ' + str(len(cameras)))
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_cameras(cameras, reconstruction_collection)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Apply add-on preferences as defaults, then open the file browser."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        # Expose size, principal point and focal length options for this format.
        layout = self.layout
        self.draw_camera_options(layout, draw_size_and_pp=True, draw_focal_length=True)
class ImportPLY(PointImportProperties, TransformationImportProperties, bpy.types.Operator, ImportHelper):
    """Import a PLY file as point cloud"""

    # NOTE(review): "import_scene.ply" matches the id of Blender's builtin
    # PLY importer -- confirm this override is intentional.
    bl_idname = "import_scene.ply"
    bl_label = "Import PLY"
    bl_options = {'PRESET'}

    # Selected file name; combined with `directory` in execute().
    filepath: StringProperty(
        name="PLY File Path",
        description="File path used for importing the PLY file")
    directory: StringProperty()
    filter_glob: StringProperty(default="*.ply", options={'HIDDEN'})

    def execute(self, context):
        """Parse the PLY points plus optional per-frame transformations and
        add everything to a 'Reconstruction Collection'."""
        path = os.path.join(self.directory, self.filepath)
        self.report({'INFO'}, 'path: ' + str(path))
        points = PLYFileHandler.parse_ply_file(path)
        self.report({'INFO'}, 'Number points: ' + str(len(points)))
        transformations_sorted = TransformationFileHandler.parse_transformation_folder(
            self.path_to_transformations, self)
        reconstruction_collection = add_collection('Reconstruction Collection')
        self.import_photogrammetry_points(points, reconstruction_collection, transformations_sorted)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Initialize options from the add-on preferences and open the
        file-select dialog."""
        addon_name = get_addon_name()
        import_export_prefs = bpy.context.preferences.addons[addon_name].preferences
        Initializer.initialize_options(import_export_prefs, self)
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        """Draw the point and transformation import option widgets."""
        layout = self.layout
        self.draw_point_options(layout)
        self.draw_transformation_options(layout)
|
# Development settings, layered on top of the shared base settings.
from .base import *

# Development only -- never run with DEBUG enabled in production.
DEBUG = True

ALLOWED_HOSTS = []

# django-debug-toolbar only renders for requests originating from these
# addresses.  Bug fix: this must be a *list* of IP strings -- a bare string
# is the documented misconfiguration (membership is tested against it).
INTERNAL_IPS = ['127.0.0.1']

INSTALLED_APPS += [
    'debug_toolbar',
]

MIDDLEWARE += [
    'debug_toolbar.middleware.DebugToolbarMiddleware',
]

# Local SQLite database for development.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'azov_frontend'),
    os.path.join(BASE_DIR, 'static'),
)

WEBPACK_LOADER = {
    'DEFAULT': {
        # Disable webpack stats caching while debugging.
        'CACHE': not DEBUG,
        'BUNDLE_DIR_NAME': 'dist/',
        'STATS_FILE': os.path.join(BASE_DIR, 'azov_frontend/webpack-stats.json'),
    }
}
|
# Classic "hihello" next-character demo: one-hot encode the sequence and fit
# an LSTM to predict each following character.
from keras.layers import Dense, Dropout, LSTM
import numpy as np

# Index -> character vocabulary (5 distinct characters).
idxx2char = ['e', 'h', 'i', 'l', 'o']

# Bug fix: np.str was a deprecated alias of builtin str (removed NumPy 1.24).
_data = np.array([['h', 'i', 'h', 'e', 'l', 'l', 'o']], dtype=str).reshape(-1, 1)

from sklearn.preprocessing import OneHotEncoder
enc = OneHotEncoder()
enc.fit(_data)
_data = enc.transform(_data).toarray().astype('float32')

# Inputs: first six characters; targets: the six following characters
# (as class indices, via argmax of the one-hot rows).
x_data = _data[:6, ]
y_data = _data[1:, ]
y_data = np.argmax(y_data, axis=1)
print(y_data)

x_data = x_data.reshape(1, 6, 5)
y_data = y_data.reshape(1, 6)
print(x_data.shape)

num_classes = 5
batch_size = 1
sequence_length = 6
input_dim = 5
hidden_size = 5
learning_rate = 0.1

from keras.models import Sequential
model = Sequential()
model.add(LSTM(10, input_shape=(6, 5)))
model.add(Dense(512, activation='relu'))
# NOTE(review): regressing six class indices with relu/mse is unusual for a
# next-char task; a per-step softmax over 5 classes would be conventional.
model.add(Dense(6, activation='relu'))
model.compile(optimizer='adam', loss='mse', metrics=['mse'])
model.fit(x_data, y_data, batch_size=1, epochs=100, verbose=1)

y_pred = model.predict(x_data)
print(y_pred)
# Bug fix: predictions have shape (1, 6); axis=2 raised an AxisError.
y_pred = np.argmax(y_pred, axis=-1)
print(y_pred)
#-*—coding:utf8-*-
# Python 2 script: collect rows from every per-capture "*stats.csv" file
# (derived from the '*iw.csv' captures listed via list.txt) and concatenate
# them into a single matrix written to new/datalist.csv.
import numpy as np
import gc
import re
import csv
import codecs
from decimal import *
import os
import matplotlib.pyplot as plt

# list.txt names one capture directory per line (first CSV column).
try:
    fil_winsize = codecs.open("/home/oneT/data/list.txt", "r", 'utf_8_sig')
    # fil6 = codecs.open("channel_ssid_time.csv", "w", 'utf_8_sig')
    winsize = csv.reader(fil_winsize)
    # write_ssid = csv.writer(fil6)
except Exception:
    print "winsize_filelist open failed"
    exit()

ratio = 1000
iw_file_list = []
pre = '/home/oneT/data/'
# Collect every '*iw.csv' found under <dir>/split/ for each listed directory.
for i in winsize:
    tmp = i[0] + '/split/'
    # print tmp
    res = os.listdir(pre + tmp)
    # print res
    for j in res:
        if j.find('iw.csv') > 0:
            jj = tmp + j
            iw_file_list.append(jj)

data_list = []
for xx in iw_file_list:
    print xx
    iw_f = pre + xx
    # Derive the matching stats file path: new/<path joined by '_'>_stats.csv.
    k = xx.replace('/', '_')
    k = k.replace('iw.csv', 'ratio.csv')
    re_f = pre + 'new/' + k
    wr_f = re_f.replace('ratio', 'stats')
    try:
        wr_file = open(wr_f, 'rb')
        wr_r = csv.reader(wr_file)
    except Exception:
        print wr_f, 'open failed'
        continue
    # Rows normally have 13 fields; some older files lack 'neibours'.
    for i in wr_r:
        try:
            (rat, stations, busy, recv, tran, bytes1,
             packets, qlen, backlog, drops, requeues, neibours, drop) = i
        except:
            # NOTE(review): bare except; and if the FIRST parsed row has only
            # 12 fields, `neibours` used below is unbound -> NameError.
            # Otherwise the previous row's value is silently reused -- confirm
            # that is intended.
            print i, wr_f
            (rat, stations, busy, recv, tran, bytes1,
             packets, qlen, backlog, drops, requeues, drop) = i
            print i, wr_f
        x = [rat, stations, busy, recv, tran, bytes1, packets,
             qlen, backlog, drops, requeues, neibours, drop]
        # Convert every column to float before accumulating.
        for j in range(0, len(x)):
            x[j] = float(x[j])
        data_list.append(x)
    # exit()
    if wr_file:
        wr_file.close()
    # print wr_f

try:
    data_file = open('/home/oneT/data/new/datalist.csv', 'wb')
    write_record = csv.writer(data_file)
except Exception:
    # NOTE(review): execution continues after a failed open, so the
    # write_record/data_file uses below raise NameError -- consider exit().
    print "file open failed"
write_record.writerows(data_list)
if data_file:
    data_file.close()
del data_list
if fil_winsize:
    fil_winsize.close()
gc.collect()
|
""" boundaryUtils - a module of mathematical functions"""
import math
import part
import assembly
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def dist( pt1, pt2 ):
    """Return the Euclidean distance between two 3-D points.

    :param pt1: sequence of three coordinates (x, y, z).
    :param pt2: sequence of three coordinates (x, y, z).
    :return: non-negative float distance.
    """
    dx = pt1[0] - pt2[0]
    dy = pt1[1] - pt2[1]
    dz = pt1[2] - pt2[2]
    # Return directly instead of shadowing the function name with a local.
    return math.sqrt( dx*dx + dy*dy + dz*dz )
#----------------------------------------
def checkEdge( cntnr, idx, xyz, r ):
    """Return 1 if every vertex of edge *idx* has coordinate *xyz* equal to
    *r* within 1e-6, else 0 (edge lies in the axis-aligned plane)."""
    for vtx in cntnr.edges[idx].getVertices():
        if abs( cntnr.vertices[vtx].pointOn[0][xyz] - r ) > 1.e-6:
            return 0
    return 1
#----------------------------------------
def checkLineEdge( cntnr, idx, xyz1, r1, xyz2, r2 ):
    """Return 1 if every vertex of edge *idx* satisfies both coordinate
    constraints (xyz1 == r1 and xyz2 == r2, tolerance 1e-6), else 0."""
    for vtx in cntnr.edges[idx].getVertices():
        pt = cntnr.vertices[vtx].pointOn[0]
        if abs( pt[xyz1] - r1 ) > 1.e-6 or abs( pt[xyz2] - r2 ) > 1.e-6:
            return 0
    return 1
#----------------------------------------
def checkColinearEdge( cntnr, idx, vctr ):
    """Return 1 if every vertex of edge *idx* lies along direction *vctr*
    from the first vertex (|dot| == |d|*|v| within 1e-6), else 0."""
    vtxs = cntnr.edges[idx].getVertices()
    origin = cntnr.vertices[vtxs[0]].pointOn[0]
    # Magnitude of the direction vector is loop-invariant.
    vmag = dist( (0,0,0), vctr )
    for vtx in vtxs[1:]:
        pt = cntnr.vertices[vtx].pointOn[0]
        dot = abs( (origin[0]-pt[0])*vctr[0] + (origin[1]-pt[1])*vctr[1] + (origin[2]-pt[2])*vctr[2] )
        if abs( dot - dist( pt, origin ) * vmag ) > 1.e-6:
            return 0
    return 1
#----------------------------------------
def checkEdgeOnCircle( edge, x0, r ):
    """Return 1 if the edge's sample point lies on the circle of radius *r*
    centred at *x0* in the xy-plane (tolerance 1e-6), else 0."""
    px, py = edge.pointOn[0][0], edge.pointOn[0][1]
    radial = math.sqrt( (px - x0[0])**2 + (py - x0[1])**2 )
    return 1 if abs( radial - r ) < 1.e-6 else 0
#----------------------------------------
def checkFace( cntnr, idx, xyz, r ):
    """Return 1 if every edge of face *idx* passes checkEdge() for the plane
    xyz == r, else 0."""
    for edgeId in cntnr.faces[idx].getEdges():
        if checkEdge( cntnr, edgeId, xyz, r ) == 0:
            return 0
    return 1
#----------------------------------------
def checkFaceOnSphere( face, x0, r ):
    """Return 1 if the face's sample point lies on the sphere of radius *r*
    centred at *x0* (tolerance 1e-6), else 0."""
    dx = face.pointOn[0][0] - x0[0]
    dy = face.pointOn[0][1] - x0[1]
    dz = face.pointOn[0][2] - x0[2]
    return 1 if abs( math.sqrt( dx*dx + dy*dy + dz*dz ) - r ) < 1.e-6 else 0
#----------------------------------------
def checkFaceOnCircle( face, x0, r ):
    """Return 1 if the face's sample point lies on the circle of radius *r*
    centred at *x0* in the xy-plane (tolerance 1e-6), else 0."""
    px, py = face.pointOn[0][0], face.pointOn[0][1]
    radial = math.sqrt( (px - x0[0])**2 + (py - x0[1])**2 )
    return 1 if abs( radial - r ) < 1.e-6 else 0
#----------------------------------------
def getEdges( cntnr, xyz, r ):
    """Collect all edges of *cntnr* lying in the plane xyz == r as an EdgeArray."""
    found = cntnr.edges[0:0]
    for idx in range( len( cntnr.edges ) ):
        if checkEdge( cntnr, idx, xyz, r ) == 1:
            found += cntnr.edges.findAt( cntnr.edges[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getLineEdges( cntnr, xyz1, r1, xyz2, r2 ):
    """Collect all edges on the line xyz1 == r1 and xyz2 == r2 as an EdgeArray."""
    found = cntnr.edges[0:0]
    for idx in range( len( cntnr.edges ) ):
        if checkLineEdge( cntnr, idx, xyz1, r1, xyz2, r2 ) == 1:
            found += cntnr.edges.findAt( cntnr.edges[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getColinearEdges( cntnr, vctr ):
    """Collect all edges colinear with direction *vctr* as an EdgeArray."""
    found = cntnr.edges[0:0]
    for idx in range( len( cntnr.edges ) ):
        if checkColinearEdge( cntnr, idx, vctr ) == 1:
            found += cntnr.edges.findAt( cntnr.edges[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getEdgesOnCircle( cntnr, x0, r ):
    """Collect all edges whose sample point lies on the circle (x0, r) in the
    xy-plane as an EdgeArray."""
    found = cntnr.edges[0:0]
    for idx in range( len( cntnr.edges ) ):
        if checkEdgeOnCircle( cntnr.edges[idx], x0, r ) == 1:
            found += cntnr.edges.findAt( cntnr.edges[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getFaces( cntnr, xyz, r ):
    """Collect all faces lying in the plane xyz == r as a FaceArray."""
    found = cntnr.faces[0:0]
    for idx in range( len( cntnr.faces ) ):
        if checkFace( cntnr, idx, xyz, r ) == 1:
            found += cntnr.faces.findAt( cntnr.faces[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getFacesOnSphere( cntnr, x0, r ):
    """Collect all faces whose sample point lies on the sphere (x0, r) as a FaceArray."""
    found = cntnr.faces[0:0]
    for idx in range( len( cntnr.faces ) ):
        if checkFaceOnSphere( cntnr.faces[idx], x0, r ) == 1:
            found += cntnr.faces.findAt( cntnr.faces[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getFacesOnCircle( cntnr, x0, r ):
    """Collect all faces whose sample point lies on the circle (x0, r) in the
    xy-plane as a FaceArray."""
    found = cntnr.faces[0:0]
    for idx in range( len( cntnr.faces ) ):
        if checkFaceOnCircle( cntnr.faces[idx], x0, r ) == 1:
            found += cntnr.faces.findAt( cntnr.faces[idx].pointOn, printWarning=False )
    return found
#----------------------------------------
def getSequence( array ):
    """Rebuild *array* via findAt() lookups on each member's sample point,
    returning a proper geometry sequence of the same entities."""
    seq = array[0:0]
    for member in array:
        seq += array.findAt( member.pointOn, printWarning=False )
    return seq
#----------------------------------------
def getCellsFromCntnr( cntnr1, cntnr2 ):
    """Map each cell of *cntnr2* to the cell of *cntnr1* located at the mean
    of its face centroids; unmatched cells are silently skipped."""
    cells = cntnr1.cells[0:0]
    for idx in range( len( cntnr2.cells ) ):
        faceIds = cntnr2.cells[idx].getFaces()
        n = len( faceIds )
        x = y = z = 0
        # Average the face centroids (divide per term, as before).
        for fid in faceIds:
            centroid = cntnr2.faces[ fid ].getCentroid()[0]
            x += centroid[0] / n
            y += centroid[1] / n
            z += centroid[2] / n
        try:
            match = cntnr1.cells.findAt( ((x,y,z),), printWarning=False )
        except:
            match = cntnr1.cells[0:0]
        cells += match
    return cells
#----------------------------------------
def getFacesFromCntnr( cntnr1, cntnr2 ):
    """Map each face of *cntnr2* to the face of *cntnr1* found at its
    centroid; unmatched faces are silently skipped."""
    faces = cntnr1.faces[0:0]
    for idx in range( len( cntnr2.faces ) ):
        try:
            match = cntnr1.faces.findAt( cntnr2.faces[idx].getCentroid(), printWarning=False )
        except:
            match = cntnr1.faces[0:0]
        faces += match
    return faces
#----------------------------------------
def checkFaceList( cntnr, chkFaces, idx, xyz, r ):
    """Return 1 if every edge of chkFaces[idx] passes checkEdge() for the
    plane xyz == r, else 0."""
    for edgeId in chkFaces[idx].getEdges():
        if checkEdge( cntnr, edgeId, xyz, r ) == 0:
            return 0
    return 1
#----------------------------------------
def getFacesList( cntnr, chkFaces, xyz, r ):
    """Collect the members of *chkFaces* lying in the plane xyz == r as a FaceArray."""
    found = cntnr.faces[0:0]
    for idx in range( len( chkFaces ) ):
        if checkFaceList( cntnr, chkFaces, idx, xyz, r ) == 1:
            found += cntnr.faces.findAt( chkFaces[idx].getCentroid(), printWarning=False )
    return found
|
# -*- coding: utf-8 -*-
DEVELOPER_MAIL = "shinichiro.su@gmail.com"
import os
# import pickle
from google.appengine.ext.webapp import template
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from django.utils import simplejson as json
import logging
import inspect
#logging.debug(inspect.currentframe().f_lineno)
from model import *
# from google.appengine.api import mail
#
# decorators
#
def login_required(function):
    """Decorator for RequestHandler methods: redirect anonymous users to the
    App Engine login page.

    The wrapped handler only runs for an authenticated user; the handler's
    `user` attribute is populated before the call.
    """
    def _login_required(arg):
        user = users.get_current_user()
        if not user:
            arg.redirect(users.create_login_url(arg.request.uri))
            # Bug fix: previously the handler body still ran after the
            # redirect header was issued; stop here for anonymous users.
            return None
        arg.user = user
        return function(arg)
    return _login_required
#
# RequestHandler
#
class SsRequestHandler(webapp.RequestHandler):
    """Common base class for this app's request handlers (extension point
    for shared behavior; currently empty)."""
    pass
class IndexAction(SsRequestHandler):
    """Serve the main page (login required)."""

    @login_required
    def get(self):
        # Render the static index template located next to this module.
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, {}))
class SaveItemAction(SsRequestHandler):
    """Render the save form (GET) and persist posted items (POST)."""

    @login_required
    def get(self):
        # path = os.path.join(os.path.dirname(__file__), 'index.html')
        path = os.path.join(os.path.dirname(__file__), 'save.html')
        self.response.out.write(template.render(path, {}))

    @login_required
    def post(self):
        # SECURITY NOTE(review): eval() of raw POST data executes arbitrary
        # client-supplied Python -- this should be json.loads() instead.
        params = self.request.POST.items()
        item_list = eval(params[0][0])
        # logging.info(item_list)
        for i in item_list:
            # Skip entries missing any required field.
            if not i.get('id'): continue
            if not i.get('user'): continue
            if not i.get('name'): continue
            item_id = int(i['id'])
            items = Item.get_by_item_id(item_id, i['user'])
            if items:
                item = items[0]
                # NOTE(review): existing items are not updated -- only a
                # redirect is issued, and the loop keeps running afterwards.
                self.redirect('/')
                # item = items[0]
                # logging.info(item.__class__)
                # item(
                #    name = unicode(i['name'], 'utf-8', 'replace'),
                #    attr = unicode(i['attr'], 'utf-8', 'replace'),
                # )
            else:
                item = Item(
                    user = i['user'],
                    item_id = item_id,
                    name = unicode(i['name'], 'utf-8', 'replace'),
                    attr = unicode(i['attr'], 'utf-8', 'replace'),
                )
                item.put()
        self.redirect('/')
class SaveRecordAction(SsRequestHandler):
    """Persist posted workout records (POST only)."""

    @login_required
    def post(self):
        # SECURITY NOTE(review): eval() of raw POST data executes arbitrary
        # client-supplied Python -- this should be json.loads() instead.
        params = self.request.POST.items()
        logging.info(params)
        record_list = eval(params[0][0])
        logging.info(record_list)
        # NOTE(review): this early redirect looks like a debug leftover; the
        # loop below still runs afterwards and redirects again at the end.
        self.redirect('/')
        for i in record_list:
            # Skip entries missing any required field.
            if not i.get('id'): continue
            if not i.get('user'): continue
            if not i.get('item_id'): continue
            record_id = int(i['id'])
            records = Record.get_by_record_id(record_id, i['user'])
            if records:
                record = records[0]
                # NOTE(review): existing records are not updated -- only a
                # redirect is issued.
                self.redirect('/')
                # record = records[0]
                # logging.info(record.__class__)
                # record(
                #    name = unicode(i['name'], 'utf-8', 'replace'),
                #    attr = unicode(i['attr'], 'utf-8', 'replace'),
                # )
            else:
                record = Record(
                    user = i['user'],
                    record_id = record_id,
                    item_id = int(i['item_id']),
                    value = int(i['value']),
                )
                record.put()
        self.redirect('/')
class UserInfoAction(SsRequestHandler):
    """Echo the authenticated user object (debug/info endpoint)."""

    @login_required
    def get(self):
        # self.user is populated by the login_required decorator.
        self.response.out.write(self.user)
# class DebugAction(SsRequestHandler):
# @login_required
# def get(self):
# path = os.path.join(os.path.dirname(__file__), 'debug.html')
# self.response.out.write(template.render(path, {}))
# class TruncateAction(SsRequestHandler):
# @login_required
# def get(self):
# items = Item.all().fetch(100)
# for i in items:
# i.delete()
# trainnings = Trainning.all().fetch(100)
# for t in trainnings:
# t.delete()
# self.redirect('/config')
# class TestAction(SsRequestHandler):
# def get(self):
# path = os.path.join(os.path.dirname(__file__), 'test.html')
# self.response.out.write(template.render(path, {}))
# URL routing table; retired handlers are kept commented out for reference.
application = webapp.WSGIApplication(
    [('/', IndexAction),
     ('/save_item', SaveItemAction),
     ('/save_record', SaveRecordAction),
     # ('/record', RecordTrainningAction),
     # ('/list', ListTrainningAction),
     # ('/view', ViewTrainningAction),
     # ('/config', SetConfigAction),
     # ('/add_item', AddItemAction),
     ('/user_info', UserInfoAction),
     # ('/debug', DebugAction),
     # ('/truncate', TruncateAction),
     # ('/test', TestAction),
     ],
    debug=True)

def main():
    """Run the WSGI application under the App Engine CGI runner."""
    run_wsgi_app(application)

# Script entry point for the App Engine python25 runtime.
if __name__ == "__main__":
    main()
|
"""High level imports for this and that"""
from __future__ import print_function
from datetime import datetime, date
import calendar
from pytz import timezone
from api import alpha_vantage as av
from api import ticker_symbol as ts
# --------------- Helpers that build all of the responses ----------------------
def build_speechlet_response(title, output, reprompt_text, should_end_session):
    """Assemble the Alexa speechlet payload: plain-text speech, a simple
    card mirroring the speech, a reprompt, and the end-session flag."""
    speech = {'type': 'PlainText', 'text': output}
    card = {'type': 'Simple', 'title': title, 'content': output}
    reprompt = {'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}}
    return {
        'outputSpeech': speech,
        'card': card,
        'reprompt': reprompt,
        'shouldEndSession': should_end_session,
    }
def build_response(session_attributes, speechlet_response):
    """Wrap a speechlet response in the versioned envelope Alexa expects."""
    envelope = {'version': '1.0'}
    envelope['sessionAttributes'] = session_attributes
    envelope['response'] = speechlet_response
    return envelope
# --------------- Functions that control the skill's behavior ------------------
def get_welcome_response():
    """Build the greeting response returned when the skill is launched."""
    return builder(
        {},
        'Welcome',
        'Hi, welcome to Stock Buddy. If you need help, just ask.',
        'For help, please say: help.',
        False)
def handle_session_end_request():
    """Build the goodbye response and end the session."""
    return builder(
        {},
        'Session Ended',
        'Thank you for using Stock Buddy. Have a good one!',
        None,
        True)
# --------------- Intents ---------------
def handle_stock_portfolio(intent, session):
    """Returns the stock symbol and add it into list.

    Not implemented yet: always returns None.
    """
    # TODO: later for more features
    return None
def handle_portfolio_contents(intent, session):
    """Returns the contents of your portfolio.

    Not implemented yet: always returns None.
    """
    # TODO: later for more features
    return None
def handle_stock_info(intent, session):
    """Returns the stock information.

    Branches on market state: open weekday -> intraday quote; closed
    weekday / weekend -> latest daily close.  The company name comes from
    the 'company' intent slot; when no slot value is present none of the
    branches fire and the speech output stays empty.
    """
    session_attributes = session.get('attributes', {})
    speech_output = ''
    reprompt_text = 'Is there anything else you would like to know? If not, say stop.'
    card_title = 'Stock Information'
    print('stock info intent')
    if weekend_checker() is False and time_checker() is True and 'value' in intent['slots']['company']:
        # Market open: use intraday data.
        print('not weekend and after opening')
        company = intent['slots']['company']['value']
        ticker_symbol = ts.get_symbol(ts.filter_tags(company))
        latest_data = av.daily_intraday_stock(ticker_symbol)
        speech_output = av.open_format_intraday(latest_data)
    elif weekend_checker() is False and time_checker() is False and 'value' in intent['slots']['company']:
        # Weekday but market closed: fall back to the latest daily data.
        print('not weekend and before opening')
        company = intent['slots']['company']['value']
        speech_output = f'The stock market is currently closed.\
 However, I can provide the latest information on {company}.'
        ticker_symbol = ts.get_symbol(ts.filter_tags(company))
        latest_data = av.daily_single_stock(ticker_symbol)
        if return_latest(ticker_symbol):
            speech_output = ' '.join([speech_output, av.closed_format_singles(latest_data, return_latest(ticker_symbol))])
        else:
            speech_output = 'I could not find any stock information about that company.'
    elif weekend_checker() is True and 'value' in intent['slots']['company']:
        # Weekend: markets closed, same daily-data fallback.
        print('is weekend')
        company = intent['slots']['company']['value']
        speech_output = f'Stocks are not being traded on the weekends. \
However, I can provide the latest information on {company}.'
        ticker_symbol = ts.get_symbol(ts.filter_tags(company))
        latest_data = av.daily_single_stock(ticker_symbol)
        if return_latest(ticker_symbol):
            speech_output = ' '.join([speech_output, av.closed_format_singles(latest_data, return_latest(ticker_symbol))])
        else:
            speech_output = 'I could not find any stock information about that company.'
    # NOTE(review): should_end_session is always True here, so the reprompt
    # text is never spoken -- confirm that is intended.
    return builder(session_attributes, card_title, speech_output, reprompt_text, True)
def handle_help(intent, session):
    """Returns the options available within the skill"""
    print('help intent')
    attrs = session.get('attributes', {})
    # Same text the original assembled piecewise with ' '.join calls.
    speech = ('You can do the following: \nLook up a publicly traded company'
              ' stock info. For example, stocks for Amazon.')
    return builder(
        attrs,
        'Help Menu',
        speech,
        'Is there anything else you would like to know? If not, say stop.',
        False)
def handle_fallback(intent, session):
    """Handles all the bad requests to the Skill"""
    attrs = session.get('attributes', {})
    msg = 'Sorry, Stock Buddy cannot help with that. For what I can do, please say, help.'
    return builder(attrs, 'Request Error', msg, msg, False)
# --------------- Helper Functions ---------------
def weekend_checker():
    """Return True when the current US/Eastern day is Saturday or Sunday."""
    today = calendar.day_name[datetime.now(timezone('US/Eastern')).weekday()]
    return today in ('Saturday', 'Sunday')
def time_checker():
    """Checks to see if the stock market is open (strictly between 09:30 and
    16:00 US/Eastern, compared as an HHMM integer)."""
    now = datetime.now(timezone('US/Eastern'))
    hhmm = int(now.strftime('%H%M'))
    return 930 < hhmm < 1600
def return_latest(symbol):
    """Returns the first date key in the stock's daily time-series, or None
    when the API response has no such series."""
    data = av.daily_single_stock(symbol)
    if 'Time Series (Daily)' not in data:
        return None
    return list(data['Time Series (Daily)'])[0]
def builder(session, card, out, reprompt, end):
    """helper function to return response"""
    speechlet = build_speechlet_response(card, out, reprompt, end)
    return build_response(session, speechlet)
# --------------- Events ---------------
def on_session_started(session_started_request, session):
    """ Called when the session starts """
    req_id = session_started_request['requestId']
    print('on_session_started requestId = {}, sessionId = {}'.format(req_id, session['sessionId']))
def on_session_ended(session_ended_request, session):
    """
    Called when the user ends their session
    It is not called when skill returns should_end_session = true
    """
    req_id = session_ended_request['requestId']
    print('on_session_ended requestId = {}, sessionId = {}'.format(req_id, session['sessionId']))
def on_launch(launch_request, session):
    """
    Called when the user launches the skill without
    specifying what they want
    """
    req_id = launch_request['requestId']
    print('on_launch requestId = {}, sessionId = {}'.format(req_id, session['sessionId']))
    return get_welcome_response()
def on_intent(intent_request, session):
    """Called when the user specifies an intent for this skill"""
    print('on_intent requestId = {}, sessionId = {}'.format(intent_request['requestId'], session['sessionId']))
    intent = intent_request['intent']
    name = intent['name']
    # Cancel/Stop take no intent payload.
    if name in ('AMAZON.CancelIntent', 'AMAZON.StopIntent'):
        return handle_session_end_request()
    dispatch = {
        'StockInfo': handle_stock_info,
        'StockPortfolio': handle_stock_portfolio,
        'AMAZON.HelpIntent': handle_help,
        'AMAZON.FallbackIntent': handle_fallback,
    }
    if name not in dispatch:
        raise ValueError('Invalid Intent')
    return dispatch[name](intent, session)
# --------------- Main handler ------------------
def lambda_handler(event, context):
    """ Route the incoming request based on type (LaunchRequest, IntentRequest,
    etc.) The JSON body of the request is provided in the event parameter.
    """
    print('event.session.application.applicationId=' + event['session']['application']['applicationId'])
    request, sess = event['request'], event['session']
    if sess['new']:
        on_session_started({'requestId': request['requestId']}, sess)
    kind = request['type']
    if kind == 'LaunchRequest':
        return on_launch(request, sess)
    if kind == 'IntentRequest':
        return on_intent(request, sess)
    if kind == 'SessionEndedRequest':
        return on_session_ended(request, sess)
|
# Brute-force an Android lock pattern from its gesture.key SHA-1 hash.
# Obtain the hash file with: adb pull /data/system/gesture.key
import sys
import hashlib
import itertools

MIN_LENGTH = 3
MAX_LENGTH = 9

with open(sys.argv[1], 'rb') as f:
    target_hash = f.read().hex()
print('Target hash', target_hash)

# A pattern is an ordered sequence of distinct dots 0-8, so use
# permutations (NOT combinations, which ignore order).
answer = None
for pattern_length in range(MIN_LENGTH, MAX_LENGTH + 1):  # 3 - 9
    for guess in itertools.permutations(range(0, 9), pattern_length):
        if hashlib.sha1(bytes(guess)).hexdigest() == target_hash:
            answer = guess
            break
    if answer is not None:
        break

# Bug fix: previously a non-matching hash left answer = None and the grid
# printing below crashed with TypeError; fail with a clear message instead.
if answer is None:
    sys.exit('No matching pattern found for hash {}'.format(target_hash))

# Print the pattern as a 3x3 grid of visit orders.
print('Answer', answer)
for y in range(3):
    for x in range(3):
        position = 3 * y + x
        if position in answer:
            print(f'| {answer.index(position)} |', end='')
        else:
            print('| |', end='')
    print('\n===============')
|
from kivy.app import App
from kivy.uix.widget import Widget
class ImageWidget(Widget):
    """Root widget of the app (currently an empty placeholder)."""

    def __init__(self, **kwargs):
        # No extra initialization yet; kept as an extension point.
        super().__init__(**kwargs)
class ImageApp(App):
    """Kivy application whose root widget is an ImageWidget."""

    def build(self):
        # Return the root widget for the application window.
        return ImageWidget()
# Script entry point: start the Kivy event loop.
if __name__=='__main__':
    ImageApp().run()
# Generated by Django 3.1.7 on 2021-03-13 12:39
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated migration: add active/created/profile_img/user_name
    columns to tb_content (one-off defaults backfill existing rows)."""

    dependencies = [
        ('testapp', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='tb_content',
            name='active',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='tb_content',
            name='created',
            # default backfills existing rows; preserve_default=False drops
            # it from the field definition afterwards.
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='tb_content',
            name='profile_img',
            field=models.ImageField(default='', upload_to='pics'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='tb_content',
            name='user_name',
            field=models.CharField(default='', max_length=1000),
            preserve_default=False,
        ),
    ]
|
from __future__ import print_function
from scipy.misc import imsave
import numpy as np
import time
from keras.applications import vgg16
from keras import backend as K
from keras.utils import plot_model
import os
import matplotlib.pyplot as plt
from keras.layers import Conv2D
from utils import *
class GradientVisualizer(object):
    """Visualize what Conv2D filters respond to via gradient ascent.

    For each selected filter, starts from a noisy gray image and ascends
    the gradient of the filter's mean activation w.r.t. the input image;
    the resulting images are written under ./static/viz/.
    """

    def __init__(self, model):
        # Keras model to inspect.
        self.model = model
        # dimensions of the generated pictures for each filter.
        self.img_width = 224
        self.img_height = 224

    def process_layer(self, layer_num):
        """Run gradient ascent for filters of layer *layer_num*.

        Returns an array of [image, loss_value, layer_name, filter_index]
        rows; filters whose loss collapsed to 0 get a zero image.
        """
        # Select the layer
        layer_output = self.model.layers[layer_num].output
        # Find number of filters; channel axis depends on the backend layout.
        if K.image_data_format() == 'channels_first':
            filter_num = layer_output.get_shape().as_list()[1]
        else:
            filter_num = layer_output.get_shape().as_list()[3]
        print("The number of filters in the layer: ", filter_num, sep="")
        # this is the placeholder for the input images
        input_img = self.model.input
        # To be returned
        output = []
        # for filter_index in range(filter_num):
        # NOTE(review): truncated to the first two filters -- looks like a
        # debug leftover; restore the loop above for the whole layer.
        for filter_index in range(2):
            print('Processing filter %d' % filter_index)
            start_time = time.time()
            # we build a loss function that maximizes the activation
            # of the nth filter of the layer considered
            if K.image_data_format() == 'channels_first':
                loss = K.mean(layer_output[:, filter_index, :, :])
            else:
                loss = K.mean(layer_output[:, :, :, filter_index])
            # we compute the gradient of the input picture wrt this loss
            grads = K.gradients(loss, input_img)[0]
            # normalization trick: we normalize the gradient
            grads = normalize(grads)
            # this function returns the loss and grads given the input picture
            iterate = K.function([input_img], [loss, grads])
            # step size for gradient ascent
            step = 1.
            # we start from a gray image with some random noise
            if K.image_data_format() == 'channels_first':
                input_img_data = np.random.random(
                    (1, 3, self.img_width, self.img_height))
            else:
                input_img_data = np.random.random(
                    (1, self.img_width, self.img_height, 3))
            input_img_data = (input_img_data - 0.5) * 20 + 128
            # we run gradient ascent for 20 steps
            for i in range(20):
                loss_value, grads_value = iterate([input_img_data])
                input_img_data += grads_value * step
                print('Current loss value:', loss_value)
                if loss_value <= 0.:
                    # some filters get stuck to 0, we can skip them
                    break
            # decode the resulting input image
            if loss_value > 0:
                img = deprocess_image(input_img_data[0])
                output.append(
                    [img, loss_value, self.model.layers[layer_num].name, filter_index])
            else:
                # NOTE(review): `img` is only bound by a previous iteration's
                # success branch -- if the very first filter lands here,
                # np.zeros_like(img) raises NameError.
                output.append(
                    [np.zeros_like(img), loss_value, self.model.layers[layer_num].name, filter_index])
            end_time = time.time()
            print('Filter %d processed in %ds' %
                  (filter_index, end_time - start_time))
        return np.asarray(output)

    def save_images(self, output):
        """Write each generated image to ./static/viz/<layer>_<filter>.png
        and return the non-image columns (loss, layer name, filter index).

        NOTE(review): scipy.misc.imsave (imported at module top) was removed
        in SciPy 1.2 -- imageio.imwrite is the modern replacement.
        """
        for i in range(len(output)):
            imsave('./static/viz/{}_{}.png'.format(
                output[i][2], output[i][3]), output[i][0])
        return output[:, 1:]

    def process_net(self):
        """Visualize the Conv2D layers among the first few model layers and
        save their filter images; returns the collected metadata rows.

        NOTE(review): range(3) truncates the scan -- restore the commented
        loop to process the whole network.
        """
        results = []
        for layer_num in range(3):
        # for layer_num in range(len(self.model.layers)):
            if isinstance(self.model.layers[layer_num], Conv2D):
                output = self.process_layer(layer_num )
                # Show image
                results.append(self.save_images(output))
        return np.asarray(results)
|
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Various tests for querying the library database.
"""
from contextlib import contextmanager
from functools import partial
import os
import sys
import unittest
from test import _common
from test import helper
import beets.library
from beets import dbcore
from beets.dbcore import types
from beets.dbcore.query import (NoneQuery, ParsingError,
InvalidQueryArgumentValueError)
from beets.library import Library, Item
from beets import util
from beets.util import syspath
# Because the absolute path begins with something like C:, we
# can't disambiguate it from an ordinary query.
# Skip-reason message used by Windows-only path-query tests.
WIN32_NO_IMPLICIT_PATHS = 'Implicit paths are not supported on Windows'
class TestHelper(helper.TestHelper):
    """Result-membership assertions shared by the query tests."""

    def assertInResult(self, item, results):  # noqa
        self.assertIn(item.id, [obj.id for obj in results])

    def assertNotInResult(self, item, results):  # noqa
        self.assertNotIn(item.id, [obj.id for obj in results])
class AnyFieldQueryTest(_common.LibTestCase):
    """Exercise dbcore.query.AnyFieldQuery field restriction and equality."""

    def test_no_restriction(self):
        # Searching across every item field finds the fixture title.
        query = dbcore.query.AnyFieldQuery(
            'title', beets.library.Item._fields.keys(),
            dbcore.query.SubstringQuery)
        self.assertEqual(self.lib.items(query).get().title, 'the title')

    def test_restriction_completeness(self):
        # Restricting to the title field still matches.
        query = dbcore.query.AnyFieldQuery(
            'title', ['title'], dbcore.query.SubstringQuery)
        self.assertEqual(self.lib.items(query).get().title, 'the title')

    def test_restriction_soundness(self):
        # Restricting to the wrong field must not match.
        query = dbcore.query.AnyFieldQuery(
            'title', ['artist'], dbcore.query.SubstringQuery)
        self.assertEqual(self.lib.items(query).get(), None)

    def test_eq(self):
        def make():
            return dbcore.query.AnyFieldQuery(
                'foo', ['bar'], dbcore.query.SubstringQuery)
        first, second = make(), make()
        self.assertEqual(first, second)
        # Mutating the query class breaks equality.
        second.query_class = None
        self.assertNotEqual(first, second)
class AssertsMixin:
    """Set-comparison helpers for item/album query results."""

    def assert_items_matched(self, results, titles):
        got = {item.title for item in results}
        self.assertEqual(got, set(titles))

    def assert_albums_matched(self, results, albums):
        got = {album.album for album in results}
        self.assertEqual(got, set(albums))
# A test case class providing a library with some dummy data and some
# assertions involving that data.
class DummyDataTestCase(_common.TestCase, AssertsMixin):
    """Fixture: an in-memory library with three items, the first two grouped
    into an album.  Field values are chosen so tests can discriminate on
    title, artist, album, year, comp and genre (case variants of 'rock')."""

    def setUp(self):
        super().setUp()
        self.lib = beets.library.Library(':memory:')
        items = [_common.item() for _ in range(3)]
        # Item 1: lowercase genre, compilation member.
        items[0].title = 'foo bar'
        items[0].artist = 'one'
        items[0].album = 'baz'
        items[0].year = 2001
        items[0].comp = True
        items[0].genre = 'rock'
        # Item 2: capitalized genre, same album as item 1.
        items[1].title = 'baz qux'
        items[1].artist = 'two'
        items[1].album = 'baz'
        items[1].year = 2002
        items[1].comp = True
        items[1].genre = 'Rock'
        # Item 3: multi-word genre, not a compilation.
        items[2].title = 'beets 4 eva'
        items[2].artist = 'three'
        items[2].album = 'foo'
        items[2].year = 2003
        items[2].comp = False
        items[2].genre = 'Hard Rock'
        for item in items:
            self.lib.add(item)
        self.album = self.lib.add_album(items[:2])

    def assert_items_matched_all(self, results):
        """Assert that *results* contains exactly the three fixture items."""
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
            'beets 4 eva',
        ])
class GetTest(DummyDataTestCase):
    """String-query retrieval against the dummy library.

    Covers unkeyed and keyed terms, exact matches (``=``), case-insensitive
    exact matches (``=~``), regexps (``:``), numeric ranges, and boolean
    pseudo-fields (``singleton``, ``comp``).
    """

    def test_get_empty(self):
        # An empty query string matches every item.
        q = ''
        results = self.lib.items(q)
        self.assert_items_matched_all(results)

    def test_get_none(self):
        # A None query likewise matches every item.
        q = None
        results = self.lib.items(q)
        self.assert_items_matched_all(results)

    def test_get_one_keyed_term(self):
        q = 'title:qux'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_get_one_keyed_exact(self):
        # '=' matches are exact and case-sensitive.
        q = 'genre:=rock'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar'])
        q = 'genre:=Rock'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])
        q = 'genre:="Hard Rock"'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_get_one_keyed_exact_nocase(self):
        # '=~' matches are exact but case-insensitive.
        q = 'genre:=~"hard rock"'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_get_one_keyed_regexp(self):
        q = 'artist::t.+r'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_get_one_unkeyed_term(self):
        q = 'three'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_get_one_unkeyed_exact(self):
        q = '=rock'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar'])

    def test_get_one_unkeyed_exact_nocase(self):
        q = '=~"hard rock"'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_get_one_unkeyed_regexp(self):
        q = ':x$'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_get_no_matches(self):
        q = 'popebear'
        results = self.lib.items(q)
        self.assert_items_matched(results, [])

    def test_invalid_key(self):
        q = 'pope:bear'
        results = self.lib.items(q)
        # Matches nothing since the flexattr is not present on the
        # objects.
        self.assert_items_matched(results, [])

    def test_get_no_matches_exact(self):
        # Exact match is case-sensitive, so lowercase does not match.
        q = 'genre:="hard rock"'
        results = self.lib.items(q)
        self.assert_items_matched(results, [])

    def test_term_case_insensitive(self):
        q = 'oNE'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar'])

    def test_regexp_case_sensitive(self):
        # Unlike plain terms, regexp queries are case-sensitive.
        q = ':oNE'
        results = self.lib.items(q)
        self.assert_items_matched(results, [])
        q = ':one'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar'])

    def test_term_case_insensitive_with_key(self):
        q = 'artist:thrEE'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_key_case_insensitive(self):
        # The field name itself is also matched case-insensitively.
        q = 'ArTiST:three'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_keyed_matches_exact_nocase(self):
        q = 'genre:=~rock'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
        ])

    def test_unkeyed_term_matches_multiple_columns(self):
        # 'baz' appears in the title of one item and the album of another.
        q = 'baz'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
        ])

    def test_unkeyed_regexp_matches_multiple_columns(self):
        q = ':z$'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
        ])

    def test_keyed_term_matches_only_one_column(self):
        q = 'title:baz'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_keyed_regexp_matches_only_one_column(self):
        q = 'title::baz'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'baz qux',
        ])

    def test_multiple_terms_narrow_search(self):
        # Space-separated terms are ANDed together.
        q = 'qux baz'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'baz qux',
        ])

    def test_multiple_regexps_narrow_search(self):
        q = ':baz :qux'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_mixed_terms_regexps_narrow_search(self):
        q = ':baz qux'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_single_year(self):
        q = 'year:2001'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar'])

    def test_year_range(self):
        # '..' denotes an inclusive numeric range.
        q = 'year:2000..2002'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
        ])

    def test_singleton_true(self):
        # The third fixture item is the only one not attached to an album.
        q = 'singleton:true'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_singleton_1(self):
        q = 'singleton:1'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_singleton_false(self):
        q = 'singleton:false'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar', 'baz qux'])

    def test_singleton_0(self):
        q = 'singleton:0'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar', 'baz qux'])

    def test_compilation_true(self):
        q = 'comp:true'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar', 'baz qux'])

    def test_compilation_false(self):
        q = 'comp:false'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['beets 4 eva'])

    def test_unknown_field_name_no_results(self):
        q = 'xyzzy:nonsense'
        results = self.lib.items(q)
        titles = [i.title for i in results]
        self.assertEqual(titles, [])

    def test_unknown_field_name_no_results_in_album_query(self):
        q = 'xyzzy:nonsense'
        results = self.lib.albums(q)
        names = [a.album for a in results]
        self.assertEqual(names, [])

    def test_item_field_name_matches_nothing_in_album_query(self):
        # 'format' is an item-level field, so it matches no albums.
        q = 'format:nonsense'
        results = self.lib.albums(q)
        names = [a.album for a in results]
        self.assertEqual(names, [])

    def test_unicode_query(self):
        item = self.lib.items().get()
        item.title = 'caf\xe9'
        item.store()
        q = 'title:caf\xe9'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['caf\xe9'])

    def test_numeric_search_positive(self):
        q = dbcore.query.NumericQuery('year', '2001')
        results = self.lib.items(q)
        self.assertTrue(results)

    def test_numeric_search_negative(self):
        q = dbcore.query.NumericQuery('year', '1999')
        results = self.lib.items(q)
        self.assertFalse(results)

    def test_album_field_fallback(self):
        # Item queries fall back to album-level flexible attributes.
        self.album['albumflex'] = 'foo'
        self.album.store()
        q = 'albumflex:foo'
        results = self.lib.items(q)
        self.assert_items_matched(results, [
            'foo bar',
            'baz qux',
        ])

    def test_invalid_query(self):
        with self.assertRaises(InvalidQueryArgumentValueError) as raised:
            dbcore.query.NumericQuery('year', '199a')
        self.assertIn('not an int', str(raised.exception))
        with self.assertRaises(InvalidQueryArgumentValueError) as raised:
            dbcore.query.RegexpQuery('year', '199(')
        exception_text = str(raised.exception)
        self.assertIn('not a regular expression', exception_text)
        self.assertIn('unterminated subpattern', exception_text)
        self.assertIsInstance(raised.exception, ParsingError)
class MatchTest(_common.TestCase):
    """Direct `Query.match()` checks against a single in-memory item
    (no library involved)."""

    def setUp(self):
        super().setUp()
        self.item = _common.item()

    def test_regex_match_positive(self):
        q = dbcore.query.RegexpQuery('album', '^the album$')
        self.assertTrue(q.match(self.item))

    def test_regex_match_negative(self):
        q = dbcore.query.RegexpQuery('album', '^album$')
        self.assertFalse(q.match(self.item))

    def test_regex_match_non_string_value(self):
        # Non-string field values are converted before regexp matching.
        q = dbcore.query.RegexpQuery('disc', '^6$')
        self.assertTrue(q.match(self.item))

    def test_substring_match_positive(self):
        q = dbcore.query.SubstringQuery('album', 'album')
        self.assertTrue(q.match(self.item))

    def test_substring_match_negative(self):
        q = dbcore.query.SubstringQuery('album', 'ablum')
        self.assertFalse(q.match(self.item))

    def test_substring_match_non_string_value(self):
        q = dbcore.query.SubstringQuery('disc', '6')
        self.assertTrue(q.match(self.item))

    def test_exact_match_nocase_positive(self):
        # StringQuery is exact but case-insensitive.
        q = dbcore.query.StringQuery('genre', 'the genre')
        self.assertTrue(q.match(self.item))
        q = dbcore.query.StringQuery('genre', 'THE GENRE')
        self.assertTrue(q.match(self.item))

    def test_exact_match_nocase_negative(self):
        q = dbcore.query.StringQuery('genre', 'genre')
        self.assertFalse(q.match(self.item))

    def test_year_match_positive(self):
        q = dbcore.query.NumericQuery('year', '1')
        self.assertTrue(q.match(self.item))

    def test_year_match_negative(self):
        q = dbcore.query.NumericQuery('year', '10')
        self.assertFalse(q.match(self.item))

    def test_bitrate_range_positive(self):
        q = dbcore.query.NumericQuery('bitrate', '100000..200000')
        self.assertTrue(q.match(self.item))

    def test_bitrate_range_negative(self):
        q = dbcore.query.NumericQuery('bitrate', '200000..300000')
        self.assertFalse(q.match(self.item))

    def test_open_range(self):
        # Construction alone must not raise for a half-open range.
        dbcore.query.NumericQuery('bitrate', '100000..')

    def test_eq(self):
        q1 = dbcore.query.MatchQuery('foo', 'bar')
        q2 = dbcore.query.MatchQuery('foo', 'bar')
        q3 = dbcore.query.MatchQuery('foo', 'baz')
        q4 = dbcore.query.StringFieldQuery('foo', 'bar')
        self.assertEqual(q1, q2)
        self.assertNotEqual(q1, q3)
        # Equal field/pattern but different query class: not equal.
        self.assertNotEqual(q1, q4)
        self.assertNotEqual(q3, q4)
class PathQueryTest(_common.LibTestCase, TestHelper, AssertsMixin):
    """Tests for `path:` queries and implicit path-query detection."""

    def setUp(self):
        super().setUp()
        # This is the item we'll try to match.
        self.i.path = util.normpath('/a/b/c.mp3')
        self.i.title = 'path item'
        self.i.album = 'path album'
        self.i.store()
        self.lib.add_album([self.i])
        # A second item for testing exclusion.
        i2 = _common.item()
        i2.path = util.normpath('/x/y/z.mp3')
        i2.title = 'another item'
        i2.album = 'another album'
        self.lib.add(i2)
        self.lib.add_album([i2])

    @contextmanager
    def force_implicit_query_detection(self):
        # Unadorned path queries with path separators in them are considered
        # path queries only when the path in question actually exists. So we
        # mock the existence check to return true.
        #
        # FIX: the flag is class-level state on PathQuery; without
        # try/finally an exception raised in the with-body would leak
        # force_implicit_query_detection = True into all later tests.
        beets.library.PathQuery.force_implicit_query_detection = True
        try:
            yield
        finally:
            beets.library.PathQuery.force_implicit_query_detection = False

    def test_path_exact_match(self):
        q = 'path:/a/b/c.mp3'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['path item'])
        # A file path matches no albums.
        results = self.lib.albums(q)
        self.assert_albums_matched(results, [])

    # FIXME: fails on windows
    @unittest.skipIf(sys.platform == 'win32', 'win32')
    def test_parent_directory_no_slash(self):
        q = 'path:/a'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['path item'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['path album'])

    # FIXME: fails on windows
    @unittest.skipIf(sys.platform == 'win32', 'win32')
    def test_parent_directory_with_slash(self):
        q = 'path:/a/'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['path item'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['path album'])

    def test_no_match(self):
        q = 'path:/xyzzy/'
        results = self.lib.items(q)
        self.assert_items_matched(results, [])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, [])

    def test_fragment_no_match(self):
        # A non-prefix directory fragment must not match.
        q = 'path:/b/'
        results = self.lib.items(q)
        self.assert_items_matched(results, [])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, [])

    def test_nonnorm_path(self):
        # Queries are normalized, so '..' components are resolved.
        q = 'path:/x/../a/b'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['path item'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['path album'])

    @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
    def test_slashed_query_matches_path(self):
        with self.force_implicit_query_detection():
            q = '/a/b'
            results = self.lib.items(q)
            self.assert_items_matched(results, ['path item'])
            results = self.lib.albums(q)
            self.assert_albums_matched(results, ['path album'])

    @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
    def test_path_query_in_or_query(self):
        with self.force_implicit_query_detection():
            q = '/a/b , /a/b'
            results = self.lib.items(q)
            self.assert_items_matched(results, ['path item'])

    def test_non_slashed_does_not_match_path(self):
        with self.force_implicit_query_detection():
            q = 'c.mp3'
            results = self.lib.items(q)
            self.assert_items_matched(results, [])
            results = self.lib.albums(q)
            self.assert_albums_matched(results, [])

    def test_slashes_in_explicit_field_does_not_match_path(self):
        with self.force_implicit_query_detection():
            q = 'title:/a/b'
            results = self.lib.items(q)
            self.assert_items_matched(results, [])

    def test_path_item_regex(self):
        q = 'path::c\\.mp3$'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['path item'])

    def test_path_album_regex(self):
        q = 'path::b'
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['path album'])

    def test_escape_underscore(self):
        # '_' is a SQL LIKE wildcard and must be escaped in path queries.
        self.add_album(path=b'/a/_/title.mp3', title='with underscore',
                       album='album with underscore')
        q = 'path:/a/_'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['with underscore'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['album with underscore'])

    def test_escape_percent(self):
        # '%' is a SQL LIKE wildcard and must be escaped in path queries.
        self.add_album(path=b'/a/%/title.mp3', title='with percent',
                       album='album with percent')
        q = 'path:/a/%'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['with percent'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['album with percent'])

    def test_escape_backslash(self):
        self.add_album(path=br'/a/\x/title.mp3', title='with backslash',
                       album='album with backslash')
        q = 'path:/a/\\\\x'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['with backslash'])
        results = self.lib.albums(q)
        self.assert_albums_matched(results, ['album with backslash'])

    def test_case_sensitivity(self):
        self.add_album(path=b'/A/B/C2.mp3', title='caps path')
        makeq = partial(beets.library.PathQuery, 'path', '/A/B')
        results = self.lib.items(makeq(case_sensitive=True))
        self.assert_items_matched(results, ['caps path'])
        results = self.lib.items(makeq(case_sensitive=False))
        self.assert_items_matched(results, ['path item', 'caps path'])

    # FIXME: Also create a variant of this test for windows, which tests
    # both os.sep and os.altsep
    @unittest.skipIf(sys.platform == 'win32', 'win32')
    def test_path_sep_detection(self):
        is_path_query = beets.library.PathQuery.is_path_query
        with self.force_implicit_query_detection():
            self.assertTrue(is_path_query('/foo/bar'))
            self.assertTrue(is_path_query('foo/bar'))
            self.assertTrue(is_path_query('foo/'))
            self.assertFalse(is_path_query('foo'))
            self.assertTrue(is_path_query('foo/:bar'))
            self.assertFalse(is_path_query('foo:bar/'))
            self.assertFalse(is_path_query('foo:/bar'))

    # FIXME: shouldn't this also work on windows?
    @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
    def test_detect_absolute_path(self):
        """Test detection of implicit path queries based on whether or
        not the path actually exists, when using an absolute path query.

        Thus, don't use the `force_implicit_query_detection()`
        contextmanager which would disable the existence check.
        """
        is_path_query = beets.library.PathQuery.is_path_query
        path = self.touch(os.path.join(b'foo', b'bar'))
        self.assertTrue(os.path.isabs(util.syspath(path)))
        path_str = path.decode('utf-8')
        # The file itself.
        self.assertTrue(is_path_query(path_str))
        # The parent directory.
        parent = os.path.dirname(path_str)
        self.assertTrue(is_path_query(parent))
        # Some non-existent path.
        self.assertFalse(is_path_query(path_str + 'baz'))

    def test_detect_relative_path(self):
        """Test detection of implicit path queries based on whether or
        not the path actually exists, when using a relative path query.

        Thus, don't use the `force_implicit_query_detection()`
        contextmanager which would disable the existence check.
        """
        is_path_query = beets.library.PathQuery.is_path_query
        self.touch(os.path.join(b'foo', b'bar'))
        # Temporarily change directory so relative paths work.
        cur_dir = os.getcwd()
        try:
            os.chdir(syspath(self.temp_dir))
            self.assertTrue(is_path_query('foo/'))
            self.assertTrue(is_path_query('foo/bar'))
            self.assertTrue(is_path_query('foo/bar:tagada'))
            self.assertFalse(is_path_query('bar'))
        finally:
            os.chdir(cur_dir)
class IntQueryTest(unittest.TestCase, TestHelper):
    """Integer queries against fixed and flexible item fields."""

    def setUp(self):
        self.lib = Library(':memory:')

    def tearDown(self):
        # Drop any flexible-field type registrations a test installed.
        Item._types = {}

    def test_exact_value_match(self):
        target = self.add_item(bpm=120)
        found = self.lib.items('bpm:120').get()
        self.assertEqual(target.id, found.id)

    def test_range_match(self):
        target = self.add_item(bpm=120)
        self.add_item(bpm=130)
        found = self.lib.items('bpm:110..125')
        self.assertEqual(1, len(found))
        self.assertEqual(target.id, found.get().id)

    def test_flex_range_match(self):
        Item._types = {'myint': types.Integer()}
        target = self.add_item(myint=2)
        found = self.lib.items('myint:2').get()
        self.assertEqual(target.id, found.id)

    def test_flex_dont_match_missing(self):
        Item._types = {'myint': types.Integer()}
        self.add_item()
        found = self.lib.items('myint:2').get()
        self.assertIsNone(found)

    def test_no_substring_match(self):
        # Numeric queries are exact, not substring: '12' must not match 120.
        self.add_item(bpm=120)
        found = self.lib.items('bpm:12').get()
        self.assertIsNone(found)
class BoolQueryTest(unittest.TestCase, TestHelper):
    """Boolean queries against the fixed `comp` field and a flexible
    boolean attribute."""

    def setUp(self):
        self.lib = Library(':memory:')
        # Register a flexible boolean field for the duration of each test.
        Item._types = {'flexbool': types.Boolean()}

    def tearDown(self):
        Item._types = {}

    def test_parse_true(self):
        item_true = self.add_item(comp=True)
        item_false = self.add_item(comp=False)
        matched = self.lib.items('comp:true')
        self.assertInResult(item_true, matched)
        self.assertNotInResult(item_false, matched)

    def test_flex_parse_true(self):
        item_true = self.add_item(flexbool=True)
        item_false = self.add_item(flexbool=False)
        matched = self.lib.items('flexbool:true')
        self.assertInResult(item_true, matched)
        self.assertNotInResult(item_false, matched)

    def test_flex_parse_false(self):
        item_true = self.add_item(flexbool=True)
        item_false = self.add_item(flexbool=False)
        matched = self.lib.items('flexbool:false')
        self.assertInResult(item_false, matched)
        self.assertNotInResult(item_true, matched)

    def test_flex_parse_1(self):
        # '1' parses as true.
        item_true = self.add_item(flexbool=True)
        item_false = self.add_item(flexbool=False)
        matched = self.lib.items('flexbool:1')
        self.assertInResult(item_true, matched)
        self.assertNotInResult(item_false, matched)

    def test_flex_parse_0(self):
        # '0' parses as false.
        item_true = self.add_item(flexbool=True)
        item_false = self.add_item(flexbool=False)
        matched = self.lib.items('flexbool:0')
        self.assertInResult(item_false, matched)
        self.assertNotInResult(item_true, matched)

    def test_flex_parse_any_string(self):
        # TODO this should be the other way around
        item_true = self.add_item(flexbool=True)
        item_false = self.add_item(flexbool=False)
        matched = self.lib.items('flexbool:something')
        self.assertInResult(item_false, matched)
        self.assertNotInResult(item_true, matched)
class DefaultSearchFieldsTest(DummyDataTestCase):
    """Which fields unkeyed query terms are matched against by default."""

    def test_albums_matches_album(self):
        matched = list(self.lib.albums('baz'))
        self.assertEqual(len(matched), 1)

    def test_albums_matches_albumartist(self):
        matched = list(self.lib.albums(['album artist']))
        self.assertEqual(len(matched), 1)

    def test_items_matches_title(self):
        matched = self.lib.items('beets')
        self.assert_items_matched(matched, ['beets 4 eva'])

    def test_items_does_not_match_year(self):
        # Numeric fields are not part of the default search fields.
        matched = self.lib.items('2001')
        self.assert_items_matched(matched, [])
class NoneQueryTest(unittest.TestCase, TestHelper):
    """`NoneQuery` matching, in both fast (SQL) and slow (Python) modes."""

    def setUp(self):
        self.lib = Library(':memory:')

    def test_match_singletons(self):
        # Singletons have no album_id, so NoneQuery('album_id') finds them.
        singleton = self.add_item()
        album_item = self.add_album().items().get()
        matched = self.lib.items(NoneQuery('album_id'))
        self.assertInResult(singleton, matched)
        self.assertNotInResult(album_item, matched)

    def test_match_after_set_none(self):
        item = self.add_item(rg_track_gain=0)
        matched = self.lib.items(NoneQuery('rg_track_gain'))
        self.assertNotInResult(item, matched)
        item['rg_track_gain'] = None
        item.store()
        matched = self.lib.items(NoneQuery('rg_track_gain'))
        self.assertInResult(item, matched)

    def test_match_slow(self):
        # fast=False forces matching in Python rather than SQL.
        item = self.add_item()
        matched = self.lib.items(NoneQuery('rg_track_peak', fast=False))
        self.assertInResult(item, matched)

    def test_match_slow_after_set_none(self):
        item = self.add_item(rg_track_gain=0)
        matched = self.lib.items(NoneQuery('rg_track_gain', fast=False))
        self.assertNotInResult(item, matched)
        item['rg_track_gain'] = None
        item.store()
        matched = self.lib.items(NoneQuery('rg_track_gain', fast=False))
        self.assertInResult(item, matched)
class NotQueryMatchTest(_common.TestCase):
    """Test `query.NotQuery` matching against a single item, using the same
    cases and assertions as on `MatchTest`, plus assertion on the negated
    queries (ie. assertTrue(q) -> assertFalse(NotQuery(q))).
    """

    def setUp(self):
        super().setUp()
        self.item = _common.item()

    def test_regex_match_positive(self):
        q = dbcore.query.RegexpQuery('album', '^the album$')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_regex_match_negative(self):
        q = dbcore.query.RegexpQuery('album', '^album$')
        self.assertFalse(q.match(self.item))
        self.assertTrue(dbcore.query.NotQuery(q).match(self.item))

    def test_regex_match_non_string_value(self):
        q = dbcore.query.RegexpQuery('disc', '^6$')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_substring_match_positive(self):
        q = dbcore.query.SubstringQuery('album', 'album')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_substring_match_negative(self):
        q = dbcore.query.SubstringQuery('album', 'ablum')
        self.assertFalse(q.match(self.item))
        self.assertTrue(dbcore.query.NotQuery(q).match(self.item))

    def test_substring_match_non_string_value(self):
        q = dbcore.query.SubstringQuery('disc', '6')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_year_match_positive(self):
        q = dbcore.query.NumericQuery('year', '1')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_year_match_negative(self):
        q = dbcore.query.NumericQuery('year', '10')
        self.assertFalse(q.match(self.item))
        self.assertTrue(dbcore.query.NotQuery(q).match(self.item))

    def test_bitrate_range_positive(self):
        q = dbcore.query.NumericQuery('bitrate', '100000..200000')
        self.assertTrue(q.match(self.item))
        self.assertFalse(dbcore.query.NotQuery(q).match(self.item))

    def test_bitrate_range_negative(self):
        q = dbcore.query.NumericQuery('bitrate', '200000..300000')
        self.assertFalse(q.match(self.item))
        self.assertTrue(dbcore.query.NotQuery(q).match(self.item))

    def test_open_range(self):
        # Wrapping a half-open range in NotQuery must not raise.
        q = dbcore.query.NumericQuery('bitrate', '100000..')
        dbcore.query.NotQuery(q)
class NotQueryTest(DummyDataTestCase):
    """Test `query.NotQuery` against the dummy data:
    - `test_type_xxx`: tests for the negation of a particular XxxQuery class.
    - `test_get_yyy`: tests on query strings (similar to `GetTest`)
    """

    def assertNegationProperties(self, q):  # noqa
        """Given a Query `q`, assert that:
        - q OR not(q) == all items
        - q AND not(q) == 0
        - not(not(q)) == q
        """
        not_q = dbcore.query.NotQuery(q)
        # assert using OrQuery, AndQuery
        q_or = dbcore.query.OrQuery([q, not_q])
        q_and = dbcore.query.AndQuery([q, not_q])
        self.assert_items_matched_all(self.lib.items(q_or))
        self.assert_items_matched(self.lib.items(q_and), [])
        # assert manually checking the item titles
        all_titles = {i.title for i in self.lib.items()}
        q_results = {i.title for i in self.lib.items(q)}
        not_q_results = {i.title for i in self.lib.items(not_q)}
        self.assertEqual(q_results.union(not_q_results), all_titles)
        self.assertEqual(q_results.intersection(not_q_results), set())
        # round trip
        not_not_q = dbcore.query.NotQuery(not_q)
        self.assertEqual({i.title for i in self.lib.items(q)},
                         {i.title for i in self.lib.items(not_not_q)})

    def test_type_and(self):
        # not(a and b) <-> not(a) or not(b)
        q = dbcore.query.AndQuery([
            dbcore.query.BooleanQuery('comp', True),
            dbcore.query.NumericQuery('year', '2002')],
        )
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['foo bar', 'beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_anyfield(self):
        q = dbcore.query.AnyFieldQuery('foo', ['title', 'artist', 'album'],
                                       dbcore.query.SubstringQuery)
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['baz qux'])
        self.assertNegationProperties(q)

    def test_type_boolean(self):
        q = dbcore.query.BooleanQuery('comp', True)
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_date(self):
        q = dbcore.query.DateQuery('added', '2000-01-01')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        # query date is in the past, thus the 'not' results should contain all
        # items
        self.assert_items_matched(not_results, ['foo bar', 'baz qux',
                                                'beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_false(self):
        # not(FalseQuery) matches everything.
        q = dbcore.query.FalseQuery()
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched_all(not_results)
        self.assertNegationProperties(q)

    def test_type_match(self):
        q = dbcore.query.MatchQuery('year', '2003')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['foo bar', 'baz qux'])
        self.assertNegationProperties(q)

    def test_type_none(self):
        # Every fixture item has rg_track_gain set, so the negation is empty.
        q = dbcore.query.NoneQuery('rg_track_gain')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, [])
        self.assertNegationProperties(q)

    def test_type_numeric(self):
        q = dbcore.query.NumericQuery('year', '2001..2002')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_or(self):
        # not(a or b) <-> not(a) and not(b)
        q = dbcore.query.OrQuery([dbcore.query.BooleanQuery('comp', True),
                                  dbcore.query.NumericQuery('year', '2002')])
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_regexp(self):
        q = dbcore.query.RegexpQuery('artist', '^t')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['foo bar'])
        self.assertNegationProperties(q)

    def test_type_substring(self):
        q = dbcore.query.SubstringQuery('album', 'ba')
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, ['beets 4 eva'])
        self.assertNegationProperties(q)

    def test_type_true(self):
        # not(TrueQuery) matches nothing.
        q = dbcore.query.TrueQuery()
        not_results = self.lib.items(dbcore.query.NotQuery(q))
        self.assert_items_matched(not_results, [])
        self.assertNegationProperties(q)

    def test_get_prefixes_keyed(self):
        """Test both negation prefixes on a keyed query."""
        q0 = '-title:qux'
        q1 = '^title:qux'
        results0 = self.lib.items(q0)
        results1 = self.lib.items(q1)
        self.assert_items_matched(results0, ['foo bar', 'beets 4 eva'])
        self.assert_items_matched(results1, ['foo bar', 'beets 4 eva'])

    def test_get_prefixes_unkeyed(self):
        """Test both negation prefixes on an unkeyed query."""
        q0 = '-qux'
        q1 = '^qux'
        results0 = self.lib.items(q0)
        results1 = self.lib.items(q1)
        self.assert_items_matched(results0, ['foo bar', 'beets 4 eva'])
        self.assert_items_matched(results1, ['foo bar', 'beets 4 eva'])

    def test_get_one_keyed_regexp(self):
        q = '-artist::t.+r'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar', 'baz qux'])

    def test_get_one_unkeyed_regexp(self):
        q = '-:x$'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['foo bar', 'beets 4 eva'])

    def test_get_multiple_terms(self):
        q = 'baz -bar'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_get_mixed_terms(self):
        q = 'baz -title:bar'
        results = self.lib.items(q)
        self.assert_items_matched(results, ['baz qux'])

    def test_fast_vs_slow(self):
        """Test that the results are the same regardless of the `fast` flag
        for negated `FieldQuery`s.

        TODO: investigate NoneQuery(fast=False), as it is raising
        AttributeError: type object 'NoneQuery' has no attribute 'field'
        at NoneQuery.match() (due to being @classmethod, and no self?)
        """
        classes = [(dbcore.query.DateQuery, ['added', '2001-01-01']),
                   (dbcore.query.MatchQuery, ['artist', 'one']),
                   # (dbcore.query.NoneQuery, ['rg_track_gain']),
                   (dbcore.query.NumericQuery, ['year', '2002']),
                   (dbcore.query.StringFieldQuery, ['year', '2001']),
                   (dbcore.query.RegexpQuery, ['album', '^.a']),
                   (dbcore.query.SubstringQuery, ['title', 'x'])]
        for klass, args in classes:
            # The trailing positional argument is the `fast` flag.
            q_fast = dbcore.query.NotQuery(klass(*(args + [True])))
            q_slow = dbcore.query.NotQuery(klass(*(args + [False])))
            try:
                self.assertEqual([i.title for i in self.lib.items(q_fast)],
                                 [i.title for i in self.lib.items(q_slow)])
            except NotImplementedError:
                # ignore classes that do not provide `fast` implementation
                pass
def suite():
    """Collect every test in this module into a suite for `unittest.main`."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
# Run the whole suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
import os
import threading
import time
from multiprocessing import Process
import eventlet
import numpy as np
import socketio
from socketio.exceptions import ConnectionError
from stratego_env.game.config import STANDARD_STRATEGO_CONFIG
from stratego_env.game.stratego_procedural_env import StrategoProceduralEnv
# Directory of browser-served GUI assets, located next to this file.
STATIC_FILES_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "public")
# Socket.IO room names, one per player side (player 1 and player -1).
PLAYER_1_ROOM = "/player1"
PLAYER_2_ROOM = "/player2"
def _player_room(player: int):
    """Return the Socket.IO room name for *player* (1 or -1).

    Raises:
        ValueError: if *player* is neither 1 nor -1.
    """
    rooms = {1: PLAYER_1_ROOM, -1: PLAYER_2_ROOM}
    try:
        return rooms[player]
    except KeyError:
        raise ValueError(
            "There isn't a namespace for player {}".format(player)) from None
def gui_websocket_broker(base_env: StrategoProceduralEnv, port: int):
    """Run a blocking Socket.IO/WSGI server relaying game state between the
    environment process and the browser GUI.

    Intended to run in a separate process (see StrategoHumanGUIServer).
    Serves the static GUI from STATIC_FILES_PATH and keeps the latest state
    per player in `current_state` so late-joining browsers can catch up.
    """
    sio = socketio.Server()
    static_files = {
        '/': {'content_type': 'text/html', 'filename': os.path.join(STATIC_FILES_PATH, 'index.html')},
        '/static': STATIC_FILES_PATH
    }
    app = socketio.WSGIApp(sio, static_files=static_files)

    # Shared mutable state captured by the handlers below.  Keys: 1 and -1
    # hold each player's latest perspective state; 'current_action_step'
    # tracks the newest step seen so stale updates are ignored.
    current_state = {'current_action_step': -1}

    @sio.event
    def connect(sid, environ):
        print('connect ', sid)

    @sio.event
    def join_both_player_rooms(sid):
        # Used by the environment-side client so it receives both players'
        # traffic.
        print("{} entered both player rooms".format(sid))
        sio.enter_room(sid=sid, room=PLAYER_1_ROOM)
        sio.enter_room(sid=sid, room=PLAYER_2_ROOM)

    @sio.event
    def join_player_room(sid, player):
        print("{} entered room {}".format(sid, _player_room(player)))
        sio.enter_room(sid=sid, room=_player_room(player))
        # Replay the latest known state so a newly joined browser is not
        # blank until the next move.
        if player in current_state:
            print("emitted state update")
            sio.emit("state_update_wait_for_action_request",
                     data=(current_state[player], current_state['current_action_step'], False),
                     room=_player_room(player))

    @sio.event
    def action_requested_from_env(sid, state, player, action_step, valid_moves):
        # print("server: action requested, player {}".format(player))
        sio.emit("action_requested", data=(state, action_step, valid_moves), room=_player_room(player))
        # Only newer steps update the cached state; repeated polls for the
        # same step are ignored here.
        if current_state['current_action_step'] < action_step:
            current_state['current_action_step'] = action_step
            current_state[player] = state
            # NOTE(review): the opponent state is derived with player=-1
            # regardless of which player moved — presumably
            # get_state_from_player_perspective flips the given state;
            # confirm against StrategoProceduralEnv.
            current_state[-player] = base_env.get_state_from_player_perspective(state=state, player=-1).tolist()
            sio.emit("state_update_wait_for_action_request",
                     data=(current_state[-player], current_state['current_action_step'], False),
                     room=_player_room(-player))

    @sio.event
    def reset_game(sid, initial_state):
        # Reset the cached state and push the fresh board to both rooms.
        current_state['current_action_step'] = 0
        current_state[1] = initial_state
        current_state[-1] = base_env.get_state_from_player_perspective(state=initial_state, player=-1).tolist()
        sio.emit("state_update_wait_for_action_request",
                 data=(current_state[1], current_state['current_action_step'], True), room=_player_room(1))
        sio.emit("state_update_wait_for_action_request",
                 data=(current_state[-1], current_state['current_action_step'], True), room=_player_room(-1))

    @sio.event
    def action_selected_by_browser(sid, action_step, action_positions, player):
        # Forward the browser's chosen action back to the environment client.
        print("server got action selected")
        sio.emit("action_selected_for_env", (action_step, action_positions, player))

    @sio.event
    def disconnect(sid):
        print('disconnect ', sid)

    # Blocks forever serving the app on all interfaces.
    eventlet.wsgi.server(eventlet.listen(('0.0.0.0', port)), app, log_output=False)
class StrategoHumanGUIServer(object):
    """Environment-side client that launches the websocket broker in a child
    process and exchanges actions/state with human players via the browser.
    """

    def __init__(self, base_env: StrategoProceduralEnv, port: int = 7000):
        # Latest full game state, as set by reset_game().
        self.state = None
        self.base_env: StrategoProceduralEnv = base_env
        # The broker runs in its own (daemon) process so the blocking
        # eventlet server does not block this one.
        self.broker_p = Process(target=gui_websocket_broker, args=(self.base_env, port), daemon=True)
        self.broker_p.start()

        self.sio = socketio.Client(logger=False)
        # Guards the action bookkeeping shared with the socketio callback
        # thread below.
        self.action_step_lock = threading.Lock()
        self.current_action_step_num = 0
        self.current_action_player = None
        self.current_action = None

        @self.sio.event
        def connect():
            print('connection established')
            # Listen to both rooms so we see traffic for either player.
            self.sio.emit("join_both_player_rooms")

        @self.sio.event
        def action_selected_for_env(action_step, action, player):
            print('action {}, received with step {} from player {}'.format(action, action_step, player))
            with self.action_step_lock:
                # Only accept the action if it answers the step/player we are
                # currently waiting on; stale responses are dropped.
                if action_step == self.current_action_step_num and player == self.current_action_player:
                    self.current_action = action

        @self.sio.event
        def disconnect():
            print('disconnected from server')

        # The broker child process needs time to start listening; retry the
        # connection with exponential backoff.
        retry_secs = 0.1
        retries = 5
        for retry in range(retries):
            try:
                self.sio.connect('http://localhost:{}'.format(port), )
                break
            except ConnectionError:
                # NOTE(review): re-raising a fresh ConnectionError here drops
                # the original error details; `raise` alone would preserve
                # them — confirm intent.
                if retry + 1 >= retries:
                    raise ConnectionError
                time.sleep(retry_secs)
                retry_secs *= 2

    def reset_game(self, initial_state: np.ndarray):
        """Reset the broker's cached state and remember *initial_state*."""
        with self.action_step_lock:
            self.current_action_step_num = 0
            self.current_action_player = None
        self.sio.emit(event="reset_game", data=initial_state.tolist())
        self.state = initial_state

    def get_action_by_position(self, state: np.ndarray, player):
        """Block until the human *player* picks an action in the browser.

        Returns the action positions chosen by the browser client.
        """
        action = None
        player_perspective_state = self.base_env.get_state_from_player_perspective(state=state, player=player)
        # Valid moves are computed in the player's own perspective (player=1).
        pp_valid_moves = self.base_env.get_dict_of_valid_moves_by_position(state=player_perspective_state, player=1)
        with self.action_step_lock:
            self.current_action_player = player
        print("waiting for action from player", player)
        while action is None:
            with self.action_step_lock:
                # Re-emit the request every poll so a browser that connects
                # late still receives it.
                self.sio.emit(event="action_requested_from_env",
                              data=(player_perspective_state.tolist(),
                                    player,
                                    self.current_action_step_num,
                                    pp_valid_moves))
                if self.current_action is not None:
                    action = self.current_action
                    self.current_action = None
                # NOTE(review): the step number is bumped on every poll
                # iteration, which invalidates responses addressed to the
                # previous poll — presumably intentional stale-response
                # rejection, but a reply arriving between polls would be
                # discarded; confirm.
                self.current_action_step_num += 1
            if action is None:
                time.sleep(0.1)
        return action

    def __del__(self):
        # Best-effort teardown of the broker child process.
        self.broker_p.kill()
if __name__ == '__main__':
    # Manual smoke test: start the GUI server for a standard Stratego board.
    config = STANDARD_STRATEGO_CONFIG
    base_env = StrategoProceduralEnv(config['rows'], config['columns'])
    s = StrategoHumanGUIServer(base_env=base_env)
    # Commented-out manual game loop kept below for reference.
    # while True:
    #     print("waiting for action now")
    #     player = 1
    #     random_initial_state_fn = get_random_initial_state_fn(base_env=base_env, game_version_config=config)
    #     state = random_initial_state_fn()
    #     base_env.print_fully_observable_board_to_console(state)
    #
    #     s.reset_game(initial_state=state)
    #
    #     while base_env.get_game_ended(state, player) == 0:
    #         # if player == 1:
    #         #     base_env.print_fully_observable_board_to_console(state)
    #         #     action = s.get_action_by_position(state=state, player=player)
    #         #     print("action received by client is ", action)
    #         #     action_index = base_env.get_action_1d_index_from_positions(*action)
    #         #     action_index = base_env.get_action_1d_index_from_player_perspective(action_index=action_index, player=player)
    #         #     print("action_size:", base_env.action_size)
    #         #     print("action_index: ", action_index)
    #         #     print("action is valid: ", base_env.is_move_valid_by_1d_index(state, player, base_env.get_action_1d_index_from_player_perspective(action_index=action_index, player=player)))
    #         # else:
    #
    #         action_index = base_env.sample_partially_observable_simple_heuristic_policy(state=state, player=player)
    #         time.sleep(0.1)
    #
    #         state, player = base_env.get_next_state(state=state, player=player, action_index=action_index)
    #         s.reset_game(state)
    #         base_env.print_fully_observable_board_to_console(state)
|
def process_emotions(emotions):
    """Return the dominant emotion label across a batch of emotion scores.

    Args:
        emotions: list of dicts, each mapping the six emotion labels
            ('Fear', 'Sad', 'Bored', 'Happy', 'Excited', 'Angry') to a
            numeric score.

    Returns:
        The label whose average score over the batch is highest; ties go to
        the first label in declaration order.

    Raises:
        ZeroDivisionError: if ``emotions`` is empty.
        KeyError: if an entry is missing one of the six labels.
    """
    labels = ('Fear', 'Sad', 'Bored', 'Happy', 'Excited', 'Angry')
    length = len(emotions)
    # Average each label over the batch (replaces six copy-pasted
    # accumulation lines per label).
    averages = {
        label: sum(emotion[label] for emotion in emotions) / length
        for label in labels
    }
    # Dividing by a common positive length does not change the argmax, but
    # is kept so the values really are averages.
    return max(averages, key=averages.get)
def get_emotion(sentence):
    """Classify ``sentence`` into a dominant emotion via the ParallelDots API.

    The sentence is split on '.' and each fragment is scored remotely;
    ``process_emotions`` then picks the label with the highest average
    score. Requires network access.
    """
    import paralleldots
    # SECURITY(review): API key hardcoded in source — move it to an
    # environment variable / secrets store and rotate this key.
    paralleldots.set_api_key("AWDCWos9GlVND0R3Pf8L6D3NDjRAKQzDDWsgdtW0Pbw")
    text = sentence.split('.')
    response = paralleldots.batch_emotion(text)
    return process_emotions(response['emotion'])
# if __name__ == '__main__':
# text=["Choke me daddy"]
# response=paralleldots.batch_emotion(text)
# # print(response)
# print(f'You are {process_emotions(response["emotion"])}') |
import argparse
import configparser
import datetime
import os
import time
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Dict
class ArgParser(ABC):
    """Base argument parser.

    On construction, loads the section of ``config.ini`` matching the
    ``ENVIRON`` variable (default ``DOCKER``), exports every entry as an
    upper-cased environment variable, and stamps a timestamped run tag.
    """

    def __init__(self):
        # Which config-file section to load.
        self.environment = os.environ.get("ENVIRON", "DOCKER")
        parser = configparser.ConfigParser()
        parser.read(self.configuration_file_path)
        section = parser[self.environment]
        # Promote every configured key to an environment variable.
        for key in section:
            os.environ[key.upper()] = section[key]
        now = datetime.datetime.fromtimestamp(time.time())
        self.run_tag = now.strftime('%Y-%m-%d-%H%M%S')

    @property
    def configuration_file_path(self) -> str:
        """Path of the INI file holding per-environment settings."""
        return "config.ini"

    @property
    def hyperparameters_file_name(self) -> str:
        """File name used to persist hyperparameters."""
        return "hyperparameters.json"

    @abstractmethod
    def get_arguments(self) -> Dict[str, Any]:
        """Parse and return the command-line arguments."""
class TrainArgParser(ArgParser):
    """CLI arguments for the training entry point."""

    def get_arguments(self) -> Dict[str, Any]:
        """Parse and return the training command-line arguments."""
        parser = argparse.ArgumentParser()
        # SageMaker-style directories, defaulted from the environment.
        parser.add_argument("--input_dir", type=Path,
                            default=Path(os.environ["SM_INPUT_DIR"]))
        parser.add_argument("--output_dir", type=Path,
                            default=Path(os.environ["SM_OUTPUT_DIR"]))
        parser.add_argument('--project_name', default="", type=str,
                            help="Project name (default: '')")
        parser.add_argument('--run_tag', default=self.run_tag, type=str,
                            help=f"Run ID (default: '{self.run_tag}')")
        return parser.parse_args()
class APIArgParser(ArgParser):
    """CLI arguments for the model-serving (API) entry point."""

    def get_arguments(self) -> Dict[str, Any]:
        """Parse and return the API server command-line arguments."""
        parser = argparse.ArgumentParser()
        parser.add_argument(
            "--model_dir",
            type=Path,
            default=Path(os.environ["SM_OUTPUT_DIR"]),
        )
        parser.add_argument(
            "--model_name",
            default="customer-lifetime-values",
            type=str,
            help="Project name",
        )
        parser.add_argument(
            "--num_cpus",
            type=int,
            # argparse applies the declared type to a string default, so the
            # env value ends up converted to int.
            default=os.environ["SM_NUM_CPUS"],
        )
        parser.add_argument(
            "--model_server_timeout",
            default=60,
            type=int,
            # Fixed: the old help text wrongly described this as a worker count.
            help="Model server timeout in seconds (default: 60)",
        )
        parser.add_argument(
            "--run_tag",
            default=self.run_tag,
            type=str,
            help=f"Run ID (default: \"{self.run_tag}\")",
        )
        args = parser.parse_args()
        return args
|
import cal


class TestCal:
    """Unit tests for the ``cal`` module."""

    def test_addn(self):
        # Each case is ((addends), expected sum).
        for (a, b), expected in (((2, 2), 4), ((2, 4), 5)):
            assert cal.add(a, b) == expected
|
import os
import time
import folium
import webbrowser
import collections
import numpy as np
import pandas as pd
import networkx as nx
from tqdm import tqdm
from folium.map import *
from folium import plugins
from folium.plugins import FloatImage
from folium.plugins import MeasureControl
from fibonacci_heap import Fibonacci_heap
# Custom function to create a coordinates dictionary and a graph
def get_coords_and_graph():
    """Load node coordinates and build the road graph from ./Files.

    The directory is expected to contain three whitespace-separated files,
    in ``os.listdir`` order: node coordinates, physical distances and time
    distances (each with a 7-line header and a leading record-type column).

    Returns:
        tuple: ``(coordinates_dict, G)`` where ``coordinates_dict`` maps
        node id -> [longitude, latitude] (degrees) and ``G`` is a directed
        networkx graph whose edges carry both ``Physical_distance`` and
        ``Time_distance`` attributes.
    """
    # Fixed: paths were built with hard-coded '\\' separators (Windows-only);
    # os.path.join is portable. Also replaced the globals()/eval() pattern
    # with plain local variables and a shared read helper.
    files_dir = os.path.join(os.getcwd(), 'Files')
    # NOTE(review): relies on os.listdir returning the three data files in a
    # fixed order — confirm the directory contents match this assumption.
    files_names = os.listdir(files_dir)

    def _read(file_name, column_names):
        # All three files share the same layout; the first column is a
        # record-type character that is dropped after parsing.
        frame = pd.read_csv(os.path.join(files_dir, file_name), skiprows=7,
                            sep=" ", delimiter=" ", names=column_names,
                            index_col=None, usecols=None, encoding='ISO-8859-1')
        frame.drop(columns=["Character"], inplace=True)
        return frame

    coordinates = _read(files_names[0],
                        ["Character", "ID_Node", "Longitude", "Latitude"])
    # Raw coordinates are stored as integer micro-degrees.
    coordinates['Longitude'] = coordinates['Longitude'] / 1000000
    coordinates['Latitude'] = coordinates['Latitude'] / 1000000
    physical_dist = _read(files_names[1],
                          ["Character", "Node_1", "Node_2", "Physical_distance"])
    time_dist = _read(files_names[2],
                      ["Character", "Node_1", "Node_2", "Time_distance"])

    t = time.time()
    print('Creating the coordinates dictionary...')
    coordinates_dict = coordinates.set_index('ID_Node').T.to_dict('list')
    print('Elapsed time:')
    print(round(time.time() - t, 2))

    # Join the two distance tables so each edge carries both weights.
    complete = pd.merge(physical_dist, time_dist, on=['Node_1', 'Node_2'])
    t2 = time.time()
    print('Creating the graph...')
    G = nx.from_pandas_edgelist(complete, 'Node_1', 'Node_2',
                                ['Physical_distance', 'Time_distance'],
                                create_using=nx.DiGraph())
    print('Elapsed time:')
    print(round(time.time() - t2, 2))
    return coordinates_dict, G
# Custom function to run a BFS over the graph and checking it a node is connected or not
def b_f_s(graph, root):
    """Breadth-first search from ``root``; return the set of reachable nodes.

    ``graph`` is any adjacency structure where ``graph[vertex]`` iterates the
    vertex's neighbours (dict of lists, networkx graph, ...).

    The root itself is always included, so ``x in b_f_s(g, x)`` is True.
    """
    # Fixed: the root was never added to `visited` unless a cycle led back to
    # it, which made dijkstra(g, s, s) wrongly report "not connected".
    visited, queue = {root}, collections.deque([root])
    while queue:
        vertex = queue.popleft()
        for neighbour in graph[vertex]:
            if neighbour not in visited:
                visited.add(neighbour)
                queue.append(neighbour)
    return visited
# Custom function created to get the distance attribute from each edge.
def get_weight(graph, node_a, node_b, measure):
    """Return the weight of edge (node_a, node_b) for the given measure.

    'network' counts hops (constant 1); 'time' and 'physical' read the
    corresponding attribute from the edge data. Any other measure yields
    None.
    """
    if measure == 'network':
        return 1
    attribute = {'time': 'Time_distance',
                 'physical': 'Physical_distance'}.get(measure)
    if attribute is not None:
        return graph.get_edge_data(node_a, node_b)[attribute]
# Custom function to implement the Dijkstra algorithm
def dijkstra(graph, source, destination, measure = 'network'):
    """Shortest path from ``source`` to ``destination`` via Dijkstra.

    Args:
        graph: networkx DiGraph with 'Physical_distance'/'Time_distance'
            edge attributes.
        source, destination: node ids.
        measure: 'network' (hop count), 'time' or 'physical' — passed to
            get_weight.

    Returns:
        ``(path, total_weight)`` on success, the string ``"Not Possible"``
        if the heap empties before the destination is reached, or ``None``
        (after printing a message) for an invalid measure or an unreachable
        destination.
    """
    # The first if condition is deployed to avoid the case of an erroneous distance measure typing.
    if measure in ['network', 'time', 'physical']:
        # Here we decided to prevent the algorithm from starting by checking whether a node is not connected
        # via BFS.
        if destination in b_f_s(graph, source):
            # The variable 'shortest paths' is basically a dictionary where the keys are nodes and the values are a tuple
            # containing the couple (previous node, weight). We initialize it with the source vertex and set its weight to 0.
            shortest_paths = {source: (None, 0)}
            # The variable 'current_node' does basically store the node we are on, we initialize it with the source
            # vertex in the beginning
            current_node = source
            # The variable 'visited' is a set keeping trace of the visited nodes.
            visited = set()
            # The variable 'heap' is a Fibonacci heap we use to store our nodes and order them by
            # their current weight. To create it we resorted to an existing library which we modified
            # to better meet our requirements.
            heap = Fibonacci_heap()
            while current_node != destination:
                if current_node not in visited:
                    # Here we add our current node to the set of visited ones
                    visited.add(current_node)
                    # The variable 'destinations' is essentially an adjacency list, it extracts all the edges
                    # departing from the current node
                    destinations = [elem[1] for elem in graph.edges(current_node)]
                    # The variable 'current_node_weight' stores the weight attribute on the edge connected
                    current_node_weight = shortest_paths[current_node][1]
                    # During the following loop we visit all the nodes connected to the current one
                    for next_node in destinations:
                        # Here we compute the weight of current edge as the sum:
                        # weight of the edge + weight of edges previously visited
                        weight = get_weight(graph, current_node, next_node, measure) + current_node_weight
                        # Here we add nodes to the heap
                        if next_node not in visited and next_node not in heap.nodes:
                            heap.enqueue(next_node, weight)
                        # Here we add a new node to the shortest path, or update
                        # it if the current path is shorter than previous path
                        if next_node not in shortest_paths:
                            shortest_paths[next_node] = (current_node, weight)
                        else:
                            current_shortest_weight = shortest_paths[next_node][1]
                            if current_shortest_weight > weight:
                                shortest_paths[next_node] = (current_node, weight)
                # If our heap is empty, we cannot continue
                # nor reach the destination
                if not heap.__bool__():
                    return "Not Possible"
                # Here we update current_node with the next one,
                # namely the destination with lowest weight
                current_node = heap.dequeue_min().m_elem
            # Creating a path and reversing it (walk the predecessor chain
            # back from the destination to the source).
            path = []
            while current_node is not None:
                path.append(current_node)
                # Extract the previous node from shortest_paths
                next_node = shortest_paths[current_node][0]
                # Update current_node
                current_node = next_node
            # Reverse path
            path = path[::-1]
            return (path, shortest_paths[path[-1]][1])
        else:
            print('The graph is not connected.')
    else:
        print('Invalid measure, please try again.')
# Custom function to apply the Dijkstra algorithm in order to obtain a shortest ordered route
def shortest_ordered_route(graph, source, destinations, measure = 'network'):
    """Chain Dijkstra runs to visit ``destinations`` in the given order.

    Args:
        graph: networkx DiGraph with 'Physical_distance'/'Time_distance'
            edge attributes.
        source: starting node id.
        destinations: iterable of node ids to visit, in order.
        measure: 'network', 'time' or 'physical' (forwarded to dijkstra).

    Returns:
        ``(total_steps, total_weight, lst)`` — the full node path, the
        accumulated weight, and the source plus the requested destinations
        (used by the visualizer to size markers).
    """
    # Source + destinations, kept so folium can draw these nodes larger.
    lst = [source]
    lst.extend(destinations)
    # Accumulated path and accumulated weight over all legs.
    total_steps = []
    total_weight = 0
    for destination in destinations:
        # Fixed: the original duplicated this whole leg body in an if/else
        # (and carried a dead `source = source` assignment); only the pop
        # differs between the first and later legs.
        steps, weight = dijkstra(graph, source, destination, measure)
        if total_steps:
            # Drop the first node of this leg: it is the last node of the
            # previous leg and would otherwise appear twice in the path.
            steps.pop(0)
        total_steps += steps
        total_weight += weight
        source = destination
    # Print the result, labelled by the selected measure.
    print("Path:")
    print(*total_steps)
    label = {'network': "Network distance:",
             'time': "Time distance:",
             'physical': "Physical distance:"}.get(measure)
    if label is not None:
        print(label)
        print(total_weight)
    return total_steps, total_weight, lst
#t3 = time.time()
#s_o_r = shortest_ordered_route(G, int(input('Please choose a source node: ')), set(map(int, input('Now please choose a list of spaced nodes: ').split())), input('And, now, input a type of distance: '))
#print('Computing the shortest ordered route...')
#print('Elapsed time:')
#print(round(time.time() - t3, 2))
def visualize_func_3(coordinates_dict, s_o_r):
    """Render the route from shortest_ordered_route on an interactive map.

    Args:
        coordinates_dict: node id -> [longitude, latitude]. Folium expects
            (lat, lon), hence the ``[::-1]`` reversals below.
        s_o_r: the ``(total_steps, total_weight, lst)`` tuple returned by
            shortest_ordered_route.

    Saves the map to ``Func_3.html`` and opens it in a browser tab.
    """
    m = folium.Map(location = tuple(coordinates_dict[s_o_r[0][0]][::-1]), zoom_start = 10, tiles = 'openstreetmap')
    # Draw the route as a polyline through every node of the path.
    folium.PolyLine([tuple(coordinates_dict[node][::-1]) for node in s_o_r[0]], color = 'violet', weight = 2.5, opacity = 1).add_to(m)
    for node in s_o_r[0]:
        if node in s_o_r[2]:
            # Source/destination nodes: larger, dark markers.
            folium.CircleMarker(location = tuple(coordinates_dict[node][::-1]), radius = 10, line_color = 'purple', fill_color = 'black', fill_opacity = 0.8, fill = True).add_to(m)
        else:
            # Intermediate nodes: smaller, light markers.
            folium.CircleMarker(location = tuple(coordinates_dict[node][::-1]), radius = 5, line_color = '#3186cc', fill_color = '#FFFFFF', fill_opacity = 0.8, fill = True).add_to(m)
    m.save(outfile = "Func_3.html")
    webbrowser.open("Func_3.html", new = 2)
|
# Letter-grade script: reads a grade point from stdin and prints its grade.


def letter_grade(score):
    """Return the letter grade for a score in [0, 100], else None.

    Bands: 0-39 F, 40-44 E, 45-49 D, 50-59 C, 60-69 B, 70-100 A.
    """
    if 0 <= score <= 100:
        for upper, letter in ((39, 'F'), (44, 'E'), (49, 'D'),
                              (59, 'C'), (69, 'B'), (100, 'A')):
            if score <= upper:
                return letter
    return None


if __name__ == "__main__":
    # Guarded so importing this module does not block on input().
    score = int(input("Enter your grade point = "))
    grade = letter_grade(score)
    if grade is None:
        # Fixed: three comparisons used an undefined name `scores`
        # (NameError), and scores above 100 silently printed nothing.
        print("grade cannot be ascribe to", score, "." + "Enter grade between 0 and 100")
    else:
        print(grade)
|
"""RelayDomain viewsets."""
from rest_framework import viewsets
from rest_framework.permissions import DjangoModelPermissions, IsAuthenticated
from modoboa.admin import models as admin_models
from modoboa.lib.throttle import GetThrottleViewsetMixin
from modoboa.lib.viewsets import RevisionModelMixin
from . import serializers
class RelayDomainViewSet(GetThrottleViewsetMixin, RevisionModelMixin, viewsets.ModelViewSet):
    """RelayDomain viewset."""

    permission_classes = [IsAuthenticated, DjangoModelPermissions, ]
    serializer_class = serializers.RelayDomainSerializer

    def get_queryset(self):
        """Filter queryset based on current user."""
        user = self.request.user
        domains = admin_models.Domain.objects.get_for_admin(user)
        return domains.filter(type="relaydomain")

    def perform_destroy(self, instance):
        """Add custom args to delete call."""
        who = self.request.user
        instance.delete(who)
|
import os
from glob import glob
import shutil
import struct
import numpy as np
from scipy.ndimage import zoom
from ..io import mkdir,writeTxt
class ngDataset(object):
    """Builder for Neuroglancer "precomputed" datasets.

    Wraps CloudVolume/igneous to create the dataset ``info`` metadata, write
    image or segmentation chunks at multiple mip levels, generate meshes,
    and emit point annotations ("skeletons").
    """
    def __init__(self, volume_size = [1024,1024,100], \
        resolution = [6,6,30], chunk_size=[64,64,64], offset = [0,0,0], \
        mip_ratio = [[1,1,1],[2,2,1],[4,4,1],[8,8,1],[16,16,2],[32,32,4]], cloudpath=''):
        # dimension order: x,y,z
        # NOTE(review): mutable default arguments are shared across calls —
        # safe only as long as they are never mutated.
        self.volume_size = volume_size          # full volume size in voxels (x, y, z)
        self.resolution = resolution            # voxel size at mip 0 (nm per voxel)
        self.chunk_size = chunk_size            # CloudVolume chunk size
        self.mip_ratio = mip_ratio              # downsample factor per mip level
        self.offset = offset                    # voxel offset of the volume origin
        self.cloudpath = cloudpath              # CloudVolume path (e.g. 'file://...')
        if cloudpath != '' and 'file' == cloudpath[:4]:
            # 'file://' path: make sure the local directory exists.
            mkdir(cloudpath[7:])

    def createInfo(self, cloudpath = '', data_type = 'im', num_channel = 1, skel_radius=2):
        """Create and commit the CloudVolume ``info`` metadata.

        data_type selects encoding/dtype: 'im' (jpeg image), 'seg'
        (compressed segmentation) or 'skel' (raw segmentation).
        """
        from cloudvolume import CloudVolume
        if cloudpath == '':
            cloudpath = self.cloudpath + '/seg/'
        if 'file' == cloudpath[:4]:
            mkdir(cloudpath[7:])
        num_mip_level = len(self.mip_ratio)
        if data_type == 'im':
            m_enc = 'jpeg'
            m_type = 'image'
            m_dtype = 'uint8'
        elif data_type == 'seg':
            m_enc = 'compressed_segmentation'
            m_type = 'segmentation'
            m_dtype = 'uint32'
        elif data_type == 'skel':
            m_enc = 'raw'
            m_type = 'segmentation'
            m_dtype = 'uint32'
        # NOTE(review): m_enc/m_type/m_dtype stay unbound for any other
        # data_type, causing a NameError below — confirm intended inputs.
        # CloudVolume.create_new_info: only do 1 scale ...
        scales = [None for x in range(num_mip_level)]
        for i in range(num_mip_level):
            m_ratio = self.mip_ratio[i]
            # Resolution scaled by this level's downsample ratio.
            m_res = [self.resolution[j] * m_ratio[j] for j in range(len(self.resolution))]
            scales[i] = {
                "encoding" : m_enc, # raw, jpeg, compressed_segmentation, fpzip, kempressed
                "chunk_sizes" : [tuple(self.chunk_size)], # units are voxels
                "key" : "_".join(map(str, m_res)),
                "resolution" : m_res, # units are voxels
                # ceil-divide offset and size by the mip ratio
                "voxel_offset" : [(self.offset[x] + m_ratio[x] - 1) // m_ratio[x] for x in range(3)],
                "mesh" : 'mesh', # compute mesh
                "skeletons" : 'skeletons', # compute mesh
                "compressed_segmentation_block_size" : (8,8,8),
                "size" : [(self.volume_size[x] + m_ratio[x] - 1) // m_ratio[x] for x in range(3)],
            }
        info = {
            "num_channels" : num_channel,
            "type" : m_type,
            "data_type" : m_dtype, # Channel images might be 'uint8'
            "scales" : scales,
        }
        vol = CloudVolume(cloudpath, info=info)
        vol.commit_info()

    def createSetup(self, data_type):
        """Placeholder hook; no setup is currently performed."""
        pass

    def createTile(self, getVolume, cloudpath = '', data_type = 'im', \
        mip_levels = None, tile_size = [512,512], num_thread = 1, do_subdir = False, num_channel = 1, start_chunk=0,\
        insert_vol_size=[0,0,0], insert_vol_offset=[0,0,0]):
        """Write the volume into precomputed chunks at multiple mip levels.

        ``getVolume(z0, z1, y0, y1, x0, x1)`` must return the requested
        subvolume with dimension order z, y, x. Fine mip levels (below an
        internally computed cutoff) are written tile by tile; coarser
        levels are accumulated and written one whole section at a time.
        ``start_chunk`` allows resuming from a given z-chunk.

        NOTE(review): insert_vol_size / insert_vol_offset are accepted but
        never used — confirm whether partial-volume insertion was intended.
        """
        from cloudvolume import CloudVolume
        if data_type == 'im':
            m_resize = 1   # bilinear-ish zoom order for images
            m_dtype = 'uint8'
        elif data_type == 'seg':
            m_resize = 0   # nearest-neighbour zoom for label data
            m_dtype = 'uint32'
        else:
            raise ValueError('Unrecognized data type: ', data_type)
        if cloudpath == '':
            cloudpath = self.cloudpath + '/%s/'%data_type
        # write .htaccess
        if 'file' == cloudpath[:4]:
            self.writeHtaccess(cloudpath[7:] + '/.htaccess', data_type, do_subdir)
        # setup cloudvolume writer
        num_mip_level = len(self.mip_ratio)
        if mip_levels is None:
            # if not specified, do all mip levels
            mip_levels = range(num_mip_level)
        mip_levels = [x for x in mip_levels if x < num_mip_level]
        # < mipI: save each tile
        # >= mipI: save each slice
        # find the mip-level that need to be tiled vs. whole section
        m_mip_id = [i for i, ratio in enumerate(self.mip_ratio) if (self.volume_size[:2]/(tile_size*np.array(ratio[:2]))).max() <= 1]
        m_mip_id = len(m_mip_id) if len(m_mip_id) == 0 else m_mip_id[0]
        # invalid chunk_size
        m_mip_id = min(m_mip_id, np.log2(np.array(tile_size)/self.chunk_size[:2]).astype(int).min())
        # Per-mip writers, tile sizes, volume sizes, offsets, buffers and
        # z-step (slices skipped at this mip relative to the finest one).
        m_vols = [None] * num_mip_level
        m_tszA = [None] * num_mip_level
        m_szA = [None] * num_mip_level
        m_osA = [None] * num_mip_level
        m_tiles = [None] * num_mip_level
        m_zres = [0 ] * num_mip_level # number of slices to skip
        for i in mip_levels:
            m_vols[i] = CloudVolume(cloudpath, mip=i, parallel= num_thread)
            if do_subdir:
                m_vols[i].meta.name_sep = '/'
            m_tszA[i] = [tile_size[j]//self.mip_ratio[i][j] for j in range(2)] + [num_channel]
            m_szA[i] = m_vols[i].info['scales'][i]['size']
            m_osA[i] = [(self.offset[j] + self.mip_ratio[i][j] - 1)//self.mip_ratio[i][j] for j in range(3)]
            m_zres[i] = self.mip_ratio[i][-1]//self.mip_ratio[mip_levels[0]][-1]
            if i >= m_mip_id:
                # output whole section
                m_tiles[i] = np.zeros((m_szA[i][0], m_szA[i][1], self.chunk_size[2], num_channel), dtype=m_dtype)
            else:
                # output in tiles
                m_tiles[i] = np.zeros((m_tszA[i][0], m_tszA[i][1], self.chunk_size[2], num_channel), dtype=m_dtype)
        # tile for the finest level
        x0 = [None] * num_mip_level
        x1 = [None] * num_mip_level
        y0 = [None] * num_mip_level
        y1 = [None] * num_mip_level
        # num of chunk: x and y (offset)
        # keep the size
        #num_chunk = [(m_szA[mip_levels[0]][x] + m_tszA[mip_levels[0]][x]-1 - m_osA[mip_levels[0]][x]) // m_tszA[mip_levels[0]][x] for x in range(2)]
        num_chunk = [(m_szA[mip_levels[0]][x] + m_tszA[mip_levels[0]][x]-1) // m_tszA[mip_levels[0]][x] for x in range(2)]
        # num of chunk: z
        # so that the tile-based mip-levels can output tiles
        num_ztile = self.mip_ratio[m_mip_id-1][2]*self.chunk_size[2]
        num_chunk += [(m_szA[mip_levels[0]][2] + num_ztile - 1) // num_ztile]
        #num_chunk += [(m_szA[mip_levels[0]][2] - m_osA[mip_levels[0]][2] + num_ztile - 1) // num_ztile]
        for z in range(start_chunk,num_chunk[2]):
            #for z in range(num_chunk[2]):
            z0 = z * num_ztile
            z1 = min(self.volume_size[2], (z+1) * num_ztile)
            for y in range(num_chunk[1]):
                for x in range(num_chunk[0]):
                    print('do chunk: %d/%d, %d/%d, %d/%d' % (z, num_chunk[2], y, num_chunk[1], x, num_chunk[0]))
                    # generate global coord
                    for i in mip_levels:
                        # add offset for axis-aligned write
                        x0[i] = x * m_tszA[i][0]
                        x1[i] = min(x0[i] + m_tszA[i][0], m_szA[i][0])
                        y0[i] = y * m_tszA[i][1]
                        y1[i] = min(y0[i] + m_tszA[i][1], m_szA[i][1])
                    # read tiles
                    # input/output dimension order: z,y,x
                    ims = getVolume(z0, z1, \
                        y0[mip_levels[0]], y1[mip_levels[0]], \
                        x0[mip_levels[0]], x1[mip_levels[0]])
                    for zz in range(z0,z0+ims.shape[0]):
                        if zz-z0 >= ims.shape[0]:
                            # pad missing slices with zeros
                            im = np.zeros(ims[0].shape, ims.dtype)
                        else:
                            im = ims[zz-z0]
                        # transpose to x,y(,c) order for CloudVolume
                        if im.ndim == 2:
                            im = im.transpose((1,0))
                        else:
                            im = im.transpose((1,0,2))
                        sz0 = im.shape
                        # in case the output is not padded for invalid regions
                        # NOTE(review): compares against m_tszA[0], which is
                        # None when mip level 0 is not in mip_levels — confirm.
                        full_size_tile = (sz0[0] == m_tszA[0][0])*(sz0[1] == m_tszA[0][1])
                        for i in mip_levels:
                            # iterative bilinear downsample
                            # bug: last border tiles, not full size ...
                            sz0 = im.shape
                            if full_size_tile:
                                #im = cv2.resize(im.astype(np.float32), tuple(tszA[i]), m_resize).astype(m_dtype)
                                sz_r = m_tszA[i][:len(sz0)] / np.array(sz0)
                                sz_im = m_tszA[i]
                            else:
                                tszA_t = [x1[i]-x0[i], y1[i]-y0[i], num_channel]
                                sz_r = tszA_t[:len(sz0)] / np.array(sz0)
                                sz_im = tszA_t
                            if im.ndim == 2:
                                im = zoom(im, sz_r, order=m_resize)
                            else:
                                # zoom each channel separately
                                im0 = im.copy()
                                im = np.zeros(sz_im, im.dtype)
                                for c in range(im.shape[-1]):
                                    im[:,:,c] = zoom(im0[:,:,c], sz_r[:2], order=m_resize)
                            # save image into tiles
                            if zz % m_zres[i] == 0:
                                # local z index within the chunk buffer
                                zzl = (zz // m_zres[i]) % (self.chunk_size[2])
                                if i < m_mip_id: # write the whole tile
                                    m_tiles[i][:im.shape[0], :im.shape[1], zzl] = im.reshape(m_tiles[i][:im.shape[0], :im.shape[1], zzl].shape)
                                else: # write into the whole section
                                    if im[:(x1[i]-x0[i]), :(y1[i]-y0[i])].max()>0:
                                        tmp = m_tiles[i][x0[i]: x1[i], \
                                            y0[i]: y1[i], zzl]
                                        tmp[:] = im[:(x1[i]-x0[i]), :(y1[i]-y0[i])].reshape(tmp.shape)
                        # < mipI: write for each tile
                        for i in [ii for ii in mip_levels if ii < m_mip_id]:
                            # chunk filled or last image
                            if (zz + 1) % (m_zres[i] * self.chunk_size[2]) == 0 or (z == num_chunk[2] - 1) * (zz == z1 - 1):
                                # take the ceil for the last chunk
                                z1g = (zz + m_zres[i]) // m_zres[i]
                                z0g = ((z1g - 1) // self.chunk_size[2]) * self.chunk_size[2]
                                # check volume align
                                if m_tiles[i][: x1[i] - x0[i], : y1[i] - y0[i], : z1g - z0g, :].max()>0:
                                    m_vols[i][x0[i]+m_osA[i][0]: x1[i]+m_osA[i][0], \
                                        y0[i]+m_osA[i][1]: y1[i]+m_osA[i][1], z0g+m_osA[i][2]: z1g+m_osA[i][2], :] = \
                                        m_tiles[i][: x1[i] - x0[i], : y1[i] - y0[i], : z1g - z0g, :]
                                    # print(i, z0g, z1g)
                                    #print(i, m_osA[i][2] + z0g, m_osA[i][2] + z1g, m_tiles[i][: x1[i] - x0[i], : y1[i] - y0[i], : z1g - z0g, :].max())
                                # reset the buffer for the next chunk
                                m_tiles[i][:] = 0
                        # >= mipI: write for each section
                        # in one zchunk, there can be multiple chunk in z
                        # last xy chunk
                        if y == num_chunk[1]-1 and x == num_chunk[0]-1:
                            for i in [ii for ii in mip_levels if ii >= m_mip_id]:
                                if (zz+1) % (m_zres[i] * self.chunk_size[2]) == 0 or zz == self.volume_size[2]-1:
                                    z1g = (zz + 1 + m_zres[i] - 1) // m_zres[i]
                                    z0g = z1g - self.chunk_size[2]
                                    if (zz+1) % (m_zres[i] * self.chunk_size[2]) != 0: # last unfilled chunk
                                        z0g = (z1g // self.chunk_size[2]) * self.chunk_size[2]
                                    if m_tiles[i][:, :, : z1g - z0g, :].max()>0:
                                        try:
                                            m_vols[i][m_osA[i][0]:, m_osA[i][1]:, z0g+m_osA[i][2]: z1g+m_osA[i][2], :] = m_tiles[i][:, :, : z1g - z0g, :]
                                        except:
                                            import pdb; pdb.set_trace()
                                    # reset the buffer for the next chunk
                                    m_tiles[i][:] = 0

    def createMesh(self, cloudpath='', mip_level=0, volume_size=[256,256,100], \
        num_thread = 1, dust_threshold = None, do_subdir = False, object_ids = None):
        """Generate meshes (and their manifests) for a segmentation volume
        using igneous task queues.

        NOTE(review): the dust_threshold parameter is accepted but the call
        below hardcodes dust_threshold=20 — confirm which is intended.
        """
        from taskqueue import LocalTaskQueue
        import igneous.task_creation as tc
        if cloudpath == '':
            cloudpath = self.cloudpath + '/seg/'
        tq = LocalTaskQueue(parallel = num_thread)
        tasks = tc.create_meshing_tasks(cloudpath, mip = mip_level, \
            shape = volume_size, mesh_dir = 'mesh', object_ids = object_ids, \
            dust_threshold = 20, max_simplification_error = 40, do_subdir = do_subdir)
        tq.insert(tasks)
        tq.execute()
        # Second pass: build the per-object mesh manifests.
        tq = LocalTaskQueue(parallel=num_thread)
        tasks = tc.create_mesh_manifest_tasks(cloudpath)
        tq.insert(tasks)
        tq.execute()

    def createSkeleton(self, coordinates, cloudpath='', volume_size=None, resolution=None):
        """Write point annotations in Neuroglancer's precomputed
        annotation format (single spatial0 grid cell).

        coordinates is a list of tuples (x,y,z).
        """
        if cloudpath == '':
            cloudpath = os.path.join(self.cloudpath, 'skeletons')
        if volume_size is None:
            volume_size = self.volume_size
        if resolution is None:
            resolution = self.resolution
        foldername = cloudpath
        if 'file' == cloudpath[:4]:
            # convert from cloudvolume path to local path
            foldername = cloudpath[7:]
        mkdir(foldername)
        mkdir(os.path.join(foldername, 'spatial0'))
        self.writeSkeletonInfo(os.path.join(foldername, 'info'), volume_size, resolution)
        with open(os.path.join(foldername, 'spatial0', '0_0_0'), 'wb') as outfile:
            total_count=len(coordinates) # coordinates is a list of tuples (x,y,z)
            # little-endian uint64 count, then 3 float32 per point
            buf = struct.pack('<Q',total_count)
            for (x,y,z) in coordinates:
                pt_buf = struct.pack('<3f',x,y,z)
                buf+=pt_buf
            # write the ids at the end of the buffer as increasing integers
            id_buf = struct.pack('<%sQ' % len(coordinates), *range(len(coordinates)))
            buf+=id_buf
            outfile.write(buf)

    def writeSkeletonInfo(self, output_file='', volume_size=None, resolution=None):
        """Write the JSON ``info`` file for the point-annotation layer.

        NOTE(review): the default parameter is '' but the None-check below
        only fires for output_file=None — confirm intended default.
        """
        if output_file is None:
            output_file = self.cloudpath + 'skeletons/info'
        if volume_size is None:
            volume_size = self.volume_size
        if resolution is None:
            resolution = self.resolution
        out = """{
"@type" : "neuroglancer_annotations_v1",
"annotation_type" : "POINT",
"by_id" : {
"key" : "by_id"
},
"dimensions" : {
"x" : [ %.e, "m" ],
"y" : [ %.e, "m" ],
"z" : [ %.e, "m" ]
},
"lower_bound" : [ 0, 0, 0 ],
"properties" : [],
"relationships" : [],
"spatial" : [
{
"chunk_size" : [ %d, %d, %d ],
"grid_shape" : [ 1, 1, 1 ],
"key" : "spatial0",
"limit" : 1
}
],
"upper_bound" : [ %d, %d, %d]
}"""%(resolution[0],resolution[1],resolution[2],volume_size[0],volume_size[1],volume_size[2],volume_size[0],volume_size[1],volume_size[2])
        writeTxt(output_file, out)

    def writeHtaccess(self, output_file, data_type = 'im', do_subdir = False):
        """Write an Apache .htaccess suitable for serving precomputed data."""
        out = """# If you get a 403 Forbidden error, try to comment out the Options directives
# below (they may be disallowed by your server's AllowOverride setting).
#<IfModule headers_module>
# Needed to use the data from a Neuroglancer instance served from a
# different server (see http://enable-cors.org/server_apache.html).
# Header set Access-Control-Allow-Origin "*"
#</IfModule>
# Data chunks are stored in sub-directories, in order to avoid having
# directories with millions of entries. Therefore we need to rewrite URLs
# because Neuroglancer expects a flat layout.
#Options FollowSymLinks
"""
        if do_subdir:
            out+= """RewriteEngine On
RewriteRule "^(.*)/([0-9]+-[0-9]+)_([0-9]+-[0-9]+)_([0-9]+-[0-9]+)$" "$1/$2/$3/$4"
"""
        # NOTE(review): the following string literal is a bare expression and
        # is never appended to `out` — likely a missing `out +=`. Confirm.
        """
# Microsoft filesystems do not support colons in file names, but pre-computed
# meshes use a colon in the URI (e.g. 100:0). As :0 is the most common (only?)
# suffix in use, we will serve a file that has this suffix stripped.
#RewriteCond "%{REQUEST_FILENAME}" !-f
#RewriteRule "^(.*):0$" "$1"
"""
        if data_type == 'seg':
            out += """<IfModule mime_module>
# Allow serving pre-compressed files, which can save a lot of space for raw
# chunks, compressed segmentation chunks, and mesh chunks.
#
# The AddType directive should in theory be replaced by a "RemoveType .gz"
# directive, but with that configuration Apache fails to serve the
# pre-compressed chunks (confirmed with Debian version 2.2.22-13+deb7u6).
# Fixes welcome.
# Options Multiviews
AddEncoding x-gzip .gz
AddType application/octet-stream .gz
</IfModule>
"""
        writeTxt(output_file, out)

    def removeGz(self, cloudpath='', folder_key='_', option = 'copy'):
        # utility function
        """Strip the .gz suffix from chunk files under matching folders.

        option: 'copy' (keep .gz, add uncompressed name), 'move' (rename),
        'remove_orig' (delete the non-.gz copy), or 'copy_subdir' (copy into
        nested sub-directories derived from the '_'-separated name).
        """
        if 'file' == cloudpath[:4]:
            cloudpath = cloudpath[7:]
        fns = [x for x in glob(cloudpath + '/*') if folder_key in x[x.rfind('/'):]]
        for fn in fns:
            print(fn)
            gzs = glob(fn + '/*.gz')
            if option == 'copy':
                for gz in gzs:
                    if not os.path.exists(gz[:-3]):
                        shutil.copy(gz, gz[:-3])
            elif option == 'move':
                for gz in gzs:
                    shutil.move(gz, gz[:-3])
            elif option == 'remove_orig':
                for gz in gzs:
                    os.remove(gz[:-3])
            elif option == 'copy_subdir':
                for gz in gzs:
                    gz2 = gz[gz.rfind('/'):-3].split('_')
                    mkdir(fn + gz2[0] + '/' + gz2[1], 2)
                    shutil.copy(gz, fn + '/'.join(gz2))
|
from common.run_method import RunMethod
import allure
@allure.step("极数据/大屏/首页人次三率")
def big_screen_queryBigScreenIndexBussinessRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/queryBigScreenIndexBussinessRate.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/queryBigScreenIndexBussinessRate",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/首页人次三率",
        **kwargs,
    )
@allure.step("极数据/大屏/首页目标完成雷达图")
def big_screen_queryBigScreenIndexRadar_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/queryBigScreenIndexRadar.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/queryBigScreenIndexRadar",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/首页目标完成雷达图",
        **kwargs,
    )
@allure.step("极数据/大屏/首页目标完成排行")
def big_screen_queryBigScreenIndexRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/queryBigScreenIndexRank.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/queryBigScreenIndexRank",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/首页目标完成排行",
        **kwargs,
    )
@allure.step("极数据/大屏/首页校区目标完成")
def big_screen_queryBigScreenIndexSchoolAchieveRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/queryBigScreenIndexSchoolAchieveRate.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/queryBigScreenIndexSchoolAchieveRate",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/首页校区目标完成",
        **kwargs,
    )
@allure.step("极数据/大屏/首页招生动态")
def big_screen_queryBigScreenIndexRange_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/queryBigScreenIndexRange.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/queryBigScreenIndexRange",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/首页招生动态",
        **kwargs,
    )
@allure.step("极数据/大屏/校区增长率")
def big_screen_querySchoolGrowingRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/querySchoolGrowingRate.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/querySchoolGrowingRate",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/校区增长率",
        **kwargs,
    )
@allure.step("极数据/大屏/单校规模同比")
def big_screen_querySchoolScaleYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
    """POST /api-operation-web/big-screen/querySchoolScaleYearOnYear.

    :param params: query-string parameters appended to the URL.
    :param body: request body.
    :param header: request headers.
    :param return_json: return the parsed JSON response when True (default);
        otherwise return the raw response.
    :param kwargs: extra options forwarded to RunMethod.run_request
        (e.g. host, the target environment).
    :return: JSON response by default; raw response when return_json=False.
    """
    return RunMethod.run_request(
        "POST",
        "/api-operation-web/big-screen/querySchoolScaleYearOnYear",
        params=params,
        body=body,
        header=header,
        return_json=return_json,
        name="极数据/大屏/单校规模同比",
        **kwargs,
    )
@allure.step("极数据/大屏/单校规模环比")
def big_screen_querySchoolScaleQuarterOnQuarter_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/单校规模环比"
url = f"/api-operation-web/big-screen/querySchoolScaleQuarterOnQuarter"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级人次同比")
def big_screen_queryGradePersonTimeYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级人次同比"
url = f"/api-operation-web/big-screen/queryGradePersonTimeYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级人次环比")
def big_screen_queryGradePersonTimeQuarterOnQuarter_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级人次环比"
url = f"/api-operation-web/big-screen/queryGradePersonTimeQuarterOnQuarter"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目人次同比")
def big_screen_querySubjectPersonTimeYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目人次同比"
url = f"/api-operation-web/big-screen/querySubjectPersonTimeYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/课型人次同比")
def big_screen_queryCourseTypePersonTimeYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/课型人次同比"
url = f"/api-operation-web/big-screen/queryCourseTypePersonTimeYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级续报率·同比")
def big_screen_queryGradeContinueYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级续报率·同比"
url = f"/api-operation-web/big-screen/queryGradeContinueYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目续报率·同比")
def big_screen_querySubjectContinueYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目续报率·同比"
url = f"/api-operation-web/big-screen/querySubjectContinueYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级优生续报率·同比")
def big_screen_queryGradeTopContinueYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级优生续报率·同比"
url = f"/api-operation-web/big-screen/queryGradeTopContinueYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目优生续报率·同比")
def big_screen_querySubjectTopContinueYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目优生续报率·同比"
url = f"/api-operation-web/big-screen/querySubjectTopContinueYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目续报率&退费率")
def big_screen_querySubjectContinueAndRefund_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目续报率&退费率"
url = f"/api-operation-web/big-screen/querySubjectContinueAndRefund"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级续报率&退费率")
def big_screen_queryGradeContinueAndRefund_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级续报率&退费率"
url = f"/api-operation-web/big-screen/queryGradeContinueAndRefund"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级退费率·同比")
def big_screen_queryGradeRefundYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级退费率·同比"
url = f"/api-operation-web/big-screen/queryGradeRefundYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目退费率·同比")
def big_screen_querySubjectRefundYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目退费率·同比"
url = f"/api-operation-web/big-screen/querySubjectRefundYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/退费率排名")
def big_screen_queryRefundRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/退费率排名"
url = f"/api-operation-web/big-screen/queryRefundRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/退费率原因占比")
def big_screen_queryRefundReasonProportion_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/退费率原因占比"
url = f"/api-operation-web/big-screen/queryRefundReasonProportion"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/续报率折线图")
def big_screen_queryContinueRange_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/续报率折线图"
url = f"/api-operation-web/big-screen/queryContinueRange"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级优生占比同比")
def big_screen_queryGradeTopStudentProportion_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级优生占比同比"
url = f"/api-operation-web/big-screen/queryGradeTopStudentProportion"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目优生占比同比")
def big_screen_querySubjectTopStudentProportion_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目优生占比同比"
url = f"/api-operation-web/big-screen/querySubjectTopStudentProportion"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/优生占比排行榜")
def big_screen_queryTopStudentRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/优生占比排行榜"
url = f"/api-operation-web/big-screen/queryTopStudentRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/优生概览")
def big_screen_queryTopStudentCount_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/优生概览"
url = f"/api-operation-web/big-screen/queryTopStudentCount"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/优生公立校分布")
def big_screen_queryTopStudentSchoolDistribution_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/优生公立校分布"
url = f"/api-operation-web/big-screen/queryTopStudentSchoolDistribution"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/续报率排名")
def big_screen_queryContinueRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/续报率排名"
url = f"/api-operation-web/big-screen/queryContinueRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/满班率排名")
def big_screen_queryFullClassRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/满班率排名"
url = f"/api-operation-web/big-screen/queryFullClassRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/年级满班率·同比")
def big_screen_queryGradeFullClassYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/年级满班率·同比"
url = f"/api-operation-web/big-screen/queryGradeFullClassYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目满班率·同比")
def big_screen_querySubjectFullClassYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目满班率·同比"
url = f"/api-operation-web/big-screen/querySubjectFullClassYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/带班量排名")
def big_screen_queryTeacherClassCountRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/带班量排名"
url = f"/api-operation-web/big-screen/queryTeacherClassCountRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/学段带班量·同比")
def big_screen_queryPhaseTeacherClassCountYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/学段带班量·同比"
url = f"/api-operation-web/big-screen/queryPhaseTeacherClassCountYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/科目带班量·同比")
def big_screen_querySubjectTeacherClassCountYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/科目带班量·同比"
url = f"/api-operation-web/big-screen/querySubjectTeacherClassCountYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/通用条件(班型,校区类型)")
def big_screen_queryCommonInfo_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/通用条件(班型,校区类型)"
url = f"/api-operation-web/big-screen/queryCommonInfo"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/入口年级报率")
def big_screen_queryEntryContinueRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/入口年级报率"
url = f"/api-operation-web/big-screen/queryEntryContinueRate"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/入口年级满班率")
def big_screen_queryEntryFullClassRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/入口年级满班率"
url = f"/api-operation-web/big-screen/queryEntryFullClassRate"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/入口年级退费率")
def big_screen_queryEntryRefundRate_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/入口年级退费率"
url = f"/api-operation-web/big-screen/queryEntryRefundRate"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/入口年级单校规模排行榜")
def big_screen_queryEntrySchoolScaleRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/入口年级单校规模排行榜"
url = f"/api-operation-web/big-screen/queryEntrySchoolScaleRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/入口年级人次占比同比")
def big_screen_queryEntryPersonTimeProportion_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/入口年级人次占比同比"
url = f"/api-operation-web/big-screen/queryEntryPersonTimeProportion"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/最近更新时间")
def big_screen_queryLastUpdateTime_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/最近更新时间"
url = f"/api-operation-web/big-screen/queryLastUpdateTime"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校汇总")
def big_screen_queryNewSchoolAllCount_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校汇总"
url = f"/api-operation-web/big-screen/queryNewSchoolAllCount"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校人次排名")
def big_screen_queryNewSchoolPersonTimeRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校人次排名"
url = f"/api-operation-web/big-screen/queryNewSchoolPersonTimeRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校人次同比")
def big_screen_queryNewSchoolPersonTimeYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校人次同比"
url = f"/api-operation-web/big-screen/queryNewSchoolPersonTimeYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校续报率排名")
def big_screen_queryNewSchoolContinueRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校续报率排名"
url = f"/api-operation-web/big-screen/queryNewSchoolContinueRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校续报率同比")
def big_screen_queryNewSchoolContinueYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校续报率同比"
url = f"/api-operation-web/big-screen/queryNewSchoolContinueYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校满班率排名")
def big_screen_queryNewSchoolFullClassRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校满班率排名"
url = f"/api-operation-web/big-screen/queryNewSchoolFullClassRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校满班率同比")
def big_screen_queryNewSchoolFullClassYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校满班率同比"
url = f"/api-operation-web/big-screen/queryNewSchoolFullClassYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校退费率排名")
def big_screen_queryNewSchoolRefundRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校退费率排名"
url = f"/api-operation-web/big-screen/queryNewSchoolRefundRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/新开校退费率同比")
def big_screen_queryNewSchoolRefundYearOnYear_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/新开校退费率同比"
url = f"/api-operation-web/big-screen/queryNewSchoolRefundYearOnYear"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/课型人次占比")
def big_screen_queryCourseTypePersonTimeProportion_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/课型人次占比"
url = f"/api-operation-web/big-screen/queryCourseTypePersonTimeProportion"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/数据概览人次")
def big_screen_queryWindowPersonTime_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/数据概览人次"
url = f"/api-operation-web/big-screen/queryWindowPersonTime"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/数据概览续报")
def big_screen_queryWindowContinue_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/数据概览续报"
url = f"/api-operation-web/big-screen/queryWindowContinue"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/数据概览续报排行")
def big_screen_queryWindowContinueRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/数据概览续报排行"
url = f"/api-operation-web/big-screen/queryWindowContinueRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/数据概览续报教师排行")
def big_screen_queryWindowContinueTeacherRank_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/数据概览续报教师排行"
url = f"/api-operation-web/big-screen/queryWindowContinueTeacherRank"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/大屏/续报率折线图")
def big_screen_queryWindowContinueRange_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/大屏/续报率折线图"
url = f"/api-operation-web/big-screen/queryWindowContinueRange"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
|
from rest_framework.routers import SimpleRouter
from . import views
# URL routing for this app: expose the Lot and Bet viewsets under the
# /lots/ and /bets/ prefixes.
router = SimpleRouter()
router.register(
    prefix='lots',
    viewset=views.LotAPI,
)
router.register(
    prefix='bets',
    viewset=views.BetAPI,
)
# Module-level name Django looks for when this urlconf is include()d.
urlpatterns = router.urls
|
#!/usr/bin/env python
import socket
import gevent
import random
from gevent.monkey import patch_all
patch_all()
UDP_IP = "127.0.0.1"  # collector address the simulated traffic is sent to
UDP_PORT = 9000
# One module-level UDP socket shared by all jailer greenlets.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
# NOTE(review): multicast TTL of 32 is set, but the destination above is
# unicast loopback, so this option likely has no effect — confirm intent.
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 32)
def jailer(ip):
    """Simulate one client IP: emit random "action" datagrams forever.

    Each of the six action types fires with probability 1/seed, where the
    seeds are fixed per-IP at startup; the greenlet then sleeps briefly so
    other greenlets get scheduled.

    :param ip: dotted-quad string embedded in each datagram
    """
    # xrange is Python 2 only; range is correct here (the script already
    # uses print() calls). Only the first six seeds are ever consulted.
    seeds = [random.randint(1, 100) for _ in range(10)]
    actions = ("test", "test2", "test3", "test4", "test5", "test6")
    while True:
        for seed, action in zip(seeds, actions):
            if random.randint(1, seed) == 1:
                message = "action {} {} 1 10 100 30".format(action, ip)
                # socket.sendto requires a bytes-like payload on Python 3.
                sock.sendto(message.encode("ascii"), (UDP_IP, UDP_PORT))
        gevent.sleep(random.uniform(0, 0.2))
# Spawn 1000 simulated clients with random IPv4 addresses, then wait on all
# greenlets (the jailers loop forever, so this blocks indefinitely).
for _ in range(1000):  # xrange is Python 2 only
    print("spawning new ip")
    octets = [random.randint(1, 255) for _ in range(4)]
    gevent.spawn(jailer, "{}.{}.{}.{}".format(*octets))
gevent.wait()
|
#!/usr/bin/env python
import random
import sys
# Read all lines from stdin and print one chosen uniformly at random.
with sys.stdin as f:
    pairs = f.read().splitlines()
# Python 2 `print x` statement is a SyntaxError on Python 3; use the function.
print(random.choice(pairs))
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2016-12-29 13:06
from __future__ import unicode_literals
from django.db import migrations
import gim.core.utils
class Migration(migrations.Migration):
    """Auto-generated migration: store EventPart old/new values as JSON.

    Switches both value columns of EventPart to the project's custom
    JSONField; both remain optional (blank/null allowed).
    """

    dependencies = [
        ('events', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='eventpart',
            name='new_value',
            field=gim.core.utils.JSONField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='eventpart',
            name='old_value',
            field=gim.core.utils.JSONField(blank=True, null=True),
        ),
    ]
|
class Node:
    """A cell of a doubly linked list.

    NOTE(review): the link names are inverted relative to convention —
    the list code uses `prev` as the forward (next) pointer and `pref`
    as the backward pointer.
    """

    def __init__(self, data):
        self.item = data              # payload
        self.prev = self.pref = None  # forward / backward links

    def get_data(self):
        """Return the payload stored in this cell."""
        return self.item
class DoublyLinkedList:
    """Doubly linked list built from Node cells.

    NOTE(review): throughout this class Node.prev is used as the FORWARD
    (next) link and Node.pref as the BACKWARD link — the attribute names
    are swapped relative to convention. Comments below read them that way.
    """

    def __init__(self):
        # NOTE(review): start_prev is never read or written again — dead?
        self.start_prev = None
        # Head of the list; None while the list is empty.
        self.start_node = None

    def insert_in_emptylist(self, data):
        """Insert the first element; prints a warning if the list is non-empty."""
        if self.start_node is None:
            new_node = Node(data)
            self.start_node = new_node
        else:
            print("list is not empty")

    def insert_at_start(self, data):
        """Prepend *data* (prints "node inserted" only on the empty case)."""
        if self.start_node is None:
            new_node = Node(data)
            self.start_node = new_node
            print("node inserted")
            return
        new_node = Node(data)
        new_node.prev = self.start_node  # forward link to the old head
        self.start_node.pref = new_node  # back link from the old head
        self.start_node = new_node

    def insert_at_end(self, data):
        """Append *data* by walking forward to the last cell."""
        if self.start_node is None:
            new_node = Node(data)
            self.start_node = new_node
            return
        n = self.start_node
        while n.prev is not None:  # follow forward links to the tail
            n = n.prev
        new_node = Node(data)
        n.prev = new_node   # old tail -> new tail
        new_node.pref = n   # new tail's back link

    def delete_at_start(self):
        """Remove the head element; prints a message when the list is empty.

        NOTE(review): the new head's back link (pref) is not cleared, so it
        still points at the deleted node — confirm whether that matters.
        """
        if self.start_node is None:
            print("The list has no element to delete")
            return
        if self.start_node.prev is None:  # single-element list
            self.start_node = None
            return
        self.start_node = self.start_node.prev

    def delete_at_end(self):
        """Remove the tail element; prints a message when the list is empty."""
        if self.start_node is None:
            print("The list has no element to delete")
            return
        if self.start_node.prev is None:  # single-element list
            self.start_node = None
            return
        n = self.start_node
        while n.prev is not None:  # walk forward to the tail
            n = n.prev
        n.pref.prev = None  # detach the tail from its predecessor

    def count(self):
        """Return the number of elements, or None (with a message) if empty."""
        x = 0
        if self.start_node is None:
            print("List has no element")
            return
        else:
            n = self.start_node
            while n is not None:
                x += 1
                n = n.prev  # forward link
        return x
|
from tkinter import *
# Minimal Tkinter demo: two buttons placed on a grid.
window = Tk()

b1, b2 = (Button(window, text=label) for label in ("TaeYun", "60145171"))
b1.grid(row=0, column=0)
b2.grid(row=1, column=1)

# Blocks until the window is closed.
window.mainloop()
|
from selenium import webdriver
import os
def printCWD():
    """Print the current working directory to stdout."""
    print(os.getcwd())
def webDriverWithCustimizedDownloadLocaiton(downloadPath: str = "") -> webdriver:
    """Return a Chrome webdriver with a customized download directory.

    When *downloadPath* is empty (the default) the current working
    directory is used as the download folder.

    :param downloadPath: target download directory, or "" for the CWD
    :return: a configured Chrome webdriver instance
    """
    target_dir = downloadPath or os.getcwd()
    options = webdriver.ChromeOptions()
    options.add_experimental_option('prefs',
                                    {'download.default_directory': target_dir})
    return webdriver.Chrome(options=options)
# Manual smoke test: launching this module builds a driver with the
# default download location (the current working directory).
if __name__ == "__main__":
    webDriverWithCustimizedDownloadLocaiton()
|
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from bitcoin_acks.database.base import Base
from bitcoin_acks.models import PullRequests
from bitcoin_acks.models.users import Users
class Bounties(Base):
    """A bounty pledged against a pull request by a payer user."""

    __tablename__ = 'bounties'

    id = Column(String, primary_key=True)
    # Bounty size — presumably satoshis, per __repr__; confirm with writers.
    amount = Column(Integer)
    published_at = Column(DateTime(timezone=True), nullable=False)

    # Pull request the bounty is attached to (backref: PullRequests.bounties).
    pull_request_id = Column(String,
                             ForeignKey('pull_requests.id'))
    pull_request = relationship(PullRequests, backref='bounties')

    # User paying the bounty (backref: Users.bounties_payable). An explicit
    # primaryjoin is given since only this FK relates Bounties to Users here.
    payer_user_id = Column(String,
                           ForeignKey('users.id'))
    payer = relationship(Users,
                         primaryjoin=payer_user_id == Users.id,
                         backref='bounties_payable'
                         )

    def __repr__(self):
        return f'{self.amount} sats for {self.pull_request.title} #{self.pull_request.number}'
|
#!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that "else-if" conditions work.
"""
import TestGyp
# Drive the gyp "else-if" test project: build every program and check that
# each condition branch produced the expected output line.
test = TestGyp.TestGyp()
test.run_gyp('elseif.gyp')
test.build('elseif.gyp', test.ALL)

expected_outputs = (
    'first_if',
    'first_else_if',
    'second_else_if',
    'third_else_if',
    'last_else',
)
for index, expected in enumerate(expected_outputs):
    test.run_built_executable(
        'program%d' % index, stdout=expected + '\n')

# Verify that bad condition blocks fail at gyp time.
for bad_gyp in ('elseif_bad1.gyp', 'elseif_bad2.gyp', 'elseif_bad3.gyp'):
    test.run_gyp(bad_gyp, status=1, stderr=None)
test.pass_test()
|
# -*- coding: utf-8 -*-
import numpy as np
from itertools import product
from random import sample
from scipy.stats import dirichlet
from numpy.random import multinomial
def flatten(ls):
    """Concatenate an iterable of iterables into one flat list."""
    flat = []
    for sublist in ls:
        flat.extend(sublist)
    return flat
def random_index(n, p, x, minp=1):
    """
    n,p,x are all integer arguments
    n & p specify the row,column dimensions of a 2d array
    x is the desired number of index tuples to be returned
    minp is the minimum number of non-missing values per row

    Fixed for Python 3: xrange() no longer exists, and random.sample() no
    longer accepts a set (support removed in 3.11), so the candidate pool is
    kept as a list.
    """
    all_idx = list(product(range(n), range(p)))
    # Reserve `minp` random cells in each row that may never be selected,
    # guaranteeing every row keeps at least that many non-missing values.
    exclude_idx = {
        idx
        for i in range(n)
        for idx in sample(all_idx[p * i:p * i + p], minp)
    }
    candidates = [idx for idx in all_idx if idx not in exclude_idx]
    return sample(candidates, x)
def nansample(alpha, total_count):
    """Draw one Dirichlet-multinomial sample restricted to the non-NaN
    entries of `alpha`; NaN positions stay NaN in the returned vector."""
    out = np.full(alpha.shape[0], np.nan)
    mask = ~np.isnan(alpha)
    pvals = dirichlet.rvs(alpha[mask], size=1).reshape(mask.sum())
    out[mask] = multinomial(total_count, pvals=pvals)
    return out
def generate_fake_counts(alpha, total_count_range, n, n_nan):
    """Generate `n` rows of Dirichlet-multinomial counts from `alpha`,
    with `n_nan` randomly chosen alpha cells blanked out as NaN.

    total_count_range: (low, high) passed to np.random.randint for the
    per-row total count.
    """
    total_counts_per_row = np.random.randint(*total_count_range, size=n)
    p = alpha.shape[0]
    nan_idx = random_index(n, p, n_nan)
    alpha_tile = np.tile(alpha, (n, 1))
    for (i, j) in nan_idx:
        alpha_tile[i, j] = np.nan
    # list() is required on Python 3: map() is lazy there, and np.array of a
    # map object would yield a useless 0-d object array.
    return np.array(list(map(nansample, alpha_tile, total_counts_per_row)))
from __future__ import print_function # Python 2/3 compatibility
import boto3
import time
import csv
import sys
import string
import random
import threading
import glob
from multiprocessing import Queue
from lab_config import boto_args
queue = Queue()  # per-thread row counts, summed by the main thread

def import_csv(tableName, filename, thread_id):
    """Bulk-load one CSV log file into a DynamoDB table.

    Each source row is fanned out into `batch_size` synthetic items whose
    `requestid`/`PK` are made unique via the thread id and replica index.
    Posts the total number of items written to `queue` when finished.
    """
    dynamodb = boto3.resource(**boto_args)
    dynamodb_table = dynamodb.Table(tableName)
    count = 0
    batch_size = 250       # synthetic items generated per CSV row
    rows_of_file = 2000    # id-space stride per replica index
    rows_of_thread = batch_size * rows_of_file  # id-space stride per thread
    SHARDS = 10            # write-sharding factor for the GSI partition key
    time1 = time.time()
    with open(filename, 'r', encoding="utf-8") as csvfile:
        myreader = csv.reader(csvfile, delimiter=',')
        for row in myreader:
            # One batch_writer per source row: flushes up to batch_size puts.
            with dynamodb_table.batch_writer() as batch:
                for i in range(batch_size):
                    count += 1
                    newitem = {}
                    # Unique id: thread block + replica block + source row id.
                    newitem['requestid'] = (thread_id * rows_of_thread) + (i * rows_of_file) + int(row[0])
                    newitem['host'] = row[1]
                    newitem['date'] = row[2]
                    newitem['hourofday'] = int(row[3])
                    newitem['timezone'] = row[4]
                    newitem['method'] = row[5]
                    newitem['url'] = row[6]
                    newitem['responsecode'] = int(row[7])
                    newitem['bytessent'] = int(row[8])
                    newitem['useragent'] = row[9]
                    # Set primary keys
                    if tableName == "logfile_gsi_low":
                        # Low-cardinality GSI key: one partition per host.
                        newitem["GSI_1_PK"] = "host#{}".format(newitem['host'])
                    else:
                        # Sharded GSI key to spread write load across partitions.
                        newitem['GSI_1_PK'] = "shard#{}".format((newitem['requestid'] % SHARDS) + 1)
                    # Sort key: responsecode#date#hourofday (all still strings here).
                    newitem['GSI_1_SK'] = row[7] + "#" + row[2] + "#" + row[3]
                    newitem['PK'] = "request#{}".format((thread_id * rows_of_thread) + (i * rows_of_file) + int(row[0]))
                    batch.put_item(Item=newitem)
                    if count % 5000 == 0:
                        # Progress heartbeat: rows written and elapsed seconds.
                        time2 = time.time() - time1
                        print("thread_id: %s, row: %s, %s" % (thread_id, count, time2))
                        time1 = time.time()
    queue.put(count)
if __name__ == "__main__":
    args = sys.argv[1:]
    tableName = args[0]  # target DynamoDB table name (first CLI argument)
    thread_list = []
    begin_time = time.time()
    # BUGFIX https://github.com/boto/boto3/issues/1592
    boto3.resource(**boto_args)
    #
    files = glob.glob("./data/logfile_medium*.csv")
    thread_id = 0
    # One loader thread per source file.
    for f in files:
        print("starting thread for file: %s" % (f))
        thread = threading.Thread(target=import_csv, args=(tableName, f, thread_id))
        thread_list.append(thread)
        thread_id += 1
        # NOTE(review): this sleep runs during thread *creation*; the threads
        # only start in the loop below, so it does not stagger their starts.
        time.sleep(.2)
    # Start threads
    for thread in thread_list:
        thread.start()
    # Block main thread until all threads are finished
    for thread in thread_list:
        thread.join()
    totalcount = 0
    # Sum the per-thread item counts posted to the queue.
    for t in range(len(files)):
        totalcount = totalcount + queue.get()
    print('total rows %s in %s seconds' %(totalcount, time.time() - begin_time))
|
#!usr/bin/python
def spaces(line_length=35, subject=0, predicate=0):
    """Return the run of spaces that pads subject+predicate out to line_length."""
    remaining = line_length - subject - predicate
    return " " * remaining
|
# coding: utf-8
import functools
import bottle
import lglass.database
import lglass.web.helpers
obj_urlize = lglass.web.helpers.obj_urlize

class RegistryApp(lglass.web.helpers.BaseApp):
    """Bottle web application exposing whois-style registry object lookups."""

    DEFAULT_CONFIG = {
        "database": [
            "whois+lglass.database.file+file:.",
            "whois+lglass.database.cidr+cidr:",
            "whois+lglass.database.schema+schema:?types-include=person,aut-num"
        ]
    }

    def __init__(self, config=None):
        lglass.web.helpers.BaseApp.__init__(self, config=config)
        self.route("/search", "POST", self.handle_whois_query)
        self.route("/search/<query>", "GET", self.handle_whois_query)
        self.route("/objects", "GET", self.handle_show_object_types)
        self.route("/objects/<type>", "GET", self.handle_show_objects)
        self.route("/objects/<type>/<primary_key>", "GET", self.handle_show_object)
        self.route("/flush", "POST", self.handle_flush_cache)

    # Lazily-built database chain; materialized on first access (see property).
    _database = None

    @property
    def database(self):
        if self._database is None:
            # Route through the setter below so list/str configs are built.
            self.database = self.config.get("database", self.DEFAULT_CONFIG["database"])
        return self._database

    @database.setter
    def database(self, new_value):
        if isinstance(new_value, list):
            self._database = lglass.database.build_chain(new_value)
        elif isinstance(new_value, str):
            self._database = lglass.database.from_url(new_value)
        else:
            self._database = new_value

    def handle_whois_query(self, query=None):
        """Run a whois query: redirect on a unique hit, list multiple matches,
        404 when nothing is found."""
        if bottle.request.method == "POST":
            query = bottle.request.forms["query"]
        objects = self.database.find(query)
        if len(objects):
            if len(objects) > 1:
                return self.render_template("registry/whois_query.html", objects=objects)
            else:
                obj = objects.pop()
                bottle.redirect("/registry/objects/{}/{}".format(obj.real_type, obj_urlize(obj.real_primary_key)))
        else:
            bottle.abort(404, "Nothing found")

    def handle_show_object(self, type, primary_key):
        """Render one object, annotating each key with its inverse object type
        when the schema knows one."""
        try:
            obj = self.database.get(type, primary_key)
        except KeyError:
            bottle.abort(404, "Object not found")
        # BUGFIX: `schema` must be initialised before the try block.  Previously
        # a KeyError from self.database.schema() left it unbound and the later
        # `if schema is not None` raised NameError instead of rendering.
        schema = None
        try:
            schema = self.database.schema(obj.type)
        except KeyError:
            pass
        items = []
        for key, value in obj:
            if schema is not None:
                inverse = list(schema.find_inverse(self.database, key, value))
            else:
                inverse = []
            if inverse:
                inverse = inverse[0].type
            else:
                inverse = None
            items.append((key, value, inverse))
        return self.render_template("registry/show_object.html", items=items, object=obj)

    def handle_show_objects(self, type):
        """List all objects of one type, sorted by their (type, key) spec."""
        objects = [self.database.get(*spec) for spec in sorted(self.database.list())
                   if spec[0] == type]
        return self.render_template("registry/show_objects.html", objects=objects, type=type)

    def handle_show_object_types(self):
        """List every object type the database knows about."""
        types = sorted(self.database.object_types)
        return self.render_template("registry/show_object_types.html", types=types)

    def handle_flush_cache(self):
        """Flush the database cache when the backend supports it."""
        # BUGFIX: getattr without a default raises AttributeError when the
        # backend has no flush(); use a None default and test callability.
        flush = getattr(self.database, "flush", None)
        if callable(flush):
            flush()
|
import unittest
from test_case import test_baidu
from test_case import test_youdao
# Assemble the baidu/youdao cases into one suite and run it.
suite = unittest.TestSuite()
for case in (
    test_baidu.TestBaidu("test_baidu"),
    test_youdao.TestYouDao("test_youdao"),
):
    suite.addTest(case)

if __name__ == "__main__":
    runner = unittest.TextTestRunner()
    runner.run(suite)
class profiler:
    """
    Manages execution time data from various parts of the program. Value is a
    dictionary, where the key is the name of the calling part of the progam
    (or some other relevant label) and the value is the execution time. If the
    key isn't in the dictionary it will be added automatically.

    NOTE: `profiles` is a class-level attribute, so the store is shared by
    every profiler instance.
    """
    profiles = {}  # shared store: label -> execution time in seconds

    def __init__(self):
        pass

    def add(self, profile):
        """Merge a {label: seconds} mapping into the store.

        Malformed inputs are ignored on purpose (best-effort profiling must
        never crash the caller), but only the errors dict.update can actually
        raise are swallowed — a bare except also hid KeyboardInterrupt etc.
        """
        try:
            self.profiles.update(profile)
        except (TypeError, ValueError):
            pass

    def toString(self):
        """Render the collected timings, one 'label: N.NNNms' line each."""
        msg = ""
        for key in self.profiles:
            msg += "{}: {}ms\n".format(key, float('%.3f' %
                                       (self.profiles[key]*1000.0)))
        return msg
|
# -*- coding: utf-8 -*-
"""
A hamming-distance-based model for predicting long term rhythmic patterns
"""
from rhythm_hmm import Rhythm, makeRhythmSamples
import math
import numpy as np
from scipy.cluster.vq import vq, kmeans
from scipy.stats import binom
import pdb
class StructuredRhythm(Rhythm):
    """A Rhythm that also knows its bar length in ticks."""

    def __init__(self, ticksPerBar):
        super().__init__()
        self.ticksPerBar = ticksPerBar

    def bars(self):
        """Number of bars spanned by the timesteps (a partial bar counts)."""
        full, leftover = divmod(len(self.timesteps), self.ticksPerBar)
        return full + (1 if leftover else 0)
class RhythmDistanceModel:
    """Mixture-of-binomials model over inter-bar Hamming distances.

    For every ordered bar pair (i, j) it fits `clusterCount` binomial
    components (weights + success probabilities) over the normalized
    distance between bars i and j, via k-means initialisation and EM.
    """

    def __init__(self, barLen, barCount, clusterCount, partitions=None):
        self.partitions = partitions
        self.barCount = barCount
        self.barLen = barLen
        # weights/probs are indexed [barA][barB][cluster].
        self.weights = np.zeros((barCount,barCount,clusterCount))
        self.probs = np.zeros((barCount,barCount,clusterCount))
        self.clusterCount = clusterCount
        self.converged = False
        # Clamp bounds keep probabilities away from the degenerate 0/1 edges.
        self.minimumDistanceProb = 1/(self.barLen+1)
        self.maximumDistanceProb = 1 - self.minimumDistanceProb

    def train(self, rhythms, convergence=0.000001, maxIters=10000):
        """Fit the per-bar-pair mixtures to a corpus of equal-length rhythms."""
        for rhy in rhythms:
            assert len(rhy) == self.barCount*self.barLen, "Rhythms must correct number of measures and length"
        for i in range(self.barCount-1):
            for j in range(i+1,self.barCount):
                #pdb.set_trace()
                dists = [distance(r, i, j, self.barLen) for r in rhythms]
                alphas = [alphaDist(r, i, j, self.barLen) for r in rhythms]
                betas = [betaDist(r, i, j, self.barLen) for r in rhythms]
                # Initialise parameter estimates: normalize each distance into
                # [beta, alpha] and clamp away from the edges.
                ijDS = np.zeros(len(rhythms))
                for r in range(len(rhythms)):
                    if alphas[r] - betas[r] == 0:
                        ijDS[r] = 0
                    else:
                        ijDS[r] = (dists[r] - betas[r])/(alphas[r] - betas[r])
                        ijDS[r] = max(min(ijDS[r],self.maximumDistanceProb),self.minimumDistanceProb)
                centroids = kmeans(ijDS, self.clusterCount)[0]
                # TODO: Bit of a hack, but necessary in some form — kmeans may
                # return fewer centroids than requested; pad with the last one.
                while len(centroids) < self.clusterCount:
                    centroids = np.append(centroids, centroids[-1])
                code = vq(ijDS, centroids)[0]
                for k in range(self.clusterCount):
                    n = sum(c == k for c in code)
                    self.weights[i][j][k] = n / len(rhythms)
                    self.probs[i][j][k] = centroids[k]
                # Use iterative EM to refine parameters
                converged = False
                iters = 0
                while (not converged) and (iters < maxIters):
                    converged = True
                    iters += 1
                    # E-step: responsibility of each cluster for each rhythm.
                    clusterProbs = np.zeros((self.clusterCount,len(rhythms)))
                    for k in range(self.clusterCount):
                        for r in range(len(rhythms)):
                            # TODO: Not sure about using this; the paper says to
                            # use dist but I think it's a typo - it doesn't make
                            # that much sense otherwise
                            delta = dists[r] - betas[r]
                            clusterProbs[k][r] = (
                                self.weights[i][j][k] *
                                self.gradientBinomialDistanceProb(delta,alphas[r],betas[r],self.probs[i][j][k]))
                    # Normalize cluster probabilities s.t. the total prob
                    # across clusters for a given rhythm is 1
                    # BUGFIX: np.divide returns its result; the original threw
                    # it away, so the normalization never took effect.
                    clusterProbs = np.divide(clusterProbs, np.sum(clusterProbs,0))
                    # M-step: re-estimate each component's prob and weight.
                    for k in range(self.clusterCount):
                        numerator = 0.0
                        denominator = 0.0
                        for r in range(len(rhythms)):
                            numerator += (dists[r] - betas[r]) * clusterProbs[k][r]
                            denominator += (alphas[r] - betas[r]) * clusterProbs[k][r]
                        oldProb = self.probs[i][j][k]
                        oldWeight = self.weights[i][j][k]
                        if denominator == 0:
                            self.probs[i][j][k] = 0
                        else:
                            self.probs[i][j][k] = numerator/denominator
                            self.probs[i][j][k] = max(min(
                                self.probs[i][j][k],
                                self.maximumDistanceProb),
                                self.minimumDistanceProb)
                        self.weights[i][j][k] = np.sum(clusterProbs[k])/len(rhythms)
                        # Relative-change convergence test on both parameters.
                        if abs(self.probs[i][j][k]-oldProb)/self.probs[i][j][k] > convergence:
                            converged = False
                        if abs(self.weights[i][j][k]-oldWeight)/self.weights[i][j][k] > convergence:
                            converged = False
        # NOTE(review): records only the last (i, j) pair's convergence, and
        # raises NameError when barCount < 2 — confirm this is intended.
        self.converged = converged

    # Returns a log probability of "bar" succeeding "rhythm" according to this
    # model
    def score(self, rhythm, bar):
        """Log-probability that `bar` follows `rhythm` under the fitted model."""
        assert len(rhythm) % self.barLen == 0, "Rhythm length must be divisible by bar length"
        assert len(bar) == self.barLen, "Input bar has incorrect length"
        totalProb = 0.0
        combinedRhythm = np.concatenate([rhythm, bar])
        j = int(len(rhythm) / self.barLen)
        for i in range(j):
            dist = distance(combinedRhythm, i, j, self.barLen)
            alpha = alphaDist(combinedRhythm, i, j, self.barLen)
            beta = betaDist(combinedRhythm, i, j, self.barLen)
            delta = dist - beta
            iProb = 0.0
            for k in range(self.clusterCount):
                iProb += self.weights[i][j][k] * self.gradientBinomialDistanceProb(delta,alpha,beta,self.probs[i][j][k])
            totalProb += np.log(iProb)
        return totalProb

    # As binomialDistanceProb below, but adds a gradient to impossible distance
    # value probabilities, so that all probabilities are non-zero and "more
    # impossible" values have lower probability
    def gradientBinomialDistanceProb(self, delta, alpha, beta, prob):
        if alpha - beta == 0:
            if delta == 0:
                return 1
            else:
                # Decaying floor: farther-from-possible deltas score lower.
                return self.minimumDistanceProb**(1+delta)
        return max(min(
            binom.pmf(delta, alpha - beta, prob),
            self.maximumDistanceProb),
            self.minimumDistanceProb)
def generateNextBar(rdm, hmm, lam, rhythm, partitions=None):
    """Generate one bar continuing `rhythm` by sampling from the HMM and then
    hill-climbing each tick value to maximise hmm-score + lam * distance-score.

    rdm: trained RhythmDistanceModel; hmm: model with predict_proba/sample/
    score (hmmlearn-style interface — confirm); lam: distance-model weight.
    NOTE: hmm.startprob_ is temporarily overwritten and restored before return.
    """
    assert len(rhythm) % rdm.barLen == 0, "Rhythm length must be divisible by bar length"
    assert len(rhythm) < rdm.barLen * rdm.barCount, "Rhythm length must be less than distance model maximum"
    # Generate notes
    # TODO: Use predict_proba instead to achieve a more accurate range of results
    #startState = hmm.predict(rhythm)[-1]
    #startStateProbs = [0]*len(hmm.startprob_)
    #startStateProbs[startState] = 1.0
    startStateProbs = hmm.predict_proba(rhythm)[-1]
    tempProbs = hmm.startprob_
    hmm.startprob_ = startStateProbs
    startSymbol = hmm.sample(1)[0][0]
    # Sample barLen+1 symbols and drop the first (the seeded start symbol).
    barOut = np.concatenate(hmm.sample(rdm.barLen+1)[0])[1:]
    rhythmSteps = np.concatenate(rhythm)
    end = False
    # Coordinate-ascent: sweep the bar, trying each of the 3 tick values.
    while end == False:
        end = True
        for j in range(rdm.barLen):
            startVal = barOut[j]
            bestVal = 0
            bestScore = -np.inf
            for newVal in range(3):
                # NOTE(review): this is an alias, not a copy — each trial
                # mutates barOut directly; the final assignment below wins.
                newBar = barOut
                newBar[j] = newVal
                hmmScore = hmm.score(np.concatenate([startSymbol,newBar]).reshape(-1,1))
                distScore = rdm.score(rhythmSteps, newBar)
                newScore = hmmScore + (lam * distScore)
                if newScore > bestScore:
                    bestScore = newScore
                    bestVal = newVal
            barOut[j] = bestVal
            # Converge only when no values are changed
            if bestVal != startVal:
                end = False
    hmm.startprob_ = tempProbs
    return barOut
def makeTrackStructuredRhythm(track, ticksPerBar):
    """Enscribe a monophonic track as a StructuredRhythm.

    Timestep encoding: 0 = rest, 1 = note onset, 2 = note continuation.
    """
    assert track.isMonophonic(), "Only monophonic tracks can be enscribed"
    rhythm = StructuredRhythm(ticksPerBar)
    rhythm.timesteps = [0] * track.length
    note_idx = -1
    onset = 0
    release = 0
    for tick in range(track.length):
        # Advance to the next note once the current one has ended.
        if release <= tick:
            note_idx += 1
            note = track.notes[note_idx]
            onset = note.start
            release = note.start + note.duration
        if tick == onset:
            rhythm.timesteps[tick] = 1
        elif onset < tick < release:
            rhythm.timesteps[tick] = 2
    return rhythm
def distance(rhythm, barA, barB, ticksPerBar):
    """Hamming distance between two bars of a flat rhythm sequence."""
    offA = ticksPerBar * barA
    offB = ticksPerBar * barB
    return sum(
        1
        for i in range(ticksPerBar)
        if rhythm[offA + i] != rhythm[offB + i]
    )
def alphaDist(rhythm, barA, barB, ticksPerBar):
    """Bound on the A/B bar distance derived from earlier bars: the minimum
    over i < min(A, B) of d(lesser, i) + d(greater, i). Falls back to the
    direct distance when no earlier bar exists."""
    lesser, greater = min(barA, barB), max(barA, barB)
    if lesser == 0:
        return distance(rhythm, barA, barB, ticksPerBar)
    return min(
        distance(rhythm, lesser, i, ticksPerBar) + distance(rhythm, greater, i, ticksPerBar)
        for i in range(lesser)
    )
def betaDist(rhythm, barA, barB, ticksPerBar):
    """Bound on the A/B bar distance derived from earlier bars: the maximum
    over i < min(A, B) of |d(lesser, i) - d(greater, i)|. Falls back to the
    direct distance when no earlier bar exists."""
    lesser, greater = min(barA, barB), max(barA, barB)
    if lesser == 0:
        return distance(rhythm, barA, barB, ticksPerBar)
    return max(
        abs(distance(rhythm, lesser, i, ticksPerBar) - distance(rhythm, greater, i, ticksPerBar))
        for i in range(lesser)
    )
def binomialDistanceProb(delta, alpha, beta, prob):
    """Probability of normalized distance `delta` under Binomial(alpha-beta, prob).

    The alpha == beta case is degenerate: probability 1 at delta == 0, else 0.
    This causes a gradient of 0 among "impossible" distance values - making
    gradient ascent impossible. For cases where gradient ascent is needed,
    use gradientBinomialDistanceProb.
    """
    span = alpha - beta
    if span == 0:
        return 1 if delta == 0 else 0
    return binom.pmf(delta, span, prob)
|
import os,re,time,random,math,pprint
import numpy as np
def get_data(filename="data.dat", as_list=False):
    """Load a comma-delimited numeric file.

    Returns a numpy array, or nested Python lists when as_list is True.
    (BUGFIX: the original computed list(data) and discarded the result, so
    as_list had no effect.)
    """
    data = np.loadtxt(filename, delimiter=",")
    if as_list:
        return data.tolist()
    return data
def remove_incomplete_data(data):
    """Drop rows that are shorter than the longest row.

    Prints a warning when row lengths differ, plus a discard summary, and
    returns the filtered rows as a list. Empty input is returned unchanged.
    (BUGFIX: the original referenced an undefined name `sm`, used Python 2
    print statements, and returned the unfiltered `data` instead of the
    filtered result.)
    """
    if len(data) == 0:
        return data
    lengths = [len(row) for row in data]
    minLength = min(lengths)
    maxLength = max(lengths)
    if (minLength != maxLength):
        print("Warning: min-length: ", minLength, ", max-length: ", maxLength)
    previousLength = len(data)
    result = [row for row in data if len(row) == maxLength]
    print("Discarded entries: ", (previousLength - len(result)), " remaining: ", len(result))
    return result
def normalise_data_zero_means_std_deviation(data):
    """Column-wise standardisation: subtract the mean, divide by the std."""
    centred = data - np.mean(data, axis=0)
    return centred / np.std(data, axis=0)
def normalise_data_zero_to_one(data):
    """Column-wise rescaling into [0, 1]: shift by the min, divide by the
    shifted max."""
    shifted = data - np.min(data, axis=0)
    return shifted / np.max(shifted, axis=0)
def convert_to_place_data(data, indexes):
    """Extract (up to) the first 1000 rows of each requested column, returned
    as a list of 1-D arrays."""
    return [data[0:1000, i].T for i in indexes]
class DataPool(object):
    """
    Our pool of data.

    Wraps a 2-D array and exposes cached slices around a movable current
    timestep. Column layout implied by the slices below: column 0 is time,
    columns 1:9 are `s`, columns 9:11 are `m` — confirm against the data file.
    """

    def __init__(self, data):
        super(DataPool, self).__init__()
        self.data = data
        self.time_step = 100  # current row index into data
        self.window = 100     # rows returned by get_curr_data_as_vec
        self.time = None
        self.update()

    def step(self, t=None):
        """
        Increases the timestep or sets it to the passed value.

        BUGFIX: uses an explicit None check so step(0) jumps to index 0;
        the old truthiness test (`if t:`) treated 0 as "no argument" and
        incremented instead.
        """
        if t is not None:
            self.time_step = t
        else:
            self.time_step += 1

    def update(self):
        """
        Updates all cached values (s/m at t, t-1, t-2) from the current
        timestep.
        """
        self.s_t = self.data[self.time_step][1:9]
        self.s_tminus1 = self.data[self.time_step-1][1:9]
        self.s_tminus2 = self.data[self.time_step-2][1:9]
        self.m_t = self.data[self.time_step][9:11]
        self.m_tminus1 = self.data[self.time_step-1][9:11]
        self.m_tminus2 = self.data[self.time_step-2][9:11]
        self.time = self.data[self.time_step][0]

    def get_curr_data_as_vec(self):
        """
        Returns columns 1..10 of the last `window` rows (current row going
        backwards) concatenated into one flat vector.
        """
        data = np.array([])
        t = 0
        while t < self.window:
            data = np.hstack((data, self.data[self.time_step-t][1:11]))
            t += 1
        return data

    def print_curr(self):
        """Dump the cached values (converted to Python 3 print calls)."""
        print("simulation time:", self.time)
        print("s_t:", self.s_t)
        print("s_tminus1:", self.s_tminus1)
        print("s_tminus12", self.s_tminus2)
        print("m_t:", self.m_t)
        print("m_tminus1:", self.m_tminus1)
        print("m_tminus2", self.m_tminus2)
def normalise_features(X):
    """
    Normalise our features by subtracting the column mean and dividing by the
    column standard deviation.

    Returns [N, mean, std]: the standardised copy of X plus per-column mean
    and std lists.
    (BUGFIX: the original depended on `copy`, which is never imported here,
    and on `get_columns_as_list`, which is not defined in this module.)
    """
    columns = len(X[0])
    N = [list(row) for row in X]  # row-wise copy so X itself is untouched
    mean = []
    std = []
    for column in range(0, columns):
        x_column = [row[column] for row in X]
        mean.append(np.mean(x_column))
        std.append(np.std(x_column))
        for i, value in enumerate(x_column):
            N[i][column] = (value - mean[column]) / std[column]
    return [N, mean, std]
if __name__ == '__main__':
    # Smoke test: load data.dat and show the current pool state.
    data = get_data()
    pool = DataPool(data)
    pool.print_curr()
|
# Read a line of whitespace-separated integers and print their negations.
string = input()
list_1 = string.split()
list_2 = [-int(token) for token in list_1]
print(list_2)
# Interactive console workflow for cataloguing tests by cognitive domain.
# Each section prompts for test names and their score type, collecting them
# into a per-domain dict; an edit loop then allows appending to any domain
# before the combined result is pickled to test_lists.p.

# Designate the Domains and Score types
domains = {
    1: "Language",
    2: "Spatial",
    3: "Motor",
    4: "Attention",
    5: "Executive Function"
}
score_types = {
    1: "Standard Score",
    2: "Scaled Score",
    3: "T-Score",
    4: "Z-Score"
}

# Building Dictionaries
x = 1

# Language
language_tests = {}
domain = domains[x]
print("Press 1 to enter a new", domain, "test.")
print("When you have added all the", domain, "tests administered, press 0.")
response1 = int(input("Enter: "))
print(response1)
print()
if (response1 > 1):
    print("ERROR!")
    response1 = int(input("Enter: "))
    print(response1)
    print()
while (response1 == 1):
    test = str(input("Enter Test Name: "))
    print(test)
    print()
    print("Find the number associated with the type of score you will be entering for this test.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(score_types)
    print()
    scoretype = int(input("Enter Score Type: "))
    print(score_types[scoretype])
    print()
    language_tests[test] = scoretype
    response1 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
    print()
    if (response1 > 1):
        print("ERROR!")
        response1 = int(input("Enter: "))
        print(response1)
        print()
print()
print(domain)
print(language_tests)
print()
print()
x += 1

# Spatial
spatial_tests = {}
domain = domains[x]
print("Press 1 to enter a new", domain, "test.")
print("When you have added all the", domain, "tests administered, press 0.")
response2 = int(input("Enter: "))
print(response2)
print()
if (response2 > 1):
    print("ERROR!")
    response2 = int(input("Enter: "))
    print(response2)
    print()
while (response2 == 1):
    test = str(input("Enter Test Name: "))
    print(test)
    print()
    print("Find the number associated with the type of score you will be entering for this test.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(score_types)
    print()
    scoretype = int(input("Enter Score Type: "))
    print(score_types[scoretype])
    print()
    spatial_tests[test] = scoretype
    response2 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
    print()
    if (response2 > 1):
        print("ERROR!")
        response2 = int(input("Enter: "))
        print(response2)
        print()
print()
print(domain)
print(spatial_tests)
print()
print()
x += 1

# Motor
motor_tests = {}
domain = domains[x]
print("Press 1 to enter a new", domain, "test.")
print("When you have added all the", domain, "tests administered, press 0.")
response3 = int(input("Enter: "))
print(response3)
print()
if (response3 > 1):
    print("ERROR!")
    response3 = int(input("Enter: "))
    print(response3)
    print()
while (response3 == 1):
    test = str(input("Enter Test Name: "))
    print(test)
    print()
    print("Find the number associated with the type of score you will be entering for this test.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(score_types)
    print()
    scoretype = int(input("Enter Score Type: "))
    print(scoretype)
    print()
    motor_tests[test] = scoretype
    print(score_types[scoretype])
    print()
    response3 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
    print()
    if (response3 > 1):
        print("ERROR!")
        response3 = int(input("Enter: "))
        print(response3)
        print()
print()
print(domain)
print(motor_tests)
print()
print()
x += 1

# Attention
attention_tests = {}
domain = domains[x]
print("Press 1 to enter a new", domain, "test.")
print("When you have added all the", domain, "tests administered, press 0.")
response4 = int(input("Enter: "))
print(response4)
print()
if (response4 > 1):
    print("ERROR!")
    response4 = int(input("Enter: "))
    print(response4)
    print()
while (response4 == 1):
    test = str(input("Enter Test Name: "))
    print(test)
    print()
    print("Find the number associated with the type of score you will be entering for this test.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(score_types)
    print()
    scoretype = int(input("Enter Score Type: "))
    print(score_types[scoretype])
    print()
    # BUGFIX: was `spatial_tests[test] = scoretype` (copy/paste slip); the
    # Attention summary below prints attention_tests, which stayed empty.
    attention_tests[test] = scoretype
    response4 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
    print()
    if (response4 > 1):
        print("ERROR!")
        response4 = int(input("Enter: "))
        print(response4)
        print()
print()
print(domain)
print(attention_tests)
print()
print()
x += 1

# Executive Function
executive_tests = {}
domain = domains[x]
print("Press 1 to enter a new", domain, "test.")
print("When you have added all the", domain, "tests administered, press 0.")
response5 = int(input("Enter: "))
print(response5)
print()
if (response5 > 1):
    print("ERROR!")
    response5 = int(input("Enter: "))
    print(response5)
    print()
while (response5 == 1):
    test = str(input("Enter Test Name: "))
    print(test)
    print()
    print("Find the number associated with the type of score you will be entering for this test.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(score_types)
    print()
    scoretype = int(input("Enter Score Type: "))
    executive_tests[test] = scoretype
    print(score_types[scoretype])
    print()
    response5 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
    print()
    if (response5 > 1):
        print("ERROR!")
        response5 = int(input("Enter: "))
        print(response5)
        print()
print()
print(domain)
print(executive_tests)
print()
print()

# Allow for more tests to be added
print("Do you need to add any additional tests?")
edits = int(input("Press 1 for yes. Press 0 for no."))
print(edits)
print()
if (edits > 1):
    print("ERROR!")
    edits = int(input("Enter: "))
    print(edits)
    print()
while (edits == 1):
    print("Find the number associated with the domain you wish to edit.")
    print("You will enter the NUMBER below. Not the name.")
    print()
    print(domains)
    print()
    edit_domain = int(input("Enter Domain: "))
    if (edit_domain == 1):
        domain = domains[1]
        print("Press 1 to enter a new", domain, "test.")
        print("When you have added all the", domain, "tests administered, press 0.")
        response1 = int(input("Enter: "))
        print(response1)
        print()
        if (response1 > 1):
            print("ERROR!")
            response1 = int(input("Enter: "))
            print(response1)
            print()
        while (response1 == 1):
            test = str(input("Enter Test Name: "))
            print(test)
            print()
            print("Find the number associated with the type of score you will be entering for this test.")
            print("You will enter the NUMBER below. Not the name.")
            print()
            print(score_types)
            print()
            scoretype = int(input("Enter Score Type: "))
            print(score_types[scoretype])
            print()
            language_tests[test] = scoretype
            response1 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
            print()
            # BUGFIX: this retry block checked/read response5 (the Executive
            # counter) instead of response1, so bad input here went unhandled.
            if (response1 > 1):
                print("ERROR!")
                response1 = int(input("Enter: "))
                print(response1)
                print()
        print()
        print(domain)
        print(language_tests)
        print()
        print()
        print("Do you need to add any additional tests?")
        edits = int(input("Press 1 for yes. Press 0 for no."))
        print(edits)
        print()
    elif (edit_domain == 2):
        domain = domains[2]
        print("Press 1 to enter a new", domain, "test.")
        print("When you have added all the", domain, "tests administered, press 0.")
        response2 = int(input("Enter: "))
        print(response2)
        print()
        if (response2 > 1):
            print("ERROR!")
            response2 = int(input("Enter: "))
            print(response2)
            print()
        while (response2 == 1):
            test = str(input("Enter Test Name: "))
            print(test)
            print()
            print("Find the number associated with the type of score you will be entering for this test.")
            print("You will enter the NUMBER below. Not the name.")
            print()
            print(score_types)
            print()
            scoretype = int(input("Enter Score Type: "))
            print(score_types[scoretype])
            print()
            spatial_tests[test] = scoretype
            response2 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
            print()
            if (response2 > 1):
                print("ERROR!")
                response2 = int(input("Enter: "))
                print(response2)
                print()
        print()
        print(domain)
        print(spatial_tests)
        print()
        print()
        print("Do you need to add any additional tests?")
        edits = int(input("Press 1 for yes. Press 0 for no."))
        print(edits)
        print()
    elif (edit_domain == 3):
        domain = domains[3]
        print("Press 1 to enter a new", domain, "test.")
        print("When you have added all the", domain, "tests administered, press 0.")
        response3 = int(input("Enter: "))
        print(response3)
        print()
        if (response3 > 1):
            print("ERROR!")
            response3 = int(input("Enter: "))
            print(response3)
            print()
        while (response3 == 1):
            test = str(input("Enter Test Name: "))
            print(test)
            print()
            print("Find the number associated with the type of score you will be entering for this test.")
            print("You will enter the NUMBER below. Not the name.")
            print()
            print(score_types)
            print()
            scoretype = int(input("Enter Score Type: "))
            print(scoretype)
            print()
            motor_tests[test] = scoretype
            print(score_types[scoretype])
            print()
            response3 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
            print()
            if (response3 > 1):
                print("ERROR!")
                response3 = int(input("Enter: "))
                print(response3)
                print()
        print()
        print(domain)
        print(motor_tests)
        print()
        print()
        print("Do you need to add any additional tests?")
        edits = int(input("Press 1 for yes. Press 0 for no."))
        print(edits)
        print()
    elif (edit_domain == 4):
        domain = domains[4]
        print("Press 1 to enter a new", domain, "test.")
        print("When you have added all the", domain, "tests administered, press 0.")
        response4 = int(input("Enter: "))
        print(response4)
        print()
        if (response4 > 1):
            print("ERROR!")
            response4 = int(input("Enter: "))
            print(response4)
            print()
        while (response4 == 1):
            test = str(input("Enter Test Name: "))
            print(test)
            print()
            print("Find the number associated with the type of score you will be entering for this test.")
            print("You will enter the NUMBER below. Not the name.")
            print()
            print(score_types)
            print()
            scoretype = int(input("Enter Score Type: "))
            print(score_types[scoretype])
            print()
            # BUGFIX: was `spatial_tests[test] = scoretype`; the summary below
            # prints attention_tests, which this branch never populated.
            attention_tests[test] = scoretype
            response4 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
            print()
            if (response4 > 1):
                print("ERROR!")
                response4 = int(input("Enter: "))
                print(response4)
                print()
        print()
        print(domain)
        print(attention_tests)
        print()
        print()
        print("Do you need to add any additional tests?")
        edits = int(input("Press 1 for yes. Press 0 for no."))
        print(edits)
        print()
    elif (edit_domain == 5):
        domain = domains[5]
        print("Press 1 to enter a new", domain, "test.")
        print("When you have added all the", domain, "tests administered, press 0.")
        response5 = int(input("Enter: "))
        print(response5)
        print()
        if (response5 > 1):
            print("ERROR!")
            response5 = int(input("Enter: "))
            print(response5)
            print()
        while (response5 == 1):
            test = str(input("Enter Test Name: "))
            print(test)
            print()
            print("Find the number associated with the type of score you will be entering for this test.")
            print("You will enter the NUMBER below. Not the name.")
            print()
            print(score_types)
            print()
            scoretype = int(input("Enter Score Type: "))
            executive_tests[test] = scoretype
            print(score_types[scoretype])
            print()
            response5 = int(input("Press 1 to enter another test. Press 0 to proceed to the next domain."))
            print()
            if (response5 > 1):
                print("ERROR!")
                response5 = int(input("Enter: "))
                print(response5)
                print()
        print()
        print(domain)
        print(executive_tests)
        print()
        print()
        print("Do you need to add any additional tests?")
        edits = int(input("Press 1 for yes. Press 0 for no."))
        print(edits)
        print()
    else:
        print("ERROR!")
        print()
        print("Find the number associated with the domain you wish to edit.")
        print("You will enter the NUMBER below. Not the name.")
        print()
        print(domains)
        print()
        # NOTE(review): this value is discarded — the loop re-prompts for the
        # domain at the top of the next iteration.
        edit_domain = int(input("Enter Domain: "))
print("Test Entry Complete.")

# Make new dict
d = {
    1: language_tests,
    2: spatial_tests,
    3: motor_tests,
    4: attention_tests,
    5: executive_tests
}
print(d)

# save dict in pickle format as test_lists.p
import pickle
filename = "test_lists.p"
# `with` guarantees the file is closed even if pickling fails.
with open(filename, "wb") as outfile:
    pickle.dump(d, outfile)
|
#Created on September 16, 2014
#@author: rspies
# Python 2.7
# This script parses through the HTML reports created by CHPS and outputs a
# csv file with summary statistics
import os
from bs4 import BeautifulSoup
import csv
# Run from two levels up so the Calibration_NWS tree resolves relative to cwd.
os.chdir('../..')
############################## User Input ###################################
RFC = 'MBRFC_FY2017'
fx_group = '' # leave blank if not processing by fx group
sim_type = 'final-CalibrationPeriod' # specific to type of simulation: 'initial' or 'draft' or 'final' and 'CalibrationPeriod' or 'ValidationPeriod'
variables = ['local', 'outflow', 'inflow'] # choices: 'local', 'outflow', 'inflow'
# Directory layout: <cwd>/Calibration_NWS/<first 5 chars of RFC>/<RFC>/Calibration_TimeSeries[/<fx_group>]
if fx_group == '':
    maindir = os.getcwd() + os.sep + 'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'Calibration_TimeSeries'
else:
    maindir = os.getcwd() + os.sep + 'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'Calibration_TimeSeries' + os.sep + fx_group
html_folder = maindir + os.sep + 'statqme_html_reports' + os.sep + 'statqme-' + sim_type + os.sep
########################### End User Input ##################################
# For each variable type, scrape every basin's statqme HTML report and write
# monthly bias/RMS, daily fit, simulation-period and flow-interval CSVs.
for variable in variables:
    print 'Processing -> ' + variable + ' for ' + RFC + ' --> ' + fx_group
    #html_folder_inflow = maindir + '\\statqme_inflow_reports\\final_inflow\\'
    #html_folder_output = maindir + '\\statqme_output_reports\\'
    months = ['Basin','October','November','December','January','February','March','April','May','June','July','August','September','Year Avg.']
    abv_months = ['Basin','Oct','Nov','Dec','Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Year Avg.']
    basin_fit_summary ={}
    output_csv_dir = maindir + os.sep + 'statqme_' + variable + '_csv' + os.sep + sim_type + os.sep
    if os.path.exists(output_csv_dir) == False:
        os.makedirs(output_csv_dir)
    ###################### Output Files #####################################
    ##### monthly stats new csv file ####
    # 'wb' mode is required by the Python 2 csv module.
    month_csv_pbias = open(output_csv_dir + variable + '_monthly_mean_pbias_' + sim_type + '.csv','wb')
    month_csv_bias = open(output_csv_dir + variable + '_monthly_mean_bias_' + sim_type + '.csv','wb')
    month_csv_rms = open(output_csv_dir + variable + '_monthly_mean_percent_daily_rms_error_' + sim_type + '.csv','wb')
    mpbias = csv.writer(month_csv_pbias)
    mbias = csv.writer(month_csv_bias)
    mrms = csv.writer(month_csv_rms)
    mpbias.writerow(['Mean Monthly % Bias'])
    mbias.writerow(['Mean Monthly Bias (CMSD)'])
    mrms.writerow(['Monthly % Daily RMS Error'])
    mpbias.writerow(abv_months)
    mbias.writerow(abv_months)
    mrms.writerow(abv_months)
    ##### daily fit stats new csv ####
    basin_fit_stats = open(output_csv_dir + variable + '_all_fit_stats_' + sim_type + '.csv','wb')
    basin_fit = csv.writer(basin_fit_stats)
    basin_fit.writerow(['MULTI-YEAR STATISTICAL SUMMARY'])
    basin_fit.writerow(['Basin','Correlation Coef', 'Daily RMS Error (CMSD)', 'Daily Absolute Error (CMSD)'])
    ##### complete simulation period summary ####
    basin_all_sim_stats = open(output_csv_dir + variable + '_all_sim_stats_' + sim_type + '.csv','wb')
    basin_all_sim = csv.writer(basin_all_sim_stats)
    basin_all_sim.writerow(['SIMULATION PERIOD STATISTICAL SUMMARY'])
    basin_all_sim.writerow(['Basin','Mean SQME (CMSD)','Mean QME (CMSD)','Mean % Bias','Mean Bias (CMSD)','Correlation Coef', 'Daily RMS Error (CMSD)', 'Daily Absolute Error (CMSD)','Daily Data Points'])
    ##### flow interval stats new csv file ####
    output_flow_dir = output_csv_dir + os.sep + 'flow_categories' + os.sep
    if os.path.exists(output_flow_dir) == False:
        os.makedirs(output_flow_dir)
    flow_cat_new = open(output_flow_dir + '_all_basins_' + variable + '_flow_categories_stats_' + sim_type + '.csv','wb')
    flow_cat = csv.writer(flow_cat_new)
    ########################################################################
    ignore_list = ['PCFM7','LK2F1','OLNF1','TREF1']
    html_files = {}
    # finds both inflow, outflow, and local html reports individually
    basins = []
    for each in sorted([f for f in os.listdir(html_folder) if f.endswith('.html')]):
        # NOTE(review): strip('.html') strips any of the characters
        # {. h t m l} from BOTH ends, not just the ".html" suffix -- a
        # basin id starting/ending with one of those letters gets mangled.
        basin_name = each.split('_')[-1].strip('.html')
        name = each
        if variable == 'outflow' and basin_name not in ignore_list and 'output' in name and 'inflow' not in name:
            basins.append(basin_name); html_files[basin_name]=name
        if variable == 'inflow' and basin_name not in ignore_list and 'inflow' in name:
            basins.append(basin_name); html_files[basin_name]=name
        if variable == 'local' and basin_name not in ignore_list and 'local' in name:
            basins.append(basin_name); html_files[basin_name]=name
    print basins
    for basin in basins:
        print basin
        html_file = open(html_folder + os.sep + html_files[basin],'r')
        print html_files[basin]
        #test_file = open(maindir + '\\temp.txt','w')
        month_data = {'October':[],'November':[],'December':[],'January':[],'February':[],'March':[],
                      'April':[],'May':[],'June':[],'July':[],'August':[],'September':[],'Year Avg.':[]}
        soup = BeautifulSoup(html_file, "lxml") # read in html file # updated with lxml tag based on warning in console
        #test_file.write(soup.prettify())
        ############### Monthly Stats - All Years #####################
        # SQME River Discharge Simulated Mean CMSD, QME River Discharge Observed Mean CMSD, % BIAS, MONTHLY BIAS CMSD,
        # MAXIMUM ERROR CMSD, % AVERAGE ABSOLUTE ERROR, % DAILY RMS ERROR
        month_check = ''; count = 1 # script outputs the desired variable twice (only take odd index)
        for child in soup.find(id="tableStyle4_scrollable").descendants:
            each = str(child.string).strip()
            #print repr(each)
            # A month name switches the collection target; subsequent cells
            # belong to that month until the next name appears.
            if each in months:
                month_check = each
            if month_check != '' and each != month_check:
                if count % 2 ==1:
                    month_data[month_check].append(each)
                count += 1
        ### Seperate section for year averge stats ###
        month_check = ''; count = 1
        for child in soup.find(id="tableStyle5_scrollable").descendants:
            each = str(child.string).strip()
            #print repr(each)
            if each in months or each == 'Year Avg.':
                month_check = each
            if month_check != '' and each != month_check:
                if count % 2 ==1:
                    month_data[month_check].append(each)
                count += 1
        #for child in soup.find(id="tableStyle4_scrollable").descendants:
        #    print child
        #    print(child.string)
        ############### Fit Statistics - All Years #####################
        # DAILY RMS ERROR CMSD, DAILY ABSOLUTE ERROR CMSD, CORRELATION COEF,
        # LINE OF BEST FIT A, LINE OF BEST FIT B
        count = 1
        for child in soup.find(id="tableStyle3_scrollable").descendants:
            each = str(child.string).strip()
            #print repr(each)
            if each != 'None' and each != 'Year Avg':
                if count % 2 ==1 and each != '':
                    if basin in basin_fit_summary:
                        basin_fit_summary[basin].append(each)
                    else:
                        basin_fit_summary[basin]=[each]
                count += 1
        ################### Flow Interval Statistics ######################
        #### interval values ####
        count = 1
        # One list per flow category row ('a' = lowest interval .. 'g').
        flow_int = {'a':[],'b':[],'c':[],'d':[],'e':[],'f':[],'g':[]}
        cats = ['a','b','c','d','e','f','g']
        for child in soup.find(id="tableStyle7").descendants:
            each = str(child.string).strip()
            if each != 'None' and each != 'FLOW INTERVAL' and each != 'CMSD' and each != '' and each != '-':
                #print str(count) + ' : ' + each
                # The odd/even counter and the hard-coded index windows below
                # mirror the fixed layout of the report table.
                if count % 2 ==1:
                    if count <= 4:
                        flow_int['a'].append(each)
                    if count > 4 and count <=8:
                        flow_int['b'].append(each)
                    if count > 8 and count <=12:
                        flow_int['c'].append(each)
                    if count > 12 and count <=16:
                        flow_int['d'].append(each)
                    if count > 16 and count <=20:
                        flow_int['e'].append(each)
                    if count > 20 and count <=24:
                        flow_int['f'].append(each)
                    if count > 24 and count <=26:
                        flow_int['g'].append(each)
                        # 'g' (top interval) has no upper bound in the report.
                        flow_int['g'].append(' - ')
                count += 1
        #### statistics ####
        # number of cases, sqme river discharge simulated mean (cmsd), QME river discharge obs mean (cmsd),
        # % bias, bias (sim-obs) mm, max error (cmsd), % avg absolute error, % daily rms error
        count = 1
        check_missing = [15,31,47,63,79,95,111] #index of "# of cases" to look for missing rows
        for child in soup.find(id="tableStyle6").descendants:
            #print child.string
            if str(child.string) == ' ': #check for missing values in flow cat
                each = '--'
            else:
                each = str(child.string).strip()
            if each != 'None' and each != 'FLOW INTERVAL' and each != 'CMSD' and each != '' and each != '\n' and each != '-':
                if count % 2 ==1:
                    if count > 13 and count <=29:
                        flow_int['a'].append(each)
                    if count > 29 and count <=45:
                        flow_int['b'].append(each)
                    if count > 45 and count <=61:
                        flow_int['c'].append(each)
                    if count > 61 and count <=77:
                        flow_int['d'].append(each)
                    if count > 77 and count <=93:
                        flow_int['e'].append(each)
                    if count > 93 and count <=109:
                        flow_int['f'].append(each)
                    if count > 109 and count <=125:
                        flow_int['g'].append(each)
                    #print str(count) + ' : ' + each
                count += 1
            #print str(count) + ' ' + str(child.string) + ' ' + each
            #if count in check_missing and str(each) == '0':
            #    count+=14
        if basin in basin_fit_summary: # check that statistics data is available for basin (no data if no qme)
            dpoints = 0
            flow_cat_basin = open(output_csv_dir + os.sep + 'flow_categories' + os.sep + basin + '_' + variable + '_flow_categories_stats_' + sim_type + '.csv','wb')
            flow_cat_b = csv.writer(flow_cat_basin)
            flow_cat.writerow(''); flow_cat_b.writerow('')
            flow_cat.writerow([basin +' Flow Interval Statistics']);flow_cat_b.writerow([basin +' Flow Interval Statistics'])
            flow_cat.writerow(['From','To','Number of Cases','SQME River Discharge Simulated Mean (CMSD)','QME River Discharge Observed Mean (CMSD)','% Bias','Bias (Sim-Obs) MM','Maximum Error (CMSD)','Percent Average Absolute Error','Percent Daily RMS Error'])
            flow_cat_b.writerow(['From','To','Number of Cases','SQME River Discharge Simulated Mean (CMSD)','QME River Discharge Observed Mean (CMSD)','% Bias','Bias (Sim-Obs) MM','Maximum Error (CMSD)','Percent Average Absolute Error','Percent Daily RMS Error'])
            for cat in cats:
                data = []
                ### calculated number of data points
                # NOTE(review): missing counts are stored as '--' above, but
                # this guards against ' ' -- int('--') would raise; confirm
                # which placeholder actually occurs in index 2.
                if flow_int[cat][2] != ' ':
                    dpoints = dpoints + int(flow_int[cat][2])
                for each in flow_int[cat]:
                    data.append(each)
                flow_cat.writerow(data)
                flow_cat_b.writerow(data)
            flow_cat_basin.close()
            ################ Write data to CSV file #################################
            mpbias.writerow([basin, month_data['October'][2],month_data['November'][2],month_data['December'][2],month_data['January'][2],month_data['February'][2],
                             month_data['March'][2],month_data['April'][2],month_data['May'][2],month_data['June'][2],month_data['July'][2],month_data['August'][2],month_data['September'][2],month_data['Year Avg.'][2]])
            mbias.writerow([basin, month_data['October'][3],month_data['November'][3],month_data['December'][3],month_data['January'][3],month_data['February'][3],
                            month_data['March'][3],month_data['April'][3],month_data['May'][3],month_data['June'][3],month_data['July'][3],month_data['August'][3],month_data['September'][3],month_data['Year Avg.'][3]])
            mrms.writerow([basin, month_data['October'][6],month_data['November'][6],month_data['December'][6],month_data['January'][6],month_data['February'][6],
                           month_data['March'][6],month_data['April'][6],month_data['May'][6],month_data['June'][6],month_data['July'][6],month_data['August'][6],month_data['September'][6],month_data['Year Avg.'][6]])
            basin_fit.writerow([basin, basin_fit_summary[basin][2], basin_fit_summary[basin][0], basin_fit_summary[basin][1]])
            basin_all_sim.writerow([basin, month_data['Year Avg.'][0], month_data['Year Avg.'][1], month_data['Year Avg.'][2], month_data['Year Avg.'][3], basin_fit_summary[basin][2], basin_fit_summary[basin][0], basin_fit_summary[basin][1], str(dpoints)])
            # NOTE(review): html_file is only closed on this branch; the
            # 'no data' branch below leaks the handle.
            html_file.close()
        else:
            print 'No data for basin: ' + basin
    # Close this variable's CSV outputs before starting the next variable.
    flow_cat_new.close()
    month_csv_pbias.close()
    month_csv_bias.close()
    month_csv_rms.close()
    basin_fit_stats.close()
    basin_all_sim_stats.close()
print 'Completed!!'
"""
Bonus items module.
@author: Jason Cohen
@author: Shaun Hamelin-Owens
@author: Sasithra Thanabalan
@author: Andrew Walker
"""
# Imports
from PointItem import PointItem
from display import DrawingGenerics
# Constants
# An active bonus item disappears after nine seconds' worth of game cycles.
DISAPPEAR_CYCLES = 9 * DrawingGenerics.CYCLES_PER_SECOND
class BonusItem(PointItem):
    """
    A bonus item that appears on the board for a limited time.

    Inherits from PointItem. The item tracks whether it is currently
    active (drawn and edible) and automatically hides itself once it has
    been active for DISAPPEAR_CYCLES game cycles.
    """

    def __init__(self, gameCanvas, specs):
        """
        Create an inactive bonus item.

        @param gameCanvas: canvas this item belongs to
        @param specs: coordinates, radius, color, tag and points associated
                      with this bonus item ('points' is passed to PointItem)
        """
        super(BonusItem, self).__init__(gameCanvas, specs, specs['points'])
        # True while the bonus item is visible and edible
        self.active = False
        # Number of cycles elapsed since the item became active
        self.activeCycles = 0

    def process(self):
        """Advance one game cycle; deactivate once the lifetime expires."""
        if not self.active:
            return
        self.activeCycles += 1
        if self.activeCycles >= DISAPPEAR_CYCLES:
            self.activeCycles = 0
            self.deactivate()

    def isActive(self):
        """Return True while the bonus item is active."""
        return self.active

    def activate(self):
        """Activate the item and draw it; no-op when already active."""
        if self.active:
            return
        self.active = True
        self.draw()
        self.activeCycles = 0

    def deactivate(self):
        """Deactivate the item and erase it; no-op when already inactive."""
        if self.active:
            self.active = False
            self.deleteDrawing()
|
# -*- coding: utf-8 -*-
'''
Obtener schedules a partir de una rango de fechas
@author Ivan
@example python3 getLogsForSchedulesByRange.py userId date1 date2
@example python3 getLogsForSchedulesByRange.py e43e5ded-e271-4422-8e85-9f1bc0a61235 14/04/2015 15/04/2015
'''
import sys
sys.path.insert(0, '../../python')
import inject
import logging
import datetime
import dateutil
import asyncio
from asyncio import coroutine
from autobahn.asyncio.wamp import ApplicationSession
from model.config import Config
from model.systems.assistance.schedule import ScheduleData
from collections import OrderedDict
''' configure the injector and the logger '''
#logging.getLogger().setLevel(logging.DEBUG)
def config_injector(binder):
    # Bind the Config singleton to the server configuration file.
    binder.bind(Config, Config('server-config.cfg'))
inject.configure(config_injector)
config = inject.instance(Config)
# NOTE(review): the module docstring documents argv as (userId, date1, date2),
# yet argv[1] is read here as 'sid' and argv[2] as 'userId', while onJoin()
# below re-reads argv[1] as userId -- these two module-level values appear
# unused; confirm before relying on them.
sid = sys.argv[1]
userId = sys.argv[2]
class WampMain(ApplicationSession):
    # WAMP session that fetches schedule logs for one user over a date range
    # and prints them to stdout in a CSV-like format.
    def __init__(self, config=None):
        #logging.debug('instanciando')
        ApplicationSession.__init__(self, config)
        # Server configuration resolved through the inject container.
        self.serverConfig = inject.instance(Config)

    @coroutine
    def onJoin(self, details):
        logging.debug('********** getLogsForSchedulesByDate **********')
        # Command-line arguments: userId, start date, end date (dd/mm/YYYY).
        userId = sys.argv[1]
        dateParamStart = sys.argv[2]
        dateParamEnd = sys.argv[3]
        dateStart = datetime.datetime.strptime(dateParamStart, "%d/%m/%Y").date()
        dateEnd = datetime.datetime.strptime(dateParamEnd, "%d/%m/%Y").date()
        date = dateStart
        logsByDate = OrderedDict()
        # NOTE(review): iterates from dateStart up to but NOT including
        # dateEnd -- confirm the end date is meant to be exclusive.
        while date != dateEnd:
            schedules = yield from self.call('assistance.getSchedulesByDate', userId, date)
            logs = yield from self.call('assistance.getLogsForSchedulesByDate', schedules, date)
            # Only record days that actually have log entries.
            if(len(logs)):
                logsByDate[date.strftime('%d/%m/%Y')] = logs
            date = date + datetime.timedelta(days=1)
        if(len(logsByDate)):
            print("scheduleDate, log, user_id")
            for date in logsByDate:
                for log in logsByDate[date]:
                    print(date + ", " + log["log"] + ", " + log["userId"])
        # Terminate the process once all output has been printed.
        sys.exit()
if __name__ == '__main__':
    from autobahn.asyncio.wamp import ApplicationRunner
    from autobahn.wamp.serializer import JsonSerializer
    # Connection parameters come from server-config.cfg via the injector.
    url = config.configs['server_url']
    realm = config.configs['server_realm']
    debug = config.configs['server_debug']
    json = JsonSerializer()
    # Run the WAMP session until sys.exit() is called from onJoin().
    runner = ApplicationRunner(url=url, realm=realm, debug=debug, debug_wamp=debug, debug_app=debug, serializers=[json])
    runner.run(WampMain)
|
import fart
def hello(x):
    """Return x incremented by one."""
    incremented = x + 1
    return incremented
def poop(y):
    """Return fart.bye(y) multiplied by y and then by three."""
    base = fart.bye(y)
    return base * y * 3
# Module-level constant; its consumers are not visible in this file.
myName = 'Jeremy'
|
#!/usr/bin/python3
# Render a .cfdg file (Context Free Design Grammar) to PNG and open it,
# using WSL paths; an animation pipeline exists below but is disabled.
import sys
import time
import os.path
# Base path of the input file from argv[1], extension stripped.
cpath = os.path.splitext(sys.argv[1])[0]
from subprocess import Popen
from itertools import islice
cf_path = "{}.cf".format(cpath)
print("cf_path : {}".format( cf_path ))
cfdg_path = "{}.cfdg".format(cpath)
print("cfdg_path : {}".format( cfdg_path ))
name = os.path.splitext(cfdg_path)[0]
name = os.path.basename(name)
new_file = "/mnt/c/temp/{}.png".format(name)
# Render a single still frame at 2560x1440.
cmd="cfdg {} --width=2560 --height=1440 --minimumsize=2 -o {}".format(cfdg_path, new_file)
os.system(cmd)
print("new_file: {}".format( new_file))
parent = os.path.dirname(new_file)
# Open the rendered image with the Windows default viewer (wslview).
cmd="cd {}; wslview {}.png".format(parent, name)
os.system(cmd)
# NOTE(review): this exit() makes everything below unreachable -- the
# animation pipeline (frame rendering + ffmpeg) is effectively disabled.
exit()
#print (sys.argv[1])
#destination = "/tmp/out.cfdg"
# Parse the .cf sidecar file: first non-comment line holds extra cfdg args;
# "key: value" lines give fixed values, "key: start stop" lines give ranges.
with open(cf_path, "r") as f:
    bar = f.readlines()
values = dict()
variables = list()
ranges = dict()
args = None
for aline in bar:
    if aline and aline[0] == "#":
        continue
    if not args:
        args = aline
        continue
    if ":" in aline:
        tokens = aline.split(":")
        val = tokens[1][:-1]
        key = tokens[0]
        if " " in val:
            ranges[key] = val.strip().split(" ")
        else:
            values[key] = val
# Total number of animation frames = duration (s) * fps.
needed = int(values['duration']) * int(values['fps'])
# Convert each range to [start, per-frame increment].
for key,var in ranges.items():
    start = float(var[1])
    stop = float(var[0])
    iteration = (stop-start)/needed
    ranges[key] = [start, iteration]
print("needed : {}".format( needed ))
cmd="rm /mnt/c/temp/bar* -rf"
os.system(cmd)
# Build one cfdg command per frame, interpolating every ranged variable.
commands = list()
for i, it in enumerate(range(needed)):
    addline = []
    for key,var in ranges.items():
        value = round(((i + 1) * var[1]) + var[0],4)
        addline.append("-D{}={}".format(key, value))
    cmd="cfdg {} {} {} -o /mnt/c/temp/bar{}.png".format(cfdg_path, args[:-1], " ".join(addline), str(i).zfill(3))
    commands.append(cmd)
#    os.system(cmd)
max_workers = 10 # upper bound on concurrently running cfdg processes
processes = (Popen(cmd, shell=True) for cmd in commands)
running_processes = list(islice(processes, max_workers)) # start new processes
# Keep up to max_workers renders running; refill slots as processes finish.
# NOTE(review): this poll loop busy-waits (no sleep between polls).
while running_processes:
    for i, process in enumerate(running_processes):
        if process.poll() is not None: # the process has finished
            running_processes[i] = next(processes, None) # start new process
            if running_processes[i] is None: # no new processes
                del running_processes[i]
                break
# Stitch the rendered frames into an AVI at the configured frame rate.
cmd="ffmpeg -i /mnt/c/temp/bar%3d.png -r {} /mnt/c/outputs/test_{}.avi".format(values['fps'], str(int(time.time())))
os.system(cmd)
#cmd="cfdg $1 --width=2560 --height=1440 --minimumsize=2 -DloopN=3 -o /mnt/c/temp/bar.jpg
#for need in range(needed):
#    print("need : {}".format( need ))
#
#for aline in bar:
#    print("aline : {}".format( aline ))
|
from TrainingRawDataValidation.RawValidation import RawDataValidation
from TrainingDataBaseOperation.DbOperation import DbOperation
from TrainingDataTransform.DataTransformation import DataTransform
from logger import App_Logger
class TrainValidation:
    """Orchestrates the full training-data pipeline: raw-file validation,
    transformation, database load and CSV export, logging each stage."""

    def __init__(self, path):
        # Validators/operators for the raw files found under *path*.
        self.RawData = RawDataValidation(path)
        self.DataTransform = DataTransform()
        self.DbOperation = DbOperation()
        # NOTE(review): this log handle is opened here and never closed;
        # callers hold it for the object's lifetime.
        self.file = open("TrainingLog/TrainingMainLog.txt", 'a+')
        self.logger = App_Logger()

    def TrainValidation(self):
        """Run validation, transformation and DB load for the training data.

        Raises:
            Exception: any failure from the underlying stages is logged and
                re-raised unchanged.
        """
        try:
            self.logger.log(self.file, "Start of Validation of files!!!!!" )
            # extracting values from training schema
            LenghtOfDateStampInFile, LenghtOfTimeStampInFile, ColumnNames ,NumberOfColumn = self.RawData.ValuesFromSchema()
            # getting the regex defined to validate filename
            regex = self.RawData.RegexCreator()
            # validating filename of training files
            self.RawData.ValidateFileName(regex, LenghtOfDateStampInFile, LenghtOfTimeStampInFile)
            # validating column length in the file
            self.RawData.ValidateColumnLenght(NumberOfColumn)
            # validating if any column has all values missing
            self.RawData.ValidateMissingValuesInWholeColumn()
            self.logger.log(self.file, "Raw Data Validation Completed!!!!!")
            self.logger.log(self.file,"Starting Data Transformation!!!")
            # replacing blanks in the csv file with "Null" values
            self.DataTransform.ReplaceMissingWithNull()
            self.logger.log(self.file,"Data Transformation Completed!!")
            self.logger.log(self.file,"Start Creating Training Database and Tables on the basis of given schema!!")
            # create database with given name, if present open the connection! Create table with columns given in schema
            self.DbOperation.CreateTableInDB('Training', ColumnNames)
            self.logger.log(self.file,"Table Creation Completed!!!!")
            # insert csv files in the table
            self.DbOperation.InsertGoodDataIntoTable('Training')
            self.logger.log(self.file,"Insertion in Table completed!!!")
            # Delete the good data folder after loading files in table
            self.logger.log(self.file,"Deleting Good Data Folder!!!")
            self.RawData.DeleteExistingGoodDataTrainingFolder()
            self.logger.log(self.file,"Good Data Folder Deleted!!!")
            # Move the bad files to archive folder
            self.logger.log(self.file,"Moving Bad Files to Archive and Deleting Bad Files Folder!!")
            self.RawData.MoveBadFilesToArchiveBad()
            self.logger.log(self.file,"Bad Files Moved to Archive and Deleted Bad Files!!!")
            self.logger.log(self.file,"Validation Operation completed!!!")
            self.logger.log(self.file,"Extracting csv file From table")
            #export data in table to csvfile
            self.DbOperation.SelectingDataFromTableIntoCsv('Training')
            self.logger.log(self.file,"Successfully Extracted Files from Table!!!")
        except Exception as e:
            # Record the failure in the training log, then re-raise with the
            # original traceback intact (bare raise instead of `raise e`).
            self.logger.log(self.file, "Error during train validation: " + str(e))
            raise
|
from .user import User
from .news import News
from .task import Task
from .lesson import Lesson |
# Copyright 2019 Magazino GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import tf2_ros
from tf_service import client_binding
# Decorator for class methods that translates internal exception types to
# corresponding tf2_ros.*Exceptions.
def translate_exceptions(method):
    """Decorator that maps client_binding exceptions to tf2_ros exceptions.

    The wrapped callable behaves exactly like *method* on success; any
    client_binding.*Exception raised inside is re-raised as the
    corresponding tf2_ros.*Exception carrying the original message.

    Fix: the original had a second, unreachable handler for
    ExtrapolationException (duplicated except clause); it has been removed.
    """
    @functools.wraps(method)
    def translate(obj, *args, **kwargs):
        try:
            return method(obj, *args, **kwargs)
        except client_binding.ConnectivityException as e:
            raise tf2_ros.ConnectivityException(str(e))
        except client_binding.ExtrapolationException as e:
            raise tf2_ros.ExtrapolationException(str(e))
        except client_binding.LookupException as e:
            raise tf2_ros.LookupException(str(e))
        except client_binding.InvalidArgumentException as e:
            raise tf2_ros.InvalidArgumentException(str(e))
        except client_binding.TimeoutException as e:
            raise tf2_ros.TimeoutException(str(e))
        except client_binding.TransformException as e:
            raise tf2_ros.TransformException(str(e))
    return translate
|
a1 = float(input())
b1 = float(input())
h = float(input())
area =(a1 + b1)*h/2
print("trapezoid is " + str(area))
|
N, C = input().split()
N = int(N)
C = int(C)
total = 0
for i in range(N):
value = (C - i) if (C - i) >= 1 else 1
total += value
print(total)
|
from flask import (
Blueprint, flash, g, redirect, render_template, request, url_for
)
from werkzeug.exceptions import abort
from reef.auth import login_required
from reef.model import *
from reef import database
import flask
# Blueprint grouping all /books routes; registered on the app elsewhere.
bp = Blueprint('books', __name__, url_prefix='/books')
@bp.route('/')
@login_required
def index():
    """Show the logged-in user's books, one page at a time."""
    page_number = request.args.get('page', 1, type=int)
    per_page = flask.current_app.config['ITEMS_PER_PAGE']
    pagination = g.user.books.paginate(page_number, per_page, False)
    next_url = None
    if pagination.has_next:
        next_url = url_for('books.index', page=pagination.next_num)
    prev_url = None
    if pagination.has_prev:
        prev_url = url_for('books.index', page=pagination.prev_num)
    return render_template('books/index.html', books=pagination.items,
                           next_url=next_url, prev_url=prev_url)
@bp.route('/create', methods=('GET', 'POST'))
@login_required
def create():
    """Create a new post for the logged-in user; title is mandatory."""
    if request.method == 'POST':
        title = request.form['title']
        body = request.form['body']
        if not title:
            flash('Title is required.', 'danger')
        else:
            new_post = Post(title=title, body=body, author=g.user)
            database.session.add(new_post)
            database.session.commit()
            return redirect(url_for('books.index'))
    # GET request, or POST that failed validation: show the form again.
    return render_template('books/create.html')
def get_post(id, check_author=True):
    """Fetch a post by id.

    Aborts with 404 when the post does not exist, and with 403 when
    *check_author* is set and the post is not owned by the current user.
    """
    found = Post.query.get(id)
    if found is None:
        abort(404, "Post id {0} doesn't exist.".format(id))
    # Short-circuit: g.user is only consulted when ownership matters.
    if check_author and found.author.id != g.user.id:
        abort(403)
    return found
@bp.route('/<int:id>/update', methods=('GET', 'POST'))
@login_required
def update(id):
    """Edit an existing post owned by the logged-in user."""
    post = get_post(id)
    if request.method == 'POST':
        title = request.form['title']
        body = request.form['body']
        if not title:
            flash('Title is required.', 'danger')
        else:
            database.session.query(Post).filter(Post.id == id) \
                .update({'title': title, 'body': body})
            database.session.commit()
            return redirect(url_for('books.index'))
    # GET request, or POST that failed validation: show the edit form.
    return render_template('books/update.html', post=post)
@bp.route('/<int:id>/delete', methods=('POST',))
@login_required
def delete(id):
    """Delete a post, then return to the index.

    Fix: the original deleted without any ownership check, unlike update();
    get_post() now enforces existence (404) and authorship (403) first,
    matching this module's own pattern.
    """
    get_post(id)
    database.session.query(Post).filter(Post.id == id).delete()
    database.session.commit()
    return redirect(url_for('books.index'))
|
import requests
from hheaders import headers
from download_file import DownloadFile
from wgdata.split_config_items import splitconfigitems
def get_datas(options):
    """Build the URL and POST parameters from *options*, send the request,
    and hand the response body to DownloadFile for saving."""
    try:
        URL = splitconfigitems._get_url(options)
        PARAMS = splitconfigitems._get_params(options)
        r = requests.post(URL, data=PARAMS, headers=headers)
        downloadfile = DownloadFile(r.content, options)
        downloadfile.download_file()
    except Exception, e:
        # NOTE(review): broad except swallows every failure (network, config,
        # disk); e.message is deprecated -- prefer str(e), and consider
        # logging and re-raising instead of printing.
        print e.message
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.