| max_stars_repo_path | max_stars_repo_name | max_stars_count | id | content | score | int_score |
|---|---|---|---|---|---|---|
| train.py | zzdxfei/3d_landmarks | 1 | 12776351 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
import keras
import keras.backend as K
import re
import cv2
import numpy as np
np.set_printoptions(threshold=np.inf)
def list_pictures(directory, ext='jpg|jpeg|bmp|png|ppm'):
return [os.path.join(root, f)
for root, _, files in os.walk(directory) for f in files
if re.match(r'([\w]+\.(?:' + ext + '))', f)]
def get_train_test_dataset():
if os.path.exists('./data/train.npz'):
dataset = np.load('./data/train.npz')
        print('{} already exists.'.format('./data/train.npz'))
return (dataset['x'], dataset['y'])
x = list_pictures('./test_dataset', ext='png')
y = [item[:-4] + '_posmap.jpg' for item in x]
filted_x = []
filted_y = []
for ix, iy in zip(x, y):
if os.path.exists(ix) and os.path.exists(iy):
filted_x.append(ix)
filted_y.append(iy)
else:
            print('{} or {} does not exist.'.format(ix, iy))
x = [cv2.imread(item) for item in filted_x]
y = [cv2.imread(item) for item in filted_y]
x = np.array(x)
y = np.array(y)
if not os.path.exists('./data'):
os.makedirs('./data')
np.savez('./data/train.npz', x=x, y=y)
return (x, y)
def res_block(x, filters):
# stage1
shortcut = x
shortcut = keras.layers.Conv2D(
filters, (1, 1), strides=(2, 2), padding='same')(shortcut)
    x = keras.layers.Conv2D(
        filters // 2, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters // 2, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters, (1, 1), strides=(1, 1), padding='same')(x)
x = keras.layers.Add()([x, shortcut])
x = keras.layers.BatchNormalization()(x)
x = keras.layers.Activation('relu')(x)
# stage2
shortcut = x
    x = keras.layers.Conv2D(
        filters // 2, (1, 1), strides=(1, 1), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters // 2, (4, 4), strides=(1, 1), padding='same', activation='relu')(x)
    x = keras.layers.Conv2D(
        filters, (1, 1), strides=(1, 1), padding='same')(x)
x = keras.layers.Add()([x, shortcut])
x = keras.layers.BatchNormalization()(x)
x = keras.layers.Activation('relu')(x)
return x
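# Each res_block above is two bottleneck stages: stage 1 halves the spatial
# resolution (stride-2 conv with a 1x1 projection shortcut), stage 2 is an
# identity-shortcut refinement at the new resolution.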
def get_regress_model():
    inputs = keras.layers.Input(shape=(256, 256, 3))
    x = keras.layers.Conv2D(
        16, (4, 4), strides=(1, 1), padding='same', activation='relu')(inputs)
x = res_block(x, 32)
x = res_block(x, 64)
x = res_block(x, 128)
x = res_block(x, 256)
x = res_block(x, 512)
x = keras.layers.Conv2DTranspose(512, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
256, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(256, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(256, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
128, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(128, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(128, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
64, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(64, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(64, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
32, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(32, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(
16, (4, 4), strides=(2, 2), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(16, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same', activation='relu')(x)
x = keras.layers.Conv2DTranspose(3, (4, 4), padding='same')(x)
    model = keras.Model(inputs, x)
return model
def preprocess_input(x, y=None):
x = x.astype(np.float32)
x = keras.applications.xception.preprocess_input(x)
if y is not None:
y = y.astype(np.float32)
y /= 256.0
return (x, y)
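# Note: xception.preprocess_input scales the input images to [-1, 1], and
# dividing the position-map targets by 256 keeps them roughly in [0, 1].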
loss_mask = cv2.imread('./data/uv-data/uv_weight_mask.png')
face_mask = cv2.imread('./data/uv-data/uv_face_mask.png')
loss_mask = np.where(face_mask > 0, loss_mask, face_mask)
loss_mask = loss_mask.astype(np.float32)
loss_mask /= 16.0
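# The weight mask is zeroed outside the face region by the np.where merge
# above, so the masked loss below only penalizes errors on face pixels;
# /16.0 rescales the integer pixel weights to roughly unit scale.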
def mean_squared_error_with_mask(y_true, y_pred):
mask = K.constant(loss_mask)
return K.mean(K.mean(K.square(y_pred - y_true) * mask, axis=-1), axis=-1)
def lr_adjustor(epoch):
base_lr = 0.001
if epoch < 100:
return base_lr
base_lr *= .1
if epoch < 150:
return base_lr
base_lr *= .1
return base_lr
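# Resulting schedule: epochs 0-99 -> 1e-3, epochs 100-149 -> 1e-4, 150+ -> 1e-5.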
def train():
(x, y) = get_train_test_dataset()
# x = np.concatenate([x for i in range(20)])
# y = np.concatenate([y for i in range(20)])
print('x shape -> {}, y shape -> {}.'.format(x.shape, y.shape))
(x, y) = preprocess_input(x, y)
model = get_regress_model()
model.summary()
    # Resume training from an earlier checkpoint.
    model.load_weights('./weights.100-0.0137.hdf5')
# keras.utils.plot_model(model, show_shapes=True)
opti = keras.optimizers.Adam(lr=0.001)
if not os.path.exists('./weights'):
os.makedirs('./weights')
callbacks = [
keras.callbacks.LearningRateScheduler(lr_adjustor),
keras.callbacks.CSVLogger('train.log'),
keras.callbacks.ModelCheckpoint(
'./weights/weights.{epoch:02d}-{loss:.4f}.hdf5',
monitor='loss',
save_best_only=True,
period=10)]
model.compile(opti, loss=mean_squared_error_with_mask)
model.fit(x, y, batch_size=16, epochs=200, callbacks=callbacks)
def test():
(x, y) = get_train_test_dataset()
# x = np.concatenate([x for i in range(20)])
# y = np.concatenate([y for i in range(20)])
print('x shape -> {}, y shape -> {}.'.format(x.shape, y.shape))
(x, y) = preprocess_input(x, y)
model = get_regress_model()
model.summary()
# model.load_weights('./weights.100-0.0137.hdf5')
model.load_weights('./Data/net-data/weights.190-0.0010.hdf5')
if not os.path.exists('./result'):
os.makedirs('./result')
y = model.predict(x)
for index, i in enumerate(y):
i *= 255
i = i.astype(np.uint8)
savename = os.path.join('./result', str(index) + '.png')
cv2.imwrite(savename, i)
if __name__ == "__main__":
# train()
test()
| 2.328125 | 2 |
| gv_analyzer_client.py | yama-kei/trail_analyzer | 0 | 12776352 |
import json
import socket
import urllib2
#import requests
class GvAnalyzerClient(object):
"""
GV Analyzer Client
"""
def __init__(self, gd_data):
self.base_url = "https://damp-retreat-1145.herokuapp.com/"
self.base_url = "http://127.0.0.1:5000/"
self.gd_data = gd_data
socket.setdefaulttimeout(15)
def analyze(self, gv_data):
"""Invoke analyze API of GV Analyzer"""
url = self.base_url + "gv_analyze"
gdv_data = json.dumps({"gd_data":self.gd_data, "gv_data":gv_data})
req = urllib2.Request(url)
req.add_header('Content-Type', 'application/json')
req.add_header('Accept', 'application/json')
try:
res = urllib2.urlopen(req, gdv_data)
response = json.loads(res.read())
return response
except Exception as e:
return {"Error":str(e)}
"""
#requests version:
headers = {'Accept' : 'application/json', 'Content-Type' : 'application/json'}
try:
r = requests.post(url, data = gdv_data, headers = headers)
return r.json()
except requests.exceptions.RequestException as e:
return {"Error":str(e)}
"""
"""
def gva():
url = "http://127.0.0.1:5000/analyze"
#url = "https://damp-retreat-1145.herokuapp.com/analyze"
headers = {'Accept' : 'application/json', 'Content-Type' : 'application/json'}
r = requests.post(url, data = open("event.json", "rb"), headers = headers)
print json.dumps(r.json(), indent=4)
"""
| 3.046875 | 3 |
| src/OTLMOW/OTLModel/Datatypes/KlVerlichtingstoestelVerlichtGebied.py | davidvlaminck/OTLClassPython | 2 | 12776353 |
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlVerlichtingstoestelVerlichtGebied(KeuzelijstField):
"""Het gebied op de wegbaan of het object dat verlicht wordt door het verlichtingstoestel."""
naam = 'KlVerlichtingstoestelVerlichtGebied'
label = 'Verlichtingstoestel verlicht gebied.'
objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#KlVerlichtingstoestelVerlichtGebied'
definition = 'Het gebied op de wegbaan of het object dat verlicht wordt door het verlichtingstoestel.'
codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlVerlichtingstoestelVerlichtGebied'
options = {
'afrit': KeuzelijstWaarde(invulwaarde='afrit',
label='afrit',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/afrit'),
'bebakening': KeuzelijstWaarde(invulwaarde='bebakening',
label='bebakening',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/bebakening'),
'doorlopende-straatverlichting': KeuzelijstWaarde(invulwaarde='doorlopende-straatverlichting',
label='doorlopende straatverlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/doorlopende-straatverlichting'),
'fietspad': KeuzelijstWaarde(invulwaarde='fietspad',
label='fietspad',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/fietspad'),
'hoofdweg': KeuzelijstWaarde(invulwaarde='hoofdweg',
label='hoofdweg',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/hoofdweg'),
'kruispunt': KeuzelijstWaarde(invulwaarde='kruispunt',
label='kruispunt',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/kruispunt'),
'monument': KeuzelijstWaarde(invulwaarde='monument',
label='monument',
                                     definitie='Alle niet-functie verlichting, dus alle verlichting die nodig is om je weg te vinden. Verlichting voor artistieke creaties op (bv. rotonde) of rond de openbare weg (bv. ecoduct dat onderaan een schilderij heeft) of voor artistieke belichting (niet verlichting) te geven, bv. een hangbrug waarbij de kabels aangelicht worden. Soms kan dit ook zijn voor het aanlichten of belichten van gebouwen.',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/monument'),
'onderdoorgang': KeuzelijstWaarde(invulwaarde='onderdoorgang',
label='onderdoorgang',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/onderdoorgang'),
'oprit': KeuzelijstWaarde(invulwaarde='oprit',
label='oprit',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/oprit'),
'parking': KeuzelijstWaarde(invulwaarde='parking',
label='parking',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/parking'),
'projector': KeuzelijstWaarde(invulwaarde='projector',
label='projector',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/projector'),
'punctuele-verlichting': KeuzelijstWaarde(invulwaarde='punctuele-verlichting',
label='punctuele verlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/punctuele-verlichting'),
'rotonde': KeuzelijstWaarde(invulwaarde='rotonde',
label='rotonde',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/rotonde'),
'tunnelverlichting': KeuzelijstWaarde(invulwaarde='tunnelverlichting',
label='tunnelverlichting',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/tunnelverlichting'),
'wisselaar': KeuzelijstWaarde(invulwaarde='wisselaar',
label='wisselaar',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelVerlichtGebied/wisselaar')
}
| 1.929688 | 2 |
| rplugin/python3/multiterm.py | kjelly/nvim-multiterm | 1 | 12776354 |
import re
import neovim
import enum
import json
try:
import psutil
except ImportError:
psutil = None
def isNumber(x):
return x in '1234567890'
class Result(enum.Enum):
BY_PASS = 1
HANDLED = 2
UNHANDLED = 3
def is_shell(name):
for i in ['fish', 'bash', 'csh', 'zsh']:
if i in name:
return True
return False
@neovim.plugin
class MultiTerm(object):
def __init__(self, nvim):
self.nvim = nvim
self.data = {}
self.name_map = {}
self.last_term_job_id = None
self.last_command = ''
self.name_list = ['one', 'two', 'three', 'four', 'five', 'six',
'seven', 'eight', 'nine', 'ten']
self.name_index = 0
self.browser = self.nvim.eval("expand('$BROWSER')")
if self.browser == '$BROWSER':
self.browser = 'w3m'
def get_command_map(self):
try:
command_map_history = self.nvim.eval('g:MultiTerm_Map')
command_map = json.loads(command_map_history)
except Exception as e:
self.echo(e)
command_map = {}
return command_map
def write_text(self, job_id, data):
self.nvim.call('jobsend', int(job_id), data)
def run(self, job_id, cmd):
self.last_command = cmd.strip()
self.write_text(job_id, cmd)
def run_in_all_terminal(self, cmd):
for i in self.data:
self.run(int(self.data[i]), cmd)
def echo(self, data):
self.nvim.command('echo "%s"' % data)
def replace_args(self, args):
for i in range(len(args)):
val = args[i]
if val == '!':
args[i] = self.last_command
elif val == '!l':
args[i] = self.nvim.current.line.strip()
elif val == '!w':
self.nvim.command('normal! viw"*y')
args[i] = self.nvim.eval('@*')
elif val == '!!':
shell = self.nvim.eval('&shell')
if 'fish' in shell:
args[i] = 'eval $history[1]'
elif 'zsh' in shell:
args[i] = '!!\n'
else:
args[i] = '!!'
elif len(val) == 2 and val[0] == '@':
args[i] = self.nvim.eval('@' + val[1])
def subcommand_a(self, arg0, args, range):
'''
Run the command in all terminal.
'''
cmd = ' '.join(args[1:]) + '\n'
self.run_in_all_terminal(cmd)
return Result.HANDLED
def subcommand_s(self, arg0, args, range):
'''
Store the command in the command_map.
'''
if len(arg0) == 2 and arg0[0] == 's' and isNumber(arg0[1]):
cmd = ' '.join(args[1:]) + '\n'
command_map = self.get_command_map()
command_map[arg0[1]] = cmd
self.nvim.command("let g:MultiTerm_Map='%s'" % json.dumps(command_map))
return Result.HANDLED
return Result.UNHANDLED
def subcommand_r(self, arg0, args, range):
'''
Run the command stored in command_map.
'''
command_map = self.get_command_map()
        if arg0 == 'r' and len(args) > 1:
            # C r <n> : run the command stored under key <n>.
            self.echo(arg0)
            cmd = command_map.get(args[1], '')
            self.run(self.last_term_job_id, cmd)
            return Result.HANDLED
        elif arg0[0] == 'r' and len(arg0) == 2 and isNumber(arg0[1]):
            # C r1 : run command 1 stored in the command map.
            self.echo(arg0)
            cmd = command_map.get(arg0[1], '')
            self.run(self.last_term_job_id, cmd)
            return Result.HANDLED
return Result.UNHANDLED
def subcommand_n(self, arg0, args, range):
'''
Name the terminal.
'''
if arg0 in ['n', 'N'] and len(args) > 1:
if len(args) == 2:
try:
filename = self.nvim.eval("expand('%:p')").split('#')[0].strip()
job_id = self.nvim.eval('expand(b:terminal_job_id)')
self.name_map[job_id] = args[1]
self.nvim.command("keepalt file %s \#%s" % (filename, args[1]))
                except Exception:
self.name_map[self.last_term_job_id] = args[1]
return Result.HANDLED
elif len(args) > 2:
self.name_map[args[2]] = args[1]
return Result.HANDLED
return Result.UNHANDLED
def subcommand_g(self, arg0, args, range):
'''
Go to the terminal.
'''
name_or_id = args[1]
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
r = inv_name_map.get(name_or_id, None)
if r is None:
r = name_or_id
r = inv_data_map.get(r, None)
if r is None:
self.echo("Terminal not found")
return Result.BY_PASS
self.nvim.command("buffer %s" % r)
return Result.HANDLED
def subcommand_w(self, arg0, args, range):
'''
Run w3m browser in the w3m terminal buffer.
'''
if psutil is None:
return Result.BY_PASS
inv_name_map = {v: k for k, v in self.name_map.items()}
if inv_name_map.get('w3m', None) is None:
self.nvim.command("terminal")
self.nvim.command("C n w3m")
url = ' '.join(args[1:]) + '\n'
self.kill_and_run('w3m', '%s %s' % (self.browser, url))
self.nvim.command("normal! i")
return Result.HANDLED
def subcommand_k(self, arg0, args, range):
'''
Kill and run command in terminal.
'''
if psutil is None:
return Result.BY_PASS
name_list = args[1].split(',')
        if len(name_list) < 2:
            self.echo("Usage: C k name1,name2 <cmd>")
            return Result.HANDLED
cmd = ' '.join(args[2:]) + '\n'
for i in name_list:
if i == '':
continue
self.kill_and_run(i, cmd)
self.nvim.command("normal! G")
return Result.HANDLED
def kill_and_run(self, name, command):
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
job_id = inv_name_map.get(name, None)
if job_id is None:
self.nvim.command("terminal")
self.nvim.command("C n %s" % name)
inv_name_map = {v: k for k, v in self.name_map.items()}
inv_data_map = {v: k for k, v in self.data.items()}
job_id = inv_name_map.get(name, None)
if job_id is None:
self.echo("terminal not found")
return
file_name = inv_data_map[job_id]
self.nvim.command("buffer %s" % file_name)
pid = file_name.split(':')[1].split('/')[-1]
p = psutil.Process(pid=int(pid, 10))
childrens = p.children()
for i in childrens:
i.kill()
self.run(job_id, command)
return Result.HANDLED
def subcommand_l(self, arg0, args, range):
'''
List all terminal.
'''
if len(arg0) > 1:
return Result.UNHANDLED
text = ''
for i in self.data:
job_id = self.data[i]
text += '%s => %s, %s\n' % (job_id, i,
self.name_map.get(job_id, ''))
        job_id = None
        try:
            job_id = self.nvim.eval('expand(b:terminal_job_id)')
        except Exception:
            pass
        text += 'current job_id=%s, name=%s' % (job_id, self.name_map.get(job_id, ''))
self.echo(text)
return Result.HANDLED
def subcommand_empty(self, arg0, args, range):
return Result.UNHANDLED
@neovim.command("C", range='', nargs='*', sync=True)
def command(self, args, range):
if len(args) < 1:
return
if self.last_term_job_id is None:
self.nvim.command("split")
self.nvim.command("wincmd j")
self.nvim.command("terminal")
self.replace_args(args)
function_map = {
'a': self.subcommand_a,
'g': self.subcommand_g,
'l': self.subcommand_l,
'n': self.subcommand_n,
'r': self.subcommand_r,
's': self.subcommand_s,
'w': self.subcommand_w,
'k': self.subcommand_k,
}
arg0 = args[0]
result = function_map.get(arg0[0],
self.subcommand_empty)(arg0, args, range)
if result == Result.BY_PASS or result == Result.HANDLED:
return
if re.match(r'(\d+,)*\d+', arg0):
# C 1, 3 ls : run ls in terminal 1, terminal 3.
cmd = ' '.join(args[1:]) + '\n'
for i in arg0.split(','):
self.run(i, cmd)
elif re.match(r'(\w+,)+', arg0):
cmd = ' '.join(args[1:]) + '\n'
name_list = arg0.split(',')
inv_name_map = {v: k for k, v in self.name_map.items()}
ever_run = False
for name in name_list:
job_id = inv_name_map.get(name, None)
if job_id is None:
continue
self.run(job_id, cmd)
ever_run = True
if ever_run is False:
self.run(self.last_term_job_id, cmd)
else:
cmd = ' '.join(args[:]) + '\n'
self.run(self.last_term_job_id, cmd)
@neovim.autocmd('TermOpen', eval='expand("<afile>")', sync=True,
pattern='*sh*')
def on_termopen(self, filename):
if not is_shell(filename):
return
lst = filename.split('#')
filename = lst[0]
job_id = self.nvim.eval('expand(b:terminal_job_id)')
self.data[filename] = job_id
self.last_term_job_id = job_id
if len(lst) > 1:
terminal_name = lst[-1]
self.name_map[job_id] = terminal_name
try:
index = self.name_list.index(terminal_name)
del self.name_list[index]
if index < self.name_index:
self.name_index -= 1
except ValueError:
pass
return
if self.name_index < len(self.name_list):
name = self.name_list[self.name_index]
self.name_map[job_id] = name
self.nvim.command("keepalt file %s \#%s" % (filename, name))
self.name_index += 1
@neovim.autocmd('BufWinEnter', eval='expand("%:p")', sync=False,
pattern='*sh*')
def on_buffer_win_enter(self, filename):
try:
job_id = self.nvim.eval('expand(b:terminal_job_id)')
if self.name_map.get(job_id, '') != 'w3m':
self.last_term_job_id = job_id
        except Exception:
pass
@neovim.autocmd('BufEnter', eval='expand("%:p")', sync=False,
pattern='*sh*')
def on_buffer_enter(self, filename):
if psutil is None:
return
try:
pid = filename.split('/')[-1].split(':')[0]
p = psutil.Process(pid=int(pid, 10))
childrens = p.children()
if len(childrens) > 0 and childrens[0].name() == 'w3m':
self.nvim.command("normal! g")
self.nvim.command("normal! i")
        except Exception:
pass
| 2.234375 | 2 |
| venv/lib/python3.7/site-packages/cloudbio/custom/java.py | itsmesatwik/pants | 0 | 12776355 |
"""Install instructions for non-packaged java programs.
"""
import os
from fabric.api import *
from fabric.contrib.files import *
from .shared import _if_not_installed  # package-relative import (Python 3)
@_if_not_installed("cljr")
def install_cljr(env):
"""Install the clojure package manager cljr
http://github.com/liebke/cljr
"""
run("wget http://incanter.org/downloads/cljr-installer.jar")
run("java -jar cljr-installer.jar")
env.safe_sudo("ln -s .cljr/bin/cljr /usr/bin")
run("rm cljr-installer.jar")
@_if_not_installed("lein")
def install_leinengin(env):
"""Standard clojure build tool: http://github.com/technomancy/leiningen
"""
run("wget --no-check-certificate https://github.com/technomancy/leiningen/raw/stable/bin/lein")
run("chmod a+rwx lein")
env.safe_sudo("mv lein %s" % os.path.join(env.system_install, "bin"))
run("lein self-install")
| 2.265625 | 2 |
| check-in/daily/Find-All-Anagrams-in-a-String-(Medium).py | huandrew99/LeetCode | 36 | 12776356 |
"""
LC 438
Given a string and a pattern, find all anagrams of the pattern in the given string.
Every anagram is a permutation of a string. As we know, when we are not allowed to repeat characters while finding permutations of a string, we get N! permutations (or anagrams) of a string having N characters. For example, here are the six anagrams of the string “abc”:
abc
acb
bac
bca
cab
cba
Write a function to return a list of starting indices of the anagrams of the pattern in the given string.
Example 1:
Input: String="ppqp", Pattern="pq"
Output: [1, 2]
Explanation: The two anagrams of the pattern in the given string are "pq" and "qp".
Example 2:
Input: String="abbcabc", Pattern="abc"
Output: [2, 3, 4]
Explanation: The three anagrams of the pattern in the given string are "bca", "cab", and "abc".
"""
from collections import defaultdict
class Solution:
def findAnagrams(self, str1: str, pattern: str):
cnt = defaultdict(int)
for c in pattern:
cnt[c] += 1
chars = set(cnt.keys())
ans = []
for i, c in enumerate(str1):
if i >= len(pattern):
self.add_c(cnt, chars, str1[i - len(pattern)])
self.rm_c(cnt, chars, c)
if not cnt:
ans.append(i - len(pattern) + 1)
return ans
def add_c(self, cnt, chars, c):
if c in chars:
cnt[c] += 1
if cnt[c] == 0:
del cnt[c]
def rm_c(self, cnt, chars, c):
if c in chars:
cnt[c] -= 1
if cnt[c] == 0:
del cnt[c]
"""
Time O(N + M)
Space O(N): space to store result
"""
| 4.09375 | 4 |
| bluetooth/camera_share.py | AdityaNG/Self-Driving-RC-Car | 0 | 12776357 |
<gh_stars>0
"""
# sudo modprobe bcm2835-v4l2
# python3 controller.py > logs/controller.txt &
# python3 autopilot_web_server.py > logs/autopilot_web_server.txt &
# python3 recorder.py > logs/recorder.txt &
cd training_data;
#python3 -m http.server > ../logs/webserver.txt &
"""
import threading
import time
import sys
from camera_pi import Camera
import controller
from controller import bluetooth_connected, connect_bluetooth_loop
import autopilot_web_server
import web_server
import recorder
import wheel_speed
sys.path.append("../self_drive")
import local_autopilot
THREADS = []
#THREADS.append(threading.Thread(target=controller.main, args=(Camera, ) ))
#THREADS.append(threading.Thread(target=controller.main))
#THREADS[0].setName("Controller")
#THREADS.append(threading.Thread(target=autopilot_web_server.main, args=(Camera, ) ))
#THREADS[0].setName("Autopilot Webserver")
THREADS.append(threading.Thread(target=web_server.main, args=(Camera, ) ))
THREADS[0].setName("Autopilot Webserver")
#THREADS.append(threading.Thread(target=recorder.main))
#THREADS[2].setName("Recorder")
THREADS.append(threading.Thread(target=local_autopilot.main, args=(Camera, ) ))
THREADS[1].setName("Local Autopilot")
#THREADS.append(threading.Thread(target=wheel_speed.main))
#THREADS[4].setName("Wheel Speed")
#THREADS.append(threading.Thread(target=controller.main))
#THREADS.append(threading.Thread(target=controller.main))
def log(*a):
    print("[THRD]", *a)
def launch_all():
for t in THREADS:
t.start()
log(t.name, " Started")
if t.name=="Autopilot Webserver":
time.sleep(10)
def loop():
for t in THREADS:
if not t.is_alive():
log(t.name, " - Died")
def main():
if not bluetooth_connected():
connect_bluetooth_loop()
launch_all()
while True:
try:
loop()
except Exception as e:
log("Runner error - ", e)
time.sleep(5)
if __name__ == "__main__":
main()
| 2.1875 | 2 |
| contxt/services/health.py | ndustrialio/contxt-sdk-python-control | 0 | 12776358 |
from ..models.health import Health
from .api import ConfiguredLegacyApi
from ..utils.config import ContxtEnvironmentConfig
class HealthService(ConfiguredLegacyApi):
"""Health API client"""
def __init__(self, env_config: ContxtEnvironmentConfig, **kwargs) -> None:
super().__init__(env_config=env_config, **kwargs)
def create_health_status(self, org_id: str, asset_id: str, health: Health) -> Health:
resp = self.post(f"{org_id}/assets/{asset_id}", data=health.post())
return Health.from_api(resp)
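# A minimal usage sketch (assuming a ContxtEnvironmentConfig has been built
# elsewhere; Health's fields are defined in ..models.health):
#
#   service = HealthService(env_config=env_config)
#   status = service.create_health_status("org-id", "asset-id", health)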
| 2.28125 | 2 |
| hangman.py | VersatileVishal/Hanging-The-Man | 5 | 12776359 |
<filename>hangman.py
import random
def hangman():
word=random.choice(["pugger","tiger","lion","cheetah","thor","ironman","superman","blackwidow","hawkeye","mother","country","pegion",
"carrot","cricket","hockey","technical","human","bad","amazing","hulk","god","picture","air","laptop",
"pokemon","avengers","savewater","strangerthings","lostinspace","selena","justice","doctor","hospital","youtube","night",
"annabelle","rockstar","mindflayer","dragons"])
validLetters = "abcdefghijklmnopqrstuvwxyz"
turns = 10
guessmade = ""
while len(word)>0:
main = ""
for letter in word:
if letter in guessmade:
main = main + letter
else:
main = main + "_" +" "
if main == word:
print("'%s'"%main)
print("Congratulations!!!You Win.")
print("You saved the kind man")
break
print("Guess the word(all in small): ",main)
guess = input()
        if guess in validLetters:
            if guess in guessmade:
                print("You have already guessed that character!")
                continue
            else:
                guessmade = guessmade + guess
        else:
            print("Enter a valid character")
            continue
if guess not in word:
turns = turns - 1
if turns==9:
print("9 turns left")
print(" --------- ")
if turns == 8:
print("8 turns left")
print(" --------- ")
print(" O ")
if turns == 7:
print("7 turns left")
print(" --------- ")
print(" O ")
print(" | ")
if turns == 6:
print("6 turns left")
print(" --------- ")
print(" O ")
print(" | ")
print(" / ")
if turns == 5:
print("5 turns left")
print(" --------- ")
print(" O ")
print(" | ")
print(" / \ ")
if turns == 4:
print("4 turns left")
print(" --------- ")
print(" \ O ")
print(" | ")
print(" / \ ")
if turns == 3:
print("3 turns left")
print(" --------- ")
print(" \ O / ")
print(" | ")
print(" / \ ")
if turns == 2:
print("2 turns left")
print(" --------- ")
print(" \ O /| ")
print(" | ")
print(" / \ ")
if turns == 1:
print("1 turn left")
print("Last breathe counting, Take care!")
print(" --------- ")
print(" \ O_|/ ")
print(" | ")
print(" / \ ")
if turns == 0:
print("Word is '%s'"%word)
print("You loose!!")
print("You let a kind man die")
print(" --------- ")
print(" O_| ")
print(" /|\ ")
print(" / \ ")
break
name = input("Enter your name:- ")
while len(name)==0:
name= input("Firstly, Enter your name :- ")
print("Welcome",name)
print("-----------------")
print("Try to guess the word in less than 10 try")
hangman()
| 4.09375 | 4 |
| api_server/api_server.py | aperauch/VMC-on-AWS-App-Portal | 0 | 12776360 |
import logging, sys, os, ldap, time, yaml
from ns1 import NS1, Config
from ns1.rest.errors import ResourceException, RateLimitException, AuthException
from flask import Flask, json, g, request, make_response, jsonify
from flask.logging import create_logger
from flask_cors import CORS, cross_origin
from flask_jwt import JWT, jwt_required, current_identity
from datetime import datetime, timedelta
from vmware.vapi.vmc.client import create_vmc_client
from com.vmware.nsx_vmc_app_client_for_vmc import create_nsx_vmc_app_client_for_vmc
from com.vmware.nsx_policy_client_for_vmc import create_nsx_policy_client_for_vmc
from com.vmware.nsx_vmc_app.model_client import PublicIp
from com.vmware.nsx_policy.model_client import PolicyNatRule
from vmware.vapi.bindings.struct import PrettyPrinter as NsxPrettyPrinter
from com.vmware.nsx_policy.model_client import ApiError
# Import config settings from yaml file
yaml_file = open("e:\\GitHub\\EUC-Lab-Portal-Python\\api_server\\config.yaml", 'r')
yaml_dict = yaml.load(yaml_file, Loader=yaml.FullLoader)
# Logging Settings
log_format = ('[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s')
logging.basicConfig(
filename=yaml_dict['LogFilepath'],
level=logging.ERROR,
format=log_format
)
# Flask app config settings
app = Flask(__name__)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['JWT_AUTH_HEADER_PREFIX'] = 'Bearer'
app.config['JWT_EXPIRATION_DELTA'] = timedelta(seconds=yaml_dict['JwtTimeoutInSeconds'])
app.config['SECRET_KEY'] = yaml_dict['JwtKey']
CORS(app)
# LDAP
LDAP_CONNECTION_STRING = yaml_dict['LdapConnectionString']
LDAP_PROTOCOL_VERSION = yaml_dict['LdapProtocolVersion']
# NS1 DNS config settings
API_KEY_VALUE = yaml_dict['DnsApiKey']
EUCLABNET_ZONE_NAME = yaml_dict['DnsZones'][0]
PSOLABNET_ZONE_NAME = yaml_dict['DnsZones'][1]
config = Config()
config.createFromAPIKey(API_KEY_VALUE)
api = NS1(config=config)
# VMC
VMC_CSP_REFRESH_TOKEN = yaml_dict['VmcCspRefreshToken']
VMC_CSP_AUTH_URL = yaml_dict['VmcCspAuthUrl'] + "?refresh_token=" + VMC_CSP_REFRESH_TOKEN
VMC_ORG = yaml_dict['VmcOrg']
VMC_ORG_ID = yaml_dict['VmcOrgId']
VMC_SDDC = yaml_dict['VmcSddc']
VMC_SDDC_ID = yaml_dict['VmcSddcId']
NSX_VMC_AWS_API_BASE_URL = yaml_dict['NsxVmxAwsApiBaseUrl']
# format NSXT objects for readability
nsx_pp = NsxPrettyPrinter()
@app.route("/")
def home():
localtime = time.asctime( time.localtime(time.time()) )
logging.info("Server is running.")
return {"Status": "Running", "DateTime": localtime}
@app.route("/dns", methods=["GET"])
@jwt_required()
def get_dns_records():
try:
psolabnet_zone = api.loadZone(PSOLABNET_ZONE_NAME)
euclabnet_zone = api.loadZone(EUCLABNET_ZONE_NAME)
all_zone_records = {
psolabnet_zone.zone: psolabnet_zone.data["records"],
euclabnet_zone.zone: euclabnet_zone.data["records"]
}
all_zone_records_json = json.dumps(all_zone_records)
return all_zone_records_json
except Exception as ex:
logging.error("Exception: " + ex)
response = make_response({"message": "No action was taken."}, 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns", methods=["PUT"])
@jwt_required()
def create_dns_record():
data = request.get_json()
logging.info("Creating DNS record " + data['zone'])
zone = api.loadZone(data['zone'])
response = make_response({"message": "No action was taken."}, 500)
    try:
        if data['type'] == 'A':
            ns1Record = zone.add_A(data['domain'], data['answers'])
            response = make_response(jsonify(ns1Record.data), 200)
        elif data['type'] == 'AAAA':
            ns1Record = zone.add_AAAA(data['domain'], data['answers'])
            response = make_response(jsonify(ns1Record.data), 200)
        elif data['type'] == 'CNAME':
            ns1Record = zone.add_CNAME(data['domain'], data['answers'])
            response = make_response(jsonify(ns1Record.data), 200)
        elif data['type'] == 'MX':
            # MX answers arrive as comma-separated priority/host pairs.
            mx_answers_list = data['answers'].replace(" ", "").split(",")
            if len(mx_answers_list) == 2:
                ns1Record = zone.add_MX(data['domain'], [[int(mx_answers_list[0]), mx_answers_list[1]]])
                response = make_response(jsonify(ns1Record.data), 200)
            elif len(mx_answers_list) == 4:
                ns1Record = zone.add_MX(data['domain'], [[int(mx_answers_list[0]), mx_answers_list[1]], [int(mx_answers_list[2]), mx_answers_list[3]]])
                response = make_response(jsonify(ns1Record.data), 200)
            else:
                response = make_response({"message": "Unable to create MX record due to issue parsing the answers list => " + data['answers']}, 400)
        elif data['type'] == 'TXT':
            ns1Record = zone.add_TXT(data['domain'], data['answers'])
            response = make_response(jsonify(ns1Record.data), 200)
        else:
            logging.warning("Unknown record type: " + data['type'])
            response = make_response({"message": "Unable to create DNS record due to unknown record type " + data['type']}, 400)
    except ResourceException as re:
        response = make_response(re.response.text, re.response.status_code)
        logging.error("ResourceException: %s", re)
    except Exception as ex:
        logging.error("Exception: %s", ex)
        response = make_response({"message": "An error occurred when trying to create a DNS record."}, 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns", methods=["POST"])
@jwt_required()
def update_dns_record():
data = request.get_json()
zone = api.loadZone(data['zone'])
rec = zone.loadRecord(data['domain'], data['type'])
# Modify the record with the new values
logging.info("Updating DNS record: " + rec.domain)
response = make_response({"message": "No action was taken."}, 500)
    try:
        ns1Record = rec.update(answers=[data['answers']])
        response = make_response(jsonify(ns1Record.data), 200)
    except ResourceException as re:
        response = make_response(re.response.text, re.response.status_code)
        logging.error("ResourceException: %s", re)
    except Exception as ex:
        logging.error("Exception: %s", ex)
error_message = "Something unexpected occurred when updating " + rec.domain
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/dns/delete", methods=["POST"])
@jwt_required()
def delete_dns_record():
response = make_response({"message": "No action was taken."}, 500)
try:
data = request.get_json()
zone = api.loadZone(data['zone'])
rec = zone.loadRecord(data['domain'], data['type'])
print("Deleting DNS record: " + rec.domain)
response = rec.delete()
if response:
error_message = "Something unexpected occurred when deleting " + rec.domain
print(error_message)
response = make_response(jsonify({"message": error_message}), 500)
else:
print("Deleted " + rec.domain + " successfully.")
response = make_response(jsonify({"message": "Deleted " + rec.domain + " successfully."}))
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting DNS record IP " + data['domain']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["GET"])
@jwt_required()
def get_vmc_public_ips():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
response = make_response(nsx_vmc_client.infra.PublicIps.list().to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting list of leased IP addresses."
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["POST"])
@jwt_required()
def request_new_vmc_public_ip():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
data = request.get_json()
public_ip = PublicIp(display_name=data['display_name'])
response = make_response(nsx_vmc_client.infra.PublicIps.update(data['display_name'], public_ip).to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when requesting IP " + data['display_name']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["PATCH"])
@jwt_required()
def update_vmc_public_ip():
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
data = request.get_json()
public_ip = PublicIp(display_name=data['display_name'], ip=data['ip'], id=data['id'])
response = make_response(nsx_vmc_client.infra.PublicIps.update(data['display_name'], public_ip).to_json(), 200)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/publicips", methods=["PUT"])
@jwt_required()
def delete_new_vmc_public_ip():
response = make_response({"message": "No action was taken."}, 500)
try:
data = request.get_json()
# Ensure IP is not being used in a NAT Rule before attempting delete
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nat = nsx_policy_client.infra.tier_1s.nat.NatRules.list('cgw', 'USER')
for nat_rule in nat.results:
if nat_rule.translated_network == data['ip']:
response = make_response({"message": "The IP is being used by NAT rule " + nat_rule.display_name + ". Delete NAT rule before continuing." }, 409)
response.headers["Content-Type"] = "application/json"
return response
# Proceed to delete
nsx_vmc_client = create_nsx_vmc_app_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
response = nsx_vmc_client.infra.PublicIps.delete(data['display_name']) # None value returned on successful delete
response = make_response()
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when releasing IP " + data['ip']
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['GET'])
@jwt_required()
def get_nat_rules():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nat = nsx_policy_client.infra.tier_1s.nat.NatRules.list('cgw', 'USER')
response = make_response(nat.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting NAT rules. Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['POST'])
@jwt_required()
def create_nat_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nat_obj = PolicyNatRule(action = 'REFLEXIVE',
scope = ['/infra/labels/cgw-public'],
source_network = data['source_network'],
translated_network = data['translated_network'],
display_name = data['display_name'],
sequence_number = 1,
firewall_match = 'MATCH_INTERNAL_ADDRESS')
# patch() method is void
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.tier_1s.nat.NatRules.patch('cgw', 'USER', data['display_name'], nat_obj)
response = make_response(nat_obj.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when creating NAT rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/natrules", methods=['PUT'])
@jwt_required()
def delete_nat_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.tier_1s.nat.NatRules.delete('cgw', 'USER', data['display_name'])
response = make_response({"message": "Successfully deleted NAT rule " + data['display_name']}, 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting NAT rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/cgwrules", methods=['GET'])
@jwt_required()
def get_cgw_rules():
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
cgw_object = nsx_policy_client.infra.domains.GatewayPolicies.get('cgw', 'default')
security_groups = nsx_policy_client.infra.domains.Groups.list('cgw').results
services = nsx_policy_client.infra.Services.list()
# Replace destination group ID, source group ID, and service ID with display name
for cgw in cgw_object.rules:
new_dest_list = []
for dest_group in cgw.destination_groups:
if dest_group != 'ANY':
for sec_group in security_groups:
if sec_group.id == dest_group.split('/')[-1]:
new_dest_list.append(sec_group.display_name)
if len(new_dest_list) > 0:
cgw.destination_groups = new_dest_list
new_source_list = []
for source_group in cgw.source_groups:
if source_group != 'ANY':
for sec_group in security_groups:
if sec_group.id == source_group.split('/')[-1]:
new_source_list.append(sec_group.display_name)
if len(new_source_list) > 0:
cgw.source_groups = new_source_list
new_service_list = []
for cgw_service in cgw.services:
if cgw_service != 'ANY':
for service in services.results:
if service.id == cgw_service.split('/')[-1]:
new_service_list.append(service.display_name)
if len(new_service_list) > 0:
cgw.services = new_service_list
new_scope_list = []
for scope in cgw.scope:
new_scope_list.append(scope.split('/')[-1])
if len(new_scope_list) > 0:
cgw.scope = new_scope_list
response = make_response(cgw_object.to_json(), 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when getting CGW rules. Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
@app.route("/cgwrules", methods=['PUT'])
@jwt_required()
def delete_cgw_rule():
data = request.get_json()
response = make_response({"message": "No action was taken."}, 500)
try:
nsx_policy_client = create_nsx_policy_client_for_vmc(VMC_CSP_REFRESH_TOKEN, VMC_ORG_ID, VMC_SDDC_ID)
nsx_policy_client.infra.domains.gateway_policies.Rules.delete('cgw', 'default', data['display_name'])
response = make_response({"message": "Successfully deleted CGW rule " + data['display_name']}, 200)
except Exception as ex:
logging.error("Exception: " + ex)
log_error(ex)
error_message = "Something unexpected occurred when deleting CGW rule " + data['display_name'] + ". Exception: " + ex
response = make_response(jsonify({"message": error_message}), 500)
response.headers["Content-Type"] = "application/json"
return response
def log_error(ex):
    """
    Generic error logger that uses the NSX-T ApiError message decoder for
    more descriptive information when the exception carries one.
    """
    if not hasattr(ex, 'data'):
        # Not an NSX API error; log it as-is.
        logging.error("Error: %s", ex)
        return
    api_error = ex.data.convert_to(ApiError)
    print("Error configuring {}".format(api_error.error_message))
    print("{}".format(api_error.__dict__))
    print("{}".format(api_error.details))
    logging.error("Error configuring {}".format(api_error.error_message))
    logging.error("{}".format(api_error.__dict__))
    logging.error("{}".format(api_error.details))
def ldap_bind_as_user(upn, password):
"""
UPN is required for AD bind.
"""
result = False
conn = ldap.initialize(LDAP_CONNECTION_STRING)
conn.protocol_version = LDAP_PROTOCOL_VERSION
try:
bind_result_s = conn.simple_bind_s(upn, password)
if bind_result_s[0] == 97:
logging.info("LDAP bind successful for upn " + upn + ".")
result = User(id=upn)
else:
logging.error("Received an unexpected bind result code: " + bind_result_s)
except ldap.INVALID_CREDENTIALS:
logging.error("Username or password is incorrect.")
    except ldap.LDAPError as e:
        # e.message / has_key are Python 2 idioms; inspect e.args instead.
        desc = e.args[0].get('desc') if e.args and isinstance(e.args[0], dict) else None
        if desc:
            logging.error("LDAP Error exception occurred: " + desc)
        else:
            logging.error("A server error occurred during API authentication.")
except Exception as e:
logging.error("An exception occurred when performing ldap bind. Exception: " + e)
finally:
conn.unbind_s()
return result
class User(object):
def __init__(self, id):
self.id = id
def __str__(self):
return "User(id='%s')" % self.id
def authenticate(username, password):
if not (username and password):
return False
elif "_admin" not in username:
logging.error("The given username does not contain the substring '_admin': " + username)
return False
else:
return ldap_bind_as_user(username, password)
def identity(payload):
user_id = payload['identity']
return {"user_id": user_id}
jwt = JWT(app, authenticate, identity)
if __name__ == "__main__":
app.run()
| 1.71875 | 2 |
| model/SparseModels/RobustKLSparseAutoencoder.py | ne1199/RobustAutoencoder | 3 | 12776361 |
<gh_stars>1-10
import numpy as np
import tensorflow as tf
import KLSparseAutoencoder as sdae
import sys
sys.path.append("../")
from shrink import l21shrink as SHR
class Robust_KL_SparseAutoencoder():
"""
@author: <NAME>
modified: 02/12/2018
Des:
X = L + S
L is a non-linearly low dimension matrix and S is a sparse matrix.
    argmin ||L - Decoder(Encoder(L))|| + KL(Encoder(L)) + ||S||_2,1
Use Alternating projection to train model
"""
def __init__(self, sess, layers_sizes, sparsity, sparse_ratio, lambda_=1.0, error = 1.0e-5):
"""
sparsity is the weight of penalty term
sparse ratio is special for the KL divergence, how much sparsity do you expect on each hidden feature.
"""
self.lambda_ = lambda_
self.layers_sizes = layers_sizes
        self.sparsity = sparsity
        self.sparse_ratio = sparse_ratio
        self.SAE = sdae.KL_Sparse_Autoencoder(sess=sess, input_dim_list=self.layers_sizes,
                                              sparsity=self.sparsity, sparse_ratio=self.sparse_ratio)
def fit(self, X, sess, learning_rate=0.05, inner_iteration = 50,
iteration=20, batch_size=40, verbose=False):
## The first layer must be the input layer, so they should have same sizes.
assert X.shape[1] == self.layers_sizes[0]
## initialize L, S, mu(shrinkage operator)
self.L = np.zeros(X.shape)
self.S = np.zeros(X.shape)
if verbose:
print ("X shape: ", X.shape)
print ("L shape: ", self.L.shape)
print ("S shape: ", self.S.shape)
for it in range(iteration):
if verbose:
print ("Out iteration: " , it)
## alternating project, first project to L
self.L = np.array(X - self.S,dtype=float)
## Using L to train the auto-encoder
self.SAE.fit(self.L, sess = sess,
iteration = inner_iteration,
learning_rate = learning_rate,
batch_size = batch_size,
verbose = verbose)
            ## get optimized L
self.L = self.SAE.getRecon(X = self.L, sess = sess)
## alternating project, now project to S and shrink S.T
self.S = SHR.l21shrink(self.lambda_, (X - self.L).T).T
return self.L, self.S
def transform(self, X, sess):
return self.SAE.transform(X = X, sess = sess)
    def getRecon(self, X, sess):
        return self.SAE.getRecon(X=X, sess=sess)
if __name__ == '__main__':
x = np.load(r"../../data/data.npk")[:500]
with tf.Session() as sess:
        rsae = Robust_KL_SparseAutoencoder(sess = sess, lambda_= 40, layers_sizes=[784,784,784,784],
sparsity= 0.5, sparse_ratio= 0.2)
L, S = rsae.fit(x, sess = sess, inner_iteration = 20, iteration = 30,verbose = True)
print (L.shape,S.shape)
| 2.53125 | 3 |
| hashmap/htbl.py | ebisLab/codingchallenges | 0 | 12776362 |
class Hashtable:
def __init__(self):
"""
Create an array(self.my dict) w/ a bucket size - derived from load factor.
Load factor --> a measure that decides when to increase the Hashmap capacity to maintain the get() and put() operator complexity of o(1).
Default load factor of hashmap is .75f (75% of the map size)
Load Factor = (n/k)
n => max number of elements that can be stored
k => bucket size
Optimal load factor is (2/3) so that the effect of hash collision is minimum"""
self.bucket = 16
self.hashmap = [[] for i in range(self.bucket)]
def __str__(self):
return str(self.__dict__)
def hash(self, key):
return len(key) % self.bucket
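    # Note: len(key) % bucket puts all keys of equal length in the same bucket
    # (e.g. 'grapes' and 'apples' both land in bucket 6); the chaining in
    # put()/get() below resolves those collisions.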
    def put(self, key, value):
        """Insert the pair; if the key is already present, update its value."""
        hash_val = self.hash(key)
        reference = self.hashmap[hash_val]
        for pair in reference:
            if pair[0] == key:
                pair[1] = value
                return None
        reference.append([key, value])
        return None
def get(self, key): # if there's no collision, it can be O(1)
"""return value to which the specified key is mapped, or -1 if this map contains no mapping for the key"""
hash_val = self.hash(key)
reference = self.hashmap[hash_val]
for i in range(len(reference)):
if reference[i][0] == key: # grab this first array[i], then 0
return reference[i][1] # current bucket and 1 (1000)
return -1 # <-- undefined
# iterate and spit out what's in the hash map
def keys(self):
keysArr = []
for i in range(self.bucket):
            if self.hashmap[i]:  # skip empty buckets
for j in range(len(self.hashmap[i])):
keysArr.append(self.hashmap[i][j][0])
return keysArr
h = Hashtable()
h.put('grapes', 1000)
h.put('apples', 10)
h.put('ora', 300)
print(h.get('grapes'))
print(h)
h.keys()
print(h.keys())
# h.remove('apples')
print(h)
| 3.921875 | 4 |
| controller.py | ionufi/first | 0 | 12776363 |
<reponame>ionufi/first
from repository import StudentRepository
class StudentController(object):
    def __init__(self):
        self._student_repository = StudentRepository()
    def add_student(self, student):
        return self._student_repository.add_student(student)
    def remove_student(self, sid):
        return self._student_repository.remove_student(sid)
    def update_student(self, sid, name, surname, grade):
        return self._student_repository.update_student(sid, name, surname, grade)
    def get_student(self, sid):
        return self._student_repository.get_student(sid)
    def get_all_students(self):
        return self._student_repository.get_all_students()
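# A minimal usage sketch (assuming StudentRepository and a student object are
# defined in repository.py):
#
#   controller = StudentController()
#   controller.add_student(student)
#   print(controller.get_all_students())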
| 2.625 | 3 |
| src/nr/powerline/__init__.py | NiklasRosenstein/powerline | 0 | 12776364 |
<reponame>NiklasRosenstein/powerline
# -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2020, <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from . import chars, server, static
from .utils import register_signal_handler, try_remove
from nr import ansiterm as ansi
from nr.interface import Interface
from nr.sumtype import Constructor, Sumtype
from nr.utils.process import process_exists, process_terminate, replace_stdio, spawn_daemon
from typing import Iterable, Optional, Sequence, TextIO, Union
import argparse
import io
import json
import logging
import os
import nr.databind.core, nr.databind.json
import signal
import sys
__author__ = '<NAME> <<EMAIL>>'
__version__ = '0.1.2'
logger = logging.getLogger(__name__)
class Pen(Sumtype):
Text = Constructor('text,style')
Flipchar = Constructor('char')
def render(
pen_sequence: Sequence[Pen],
fp: TextIO = None,
escape_unprintable: bool = False
) -> Optional[str]:
r"""
Render a sequence of #Pen instructions to *fp*, or returns it as a string.
If *escape_unprintable* is enabled, unprintable characters will be wrapped
in `\[` and `\]` to allow the shell to properly count the width of the
resulting string.
"""
if fp is None:
fp = io.StringIO()
return_result = True
else:
return_result = False
def _find_next_bg(offset: int) -> Optional[ansi.Color]:
for pen in pen_sequence[offset:]: # TODO (@NiklasRosenstein): islice()?
if isinstance(pen, Pen.Text):
return pen.style.bg
return None
style = ansi.Style()
for index, pen in enumerate(pen_sequence):
if isinstance(pen, Pen.Flipchar):
new_bg = _find_next_bg(index+1) or ansi.SgrColor('DEFAULT')
if new_bg == style.bg:
# Note: This is more of a hack in cases where two plugins
# have the same background color, rendering the common
# RIGHT_TRIANGLE flipchar invisible.
text = chars.RIGHT_TRIANGLE_THIN
style = ansi.Style(None, new_bg)
else:
style = ansi.Style(style.bg, new_bg)
text = pen.char
elif isinstance(pen, Pen.Text):
style = pen.style or style
text = pen.text
else:
raise TypeError('expected Pen object, got {!r}'.format(
type(pen).__name__))
if escape_unprintable:
fp.write('\\[')
fp.write(str(style))
if escape_unprintable:
fp.write('\\]')
fp.write(text)
if escape_unprintable:
fp.write('\\[')
fp.write(str(ansi.Attribute.RESET))
if escape_unprintable:
fp.write('\\]')
if return_result:
return fp.getvalue()
return None
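# A minimal sketch of driving render(), assuming nr.sumtype constructors take
# their declared fields positionally (not verified against nr.sumtype):
#
#   pens = [Pen.Text('user ', ansi.parse_style('white blue')),
#           Pen.Flipchar(chars.RIGHT_TRIANGLE),
#           Pen.Text(' ~/src ', ansi.parse_style('blue white'))]
#   print(render(pens))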
class PowerlineContext:
def __init__(self,
path: str,
exit_code: int = 0,
default_style: ansi.Style = None,
env: dict = None,
is_server: bool = False):
self.path = path
self.exit_code = exit_code
self.default_style = default_style or ansi.parse_style('white blue')
self.env = os.environ if env is None else env
self.is_server = is_server
def getenv(self, name: str, default: str = None) -> Optional[str]:
return self.env.get(name, default)
class AnsiModule(nr.databind.core.Module):
def __init__(self):
super().__init__()
self.register(ansi.Color, nr.databind.core.IDeserializer(
deserialize=lambda m, n: ansi.parse_color(n.value)))
self.register(ansi.Style, nr.databind.core.IDeserializer(
deserialize=lambda m, n: ansi.parse_style(n.value)))
@nr.databind.core.SerializeAs(nr.databind.core.UnionType
.with_entrypoint_resolver('nr.powerline.plugins'))
class PowerlinePlugin(Interface):
def render(self, context: PowerlineContext) -> Iterable[Pen]:
...
class Powerline(nr.databind.core.Struct):
plugins = nr.databind.core.Field([PowerlinePlugin])
default_style = nr.databind.core.Field(ansi.Style,
nr.databind.core.FieldName('default-style'), default=None)
def render(self,
context: PowerlineContext,
fp: TextIO = None,
escape_unprintable: bool = False
) -> Optional[str]:
pens = []
for plugin in self.plugins:
pens += plugin.render(context)
return render(pens, fp, escape_unprintable)
def load_powerline(*try_files: str, default: Union[dict, Powerline] = None) -> Optional[Powerline]:
mapper = nr.databind.core.ObjectMapper(
AnsiModule(),
nr.databind.json.JsonModule(),
)
for filename in try_files:
if os.path.isfile(filename):
with open(filename) as fp:
data = json.load(fp)
return mapper.deserialize(data, Powerline, filename=filename)
if isinstance(default, dict):
default = mapper.deserialize(default, Powerline, filename='<default>')
return default
def main(argv=None):
"""
Entrypoint for nr-powerline.
"""
parser = argparse.ArgumentParser()
parser.add_argument('exit_code', type=int, nargs='?')
parser.add_argument('-f', '--file')
parser.add_argument('-e', '--escape', action='store_true')
parser.add_argument('--run-dir', default=None)
parser.add_argument('--start', action='store_true')
parser.add_argument('--stop', action='store_true')
parser.add_argument('--status', action='store_true')
parser.add_argument('--fake-server', action='store_true')
parser.add_argument('--exit-code', action='store_true')
parser.add_argument('--src', choices=('bash',))
args = parser.parse_args(argv)
logging.basicConfig(format='[%(asctime)s - %(levelname)s]: %(message)s', level=logging.INFO)
powerline = load_powerline(
args.file or os.path.expanduser('~/.local/powerline/config.json'),
default=static.default_powerline)
context = PowerlineContext(
os.getcwd(),
args.exit_code or 0,
default_style=powerline.default_style,
is_server=args.fake_server)
if args.src == 'bash':
print(static.bash_src)
sys.exit(0)
elif args.src:
parser.error('unexpected argument for --src: {!r}'.format(args.src))
if not args.start and not args.stop and not args.status:
print(powerline.render(context, escape_unprintable=args.escape), end='')
return
run_dir = args.run_dir or os.path.expanduser('~/.local/powerline')
log_file = os.path.join(run_dir, 'daemon.log')
pid_file = os.path.join(run_dir, 'daemon.pid')
socket_file = os.path.join(run_dir, 'daemon.sock')
if os.path.isfile(pid_file):
with open(pid_file) as fp:
daemon_pid = int(fp.read().strip())
else:
daemon_pid = None
if args.stop and daemon_pid:
logger.info('Stopping %d', daemon_pid)
process_terminate(daemon_pid)
if args.start:
if os.path.exists(socket_file):
os.remove(socket_file)
def run(powerline, stdout):
with open(pid_file, 'w') as fp:
fp.write(str(os.getpid()))
logger.info('Started %d', os.getpid())
register_signal_handler('SIGINT', lambda *a: try_remove(pid_file))
replace_stdio(None, stdout, stdout)
conf = server.Address.UnixFile(socket_file)
server.PowerlineServer(conf, powerline).run_forever()
logger.info('Bye bye')
os.makedirs(run_dir, exist_ok=True)
stdout = open(log_file, 'a')
spawn_daemon(lambda: run(powerline, stdout))
if args.status:
if not daemon_pid or not process_exists(daemon_pid):
if args.exit_code:
sys.exit(7)
print('stopped')
else:
if args.exit_code:
sys.exit(0)
print('running')
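# Usage sketch (flags as parsed above; the invocation name is assumed to be the
# nr-powerline entry point):
#   nr-powerline 0 --escape            # render the prompt for exit code 0
#   nr-powerline --start               # spawn the rendering daemon
#   nr-powerline --status --exit-code && echo running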
| 1.992188
| 2
|
Research/imitate_experiments/001/imitateOptimizer.py
|
RockmanZheng/AgentSteve
| 1
|
12776365
|
<gh_stars>1-10
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
from tensorflow.python.util.tf_export import tf_export
class ImitateOptimizer(optimizer.Optimizer):
    '''
    Optimizer used in third-person imitation learning.
    '''
    def __init__(self, learning_rate, use_locking=False, name="Imitate"):
        '''
        Construct a new imitate optimizer.
        Args:
            learning_rate: A Tensor or a floating point value. The learning rate to use.
            use_locking: If True, use locks for update operations.
            name: Optional name prefix for the operations created when applying gradients. Defaults to "Imitate".
        '''
        super(ImitateOptimizer, self).__init__(use_locking, name)
        self._learning_rate = learning_rate
        self._learning_rate_tensor = None
    def _apply_dense(self, grad, var):
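        # NOTE: the body of this method is truncated in the source. Below is an
        # assumed minimal sketch -- a plain dense gradient-descent update, the
        # usual pattern for custom tf.train optimizers (it presumes
        # self._learning_rate_tensor is materialized in a _prepare() step):
        return training_ops.apply_gradient_descent(
            var,
            math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
            grad,
            use_locking=self._use_locking).op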
| 2.203125
| 2
|
tests/pytests/test_rotation.py
|
ladoramkershner/ale
| 4
|
12776366
|
<gh_stars>1-10
import pytest
import numpy as np
from scipy.spatial.transform import Rotation
from ale.rotation import ConstantRotation, TimeDependentRotation
def test_constant_constant_composition():
rot1_2 = ConstantRotation([1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)], 1, 2)
rot2_3 = ConstantRotation([0, 1.0/np.sqrt(2), 0, 1.0/np.sqrt(2)], 2, 3)
rot1_3 = rot2_3*rot1_2
assert isinstance(rot1_3, ConstantRotation)
assert rot1_3.source == 1
assert rot1_3.dest == 3
np.testing.assert_equal(rot1_3.quat, [0.5, 0.5, -0.5, 0.5])
def test_constant_time_dependent_composition():
quats = [[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)],[1, 0, 0, 0]]
times = [0, 1]
av = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
rot1_2 = TimeDependentRotation(quats, times, 1, 2, av=av)
rot2_3 = ConstantRotation([0, 1.0/np.sqrt(2), 0, 1.0/np.sqrt(2)], 2, 3)
rot1_3 = rot2_3*rot1_2
assert isinstance(rot1_3, TimeDependentRotation)
assert rot1_3.source == 1
assert rot1_3.dest == 3
expected_quats = [[0.5, 0.5, -0.5, 0.5],[1.0/np.sqrt(2), 0, -1.0/np.sqrt(2), 0]]
np.testing.assert_equal(rot1_3.times, times)
np.testing.assert_almost_equal(rot1_3.quats, expected_quats)
np.testing.assert_almost_equal(rot1_3.av, av)
def test_time_dependent_constant_composition():
rot1_2 = ConstantRotation([1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)], 1, 2)
quats = [[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)],[1, 0, 0, 0]]
times = [0, 1]
av = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
rot2_3 = TimeDependentRotation(quats, times, 2, 3, av=av)
rot1_3 = rot2_3*rot1_2
assert isinstance(rot1_3, TimeDependentRotation)
assert rot1_3.source == 1
assert rot1_3.dest == 3
expected_quats = [[1, 0, 0, 0],[1.0/np.sqrt(2), 0, 0, -1.0/np.sqrt(2)]]
np.testing.assert_equal(rot1_3.times, times)
np.testing.assert_almost_equal(rot1_3.quats, expected_quats)
np.testing.assert_almost_equal(rot1_3.av, av)
def test_time_dependent_time_dependent_composition():
# 90 degree rotation about the X-axis to a 180 degree rotation about the X-axis
quats1_2 = [[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)],[1, 0, 0, 0]]
times1_2 = [0, 1]
av1_2 = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
rot1_2 = TimeDependentRotation(quats1_2, times1_2, 1, 2, av=av1_2)
# -90 degree rotation about the X-axis to a 90 degree rotation about the X-axis
quats2_3 = [[1.0/np.sqrt(2), 0, 0, -1.0/np.sqrt(2)],[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)]]
times2_3 = [0, 2]
av2_3 = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
rot2_3 = TimeDependentRotation(quats2_3, times2_3, 2, 3, av=av2_3)
# compose to get no rotation to a 180 degree rotation about the X-axis to no rotation
rot1_3 = rot2_3*rot1_2
assert isinstance(rot1_3, TimeDependentRotation)
assert rot1_3.source == 1
assert rot1_3.dest == 3
expected_times = [0, 1, 2]
expected_quats = [[0, 0, 0, -1], [-1, 0, 0, 0], [0, 0, 0, 1]]
expected_av = [[np.pi, 0, 0], [np.pi, 0, 0], [np.pi, 0, 0]]
np.testing.assert_equal(rot1_3.times, expected_times)
np.testing.assert_almost_equal(rot1_3.quats, expected_quats)
np.testing.assert_almost_equal(rot1_3.av, expected_av)
def test_constant_inverse():
rot1_2 = ConstantRotation([1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)], 1, 2)
rot2_1 = rot1_2.inverse()
assert rot2_1.source == 2
assert rot2_1.dest == 1
expected_quats = [1.0/np.sqrt(2), 0, 0, -1.0/np.sqrt(2)]
np.testing.assert_almost_equal(rot2_1.quat, expected_quats)
def test_time_dependent_inverse():
quats1_2 = [[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)],[1, 0, 0, 0]]
times1_2 = [0, 1]
av1_2 = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
rot1_2 = TimeDependentRotation(quats1_2, times1_2, 1, 2, av=av1_2)
rot2_1 = rot1_2.inverse()
assert rot2_1.source == 2
assert rot2_1.dest == 1
expected_quats = [[1.0/np.sqrt(2), 0, 0, -1.0/np.sqrt(2)],[1, 0, 0, 0]]
expected_av = [[-np.pi/2, 0, 0], [-np.pi/2, 0, 0]]
np.testing.assert_equal(rot2_1.times, times1_2)
np.testing.assert_almost_equal(rot2_1.quats, expected_quats)
np.testing.assert_almost_equal(rot2_1.av, expected_av)
def test_time_dependent_inverse_no_av():
quats1_2 = [[1.0/np.sqrt(2), 0, 0, 1.0/np.sqrt(2)],[1, 0, 0, 0]]
times1_2 = [0, 1]
rot1_2 = TimeDependentRotation(quats1_2, times1_2, 1, 2)
rot2_1 = rot1_2.inverse()
assert rot2_1.source == 2
assert rot2_1.dest == 1
expected_quats = [[1.0/np.sqrt(2), 0, 0, -1.0/np.sqrt(2)],[1, 0, 0, 0]]
np.testing.assert_equal(rot2_1.times, times1_2)
np.testing.assert_almost_equal(rot2_1.quats, expected_quats)
assert rot2_1.av is None
def test_rotation_matrix():
rot = ConstantRotation([0, 0, 0, 1], 1, 2)
mat = rot.rotation_matrix()
assert isinstance(mat, np.ndarray)
assert mat.shape == (3, 3)
def test_from_euler():
angles = [[np.pi/2, np.pi/2, 0],
[-np.pi/2, -np.pi/2, 0]]
times = [0, 1]
seq = 'XYZ'
rot = TimeDependentRotation.from_euler(seq, angles, times, 0, 1)
expected_quats = [[0.5, 0.5, 0.5, 0.5], [-0.5, -0.5, 0.5, 0.5]]
np.testing.assert_almost_equal(rot.quats, expected_quats)
assert rot.av is None
np.testing.assert_equal(rot.times, times)
assert rot.source == 0
assert rot.dest == 1
def test_from_euler_degrees():
rad_angles = [[np.pi/2, np.pi/2, 0],
[-np.pi/2, -np.pi/2, 0]]
degree_angles = [[90, 90, 0],
[-90, -90, 0]]
rad_rot = TimeDependentRotation.from_euler('XYZ', rad_angles, [0, 1], 0, 1)
degree_rot = TimeDependentRotation.from_euler('XYZ', degree_angles, [0, 1], 0, 1, degrees=True)
np.testing.assert_almost_equal(rad_rot.quats, degree_rot.quats)
assert degree_rot.av is None
def test_from_matrix():
    mat = [[0, 0, 1],
           [1, 0, 0],
           [0, 1, 0]]
rot = ConstantRotation.from_matrix(mat, 0, 1)
expected_quats = np.asarray([0.5, 0.5, 0.5, 0.5])
np.testing.assert_almost_equal(rot.quat, expected_quats)
assert rot.source == 0
assert rot.dest == 1
def test_slerp():
test_quats = Rotation.from_euler('x', np.array([-135, -90, 0, 45, 90]), degrees=True).as_quat()
rot = TimeDependentRotation(test_quats, [-0.5, 0, 1, 1.5, 2], 1, 2)
new_rots, new_avs = rot._slerp(np.arange(-3, 5))
expected_rot = Rotation.from_euler('x',
[-360, -270, -180, -90, 0, 90, 180, 270],
degrees=True)
np.testing.assert_almost_equal(new_rots.as_quat(),
expected_rot.as_quat())
np.testing.assert_almost_equal(np.degrees(new_avs),
np.repeat([[90, 0, 0]], 8, 0))
def test_slerp_constant_rotation():
rot = TimeDependentRotation([[0, 0, 0, 1]], [0], 1, 2)
new_rot, new_avs = rot._slerp([-1, 3])
np.testing.assert_equal(new_rot.as_quat(),
[[0, 0, 0, 1], [0, 0, 0, 1]])
np.testing.assert_equal(new_avs,
[[0, 0, 0], [0, 0, 0]])
def test_slerp_single_time():
rot = TimeDependentRotation([[0, 0, 0, 1]], [0], 1, 2, av=[[np.pi/2, 0, 0]])
new_rot, new_avs = rot._slerp([-1, 3])
expected_quats = [[-1/np.sqrt(2), 0, 0, 1/np.sqrt(2)], [1/np.sqrt(2), 0, 0, -1/np.sqrt(2)]]
expected_av = [[np.pi/2, 0, 0], [np.pi/2, 0, 0]]
np.testing.assert_almost_equal(new_rot.as_quat(),
expected_quats)
np.testing.assert_equal(new_avs,
expected_av)
def test_slerp_variable_velocity():
test_quats = Rotation.from_euler('xyz',
[[0, 0, 0],
[-90, 0, 0],
[-90, 180, 0],
[-90, 180, 90]],
degrees=True).as_quat()
rot = TimeDependentRotation(test_quats, [0, 1, 2, 3], 1, 2)
new_rots, new_avs = rot._slerp([-0.5, 0.5, 1.5, 2.5, 3.5])
expected_rot = Rotation.from_euler('xyz',
[[45, 0, 0],
[-45, 0, 0],
[-90, 90, 0],
[-90, 180, 45],
[-90, 180, 135]],
degrees=True)
np.testing.assert_almost_equal(new_rots.as_quat(),
expected_rot.as_quat())
np.testing.assert_almost_equal(np.degrees(new_avs),
[[-90, 0, 0],
[-90, 0 ,0],
[0, 180, 0],
[0, 0, 90],
[0, 0, 90]])
def test_reinterpolate():
rot = TimeDependentRotation([[0, 0, 0, 1], [0, 0, 0, 1]], [0, 1], 1, 2)
new_rot = rot.reinterpolate(np.arange(-3, 5))
assert new_rot.source == rot.source
assert new_rot.dest == rot.dest
np.testing.assert_equal(new_rot.times, np.arange(-3, 5))
def test_apply_at_single_time():
test_quats = Rotation.from_euler('x', np.array([-90, 0, 45]), degrees=True).as_quat()
rot = TimeDependentRotation(test_quats, [0, 1, 1.5], 1, 2)
input_vec = np.asarray([1, 2, 3])
rot_vec = rot.apply_at(input_vec, 0)
np.testing.assert_almost_equal(rot_vec, np.asarray([[1, 3, -2]]))
def test_apply_at_vector_time():
test_quats = Rotation.from_euler('x', np.array([-90, 0, 45]), degrees=True).as_quat()
rot = TimeDependentRotation(test_quats, [0, 1, 1.5], 1, 2)
input_vec = np.asarray([[1, 2, 3], [1, 2, 3]])
rot_vec = rot.apply_at(input_vec, [0, 2])
np.testing.assert_almost_equal(rot_vec, np.asarray([[1, 3, -2], [1, -3, 2]]))
def test_rotate_velocity_at():
test_quats = Rotation.from_euler('xyz',
[[0, 0, 0],
[-90, 0, 0],
[-90, 180, 0],
[-90, 180, 90]],
degrees=True).as_quat()
rot = TimeDependentRotation(test_quats, [0, 1, 2, 3], 1, 2)
input_pos = [[1, 2, 3], [1, 2, 3], [1, 2, 3]]
input_vel = [[-1, -2, -3], [-1, -2, -3], [-1, -2, -3]]
input_times = [1, 2, 3]
rot_vel = rot.rotate_velocity_at(input_pos, input_vel, input_times)
np.testing.assert_almost_equal(rot_vel,
[[-1, -3 + np.pi, 2 + 3*np.pi/2],
[1 + 3*np.pi, -3 + np.pi, -2],
[3, 1 - np.pi, -2 - np.pi/2]])
| 1.992188
| 2
|
example/test/core/geometry/simple/plane/__init__.py
|
dmilos/IceRay
| 2
|
12776367
|
#__name__ = "sphere"
#__package__ = "sphere"
#__all__ = ['make']
| 1.015625
| 1
|
Aula_60/ForTwo/r2/embarque.py
|
Mateus-Silva11/AulasPython
| 0
|
12776368
|
<reponame>Mateus-Silva11/AulasPython<gh_stars>0
def embarque(motorista:str, passageiro:str, saida:dict):
fortwo = {'motorista': motorista, 'passageiro': passageiro}
saida['pessoas'].remove(motorista)
print(f"{fortwo['motorista']} embarcou como motorista")
if passageiro != '':
saida['pessoas'].remove(passageiro)
print(f"{fortwo['passageiro']} embarcou como passageiro")
return fortwo
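# Hedged usage sketch (the names below are made up for illustration):
#   trip = {'pessoas': ['Ana', 'Beto', 'Caio']}
#   car = embarque('Ana', 'Beto', trip)
#   # prints the two boarding messages; trip['pessoas'] is now ['Caio']
#   # car == {'motorista': 'Ana', 'passageiro': 'Beto'}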
| 3.078125
| 3
|
demoire-baseline/losses.py
|
Foldit/Netdisk-Image-Demoireing-Rank7th
| 1
|
12776369
|
import paddle
import paddle.nn as nn
import vgg
def compute_l1_loss(input, output):
return paddle.mean(paddle.abs(input - output))
def loss_Textures(x, y, nc=3, alpha=1.2, margin=0):
    # Group the channel dimension into blocks of `nc` and penalize target
    # blocks whose energy exceeds the corresponding prediction blocks.
    xi = x.reshape([x.shape[0], -1, nc, x.shape[2], x.shape[3]])
    yi = y.reshape([y.shape[0], -1, nc, y.shape[2], y.shape[3]])
    xi2 = paddle.sum(xi * xi, axis=2)
    yi2 = paddle.sum(yi * yi, axis=2)
    out = nn.functional.relu(yi2 * alpha - xi2 + margin)
    return paddle.mean(out)
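# Shape sketch (assumed, for illustration): the channel count must be a
# multiple of `nc`, since the tensors are regrouped to (N, C//nc, nc, H, W)
# before the per-group energies are compared.
#   x = paddle.randn([2, 6, 32, 32])   # 6 channels -> 2 groups of nc=3
#   y = paddle.randn([2, 6, 32, 32])
#   t = loss_Textures(x, y)            # scalar tensor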
class LossNetwork(nn.Layer):
"""Reference:
https://discuss.pytorch.org/t/how-to-extract-features-of-an-image-from-a-trained-model/119/3
"""
def __init__(self, pretrained: str = None):
super(LossNetwork, self).__init__()
self.vgg_layers = vgg.vgg19(pretrained=pretrained).features
self.layer_name_mapping = {
'3': "relu1",
'8': "relu2",
'13': "relu3",
'22': "relu4",
'31': "relu5", # 1_2 to 5_2
}
def forward(self, x):
output = {}
# import pdb
# pdb.set_trace()
for name, module in self.vgg_layers._sub_layers.items():
x = module(x)
if name in self.layer_name_mapping:
output[self.layer_name_mapping[name]] = x
return output
class TVLoss(nn.Layer):
def __init__(self, weight=1):
super(TVLoss, self).__init__()
self.weight = weight
def forward(self, x):
batch_size = x.shape[0]
h_x = x.shape[2]
w_x = x.shape[3]
count_h = self._tensor_size(x[:, :, 1:, :])
count_w = self._tensor_size(x[:, :, :, 1:])
h_tv = paddle.pow((x[:, :, 1:, :] - x[:, :, :h_x - 1, :]), 2).sum()
w_tv = paddle.pow((x[:, :, :, 1:] - x[:, :, :, :w_x - 1]), 2).sum()
return self.weight * 2 * (h_tv / count_h + w_tv / count_w) / batch_size
def _tensor_size(self, t):
return t.shape[1] * t.shape[2] * t.shape[3]
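# Usage sketch (illustrative): TVLoss penalizes local intensity jumps -- squared
# differences between horizontally and vertically adjacent pixels, normalized
# per element and per batch.
#   tv = TVLoss(weight=0.1)
#   loss = tv(paddle.randn([1, 3, 64, 64]))   # scalar tensor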
if __name__ == '__main__':
img = paddle.randn([1, 3, 224, 224])
net = LossNetwork()
out = net(img)
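    # Added illustration: each entry maps a VGG ReLU stage name to its
    # feature map; print the shapes to verify the extraction.
    for name, feat in out.items():
        print(name, feat.shape)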
| 2.828125
| 3
|
model.py
|
noob-life/GAIL-for-mountain-car-Tensorflow
| 0
|
12776370
|
<reponame>noob-life/GAIL-for-mountain-car-Tensorflow
import tensorflow as tf
import tensorflow.keras as kr
import collections
import numpy as np
from tensorflow import random
#from tensorflow.python.keras import layers
class gail_gen(kr.Model):
"""docstring for gail_gen"""
def __init__(self, state_shape,action_shape):
super(gail_gen, self).__init__()
self.linear1 = kr.layers.Dense(128,activation='relu')
self.linear2 = kr.layers.Dense(128,activation='relu')
self.linear3 = kr.layers.Dense(action_shape)
def call(self,input):
#x_in = kr.layers.Input(shape=(self.state_shape,))
x= self.linear1(input)
x = self.linear2(x)
x = self.linear3(x)
x_out = kr.activations.softmax(x,axis=-1)
#x_out = x
return x_out#kr.models.Model(x_in,x_out)
class gail_disc(kr.Model):
"""docstring for disc"""
def __init__(self,state_shape,action_shape):
super(gail_disc, self).__init__()
# gotta concat (state_shape+action_shape) before feeding
self.linear1 = kr.layers.Dense(400)
self.relu1 = kr.layers.LeakyReLU(alpha=0.2)
self.linear2 = kr.layers.Dense(400)
self.relu2 = kr.layers.LeakyReLU(alpha=0.2)
self.linear3 = kr.layers.Dense(1)
def call(self,input):
#x_in = kr.layers.Input(shape=(self.state_shape+self.action_shape,))
x = self.relu1(self.linear1(input))
x = self.relu2(self.linear2(x))
x_out = (self.linear3(x))
#x_out = kr.backend.exp(x_out) # try this later
#x = kr.backend.mean(x,axis=0)
return x_out#kr.models.Model(x_in,x_out)
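if __name__ == '__main__':
    # Hedged smoke test (not in the original file; the shapes are assumed):
    gen = gail_gen(state_shape=2, action_shape=3)
    disc = gail_disc(state_shape=2, action_shape=3)
    s = tf.random.normal([4, 2])                    # batch of 4 states
    probs = gen(s)                                  # (4, 3) action distribution
    logits = disc(tf.concat([s, probs], axis=-1))   # (4, 1) discriminator score
    print(probs.shape, logits.shape)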
| 3.03125
| 3
|
sources/lib.py
|
qnguyen2atb/MUL
| 0
|
12776371
|
<reponame>qnguyen2atb/MUL
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 22 14:56:14 2022
@author: quang
"""
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from pyparsing import col
# Import train_test_split function
from sklearn.model_selection import train_test_split
#Import Random Forest Model
from sklearn.ensemble import RandomForestClassifier
from sklearn import metrics
from sklearn.metrics import accuracy_score, make_scorer
from sklearn.metrics import classification_report
import pickle
#from sympy import ordered
from lib import *
from data_exploratory_test import *
import timeit
| 1.875
| 2
|
test/components/scheduler/promises/test_2_2_6_2.py
|
rerobins/rhobot_framework
| 0
|
12776372
|
"""
2.2.6: `then` may be called multiple times on the same promise.
https://github.com/promises-aplus/promises-tests/blob/2.1.1/lib/tests/2.2.6.js
"""
import mock
from test.components.scheduler.promises.helpers import generate_rejected_test_case
other = {'other': 'other'}
sentinel = {'sentinel': 'sentinel'}
sentinel2 = {'sentinel2': 'sentinel2'}
sentinel3 = {'sentinel3': 'sentinel3'}
dummy = {'dummy': 'dummy'}
def multiple_boring_tests(test_case, promise, done):
handler1 = mock.MagicMock(return_value=other)
handler2 = mock.MagicMock(return_value=other)
handler3 = mock.MagicMock(return_value=other)
fulfilled = mock.MagicMock()
def final_rejected(argument):
test_case.assertEqual(argument, sentinel)
handler1.assert_called_once_with(sentinel)
handler2.assert_called_once_with(sentinel)
handler3.assert_called_once_with(sentinel)
fulfilled.assert_not_called()
done()
promise.then(fulfilled, handler1)
promise.then(fulfilled, handler2)
promise.then(fulfilled, handler3)
promise.then(None, final_rejected)
def multiple_one_throws(test_case, promise, done):
handler1 = mock.MagicMock(return_value=other)
handler2 = mock.MagicMock(side_effect=AttributeError())
handler3 = mock.MagicMock(return_value=other)
fulfilled = mock.MagicMock()
def final_rejected(argument):
test_case.assertEqual(argument, sentinel)
handler1.assert_called_once_with(sentinel)
handler2.assert_called_once_with(sentinel)
handler3.assert_called_once_with(sentinel)
fulfilled.assert_not_called()
done()
promise.then(fulfilled, handler1)
promise.then(fulfilled, handler2)
promise.then(fulfilled, handler3)
promise.then(None, final_rejected)
def multiple_branching_chains_each_with_own_value(test_case, promise, done):
test_case.session['semiDone'] = 0
def semidone():
test_case.session['semiDone'] += 1
if test_case.session['semiDone'] == 3:
done()
def branch01(value):
return sentinel
def branch01_final(value):
test_case.assertIs(value, sentinel)
semidone()
branch02_error = TypeError()
def branch02(value):
raise branch02_error
def branch02_final(value):
test_case.assertIs(value, branch02_error)
semidone()
def branch03(value):
return sentinel3
def branch03_final(value):
test_case.assertIs(value, sentinel3)
semidone()
promise.then(None, branch01).then(branch01_final)
promise.then(None, branch02).then(None, branch02_final)
promise.then(None, branch03).then(branch03_final)
def on_fulfilled_handlers_called_in_original_order(test_case, promise, done):
handler_mock = mock.MagicMock(**{'handler01.return_value': sentinel,
'handler02.return_value': sentinel2,
'handler03.return_value': sentinel3})
promise.then(None, handler_mock.handler01)
promise.then(None, handler_mock.handler02)
promise.then(None, handler_mock.handler03)
def test_handlers(value):
test_case.assertIs(dummy, value)
method_calls = [a[0] for a in handler_mock.method_calls]
        test_case.assertEqual(['handler01', 'handler02', 'handler03'], method_calls)
done()
promise.then(None, test_handlers)
def order_manipulated_in_a_promise(test_case, promise, done):
handler_mock = mock.MagicMock(**{'handler01.return_value': sentinel,
'handler02.return_value': sentinel2,
'handler03.return_value': sentinel3})
def inject_handler_during_execution(value):
handler_mock.handler01()
promise.then(None, handler_mock.handler03)
promise.then(None, inject_handler_during_execution)
promise.then(None, handler_mock.handler02)
def test_handlers():
method_calls = [a[0] for a in handler_mock.method_calls]
        test_case.assertEqual(['handler01', 'handler02', 'handler03'], method_calls)
done()
def schedule_test(value):
test_case.scheduler.schedule_task(test_handlers, 0.015)
promise.then(None, schedule_test)
MultipleBoringTestCases = generate_rejected_test_case(method=multiple_boring_tests, value=sentinel,
module=__name__,
name='MultipleBoringTestCases')
MultipleOneThrowsTestCases = generate_rejected_test_case(method=multiple_one_throws, value=sentinel,
module=__name__,
name='MultipleOneThrowsTestCases')
MultipleBranchingTestCases = generate_rejected_test_case(method=multiple_branching_chains_each_with_own_value,
module=__name__,
value=dummy,
name='MultipleBranchingTestCases')
FulfilledHandlersInOrder = generate_rejected_test_case(method=on_fulfilled_handlers_called_in_original_order,
value=dummy,
module=__name__,
name='FulfilledHandlersInOrder')
OrderManipulatedInPromise = generate_rejected_test_case(method=order_manipulated_in_a_promise,
value=dummy,
module=__name__,
name='OrderManipulatedInPromise')
| 2.75
| 3
|
src/swiftlet/azext_swiftlet/vendored_sdks/swiftlet/operations/_virtual_machine_operations.py
|
Mannan2812/azure-cli-extensions
| 207
|
12776373
|
<gh_stars>100-1000
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineOperations(object):
"""VirtualMachineOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~swiftlet_management_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _start_initial(
self,
vm_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}/start'} # type: ignore
def begin_start(
self,
vm_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Start a specified virtual machine.
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_initial(
vm_name=vm_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}/start'} # type: ignore
def _stop_initial(
self,
vm_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}/stop'} # type: ignore
def begin_stop(
self,
vm_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Stop a specified virtual machine.
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._stop_initial(
vm_name=vm_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}/stop'} # type: ignore
def list_image(
self,
location, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.SwiftletImageListResult"]
"""List all Swiftlet images available for the specified subscription and Azure location.
:param location: The name of a supported Azure region.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SwiftletImageListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~swiftlet_management_client.models.SwiftletImageListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SwiftletImageListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
if not next_link:
# Construct URL
url = self.list_image.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SwiftletImageListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_image.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Swiftlet/locations/{location}/swiftletImages'} # type: ignore
def list_bundle(
self,
location, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.SwiftletBundleListResult"]
"""List all Swiftlet bundles available for the specified subscription and Azure location.
:param location: The name of a supported Azure region.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SwiftletBundleListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~swiftlet_management_client.models.SwiftletBundleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SwiftletBundleListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
if not next_link:
# Construct URL
url = self.list_bundle.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SwiftletBundleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_bundle.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Swiftlet/locations/{location}/swiftletBundles'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
vm_name, # type: str
location, # type: str
swiftlet_bundle_sku, # type: str
swiftlet_image_id, # type: str
tags=None, # type: Optional[Dict[str, str]]
username=None, # type: Optional[str]
ssh_public_key=None, # type: Optional[str]
        password=None,  # type: Optional[str]
ports=None, # type: Optional[List["models.Port"]]
startup_script=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "models.VirtualMachine"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachine"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
_parameters = models.VirtualMachine(tags=tags, location=location, swiftlet_bundle_sku=swiftlet_bundle_sku, swiftlet_image_id=swiftlet_image_id, username=username, ssh_public_key=ssh_public_key, password=password, ports=ports, startup_script=startup_script)
api_version = "2020-03-01-preview"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_parameters, 'VirtualMachine')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
vm_name, # type: str
location, # type: str
swiftlet_bundle_sku, # type: str
swiftlet_image_id, # type: str
tags=None, # type: Optional[Dict[str, str]]
username=None, # type: Optional[str]
ssh_public_key=None, # type: Optional[str]
        password=None,  # type: Optional[str]
ports=None, # type: Optional[List["models.Port"]]
startup_script=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.VirtualMachine"]
"""Create or update a virtual machine.
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param location: The geo-location where the resource lives.
:type location: str
:param swiftlet_bundle_sku: Specifies the Swiftlet bundle of this virtual machine (which
specifies the selected tier of memory, processing, and storage).
:type swiftlet_bundle_sku: str
:param swiftlet_image_id: The image ID to use. The image "platform" must match the
"supportedImagePlatform" of the specified swiftletBundleSku.
:type swiftlet_image_id: str
:param tags: Resource tags.
:type tags: dict[str, str]
        :param username: The username for connecting to the virtual machine.
:type username: str
:param ssh_public_key: The SSH public key used to connect to this virtual machine. Only
supported on Linux images. If specified on a Windows image, an error will be returned.
:type ssh_public_key: str
:param password: The password for connecting to this Swiftlet. If the image platform type is
"linux", this is optional if sshPublicKey is set. If the image platform type is "windows", this
is required.
:type password: str
:param ports: The ports on which inbound traffic will be allowed.
:type ports: list[~swiftlet_management_client.models.Port]
:param startup_script: An inline script that will run upon startup of the virtual machine.
:type startup_script: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachine or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~swiftlet_management_client.models.VirtualMachine]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachine"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
location=location,
swiftlet_bundle_sku=swiftlet_bundle_sku,
swiftlet_image_id=swiftlet_image_id,
tags=tags,
username=username,
ssh_public_key=ssh_public_key,
password=password,
ports=ports,
startup_script=startup_script,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def _update_initial(
self,
resource_group_name, # type: str
vm_name, # type: str
tags=None, # type: Optional[Dict[str, str]]
ports=None, # type: Optional[List["models.Port"]]
**kwargs # type: Any
):
# type: (...) -> "models.VirtualMachine"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachine"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
_parameters = models.VirtualMachineUpdate(tags=tags, ports=ports)
api_version = "2020-03-01-preview"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_parameters, 'VirtualMachineUpdate')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def begin_update(
self,
resource_group_name, # type: str
vm_name, # type: str
tags=None, # type: Optional[Dict[str, str]]
ports=None, # type: Optional[List["models.Port"]]
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.VirtualMachine"]
"""Update a virtual machine.
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param ports: Specifies the list of ports to be opened.
:type ports: list[~swiftlet_management_client.models.Port]
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachine or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~swiftlet_management_client.models.VirtualMachine]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachine"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
tags=tags,
ports=ports,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
vm_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
vm_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Delete a virtual machine.
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
vm_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.VirtualMachine"
"""Get information about the virtual machine.
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachine, or the result of cls(response)
:rtype: ~swiftlet_management_client.models.VirtualMachine
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachine"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachine', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines/{vmName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.VirtualMachineListResult"]
"""List all of the virtual machines in the specified resource group. Use the nextLink property in
the response to get the next page of virtual machines.
:param resource_group_name: The name of the resource group within the user’s subscription ID.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualMachineListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~swiftlet_management_client.models.VirtualMachineListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualMachineListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Swiftlet/virtualMachines'} # type: ignore
def list_by_subscription(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["models.VirtualMachineListResult"]
"""List all of the virtual machines in the specified subscription. Use the nextLink property in
the response to get the next page of virtual machines.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualMachineListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~swiftlet_management_client.models.VirtualMachineListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-03-01-preview"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualMachineListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Swiftlet/virtualMachines'} # type: ignore
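# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated code): consuming the ItemPaged
# iterators above. The import path, client constructor, and operations
# attribute name below are assumptions for illustration; check the actual
# package layout of the generated SDK.
#
#   from azure.identity import DefaultAzureCredential
#   from swiftlet_management_client import SwiftletManagementClient
#
#   client = SwiftletManagementClient(
#       credential=DefaultAzureCredential(),
#       subscription_id="<subscription-id>",
#   )
#   # ItemPaged follows the nextLink continuation transparently, so a plain
#   # for-loop walks every page:
#   for vm in client.virtual_machines.list_by_subscription():
#       print(vm.name)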
| 1.773438
| 2
|
unfolders/result.py
|
peguerosdc/unfolding_benchmark
| 0
|
12776374
|
class UnfoldingResult:
def __init__(self, solution, error):
self.solution = solution
self.error = error
| 1.976563
| 2
|
qforce/tests/test_qm_orca.py
|
xiki-tempula/qforce
| 0
|
12776375
|
<reponame>xiki-tempula/qforce
import numpy as np
import pytest
from qforce_examples import Orca_default, Orca_default_NBO6
from ase.units import Hartree, mol, kJ
from qforce.qm.orca import ReadORCA
from .test_qm_gaussian import TestReadHessian as Gaussian_hessian
from .test_qm_gaussian import TestReadScan as Gaussian_scan
class TestReadHessian(Gaussian_hessian):
@staticmethod
@pytest.fixture(scope='class')
def hessian():
class Config(dict):
charge_method = "cm5"
charge = 0
multiplicity = 1
(n_atoms, charge, multiplicity, elements, coords, hessian,
b_orders, point_charges) = ReadORCA().hessian(Config(),
Orca_default['out_file'],
Orca_default['hess_file'],
Orca_default['pc_file'],
Orca_default['coord_file'],)
return n_atoms, charge, multiplicity, elements, coords, hessian, b_orders, point_charges
def test_coords(self, hessian):
n_atoms, charge, multiplicity, elements, coords, hessian, b_orders, point_charges = hessian
assert all(np.isclose(coords[0, :], [-5.48129672124137, 1.91902042205872,
-0.07175480174836], rtol=0.01))
def test_point_charges(self, hessian):
n_atoms, charge, multiplicity, elements, coords, hessian, b_orders, point_charges = hessian
assert all(np.isclose(point_charges, [-0.052487, -0.017287, 0.017356, 0.015348, 0.015348,
-0.05249, 0.013127, 0.013127, 0.017322, 0.01534,
0.01534], atol=0.0001))
def test_hessian(self, hessian):
n_atoms, charge, multiplicity, elements, coords, hessian, b_orders, point_charges = hessian
assert np.isclose(hessian[0], 4336.9313407, rtol=0.1)
assert np.isclose(hessian[1], -35.78124679, rtol=0.1)
assert np.isclose(hessian[2], 5317.32106175, rtol=0.1)
def test_b_orders(self, hessian):
n_atoms, charge, multiplicity, elements, coords, hessian, b_orders, point_charges = hessian
assert all(np.isclose(b_orders[0],
[0, 0.9730, 0.9759, 0.9752,
0.9752, -0.0135, -0.0051,
-0.0051, 0.0025, -0.0012,
-0.0012], atol=0.0001))
assert np.isclose(b_orders[9][10], -0.0059, atol=0.0001)
class TestReadScan(Gaussian_scan):
@staticmethod
@pytest.fixture(scope='class')
def scan():
class Config(dict):
charge_method = "cm5"
(n_atoms, coords, angles,
energies, point_charges) = ReadORCA().scan(Config(), Orca_default['fragments_out'])
return n_atoms, coords, angles, energies, point_charges
def test_coords(self, scan):
(n_atoms, coords, angles, energies, point_charges) = scan
assert all(np.isclose(coords[0][0], [-5.481060, 1.918927, -0.071752], rtol=0.01))
assert len(coords) == 24
def test_angles(self, scan):
(n_atoms, coords, angles, energies, point_charges) = scan
assert np.isclose(angles[0], -180, atol=0.01)
assert np.isclose(angles[1], -180+15, atol=0.01)
assert len(angles) == 24
def test_energies(self, scan):
(n_atoms, coords, angles, energies, point_charges) = scan
energy = ['-119.07752449', '-119.07687528', '-119.07524691',
'-119.07350525', '-119.07272311', '-119.07344151',
'-119.07517421', '-119.07683536', '-119.07752403',
'-119.07688738', '-119.07524888', '-119.07348511',
'-119.07272154', '-119.07349001', '-119.07524223',
'-119.07689394', '-119.07752531', '-119.07683264',
'-119.07517599', '-119.07344223', '-119.07271719',
'-119.07349512', '-119.07524031', '-119.07688126']
energy = np.array([float(point) for point in energy])
energies = energies * kJ / Hartree / mol
assert all(np.isclose(energies, energy, atol=0.01))
| 2.109375
| 2
|
D4/D4.py
|
bitlischieber/AdventOfCode2019
| 0
|
12776376
|
import re
# Your puzzle input is 246540-787419.
## Part 1
pwords = []
for i in range(246540, 787419 + 1):  # the puzzle range is inclusive of both ends
if(int(str(i)[0]) <= int(str(i)[1]) <= int(str(i)[2]) <= int(str(i)[3]) <= int(str(i)[4]) <= int(str(i)[5])):
if((str(i)[0] == str(i)[1]) or (str(i)[1] == str(i)[2]) or (str(i)[2] == str(i)[3]) or (str(i)[3] == str(i)[4]) or (str(i)[4] == str(i)[5])):
pwords.append(i)
print("Candidate found " + str(i))
print("Number of possible passwords: " + str(len(pwords)))
input()
## Part 2
pwords2 = []
for pw in pwords:
    # strip runs of three or more identical digits
x = re.sub(r"(\d)\1{2,6}", "", str(pw))
if(x):
        # then a run of exactly two digits must remain somewhere else
y = re.search(r"(\d)\1", str(x))
if(not y):
print("Invalid password " + str(pw))
else:
# collect valid pw for counting
pwords2.append(pw)
print("Number of possible passwords left: " + str(len(pwords2)))
input()
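# A sketch of the same checks using itertools.groupby, which makes both rules
# explicit: digits must be non-decreasing, part 1 needs any run of length >= 2,
# part 2 needs a run of exactly 2.
from itertools import groupby
def is_valid(n, exact_pair=False):
    s = str(n)
    if any(a > b for a, b in zip(s, s[1:])):
        return False  # digits decrease somewhere
    runs = [len(list(g)) for _, g in groupby(s)]
    if exact_pair:
        return any(r == 2 for r in runs)
    return any(r >= 2 for r in runs)
# e.g. is_valid(112233, exact_pair=True) -> True; is_valid(123444, exact_pair=True) -> False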
| 3.59375
| 4
|
python/example_code/nifcloud-basic-demo.py
|
nifcloud/nifcloud-sample
| 0
|
12776377
|
from nifcloud import session
import sys
import base64
# ---- define name -------
# -- key name ----------
SSH_KEY_FILE_NAME = 'key.pub'
EAST31_KEY_NAME = "key"
# -- security group ----
WEB_SECURITY_GP_NAME = "webfw"
DB_SECURITY_GP_NAME = "dbfw"
# -- Private LAN name ---
WEB_DB_PRV_NET_NAME = "webdbnet"
# -- Router name ---
WEB_DB_ROUTER_NAME = "webdbRtr"
# -------------------------
# -------- Create Firewall --------------------------------------
def wait_for_fw_create(client, sg_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('security_group_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'applied',
],
'Name': 'group-name'
},
],
GroupName=[sg_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def wait_for_fw_applied(client, sg_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('security_group_applied')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'applied',
],
'Name': 'group-name'
},
],
GroupName=[sg_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_fw(client):
try:
sg = client.create_security_group(
GroupName=WEB_SECURITY_GP_NAME,
GroupDescription="WEB FW"
)
print("create : ", sg)
wait_for_fw_create(client, WEB_SECURITY_GP_NAME)
sg = client.create_security_group(
GroupName=DB_SECURITY_GP_NAME,
GroupDescription="DB FW"
)
print("create : ", sg)
        wait_for_fw_create(client, DB_SECURITY_GP_NAME)
# -------------- web fw -----------------------------
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'class b allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestIpRanges': [
{
'CidrIp': "192.168.2.0/24",
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
                    'Description': 'DB allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestGroups': [
{
'GroupName': DB_SECURITY_GP_NAME,
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=WEB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'ssh allow(example IP)',
'FromPort': 22,
'ToPort': 22,
'InOut': 'IN',
'IpProtocol': 'TCP',
'ListOfRequestIpRanges': [
{
'CidrIp': "203.0.113.1",
},
],
},
]
)
wait_for_fw_applied(client, WEB_SECURITY_GP_NAME)
# ------------- dbfw ----------------------------
client.authorize_security_group_ingress(
GroupName=DB_SECURITY_GP_NAME,
IpPermissions=[
{
                    'Description': 'DB allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestGroups': [
{
'GroupName': WEB_SECURITY_GP_NAME,
},
],
},
]
)
wait_for_fw_applied(client, DB_SECURITY_GP_NAME)
client.authorize_security_group_ingress(
GroupName=DB_SECURITY_GP_NAME,
IpPermissions=[
{
'Description': 'class b allow',
'InOut': 'IN',
'IpProtocol': 'ANY',
'ListOfRequestIpRanges': [
{
'CidrIp': "192.168.2.0/24",
},
],
},
]
)
wait_for_fw_applied(client, DB_SECURITY_GP_NAME)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.delete_security_group(
GroupName=WEB_SECURITY_GP_NAME,
)
client.delete_security_group(
GroupName=DB_SECURITY_GP_NAME,
)
sys.exit(1)
# ------ Create Virtual Server ----------------------------------
def wait_for_instance_create(client, instance_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('instance_running')
wait_result = waiter.wait(
InstanceId=[instance_name, ],
Tenancy=['all', ],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def wait_for_instance_stop(client, instance_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('instance_stopped')
wait_result = waiter.wait(
InstanceId=[instance_name, ],
Tenancy=['all', ],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_instance(client):
try:
"""
client.run_instances(
AccountingType='2',#'1':Monthly
#'2':Payper(Default)
Admin='string',#Windows Admin User Name
Agreement="False",#True:License Agree for SPLA,RHEL and anymore.
# see also https://pfs.nifcloud.com/service/licence_ms.htm
# https://pfs.nifcloud.com/service/rhel.htm
#False:Not License Agree(Default)
Description='string',# memo
DisableApiTermination=True,#True :Not Allow to delete from API(Default)
#False:Allow to delete from API
ImageId='string', #OS Image Name
InstanceId='string',#Server Name
InstanceType="",#Server Type
#see also https://pfs.nifcloud.com/api/rest/RunInstances.htm
KeyName='string',#SSH Key Name
        License=[#License information. See also https://pfs.nifcloud.com/service/licence_ms.htm
{#No.1 License Info
'LicenseName': 'RDS'|'Office(Std)'|'Office(Pro Plus)',
'LicenseNum' : 'string'
},
#...
{#No.N License Info
'LicenseName': 'RDS'|'Office(Std)'|'Office(Pro Plus)',
'LicenseNum' : 'string'
},
],
IpType='',#'static' :Use Global IP
                #'elastic':Use Replacement IP. Should set PublicIp
#'none' :Not Use Global IP
PublicIp='string',#If you use Replacement IP set this
NetworkInterface=[#Network Config.
{#Full argument
'IpAddress': 'string',#See also NetworkInterface.n.IpAddress in
#https://pfs.nifcloud.com/api/rest/RunInstances.htm
#if use the DHCP delete this
'NetworkId': 'string',#Connect Network
#net-COMMON_GLOBAL :Common Global
#net-COMMON_PRIVATE:Common Private
#NetworkID :Network ID at Private LAN
'NetworkName': 'string'
},
{#Common Private DHCP sample
'NetworkId': 'net-COMMON_PRIVATE',
},
],
Password='<PASSWORD>',#Password for Windows Admin user
Placement={
'AvailabilityZone': 'string',#Zone Name.
#For jp-east-1, east-11,east-12,east-13,east-14 can be selected.
#For jp-west-1, west-11,west-12,west-13 can be selected.
},
SecurityGroup=[#Firewall Group name
'string',
],
UserData={#Server Boot Script
'Content': 'string',#Encoded Server Boot Script body
'Encoding': 'string'#Encoding Type
#'' :text
#'base64':base64 encode(Default)
}
)
"""
client.run_instances(
AccountingType='2',
Description='web sv',
DisableApiTermination=False,
ImageId='220',
InstanceId='websv',
InstanceType="e-small4",
KeyName=EAST31_KEY_NAME,
IpType='static',
NetworkInterface=[
{
'NetworkName': WEB_DB_PRV_NET_NAME,
},
],
Placement={
'AvailabilityZone': 'east-31',
},
SecurityGroup=[
WEB_SECURITY_GP_NAME,
],
)
wait_for_instance_create(client, 'websv')
client.run_instances(
AccountingType='2',
Description='DB sv',
DisableApiTermination=False,
ImageId='220',
InstanceId='dbsv',
InstanceType="e-small4",
KeyName=EAST31_KEY_NAME,
IpType='none',
NetworkInterface=[
{
'NetworkName': WEB_DB_PRV_NET_NAME,
},
],
Placement={
'AvailabilityZone': 'east-31',
},
SecurityGroup=[
                DB_SECURITY_GP_NAME,
],
)
wait_for_instance_create(client, 'dbsv')
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.stop_instances(
Force=True,
InstanceId=[
'websv',
],
)
wait_for_instance_stop(client, 'websv')
client.terminate_instances(
InstanceId=[
'websv',
]
)
client.stop_instances(
Force=True,
InstanceId=[
'dbsv',
],
)
wait_for_instance_stop(client, 'dbsv')
client.terminate_instances(
InstanceId=[
'dbsv',
]
)
sys.exit(1)
# ----- import ssh key ------------------------------------------
def import_sshkey(client):
try:
        with open(SSH_KEY_FILE_NAME, "rb") as ssh_pub_file:
client.import_key_pair(
Description='memo',
KeyName=EAST31_KEY_NAME,
PublicKeyMaterial=base64.b64encode(
ssh_pub_file.read()).decode("ascii")
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
sys.exit(1)
# ----- Create Private LAN --------------------------------------
def wait_for_private_lan_create(client, private_lan_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('private_lan_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'available',
],
'Name': 'state'
},
],
PrivateLanName=[private_lan_name, ],
WaiterConfig={
'Delay': 20,
'MaxAttempts': 40
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def create_private_lan(client):
try:
"""
client.nifty_create_private_lan(
AccountingType = '2',#'1':Monthly
#'2':Payper(Default)
AvailabilityZone = 'string',#Zone Name.east-31,east-11,west-12 and more
CidrBlock = 'string',#CIDR for Private LAN address
Description = 'string',#memo
PrivateLanName = 'string'#Private LAN Name
)
"""
client.nifty_create_private_lan(
AccountingType='2', # '1':Monthly
AvailabilityZone='east-31', # Zone Name.east-31,east-11,west-12 and more
CidrBlock='192.168.170.0/24', # CIDR for Private LAN address
Description='memo', # memo
PrivateLanName=WEB_DB_PRV_NET_NAME # Private LAN Name
)
wait_for_private_lan_create(client, WEB_DB_PRV_NET_NAME)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
client.nifty_delete_private_lan(
PrivateLanName=WEB_DB_PRV_NET_NAME
)
sys.exit(1)
# ------ Create Private LAN DHCP Router -------------------------
def wait_for_create_router(client, router_name):
print("wait : ", sys._getframe().f_code.co_name)
    wait_result = None
    try:
waiter = client.get_waiter('router_exists')
wait_result = waiter.wait(
Filter=[
{
'ListOfRequestValue': [
'available',
],
'Name': 'state'
},
],
RouterName=[
router_name,
],
WaiterConfig={ # Wait 10 min with a check interval of 30s.
'Delay': 30,
'MaxAttempts': 20
}
)
except Exception as e:
print("exception :", e, "\nin :", sys._getframe().f_code.co_name)
finally:
return wait_result
def enable_dhcp(client):
# Create DHCP Config
    dhcp_config_result = client.nifty_create_dhcp_config()
    dhcp_config_id = dhcp_config_result['DhcpConfig']['DhcpConfigId']
# DHCP Setting ADD
client.nifty_create_dhcp_ip_address_pool(
Description='memo', # memo
DhcpConfigId=dhcp_config_id, # DHCP Config ID
StartIpAddress='192.168.170.100', # DHCP Start IP
StopIpAddress='192.168.170.250' # DHCP End IP
)
# Create Router
"""
client.nifty_create_router(
AccountingType = '2',#'1':Monthly
#'2':Payper(Default)
RouterName = 'string',#Router Name
AvailabilityZone = 'string',#Zone Name.east-31,east-11,west-12 and more
Description = 'string', #memo
NetworkInterface=[
{
            'Dhcp' : True, #True :DHCP Enable. The DHCP items below are required (Default)
#False:DHCP Disable
'DhcpConfigId' : 'string',#DHCP Config ID
'DhcpOptionsId': 'string',#DHCP Option ID
'IpAddress' : 'string',#IP Address at Connectted Private LAN
'NetworkId' : 'string',#Select Setting Network.Exclusive NetworkName
            'NetworkName' : 'string' #Select Setting Network. Exclusive with NetworkId
},
],
SecurityGroup=[#Firewall Group(Option)
'string',
],
Type='small'#'small' :Max 10 Rule(Default)
#'medium':Max 30 Rule
#'large' :Max 80 Rule
)
"""
client.nifty_create_router(
AccountingType='2',
RouterName=WEB_DB_ROUTER_NAME,
AvailabilityZone='east-31',
Description='memo', # memo
NetworkInterface=[
{
'Dhcp': True,
'DhcpConfigId': dhcp_config_id,
'IpAddress': '192.168.170.1',
'NetworkName': WEB_DB_PRV_NET_NAME
},
],
Type='small'
)
wait_for_create_router(client, WEB_DB_ROUTER_NAME)
# -------------- main ----------------
client = session.get_session().create_client(
"computing",
region_name="jp-east-3",
)
import_sshkey(client)
create_fw(client)
create_private_lan(client)
enable_dhcp(client)
create_instance(client)
| 2.203125
| 2
|
astetik/style/random_colors.py
|
meirm/astetik
| 8
|
12776378
|
<gh_stars>1-10
import numpy as np
def randomcolor():
''' PICKS COLORS RANDOMLY
'''
colors = []
for i in range(20):
        colors.append(list(np.random.randint(0, 256, 3) / 255))  # 256: randint's upper bound is exclusive
return colors
| 3.1875
| 3
|
3.4_matplotlib_live.py
|
codernayeem/python-data-science-cheat-sheet
| 0
|
12776379
|
from matplotlib import pyplot as plt
import pandas as pd
import random
from itertools import count
from matplotlib.animation import FuncAnimation
plt.style.use('bmh')
# index = count()
# x = []
# y = []
# def animate(i):
# x.append(next(index))
# y.append(random.randint(1, 10))
# plt.cla()
# plt.plot(x, y)
# plt.title('Level Monitor')
# plt.xlabel('Count')
# plt.ylabel('Levels')
# plt.yticks(ticks=range(12))
# ani = FuncAnimation(plt.gcf(), animate, interval=1000)
# plt.tight_layout()
# plt.show()
def animate(i):
df = pd.read_csv('data\\changing_data.csv')
x = df.iloc[-50:, 0]
y1 = df.iloc[-50:, 1]
y2 = df.iloc[-50:, 2]
plt.cla() # clear axis
plt.plot(x, y1, label='Ajaira LTD')
plt.plot(x, y2, label='<NAME>')
plt.fill_between(x, y1, y2, where=y1 > y2, color='b', alpha=0.5, interpolate=True)
plt.fill_between(x, y1, y2, where=y1 <= y2, color='r', alpha=0.5, interpolate=True)
plt.title('Channel Subscriptions')
plt.xlabel('Days')
plt.ylabel('Subscriptions')
plt.legend()
ani = FuncAnimation(plt.gcf(), animate, interval=1000)
plt.tight_layout()
plt.show()
| 3.15625
| 3
|
Algorithms/Dynamic Programming/Stock Maximize/solution.py
|
kitarp29/ds-algo-solutions
| 48
|
12776380
|
# input number of testcases
test=int(input())
for _ in range(test):
# input the number of predicted prices for WOT
n=int(input())
# input array of predicted stock price
a=list(map(int,input().split()))
    c = 0
    i = len(a) - 1
    while i >= 0:
        d = a[i]   # running local maximum: every earlier share sells at this price
        l = i
        p = 0
        while i >= 0 and a[i] <= d:  # test i first so a[-1] is never read
            p += a[i]
            i -= 1
        c += (l - i) * a[l] - p
    # print one line per test case with the maximum profit obtainable
    print(c)
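# The same greedy idea written as a standalone sketch: scan from the right,
# keep a running maximum, and sell every earlier share at that maximum.
def max_profit(prices):
    profit, best = 0, 0
    for p in reversed(prices):
        best = max(best, p)   # best future (or current) selling price
        profit += best - p    # buy at p, sell at best
    return profit
# e.g. max_profit([1, 2, 100]) == 197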
| 3.5625
| 4
|
mailtemplates/lib/celery_tasks.py
|
axant/tgapp-mailtemplates
| 2
|
12776381
|
<filename>mailtemplates/lib/celery_tasks.py
from tgext.celery.celery import celery_app
from tgext.mailer import get_mailer
from tg import config
import logging
celery_app.config_from_object(config.get('celery_configuration_object'))
log = logging.getLogger(__name__)
@celery_app.task(name='mailtemplates_async_send_email')
def mailtemplates_async_send_email(message):
"""Sends email asynchronously throuh tgext.celery"""
log.info('mailtemplates_async_send_email started')
mailer = get_mailer(None)
mailer.send_immediately(message)
log.info('mailtemplates_async_send_email ended')
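# Usage sketch (the Message construction below is an assumption based on
# tgext.mailer's API; adjust to the actual call site):
#
#   from tgext.mailer import Message
#   message = Message(subject="Hello", recipients=["user@example.com"],
#                     body="...")
#   mailtemplates_async_send_email.delay(message)
#
# .delay() is Celery's standard shortcut for apply_async((message,)).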
| 2.234375
| 2
|
Data Structures/LinkedList/DoublyLL/python/doublyLL.py
|
ashishsahu1/DataStructures
| 1
|
12776382
|
class node:
def __init__(self,value):
self.data=value
self.next=None
self.prev=None
class DoubleLinkedList:
def __init__(self):
self.head=None
def insertAtBeg(self,value):
newnode = node(value)
if self.head==None:
self.head=newnode
else:
self.head.prev = newnode
newnode.next=self.head
self.head=newnode
def insertAtEnd(self,value):
newnode=node(value)
if self.head == None:
self.head=newnode
else:
temp=self.head
while temp.next is not None:
temp=temp.next
temp.next=newnode
newnode.prev=temp
def insertAtBet(self,value,pos):
pass
    def deleteAtBeg(self):
        if self.head is None:
            return
        self.head = self.head.next
        if self.head is not None:
            self.head.prev = None
    def deleteAtEnd(self):
        if self.head is None:
            return
        if self.head.next is None:
            self.head = None
            return
        temp = self.head
        while temp.next.next is not None:
            temp = temp.next
        temp.next = None
def deleteAtBet(self,pos):
pass
def show(self):
temp = self.head
while(temp is not None):
print(temp.data)
temp=temp.next
def revshow(self):
temp = self.head
while(temp.next is not None):
temp=temp.next
while(temp is not None):
print(temp.data)
temp=temp.prev
def menu():
print('------------------------------------------------------------------------------')
print()
print('1. Insertion at beginning')
print('2. Insertion at end')
print('3. Insertion at between')
print('4. Deletion at beginning')
print('5. Deletion at end')
print('6. Deletion at between')
print('7. Show')
print('8. Reverse show')
    print('9. Exit')
print()
print('------------------------------------------------------------------------------')
if __name__ == '__main__':
ll = DoubleLinkedList()
while(True):
menu()
ch = int(input('Enter your choice : '))
        if ch==1:
            value = int(input('Enter your data : '))
            ll.insertAtBeg(value)
        elif ch==2:
            value = int(input('Enter your data : '))
            ll.insertAtEnd(value)
        elif ch==3:
            value = int(input('Enter your data : '))
            pos = int(input('Enter your position : '))
            ll.insertAtBet(value, pos)
elif ch==4:
ll.deleteAtBeg()
elif ch==5:
ll.deleteAtEnd()
elif ch==6:
            pos = int(input('Enter the position : '))
ll.deleteAtBet(pos)
elif ch==7:
print('***************************************************************************')
ll.show()
print('***************************************************************************')
elif ch==8:
print('***************************************************************************')
ll.revshow()
print('***************************************************************************')
elif ch==9:
exit()
else:
print("Enter some valid option")
| 3.796875
| 4
|
tests/test_parameter/test_time_components_parameter.py
|
roocs/roocs-utils
| 1
|
12776383
|
import pytest
from roocs_utils.exceptions import InvalidParameterValue
from roocs_utils.parameter.time_components_parameter import string_to_dict
from roocs_utils.parameter.time_components_parameter import time_components
from roocs_utils.parameter.time_components_parameter import TimeComponentsParameter
type_error = (
"Input type of <{}> not allowed. Must be one of: "
"[<class 'dict'>, <class 'str'>, <class "
"'roocs_utils.parameter.param_utils.TimeComponents'>, <class 'NoneType'>]"
)
tc_str = "year:1999,2000,2001|month:12,01,02|hour:00"
tc_dict = {"year": [1999, 2000, 2001], "month": [12, 1, 2], "hour": [0]}
tc_dict_month_long_names = {
"year": [1999, 2000, 2001],
"month": ["December", "January", "February"],
"hour": [0],
}
tc_dict_short_names = {
"year": [1999, 2000, 2001],
"month": ["dec", "jan", "feb"],
"hour": [0],
}
def test_string_input():
# year, month, hour
parameter = TimeComponentsParameter("year:1999,2000,2001|month:dec,jan,feb|hour:00")
assert parameter.value["year"] == [1999, 2000, 2001]
assert parameter.value["month"] == [12, 1, 2]
assert parameter.value["hour"] == [0]
assert parameter.get_bounds() == ("1999-01-01T00:00:00", "2001-12-31T23:59:59")
# month
parameter = TimeComponentsParameter("month:12,1,02")
assert parameter.value["month"] == [12, 1, 2]
assert parameter.get_bounds() == (None, None)
# single year
parameter = TimeComponentsParameter("year:2010|month:mar,apr,may")
assert parameter.value["year"] == [2010]
assert parameter.value["month"] == [3, 4, 5]
assert parameter.get_bounds() == ("2010-01-01T00:00:00", "2010-12-31T23:59:59")
def test_TimeComponents_class():
tc1 = time_components(**string_to_dict(tc_str))
tc2 = time_components(**tc_dict)
tc3 = time_components(**tc_dict_month_long_names)
tc4 = time_components(**tc_dict_short_names)
assert tc1.value == tc2.value
assert tc2.value == tc3.value
assert tc2.value == tc4.value
def test__str__():
parameter = TimeComponentsParameter(tc_str)
assert str(parameter).startswith("Time components to select:")
assert "month => [12, 1, 2]" in str(parameter)
def test_raw():
parameter = TimeComponentsParameter(tc_str)
assert parameter.raw == tc_str
def test_validate_error_id():
with pytest.raises(InvalidParameterValue) as exc:
TimeComponentsParameter("I am rubbish")
assert str(exc.value) == "Cannot create TimeComponentsParameter from: I am rubbish"
def test_bad_type_input():
with pytest.raises(InvalidParameterValue) as exc:
TimeComponentsParameter(34)
assert str(exc.value) == type_error.format("class 'int'")
def test_dict():
for input_dct in (tc_dict, tc_dict_short_names, tc_dict_month_long_names):
parameter = TimeComponentsParameter(input_dct)
assert parameter.value == tc_dict
def test_time_components_input():
tc = time_components(**tc_dict)
parameter = TimeComponentsParameter(tc)
assert parameter.value == tc_dict
def test_time_components_with_args():
tc = time_components(year=[200, 500], hour="06")
assert tc.value["year"] == [200, 500]
assert tc.value["hour"] == [6]
def test_whitespace():
parameter = TimeComponentsParameter(tc_str + " ")
assert parameter.value == tc_dict
def test_empty_string():
parameter = TimeComponentsParameter("")
assert parameter.value is None
def test_none():
parameter = TimeComponentsParameter(None)
assert parameter.value is None
def test_class_instance():
parameter = TimeComponentsParameter(tc_str)
new_parameter = TimeComponentsParameter(parameter)
assert new_parameter.value == tc_dict
| 2.65625
| 3
|
apps/puestos/models.py
|
Monse200599/sistema_medico
| 0
|
12776384
|
from django.db import models
class Puesto(models.Model):
nombre = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.nombre
| 2.28125
| 2
|
pytorch_yolo_v1/utils/viz.py
|
ldylab/learning_yolo_family_with_pytorch
| 0
|
12776385
|
import cv2
import numpy as np
def label2rgb(label_np):
    # label_np: class-score map of shape (C, H, W); argmax yields a class-id map
    label_color = np.argmax(label_np, axis=0)
    max_val = label_color.max()
    if max_val > 0:  # avoid division by zero for all-background maps
        label_color = label_color / max_val * 255
    n = label_color.astype(np.uint8)
    # applyColorMap takes an OpenCV colormap constant, not the string 'jet'
    label_color = cv2.applyColorMap(n, cv2.COLORMAP_JET)
    return label_color
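# Usage sketch (assumes a (C, H, W) class-score map, e.g. network logits):
#
#   scores = np.random.rand(21, 224, 224)
#   rgb = label2rgb(scores)          # BGR uint8 image, shape (224, 224, 3)
#   cv2.imwrite('label_vis.png', rgb)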
| 3.03125
| 3
|
happy_slot_machine.py
|
breard-r/happy_slot_machine
| 0
|
12776386
|
<reponame>breard-r/happy_slot_machine
#!/bin/env python
#
# Copyright (c) 2015 <NAME>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""
Happy Slot Machine
This software is a python slot machine that cheats on the users. The numbers
selection function is designed so it is very unlikely to get three identical
numbers and increases the chances for the two firsts numbers to be the same.
"""
from collections import Counter
from random import randint
import curses
import time
import os
def get_asset(asset_name):
'''
Loads an asset in memory.
'''
file_name = '{}.txt'.format(asset_name)
asset_path = os.path.join('assets', file_name)
with open(asset_path) as f:
return [l.rstrip('\n') for l in f.readlines()]
slot_asset = get_asset('slot')
numbers_assets = [get_asset(n) for n in range(10)]
results_assets = {
'success': get_asset('success'),
'failure': get_asset('failure'),
'reset': get_asset('reset'),
}
def get_numbers():
'''
Returns a list of 3 not-so-random numbers.
See the attached README.md for more details.
'''
nbs = [randint(0, 9) for _ in range(4)]
set_len = len(set(nbs))
if set_len in [2, 3]:
nbs.sort(key=Counter(nbs).get, reverse=True)
nbs[2], nbs[3] = nbs[3], nbs[2]
        rand_position = randint(1, 100)
        # occasionally move the odd digit into one of the first two slots;
        # the narrower range must be tested first or its branch is unreachable
        if rand_position < 25:
            nbs[1], nbs[2] = nbs[2], nbs[1]
        elif rand_position < 50:
            nbs[0], nbs[2] = nbs[2], nbs[0]
return nbs[:3]
def draw_result(stdscr, status):
'''
Draws the status of the current round.
stdscr: curses window object
status: string, must be either "success", "failure" or "reset"
'''
for i, line in enumerate(results_assets[status]):
stdscr.addstr(i + 13, 1, line)
stdscr.refresh()
def draw_slot(stdscr, offset):
'''
Draws an empty slot.
stdscr: curses window object
offset: integer representing the slot's position
'''
off = offset * (len(slot_asset[0]) + 3) + 1
for i, line in enumerate(slot_asset):
stdscr.addstr(i + 2, off, line)
def draw_slots(stdscr):
'''
Draws the 3 empty slots.
stdscr: curses window object
'''
for i in range(3):
draw_slot(stdscr, i)
stdscr.refresh()
def draw_raw_number(stdscr, nb, offset):
'''
Draws a number in a given slot.
stdscr: curses window object
nb: integer representing number to display
offset: integer representing the slot's position
'''
nb = numbers_assets[nb]
off = offset * (len(nb[0]) + 13) + 6
for i, line in enumerate(nb):
stdscr.addstr(i + 4, off, line)
stdscr.refresh()
def random_excepted(nb):
'''
Returns a random number that cannot be the one passed as a parameter.
nb: integer representing the number to avoid
'''
while True:
n = randint(0, 9)
if n != nb:
return n
def numbers_to_display(nb):
'''
Yields a series of numbers that should be displayed on a slot.
nb: integer representing the last number to be yielded
'''
n = None
for _ in range(10):
time.sleep(0.15)
n = random_excepted(n)
yield n
yield nb
def draw_number(stdscr, nb, offset):
'''
Draws a number in a given slot with an animation.
stdscr: curses window object
nb: integer representing number to display
offset: integer representing the slot's position
'''
for n in numbers_to_display(nb):
draw_raw_number(stdscr, n, offset)
def play(stdscr):
'''
Plays a new round.
stdscr: curses window object
'''
nbs = get_numbers()
draw_slots(stdscr)
draw_result(stdscr, 'reset')
for i, nb in enumerate(nbs):
draw_number(stdscr, nb, i)
draw_result(stdscr, 'success' if len(set(nbs)) == 1 else 'failure')
def clean_input(stdscr):
'''
Removes all unread data from the standard input.
'''
stdscr.nodelay(1)
while stdscr.getch() != -1:
pass
stdscr.nodelay(0)
def main(stdscr):
'''
Initialize the screen and the commands.
stdscr: curses window object
'''
height, width = stdscr.getmaxyx()
curses.curs_set(0)
stdscr.clear()
stdscr.addstr(" SLOT MACHINE", curses.A_REVERSE)
stdscr.chgat(-1, curses.A_REVERSE)
h = height - 1
stdscr.addstr(h, 0, " Press Q to quit, P to play.", curses.A_REVERSE)
stdscr.chgat(h, 0, -1, curses.A_REVERSE)
draw_slots(stdscr)
while True:
stdscr.refresh()
clean_input(stdscr)
key = stdscr.getch()
if key in [ord('q'), ord('Q')]:
break
elif key in [ord('p'), ord('P')]:
play(stdscr)
if __name__ == '__main__':
curses.wrapper(main)
| 3.40625
| 3
|
setup.py
|
CottageLabs/OpenArticleGauge
| 1
|
12776387
|
<filename>setup.py
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"anyjson==0.3.3",
"argparse==1.2.1",
"celery==3.0.25",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-Login==0.1.3",
"Flask-WTF==0.8.3",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4",
"nose==1.3.0",
"setproctitle",
"bleach==1.4",
"python-magic==0.4.6",
]
)
| 1.453125
| 1
|
example.py
|
Kalvar/python-GreyTheory
| 23
|
12776388
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from greytheory import GreyTheory
grey = GreyTheory()
# GM0N
gm0n = grey.gm0n
gm0n.add_outputs([1., 1., 1., 1., 1., 1.], "x1")
gm0n.add_patterns([.75, 1.22, .2, 1., 1., 1.], "x2")
gm0n.add_patterns([.5, 1., .7, .66, 1., .5], "x3")
gm0n.add_patterns([1., 1.09, .4, .33, .66, .25], "x4")
gm0n.add_patterns([.25, .99, 1., .66, .33, .25], "x5")
gm0n.analyze()
# Looks GM0N the results as below:
gm0n.print_analyzed_results()
gm0n.print_influence_degrees()
# GM1N
gm1n = grey.gm1n
gm1n.add_outputs([2., 11., 1.5, 2., 2.2, 3.], "x1")
gm1n.add_patterns([3., 13.5, 1., 3., 3., 4.], "x2")
gm1n.add_patterns([2., 11., 3.5, 2., 3., 2.], "x3")
gm1n.add_patterns([4., 12., 2., 1., 2., 1.], "x4")
gm1n.add_patterns([1., 10., 5., 2., 1., 1.], "x5")
gm1n.analyze()
# Looks GM1N the results as below:
gm1n.print_analyzed_results()
gm1n.print_influence_degrees()
# GM11
gm11 = grey.gm11
# To try customized alpha for IAGO of Z.
gm11.alpha = 0.5
gm11.convolution = True # Convolutional forecasting of GM11.
gm11.stride = 1
gm11.length = 4
# gm11.add_pattern(533.0, "x1")
# gm11.add_pattern(665.0, "x2")
# gm11.add_pattern(655.0, "x3")
# gm11.add_pattern(740.0, "x4")
gm11.add_pattern(223.3, "a1")
gm11.add_pattern(227.3, "a2")
gm11.add_pattern(230.5, "a3")
gm11.add_pattern(238.1, "a4")
gm11.add_pattern(242.9, "a5")
gm11.add_pattern(251.1, "a6")
gm11.forecast()
# To record last forecasted result.
#last_forecasted_results = gm11.forecasted_outputs
# To clean all forecasted results.
#gm11.clean_forecasted()
# If the next forecasting iteration should keep using the previous forecasted
# results, but the gm11.forecasted_outputs list was cleaned beforehand,
# continue_forecasting() restores the last forecasted results as convolutional features.
#gm11.continue_forecasting(last_forecasted_results)
# Looks GM11 the results for example as below:
gm11.print_forecasted_results()
"""
# multiprocessing examples:
# for GM0N, GM1N
queue = []
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
queue.append(gm0n.deepcopy())
grey.run.gm0n(queue)
for gm in queue:
gm.print_influence_degrees()
# for GM11
gm11_queue = []
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
gm11_queue.append(gm11.deepcopy())
grey.run.gm11(gm11_queue)
for gm in gm11_queue:
gm.print_forecasted_results()
"""
| 2.5
| 2
|
serveradmin/servershell/templatetags/serversearch.py
|
abdulkadirakin/serveradmin
| 43
|
12776389
|
"""Serveradmin
Copyright (c) 2019 InnoGames GmbH
"""
from json import dumps
from django import template
from django.conf import settings
from adminapi.filters import filter_classes
from serveradmin.serverdb.models import Attribute, Servertype
register = template.Library()
@register.inclusion_tag('serversearch.html')
def serversearch_js(search_id):
servertypes = Servertype.objects.all()
attributes = list(Attribute.objects.all())
attributes.extend(Attribute.specials.values())
return {
'servertypes_json': dumps({s.servertype_id: {} for s in servertypes}),
'attributes_json': dumps({
a.attribute_id: {
'multi': a.multi,
'type': a.type,
'regexp': a.regexp,
}
for a in attributes
}),
'filters_json': dumps([f.__name__ for f in filter_classes]),
'search_id': search_id,
'STATIC_URL': settings.STATIC_URL,
}
@register.filter
def json(value):
return dumps(value)
| 2.046875
| 2
|
tasks.py
|
Lugoues/bq-schema
| 9
|
12776390
|
from invoke import task
_TEST_FOLDER = "tests"
_SOURCE_FOLDERS = " ".join(["bq_schema", _TEST_FOLDER])
@task
def lint(context):
context.run(f"pylint {_SOURCE_FOLDERS}")
@task
def type_check(context):
context.run("mypy bq_schema")
@task
def check_code_format(context):
context.run("black --check .")
context.run("isort --profile black --check .")
@task
def test(context):
context.run(
f"pytest {_TEST_FOLDER} --doctest-modules --junitxml=junit/test-results.xml --cov=bq_schema --cov-report=xml --cov-report=html"
)
@task
def format_code(context):
context.run("black .")
context.run("isort --profile black .")
@task(pre=[lint, type_check, check_code_format, test])
def check_all(_):
pass
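# CLI usage note: invoke exposes underscored task names with hyphens by
# default, e.g.
#   inv lint
#   inv check-all   # runs lint, type_check, check_code_format, then test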
| 1.796875
| 2
|
tools/common.py
|
Wenslauv/RekiScript
| 1
|
12776391
|
#!/usr/bin/python
class Enviroment:
sets = []
banned = []
| 1.195313
| 1
|
data_pre_process.py
|
mahatmaWM/NCRFpp
| 0
|
12776392
|
<filename>data_pre_process.py<gh_stars>0
# -*- coding: utf-8 -*-
"""
把我们的序列标注格式转化为项目支持的格式
"""
import codecs
if __name__ == '__main__':
with codecs.open('./sys.media.ip/test.txt', "r", "utf-8") as f1, \
codecs.open('./sys.media.ip/test_new.txt', "w", "utf-8") as f_train:
sentence = list([])
chars = list([])
pos = list([])
for line in f1.readlines():
if line.startswith('B') or line.startswith('E'):
continue
line = line.strip('\n')
splits = line.split('\t')
if len(splits) > 1:
sentence.append([splits[1], '[POS]' + splits[2], '[DICT]' + splits[3], splits[4]])
chars.append(splits[1])
pos.append(splits[2])
elif len(sentence) > 1:
for i in range(len(chars)-1):
sentence[i].append('[BI]'+chars[i]+chars[i+1])
sentence[i].append('[BI_POS]'+pos[i]+pos[i+1])
# logging.info(sentence)
sentence[-1].append('[BI]'+chars[-1]+'#')
sentence[-1].append('[BI_POS]'+pos[-1] + '#')
res = list([])
for item in sentence:
res.append(item[0] + ' ' + item[-1] + ' ' + item[-2] + ' ' + item[1] + ' ' + item[2] + ' ' + item[3])
res_str = '\n'.join(res)
sentence.clear()
chars.clear()
pos.clear()
f_train.write(res_str + '\n\n')
| 2.734375
| 3
|
ch10/2_group_anagrams.py
|
zmarvel/cracking
| 0
|
12776393
|
store = {}
def anagram_key(s):
    # memoized sort key: two words are anagrams iff their sorted letters match
    if s not in store:
        store[s] = sorted(s)
    return store[s]
def group_anagrams(ls):
ls = sorted(ls, key=anagram_key)
return ls
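# A variant sketch that returns the anagram groups themselves rather than a
# sorted list, keyed by each word's sorted letters:
from collections import defaultdict
def anagram_groups(words):
    groups = defaultdict(list)
    for w in words:
        groups[''.join(sorted(w))].append(w)
    return list(groups.values())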
def test():
from random import shuffle
l = [
"ascot",
"coats",
"coast",
"sushi",
"tacos",
"angel",
"breakfast",
"angle",
"glean",
"deist",
"coffee",
"diets",
"edits",
"sited",
"tides",
]
shuffle(l)
print(group_anagrams(l))
if __name__ == '__main__':
test()
| 3.9375
| 4
|
PyProj.py
|
MrBanh/PyProj
| 0
|
12776394
|
<filename>PyProj.py
#! python3
# PyProj.py - Creates a directory template for python projects
# Usage:
# PyProj.py add <project name> - creates project directory
# PyProj.py remove <project name> - removes project directory
# PyProj.py list - Shows all projects and their directories
# PyProj.py <project name> - Shows where project directory is located
# PyProj.py - Displays how to use script
import os
import re
import sys
import shelve
import send2trash
import pyperclip
scriptPath = os.path.split(__file__)
# Stores the projects and project directories in a file for projects created with this script
PyProjShelfDir = os.path.join(scriptPath[0], scriptPath[1] + '_Shelf')
# Create a directory to store data on the project folders created with this script
if not os.path.isdir(PyProjShelfDir):
os.mkdir(PyProjShelfDir)
PyProjShelf = shelve.open(os.path.join(PyProjShelfDir, 'PyProj'))
def makeProjectDir(folder, projectName):
# :param: folder - absolute path, parent directory of where this script is located
# :param: projectName - name of new python project directory
try:
# Check for valid folder name
folderRegex = re.compile(r'(\w)+')
        if folderRegex.search(projectName) is None:
print('Invalid folder name, please run again...')
exit()
projectDir = os.path.join(folder, projectName)
# Create new project directory
os.mkdir(projectDir)
# licenseFile = open(os.path.join(projectDir, 'LICENSE.txt'), 'w')
readmeFile = open(os.path.join(projectDir, 'README.md'), 'w')
readmeFile.write(f'# {projectName}.py\n\n')
pythonFile = open(os.path.join(projectDir, f'{projectName}.py'), 'w')
pythonFile.write('#! python3\n\n')
pythonFile.write(f'# {projectName}.py - ')
# Create a .bat file in the parent (of this script) directory
batFile = open(os.path.join(folder, f'{projectName}.bat'), 'w')
batFile.write(f'@py.exe "{os.path.join(projectDir, projectName)}.py" %* \n@pause')
# licenseFile.close()
readmeFile.close()
pythonFile.close()
batFile.close()
print()
except FileExistsError:
print('Project folder already exists. Please run again...')
exit()
# sys.argv - ['script.py', arg1, arg2, ...]
# sys.argv[0] - name of the script
# sys.argv[1] - first line argument
# sys.argv[2] - second line argument
# Creating project file via terminal
if len(sys.argv) == 3 and sys.argv[1].lower() == 'add':
if sys.argv[2].lower() != 'list':
# Change to this script's directory
os.chdir(scriptPath[0])
# Change current directory to the parent directory
os.chdir('..')
# Adds a new project directory
makeProjectDir(os.getcwd(), sys.argv[2])
# Saves project directory to a file
PyProjShelf[sys.argv[2]] = os.path.join(os.getcwd(), sys.argv[2])
print(f'Created project folder: {sys.argv[2]} \nLocation: {PyProjShelf[sys.argv[2]]}')
# Copy the new project directory to clipboard
pyperclip.copy(f'"{PyProjShelf[sys.argv[2]]}"')
print('\nCopied location to clipboard.\n')
else:
print('"list" is an argument in this script. \nPlease try a different project name')
exit()
# Removing project file via terminal
elif len(sys.argv) == 3 and sys.argv[1] == 'remove':
# Check if the project name is in the shelf data
if sys.argv[2] in PyProjShelf:
while True:
# Confirm removal
doRemove = input(f'Remove {sys.argv[2]} at [{PyProjShelf[sys.argv[2]]}]? (y/n): ')
if doRemove.lower() == 'y':
# Delete the .bat file
currDir = os.getcwd()
os.chdir(scriptPath[0])
os.chdir('..')
send2trash.send2trash(f'{sys.argv[2]}.bat')
os.chdir(currDir)
# Removes the project directory
send2trash.send2trash(PyProjShelf[sys.argv[2]])
del PyProjShelf[sys.argv[2]]
break
elif doRemove.lower() == 'n':
exit()
else:
print('Invalid input')
else:
print('Project created with this script does not exist')
exit()
elif len(sys.argv) == 2:
if sys.argv[1] == 'list':
# Show all projects and their directories
print(f'\n{"Projects":<20} Location')
print(f'{"--------":<20} --------')
for proj in PyProjShelf:
print(f'{proj:<20}- {PyProjShelf[proj]}')
# Show and copy the location of a project directory created with this script
    elif sys.argv[1] in PyProjShelf:
print(f'\n{sys.argv[1]} Located at: {PyProjShelf[sys.argv[1]]}')
pyperclip.copy(f'"{PyProjShelf[sys.argv[1]]}"')
print('\nCopied location to clipboard.\n')
# Otherwise, tell user that a project directory was never created
else:
print(f'{sys.argv[1]} was never created.')
# Show how to use the script
elif len(sys.argv) == 1:
print()
print(f"Add the directory of this file to the environment variable: {scriptPath[0]}")
print('\tIn Terminal:')
print('\t\tPyProj.py add <project name> - creates project directory')
print('\t\tPyProj.py remove <project name> - removes project directory')
print('\t\tPyProj.py list - Shows all projects and directories')
print('\t\tPyProj.py <project name> - Shows where project directory is located')
print('\t\tPyProj.py - Displays how to use script')
print()
PyProjShelf.close()
| 3.421875
| 3
|
app/auth/views.py
|
reivhax/One-minute-pitch
| 0
|
12776395
|
from flask import render_template,request,redirect,url_for,abort
from flask_login import login_user,login_required,current_user,logout_user
from ..models import User
from .forms import LoginForm,RegisterForm
from . import auth
# Views
@auth.route('/login', methods=["GET","POST"])
def login():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))
title = 'Login'
Form = LoginForm()
Error=False
if Form.validate_on_submit():
username=str(Form.username.data)
password=str(Form.password.data)
if username and password:
user=User.query.filter(User.username==username).first()
if user and user.verifypass(password):
login_user(user,Form.remember.data)
return redirect(url_for('main.dashboard'))
Error='Wrong Username or Password'
else:
Error='Please Type a Username or Password'
return render_template('login.html', title = title ,Form=Form,Error=Error)
@auth.route('/register', methods=["GET","POST"])
def register():
if current_user.is_authenticated:
return redirect(url_for('main.dashboard'))
title = 'Register'
Form = RegisterForm()
Error=False
if Form.validate_on_submit():
username=str(Form.username.data)
password=str(Form.password.data)
if username and password:
user=User.query.filter(User.username==username).first()
if not user:
user=User(username=username,passwd=password)
user.save()
return redirect(url_for('auth.login'))
Error='Username Already taken'
return render_template('register.html', title = title ,Form=Form,Error=Error)
@auth.route('/logout')
def logout():
if current_user.is_authenticated:
logout_user()
return redirect(url_for('main.dashboard'))
| 2.84375
| 3
|
htcanalyze/view/summarized_logfile_view.py
|
psyinfra/htcompact
| 0
|
12776396
|
<gh_stars>0
"""Module to create a view for summarized log files."""
from datetime import timedelta
from typing import List
from htcanalyze.globals import BAD_USAGE, TOLERATED_USAGE
from .view import View
from ..log_summarizer.summarized_condor_logs.summarized_condor_logs import (
SummarizedCondorLogs
)
class SummarizedLogfileView(View):
"""Visualizes summarized log files."""
def print_times(
self,
job_times
):
"""Prints job times table."""
if not job_times:
return
if job_times.is_empty():
return
time_table = self.create_table(
["Description", "Duration"],
# title="Job Dates and Times",
)
        if job_times.waiting_time != timedelta():
time_table.add_row(
"Average Waiting Time",
str(job_times.waiting_time)
)
        if job_times.execution_time != timedelta():
time_table.add_row(
"Average Execution Time",
str(job_times.execution_time),
)
        if job_times.total_runtime != timedelta():
time_table.add_row(
"Average Runtime (Total)",
str(job_times.total_runtime)
)
self.console.print(time_table)
def print_summarized_node_jobs(
self,
summarized_node_jobs,
sort_by_n_jobs: bool = True
):
"""
Prints summarized node jobs table,
sorted by the number of jobs.
:param summarized_node_jobs:
:param sort_by_n_jobs:
:return:
"""
if not summarized_node_jobs:
return
node_table = self.create_table(
[
"Node Address",
"No. of Jobs",
"Avg. Waiting Time",
"Avg. Execution Time",
"Avg. Runtime (Total)"
]
)
if sort_by_n_jobs:
summarized_node_jobs = sorted(summarized_node_jobs, reverse=True)
for summarized_node in summarized_node_jobs:
node_table.add_row(
summarized_node.address,
str(summarized_node.n_jobs),
str(summarized_node.job_times.waiting_time),
str(summarized_node.job_times.execution_time),
str(summarized_node.job_times.total_runtime)
)
self.console.print(node_table)
def print_resources(
self,
resources,
bad_usage=BAD_USAGE,
tolerated_usage=TOLERATED_USAGE,
headers=None,
precision=3,
):
"""Prints a resource table."""
if not resources:
return
if headers is None:
headers = [
"Partitionable Resources",
"Usage",
"Request",
"Allocated"
]
resource_table = self.create_table(headers)
for resource in resources.resources:
if not resource.is_empty():
color = resource.get_color_by_threshold(
bad_usage,
tolerated_usage
)
resource_table.add_row(
resource.description,
f"[{color}]{round(resource.usage, precision)}[/{color}]",
str(round(resource.requested, precision)),
str(round(resource.allocated, precision))
)
self.console.print(resource_table)
def print_summarized_error_events(
self,
summarized_error_states,
sort_by_n_error_events=True,
file_lim=3
):
"""
Print summarized error events,
sorted by the number of events.
Prints file names if less than file_lim files have such an error event,
else only the number is printed to keep the output readable.
:param summarized_error_states:
:param sort_by_n_error_events:
:param file_lim:
:return:
"""
if not summarized_error_states:
return
if sort_by_n_error_events:
summarized_error_states = sorted(
summarized_error_states, reverse=True
)
headers = ["Error Event", "No. of Occurrences"]
use_file_lim = True
for ses in summarized_error_states:
if len(ses.files) > file_lim:
use_file_lim = False
break
if use_file_lim:
headers.append("Files")
def file_func(files):
return "\n".join(files)
else:
headers.append("No. of Files")
def file_func(files):
return str(len(files))
error_table = self.create_table(
headers,
title="Occurred Job Error Events"
)
for summarized_error_state in summarized_error_states:
error_table.add_row(
summarized_error_state.error_state.name,
str(summarized_error_state.n_error_events),
file_func(summarized_error_state.files)
)
self.console.print(error_table)
def print_summarized_condor_logs(
self,
summarized_condor_logs: List[SummarizedCondorLogs],
sort_states_by_n_jobs=True,
bad_usage=BAD_USAGE,
tolerated_usage=TOLERATED_USAGE,
sep_char='~'
):
"""
Prints summarized log files
sorts description table by number of files per state,
separates each state summary by a line of sep_char
:param summarized_condor_logs:
:param sort_states_by_n_jobs:
:param bad_usage:
        :param tolerated_usage:
:param sep_char:
:return:
"""
if sort_states_by_n_jobs:
summarized_condor_logs = sorted(
summarized_condor_logs, reverse=True
)
jobs_table = self.create_table(
["State", "No. of Jobs"],
title="Number of Jobs per State",
)
for state_summarized_logs in summarized_condor_logs:
color = state_summarized_logs.state.color
jobs_table.add_row(
f"[{color}]{state_summarized_logs.state.name}[/{color}]",
str(state_summarized_logs.n_jobs)
)
self.console.print(jobs_table)
self.console.print(sep_char * self.window_width)
for state_summarized_logs in summarized_condor_logs:
self.console.print()
color = state_summarized_logs.state.color
self.print_desc_line(
"Log files with JobState:",
state_summarized_logs.state.name,
color=color
)
self.print_times(state_summarized_logs.avg_times)
self.print_resources(
state_summarized_logs.avg_resources,
bad_usage=bad_usage,
tolerated_usage=tolerated_usage,
headers=[
"Partitionable Resources",
"Avg. Usage",
"Avg. Request",
"Avg. Allocated"
]
)
self.print_summarized_node_jobs(
state_summarized_logs.summarized_node_jobs
)
self.print_summarized_error_events(
state_summarized_logs.summarized_error_states
)
self.console.print()
self.console.print(sep_char * self.window_width)
| 2.5
| 2
|
utils.py
|
steynvl/video-format-converter
| 1
|
12776397
|
import sys
import os
import shutil
import shlex
from command import Command
def check_ffmpeg_installed():
if shutil.which('ffmpeg') is None:
print('The program \'ffmpeg\' is not installed in your system.\n'
'You can install it by visiting http://ffmpeg.org/download.html')
sys.exit(0)
def convert_file(path: str, from_format: str, to: str, delete_original=False):
    assert path.endswith(from_format)
    # only replace the extension, not an accidental match elsewhere in the path
    new_path = path[:-len(from_format)] + to
    escaped = shlex.quote(path)
    new_name = shlex.quote(new_path)
    ffmpeg = shutil.which('ffmpeg')
    filename = path[path.rfind('/') + 1:]
    print('Converting \'{}\''.format(filename))
    cmd = Command('{} -i {} {}'.format(ffmpeg, escaped, new_name), os.getcwd())
    sig, _, err = cmd.run()
    if sig != 0:
        print('\'{}\' could not be converted'.format(filename))
        print('>>> ERR >>>')
        print(err)
    elif delete_original:
        # only remove the original once the conversion actually succeeded
        os.remove(path)
def convert_directory(path: str, from_format: str, to: str, delete_original=False):
for dirpath, _, files in os.walk(path):
for f in filter(lambda f: f.endswith(from_format), files):
convert_file('{}/{}'.format(dirpath, f), from_format,
to, delete_original)
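# Usage sketch (paths are illustrative):
#
#   check_ffmpeg_installed()
#   convert_directory('/home/user/videos', '.avi', '.mp4', delete_original=False)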
| 3.28125
| 3
|
sentdex_data_analysis/pandas_TPOT.py
|
thimotyb/Data-Analysis
| 5
|
12776398
|
import pandas as pd
import numpy as np
from tpot import TPOTClassifier
from sklearn.model_selection import train_test_split
benchmark = pd.read_pickle('us_pct.pickle') # us overall housing price index percentage change
HPI = pd.read_pickle('HPI_complete.pickle') # all of the state data, thirty year mortgage, unemployment rate, GDP, SP500
HPI = HPI.join(benchmark['United States'])
# all in percentage change since the start of the data (1975-01-01)
HPI.dropna(inplace=True)
housing_pct = HPI.pct_change()
housing_pct.replace([np.inf, -np.inf], np.nan, inplace=True)
housing_pct['US_HPI_future'] = housing_pct['United States'].shift(-1)
housing_pct.dropna(inplace=True)
def create_labels(cur_hpi, fut_hpi):
if fut_hpi > cur_hpi:
return 1
else:
return 0
housing_pct['label'] = list(map(create_labels, housing_pct['United States'], housing_pct['US_HPI_future']))
# housing_pct['ma_apply_example'] = housing_pct['M30'].rolling(window=10).apply(moving_average)
# print(housing_pct.tail())
X = np.array(housing_pct.drop(['label', 'US_HPI_future'], axis=1))
y = np.array(housing_pct['label'])
X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=0.25)
tpot = TPOTClassifier(generations=10, population_size=20, verbosity=2)
tpot.fit(X_train, y_train)
print(tpot.score(X_test, y_test))
tpot.export('HPI_tpot_pipeline.py')
| 2.921875
| 3
|
app.py
|
frason88/Heart-Pred-Project
| 0
|
12776399
|
#Important Modules
from flask import Flask,render_template, url_for ,flash , redirect
import pickle
from flask import request
import numpy as np
import os
from flask import send_from_directory
#from this import SQLAlchemy
app=Flask(__name__,template_folder='template')
@app.route("/")
@app.route("/home")
def home():
return render_template("home.html")
@app.route("/about")
def about():
return render_template("about.html")
@app.route("/heart")
def heart():
return render_template("heart.html")
def ValuePredictor(to_predict_list, size):
to_predict = np.array(to_predict_list).reshape(1,size)
if(size==13):#Heart
loaded_model = pickle.load(open("Heart_model_new.pkl","rb"))
result =loaded_model.predict(to_predict)
return result[0]
@app.route('/result',methods = ["POST"])
def result():
if request.method == 'POST':
to_predict_list = request.form.to_dict()
to_predict_list=list(to_predict_list.values())
to_predict_list = list(map(float, to_predict_list))
if(len(to_predict_list)==13):#heart
result = ValuePredictor(to_predict_list,13)
            if int(result) == 1:
                prediction = "The patient's heart seems to be healthy."
            else:
                prediction = "The patient's heart does not seem to be healthy."
return(render_template("result.html", prediction=prediction))
if __name__ == "__main__":
app.run(debug=True)
| 2.75
| 3
|
1) Applying Geometric Transformations to Images/#4 Image translation/translation.py
|
RezaFirouzii/python-opencv-review
| 0
|
12776400
|
import cv2 as cv
import numpy as np
if __name__ == "__main__":
img = cv.imread('../../assets/test1.jpg')
height, width = img.shape[:2] # rows, columns
# translating the img 200 pixels right (x axis)
translation_matrix = np.float32([[1, 0, 200], [0, 1, 0]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('1) 200 Pixels right', output)
# translating the img 50 pixels down (y axis)
translation_matrix = np.float32([[1, 0, 0], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('2) 50 Pixels Down', output)
# translating the img in both x-y axis.
translation_matrix = np.float32([[1, 0, 200], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width, height))
cv.imshow('3) (dx, dy) = (200, 50)', output)
# translating without getting cropped (by increasing the output size)
translation_matrix = np.float32([[1, 0, 200], [0, 1, 50]])
output = cv.warpAffine(img, translation_matrix, (width + 200, height + 50))
cv.imshow("4) Preventing Crop", output)
cv.waitKey(0)
cv.destroyAllWindows()
| 3.296875
| 3
|
views/templates.py
|
dpgd/utils
| 0
|
12776401
|
# -*- coding: utf-8 -*-
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.http import HttpResponse
from django.http import JsonResponse
import json
import datetime
# Template classes
class CreateTemplateView(CreateView):
    def __init__(self, model, template_name, form_class, ctx=None,
                 with_request=False):
        self.model = model
        self.template_name = template_name
        self.form_class = form_class
        self.ctx = ctx
        # whether the form class expects the request as an extra argument
        self.with_request = with_request
def get(self, request, *args, **kwargs):
if self.ctx:
return render(request, self.template_name, self.ctx)
return render(request, self.template_name)
def post(self, request, *args, **kwargs):
post = request.POST
if self.with_request:
form = self.form_class(post, request)
else:
form = self.form_class(post)
if form.is_valid():
element = form.save()
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
            return JsonResponse({'status': 'false', 'message': form.errors},
                                status=500)
class UpdateTemplateView(UpdateView):
    def __init__(self, model, template_name, form_class, message_not_exists,
                 element_name, with_request=False):
        self.template_name = template_name
        self.message_not_exists = message_not_exists
        self.model = model
        self.element_name = element_name
        self.form_class = form_class
        self.ctx = {}
        self.element = None
        # When True, the request object is passed to the form constructor.
        self.with_request = with_request
def get(self, request, *args, **kwargs):
element_id = kwargs['pk']
element = self.model.objects.filter(pk=element_id)
if not element:
return JsonResponse({'status': 'false',
'message': self.message_not_exists},
status=500)
element = element[0]
self.element = element
self.ctx[self.element_name] = element
self.add_data_ctx()
return render(request, self.template_name, self.ctx)
def post(self, request, *args, **kwargs):
post = request.POST
if self.with_request:
form = self.form_class(post, kwargs['pk'], request)
else:
form = self.form_class(post, kwargs['pk'])
if form.is_valid():
element = form.update()
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
            return JsonResponse({'status': 'false', 'message': form.errors},
                                status=500)
def add_data_ctx(self):
pass
class DeleteTemplateView(DeleteView):
model = None
message_not_exists = None
def get(self, request, *args, **kwargs):
pk = int(kwargs['pk'])
element = self.model.objects.all().filter(pk=pk)
if element:
element.delete()
return JsonResponse({'status': 'true', 'message': 200}, status=200)
else:
return JsonResponse({'status': 'false',
'message': self.message_not_exists}, status=500
)
class DeleteLogicTemplateView(DeleteView):
model = None
message_not_exists = None
def get(self, request, *args, **kwargs):
pk = int(kwargs['pk'])
reason = kwargs['reason']
element = self.model.objects.all().filter(pk=pk)
if element:
element.update(eliminated=True,
eliminated_reason=reason,
eliminated_date=datetime.date.today())
return JsonResponse({'status': 'true', 'message': 200}, status=200)
else:
return JsonResponse({'status': 'false',
'message': self.message_not_exists}, status=500
)
class ListTemplateView(ListView):
def get(self, request, *args, **kwargs):
elements = self.model.objects.all()
response = [
element.as_json()
for element in elements
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
class DetailTemplateView(DetailView):
def get(self, request, *args, **kwargs):
element_id = kwargs.get('pk', None)
element = self.model.objects.filter(pk=element_id)
if element:
element = element[0]
response = [
element.as_json()
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
else:
return JsonResponse(
{'status': 'false', 'message': self.message_not_exists},
status=500)
class FilterTemplateView(ListView):
def get(self, request, *args, **kwargs):
        results_filter = self.filter(request)
        elements = results_filter.results()
response = [
element.as_json()
for element in elements
]
response = json.dumps(response, indent=4, separators=(',', ': '))
return HttpResponse(response, content_type="application/json")
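# Usage sketch (added): concrete views wire a model, template and form into
# the generic templates above. Book and BookForm are hypothetical stand-ins.
def make_book_create_view(Book, BookForm):
    return CreateTemplateView(model=Book, template_name='books/create.html',
                              form_class=BookForm)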
| 2.109375
| 2
|
gitvier/commands.py
|
MasterOdin/gitvier
| 2
|
12776402
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
import os
import sys
from colorama import Fore, init as colorama_init
from git import Repo, CheckoutError
from git.exc import InvalidGitRepositoryError
from .common import get_input, get_yes, call as shell_call, output
from .config import get_config, Config
colorama_init(autoreset=True)
def restore_directory(func):
def func_wrapper(*args, **kwargs):
cwd = os.getcwd()
result = func(*args, **kwargs)
os.chdir(cwd)
return result
return func_wrapper
def init(force=False):
config = get_config()
if config is not None and not force:
print(".gitvier.yml file already exists. Use --force to overwrite.", file=sys.stderr)
return
config = Config()
config.location = get_input("Location to install components", ".")
while True:
add = get_yes("Add a component")
if add:
repo = get_input("Git url")
if repo == "":
print("You must input a url")
continue
default = repo.split("/")[-1].replace(".git", "")
name = get_input("Name of component", default)
rev = get_input("Revision to use", "master")
config.add_component(name, repo, rev)
else:
break
config.save()
@restore_directory
def install():
config = get_config()
if config is None:
print("Not a valid gitvier instance")
return
base_dir = config.location
os.makedirs(base_dir, exist_ok=True)
print("Directory: {}".format(base_dir))
os.chdir(base_dir)
output("├ Config loaded: " + config.config_file)
output("├ Install Path: " + config.location)
output("├ Installing Components:")
    count = 0
    for component in config.components:
        if count > 0:
            output("", 1)
        _install(base_dir, component, 0)
        count += 1
output("")
def _install(base_dir, component, level=0):
output("├─┬ {} ({:s})".format(component.name, component.rev), level)
comp_dir = os.path.join(base_dir, component.name)
level += 1
if os.path.isdir(os.path.join(base_dir, component.name)):
try:
repo = Repo(comp_dir)
if repo.head.is_detached:
output(
"├── Component already cloned, on hash " + repo.commit(repo.head).hexsha + ".",
level)
else:
output(
"├── Component already cloned, on branch {}.".format(repo.active_branch.name),
level)
# we're detached, and if we're also in a 'clean' state, then we consider that
# good enough to move to the tag/hash/branch
if repo.head.is_detached and not repo.is_dirty():
_checkout(repo, component, level)
else:
if component.rev != repo.active_branch.name:
output("├── Active branch not equal to requested revision, skipping",
level + 1)
else:
output("├── git pull", level)
repo.remote('origin').pull()
except InvalidGitRepositoryError:
output("├── Folder exists for component, but is not git repo", level + 1)
else:
output("├── git clone {}".format(component.repo, comp_dir), level)
repo = Repo.clone_from(component.repo, comp_dir)
_checkout(repo, component, level)
os.chdir(comp_dir)
config = get_config(comp_dir)
if config is not None:
output("├── Config loaded: " + config.config_file, level)
output("├── Install Path: " + config.location, level)
output("├─┬ Installing Components:", level)
        count = 0
        for sub_component in config.components:
            if count > 0:
                output("", 1)
            _install(config.location, sub_component, level+1)
            count += 1
os.chdir(comp_dir)
if len(component.commands) > 0:
output("├─┬ Running Commands:", level)
for i in range(len(component.commands)):
command = component.commands[i]
if i == len(component.commands)-1:
output("└── " + command, level + 1)
else:
output("├── " + command, level + 1)
shell_call(command)
def _checkout(repo, component, level=0):
for tag in repo.tags:
if tag.name == component.rev:
try:
output("git checkout tags/{}".format(component.rev), level)
repo.git.checkout('tags/' + component.rev)
except CheckoutError:
pass
return
try:
output("├── git checkout {}".format(component.rev), level)
repo.git.checkout(component.rev)
except CheckoutError:
pass
@restore_directory
def update():
config = get_config()
base_dir = config.location
for component in config.components:
print("Updating component {}... ".format(component.name), end="")
success = False
comp_dir = os.path.join(base_dir, component.name)
repo = Repo(comp_dir)
if repo.active_branch.name == component.rev:
origin = repo.remotes.origin
if not repo.is_dirty():
origin.pull()
success = True
if success:
print(Fore.GREEN + "✔", end="")
else:
print(Fore.RED + "✘", end="")
print()
@restore_directory
def display():
output("├ Components Statuses:")
_display()
def _display(base_dir=None, level=0):
config = get_config(base_dir)
if config is None:
return
level += 1
for component in config.components:
output("├─┬ {} ({:s})".format(component.name, component.rev), level-1)
comp_dir = os.path.join(config.location, component.name)
if not os.path.isdir(comp_dir):
output("├── Not Installed", level)
else:
os.chdir(comp_dir)
try:
repo = Repo(comp_dir)
if repo.head.is_detached:
output("├── Revision: {:s}".format(repo.commit(repo.head).hexsha), level)
else:
output("├── Branch: {:s}".format(repo.active_branch.name), level)
output("├── Dirty: {}".format("True" if repo.is_dirty() else "False"), level)
_display(comp_dir, level)
except InvalidGitRepositoryError:
output("├── Invalid Git repository", level)
| 2.46875
| 2
|
edu_loan/api/app.py
|
renanbs/edu-loan
| 0
|
12776403
|
from flask import Flask
from injector import Injector
from edu_loan.config.default import Config
from edu_loan.config.dependencies import ApplicationRegister, Application
from edu_loan.config.main_module import MODULES, create_injector
def create_app(injector: Injector) -> Flask:
"""
Creates a Flask app
:param injector: The injector
:return: Returns the Flask app
"""
app_flask = Flask(__name__)
injector.binder.bind(Application, to=app_flask)
app_flask.config.from_object(Config)
registers = injector.get(ApplicationRegister)
if registers:
for register in registers:
try:
register = injector.get(register)
register.register_endpoints()
except Exception as e:
print(e)
raise
return app_flask
def initialize(modules=MODULES):
injector = create_injector(modules=modules)
application = create_app(injector)
return application
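# Typical entry point (added sketch): build the injector-backed app and run
# Flask's development server.
if __name__ == '__main__':
    initialize().run(debug=True)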
| 2.6875
| 3
|
webapp-flask/app/src/models/user.py
|
didYouUpdateCode/iot_monitoring
| 0
|
12776404
|
from marshmallow import fields, validate
from .. import db, ma
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True, nullable=False)
password = db.Column(db.String, nullable=False)
creation_date = db.Column(db.TIMESTAMP,
default=db.func.current_timestamp(),
nullable=False)
modification_date = db.Column(db.TIMESTAMP,
default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp(),
nullable=False)
class UserSchema(ma.Schema):
id = fields.Integer()
email = fields.Email(required=True)
password = fields.String(required=True,
validate=[validate.Length(min=6, max=20)])
creation_date = fields.DateTime()
modification_date = fields.DateTime()
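# Validation sketch (added; assumes marshmallow 3.x, where Schema.validate()
# returns a field -> messages mapping): a too-short password is rejected.
def validate_example():
    errors = UserSchema().validate({"email": "a@b.com", "password": "123"})
    return errors  # e.g. {'password': ['Length must be between 6 and 20.']}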
| 2.671875
| 3
|
udata_schema_gouvfr/__init__.py
|
AntoineAugusti/udata-schema
| 2
|
12776405
|
<filename>udata_schema_gouvfr/__init__.py
'''
udata-schema-gouvfr
Integration with schema.data.gouv.fr
'''
__version__ = '1.3.3.dev'
__description__ = 'Integration with schema.data.gouv.fr'
| 0.980469
| 1
|
inheritance/zoo/reptile.py
|
ivan-yosifov88/python_oop_june_2021
| 1
|
12776406
|
from zoo.animal import Animal
class Reptile(Animal):
pass
| 1.40625
| 1
|
PedSimulation/example/listener.py
|
HDL951236874/PedSimulation
| 1
|
12776407
|
from PedSimulation.scene import SceneListener
from PedSimulation.entity import Agent, RotateAgent
import math
class PedestrianEscapeListener(SceneListener):
def on_added(self):
pass
def on_begin(self):
pass
def on_stepped(self):
        # Union of both agent types, materialized so removal during iteration is safe.
        agents = set(self.scene.entities_of_type(Agent)) | set(self.scene.entities_of_type(RotateAgent))
        for agent in agents:
if agent.escaped:
self.scene.remove_entity(agent)
def on_removed(self):
pass
| 2.59375
| 3
|
pyqtsocius/ui_elements/misc_ui/system_tray.py
|
Giddius/PyQt_Socius
| 0
|
12776408
|
<reponame>Giddius/PyQt_Socius
# region [Imports]
# * Standard Library Imports -->
import gc
import os
import re
import sys
import json
import lzma
import time
import queue
import logging
import platform
import subprocess
from enum import Enum, Flag, auto
from time import sleep
from pprint import pprint, pformat
from typing import Union
from datetime import tzinfo, datetime, timezone, timedelta
from functools import wraps, lru_cache, singledispatch, total_ordering, partial
from contextlib import contextmanager
from collections import Counter, ChainMap, deque, namedtuple, defaultdict
from multiprocessing import Pool
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
# * Third Party Imports -->
import requests
import pyperclip
from bs4 import BeautifulSoup
from dotenv import load_dotenv
from jinja2 import BaseLoader, Environment
from natsort import natsorted
from fuzzywuzzy import fuzz, process
# * PyQt5 Imports -->
from PyQt5.QtGui import QFont, QIcon, QBrush, QColor, QCursor, QPixmap, QStandardItem, QRegExpValidator
from PyQt5.QtCore import (Qt, QRect, QSize, QObject, QRegExp, QThread, QMetaObject, QCoreApplication,
QFileSystemWatcher, QPropertyAnimation, QAbstractTableModel, pyqtSlot, pyqtSignal)
from PyQt5.QtWidgets import (QMenu, QFrame, QLabel, QDialog, QLayout, QWidget, QWizard, QMenuBar, QSpinBox, QCheckBox, QComboBox,
QGroupBox, QLineEdit, QListView, QCompleter, QStatusBar, QTableView, QTabWidget, QDockWidget, QFileDialog,
QFormLayout, QGridLayout, QHBoxLayout, QHeaderView, QListWidget, QMainWindow, QMessageBox, QPushButton,
QSizePolicy, QSpacerItem, QToolButton, QVBoxLayout, QWizardPage, QApplication, QButtonGroup, QRadioButton,
QFontComboBox, QStackedWidget, QListWidgetItem, QTreeWidgetItem, QDialogButtonBox, QAbstractItemView,
QCommandLinkButton, QAbstractScrollArea, QGraphicsOpacityEffect, QTreeWidgetItemIterator, QSystemTrayIcon, QAction)
# * Gid Imports -->
import gidlogger as glog
from gidtools.gidfiles import (QuickFile, readit, clearit, readbin, writeit, loadjson, pickleit, writebin, pathmaker, writejson,
dir_change, linereadit, get_pickled, ext_splitter, appendwriteit, create_folder, from_dict_to_file)
from gidqtutils.gidgets import make_icons
import pyqtsocius.ui_elements.pyqt_sorter_ressources_rc
# endregion[Imports]
__updated__ = '2020-10-31 06:41:58'
# region [AppUserData]
# endregion [AppUserData]
# region [Logging]
log = glog.aux_logger(__name__)
log.info(glog.imported(__name__))
# endregion[Logging]
# region [Constants]
# endregion[Constants]
class SystemTray(QSystemTrayIcon):
def __init__(self, window, in_app, *args, **kwargs):
        super().__init__(*args, **kwargs)
self.window = window
self.app = in_app
self.icon = None
self.setup_menu()
def setup_menu(self):
_actions = {'Show': self.window.show,
'Hide': self.window.hide,
'Close': self.window.close}
self.systray_menu = QMenu(self.window)
for _name, _target in _actions.items():
_action = QAction(_name, self.systray_menu)
_action.triggered.connect(_target)
if _name != 'Close':
_action.triggered.connect(partial(self.hide_option, _name))
if _name == 'Show':
_action.setEnabled(False)
_action.triggered.connect(self.window.activateWindow)
self.systray_menu.addAction(_action)
self.setContextMenu(self.systray_menu)
def hide_option(self, name):
for _action in self.systray_menu.actions():
if _action.text() == name:
_action.setEnabled(False)
else:
_action.setEnabled(True)
def new_icon(self, icon):
self.icon = make_icons(f':/icons/{icon}', 100, 100)
self.setIcon(self.icon)
def show_hide_message(self, title, message, icon=None):
_icon = self.icon if icon is None else make_icons(f':/icons/{icon}', 100, 100)
self.showMessage(title, message, _icon, 500)
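# Usage sketch (added): wire the tray to a bare QMainWindow. 'app_icon' is a
# hypothetical name in the compiled Qt resource file.
def demo_tray():
    app = QApplication([])
    window = QMainWindow()
    tray = SystemTray(window, app)
    tray.new_icon('app_icon')
    tray.show()
    window.show()
    app.exec_()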
| 1.40625
| 1
|
dmriqcpy/version.py
|
liuzhenqi77/dmriqcpy
| 4
|
12776409
|
<filename>dmriqcpy/version.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import glob
# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
_version_major = 0
_version_minor = 1
_version_micro = '' # use '' for first of series, number for 1 and above
_version_extra = 'dev'
# _version_extra = '' # Uncomment this for full releases
# Construct full version string from these.
_ver = [_version_major, _version_minor]
if _version_micro:
_ver.append(_version_micro)
if _version_extra:
_ver.append(_version_extra)
__version__ = '.'.join(map(str, _ver))
CLASSIFIERS = ["Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering"]
# Description should be a one-liner:
description = "Diffusion MRI Quality Check in python "
# Long description will go up on the pypi page
long_description = """
"""
NAME = "dmriqcpy"
MAINTAINER = "<NAME>"
MAINTAINER_EMAIL = "<EMAIL>"
DESCRIPTION = description
LONG_DESCRIPTION = long_description
URL = "https://github.com/GuillaumeTh/dmriqcpy"
DOWNLOAD_URL = ""
LICENSE = "MIT"
AUTHOR = "The developers"
AUTHOR_EMAIL = ""
PLATFORMS = "OS Independent"
MAJOR = _version_major
MINOR = _version_minor
MICRO = _version_micro
VERSION = __version__
REQUIRES = ['numpy (>=1.18)', 'jinja2 (>=2.10.1)', 'pandas (>=0.25.1)',
'nibabel (>=3.0)', 'plotly (>=3.0.0)', 'vtk (>=8.1.2)',
'pillow (>=6.2.0)', 'fury (>=0.2.0)',
'matplotlib (>=2.2.0)', 'scipy (>=1.4.1)']
SCRIPTS = glob.glob("scripts/*.py")
| 1.726563
| 2
|
problem-051/main.py
|
Shiqan/daily-coding-problem
| 2
|
12776410
|
<filename>problem-051/main.py<gh_stars>1-10
#!/usr/bin/env python
""" Problem 51 daily-coding-problem.com """
import random
from typing import List
def shuffle_cards(cards: List[int]) -> List[int]:
    for i in range(len(cards) - 1, 0, -1):
        j = random.randint(0, i)  # uniformly random index in [0, i]
        cards[i], cards[j] = cards[j], cards[i]
    return cards
if __name__ == "__main__":
    cards = list(range(52))  # a standard deck has 52 cards
print(shuffle_cards(cards))
# show with monte carlo the chances?
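    # Rough Monte Carlo answer to the question above (added sketch): with a
    # uniform shuffle, card 0 should land in each position with probability
    # ~1/52 (~0.019).
    trials = 20000
    counts = [0] * 52
    for _ in range(trials):
        deck = shuffle_cards(list(range(52)))
        counts[deck.index(0)] += 1
    print([round(c / trials, 3) for c in counts])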
| 3.71875
| 4
|
storage/team03/storageManager/Tabla.py
|
webdev188/tytus
| 35
|
12776411
|
from storageManager.CrudTupla import CrudTuplas
class Tabla:
def __init__(self, nombre, columnas):
self.nombre = nombre
self.columnas = columnas
self.estructura = CrudTuplas(columnas)
def getNombreASCII(self):
number = 0
for c in self.nombre:
number += ord(c)
return number
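# Worked example (added sketch) mirroring getNombreASCII: the "name hash" is
# just the sum of the name's code points; for 'abc', 97 + 98 + 99 == 294.
def ascii_sum(nombre):
    return sum(ord(c) for c in nombre)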
| 2.828125
| 3
|
quadcopter.py
|
shijy16/quadqopter
| 0
|
12776412
|
import numpy as np
import vrep
import ctypes
import math
import sys
import time
sim_dt = 0.01
dt = 0.001
SYNC = True
vrep_mode = vrep.simx_opmode_oneshot
def b( num ):
""" forces magnitude to be 1 or less """
if abs( num ) > 1.0:
return math.copysign( 1.0, num )
else:
return num
def convert_angles( ang ):
""" Converts Euler angles from x-y-z to z-x-y convention """
s1 = math.sin(ang[0])
s2 = math.sin(ang[1])
s3 = math.sin(ang[2])
c1 = math.cos(ang[0])
c2 = math.cos(ang[1])
c3 = math.cos(ang[2])
pitch = math.asin( b(c1*c3*s2-s1*s3) )
cp = math.cos(pitch)
# just in case
if cp == 0:
cp = 0.000001
yaw = math.asin( b((c1*s3+c3*s1*s2)/cp) ) #flipped
# Fix for getting the quadrants right
if c3 < 0 and yaw > 0:
yaw = math.pi - yaw
elif c3 < 0 and yaw < 0:
yaw = -math.pi - yaw
roll = math.asin( b((c3*s1+c1*s2*s3)/cp) ) #flipped
return [roll, pitch, yaw]
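# Illustrative check (added): with all input angles zero, the converted
# z-x-y angles are exactly zero as well.
assert convert_angles([0.0, 0.0, 0.0]) == [0.0, 0.0, 0.0]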
class Quadcopter( object ):
"""
This callable class will return the state of the quadcopter relative to its
target whenever it is called. It will also accept motor commands which will be
sent to the quadcopter in V-REP.
"""
def __init__( self, max_target_distance=4, noise=False,
noise_std=None, dodging=True,
target_func=None, cid=None,ori_mode=False
):
self.ori_mode = ori_mode
# If a cid is specified, assume the connection has already been
# established and should remain open
if cid is None:
vrep.simxFinish(-1) # just in case, close all opened connections
self.cid = vrep.simxStart('127.0.0.1',19997,True,True,5000,5)
else:
self.cid = cid
if self.cid != -1:
print ('Connected to V-REP remote API server, client id: %s' % self.cid)
vrep.simxStartSimulation( self.cid, vrep.simx_opmode_oneshot )
if SYNC:
vrep.simxSynchronous( self.cid, True )
else:
print ('Failed connecting to V-REP remote API server')
self.exit()
err, self.copter = vrep.simxGetObjectHandle(self.cid, "Quadricopter_base",
vrep.simx_opmode_oneshot_wait )
err, self.target = vrep.simxGetObjectHandle(self.cid, "Quadricopter_target",
vrep.simx_opmode_oneshot_wait )
# Reset the motor commands to zero
packedData=vrep.simxPackFloats([0,0,0,0])
raw_bytes = (ctypes.c_ubyte * len(packedData)).from_buffer_copy(packedData)
err = vrep.simxSetStringSignal(self.cid, "rotorTargetVelocities",
raw_bytes,
vrep_mode)
self.pos = [0,0,0]
self.pos_err = [0,0,0]
self.t_pos = [0,0,0]
self.lin = [0,0,0]
self.ori = [0,0,0]
self.ori_err = [0,0,0]
self.t_ori = [0,0,0]
self.ang = [0,0,0]
self.count = 0
# Maximum target distance error that can be returned
self.max_target_distance = max_target_distance
# If noise is being modelled
if noise_std is not None:
self.noise = True
else:
self.noise = False
# Standard Deviation of the noise for the 4 state variables
self.noise_std = noise_std
# Overwrite the get_target method if the target is to be controlled by a
# function instead of by V-REP
if target_func is not None:
self.step = 0
self.target_func = target_func
def get_target():
self.t_pos, self.t_ori = self.target_func( self.step )
self.step += 1
self.get_target = get_target
def stop( self ):
"""
Stops the simulation
"""
err = vrep.simxStopSimulation( self.cid, vrep.simx_opmode_oneshot_wait )
time.sleep(0.01) # Maybe this will prevent V-REP from crashing as often
return hasattr(self, 'failed') # Returns true if this is a failed run
def reset( self ):
err = vrep.simxStopSimulation(self.cid, vrep.simx_opmode_oneshot_wait)
time.sleep(1)
self.pos_err = [0,0,0]
self.ori_err = [0,0,0]
self.lin = [0,0,0]
self.ang = [0,0,0]
err = vrep.simxStartSimulation(self.cid, vrep.simx_opmode_oneshot_wait)
if SYNC:
vrep.simxSynchronous( self.cid, True )
def exit( self ):
self.failed = True
exit(1)
def get_target( self ):
err, self.t_ori = vrep.simxGetObjectOrientation(self.cid, self.target, -1,
vrep_mode )
err, self.t_pos = vrep.simxGetObjectPosition(self.cid, self.target, -1,
vrep_mode )
# Convert orientations to z-y-x convention
self.t_ori = convert_angles(self.t_ori)
def calculate_error( self ):
# Return the state variables
err, self.ori = vrep.simxGetObjectOrientation(self.cid, self.copter, -1,
vrep_mode )
err, self.pos = vrep.simxGetObjectPosition(self.cid, self.copter, -1,
vrep_mode )
err, self.lin, self.ang = vrep.simxGetObjectVelocity(self.cid, self.copter,
vrep_mode )
self.ori = convert_angles(self.ori)
# Apply noise to each measurement if required
#FIXME this is a dumb way to do this, clean it up later
# if self.noise:
# n_pos = np.random.normal(0,self.noise_std[0],3)
# n_lin = np.random.normal(0,self.noise_std[1],3)
# n_ori = np.random.normal(0,self.noise_std[2],3)
# n_ang = np.random.normal(0,self.noise_std[3],3)
# for i in range(3):
# self.pos[i] += n_pos[i]
# self.lin[i] += n_lin[i]
# self.ori[i] += n_ori[i]
# self.ang[i] += n_ang[i]
#TODO: might have to wrap angles here
# Find the error
self.ori_err = [self.t_ori[0] - self.ori[0],
self.t_ori[1] - self.ori[1],
self.t_ori[2] - self.ori[2]]
# print(self.ori_err)
cz = math.cos(self.ori[2])
sz = math.sin(self.ori[2])
x_err = self.t_pos[0] - self.pos[0]
y_err = self.t_pos[1] - self.pos[1]
if not self.ori_mode:
self.pos_err = [ x_err * cz + y_err * sz,
-x_err * sz + y_err * cz,
self.t_pos[2] - self.pos[2]]
else:
self.pos_err = [0,0,
self.t_pos[2] - self.pos[2]]
# print(self.pos_err)
self.lin = [self.lin[0]*cz+self.lin[1]*sz, -self.lin[0]*sz+self.lin[1]*cz, self.lin[2]]
self.ang = [self.ang[0]*cz+self.ang[1]*sz, -self.ang[0]*sz+self.ang[1]*cz, self.ang[2]]
for i in range(3):
if self.ori_err[i] > math.pi:
self.ori_err[i] -= 2 * math.pi
elif self.ori_err[i] < -math.pi:
self.ori_err[i] += 2 * math.pi
def send_motor_commands( self, values ):
# Limit motors by max and min values
motor_values = np.zeros(4)
for i in range(4):
"""
if values[i] > 30:
motor_values[i] = 30
elif values[i] < 0:
motor_values[i] = 0
else:
motor_values[i] = values[i]
"""
motor_values[i] = values[i]
packedData=vrep.simxPackFloats(motor_values.flatten())
raw_bytes = (ctypes.c_ubyte * len(packedData)).from_buffer_copy(packedData)
err = vrep.simxSetStringSignal(self.cid, "rotorTargetVelocities",
raw_bytes,
vrep_mode)
def handle_input( self, values ):
# Send motor commands to V-REP
self.send_motor_commands( values )
# Retrieve target location
self.get_target()
# Calculate state error
self.calculate_error()
def bound( self, value ):
if abs( value ) > self.max_target_distance:
return math.copysign( self.max_target_distance, value )
else:
return value
def get_state( self ):
"""
Returns the current state. Used for recording benchmarks of performance
"""
return [self.pos, self.ori,
self.lin, self.ang,
self.t_pos, self.t_ori]
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2]]
def __call__( self, t, values ):
""" This class will be callable within a nengo node. It will accept as input
the control signals for each rotor, and will output the relevant state
variables (position, velocity, orientation, angular velocity).
"""
self.count += 1
if self.count == int(round(sim_dt/dt)):
self.count = 0
self.handle_input( values )
if SYNC:
vrep.simxSynchronousTrigger( self.cid )
return self.handle_output()
class FullStateQuadcopter( Quadcopter ):
"""
Returns both egocentric and allocentric information about the state
"""
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2],
self.pos[0], self.pos[1], self.pos[2],
self.ori[0], self.ori[1], self.ori[2],
]
class FullStateTargetQuadcopter( Quadcopter ):
"""
Returns both egocentric and allocentric information about the state
as well as the state of the target
"""
def handle_output( self ):
l = math.sqrt(self.pos_err[0]**2 + self.pos_err[1]**2)
bl = self.bound(l)
r = (bl+.1)/(l+.1)
return [r*self.pos_err[0], r*self.pos_err[1], self.bound(self.pos_err[2]),
self.lin[0], self.lin[1], self.lin[2],
self.ori_err[0], self.ori_err[1], self.ori_err[2],
self.ang[0], self.ang[1], self.ang[2],
self.pos[0], self.pos[1], self.pos[2],
self.ori[0], self.ori[1], self.ori[2],
self.t_pos[0], self.t_pos[1], self.t_pos[2],
self.t_ori[0], self.t_ori[1], self.t_ori[2],
]
class TargetControlledQuadcopter( Quadcopter ):
"""
The target location is sent as input to this node, rather than reading from a
manually controlled target in V-REP. This class is designed to be used for
hyperopt tuning of gains.
"""
def set_target( self, v ):
self.t_pos = [v[0], v[1], v[2]]
self.t_ori = [0, 0, v[3]]
def handle_input( self, values ):
"""
First four elements of values are the motor commands.
The next four are x,y,z,yaw of the target
"""
self.send_motor_commands( values[:4] )
self.set_target( values[4:] )
self.calculate_error()
def exit( self ):
# When running hyperopt to find gains, should not exit program if one
# trial fails
self.failed = True
class AdaptiveController( object ):
"""
Adaptive controller based on Slotine's methods and physics model from the
python quadcopter simulator
"""
def __init__( self, adaptive=True, dt=0.001, initial_param=None ):
# When false, parameter updating does not occur
self.adaptive = adaptive
self.initialize_parameters( initial_param )
# Gain set
k1 = 0.43352026190263104
k2 = 2.0 * 2
k3 = 0.5388202808181405
k4 = 1.65 * 2
k5 = 2.5995452450850185
k6 = 0.802872750102059 * 2
k7 = 0.5990281657438163
k8 = 2.8897310746350824 * 2
ak1 = 0.026210965785217845
ak2 = 2.0 * 5
ak3 = 0.027614986033826894
ak4 = 1.65 * 5
ak6 = k6
ak8 = k8
self.K = np.matrix([[ 0, 0, k2, 0, 0,-k4, 0, 0, 0, 0, 0, 0],
[ 0, k1, 0, 0,-k3, 0,-k5, 0, 0, k7, 0, 0],
[-k1, 0, 0, k3, 0, 0, 0,-k5, 0, 0, k7, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0,-k6, 0, 0, k8] ])
self.AK = np.matrix([[ 0, 0, ak2, 0, 0,-ak4, 0, 0, 0, 0, 0, 0],
[ 0, ak1, 0, 0,-ak3, 0, 0, 0, 0, 0, 0, 0],
[-ak1, 0, 0, ak3, 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0, -ak6, 0, 0, ak8] ])
self.task_to_rotor = np.matrix([[ 1,-1, 1, 1],
[ 1,-1,-1,-1],
[ 1, 1,-1, 1],
[ 1, 1, 1,-1] ])
self.control_matrix = self.task_to_rotor * self.K
self.adaptive_matrix = self.task_to_rotor * self.AK
self.error = np.matrix([[0.0], # x
[0.0], # y
[0.0], # z
[0.0], # dx
[0.0], # dy
[0.0], # dz
[0.0], # roll
[0.0], # pitch
[0.0], # yaw
[0.0], # droll
[0.0], # dpitch
[0.0], # dyaw
])
self.learning_rate = 1
self.dt = dt
def initialize_parameters( self, initial_param ):
# Unknown Constant Vector
self.param = np.matrix([[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
])
# If initial parameters are specified, set them now
if initial_param is not None:
for i in range(len(initial_param)):
self.param[i,0] = initial_param[i]
def compute_Y( self ):
"""
Generate the Y matrix
"""
# TODO: this might need to be allocentric, or the equations changed for
# egocentric
c1 = math.cos( self.error[6,0] )
c2 = math.cos( self.error[7,0] )
c3 = math.cos( self.error[8,0] )
s1 = math.sin( self.error[6,0] )
s2 = math.sin( self.error[7,0] )
s3 = math.sin( self.error[8,0] )
at = c1*s2*c3 + s1*s3
bt = c1*s2*s3 - s1*c3
ct = c1*c2
a = at / (at*at + bt*bt + ct*ct)
b = bt / (at*at + bt*bt + ct*ct)
c = ct / (at*at + bt*bt + ct*ct)
ax = a*abs(self.error[3,0])*self.error[3,0]
by = b*abs(self.error[4,0])*self.error[4,0]
cz = c*abs(self.error[5,0])*self.error[5,0]
        rxy = self.error[9,0]*self.error[10,0]   # droll * dpitch
        rxz = self.error[9,0]*self.error[11,0]   # droll * dyaw
        ryz = self.error[10,0]*self.error[11,0]  # dpitch * dyaw
"""
self.Y = np.matrix([[ax, by, cz, c, 0, -rxz, -rxy],
[ax, by, cz, c, -ryz, 0, rxy],
[ax, by, cz, c, 0, rxz, -rxy],
[ax, by, cz, c, ryz, 0, rxy],
])
"""
# Trying out different orientation of rotor blades
self.Y = np.matrix([[ax, by, cz, c, -ryz, rxz, rxy],
[ax, by, cz, c, -ryz, -rxz, -rxy],
[ax, by, cz, c, ryz, -rxz, rxy],
[ax, by, cz, c, ryz, rxz, -rxy],
])
def compute_rotor_velocities( self ):
"""
Generate the four rotor velocities to control the quadcopter
"""
self.compute_Y()
# Calculate rotor velocities
w = self.Y * self.param +\
self.control_matrix * self.error
#self.adaptive_matrix * self.error
if self.adaptive:
dparam = self.learning_rate *\
self.Y.T *\
( self.control_matrix * self.error ) *\
self.dt
#( self.adaptive_matrix * self.error ) *\
#self.dt
# Update the parameter estimates
self.param += dparam
return [ w[0,0], w[1,0], w[2,0], w[3,0] ]
def __call__( self, t, values ):
""" This class will be callable within a nengo node. It will accept as input
the 12D state error and will output desired rotor velocities
"""
# Insert state into error matrix
for i in range(len(values)):
self.error[i,0] = values[i]
# Compute desired rotor velocities
return self.compute_rotor_velocities()
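# Minimal usage sketch (added for illustration): with a zero error vector the
# rotor command reduces to Y * param and the adaptive update is a no-op.
if __name__ == "__main__":
    controller = AdaptiveController(adaptive=True, dt=0.001)
    print(controller(0.0, [0.0] * 12))  # four rotor velocities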
| 2.828125
| 3
|
cogs/events/errorHandler.py
|
xcloudx12345/tts-bot-discord
| 3
|
12776413
|
import discord
from discord.ext import commands
import random
import sys
import traceback
class ErrorHandler(commands.Cog):
def __init__(self, client):
self.client = client
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
# This prevents any commands with local handlers being handled here in on_command_error.
if hasattr(ctx.command, 'on_error'):
return
# This prevents any cogs with an overwritten cog_command_error being handled here.
cog = ctx.cog
if cog:
if cog._get_overridden_method(cog.cog_command_error) is not None:
return
ignored = ()
if isinstance(error, ignored):
return
elif isinstance(error, commands.DisabledCommand):
await ctx.send(f'{ctx.command} has been disabled.')
elif isinstance(error, commands.NoPrivateMessage):
try:
                await ctx.author.send(f'`{ctx.command}` cannot be used in Private Messages.')
except discord.HTTPException:
pass
elif isinstance(error, commands.BadArgument):
await ctx.send('The given argument(s) were not correct.')
elif isinstance(error, commands.MissingRequiredArgument):
            await ctx.send('One or more required arguments were missing.')
        elif isinstance(error, commands.CommandNotFound):
            await ctx.send('Command not found.')
        elif isinstance(error, commands.BotMissingPermissions):
            try:
                await ctx.send('The bot is missing the required permissions to complete this action.')
            except Exception:
                try:
                    await ctx.guild.owner.send(f'The bot is missing required permissions in your server: {ctx.guild.name} (guild id: {ctx.guild.id})')
                except Exception:
                    pass
        else:
            await ctx.send(f'Unknown error occurred: `{error}`')
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
def setup(bot):
bot.add_cog(ErrorHandler(bot))
| 2.4375
| 2
|
Face Reconstruction/Self-Supervised Monocular 3D Face Reconstruction by Occlusion-Aware Multi-view Geometry Consistency/src_common/geometry/render/api_tf_mesh_render.py
|
swapnilgarg7/Face-X
| 302
|
12776414
|
<filename>Face Reconstruction/Self-Supervised Monocular 3D Face Reconstruction by Occlusion-Aware Multi-view Geometry Consistency/src_common/geometry/render/api_tf_mesh_render.py<gh_stars>100-1000
# system
from __future__ import print_function
# python lib
import math
from copy import deepcopy
import numpy as np
# tf_render
import tensorflow as tf
# self
from thirdParty.tf_mesh_renderer.mesh_renderer.mesh_renderer import phong_shader, tone_mapper
from thirdParty.tf_mesh_renderer.mesh_renderer.rasterize_triangles import rasterize_triangles
# perspective
def mesh_renderer_camera_light(vertices, triangles, normals, diffuse_colors,
mtx_camera, mtx_perspective_frustrum, camera_position,
image_width, image_height):
"""Renders an input scene using phong shading, and returns an output image.
Args:
vertices: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is an xyz position in world space.
triangles: 2-D int32 tensor with shape [triangle_count, 3]. Each triplet
should contain vertex indices describing a triangle such that the
triangle's normal points toward the viewer if the forward order of the
triplet defines a clockwise winding of the vertices. Gradients with
respect to this tensor are not available.
normals: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is the xyz vertex normal for its corresponding vertex. Each
vector is assumed to be already normalized.
diffuse_colors: 3-D float32 tensor with shape [batch_size,
vertex_count, 3]. The RGB diffuse reflection in the range [0,1] for
each vertex.
mtx_camera: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the camera model view matrix
mtx_perspective_frustrum: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the perspective and frustrum matrix
camera_position: 2-D tensor with shape [batch_size, 3] or 1-D tensor with
shape [3] specifying the XYZ world space camera position.
light_intensities: a 3-D tensor with shape [batch_size, light_count, 3]. The
RGB intensity values for each light. Intensities may be above one.
image_width: int specifying desired output image width in pixels.
image_height: int specifying desired output image height in pixels.
Returns:
A 4-D float32 tensor of shape [batch_size, image_height, image_width, 4]
containing the lit RGBA color values for each image at each pixel. RGB
colors are the intensity values before tonemapping and can be in the range
[0, infinity]. Clipping to the range [0,1] with tf_render.clip_by_value is likely
reasonable for both viewing and training most scenes. More complex scenes
with multiple lights should tone map color values for display only. One
simple tonemapping approach is to rescale color values as x/(1+x); gamma
    compression is another common technique. Alpha values are zero for
background pixels and near one for mesh pixels.
Raises:
ValueError: An invalid argument to the method is detected.
"""
if len(vertices.shape) != 3:
raise ValueError('Vertices must have shape [batch_size, vertex_count, 3].')
batch_size = vertices.shape[0].value
if len(normals.shape) != 3:
raise ValueError('Normals must have shape [batch_size, vertex_count, 3].')
if len(diffuse_colors.shape) != 3:
raise ValueError(
'vertex_diffuse_colors must have shape [batch_size, vertex_count, 3].')
if camera_position.get_shape().as_list() == [3]:
camera_position = tf.tile(
tf.expand_dims(camera_position, axis=0), [batch_size, 1])
elif camera_position.get_shape().as_list() != [batch_size, 3]:
raise ValueError('Camera_position must have shape [batch_size, 3]')
    # TODO: Debug Shape
    if mtx_camera.get_shape().as_list() == [4, 4]:
        mtx_camera = tf.tile(
            tf.expand_dims(mtx_camera, axis=0), [batch_size, 1, 1])
    elif mtx_camera.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_camera must have shape [batch_size, 4, 4]')
    if mtx_perspective_frustrum.get_shape().as_list() == [4, 4]:
        mtx_perspective_frustrum = tf.tile(
            tf.expand_dims(mtx_perspective_frustrum, axis=0), [batch_size, 1, 1])
    elif mtx_perspective_frustrum.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_perspective_frustrum must have shape [batch_size, 4, 4]')
vertex_attributes = tf.concat([normals, vertices, diffuse_colors], axis=2)
clip_space_transforms = tf.matmul(mtx_perspective_frustrum, mtx_camera, name="mtx_clip_space_transforms_batch")
pixel_attributes, alpha, tri_ids = rasterize_triangles(
vertices, vertex_attributes, triangles, clip_space_transforms,
image_width, image_height, [-1] * vertex_attributes.shape[2].value)
# Extract the interpolated vertex attributes from the pixel buffer and
# supply them to the shader:
#pixel_normals = tf.nn.l2_normalize(pixel_attributes[:, :, :, 0:3], dim=3)
#pixel_positions = pixel_attributes[:, :, :, 3:6]
diffuse_colors = pixel_attributes[:, :, :, 6:9]
diffuse_colors = tf.reverse(diffuse_colors, axis=[1])
#return renders, pixel_mask
pixel_mask = alpha > 0.5
pixel_mask = tf.cast(pixel_mask, dtype=tf.float32)
pixel_mask = tf.reverse(pixel_mask, axis=[1])
#
tri_ids = tf.expand_dims(tri_ids, -1)
return diffuse_colors, pixel_mask, tri_ids
def mesh_renderer_camera(vertices, triangles, normals, diffuse_colors,
mtx_camera, mtx_perspective_frustrum, camera_position,
light_positions, light_intensities, image_width, image_height,
specular_colors=None, shininess_coefficients=None, ambient_color=None, background=-1
):
"""Renders an input scene using phong shading, and returns an output image.
Args:
vertices: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is an xyz position in world space.
triangles: 2-D int32 tensor with shape [triangle_count, 3]. Each triplet
should contain vertex indices describing a triangle such that the
triangle's normal points toward the viewer if the forward order of the
triplet defines a clockwise winding of the vertices. Gradients with
respect to this tensor are not available.
normals: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is the xyz vertex normal for its corresponding vertex. Each
vector is assumed to be already normalized.
diffuse_colors: 3-D float32 tensor with shape [batch_size,
vertex_count, 3]. The RGB diffuse reflection in the range [0,1] for
each vertex.
mtx_camera: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the camera model view matrix
mtx_perspective_frustrum: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the perspective and frustrum matrix
camera_position: 2-D tensor with shape [batch_size, 3] or 1-D tensor with
shape [3] specifying the XYZ world space camera position.
light_positions: a 3-D tensor with shape [batch_size, light_count, 3]. The
XYZ position of each light in the scene. In the same coordinate space as
pixel_positions.
light_intensities: a 3-D tensor with shape [batch_size, light_count, 3]. The
RGB intensity values for each light. Intensities may be above one.
image_width: int specifying desired output image width in pixels.
image_height: int specifying desired output image height in pixels.
specular_colors: 3-D float32 tensor with shape [batch_size,
vertex_count, 3]. The RGB specular reflection in the range [0, 1] for
each vertex. If supplied, specular reflections will be computed, and
both specular_colors and shininess_coefficients are expected.
shininess_coefficients: a 0D-2D float32 tensor with maximum shape
[batch_size, vertex_count]. The phong shininess coefficient of each
vertex. A 0D tensor or float gives a constant shininess coefficient
across all batches and images. A 1D tensor must have shape [batch_size],
and a single shininess coefficient per image is used.
ambient_color: a 2D tensor with shape [batch_size, 3]. The RGB ambient
color, which is added to each pixel in the scene. If None, it is
assumed to be black.
Returns:
A 4-D float32 tensor of shape [batch_size, image_height, image_width, 4]
containing the lit RGBA color values for each image at each pixel. RGB
colors are the intensity values before tonemapping and can be in the range
[0, infinity]. Clipping to the range [0,1] with tf_render.clip_by_value is likely
reasonable for both viewing and training most scenes. More complex scenes
with multiple lights should tone map color values for display only. One
simple tonemapping approach is to rescale color values as x/(1+x); gamma
    compression is another common technique. Alpha values are zero for
background pixels and near one for mesh pixels.
Raises:
ValueError: An invalid argument to the method is detected.
"""
if len(vertices.shape) != 3:
raise ValueError('Vertices must have shape [batch_size, vertex_count, 3].')
batch_size = vertices.shape[0].value
if len(normals.shape) != 3:
raise ValueError('Normals must have shape [batch_size, vertex_count, 3].')
if len(light_positions.shape) != 3:
raise ValueError(
'Light_positions must have shape [batch_size, light_count, 3].')
if len(light_intensities.shape) != 3:
raise ValueError(
'Light_intensities must have shape [batch_size, light_count, 3].')
if len(diffuse_colors.shape) != 3:
raise ValueError(
'vertex_diffuse_colors must have shape [batch_size, vertex_count, 3].')
if (ambient_color is not None and
ambient_color.get_shape().as_list() != [batch_size, 3]):
raise ValueError('Ambient_color must have shape [batch_size, 3].')
if camera_position.get_shape().as_list() == [3]:
camera_position = tf.tile(
tf.expand_dims(camera_position, axis=0), [batch_size, 1])
elif camera_position.get_shape().as_list() != [batch_size, 3]:
raise ValueError('Camera_position must have shape [batch_size, 3]')
    # TODO: Debug Shape
    if mtx_camera.get_shape().as_list() == [4, 4]:
        mtx_camera = tf.tile(
            tf.expand_dims(mtx_camera, axis=0), [batch_size, 1, 1])
    elif mtx_camera.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_camera must have shape [batch_size, 4, 4]')
    if mtx_perspective_frustrum.get_shape().as_list() == [4, 4]:
        mtx_perspective_frustrum = tf.tile(
            tf.expand_dims(mtx_perspective_frustrum, axis=0), [batch_size, 1, 1])
    elif mtx_perspective_frustrum.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_perspective_frustrum must have shape [batch_size, 4, 4]')
if specular_colors is not None and shininess_coefficients is None:
raise ValueError(
'Specular colors were supplied without shininess coefficients.')
if shininess_coefficients is not None and specular_colors is None:
raise ValueError(
'Shininess coefficients were supplied without specular colors.')
if specular_colors is not None:
# Since a 0-D float32 tensor is accepted, also accept a float.
if isinstance(shininess_coefficients, float):
shininess_coefficients = tf.constant(
shininess_coefficients, dtype=tf.float32)
if len(specular_colors.shape) != 3:
raise ValueError('The specular colors must have shape [batch_size, '
'vertex_count, 3].')
if len(shininess_coefficients.shape) > 2:
raise ValueError('The shininess coefficients must have shape at most'
'[batch_size, vertex_count].')
# If we don't have per-vertex coefficients, we can just reshape the
# input shininess to broadcast later, rather than interpolating an
# additional vertex attribute:
if len(shininess_coefficients.shape) < 2:
vertex_attributes = tf.concat(
[normals, vertices, diffuse_colors, specular_colors], axis=2)
else:
vertex_attributes = tf.concat(
[
normals, vertices, diffuse_colors, specular_colors,
tf.expand_dims(shininess_coefficients, axis=2)
],
axis=2)
else:
vertex_attributes = tf.concat([normals, vertices, diffuse_colors], axis=2)
# camera_matrices = camera_utils.look_at(camera_position, camera_lookat,
# camera_up)
#
# perspective_transforms = camera_utils.perspective(image_width / image_height,
# fov_y, near_clip, far_clip)
clip_space_transforms = tf.matmul(mtx_perspective_frustrum, mtx_camera, name="mtx_clip_space_transforms_batch")
pixel_attributes, alpha, tri_ids = rasterize_triangles(
vertices, vertex_attributes, triangles, clip_space_transforms,
image_width, image_height, [background] * vertex_attributes.shape[2].value)
# Extract the interpolated vertex attributes from the pixel buffer and
# supply them to the shader:
pixel_normals = tf.nn.l2_normalize(pixel_attributes[:, :, :, 0:3], dim=3)
pixel_positions = pixel_attributes[:, :, :, 3:6]
diffuse_colors = pixel_attributes[:, :, :, 6:9]
if specular_colors is not None:
specular_colors = pixel_attributes[:, :, :, 9:12]
# Retrieve the interpolated shininess coefficients if necessary, or just
# reshape our input for broadcasting:
if len(shininess_coefficients.shape) == 2:
shininess_coefficients = pixel_attributes[:, :, :, 12]
else:
shininess_coefficients = tf.reshape(shininess_coefficients, [-1, 1, 1])
pixel_mask = tf.cast(tf.reduce_any(diffuse_colors >= 0, axis=3), tf.float32)
renders = phong_shader(
normals=pixel_normals,
alphas=pixel_mask,
pixel_positions=pixel_positions,
light_positions=light_positions,
light_intensities=light_intensities,
diffuse_colors=diffuse_colors,
camera_position=camera_position if specular_colors is not None else None,
specular_colors=specular_colors,
shininess_coefficients=shininess_coefficients,
ambient_color=ambient_color)
#return renders, pixel_mask
pixel_mask = alpha > 0.5
pixel_mask = tf.cast(pixel_mask, dtype=tf.float32)
pixel_mask = tf.reverse(pixel_mask, axis=[1])
return renders, pixel_mask
def mesh_depthmap_camera(vertices, triangles, mtx_ext,
mtx_camera, mtx_perspective_frustrum,
image_width, image_height
):
"""Renders an input scene using phong shading, and returns an output image.
Args:
vertices: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is an xyz position in world space.
triangles: 2-D int32 tensor with shape [triangle_count, 3]. Each triplet
should contain vertex indices describing a triangle such that the
triangle's normal points toward the viewer if the forward order of the
triplet defines a clockwise winding of the vertices. Gradients with
respect to this tensor are not available.
normals: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is the xyz vertex normal for its corresponding vertex. Each
vector is assumed to be already normalized.
mtx_camera: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the camera model view matrix
mtx_perspective_frustrum: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the perspective and frustrum matrix
camera_position: 2-D tensor with shape [batch_size, 3] or 1-D tensor with
shape [3] specifying the XYZ world space camera position.
image_width: int specifying desired output image width in pixels.
image_height: int specifying desired output image height in pixels.
Returns:
A 4-D float32 tensor of shape [batch_size, image_height, image_width, 4]
containing the lit RGBA color values for each image at each pixel. RGB
colors are the intensity values before tonemapping and can be in the range
[0, infinity]. Clipping to the range [0,1] with tf_render.clip_by_value is likely
reasonable for both viewing and training most scenes. More complex scenes
with multiple lights should tone map color values for display only. One
simple tonemapping approach is to rescale color values as x/(1+x); gamma
    compression is another common technique. Alpha values are zero for
background pixels and near one for mesh pixels.
Raises:
ValueError: An invalid argument to the method is detected.
"""
if len(vertices.shape) != 3:
raise ValueError('Vertices must have shape [batch_size, vertex_count, 3].')
batch_size = vertices.shape[0].value
    # TODO: Debug Shape
    if mtx_camera.get_shape().as_list() == [4, 4]:
        mtx_camera = tf.tile(
            tf.expand_dims(mtx_camera, axis=0), [batch_size, 1, 1])
    elif mtx_camera.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_camera must have shape [batch_size, 4, 4]')
    if mtx_perspective_frustrum.get_shape().as_list() == [4, 4]:
        mtx_perspective_frustrum = tf.tile(
            tf.expand_dims(mtx_perspective_frustrum, axis=0), [batch_size, 1, 1])
    elif mtx_perspective_frustrum.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_perspective_frustrum must have shape [batch_size, 4, 4]')
# vertex attribute of depthmap is only z
vertex_attributes = vertices
#vertex_attributes = tf_render.expand_dims(vertex_attributes, -1)
# camera_matrices = camera_utils.look_at(camera_position, camera_lookat,
# camera_up)
#
# perspective_transforms = camera_utils.perspective(image_width / image_height,
# fov_y, near_clip, far_clip)
clip_space_transforms = tf.matmul(mtx_perspective_frustrum, mtx_camera, name="mtx_clip_space_transforms_batch")
pixel_attributes, alpha, _ = rasterize_triangles(
vertices, vertex_attributes, triangles, clip_space_transforms,
image_width, image_height, [99999999] * vertex_attributes.shape[2].value)
# Extract the interpolated vertex attributes from the pixel buffer and
# supply them to the shader:
filler_homo = tf.ones(shape=[pixel_attributes.shape[0], pixel_attributes.shape[1], pixel_attributes.shape[2], 1])
pixel_attributes = tf.concat([pixel_attributes, filler_homo], axis=3)
pixel_attributes = tf.reshape(pixel_attributes, shape=[batch_size, -1, 4])
pixel_attributes = tf.transpose(pixel_attributes, perm=[0, 2, 1])
pixel_attributes = tf.matmul(mtx_ext, pixel_attributes)
pixel_attributes = tf.transpose(pixel_attributes, perm=[0, 2, 1])
pixel_attributes = tf.reshape(pixel_attributes, shape=[batch_size, image_height, image_width, 4])
depth_map = pixel_attributes[:, :, :, 2]
pixel_mask = alpha > 0.5
pixel_mask = tf.cast(pixel_mask, dtype=tf.float32)
depth_map = tf.reverse(depth_map, axis=[1])
pixel_mask = tf.reverse(pixel_mask, axis=[1])
return depth_map, pixel_mask
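# Plain-numpy sketch (added; independent of the rasterizer) of the transform
# composed above: clip-space coords are perspective * modelview * vertex, and
# NDC follows from the perspective divide. The frustum values are arbitrary.
def _clip_space_demo():
    near, far = 0.1, 100.0
    perspective = np.array([[1, 0, 0, 0],
                            [0, 1, 0, 0],
                            [0, 0, -(far + near) / (far - near),
                             -2 * far * near / (far - near)],
                            [0, 0, -1, 0]], dtype=np.float32)
    modelview = np.eye(4, dtype=np.float32)
    v_h = np.array([0.0, 0.0, -2.0, 1.0], dtype=np.float32)
    clip = perspective @ modelview @ v_h
    return clip[:3] / clip[3]  # normalized device coordinates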
# ortho
def mesh_rendererOrtho_camera(vertices, triangles, normals, diffuse_colors,
mtx_camera, mtx_perspective_frustrum, light_positions, light_intensities,
image_width, image_height, ambient_color=None, background=-1
):
"""Renders an input scene using phong shading, and returns an output image.
Args:
vertices: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is an xyz position in world space.
triangles: 2-D int32 tensor with shape [triangle_count, 3]. Each triplet
should contain vertex indices describing a triangle such that the
triangle's normal points toward the viewer if the forward order of the
triplet defines a clockwise winding of the vertices. Gradients with
respect to this tensor are not available.
normals: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is the xyz vertex normal for its corresponding vertex. Each
vector is assumed to be already normalized.
diffuse_colors: 3-D float32 tensor with shape [batch_size,
vertex_count, 3]. The RGB diffuse reflection in the range [0,1] for
each vertex.
mtx_camera: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the camera model view matrix
mtx_perspective_frustrum: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the perspective and frustrum matrix
camera_position: 2-D tensor with shape [batch_size, 3] or 1-D tensor with
shape [3] specifying the XYZ world space camera position.
light_positions: a 3-D tensor with shape [batch_size, light_count, 3]. The
XYZ position of each light in the scene. In the same coordinate space as
pixel_positions.
light_intensities: a 3-D tensor with shape [batch_size, light_count, 3]. The
RGB intensity values for each light. Intensities may be above one.
image_width: int specifying desired output image width in pixels.
image_height: int specifying desired output image height in pixels.
specular_colors: 3-D float32 tensor with shape [batch_size,
vertex_count, 3]. The RGB specular reflection in the range [0, 1] for
each vertex. If supplied, specular reflections will be computed, and
both specular_colors and shininess_coefficients are expected.
shininess_coefficients: a 0D-2D float32 tensor with maximum shape
[batch_size, vertex_count]. The phong shininess coefficient of each
vertex. A 0D tensor or float gives a constant shininess coefficient
across all batches and images. A 1D tensor must have shape [batch_size],
and a single shininess coefficient per image is used.
ambient_color: a 2D tensor with shape [batch_size, 3]. The RGB ambient
color, which is added to each pixel in the scene. If None, it is
assumed to be black.
Returns:
A 4-D float32 tensor of shape [batch_size, image_height, image_width, 4]
containing the lit RGBA color values for each image at each pixel. RGB
colors are the intensity values before tonemapping and can be in the range
[0, infinity]. Clipping to the range [0,1] with tf_render.clip_by_value is likely
reasonable for both viewing and training most scenes. More complex scenes
with multiple lights should tone map color values for display only. One
simple tonemapping approach is to rescale color values as x/(1+x); gamma
    compression is another common technique. Alpha values are zero for
background pixels and near one for mesh pixels.
Raises:
ValueError: An invalid argument to the method is detected.
"""
if len(vertices.shape) != 3:
raise ValueError('Vertices must have shape [batch_size, vertex_count, 3].')
batch_size = vertices.shape[0].value
if len(normals.shape) != 3:
raise ValueError('Normals must have shape [batch_size, vertex_count, 3].')
if len(light_positions.shape) != 3:
raise ValueError(
'Light_positions must have shape [batch_size, light_count, 3].')
if len(light_intensities.shape) != 3:
raise ValueError(
'Light_intensities must have shape [batch_size, light_count, 3].')
if len(diffuse_colors.shape) != 3:
raise ValueError(
'vertex_diffuse_colors must have shape [batch_size, vertex_count, 3].')
if (ambient_color is not None and
ambient_color.get_shape().as_list() != [batch_size, 3]):
raise ValueError('Ambient_color must have shape [batch_size, 3].')
    # TODO: Debug Shape
    if mtx_camera.get_shape().as_list() == [4, 4]:
        mtx_camera = tf.tile(
            tf.expand_dims(mtx_camera, axis=0), [batch_size, 1, 1])
    elif mtx_camera.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_camera must have shape [batch_size, 4, 4]')
    if mtx_perspective_frustrum.get_shape().as_list() == [4, 4]:
        mtx_perspective_frustrum = tf.tile(
            tf.expand_dims(mtx_perspective_frustrum, axis=0), [batch_size, 1, 1])
    elif mtx_perspective_frustrum.get_shape().as_list() != [batch_size, 4, 4]:
        raise ValueError('mtx_perspective_frustrum must have shape [batch_size, 4, 4]')
vertex_attributes = tf.concat([normals, vertices, diffuse_colors], axis=2)
clip_space_transforms = tf.matmul(mtx_perspective_frustrum, mtx_camera, name="mtx_clip_space_transforms_batch")
pixel_attributes, alpha, tri_ids = rasterize_triangles(
vertices, vertex_attributes, triangles, clip_space_transforms,
image_width, image_height, [background] * vertex_attributes.shape[2].value)
# Extract the interpolated vertex attributes from the pixel buffer and
# supply them to the shader:
pixel_normals = tf.nn.l2_normalize(pixel_attributes[:, :, :, 0:3], dim=3)
pixel_positions = pixel_attributes[:, :, :, 3:6]
diffuse_colors = pixel_attributes[:, :, :, 6:9]
pixel_mask = tf.cast(tf.reduce_any(diffuse_colors >= 0, axis=3), tf.float32)
renders = phong_shader(
normals=pixel_normals,
alphas=pixel_mask,
pixel_positions=pixel_positions,
light_positions=light_positions,
light_intensities=light_intensities,
diffuse_colors=diffuse_colors,
camera_position=None,
specular_colors=None,
shininess_coefficients=None,
ambient_color=ambient_color)
#return renders, pixel_mask
pixel_mask = alpha > 0.5
pixel_mask = tf.cast(pixel_mask, dtype=tf.float32)
pixel_mask = tf.reverse(pixel_mask, axis=[1])
return renders, pixel_mask
def mesh_depthmapOrtho_camera(vertices, triangles,
mtx_ext, mtx_perspective_frustrum, image_width, image_height
):
"""Renders an input scene using phong shading, and returns an output image.
Args:
vertices: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is an xyz position in world space.
triangles: 2-D int32 tensor with shape [triangle_count, 3]. Each triplet
should contain vertex indices describing a triangle such that the
triangle's normal points toward the viewer if the forward order of the
triplet defines a clockwise winding of the vertices. Gradients with
respect to this tensor are not available.
normals: 3-D float32 tensor with shape [batch_size, vertex_count, 3]. Each
triplet is the xyz vertex normal for its corresponding vertex. Each
vector is assumed to be already normalized.
mtx_camera: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the camera model view matrix
mtx_perspective_frustrum: 3-D tensor with shape [batch_size, 4, 4] or 2-D tensor with
shape [4, 4] specifying the perspective and frustrum matrix
camera_position: 2-D tensor with shape [batch_size, 3] or 1-D tensor with
shape [3] specifying the XYZ world space camera position.
image_width: int specifying desired output image width in pixels.
image_height: int specifying desired output image height in pixels.
Returns:
A 4-D float32 tensor of shape [batch_size, image_height, image_width, 4]
containing the lit RGBA color values for each image at each pixel. RGB
colors are the intensity values before tonemapping and can be in the range
[0, infinity]. Clipping to the range [0,1] with tf_render.clip_by_value is likely
reasonable for both viewing and training most scenes. More complex scenes
with multiple lights should tone map color values for display only. One
simple tonemapping approach is to rescale color values as x/(1+x); gamma
compression is another common techinque. Alpha values are zero for
background pixels and near one for mesh pixels.
Raises:
ValueError: An invalid argument to the method is detected.
"""
if len(vertices.shape) != 3:
raise ValueError('Vertices must have shape [batch_size, vertex_count, 3].')
batch_size = vertices.shape[0].value
# TODO: Debug Shape
if mtx_ext.get_shape().as_list() == [4, 4]:
mtx_ext = tf.tile(
tf.expand_dims(mtx_ext, axis=0), [batch_size, 1, 1])
elif mtx_ext.get_shape().as_list() != [batch_size, 4, 4]:
raise ValueError('Mtx_ext must have shape [batch_size, 4, 4]')
if mtx_perspective_frustrum.get_shape().as_list() == [4, 4]:
mtx_perspective_frustrum = tf.tile(
tf.expand_dims(mtx_perspective_frustrum, axis=0), [batch_size, 1, 1])
elif mtx_perspective_frustrum.get_shape().as_list() != [batch_size, 4, 4]:
raise ValueError('Mtx_perspective_frustrum must have shape [batch_size, 4, 4]')
# the vertex attribute for the depth map is the xyz position; camera-space z is extracted after rasterization
vertex_attributes = vertices
#vertex_attributes = tf_render.expand_dims(vertex_attributes, -1)
# camera_matrices = camera_utils.look_at(camera_position, camera_lookat,
# camera_up)
#
# perspective_transforms = camera_utils.perspective(image_width / image_height,
# fov_y, near_clip, far_clip)
clip_space_transforms = tf.matmul(mtx_perspective_frustrum, mtx_ext, name="mtx_clip_space_transforms_batch")
pixel_attributes, alpha, _ = rasterize_triangles(
vertices, vertex_attributes, triangles, clip_space_transforms,
image_width, image_height, [99999999] * vertex_attributes.shape[2].value)
# Extract the interpolated vertex attributes from the pixel buffer and
# supply them to the shader:
filler_homo = tf.ones(shape=[pixel_attributes.shape[0], pixel_attributes.shape[1], pixel_attributes.shape[2], 1])
pixel_attributes = tf.concat([pixel_attributes, filler_homo], axis=3)
pixel_attributes = tf.reshape(pixel_attributes, shape=[batch_size, -1, 4])
pixel_attributes = tf.transpose(pixel_attributes, perm=[0, 2, 1])
pixel_attributes = tf.matmul(mtx_ext, pixel_attributes)
pixel_attributes = tf.transpose(pixel_attributes, perm=[0, 2, 1])
pixel_attributes = tf.reshape(pixel_attributes, shape=[batch_size, image_height, image_width, 4])
depth_map = pixel_attributes[:, :, :, 2]
pixel_mask = alpha > 0.5
pixel_mask = tf.cast(pixel_mask, dtype=tf.float32)
depth_map = tf.reverse(depth_map, axis=[1])
pixel_mask = tf.reverse(pixel_mask, axis=[1])
return depth_map, pixel_mask
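# Hedged usage sketch (illustrative names, not repo code): rendering a depth
# map for one mesh with shared 4x4 matrices, which the function tiles across
# the batch internally:
#   depth, mask = mesh_depthmapOrtho_camera(
#       verts,            # [batch_size, vertex_count, 3] float32
#       tris,             # [triangle_count, 3] int32
#       mtx_ext_4x4,      # [4, 4] camera extrinsic matrix
#       mtx_frustrum_4x4, # [4, 4] perspective/frustum matrix
#       image_width=256, image_height=256)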
| 1.875
| 2
|
scripts/evaluate_on_target.py
|
millicentli/clin-bias-summarization
| 0
|
12776415
|
<reponame>millicentli/clin-bias-summarization
import argparse
import json
import os
import pickle
import numpy as np
import pandas as pd
from sklearn.metrics import accuracy_score, average_precision_score, classification_report, log_loss, mean_squared_error, roc_auc_score
from transformers import BartTokenizer, BartForConditionalGeneration
## Initialize arguments
parser = argparse.ArgumentParser()
parser.add_argument('--task_type', default='binary', const='binary', nargs='?', choices=['binary', 'multiclass', 'regression'], help='what type of data the target_col_name is')
parser.add_argument('--output_dir', help='folder to output model/results', type=str)
parser.add_argument('--input_dir', help='folder for the inputs to evaluation', type=str)
parser.add_argument('--freeze_bart', help='freeze all BART layers and only use pre-trained representation', action='store_true')
parser.add_argument('--gridsearch_classifier', help='whether to run a grid search over the classifier parameters, using AUPRC as metric', action='store_true')
args = parser.parse_args()
# Gridsearch stuff (grid_auprcs, es_models, device and the data generators
# come from the training/setup code that precedes this excerpt)
if args.gridsearch_classifier:
idx_max = np.argmax(grid_auprcs)
predictor = es_models[idx_max].to(device)
opt_c = optimal_cs[idx_max]
else:
opt_c = 2.0
prediction_dict_val, merged_preds_val, embs_val = evaluate_on_set(val_generator, predictor, emb_gen = args.freeze_bart, c_val = opt_c)
merged_preds_val_list = [merged_preds_val[str(i)] for i in actual_val.index]
if args.task_type == 'binary':
acc = accuracy_score(actual_val.values.astype(int), np.array(merged_preds_val_list).round())
auprc = average_precision_score(actual_val.values.astype(int), merged_preds_val_list)
ll = log_loss(actual_val.values.astype(int), merged_preds_val_list)
roc = roc_auc_score(actual_val.values.astype(int), merged_preds_val_list)
print('Accuracy: %.5f' % acc)
print('AUPRC: %.5f' % auprc)
print('Log Loss: %.5f' % ll)
print('AUROC: %.5f' % roc)
elif args.task_type == 'regression':
mse = mean_squared_error(actual_val, merged_preds_val_list)
print('MSE: %.5f' % mse)
elif args.task_type == 'multiclass':
report = classification_report(actual_val.values.astype(int), np.array(merged_preds_val_list))
print(report)
prediction_dict_test, merged_preds_test, embs_test = evaluate_on_set(test_generator, predictor, emb_gen = args.freeze_bart, c_val = opt_c)
## Ignore the train stats
merged_preds_train, embs_train = {}, {}
# save args
json.dump(vars(args), open(os.path.join(args.output_dir, 'argparse_args.json'), 'w'))
# preds
rough_preds = {**merged_preds_val, **merged_preds_test, **merged_preds_train}
pickle.dump(rough_preds, open(os.path.join(args.output_dir, 'preds.pkl'), 'wb'))
| 2.40625
| 2
|
cupyx/fallback_mode/__init__.py
|
prkhrsrvstv1/cupy
| 6,180
|
12776416
|
<reponame>prkhrsrvstv1/cupy
from cupy import _util
# Attributes and Methods for fallback_mode
# Auto-execute numpy method when corresponding cupy method is not found
# "NOQA" to suppress flake8 warning
from cupyx.fallback_mode.fallback import numpy # NOQA
_util.experimental('cupyx.fallback_mode.numpy')
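# Hedged usage sketch: code written against this namespace transparently falls
# back to NumPy where CuPy has no implementation, e.g.
#   from cupyx.fallback_mode import numpy as np
#   a = np.arange(10)  # runs on CuPy when available, NumPy otherwise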
| 1.5625
| 2
|
tools/intogen/lib/python/intogensm/command/qc.py
|
globusgenomics/galaxy
| 1
|
12776417
|
<filename>tools/intogen/lib/python/intogensm/command/qc.py
from intogensm.command import RunCommand
class QcCommand(RunCommand):
cmd_name = "qc"
def execute(self):
self._wok_run(self.instance_name, self.conf_builder, "intogen-mutations:qc")
| 1.726563
| 2
|
openarticlegauge/plugins/oup.py
|
CottageLabs/OpenArticleGauge
| 1
|
12776418
|
from openarticlegauge import plugin
import re
class OUPPlugin(plugin.Plugin):
_short_name = __name__.split('.')[-1]
__version__='0.1' # consider incrementing or at least adding a minor version
# e.g. "0.1.1" if you change this plugin
__desc__ = "Handles articles from the Oxford University Press"
supported_url_format = r'(http|https){0,1}://.+?\.oxfordjournals\.org/.+'
_license_mappings = [
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
# same, but note "re-use" vs "reuse"
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted re-use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # Same as above but without the trailing slash in the URL in the license statement and 'use' rather than 'reuse'
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # Same as above but without the trailing slash in the URL and 'reuse' rather than 'use'
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{ # this license statement is the same as the one above, but somebody's missed out the "reuse" word after unrestricted
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-by', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by/3.0/'}
},
{"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is"
+ "\n" + ' '*21 + "properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Same as above but with the trailing slash in the URL in the license statement
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is"
+ "\n" + ' '*21 + "properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Subtly different text
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted non-commercial use, distribution, and reproduction in any medium, provided the original work is properly"
+ "\n" + ' '*21 + "and fully attributed":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
# Yet another subtly different case - note "reuse" immediately after unrestricted
{
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
# Variation on the above with a trailing slash in the license URL
{
"This is an Open Access article distributed under the terms of the Creative Commons Attribution Non-Commercial License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits unrestricted reuse, distribution, and reproduction in any medium, provided the original work is properly cited.":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
},
{ # Yet another case at eg: http://cardiovascres.oxfordjournals.org/content/98/2/286
"This is an Open Access article distributed under the terms of the Creative Commons Attribution License (http://creativecommons.org/licenses/by-nc/3.0/),"
+ "\n" + ' '*21 + "which permits non-commercial use, distribution, and reproduction in any medium, provided that the original authorship is properly"
+ "\n" + ' '*21 + "and fully attributed":
{'type': 'cc-nc', 'version':'3.0',
# also declare some properties which override info about this license in the licenses list (see licenses module)
'url': 'http://creativecommons.org/licenses/by-nc/3.0/'}
}
]
def capabilities(self):
return {
"type_detect_verify" : False,
"canonicalise" : [],
"detect_provider" : [],
"license_detect" : True
}
def supports(self, provider):
"""
Does the page_license plugin support this provider
"""
for url in provider.get("url", []):
if self.supports_url(url):
return True
return False
def supports_url(self, url):
if re.match(self.supported_url_format, url):
return True
return False
def license_detect(self, record):
"""
To respond to the provider identifier: *.oxfordjournals.org
This should determine the licence conditions of the OUP article and populate
the record['bibjson']['license'] (note the US spelling) field.
"""
# licensing statements to look for on this publisher's pages
# take the form of {statement: meaning}
# where meaning['type'] identifies the license (see licenses.py)
# and meaning['version'] identifies the license version (if available)
lic_statements = self._license_mappings
for url in record.provider_urls:
if self.supports_url(url):
self.simple_extract(lic_statements, record, url)
return (self._short_name, self.__version__)
def get_description(self, plugin_name):
pd = super(OUPPlugin, self).get_description(plugin_name)
pd.provider_support = "Supports urls which match the regular expression: " + self.supported_url_format
return pd
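# Illustrative sketch (an assumption about simple_extract's behaviour, not
# repo code): each {statement: meaning} entry is matched against the page
# text, and on a hit the meaning is copied into record['bibjson']['license']:
#   for mapping in lic_statements:
#       statement, meaning = list(mapping.items())[0]
#       if statement in page_text:
#           record['bibjson'].setdefault('license', []).append(meaning)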
| 2.203125
| 2
|
tick/solver/history/__init__.py
|
sumau/tick
| 411
|
12776419
|
# License: BSD 3 clause
# import tick.base
from .history import History
__all__ = ["History"]
| 1.046875
| 1
|
nodes/smads_endpoints_node.py
|
UTSMADS/smads_core
| 0
|
12776420
|
#!/usr/bin/env python3
import rospy
import threading
from enum import Enum
from smads_core.client import JackalClient
from smads_core.client import SpotClient
from smads_core.client import RobotClient
from smads_core.interface import RobotSensorInterface
from smads_core.interface import RobotNavigationInterface
class RobotType:
SPOT = 1
JACKAL = 2
platform_map = {
SPOT : SpotClient(),
JACKAL : JackalClient(),
}
class SMADSROS:
def __init__(self, client, sensor_poll_rate, robot_prefix="smads_platform"):
self.client = client
self.robot_prefix = robot_prefix
self.client_mutex = threading.Lock()
self.sensor_interface = RobotSensorInterface(client, self.client_mutex, sensor_poll_rate, robot_prefix)
self.navigation_interface = RobotNavigationInterface(client, self.client_mutex, robot_prefix)
def start(self):
x = threading.Thread(target=self.sensor_interface.start)
y = threading.Thread(target=self.navigation_interface.start)
x.start()
y.start()
rospy.spin()
if __name__ == '__main__':
try:
rospy.init_node('smads_ros_node', anonymous=False)
platform = rospy.get_param("~platform", RobotType.JACKAL)
client = RobotType.platform_map[platform]
platform_prefix = rospy.get_param("~platform_prefix", "smads_platform")
poll_rate = rospy.get_param("~sensor_poll_rate", 10)
smadsros = SMADSROS(client, poll_rate, platform_prefix)
smadsros.start()
except rospy.ROSInterruptException:
pass
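# Hedged usage note (not from the repo): with rosrun, the private parameters
# read above would typically be supplied as
#   rosrun smads_core smads_endpoints_node.py _platform:=2 _sensor_poll_rate:=10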
| 2.390625
| 2
|
ht/cogs/misc.py
|
mirnovov/heraldtron
| 0
|
12776421
|
import discord, asyncio, typing, random, os, html
from discord import ui
from discord.ext import commands
from collections import defaultdict
from datetime import datetime, timezone, timedelta
from .. import converters, embeds, services, utils, views
class MiscStuff(utils.MeldedCog, name = "Miscellaneous", category = "Other", limit = True):
ACTIVITIES = {
-1: "",
0: "Playing",
1: "Streaming",
2: "Listening to",
3: "Watching",
4: "Activity:",
5: "Competing in"
}
def __init__(self, bot):
self.bot = bot
@commands.command(help = "Retrieves a random piece of advice.\nUses adviceslip.com", aliases = ("ad",))
@utils.trigger_typing
async def advice(self, ctx):
result = await utils.get_json(self.bot.session, "https://api.adviceslip.com/advice", content_type="text/html")
embed = embeds.GENERIC.create(result["slip"]["advice"], "", heading = "Random advice")
embed.set_footer(text = "Retrieved using adviceslip.com")
await ctx.send(embed = embed)
@commands.command(
help = "Generates a continuously updated countdown post.",
aliases = ("time", "cd")
)
async def countdown(self, ctx, *, elapsed : converters.Date):
delta = (elapsed - datetime.now(tz = timezone.utc)) + timedelta(minutes = 1)
embed = embeds.COUNTDOWN.create(f"<t:{elapsed.timestamp():.0f}:R>", "")
embed.add_field(name = "End time", value = f"<t:{elapsed.timestamp():.0f}:F>")
if delta.total_seconds() < 0:
raise utils.CustomCommandError(
"Date has already occured",
"The date that you entered is in the past."
)
desc = (await views.RespondOrReact(ctx).run(
f"Your countdown will expire <t:{elapsed.timestamp():.0f}:R>."
" Give it a name by responding below."
)).content
embed.description = desc
message = await ctx.send(embed = embed)
@commands.command(
help = "Generates a competition distribution.\n If no number is specified, asks for a list of names.",
aliases = ("dt", "dist")
)
async def distribute(self, ctx, size : converters.Range(3, 50) = None):
if not size:
message = await views.RespondOrReact(ctx, timeout = views.LONG_TIMEOUT).run(
"Enter a list of contestants separated by line breaks (\u21E7\u23CE on desktop)",
)
names = dict(enumerate(message.content.split("\n"), start = 1))
size = await converters.Range(3, 50).convert(ctx, len(names))
else: names = None
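# distribution samples a random derangement of 1..keysize-1: every contestant
# is assigned a different contestant, and the sampler restarts itself when
# only the fixed point would remain as a choice.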
def distribution(keysize):
vals = list(range(1, keysize))
candidates = {i: None for i in range(1, keysize)}
for c in candidates:
same = c in vals
if len(vals) == 1 and same: #try again, no valid option
candidates = distribution(keysize)
break
elif same: vals.remove(c)
candidates[c] = vals.pop(random.randrange(0, len(vals)))
if same: vals.append(c)
return candidates
dist = distribution(size + 1)
display = lambda e: f"**{e}**: {names[e]}" if names else f"**{e}**"
output = "".join(f"{display(k)} \U0001F86A {display(v)}\n" for k, v in dist.items())
await ctx.send(output)
@commands.command(help = "Conducts a search using Google Images.", aliases = ("img", "gi"))
@utils.trigger_typing
async def imgsearch(self, ctx, *, query):
await services.gis(ctx, "" + query)
@commands.command(
help = "Chooses a random number.\n"
" By default, this is out of 6, but another value can be specified.",
aliases = ("dice", "d")
)
async def roll(self, ctx, ceiling : converters.Range(2, 9999) = 6):
message = await ctx.send(":game_die: | Rolling the dice...")
result = random.randrange(1, ceiling + 1)
await asyncio.sleep(2)
await message.edit(content=f":game_die: | The dice landed on... **{result}**!")
@commands.command(help = "Sends a post as the bot user. Handy for jokes and such.", aliases = ("st",), hidden = True)
@commands.is_owner()
async def sendtext(self, ctx, channel : typing.Optional[discord.TextChannel] = None, *, message_content):
channel = channel or ctx.channel
await channel.send(message_content)
@commands.command(
help = "Completes a passage of text using machine learning.\n"
" This uses DeepAI's online model to compute the result.",
aliases=("aitext", "tg")
)
@utils.trigger_typing
async def textgen(self, ctx, *, text : str):
url = "https://api.deepai.org/api/text-generator"
data = {"text": text.strip()}
headers = {"api-key": ctx.bot.conf["DEEP_AI"].strip()}
async with ctx.bot.session.post(url, data = data, headers = headers) as source:
if not source.ok:
raise utils.CustomCommandError(
"Invalid HTTP request",
f"Please try again. If problems persist, contact the bot's maintainer."
)
result_json = await source.json()
result = result_json["output"]
newtext = result[result.index(text) + len(text):]
await ctx.send(f":abcd: | **Text generated!**\n\n*{text}*{newtext}")
@commands.group(
invoke_without_command = True,
help = "Asks a trivia question that users can react to.\n"
"Optionally, a numeric category can be specified."
"\nCourtesy of the Open Trivia Database.\n\u0020\n",
aliases = ("q","tr")
)
@utils.trigger_typing
async def trivia(self, ctx, category : typing.Optional[int] = -1):
catstring = "" if category == -1 else f"&category={category}"
json = f"https://opentdb.com/api.php?amount=1{catstring}"
result = await utils.get_json(self.bot.session, json)
if result["response_code"] == 1:
raise utils.CustomCommandError(
"Invalid category code",
f"Consult `{ctx.clean_prefix}trivia categories` to see the available codes."
)
result = result["results"][0]
info = f"**{result['category']}** | {result['difficulty'].capitalize()}\n\n"
embed = embeds.GENERIC.create(html.unescape(result["question"]), info, heading = "Trivia")
correct = random.randrange(0,2 if result["type"] == "boolean" else 4)
answers = result["incorrect_answers"]
answers.insert(correct, result["correct_answer"])
embed.description += f"The correct answer will appear in **one minute.**"
embed.set_footer(text = f"Courtesy of the Open Trivia Database.")
view = ui.View()
users = {}
tuple(view.add_item(views.TriviaButton(answer, users)) for answer in answers)
message = await ctx.send(embed = embed, view = view)
await asyncio.sleep(60)
embed.description = f"{info}The correct answer is: **{html.unescape(answers[correct])}**"
updated = await message.channel.fetch_message(message.id)
if updated is None: return #message deleted
results = defaultdict(list)
for user, answer in users.items():
results[answer].append(user)
stats = "\n".join(f"- {a}: {','.join(u)} (**{len(u)}**)" for a, u in results.items())
if stats: embed.description += f"\n\n**Responses:**\n\u0020{stats}"
await message.edit(embed = embed, view = None)
@trivia.command(help = "Lists all categories.")
async def categories(self, ctx):
result = await utils.get_json(self.bot.session, f"https://opentdb.com/api_category.php")
embed = embeds.GENERIC.create(
"Trivia categories", "To choose a category, specify its numeric ID.", heading = "Trivia"
)
for category in result["trivia_categories"]:
embed.add_field(name = category["name"], value=category["id"], inline=True)
embed.set_footer(text = f"Courtesy of the Open Trivia Database.")
await ctx.send(embed = embed)
@commands.command(help = "Looks up a Discord user.", aliases = ("u",))
@utils.trigger_typing
async def user(self, ctx, *, user : converters.MemberOrUser = None):
user = user or ctx.author
if not isinstance(user, discord.Member) and ctx.guild:
user = ctx.guild.get_member(user.id) or user
embed = embeds.USER_INFO.create(f"{user.name}#{user.discriminator}", f"{user.mention}")
if user.bot:
embed.description += " | **Bot**"
embed.set_thumbnail(url = user.display_avatar.with_size(512).url)
embed.add_field(name = "Created", value = utils.stddate(user.created_at), inline = True)
embed.description += "\n\u200b"
if isinstance(user, discord.Member):
embed.colour = user.colour if user.colour.value != 0 else embeds.DEFAULT
for activity in user.activities:
preface = activity.emoji or "" if hasattr(activity, "emoji") else f"**{self.ACTIVITIES[int(activity.type)]}**"
embed.description += f"\n{preface} {activity.name}"
embed.add_field(name = "Joined", value = utils.stddate(user.joined_at), inline = True)
embed.add_field(name = "Status", value = f"Currently **{user.raw_status}**", inline = True)
if isinstance(ctx.channel, discord.abc.GuildChannel):
roles = (str(role.mention) for role in user.roles[1:])
embed.add_field(name = "Roles", value = ", ".join(("@everyone ", *roles)), inline = False)
await ctx.send(embed = embed)
def setup(bot):
bot.add_cog(MiscStuff(bot))
| 2.578125
| 3
|
src/main.py
|
Pikaurd/ps_mockup_downloader
| 0
|
12776422
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
options = Options()
options.headless = True
# options.add_argument('--proxy-server=http://127.0.0.1:8001')
options.binary_location = '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
driver = webdriver.Chrome(
executable_path='/usr/local/bin/chromedriver', options=options)
# driver.get('https://mrmockup.com/freebies/')
driver.get('https://cn.bing.com/')
content = driver.find_elements_by_css_selector(
'#b_footerItems li.b_footerItems_icp')
anchor = driver.find_element_by_css_selector('#scpl4')
print(f"a.href: {anchor.get_attribute('href')}")
# print(driver.page_source)
| 2.640625
| 3
|
wflow-py/wflow/wflow_topoflex.py
|
edwinkost/wflow
| 0
|
12776423
|
#!/usr/bin/python
"""
Definition of the wflow_topoflex model.
---------------------------------------
A flexible hydrological model built from configurable reservoir
components (snow, interception, agriculture, unsaturated zone, fast and
slow runoff), evaluated per land-cover class.
Usage:
wflow_topoflex -C case -R Runid -c inifile
-C: set the name of the case (directory) to run
-R: set the name runId within the current case
-c name of the config file (in the case directory)
$Author: schelle $
$Id: wflow_sceleton.py 898 2014-01-09 14:47:06Z schelle $
$Rev: 898 $
"""
import wflow.reservoir_Si as reservoir_Si
import wflow.reservoir_Sa as reservoir_Sa
import wflow.reservoir_Sw as reservoir_Sw
import wflow.reservoir_Su as reservoir_Su
import wflow.reservoir_Sf as reservoir_Sf
import wflow.reservoir_Ss as reservoir_Ss
import wflow.JarvisCoefficients as JarvisCoefficients
import numpy
import os
import os.path
import shutil, glob
import getopt
import time
import pdb
from wflow.wf_DynamicFramework import *
from wflow.wflow_adapt import *
from copy import deepcopy as copylist
# TODO: see below
"""
Remove the IRURFR_L statements?
Update the documentation!
Write output per class, using class abbreviations (see outputtss_0) - Jaap!
Move the routing functions to a separate file, call them from there, and make it configurable which ones are used - Hessel!
Add logging, and make sure output can also be written once every x timesteps
"""
def usage(*args):
sys.stdout = sys.stderr
for msg in args: print(msg)
print(__doc__)
sys.exit(0)
class WflowModel(DynamicModel):
"""
The user defined model class. This is your work!
"""
def __init__(self, cloneMap, Dir, RunDir, configfile):
"""
*Required*
The init function **must** contain what is shown below. Other functionality
may be added by you if needed.
"""
DynamicModel.__init__(self)
setclone(os.path.join(Dir, 'staticmaps', cloneMap))
self.runId = RunDir
self.caseName = os.path.abspath(Dir)
self.Dir = os.path.abspath(Dir)
self.configfile = configfile
def parameters(self):
"""
List all the parameters (both static and forcing) here. Use the wf_updateparameters()
function to update them in the initial section (static) and in the dynamic section for
dynamic parameters and forcing data.
Possible parameter types are:
+ staticmap: Read at startup from map
+ statictbl: Read at startup from tbl, fallback to map (need Landuse, Soil and TopoId (subcatch) maps!)
+ timeseries: read map for each timestep
+ monthlyclim: read a map corresponding to the current month (12 maps in total)
+ dailyclim: read a map corresponding to the current day of the year
+ hourlyclim: read a map corresponding to the current hour of the day (24 in total)
:return: List of modelparameters
"""
modelparameters = []
# Static model parameters
modelparameters.append(
self.ParamType(name="Altitude", stack="staticmaps/wflow_dem.map", type="staticmap", default=0.0,
verbose=False, lookupmaps=[]))
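# Hedged illustration (not part of the original model): a dynamic forcing
# series would be registered the same way, e.g.
# modelparameters.append(
#     self.ParamType(name="Precipitation", stack="inmaps/P", type="timeseries",
#                    default=0.0, verbose=False, lookupmaps=[]))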
return modelparameters
def stateVariables(self):
"""
*Required*
Returns a list of state variables that are essential to the model.
This list is essential for the resume and suspend functions to work.
This function is specific for each model and **must** be present. This is
where you specify the state variables of your model. If your model is stateless
this function must return an empty array (states = []).
For this model the per-class reservoir storages (Si, Su, Sa, Sw, Sf, Sfa),
the combined groundwater storage Ss and the routing state Qstate are the
model states.
"""
states = ['Si', 'Su', 'Sf', 'Ss', 'Sw', 'Sa', 'Sfa', 'Qstate']
return states
def supplyCurrentTime(self):
"""
*Optional*
Supplies the current time in seconds after the start of the run
This function is optional. If it is not set the framework assumes
the model runs with daily timesteps.
Output:
- time in seconds since the start of the model run
"""
return self.currentTimeStep() * int(configget(self.config, 'model', 'timestepsecs', '86400'))
def readtblDefault2(self,pathtotbl,landuse,subcatch,soil, default):
"""
First check if a prepared map of the same name is present
in the staticmaps directory. Next try to
read a tbl file to match a landuse, catchment and soil map. Returns
the default value if the tbl file is not found.
Input:
- pathtotbl: full path to table file
- landuse: landuse map
- subcatch: subcatchment map
- soil: soil map
- default: default value
Output:
- map constructed from tbl file or map with default value
"""
mapname = os.path.dirname(pathtotbl) + "/../staticmaps/" + os.path.splitext(os.path.basename(pathtotbl))[0]+".map"
if os.path.exists(mapname):
self.logger.info("reading map parameter file: " + mapname)
rest = cover(readmap(mapname),default)
else:
if os.path.isfile(pathtotbl):
rest=cover(lookupscalar(pathtotbl,landuse,subcatch,soil), default)
self.logger.info("Creating map from table: " + pathtotbl)
else:
self.logger.warn("tbl file not found (" + pathtotbl + ") returning default value: " + str(default))
rest = scalar(default)
return rest
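# Illustrative call (not repo code): something like
#   self.readtblDefault2(self.Dir + "/intbl/imaxW.tbl", self.LandUse, subcatch, self.Soil, 1.5)
# first looks for staticmaps/imaxW.map, then falls back to the tbl lookup,
# and finally to a constant map with the default value 1.5.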
def suspend(self):
"""
*Required*
Suspends the model to disk. All variables needed to restart the model
are saved to disk as pcraster maps. Use resume() to re-read them
This function is required.
"""
self.logger.info("Saving initial conditions...")
if self.fewsrun:
self.logger.info("Saving initial conditions for FEWS...")
# self.wf_suspend(os.path.join(self.Dir, "outstate"))
[report(self.Si[i], self.Dir + "/outstate/Si" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSi[i]]
[report(self.Su[i], self.Dir + "/outstate/Su" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSu[i]]
[report(self.Sa[i], self.Dir + "/outstate/Sa" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSa[i]]
[report(self.Sf[i], self.Dir + "/outstate/Sf" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSf[i]]
[report(self.Sfa[i], self.Dir + "/outstate/Sfa" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSfa[i]]
[report(self.Sw[i], self.Dir + "/outstate/Sw" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSw[i]]
report(self.Ss, self.Dir + "/outstate/Ss.map")
report(self.Qstate, self.Dir + "/outstate/Qstate.map")
#: It is advised to use the wf_suspend() function
#: here which will suspend the variables that are given by stateVariables
#: function.
[report(self.Si[i], self.SaveDir + "/outstate/Si" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSi[i]]
[report(self.Su[i], self.SaveDir + "/outstate/Su" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSu[i]]
[report(self.Sa[i], self.SaveDir + "/outstate/Sa" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSa[i]]
[report(self.Sf[i], self.SaveDir + "/outstate/Sf" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSf[i]]
[report(self.Sfa[i], self.SaveDir + "/outstate/Sfa" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSfa[i]]
[report(self.Sw[i], self.SaveDir + "/outstate/Sw" + self.NamesClasses[i] + ".map") for i in self.Classes if self.selectSw[i]]
report(self.Ss, self.SaveDir + "/outstate/Ss.map")
report(self.Qstate, self.SaveDir + "/outstate/Qstate.map")
[report(self.percent[i], self.SaveDir + "/outmaps/percent" + self.NamesClasses[i] + ".map") for i in
self.Classes]
report(self.percentArea, self.SaveDir + "/outmaps/percentArea.map")
report(self.surfaceArea, self.SaveDir + "/outmaps/surfaceArea.map")
report(self.sumprecip, self.SaveDir + "/outsum/sumprecip.map")
report(self.sumevap, self.SaveDir + "/outsum/sumevap.map")
report(self.sumpotevap, self.SaveDir + "/outsum/sumpotevap.map")
report(self.sumtemp, self.SaveDir + "/outsum/sumtemp.map")
report(self.sumrunoff, self.SaveDir + "/outsum/sumrunoff.map")
report(self.sumwb, self.SaveDir + "/outsum/sumwb.map")
def initial(self):
"""
*Required*
Initial part of the model, executed only once. It reads all static model
information (parameters) and sets up the variables used in modelling.
This function is required. The contents are free. However, in order to
easily connect to other models it is advised to adhere to the directory
structure used in the other models.
"""
#: pcraster option to calculate with units or cells. Not really an issue
#: in this model but always good to keep in mind.
setglobaloption("unittrue")
self.thestep = scalar(0)
#: files to be used in case of timesries (scalar) input to the model
# files for forcing data
self.precipTss = os.path.join(self.Dir,
configget(self.config, "model", "Pfile_1", ""))
self.evapTss = os.path.join(self.Dir,
configget(self.config, "model", "Efile_1", ""))
self.tempTss = os.path.join(self.Dir,
configget(self.config, "model", "Tfile_1", ""))
self.precipTss2 = os.path.join(self.Dir,
configget(self.config, "model", "Pfile_2", ""))
self.evapTss2 = os.path.join(self.Dir,
configget(self.config, "model", "Efile_2", ""))
self.tempDMTss = os.path.join(self.Dir,
configget(self.config, "model", "TDMfile_2", ""))
self.radnTss = os.path.join(self.Dir,
configget(self.config, "model", "RNfile_2", ""))
self.radsTss = os.path.join(self.Dir,
configget(self.config, "model", "RSfile_2", ""))
self.sgammaTss = os.path.join(self.Dir,
configget(self.config, "model", "SGfile_2", ""))
self.vpdTss = os.path.join(self.Dir,
configget(self.config, "model", "VPDfile_2", ""))
self.windTss = os.path.join(self.Dir,
configget(self.config, "model", "Wfile_2", ""))
self.daySTss = os.path.join(self.Dir,
configget(self.config, "model", "DSfile_2", ""))
self.dayETss = os.path.join(self.Dir,
configget(self.config, "model", "DEfile_2", ""))
self.logger.info(
"running for " + str(self.nrTimeSteps()) + " timesteps") # keeping track of number of timesteps
self.fewsrun = int(configget(self.config,"model","fewsrun","0"))
# Set and get defaults from ConfigFile here ###################################
self.timestepsecs = int(configget(self.config,
"model", "timestepsecs", "3600")) # number of seconds in a timestep
self.scalarInput = int(configget(self.config,
"model", "ScalarInput", "1")) # forcing data in maps (0) or timeseries (1)
self.InputSeries = int(configget(self.config,
"model", "InputSeries", "1")) # forcing data in maps (0) or timeseries (1)
self.reinit = int(configget(self.config,
"run", "reinit", "0"))
self.intbl = configget(self.config,
"model","intbl","intbl")
self.maxTransit = float(configget(self.config,
"model", "maxTransitTime", "34")) # maximum transit time in the catchment
self.distForcing = int(configget(self.config,
"model", "DistForcing",
"10")) # number of different forcing inputs (eg. number of rainfall stations)
self.maxGaugeId = int(configget(self.config,
"model", "maxGaugeId", "10")) # highest index of all used meteo stations
self.IRURFR_L = int(configget(self.config,
"model", "L_IRURFR",
"0")) # combination of reservoirs that are distributed (1: all these reservoirs are distributed)
self.URFR_L = int(configget(self.config,
"model", "L_URFR",
"0")) # combination of reservoirs that are distributed (1: all these reservoirs are distributed)
self.FR_L = int(configget(self.config,
"model", "L_FR",
"0")) # combination of reservoirs that are distributed (1: all these reservoirs are distributed)
self.Ctime = int(configget(self.config,
"model", "spinUp_time",
"7775")) # number of timesteps for which no data needs to be recorded
self.NamesClasses = eval(str(configget(self.config,
"model", "classes", "['W','H','P']"))) # classes used in model
self.Classes = [x for x in range(len(self.NamesClasses))] # numbering of classes
# selection of the reservoir conceptualisation - codes are described in the reservoir files
self.selectSw = configget(self.config, "model",
"selectSw", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSi = configget(self.config, "model",
"selectSi", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSa = configget(self.config, "model",
"selectSa", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSu = configget(self.config, "model",
"selectSu", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSf = configget(self.config, "model",
"selectSf", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSfa = configget(self.config, "model",
"selectSfa", "0, 0, 0").replace(
' ', '').replace('[', '').replace(
']', '').replace(
'None', '').split(',')
self.selectSs = configget(self.config, "model", "selectSs", "groundWaterCombined3")
self.selectRout = configget(self.config, "model", "selectRout", " ")
# static maps to use (normally default)
wflow_subcatch = configget(self.config,
"model", "wflow_subcatch", "staticmaps/wflow_subcatch.map")
wflow_catchArea = configget(self.config,
"model", "wflow_subcatch", "staticmaps/wflow_catchmentAreas.map")
wflow_dem = configget(self.config,
"model", "wflow_dem", "staticmaps/wflow_dem.map")
wflow_ldd = configget(self.config,
"model", "wflow_ldd", "staticmaps/wflow_ldd.map")
wflow_landuse = configget(self.config,
"model","wflow_landuse","staticmaps/wflow_landuse.map")
wflow_soil = configget(self.config,
"model","wflow_soil","staticmaps/wflow_soil.map")
wflow_gauges = configget(self.config,
"model", "wflow_gauges", "staticmaps/wflow_gauges.map")
wflow_mgauges = configget(self.config,
"model", "wflow_mgauges", "staticmaps/wflow_mgauges.map")
wflow_surfaceArea = configget(self.config,
"model", "wflow_surfaceArea", "staticmaps/wflow_surfaceArea.map")
wflow_transit = configget(self.config,
"model", "wflow_transit", "staticmaps/wflow_transit.map")
wflow_velocity = configget(self.config,
"model", "wflow_velocity", "staticmaps/wflow_velocity.map")
wflow_percent = [configget(self.config,
"model", "wflow_percent_" + str(self.Classes[i]),
"staticmaps/wflow_percent" + str(self.Classes[i]) + ".map") for i in self.Classes]
self.rst_laiTss = [configget(self.config,
"model", "rst_lai_" + str(self.Classes[i]),
"staticmaps/rst_lai_" + str(self.Classes[i]) + ".map") for i in self.Classes]
# 2: Input base maps ########################################################
subcatch = ordinal(
readmap(os.path.join(self.Dir, wflow_subcatch))) # Determines the area of calculations (all cells > 0)
subcatch = ifthen(subcatch > 0, subcatch)
self.Altitude = readmap(os.path.join(self.Dir, wflow_dem)) * scalar(
defined(subcatch)) #: The digital elevation map (DEM)
self.TopoLdd = readmap(os.path.join(self.Dir, wflow_ldd)) #: The local drainage definition map (ldd)
self.TopoId = readmap(
os.path.join(self.Dir, wflow_subcatch)) #: Map define the area over which the calculations are done (mask)
self.catchArea = scalar(ifthen(self.TopoId > 0, scalar(1)))
self.LandUse=ordinal(self.wf_readmap(os.path.join(self.Dir , wflow_landuse),0.0,fail=True))#: Map with lan-use/cover classes
self.LandUse=cover(self.LandUse,ordinal(ordinal(subcatch) > 0))
self.Soil=ordinal(self.wf_readmap(os.path.join(self.Dir , wflow_soil),0.0,fail=True))#: Map with soil classes
self.Soil=cover(self.Soil,ordinal(ordinal(subcatch) > 0))
self.TopoId = ifthen(scalar(self.TopoId) > 0, self.TopoId)
self.surfaceArea = scalar(readmap(os.path.join(self.Dir, wflow_surfaceArea))) #: Map with surface area per cell
self.totalArea = areatotal(self.surfaceArea, nominal(self.TopoId))
self.percentArea = self.surfaceArea / self.totalArea
self.Transit = scalar(readmap(os.path.join(self.Dir, wflow_transit))) #: Map with transit time per cell
self.velocity = scalar(readmap(os.path.join(self.Dir, wflow_velocity))) #: Map with flow velocity per cell
self.gaugesR = nominal(readmap(os.path.join(self.Dir, wflow_gauges)))
self.percent = []
for i in self.Classes:
self.percent.append(readmap(os.path.join(self.Dir, wflow_percent[i])))
# MODEL PARAMETERS - VALUES PER CLASS
self.D = eval(str(configget(self.config, "model", "D", "[0]")))
self.Tf = eval(str(configget(self.config, "model", "Tf", "[0]")))
self.Tfa = eval(str(configget(self.config, "model", "Tfa", "[0]")))
# MODEL PARAMETERS - BASED ON TABLES
self.imax = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/imax" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,1.5) for i in self.Classes]
self.sumax = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/sumax" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,70) for i in self.Classes]
self.samax = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/samax" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,50) for i in self.Classes]
self.samin = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/samin" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.1) for i in self.Classes]
self.beta = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/beta" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.4) for i in self.Classes]
self.betaA = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/betaA" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.2) for i in self.Classes]
self.Kf = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Kf" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.005) for i in self.Classes]
self.Kfa = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Kfa" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.05) for i in self.Classes]
self.perc = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/perc" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.0035) for i in self.Classes]
self.cap = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/cap" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.028) for i in self.Classes]
self.LP = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/LP" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.15) for i in self.Classes]
self.Ks = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Ks" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.0004) for i in self.Classes]
self.Fmax = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Fmax" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,1) for i in self.Classes]
self.Fmin = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Fmin" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0) for i in self.Classes]
self.decF = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/decF" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.5) for i in self.Classes]
self.dayDeg = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/dayDeg" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,1) for i in self.Classes]
self.FrDur0 = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/FrDur0" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,-5) for i in self.Classes]
self.FrDur1 = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/FrDur1" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0) for i in self.Classes]
self.ratFT = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/ratFT" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,1) for i in self.Classes]
self.Tt = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Tt" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,1) for i in self.Classes]
self.Tm = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Tm" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,2) for i in self.Classes]
self.Fm = [self.readtblDefault2(self.Dir + "/" + self.intbl + "/Fm" + self.NamesClasses[i] + ".tbl",self.LandUse,subcatch,self.Soil,0.2) for i in self.Classes]
# Jarvis stressfunctions
self.lamda = eval(str(configget(self.config, "model", "lamda", "[0]")))
self.lamdaS = eval(str(configget(self.config, "model", "lamdaS", "[0]")))
# initialise list for routing
self.trackQ = [0 * scalar(self.catchArea)] * int(self.maxTransit)
# initialise list for lag function
self.convQu = [[0 * scalar(self.catchArea)] * self.Tf[i] for i in self.Classes]
self.convQa = [[0 * scalar(self.catchArea)] * self.Tfa[i] for i in self.Classes]
if self.scalarInput:
self.gaugesMap = nominal(readmap(os.path.join(self.Dir,
wflow_mgauges))) #: Map with locations of rainfall/evap/temp gauge(s). Only needed if the input to the model is not in maps
self.OutputId = readmap(os.path.join(self.Dir, wflow_subcatch)) # location of subcatchment
self.OutputIdRunoff = boolean(ifthenelse(self.gaugesR == 1, 1 * scalar(self.TopoId),
0 * scalar(self.TopoId))) # location of subcatchment
self.ZeroMap = 0.0 * scalar(subcatch) # map with only zeros
self.wf_multparameters()
# For in memory override:
self.P = self.ZeroMap
self.PET = self.ZeroMap
self.TEMP = self.ZeroMap
self.logger.info("Linking parameters to landuse, catchment and soil...")
# Initializing of variables
self.logger.info("Initializing of model variables..")
self.TopoLdd = lddmask(self.TopoLdd, boolean(self.TopoId))
catchmentcells = maptotal(scalar(self.TopoId))
self.sumprecip = self.ZeroMap # accumulated rainfall for water balance
self.sumevap = self.ZeroMap # accumulated evaporation for water balance
self.sumrunoff = self.ZeroMap # accumulated runoff for water balance (weighted for upstream area)
self.sumpotevap = self.ZeroMap # accumulated potential evaporation for water balance
self.sumtemp = self.ZeroMap # accumulated temperature for water balance
self.Q = self.ZeroMap
self.sumwb = self.ZeroMap
# Define timeseries outputs. There seems to be a bug and the .tss files are
# saved in the current dir...
self.SaveDir = os.path.join(self.Dir, self.runId)
self.logger.info("Starting Dynamic run...")
def resume(self):
"""
*Required*
This function is required. Read initial state maps (they are output of a
previous call to suspend()). The implementation shown here is the most basic
setup needed.
"""
if self.reinit == 1:
# self.logger.info("Setting initial conditions to default (zero!)")
self.logger.info("Setting initial conditions to preset values in main script!!")
self.Si = [self.ZeroMap] * len(self.Classes)
self.Sw = [self.ZeroMap] * len(self.Classes)
self.Su = [self.ZeroMap] * len(self.Classes)
self.Sa = [self.ZeroMap] * len(self.Classes)
self.Sf = [self.ZeroMap] * len(self.Classes)
self.Sfa = [self.ZeroMap] * len(self.Classes)
self.Ss = self.ZeroMap # for combined gw reservoir
self.Qstate = self.catchArea * 0 # for combined gw reservoir
self.Qstate_t = self.catchArea * 0
# set initial storage values
# pdb.set_trace()
self.Sa = [0.05 * self.samax[i] * scalar(self.catchArea) for i in self.Classes]
self.Su = [self.sumax[i] * scalar(self.catchArea) for i in self.Classes]
self.Ss = self.Ss + 30 * scalar(self.catchArea) # for combined gw reservoir
else:
# self.wf_resume(self.Dir + "/instate/")
self.Si = []
for i in self.Classes:
if self.selectSi[i]:
self.Si.append(readmap(os.path.join(self.Dir, 'instate', 'Si' + self.NamesClasses[i] + '.map')))
else:
self.Si.append(self.ZeroMap)
self.Sw = []
for i in self.Classes:
if self.selectSw[i]:
self.Sw.append(readmap(os.path.join(self.Dir, 'instate', 'Sw' + self.NamesClasses[i] + '.map')))
else:
self.Sw.append(self.ZeroMap)
self.Sa = []
for i in self.Classes:
if self.selectSa[i]:
self.Sa.append(readmap(os.path.join(self.Dir, 'instate', 'Sa' + self.NamesClasses[i] + '.map')))
else:
self.Sa.append(self.ZeroMap)
self.Su = []
for i in self.Classes:
if self.selectSu[i]:
self.Su.append(readmap(os.path.join(self.Dir, 'instate', 'Su' + self.NamesClasses[i] + '.map')))
else:
self.Su.append(self.ZeroMap)
self.Sf = []
for i in self.Classes:
if self.selectSf[i]:
self.Sf.append(readmap(os.path.join(self.Dir, 'instate', 'Sf' + self.NamesClasses[i] + '.map')))
else:
self.Sf.append(self.ZeroMap)
self.Sfa = []
for i in self.Classes:
if self.selectSfa[i]:
self.Sfa.append(readmap(os.path.join(self.Dir, 'instate', 'Sfa' + self.NamesClasses[i] + '.map')))
else:
self.Sfa.append(self.ZeroMap)
self.Ss = readmap(os.path.join(self.Dir, 'instate', 'Ss.map'))
self.Qstate = readmap(os.path.join(self.Dir, 'instate', 'Qstate.map'))
self.wbSi_ = [self.ZeroMap] * len(self.Classes)
self.wbSu_ = [self.ZeroMap] * len(self.Classes)
self.wbSa_ = [self.ZeroMap] * len(self.Classes)
self.wbSw_ = [self.ZeroMap] * len(self.Classes)
self.wbSf_ = [self.ZeroMap] * len(self.Classes)
self.wbSfa_ = [self.ZeroMap] * len(self.Classes)
self.wbSfrout = self.ZeroMap
self.wbSs = self.ZeroMap
self.Ei_ = [self.ZeroMap] * len(self.Classes)
self.Pe_ = [self.ZeroMap] * len(self.Classes)
self.Si_ = [self.ZeroMap] * len(self.Classes)
self.Eu_ = [self.ZeroMap] * len(self.Classes)
self.Ea_ = [self.ZeroMap] * len(self.Classes)
self.Ew_ = [self.ZeroMap] * len(self.Classes)
self.Qu_ = [self.ZeroMap] * len(self.Classes)
self.Qw_ = [self.ZeroMap] * len(self.Classes)
self.Qa_ = [self.ZeroMap] * len(self.Classes)
self.Cap_ = [self.ZeroMap] * len(self.Classes)
self.Perc_ = [self.ZeroMap] * len(self.Classes)
self.Fa_ = [self.ZeroMap] * len(self.Classes)
self.Qf_ = [self.ZeroMap] * len(self.Classes)
self.Qfa_ = [self.ZeroMap] * len(self.Classes)
self.Qs_ = self.ZeroMap # for combined gw reservoir
self.Qflag_ = [self.ZeroMap] * len(self.Classes)
self.Qfcub_ = [self.ZeroMap] * len(self.Classes)
self.Ep_ = [self.ZeroMap] * len(self.Classes)
self.EpD_ = [self.ZeroMap] * len(self.Classes)
self.FrDur = [self.ZeroMap] * len(self.Classes)
self.Ft_ = [self.ZeroMap] * len(self.Classes)
self.JC_temp_ = [self.ZeroMap] * len(self.Classes)
self.JC_vpd_ = [self.ZeroMap] * len(self.Classes)
self.JC_rad_ = [self.ZeroMap] * len(self.Classes)
self.JC_sm_ = [self.ZeroMap] * len(self.Classes)
self.JC_k_ = [self.ZeroMap] * len(self.Classes)
def default_summarymaps(self):
"""
*Optional*
Return a default list of variables to report as summary maps in the outsum dir.
The ini file has more options, including average and sum
"""
return ['self.Altitude']
def dynamic(self):
"""
*Required*
This is where all the time dependent functions are executed. Time dependent
output should also be saved here.
:type self: object
"""
# TODO: change rainfall .tss files into grids
self.wf_updateparameters() # read the temperature map for each step (see parameters())
# self.logger.debug("Step: "+str(int(self.thestep + self._d_firstTimeStep))+"/"+str(int(self._d_nrTimeSteps)))
self.thestep = self.thestep + 1
#if self.thestep == 26:
# pdb.set_trace()
self.Si_t = copylist(self.Si)
self.Sw_t = copylist(self.Sw)
self.Su_t = copylist(self.Su)
self.Sa_t = copylist(self.Sa)
self.Sf_t = copylist(self.Sf)
self.Sfa_t = copylist(self.Sfa)
self.Ss_t = self.Ss
self.trackQ_t = copylist(self.trackQ) # copylist(self.trackQ)
self.convQu_t = [copylist(self.convQu[i]) for i in self.Classes] # copylist(self.convQu)
self.convQa_t = [copylist(self.convQa[i]) for i in self.Classes]
if self.IRURFR_L:
self.PotEvaporation = areatotal(self.PotEvaporation * self.percentArea, nominal(self.TopoId))
self.Precipitation = areatotal(self.Precipitation * self.percentArea, nominal(self.TopoId))
self.Temperature = areaaverage(self.Temperature * self.percentArea, nominal(self.TopoId))
self.PrecipTotal = self.Precipitation
if self.selectSw[0] > 0:
self.Precipitation = ifthenelse(self.Temperature >= self.Tt[0], self.PrecipTotal,0)
self.PrecipitationSnow = ifthenelse(self.Temperature < self.Tt[0], self.PrecipTotal,0)
self.EpDay2 = self.EpDay
self.EpDaySnow2 = self.EpDaySnow
#if self.thestep >= 45:
#pdb.set_trace()
for k in self.Classes:
# SNOW =================================================================================================
if self.selectSw[k]:
eval_str = 'reservoir_Sw.{:s}(self, k)'.format(self.selectSw[k])
else:
eval_str = 'reservoir_Sw.snow_no_reservoir(self, k)'
eval(eval_str)
# INTERCEPTION =========================================================================================
if self.selectSi[k]:
eval_str = 'reservoir_Si.{:s}(self, k)'.format(self.selectSi[k])
else:
eval_str = 'reservoir_Si.interception_no_reservoir(self, k)'
eval(eval_str)
# AGRICULTURE ZONE ======================================================================================
if self.selectSa[k]:
eval_str = 'reservoir_Sa.{:s}(self, k)'.format(self.selectSa[k])
else:
eval_str = 'reservoir_Sa.agriZone_no_reservoir(self, k)'
eval(eval_str)
# UNSATURATED ZONE ======================================================================================
if self.selectSu[k]:
eval_str = 'reservoir_Su.{:s}(self, k)'.format(self.selectSu[k])
else:
eval_str = 'reservoir_Su.unsatZone_no_reservoir(self, k)'
eval(eval_str)
# FAST RUNOFF RESERVOIR ===================================================================================
if self.selectSf[k]:
eval_str = 'reservoir_Sf.{:s}(self, k)'.format(self.selectSf[k])
else:
eval_str = 'reservoir_Sf.fastRunoff_no_reservoir(self, k)'
eval(eval_str)
#FAST AGRICULTURE DITCHES RUNOFF RESERVOIR ===================================================================================
if self.selectSfa[k]:
eval_str = 'reservoir_Sf.{:s}(self, k)'.format(self.selectSfa[k])
else:
eval_str = 'reservoir_Sf.fastAgriRunoff_no_reservoir(self, k)'
eval(eval_str)
# TOTAL RUNOFF =============================================================================================
self.Qftotal = sum([x * y for x, y in zip(self.Qf_, self.percent)]) + sum([x*y for x,y in zip(self.Qfa_,self.percent)])
# SLOW RUNOFF RESERVOIR ===========================================================================
if self.selectSs:
eval_str = 'reservoir_Ss.{:s}(self)'.format(self.selectSs)
else:
eval_str = 'reservoir_Ss.groundWater_no_reservoir(self)'
eval(eval_str)
# ROUTING
if self.selectRout:
eval_str = 'reservoir_Sf.{:s}(self)'.format(self.selectRout)
else:
eval_str = 'reservoir_Sf.noRouting(self)'
eval(eval_str)
# WATER BALANCE (per reservoir, per cell) ========================================================================================
self.QtlagWB = (self.Qtlag / self.surfaceArea) * 1000 * self.timestepsecs
self.convQuWB = [sum(self.convQu[i]) for i in self.Classes]
self.convQuWB_t = [sum(self.convQu_t[i]) for i in self.Classes]
self.convQaWB = [sum(self.convQa[i]) for i in self.Classes]
self.convQaWB_t = [sum(self.convQa_t[i]) for i in self.Classes]
self.trackQWB = (sum(self.trackQ) / self.surfaceArea) * 1000
self.trackQWB_t = (sum(self.trackQ_t) / self.surfaceArea) * 1000
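# Water balance check per cell: precipitation minus interception and
# unsaturated-zone evaporation, minus the lagged discharge, minus the storage
# changes of the Si/Su/Sf/Ss reservoirs and of the lag and routing buffers;
# a value near zero means the timestep conserved mass.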
self.WB = self.Precipitation - sum(multiply(self.Ei_, self.percent)) - sum(
multiply(self.Eu_, self.percent)) - self.QtlagWB - sum(
multiply(self.Si, self.percent)) + sum(multiply(self.Si_t, self.percent)) - sum(
multiply(self.Su, self.percent)) + sum(multiply(self.Su_t, self.percent)) - sum(
multiply(self.Sf, self.percent)) + sum(multiply(self.Sf_t, self.percent)) - sum(
multiply(self.Ss, self.percent)) + sum(
multiply(self.Ss_t, self.percent)) - self.trackQWB + self.trackQWB_t - sum(
multiply(self.convQuWB, self.percent)) + sum(multiply(self.convQuWB_t, self.percent))
# fluxes and states in m3/h
        self.P = areatotal(self.PrecipTotal / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Ei = areatotal(sum(multiply(self.Ei_, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Ea = areatotal(sum(multiply(self.Ea_, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Eu = areatotal(sum(multiply(self.Eu_, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Ew = areatotal(sum(multiply(self.Ew_, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.EwiCorr = areatotal(sum(multiply(multiply(self.Ew_, self.lamdaS / self.lamda), self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Qtot = self.QLagTot * self.timestepsecs
        self.SiWB = areatotal(sum(multiply(self.Si, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Si_WB = areatotal(sum(multiply(self.Si_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SuWB = areatotal(sum(multiply(self.Su, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Su_WB = areatotal(sum(multiply(self.Su_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SaWB = areatotal(sum(multiply(self.Sa, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Sa_WB = areatotal(sum(multiply(self.Sa_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SfWB = areatotal(sum(multiply(self.Sf, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Sf_WB = areatotal(sum(multiply(self.Sf_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SfaWB = areatotal(sum(multiply(self.Sfa, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Sfa_WB = areatotal(sum(multiply(self.Sfa_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SwWB = areatotal(sum(multiply(self.Sw, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Sw_WB = areatotal(sum(multiply(self.Sw_t, self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.SsWB = areatotal(self.Ss / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.Ss_WB = areatotal(self.Ss_t / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.convQuWB = areatotal(sum(multiply([sum(self.convQu[i]) for i in self.Classes], self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.convQu_WB = areatotal(sum(multiply([sum(self.convQu_t[i]) for i in self.Classes], self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.convQaWB = areatotal(sum(multiply([sum(self.convQa[i]) for i in self.Classes], self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.convQa_WB = areatotal(sum(multiply([sum(self.convQa_t[i]) for i in self.Classes], self.percent)) / 1000 * self.surfaceArea, nominal(self.TopoId))
        self.trackQWB = areatotal(sum(self.trackQ), nominal(self.TopoId))
        self.trackQ_WB = areatotal(sum(self.trackQ_t), nominal(self.TopoId))
self.QstateWB = areatotal(sum(self.Qstate) * self.timestepsecs, nominal(self.TopoId))
self.Qstate_WB = areatotal(sum(self.Qstate_t) * self.timestepsecs, nominal(self.TopoId))
# self.QstateWB = areatotal(sum(self.Qstate) * 0.0405, nominal(self.TopoId))
# self.Qstate_WB = areatotal(sum(self.Qstate_t) * 0.0405, nominal(self.TopoId))
# self.QstateWB = areatotal(self.Qstate, nominal(self.TopoId))
# self.Qstate_WB = areatotal(self.Qstate_t, nominal(self.TopoId))
        # WBtot in m3/s
        self.WBtot = (self.P - self.Ei + self.EwiCorr - self.Ew - self.Ea - self.Eu - self.Qtot
                      - self.SiWB + self.Si_WB - self.SuWB + self.Su_WB - self.SaWB + self.Sa_WB
                      - self.SwWB + self.Sw_WB - self.SfWB + self.Sf_WB - self.SfaWB + self.Sfa_WB
                      - self.SsWB + self.Ss_WB - self.convQuWB + self.convQu_WB
                      - self.convQaWB + self.convQa_WB - self.trackQWB + self.trackQ_WB
                      - self.QstateWB + self.Qstate_WB) / self.timestepsecs
# SUMMED FLUXES ======================================================================================
self.sumprecip = self.sumprecip + self.Precipitation # accumulated rainfall for water balance (m/h)
self.sumevap = self.sumevap + sum(multiply(self.Ei_, self.percent)) + sum(
multiply(self.Eu_, self.percent)) + sum(
multiply(self.Ea_, self.percent)) + sum(
multiply(self.Ew_, self.percent)) # accumulated evaporation for water balance (m/h)
try:
self.sumpotevap = self.sumpotevap + self.PotEvaporation # accumulated potential evaporation (m/h)
except:
self.sumpotevap = self.EpHour
self.sumrunoff = self.sumrunoff + self.Qtlag * 1000 * self.timestepsecs / self.surfaceArea # accumulated runoff for water balance (m/h)
self.sumwb = self.sumwb + self.WB
self.sumE = sum(multiply(self.Ei_, self.percent)) + sum(multiply(self.Eu_, self.percent))
# The main function is used to run the program from the command line
def main(argv=None):
"""
*Optional but needed it you want to run the model from the command line*
Perform command line execution of the model. This example uses the getopt
module to parse the command line options.
The user can set the caseName, the runDir, the timestep and the configfile.
"""
global multpars
caseName = "default"
runId = "run_default"
configfile = "wflow_topoflex.ini"
LogFileName="wflow.log"
_lastTimeStep = 10
_firstTimeStep = 1
fewsrun=False
runinfoFile="runinfo.xml"
timestepsecs = 86400
wflow_cloneMap = 'wflow_subcatch.map'
NoOverWrite=1
loglevel = logging.DEBUG
# This allows us to use the model both on the command line and to call
    # the model using the main function from another python script.
if argv is None:
argv = sys.argv[1:]
if len(argv) == 0:
usage()
return
opts, args = getopt.getopt(argv, 'C:S:T:Ic:s:R:F:fl:L:P:p:')
for o, a in opts:
if o == '-F':
runinfoFile = a
fewsrun = True
if o == '-C': caseName = a
if o == '-R': runId = a
if o == '-c': configfile = a
if o == '-s': timestepsecs = int(a)
if o == '-T': _lastTimeStep = int(a)
if o == '-S': _firstTimeStep = int(a)
if o == '-f': NoOverWrite = 0
if o == '-L': LogFileName = a
        if o == '-l': loglevel = getattr(logging, a)
if (len(opts) <= 1):
usage()
if fewsrun:
ts = getTimeStepsfromRuninfo(runinfoFile,timestepsecs)
starttime = getStartTimefromRuninfo(runinfoFile)
if (ts):
_lastTimeStep = ts# * 86400/timestepsecs
_firstTimeStep = 1
else:
print "Failed to get timesteps from runinfo file: " + runinfoFile
exit(2)
else:
        starttime = dt.datetime(1990, 1, 1)
if _lastTimeStep < _firstTimeStep:
print "The starttimestep (" + str(_firstTimeStep) +") is smaller than the last timestep (" + str(_lastTimeStep) + ")"
usage()
myModel = WflowModel(wflow_cloneMap, caseName,runId,configfile)
dynModelFw = wf_DynamicFramework(myModel, _lastTimeStep,firstTimestep=_firstTimeStep,datetimestart=starttime)
dynModelFw.createRunId(NoOverWrite=NoOverWrite,logfname=LogFileName,level=loglevel,doSetupFramework=False)
print str(dynModelFw.DT)
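    # Note: the model and framework are created again below; this discards the instance
    # configured above (including the runinfo start time), which appears to be leftover code.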
myModel = WflowModel(wflow_cloneMap, caseName, runId, configfile)
dynModelFw = wf_DynamicFramework(myModel, _lastTimeStep, firstTimestep=_firstTimeStep)
dynModelFw.createRunId(NoOverWrite=False, level=logging.DEBUG)
for o, a in opts:
if o == '-P':
left = a.split('=')[0]
right = a.split('=')[1]
configset(myModel.config,'variable_change_once',left,right,overwrite=True)
if o == '-p':
left = a.split('=')[0]
right = a.split('=')[1]
configset(myModel.config,'variable_change_timestep',left,right,overwrite=True)
if o == '-X': configset(myModel.config, 'model', 'OverWriteInit', '1', overwrite=True)
if o == '-I': configset(myModel.config, 'model', 'reinit', '1', overwrite=True)
if o == '-i': configset(myModel.config, 'model', 'intbl', a, overwrite=True)
if o == '-s': configset(myModel.config, 'model', 'timestepsecs', a, overwrite=True)
if o == '-x': configset(myModel.config, 'model', 'sCatch', a, overwrite=True)
if o == '-c': configset(myModel.config, 'model', 'configfile', a, overwrite=True)
if o == '-M': configset(myModel.config, 'model', 'MassWasting', "0", overwrite=True)
if o == '-Q': configset(myModel.config, 'model', 'ExternalQbase', '1', overwrite=True)
if o == '-U':
configset(myModel.config, 'model', 'updateFile', a, overwrite=True)
configset(myModel.config, 'model', 'updating', "1", overwrite=True)
if o == '-u':
zz = []
exec "zz =" + a
updateCols = zz
if o == '-E': configset(myModel.config, 'model', 'reInfilt', '1', overwrite=True)
if o == '-R': runId = a
if o == '-W': configset(myModel.config, 'model', 'waterdem', '1', overwrite=True)
dynModelFw._runInitial()
dynModelFw._runResume()
dynModelFw._runDynamic(_firstTimeStep, _lastTimeStep)
dynModelFw._runSuspend()
dynModelFw._wf_shutdown()
if __name__ == "__main__":
main()
| 2.390625
| 2
|
ddi_search_engine/Bio/expressions/transfac.py
|
dbmi-pitt/DIKB-Evidence-analytics
| 3
|
12776424
|
"""A Martel format to parse the output from transfac.
Formats:
format Format for a whole file.
"""
import warnings
warnings.warn("Bio.expressions was deprecated, as it does not work with recent versions of mxTextTools. If you want to continue to use this module, please get in contact with the Biopython developers at <EMAIL> to avoid permanent removal of this module from Biopython", DeprecationWarning)
import sys
from Martel import *
from Martel import RecordReader
blank_line = Opt(Spaces()) + AnyEol()
MATRIX_LINE = Str("Search for sites by WeightMatrix library:") + Spaces() + \
UntilEol("matrix_file") + AnyEol()
SEQUENCE_LINE = Str("Sequence file:") + Spaces() + \
UntilEol("sequence_file") + AnyEol()
PROFILE_LINE = Str("Site selection profile:") + Spaces() + \
UntilSep("profile_file", sep=" ") + Spaces() + \
UntilEol("profile_description") + AnyEol()
TITLE_LINE = Str("Inspecting sequence ID") + Spaces() + \
UntilSep("entryname", sep=" ") + Spaces() + \
UntilSep("dataclass", sep=";") + Str(";") + Spaces() + \
UntilSep("molecule", sep=";") + Str(";") + Spaces() + \
UntilSep("division", sep=";") + Str(";") + Spaces() + \
UntilSep("sequencelength", sep=" ") + Spaces() + Str("BP") + \
UntilEol() + AnyEol()
def SS(exp): # expression surrounded by optional spaces.
return Opt(Spaces()) + exp + Opt(Spaces())
DATA_LINE = \
SS(UntilSep("matrix_identifier", sep=" |")) + \
Str("|") + \
SS(UntilSep("position", sep=" ")) + \
SS(Str("(") + Group("strand", Any("+-")) + Str(")")) + \
Str("|") + \
SS(Float("core_match")) + \
Str("|") + \
SS(Float("matrix_match")) + \
Str("|") + \
Opt(Spaces()) + UntilEol("sequence") + AnyEol()
SEQUENCES_LENGTH_LINE = \
Spaces() + Str("Total sequences length=") + Integer("sequences_length") + \
AnyEol()
FOUND_SITES_LINE = \
Spaces() + Str("Total number of found sites=") + Integer("found_sites") + \
AnyEol()
SITE_FREQUENCY_LINE = \
Spaces() + Str("Frequency of sites per nucleotide=") + \
Float("sites_per_nucleotide") + AnyEol()
format = MATRIX_LINE + \
SEQUENCE_LINE + \
PROFILE_LINE + \
blank_line + \
TITLE_LINE + \
blank_line + \
Rep(DATA_LINE) + \
blank_line + \
SEQUENCES_LENGTH_LINE + \
blank_line + \
FOUND_SITES_LINE + \
blank_line + \
SITE_FREQUENCY_LINE
| 2.84375
| 3
|
jab/admin.py
|
pythonanywhere/jab
| 13
|
12776425
|
from django.contrib import admin
from jab.models import Post, SidebarItem
class PostAdmin(admin.ModelAdmin):
list_display = ('publication_date', 'title', 'status',)
ordering = ('-publication_date',)
admin.site.register(Post, PostAdmin)
class SidebarItemAdmin(admin.ModelAdmin):
pass
admin.site.register(SidebarItem, SidebarItemAdmin)
| 1.5
| 2
|
graphbrain/commands/info.py
|
nicolamelluso/graphbrain
| 1
|
12776426
|
<filename>graphbrain/commands/info.py
from graphbrain import *
def run(args):
hg = hgraph(args.hg)
print('atoms: {}'.format(hg.atom_count()))
print('edges: {}'.format(hg.edge_count()))
print('primary atoms: {}'.format(hg.primary_atom_count()))
print('primary edges: {}'.format(hg.primary_edge_count()))
| 2.3125
| 2
|
queue/queue.py
|
marcoswb/data-structure
| 0
|
12776427
|
class Queue:
    def __init__(self):
self.__max = 10
self.__first = 0
self.__last = 0
self.__structure = {}
def is_full(self):
"""
Verifica se a Queue está cheia
"""
result = (self.__last - self.__first) == self.__max
return result
def is_empty(self):
"""
Verifica se a Queue está vazia
"""
result = self.__first == self.__last
return result
def push(self, item):
"""
Insere um item na Queue
"""
        if self.is_full():
            return 'Cannot insert: the Queue is full!'
        else:
            last_position = self.__last % self.__max
            self.__structure[last_position] = item
            self.__last += 1
            return 'Item inserted!'
def pop(self):
"""
Remover um item da Queue
"""
        if self.is_empty():
            return 'Cannot remove: the Queue is empty!'
        else:
            first_position = self.__first % self.__max
first_item = self.__structure[first_position]
del self.__structure[first_position]
self.__first += 1
return first_item
def print(self):
"""
Imprime a Queue
"""
for key in self.__structure.keys():
print(self.__structure[key])
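# A minimal usage sketch (not part of the original module):
if __name__ == '__main__':
    queue = Queue()
    queue.push('first')
    queue.push('second')
    queue.print()            # first, second
    print(queue.pop())       # 'first' (FIFO order)
    print(queue.is_empty())  # False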
| 4.1875
| 4
|
tests/utils/test_get_laplacian.py
|
BUPT-GAMMA/GammaGL
| 0
|
12776428
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Time : 2022/05/00 16:47
# @Author : clear
# @FileName: test_get_laplacian.py
import tensorlayerx as tlx
from gammagl.utils.get_laplacian import get_laplacian
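# get_laplacian returns L = D - W in COO form: the appended self-loops hold the weighted
# degrees and existing edges get weight -w. With normalization='sym' it is
# I - D^{-1/2} W D^{-1/2}, and with 'rw' it is I - D^{-1} W; the assertions below spell
# these out numerically for a 3-node path graph.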
def test_get_laplacian():
edge_index = tlx.convert_to_tensor([[0, 1, 1, 2], [1, 0, 2, 1]], dtype=tlx.int64)
edge_weight = tlx.convert_to_tensor([1, 2, 2, 4], dtype=tlx.float32)
lap = get_laplacian(edge_index, 3, edge_weight)
assert tlx.convert_to_numpy(lap[0]).tolist() == [[0, 1, 1, 2, 0, 1, 2], [1, 0, 2, 1, 0, 1, 2]]
assert tlx.convert_to_numpy(lap[1]).tolist() == [-1, -2, -2, -4, 1, 4, 4]
lap_sym = get_laplacian(edge_index, 3, edge_weight, normalization='sym')
assert tlx.convert_to_numpy(lap_sym[0]).tolist() == tlx.convert_to_numpy(lap[0]).tolist()
assert tlx.convert_to_numpy(lap_sym[1]).tolist() == [-0.5, -1, -0.5, -1, 1, 1, 1]
lap_rw = get_laplacian(edge_index, 3, edge_weight, normalization='rw')
assert tlx.convert_to_numpy(lap_rw[0]).tolist() == tlx.convert_to_numpy(lap[0]).tolist()
assert tlx.convert_to_numpy(lap_rw[1]).tolist() == [-1, -0.5, -0.5, -1, 1, 1, 1]
| 2.421875
| 2
|
gallery/sitemap.py
|
VasiliyBologov/i-bologova.info
| 0
|
12776429
|
<reponame>VasiliyBologov/i-bologova.info
from django.contrib.sitemaps import Sitemap
# from .models import RequestModel
# import ClientPortal.settings as set
class StaticViewSitemap(Sitemap):
changefreq = "monthly"
priority = 0.8
def items(self):
# return RequestModel.objects.all()
return ['/', '/gallery']
def location(self, obj):
# return obj.date
return obj
| 1.75
| 2
|
neochain/_release_info.py
|
dharif23/neochain
| 1
|
12776430
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Source code meta data
__author__ = '<NAME>'
__email__ = '<EMAIL>'
# Version
__version__ = '1.1'
__release__ = '1.1'
| 1.125
| 1
|
tests/test_api.py
|
chrononyan/ok
| 148
|
12776431
|
<filename>tests/test_api.py
import datetime as dt
import dateutil.parser
import json
import random
from server.models import (Client, Comment, db, Assignment, Backup, Course,
                           User, Version, Group, )
from server.utils import encode_id
from tests import OkTestCase
class TestApi(OkTestCase):
def _test_backup(self, submit, delay=10, success=True):
self.setup_course()
email = self.user1.email
self.login(email)
user = User.lookup(email)
course = self.course
assignment = self.assignment
# Offset the due date & lock_dates
assignment.due_date = assignment.due_date + dt.timedelta(hours=delay)
assignment.lock_date = assignment.lock_date + dt.timedelta(days=delay)
okversion = Version(name="ok-client", current_version="v1.5.0",
download_link="http://localhost/ok")
db.session.add(okversion)
db.session.commit()
data = {
'assignment': assignment.name,
'messages': {
'file_contents': {
'hog.py': 'print("Hello world!")'
}
},
'submit': submit,
}
response = self.client.post('/api/v3/backups/?client_version=v1.5.0',
data=json.dumps(data),
headers=[('Content-Type', 'application/json')])
backup = Backup.query.filter(Backup.submitter_id == user.id).first()
assert backup is not None
if success or not submit:
assert response.json['data'] == {
'email': email,
'key': encode_id(backup.id),
'course': {
'id': course.id,
'offering': course.offering,
'display_name': course.display_name,
'active': course.active,
'timezone': 'America/Los_Angeles'
},
'assignment': assignment.name
}
self.assert_200(response)
if not success:
self.assert_403(response)
submit = False
assert response.json['data'] == {
'data': {
'backup': True,
'late': True
}
}
assert backup.assignment == assignment
assert backup.submitter_id == user.id
assert len(backup.messages) == len(data['messages'])
assert backup.submit == submit
def test_backup(self):
self._test_backup(False)
def test_backup_after_deadline(self):
self._test_backup(False, delay=-2)
def test_submit(self):
self._test_backup(True)
def test_submit_after_deadline(self):
self._test_backup(True, delay=-2, success=False)
def test_api(self):
response = self.client.get('/api/v3/')
self.assert_200(response)
assert response.json['data'] == {
'version': 'v3',
'url': '/api/v3/',
'documentation': 'https://okpy.github.io/documentation',
'github': 'https://github.com/okpy/ok'
}
assert response.json['message'] == 'success'
assert response.json['code'] == 200
def test_no_envelope(self):
response = self.client.get('/api/v3/?envelope=false')
self.assert_200(response)
assert 'data' not in response.json
assert 'message' not in response.json
assert 'code' not in response.json
assert response.json['version'] == 'v3'
def test_non_existant_api(self):
response = self.client.get('/api/v3/doesnotexist')
self.assert_404(response)
assert response.json['data'] == {}
assert response.json['code'] == 404
def test_get_backup(self):
self._test_backup(False)
backup = Backup.query.first()
submission_time = (self.assignment.due_date
- dt.timedelta(days=random.randrange(0, 10)))
backup.custom_submission_time = submission_time
response = self.client.get('/api/v3/backups/{}/'.format(backup.hashid))
self.assert_200(response)
course = backup.assignment.course
user_json = {
"email": backup.submitter.email,
"id": encode_id(backup.submitter_id),
}
response_json = response.json['data']
time_threshold = dt.timedelta(seconds=5)
self.assertAlmostEqual(dateutil.parser.parse(response_json['created']),
backup.created,
delta=time_threshold)
self.assertAlmostEqual(dateutil.parser.parse(response_json['submission_time']),
submission_time,
delta=time_threshold)
self.assertAlmostEqual(dateutil.parser.parse(response_json['messages'][0]['created']),
backup.created,
delta=time_threshold)
# Unset timestamps already tested.
del response_json['created']
del response_json['submission_time']
del response_json['messages'][0]['created']
assert response_json == {
"submitter": user_json,
"submit": backup.submit,
"group": [user_json],
"is_late": backup.is_late,
"external_files": [],
"assignment": {
"name": backup.assignment.name,
"course": {
"id": course.id,
"active": course.active,
"display_name": course.display_name,
"offering": course.offering,
"timezone": course.timezone.zone,
},
},
"id": backup.hashid,
"messages": [
{
"kind": "file_contents",
"contents": backup.files(),
},
],
}
def test_bad_hashid(self):
self.setup_course()
response = self.client.get('/api/v3/backups/xyzxyz/')
self.assert_401(response)
assert response.json['data'] == {}
assert response.json['code'] == 401
self.login(self.user1.email)
response = self.client.get('/api/v3/backups/xyzxyz/')
self.assert_404(response)
assert response.json['data'] == {}
assert response.json['code'] == 404
def test_version_api(self):
okversion = Version(name="ok", current_version="v1.5.0",
download_link="http://localhost/ok")
db.session.add(okversion)
ok2version = Version(name="ok2", current_version="v2.5.0",
download_link="http://localhost/ok2")
db.session.add(ok2version)
response = self.client.get('/api/v3/version/')
self.assert_200(response)
assert response.json['data'] == {
'results': [
{
"current_version": "v1.5.0",
"download_link": "http://localhost/ok",
"name": "ok"
},
{
"current_version": "v2.5.0",
"download_link": "http://localhost/ok2",
"name": "ok2"
}
]
}
assert response.json['message'] == 'success'
response = self.client.get('/api/v3/version/ok')
self.assert_200(response)
assert response.json['data'] == {
'results': [
{
"current_version": "v1.5.0",
"download_link": "http://localhost/ok",
"name": "ok"
}
]
}
self.setup_course()
self.login(self.user1.email)
response = self.client.post('/api/v3/version/ok', data={
'current_version': 'v1.5.1',
'download_link': 'http://localhost/versions/v1.5.1/ok',
})
self.assert_403(response)
self.login(self.staff1.email)
response = self.client.post('/api/v3/version/ok', data={
'current_version': 'v1.5.1',
'download_link': 'http://localhost/versions/v1.5.1/ok',
})
# Staff members do not have permission to edit versions
self.assert_403(response)
self.login(self.admin.email)
response = self.client.post('/api/v3/version/ok', data={
'current_version': 'v1.5.1',
'download_link': 'http://example.com/doesnotexist',
})
self.assert_400(response)
response = self.client.post('/api/v3/version/ok', data={
'current_version': 'v1.5.1',
'download_link': 'http://example.com',
})
self.assert_200(response)
response = self.client.get('/api/v3/version/')
assert response.json['data'] == {
'results': [
{
"current_version": "v1.5.1",
"download_link": "http://example.com",
"name": "ok"
},
{
"current_version": "v2.5.0",
"download_link": "http://localhost/ok2",
"name": "ok2"
}
]
}
response = self.client.get('/api/v3/version/ok')
self.assert_200(response)
assert response.json['data'] == {
'results': [
{
"current_version": "v1.5.1",
"download_link": "http://example.com",
"name": "ok"
}
]
}
def test_score_anon(self):
response = self.client.post('/api/v3/score/')
self.assert_401(response)
assert response.json['code'] == 401
def test_score_student(self):
self._test_backup(True)
email = self.user1.email
self.login(email)
user = User.lookup(email)
response = self.client.post('/api/v3/score/')
self.assert_400(response)
assert response.json['code'] == 400
backup = Backup.query.filter(Backup.submitter_id == user.id).first()
data = {'bid': encode_id(backup.id), 'kind': 'Total',
'score': 128.2, 'message': 'wow'}
response = self.client.post('/api/v3/score/', data=data)
self.assert_401(response)
assert response.json['code'] == 401
def test_export_user(self):
self._test_backup(True)
student = User.lookup(self.user1.email)
self.login(self.staff1.email)
backup = Backup.query.filter(Backup.submitter_id == student.id).first()
endpoint = '/api/v3/assignment/{0}/export/{1}'.format(self.assignment.name,
student.email)
response = self.client.get(endpoint)
self.assert_200(response)
backups = response.json['data']['backups']
self.assertEqual(len(backups), 1)
self.assertTrue('submission_time' in backups[0])
self.assertEqual(backups[0]['submission_time'], backups[0]['created'])
self.assertEqual(response.json['data']['count'], 1)
self.assertEqual(response.json['data']['limit'], 150)
self.assertEqual(response.json['data']['offset'], 0)
self.assertEqual(response.json['data']['has_more'], False)
response = self.client.get(endpoint + "?offset=20&limit=2")
self.assert_200(response)
backups = response.json['data']['backups']
self.assertEqual(len(backups), 0)
self.assertEqual(response.json['data']['count'], 1)
self.assertEqual(response.json['data']['limit'], 2)
self.assertEqual(response.json['data']['offset'], 20)
self.assertEqual(response.json['data']['has_more'], False)
def test_export_final(self):
self._test_backup(True)
student = User.lookup(self.user1.email)
backup = Backup.query.filter(Backup.submitter_id == student.id).first()
endpoint = '/api/v3/assignment/{0}/submissions/'.format(self.assignment.name)
response = self.client.get(endpoint)
self.assert_403(response)
self.login(self.staff1.email)
response = self.client.get(endpoint)
self.assert_200(response)
backups = response.json['data']['backups']
self.assertEqual(len(backups), 1)
self.assertEqual(backups[0]['is_late'], False)
self.assertEqual(len(backups[0]['group']), 1)
self.assertEqual(backups[0]['group'][0]['email'], self.user1.email)
self.assertEqual(len(backups[0]['messages']), 1)
self.assertEqual(response.json['data']['count'], 1)
self.assertEqual(response.json['data']['has_more'], False)
self.assertEqual(response.json['data']['offset'], 0)
response = self.client.get(endpoint + '?offset=1')
self.assert_200(response)
backups = response.json['data']['backups']
self.assertEqual(len(backups), 0)
self.assertEqual(response.json['data']['count'], 1)
self.assertEqual(response.json['data']['has_more'], False)
self.assertEqual(response.json['data']['offset'], 1)
def test_assignment_api(self):
self._test_backup(True)
student = User.lookup(self.user1.email)
endpoint = '/api/v3/assignment/{0}'.format(self.assignment.name)
# View a public assignment
response = self.client.get(endpoint)
self.assert_200(response)
# Change assignment to be hidden
self.assignment.visible = False
db.session.commit()
response = self.client.get(endpoint)
self.assert_403(response)
self.assignment.visible = True
db.session.commit()
self.login(self.staff1.email)
response = self.client.get(endpoint)
self.assert_200(response)
self.assertEqual(response.json['data']['name'], self.assignment.name)
# Hidden assignment, but should be visible to staff
self.assignment.visible = False
db.session.commit()
response = self.client.get(endpoint)
self.assert_200(response)
self.login(self.user1.email)
self.assignment.visible = False
db.session.commit()
response = self.client.get(endpoint)
self.assert_403(response)
def test_group_api(self):
self._test_backup(True)
self.logout()
student = User.lookup(self.user1.email)
Group.invite(self.user1, self.user2, self.assignment)
group = Group.lookup(self.user1, self.assignment)
group.accept(self.user2)
base_api = '/api/v3/assignment/{0}/group/{1}'
endpoint = base_api.format(self.assignment.name, self.user1.email)
response = self.client.get(endpoint)
self.assert_401(response)
self.login(self.user1.email)
response = self.client.get(endpoint)
self.assert_200(response)
members = response.json['data']['members']
self.assertEqual(len(members), 2)
assert 'email' in members[0]['user']
# Make sure user2 can access user1's endpoint
self.login(self.user2.email)
response = self.client.get(endpoint)
self.assert_200(response)
members = response.json['data']['members']
self.assertEqual(len(members), 2)
assert 'email' in members[1]['user']
self.login(self.staff1.email)
response = self.client.get(endpoint)
self.assert_200(response)
members = response.json['data']['members']
self.assertEqual(len(members), 2)
assert 'email' in members[0]['user']
# Login as some random user
self.login(self.user3.email)
response = self.client.get(endpoint)
self.assert_403(response)
# Check for existence of email
response = self.client.get(base_api.format(self.assignment.name, '<EMAIL>'))
self.assert_403(response)
self.login(self.admin.email)
response = self.client.get(base_api.format(self.assignment.name, '<EMAIL>'))
self.assert_404(response)
def test_score_staff(self):
self._test_backup(True)
user = User.lookup(self.user1.email)
self.login(self.staff1.email)
response = self.client.post('/api/v3/score/')
self.assert_400(response)
assert response.json['code'] == 400
backup = Backup.query.filter(Backup.submitter_id == user.id).first()
data = {'bid': encode_id(backup.id), 'kind': 'Total',
'score': 128.2, 'message': 'wow'}
response = self.client.post('/api/v3/score/', data=data)
self.assert_200(response)
assert response.json['code'] == 200
self.logout()
self.login(self.admin.email)
data = {'bid': encode_id(backup.id), 'kind': 'Total',
'score': 128.2, 'message': 'wow'}
response = self.client.post('/api/v3/score/', data=data)
self.assert_200(response)
assert response.json['code'] == 200
def test_comment_staff(self):
self._test_backup(True)
user = User.lookup(self.user1.email)
self.login(self.staff1.email)
backup = Backup.query.filter(Backup.submitter_id == user.id).first()
comment_url = "/api/v3/backups/{}/comment/".format(encode_id(backup.id))
response = self.client.post(comment_url)
self.assert_400(response) # Not all fields present
assert response.json['code'] == 400
data = {'line': 2, 'filename': 'fizzbuzz.py',
'message': 'wow'}
response = self.client.post(comment_url, data=data)
self.assert_200(response)
assert response.json['code'] == 200
self.logout()
self.login(self.admin.email)
data = {'line': 2, 'filename': 'fizzbuzz.py',
'message': 'wow'}
response = self.client.post(comment_url, data=data)
self.assert_200(response)
assert response.json['code'] == 200
# Check that another student is not able to comment
self.login(self.user2.email)
data = {'line': 2, 'filename': 'fizzbuzz.py',
'message': 'wow'}
response = self.client.post(comment_url, data=data)
self.assert_403(response)
assert response.json['code'] == 403
def test_get_comments(self):
self._test_backup(True)
user = User.lookup(self.user1.email)
staff = User.lookup(self.staff1.email)
backup = Backup.query.filter(Backup.submitter_id == user.id).first()
comment_url = "/api/v3/backups/{}/comment/".format(encode_id(backup.id))
        comment1 = Comment(
            backupid=backup,
            author_id=staff.id,
            filename='fizzbuzz.py',
            line=2,
            message='hello world'
        )
        comment2 = Comment(
            backupid=backup,
            author_id=staff.id,
            filename='fizzbuzz.py',
            line=5,
            message='wow'
        )
db.session.add(comment1)
db.session.add(comment2)
        # check that a student can view comments on their own backup
        self.login(self.user1.email)
        response = self.client.get(comment_url)
        self.assert_200(response)
        self.assertEqual(len(response.json['data']['comments']), 2)
        self.assertEqual(response.json['data']['comments'][0]['message'], 'hello world')
        self.assertEqual(response.json['data']['comments'][1]['message'], 'wow')
self.logout()
        # check that staff can access the comments
self.login(self.staff1.email)
response = self.client.get(comment_url)
self.assert_200(response)
self.logout()
        # check that another student cannot see comments on someone else's backup
self.login(self.user2.email)
response = self.client.get(comment_url)
self.assert_403(response)
self.logout()
def test_create_assignment(self):
self.setup_course()
self.login(self.staff1.email)
response = self.client.post("/api/v3/assignment/" + self.course.offering + "/newassignment", json={
'display_name': 'API Test Assignment',
'due_date': '2016-11-07T06:59:59',
'lock_date': '2016-11-08T06:59:59',
})
self.assert200(response)
assignment = Assignment.query.filter_by(name=self.course.offering + '/newassignment').one()
self.assertEqual(assignment.display_name, 'API Test Assignment')
self.assertEqual(assignment.due_date.day, 7)
response = self.client.post("/api/v3/assignment/" + self.course.offering + "/newassignment", json={
'display_name': 'API Test Assignment',
'due_date': '2016-11-10T06:59:59',
'lock_date': '2016-11-11T06:59:59',
})
self.assert200(response)
assignment = Assignment.query.filter_by(name=self.course.offering + '/newassignment').one()
self.assertEqual(assignment.due_date.day, 10)
self.login(self.user1.email)
response = self.client.post("/api/v3/assignment/" + self.course.offering + "/newassignment2", json={
'display_name': 'API Test Assignment',
'due_date': '2016-11-07T06:59:59',
'lock_date': '2016-11-08T06:59:59',
})
self.assert403(response)
assignment = Assignment.query.filter_by(name=self.course.offering + '/newassignment2').one_or_none()
self.assertEqual(assignment, None)
def test_user_api(self):
self._test_backup(True)
self.logout()
student = User.lookup(self.user1.email)
def test_both_endpoints(user):
base_api = '/api/v3/user/{0}'
user1_endpoint = base_api.format(user.email)
current_user_endpoint = base_api.format('')
current = self.client.get(current_user_endpoint)
specific = self.client.get(user1_endpoint)
return current, specific
current, specific = test_both_endpoints(student)
self.assert_401(current)
self.assert_401(specific)
# Should be able to view self
self.login(self.user1.email)
current, specific = test_both_endpoints(student)
self.assert_200(current)
self.assert_200(specific)
members = current.json['data']['participations']
self.assertEqual(len(members), 1)
self.assertEqual(current.json['data'], specific.json['data'])
# Staff don't get permission
self.login(self.staff1.email)
current, specific = test_both_endpoints(student)
self.assert_200(current)
self.assert_403(specific)
# Login as some random user
self.login(self.user3.email)
current, specific = test_both_endpoints(student)
self.assert_200(current)
self.assert_403(specific)
        # Admins should have access
self.login(self.admin.email)
current, specific = test_both_endpoints(student)
self.assert_200(current)
self.assert_200(specific)
self.assertEqual(specific.json['data']['email'], student.email)
# Lab Assistants don't have access
self.login(self.lab_assistant1.email)
current, specific = test_both_endpoints(student)
self.assert_200(current)
self.assert_403(specific)
def test_course_enrollment(self):
self._test_backup(True)
student = User.lookup(self.user1.email)
courses = student.enrollments()
course = courses[0]
student_endpoint = '/api/v3/course/cal/cs61a/sp16/enrollment'
self.login(self.staff1.email)
response = self.client.get(student_endpoint)
self.assert_200(response)
student_emails = [s['email'] for s in response.json['data']['student']]
self.assertEqual(self.user1.email in student_emails, True)
self.login(self.user1.email)
response = self.client.get(student_endpoint)
self.assert_403(response)
def test_course_assignments(self):
self._test_backup(True)
student = User.lookup(self.user1.email)
courses = student.enrollments()
course = courses[0]
student_endpoint = '/api/v3/course/cal/cs61a/sp16/assignments'
anon_response = self.client.get(student_endpoint)
self.assert_200(anon_response)
active_assignments = len([a for a in self.course.assignments if a.active])
self.assertEqual(active_assignments, len(anon_response.json['data']['assignments']))
self.login(self.staff1.email)
auth_response = self.client.get(student_endpoint)
self.assert_200(auth_response)
self.assertEqual(anon_response.json['data'], auth_response.json['data'])
def test_client(self):
self.setup_course()
self.login(self.staff1.email)
db.session.add(Client(
name='Test Client',
description='',
user=self.staff1,
client_id='test_client',
client_secret='secret',
redirect_uris=[],
default_scopes=['all'],
is_confidential=False
))
response = self.client.get('/api/v3/client/test_client')
self.assertEqual(
response.json['data'],
{'allowed_redirects': [], 'client_id': 'test_client', 'client_name': '<NAME>', 'description': '',
'is_confidential': False, 'owner_email': '<EMAIL>'}
)
response = self.client.post('/api/v3/client/test_client/redirect_urls', json={'url': 'test'})
self.assert_200(response)
response = self.client.get('/api/v3/client/test_client')
self.assertEqual(response.json['data']['allowed_redirects'], ['test'])
self.login(self.admin.email)
response = self.client.post('/api/v3/client/test_client/redirect_urls', json={'url': 'test2'})
self.assert_200(response)
response = self.client.get('/api/v3/client/test_client')
self.assertEqual(response.json['data']['allowed_redirects'], ['test', 'test2'])
self.login(self.staff2.email)
response = self.client.post('/api/v3/client/test_client/redirect_urls', json={'url': 'test3'})
self.assert_403(response)
response = self.client.get('/api/v3/client/test_client')
self.assert_403(response)
def test_course_grades(self):
self._test_backup(True)
self.login(self.staff1.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/grades'
response = self.client.get(endpoint)
self.assert_200(response)
self.login(self.staff2.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/grades'
response = self.client.get(endpoint)
self.assert_200(response)
self.login(self.user1.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/grades'
response = self.client.get(endpoint)
self.assert_403(response)
self.login(self.user6.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/grades'
response = self.client.get(endpoint)
self.assert_403(response)
self.login(self.admin.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/grades'
response = self.client.get(endpoint)
self.assert_200(response)
def test_course_roster(self):
self._test_backup(True)
self.login(self.staff1.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/roster'
response = self.client.get(endpoint)
self.assert_200(response)
self.login(self.staff2.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/roster'
response = self.client.get(endpoint)
self.assert_200(response)
self.login(self.user1.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/roster'
response = self.client.get(endpoint)
self.assert_403(response)
self.login(self.user6.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/roster'
response = self.client.get(endpoint)
self.assert_403(response)
self.login(self.admin.email)
endpoint = '/api/v3/course/cal/cs61a/sp16/roster'
response = self.client.get(endpoint)
self.assert_200(response)
| 2.4375
| 2
|
pydm/tests/data_plugins/test_calc_plugin.py
|
jbellister-slac/pydm
| 0
|
12776432
|
from typing import Any
import pytest
from pytestqt.qtbot import QtBot
from qtpy.QtCore import Signal, QObject
import numpy as np
from pydm.application import PyDMApplication
from pydm.data_plugins.calc_plugin import epics_string, epics_unsigned
from pydm.widgets.channel import PyDMChannel
@pytest.mark.parametrize(
"input_string,expected",
[
(np.array((0x6f, 0x6b, 0x61, 0x79, 0, 42), dtype=np.int8), "okay"),
(np.array((0x6f, 0x6b, 0x61, 0x79), dtype=np.int8), "okay"),
(np.array((0, 0x6f, 0x6b, 0x61, 0x79, 0, 42, 42), dtype=np.int8), ""),
],
)
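# epics_string treats the int8 array as a NUL-terminated char waveform: bytes up to the
# first 0 decode as ASCII, so a leading 0 yields the empty string (third case above).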
def test_epics_string(input_string: str, expected: str):
assert epics_string(input_string) == expected
@pytest.mark.parametrize(
"input_int,bits,expected",
[
(100, 32, 100),
(-1, 8, 255),
(-2, 4, 0b1110),
],
)
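# epics_unsigned reinterprets a signed integer as an unsigned value of the given bit
# width, e.g. -1 over 8 bits is 0xFF == 255 and -2 over 4 bits is 0b1110.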
def test_epics_unsigned(input_int: int, bits: int, expected: int):
assert epics_unsigned(input_int, bits) == expected
@pytest.mark.parametrize(
"calc,input1,expected1,input2,expected2",
[
('val + 3', 0, 3, 1, 4),
('int(np.abs(val))', -5, 5, -10, 10),
('math.floor(val)', 3.4, 3, 5.7, 5),
('epics_string(val)',
np.array((0x61, 0), dtype=np.int8), 'a',
np.array((0x62, 0), dtype=np.int8), 'b'),
('epics_unsigned(val, 8)', -1, 255, -2, 254),
]
)
def test_calc_plugin(
qapp: PyDMApplication,
qtbot: QtBot,
calc: str,
input1: Any,
expected1: Any,
input2: Any,
expected2: Any,
):
class SigHolder(QObject):
sig = Signal(type(input1))
sig_holder = SigHolder()
type_str = str(type(input1))
local_addr = f'loc://test_calc_plugin_local_{calc}'
local_ch = PyDMChannel(
address=f'{local_addr}?type={type_str}&init={input1}',
value_signal=sig_holder.sig,
)
local_ch.connect()
calc_values = []
def new_calc_value(val: Any):
calc_values.append(val)
calc_addr = f'calc://test_calc_plugin_calc_{calc}'
calc_ch = PyDMChannel(
address=f'{calc_addr}?val={local_addr}&expr={calc}',
value_slot=new_calc_value,
)
calc_ch.connect()
sig_holder.sig.emit(input1)
def has_value():
assert len(calc_values) >= 1
qtbot.wait_until(has_value)
assert calc_values[0] == expected1
calc_values.clear()
sig_holder.sig.emit(input2)
qtbot.wait_until(has_value)
assert calc_values[0] == expected2
| 2.25
| 2
|
tests/test_storage.py
|
class4kayaker/stray_recipe_manager
| 0
|
12776433
|
import io
import pytest
import stray_recipe_manager.units
import stray_recipe_manager.storage
from stray_recipe_manager.recipe import (
CommentedRecipe,
Recipe,
Ingredient,
RecipeStep,
)
ureg = stray_recipe_manager.units.default_unit_registry
@pytest.fixture(scope="module")
def toml_coding():
registry = stray_recipe_manager.units.UnitHandler(ureg)
toml_load = stray_recipe_manager.storage.TOMLCoding(registry)
return toml_load
@pytest.mark.parametrize(
"recipe",
[
Recipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(description="Place water on stove until boiling")
],
),
CommentedRecipe(
name="Boiling Water",
comments="Utterly basic",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(description="Place water on stove until boiling")
],
),
Recipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(
description="Place water on stove until boiling",
time=10 * ureg.min,
)
],
),
CommentedRecipe(
name="Boiling Water",
makes=Ingredient(item="Boiling water", quantity=1.0 * ureg.cup),
comments="Utterly basic",
tools=["Saucepan"],
ingredients=[Ingredient(item="Water", quantity=1 * ureg.cup,)],
steps=[
RecipeStep(
description="Place water on stove until boiling",
time=10 * ureg.min,
)
],
),
],
)
def test_recipe_round_trip(recipe, toml_coding):
# type: (Recipe, stray_recipe_manager.storage.TOMLCoding) -> None
fstream = io.StringIO()
toml_coding.write_recipe_to_toml_file(fstream, recipe)
fstream.seek(0)
print(fstream.getvalue())
n_recipe = toml_coding.load_recipe_from_toml_file(fstream)
assert recipe == n_recipe
| 2.171875
| 2
|
application/app.py
|
ericdaat/flask-template
| 0
|
12776434
|
<filename>application/app.py
import os
from flask import Flask
from application import errors, cli
from application.model import db, session
def create_app(config=None):
""" Flask app factory that creates and configure the app.
Args:
test_config (str): python configuration filepath
Returns: Flask application
"""
app = Flask(__name__)
app.config.from_pyfile('config.py')
if config:
app.config.update(config)
db.init_app(app)
# instance dir
try:
os.makedirs(app.instance_path)
except OSError:
pass
# register cli commands
app.cli.add_command(cli.init_db_command)
# HTTP errors
app.register_error_handler(404, errors.page_not_found)
# blueprints
from application.blueprints import home
app.register_blueprint(home.bp)
# request handlers
@app.after_request
def commit_db_session(response):
session.commit()
return response
return app
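# Minimal usage sketch (the config keys are illustrative, not defined by this module):
#   app = create_app({'TESTING': True})
#   app.run()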
| 2.6875
| 3
|
python/scrutiny/server/device/device_handler.py
|
scrutinydebugger/scrutiny
| 1
|
12776435
|
import copy
import queue
import time
import logging
import binascii
from enum import Enum
from scrutiny.server.protocol.comm_handler import CommHandler
from scrutiny.server.protocol import Protocol, ResponseCode
from scrutiny.server.device.device_searcher import DeviceSearcher
from scrutiny.server.device.request_dispatcher import RequestDispatcher
from scrutiny.server.device.heartbeat_generator import HeartbeatGenerator
from scrutiny.core.firmware_id import PLACEHOLDER as DEFAULT_FIRMWARE_ID
from scrutiny.server.server_tools import Timer
DEFAULT_FIRMWARE_ID_ASCII = binascii.hexlify(DEFAULT_FIRMWARE_ID).decode('ascii')
class DeviceHandler:
    DEFAULT_PARAMS = {
        'response_timeout': 1.0,  # If a response takes more than this delay to arrive after a request is sent, drop it.
        'heartbeat_timeout': 4.0
    }
class FsmState(Enum):
INIT = 0
DISCOVERING = 1
CONNECTING = 2
POLLING_INFO = 3
def __init__(self, config, datastore):
self.logger = logging.getLogger(self.__class__.__name__)
self.config = copy.copy(self.DEFAULT_PARAMS)
self.config.update(config)
self.datastore = datastore
self.dispatcher = RequestDispatcher()
self.protocol = Protocol(1,0)
self.device_searcher = DeviceSearcher(self.protocol, self.dispatcher)
self.heartbeat_generator = HeartbeatGenerator(self.protocol, self.dispatcher)
self.comm_handler = CommHandler(self.config)
self.heartbeat_generator.set_interval(max(0.5, self.config['heartbeat_timeout'] * 0.75))
self.comm_broken = False
self.device_id = None
self.reconnect_timer = Timer(1)
self.reset_comm()
def reset_comm(self):
if self.comm_broken and self.device_id is not None:
self.logger.info('Communication with device stopped. Restarting')
self.connected = False
self.fsm_state = self.FsmState.INIT
self.last_fsm_state = self.FsmState.INIT
self.active_request_record = None
self.device_id = None
self.comm_broken = False
self.device_searcher.stop()
self.heartbeat_generator.stop()
self.session_id = None
self.reconnect_timer.stop()
def init_comm(self):
if self.config['link_type'] == 'none':
return
if self.config['link_type'] == 'udp':
from .links.udp_link import UdpLink
link_class = UdpLink
elif self.config['link_type'] == 'dummy':
from .links.dummy_link import DummyLink
link_class = DummyLink
else:
raise ValueError('Unknown link type %s' % self.config['link_type'])
        device_link = link_class(self.config['link_config'])  # instantiate the configured link class
self.comm_handler.open(device_link)
self.reset_comm()
def stop_comm(self):
if self.comm_handler is not None:
self.comm_handler.close()
self.reset_comm()
def refresh_vars(self):
pass
def process(self):
self.device_searcher.process()
self.heartbeat_generator.process()
self.handle_comm() # Make sure request and response are being exchanged with the device
self.do_state_machine()
def do_state_machine(self):
if self.comm_broken:
self.fsm_state = self.FsmState.INIT
        if self.connected:
            # The original expression here discarded its result; presumably the intent is
            # to flag the comm as broken once the heartbeat times out.
            if time.time() - self.heartbeat_generator.last_valid_heartbeat_timestamp() > self.config['heartbeat_timeout']:
                self.comm_broken = True
# === FSM ===
state_entry = True if self.fsm_state != self.last_fsm_state else False
next_state = self.fsm_state
if self.fsm_state == self.FsmState.INIT:
self.reset_comm()
next_state = self.FsmState.DISCOVERING
#============= DISCOVERING =====================
elif self.fsm_state == self.FsmState.DISCOVERING:
if state_entry:
self.device_searcher.start()
found_device_id = self.device_searcher.get_found_device_ascii()
if found_device_id is not None:
if self.device_id is None:
self.logger.info('Found a device - %s' % found_device_id)
self.device_id = found_device_id
if found_device_id == DEFAULT_FIRMWARE_ID_ASCII:
self.logger.warning("Firmware ID of this device is a default placeholder. Firmware might not have been tagged with a valid ID in the build toolchain.")
if self.device_id is not None:
self.device_searcher.stop()
next_state = self.FsmState.CONNECTING
#============= CONNECTING =====================
elif self.fsm_state == self.FsmState.CONNECTING:
if state_entry:
self.comm_handler.reset() # Clear any active transmission. Just for safety
if not self.comm_handler.waiting_response():
if self.reconnect_timer.is_stopped() or self.reconnect_timer.is_timed_out():
self.comm_handler.send_request(self.protocol.comm_connect())
if self.comm_handler.has_timed_out():
self.comm_broken = True
elif self.comm_handler.response_available():
response = self.comm_handler.get_response()
if response.code == ResponseCode.OK:
self.session_id = self.protocol.parse_response(response)['session_id']
self.logger.debug("Session ID set : 0x%08x" % self.session_id)
self.heartbeat_generator.set_session_id(self.session_id)
                    self.heartbeat_generator.start() # Sends recurring heartbeat requests. If one fails (timeout), comm will be reset
self.connected = True
next_state = self.FsmState.POLLING_INFO
else:
self.reconnect_timer.start()
elif self.fsm_state == self.FsmState.POLLING_INFO:
pass
# ==== FSM END ====
self.last_fsm_state = self.fsm_state
if next_state != self.fsm_state:
self.logger.debug('Moving FSM to state %s' % next_state)
self.fsm_state = next_state
def handle_comm(self):
self.comm_handler.process() # Process reception
if not self.comm_handler.is_open():
return
        if self.active_request_record is None: # We haven't sent a request
record = self.dispatcher.next()
if record is not None: # A new request to send
self.active_request_record = record
self.comm_handler.send_request(record.request)
else:
if self.comm_handler.has_timed_out(): # The request we have sent has timed out.. no response
self.comm_broken = True
self.comm_handler.clear_timeout()
self.active_request_record.complete(success=False)
            elif self.comm_handler.waiting_response(): # We are still waiting for a response
if self.comm_handler.response_available(): # We got a response! yay
response = self.comm_handler.get_response()
try:
data = self.protocol.parse_response(response)
self.active_request_record.complete(success=True, response=response, response_data=data) # Valid response if we get here.
except Exception as e: # Malformed response.
self.comm_broken = True
self.logger.error("Invalid response received. %s" % str(e))
self.active_request_record.complete(success=False)
else: # should never happen - paranoid check.
self.comm_broken = True
self.comm_handler.reset()
self.active_request_record.complete(success=False)
if self.active_request_record.is_completed(): # If we have called a callback, then we are done with this request.
self.active_request_record = None
self.comm_handler.process() # Process new transmission now.
| 2.265625
| 2
|
ImgSegment06.py
|
mmtaksuu/OpenCV_Python_Tutorial
| 2
|
12776436
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
def main():
path = "C:\\Users\\enesa\\Documents\\MATLAB\\blobs_objects.jpg"
img = cv2.imread(path, 1)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
filter1 = np.array(([0, -1, 0], [-1, 5, -1], [0, -1, 0]), np.float32) #Sharpening Filter
output = cv2.filter2D(img, -1, filter1) #convolution filter
blur = cv2.GaussianBlur(img,(5,5),0)
gray = cv2.cvtColor(blur, cv2.COLOR_BGR2GRAY)
_, thresh = cv2.threshold(gray,170,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
###########################################################################################################################
# Create a simple filter. The kernel slides through the image (as in 2D convolution).
kernel = np.ones((3, 3), np.uint8)
# Create a Rectangular Structuring Element
se1 = cv2.getStructuringElement(cv2.MORPH_RECT,(5,5))
# Create a Elliptical Structuring Element
se2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(5,5))
# Apply Erosion method over the image with kernel
erosion = cv2.erode(thresh,se1,iterations = 1)
# Apply Dilation method over the image with kernel
dilation = cv2.dilate(thresh,se2,iterations = 1)
# Noise removal using Morphological closing operation
closing = cv2.morphologyEx(dilation, cv2.MORPH_CLOSE, kernel, iterations = 4)
# Noise removal using Morphological opening operation
opening = cv2.morphologyEx(erosion, cv2.MORPH_OPEN, kernel, iterations = 1)
###########################################################################################################################
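    # Inverting makes the dark blobs white so cv2.findContours treats them as foreground.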
dilation = 255 - dilation # Complementing Operation
_, contours, _ = cv2.findContours(dilation, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
print("{} Objects have detected!".format(len(contours)))
original = cv2.imread(path, 1)
original = cv2.cvtColor(original, cv2.COLOR_BGR2RGB)
sayac = 0
for i in contours:
# perimeter = cv2.arcLength(i,True)
# if perimeter > 20:
        sayac = sayac + 1
#cv2.drawContours(img, contours, -1, (0, 0, 255), 2)
x,y,w,h = cv2.boundingRect(i)
cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
cv2.putText(img, str(sayac), (x+10, y+15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
        #plt.plot(cx, cy, color='red', marker='o', linestyle='dashed', linewidth=2, markersize=12) # place an 'x' marker at the chosen point
#cv2.putText(img, 'x', (cx, cy), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 255), 2)
#cv2.putText(closing, str(sayac), (cx, cy), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (0, 0, 255), 1)
print("{} Objects have drown!".format(sayac))
###########################################################################################################################
# output = [original, img]
# titles = ['Original', 'Contours']
# for i in range(2):
# plt.subplot(1, 2, i+1)
# plt.imshow(output[i])
# plt.title(titles[i])
# plt.xticks([])
# plt.yticks([])
cv2.imshow('Orignal Image', img)
#cv2.imshow('Erosion Image', erosion)
cv2.imshow('Dilation Image', dilation)
cv2.imshow('Closing Image', closing)
cv2.imshow('Opening Image', opening)
plt.show()
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
| 3.484375
| 3
|
dolfyn/_version.py
|
MRE-Code-Hub/dolfyn
| 28
|
12776437
|
__version__ = '0.12.1'
__prog_name__ = 'DOLfYN'
__version_date__ = 'May-07-2020'
def ver2tuple(ver):
if isinstance(ver, tuple):
return ver
# ### Previously used FLOATS for 'save-format' versioning.
# Version 1.0: underscore ('_') handled inconsistently.
# Version 1.1: '_' and '#' handled consistently in group naming:
# '#' is for groups that should be excluded, unless listed explicitly.
# '##' and ending with '##' is for specially handled groups.
# Version 1.2: now using time_array.
# '_' is for essential groups.
# Version 1.3: Now load/unload is fully symmetric (needed for __eq__ tests)
# Added _config_type to i/o.
if isinstance(ver, (float, int)):
return (0, int(ver), int(round(10 * (ver % 1))))
# ### Now switched to use pkg_version STRING.
# Switch to pkg_version STRING (pkg_version 0.6)
# Now 'old versions' become '0.x.y'
# ver becomes a tuple.
out = []
for val in ver.split('.'):
try:
val = int(val)
except ValueError:
pass
out.append(val)
return tuple(out)
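# Examples: ver2tuple('0.12.1') -> (0, 12, 1); ver2tuple(1.2) -> (0, 1, 2) (legacy float form).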
version_info = ver2tuple(__version__)
| 2.5625
| 3
|
app_APPium_test/test_wework.py
|
XuXuClassMate/My_Test_PyProject
| 0
|
12776438
|
from time import sleep
from appium import webdriver
from appium.webdriver.common.mobileby import MobileBy
phone_info = {
"platformName": "android",
"platformVersion": "8.1",
"deviceName": "S4F6R19C18016391",
"appPackage": "com.tencent.wework",
"appActivity": ".launch.LaunchSplashActivity t9",
"noReset": "true",
# "dontStopAppOnReset": "true",
"skipDeviceInitialization": "true",
"resetKeyBoard": "true",
"waitFoeIdleTimeout": 0
}
class Test_wework:
def setup(self):
self.driver = webdriver.Remote("http://localhost:4723/wd/hub", phone_info)
self.driver.implicitly_wait(10)
def teardown(self):
sleep(5)
self.driver.quit()
def test_wework_Clockin(self):
self.driver.find_element(MobileBy.XPATH,
"//*[@resource-id='com.tencent.wework:id/en5' and @text='工作台']").click()
self.driver.find_element_by_android_uiautomator('new UiScrollable(new UiSelector().scrollable(true).instance('
'0)).scrollIntoView(new UiSelector().text("打卡").instance('
'0));').click()
self.driver.find_element(MobileBy.XPATH, '//*[@text="外出打卡"]').click()
self.driver.find_element(MobileBy.XPATH, '//*[contains(@text, "次外出")]').click()
ele = self.driver.find_element(MobileBy.ID, 'com.tencent.wework:id/pu').text
print(ele)
assert ele == "外出打卡成功"
def test_wework_jointeam(self):
add_name = "袁不婷"
add_num = "1008611"
add_another_name = "沙雕"
add_iphone_num = "13160018191"
self.driver.find_element(MobileBy.XPATH, '//*[@text="通讯录"]').click()
self.driver.find_element(MobileBy.ANDROID_UIAUTOMATOR, 'new UiScrollable(new UiSelector().scrollable('
'true).instance(0)).scrollIntoView(new UiSelector('
').text("添加成员").instance(0));').click()
self.driver.find_element(MobileBy.XPATH, '//*[@text="手动输入添加"]').click()
# self.driver.find_element(MobileBy.XPATH, '//*[@text="完整输入"]').click()
name = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"姓名")]/../android.widget.EditText')
name.send_keys(add_name)
num = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"帐号")]/../android.widget.EditText')
num.send_keys(add_num)
another_name = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"别名")]/../android.widget.EditText')
another_name.send_keys(add_another_name)
iphone_num = self.driver.find_element(MobileBy.XPATH, '//*[contains(@text,"手机号")]')
iphone_num.send_keys(add_iphone_num)
self.driver.find_element(MobileBy.ANDROID_UIAUTOMATOR, 'new UiScrollable(new UiSelector().scrollable('
'true).instance(0)).scrollIntoView(new UiSelector('
').text("保存").instance(0));').click()
ele = self.driver.find_element(MobileBy.XPATH, '//*[@class="android.widget.Toast"]').text
assert "添加成功" == ele
self.driver.get_screenshot_as_file('路径.png')
self.driver.start_recording_screen()
self.driver.stop_recording_screen()
| 2.6875
| 3
|
disassembler.py
|
dasien/mfc6502
| 0
|
12776439
|
<filename>disassembler.py
from mfcbase import MFCBase
class Disassembler(MFCBase):
def __init__(self, infile, outfile, startaddr, includecounter, counterinfile):
# This variable handles the writing of the start position of file.
self.__programstartset = False
# Superclass init.
super(Disassembler, self).__init__(infile, outfile, startaddr, includecounter, counterinfile)
# Load the hex values.
self.loadhexcodes()
def disassemble(self):
# Parse the input file.
self.parse()
# Parse the commands into hex codes.
self.parsecommands()
def parsecommands(self):
# Loop through file.
for sourceline in super(Disassembler, self).sourcelines:
# Split into parts based on spaces.
lineparts = sourceline.split()
# Check to see if we have a start address handled.
if not self.__programstartset:
# Set the program listing start address (if possible).
self.setprogramstartaddress(lineparts)
# Write file header.
self.writeheader()
# Check to see if we should skip the program counter.
if self.counterinfile:
# Get the opcode and operand, skipping the hex address.
opcode, operand = self.getopcodeandoperand(lineparts, 1)
else:
# Get the opcode and operand.
opcode, operand = self.getopcodeandoperand(lineparts, 0)
# Get command formatter based on operand.
command = self.opcodes[int(opcode, 16)]
# Call formatting and output functions.
command[1](command[0], operand)
def getopcodeandoperand(self, line, opcodepos):
operand = None
# Get the next byte - this is the opcode.
opcode = line[opcodepos]
# Based on the number of tokens we have, grab operand.
if len(line) == (opcodepos + 2):
# There is a 1 byte operand.
operand = int(line[opcodepos + 1], 16)
elif len(line) == (opcodepos + 3):
# There is a 2 byte operand. This is little endian so we need the last part first.
operand = int(line[opcodepos + 2] + line[opcodepos + 1], 16)
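            # e.g. the bytes "AD 34 12" yield operand int("12" + "34", 16) == 0x1234.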
# Return the opcode and operand.
return opcode, operand
def setprogramstartaddress(self, line):
# This procedure should only ever run once.
self.__programstartset = True
try:
# Check to see if line starts with a hex address
if self.counterinfile:
# Try to convert
intval = int(line[0], 16)
if intval < 1 or intval > 65535:
raise ValueError
else:
# Set the counter.
self.pc = intval
except ValueError:
print('Invalid address or format for program counter.')
def signextend(self, r):
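        # Branch offsets are signed 8-bit two's complement: 0x10 -> +16, 0xFE -> -2.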
return r if r < 0x80 else r - 0x100
def writeheader(self):
self.writeline(";;;;;;;;;;;;;;;;;;;;;;;;;")
self.writeline("; %s" % self.outfile.name)
self.writeline(";")
self.writeline("; Disassembled by mfc6502")
self.writeline(";;;;;;;;;;;;;;;;;;;;;;;;;")
self.writeline("")
# Output the program start.
output = "*=$" + "{:04X}".format(self.pc)
self.writeline(output)
self.writeline("")
def writelinedata(self, size, value):
str_out = []
# Check to see if we should output the instruction address.
if self.includecounter:
str_out.append("{0:04X} ".format(self.pc))
# Append opcode
str_out.append(value)
# Write the line to the file.
self.writeline("".join(str_out))
self.pc += size
def formatasempty(self, opcode, operand=None):
self.writelinedata(1, opcode)
def formatasimmediate(self, opcode, operand):
self.writelinedata(2, "{0} #${1:02X}".format(opcode, operand))
def formataszeropage(self, opcode, operand):
self.writelinedata(2, "{0} ${1:02X}".format(opcode, operand))
def formataszeropagex(self, opcode, operand):
self.writelinedata(2, "{0} ${1:02X},X".format(opcode, operand))
def formataszeropagey(self, opcode, operand):
self.writelinedata(2, "{0} ${1:02X},Y".format(opcode, operand))
def formatasabsolute(self, opcode, operand):
self.writelinedata(3, "{0} ${1:04X}".format(opcode, operand))
def formatasabsolutex(self, opcode, operand):
self.writelinedata(3, "{0} ${1:04X},X".format(opcode, operand))
def formatasabsolutey(self, opcode, operand):
self.writelinedata(3, "{0} ${1:04X},Y".format(opcode, operand))
def formatasindirectx(self, opcode, operand):
self.writelinedata(2, "{0} (${1:02X},X)".format(opcode, operand))
def formatasindirecty(self, opcode, operand):
self.writelinedata(2, "{0} (${1:02X}),Y".format(opcode, operand))
def formatasbranch(self, opcode, operand):
self.writelinedata(2, "{0} {1:04X}".format(opcode, self.pc + 2 + self.signextend(operand)))
def formatasjump(self, opcode, operand):
self.writelinedata(3, "{0} {1:04X}".format(opcode, operand))
def loadhexcodes(self):
self.opcodes = {
0x00: ("BRK", self.formatasempty),
0x01: ("ORA", self.formatasindirectx),
0x05: ("ORA", self.formataszeropage),
0x06: ("ASL", self.formataszeropage),
0x08: ("PHP", self.formatasempty),
0x09: ("ORA", self.formatasimmediate),
0x0A: ("LDY", self.formatasimmediate),
0x0D: ("ORA", self.formatasabsolute),
0x0E: ("ASL", self.formatasabsolute),
0x10: ("BPL", self.formatasbranch),
0x11: ("ORA", self.formatasindirecty),
0x15: ("ORA", self.formataszeropagex),
0x16: ("ASL", self.formataszeropagex),
0x18: ("CLC", self.formatasempty),
0x19: ("ORA", self.formatasabsolutey),
0x1D: ("ORA", self.formatasabsolutex),
0x1E: ("ASL", self.formatasabsolutex),
0x20: ("JSR", self.formatasjump),
0x21: ("AND", self.formatasindirectx),
0x24: ("BIT", self.formataszeropage),
0x25: ("AND", self.formataszeropage),
0x26: ("ROL", self.formataszeropage),
0x28: ("PLP", self.formatasempty),
0x29: ("AND", self.formatasimmediate),
0x2A: ("ROL", self.formatasempty),
0x2C: ("BIT", self.formatasabsolute),
0x2D: ("AND", self.formatasabsolute),
0x2E: ("ROL", self.formatasabsolute),
0x30: ("BMI", self.formatasbranch),
0x31: ("AND", self.formatasindirecty),
0x35: ("AND", self.formataszeropagex),
0x36: ("ROL", self.formataszeropagex),
0x38: ("SEC", self.formatasempty),
0x39: ("AND", self.formatasabsolutey),
0x3D: ("AND", self.formatasabsolutex),
0x3E: ("ROL", self.formatasabsolutex),
0x40: ("RTI", self.formatasempty),
0x41: ("EOR", self.formatasindirectx),
0x45: ("EOR", self.formataszeropage),
0x46: ("LSR", self.formataszeropage),
0x48: ("PHA", self.formatasempty),
0x49: ("EOR", self.formatasimmediate),
0x4A: ("LSR", self.formatasempty),
0x4C: ("JMP", self.formatasjump),
0x4D: ("EOR", self.formatasabsolute),
0x4E: ("LSR", self.formatasabsolute),
0x50: ("BVC", self.formatasbranch),
0x51: ("EOR", self.formatasindirecty),
0x55: ("EOR", self.formataszeropagex),
0x56: ("LSR", self.formataszeropagex),
0x58: ("CLI", self.formatasempty),
0x59: ("EOR", self.formatasabsolutey),
0x5A: ("PHY", self.formatasempty),
0x5D: ("EOR", self.formatasabsolutex),
0x5E: ("LSR", self.formatasabsolutex),
0x60: ("RTS", self.formatasempty),
0x61: ("ADC", self.formatasindirectx),
0x65: ("ADC", self.formataszeropage),
0x66: ("ROR", self.formataszeropage),
0x68: ("PLA", self.formatasempty),
0x69: ("ADC", self.formatasimmediate),
0x6A: ("ROR", self.formatasempty),
0x6D: ("ADC", self.formatasabsolute),
0x6E: ("ROR", self.formatasabsolute),
0x70: ("BVS", self.formatasbranch),
0x71: ("ADC", self.formatasindirecty),
0x75: ("ADC", self.formataszeropagex),
0x76: ("ROR", self.formataszeropagex),
0x78: ("SEI", self.formatasempty),
0x79: ("ADC", self.formatasabsolutey),
0x7A: ("PLY", self.formatasempty),
0x7D: ("ADC", self.formatasabsolutex),
0x7E: ("ROR", self.formatasabsolutex),
0x81: ("STA", self.formatasindirectx),
0x84: ("STY", self.formataszeropage),
0x85: ("STA", self.formataszeropage),
0x86: ("STX", self.formataszeropage),
0x88: ("DEY", self.formatasempty),
0x8A: ("TXA", self.formatasempty),
0x8C: ("STY", self.formatasabsolute),
0x8D: ("STA", self.formatasabsolute),
0x8E: ("STX", self.formatasabsolute),
0x90: ("BCC", self.formatasbranch),
0x91: ("STA", self.formatasindirecty),
0x94: ("STY", self.formataszeropagex),
0x95: ("STA", self.formataszeropagex),
0x96: ("STX", self.formataszeropagey),
0x98: ("TYA", self.formatasempty),
0x99: ("STA", self.formatasabsolutey),
0x9A: ("TXS", self.formatasempty),
0x9D: ("STA", self.formatasabsolutex),
0xA0: ("LDY", self.formatasimmediate),
0xA1: ("LDA", self.formatasindirectx),
0xA2: ("LDX", self.formatasimmediate),
0xA4: ("LDY", self.formataszeropage),
0xA5: ("LDA", self.formataszeropage),
0xA6: ("LDX", self.formataszeropage),
0xA8: ("TAY", self.formatasempty),
0xA9: ("LDA", self.formatasimmediate),
0xAA: ("TAX", self.formatasempty),
0xAC: ("LDY", self.formatasabsolute),
0xAD: ("LDA", self.formatasabsolute),
0xAE: ("LDX", self.formatasabsolute),
0xB0: ("BCS", self.formatasbranch),
0xB1: ("LDA", self.formatasindirecty),
0xB4: ("LDY", self.formataszeropagex),
0xB5: ("LDA", self.formataszeropagex),
0xB6: ("LDX", self.formataszeropagey),
0xB8: ("CLV", self.formatasempty),
0xB9: ("LDA", self.formatasabsolutey),
0xBA: ("TSX", self.formatasempty),
0xBC: ("LDY", self.formatasabsolutex),
0xBD: ("LDA", self.formatasabsolutex),
0xBE: ("LDX", self.formatasabsolutey),
0xC0: ("CPY", self.formatasimmediate),
0xC1: ("CMP", self.formatasindirectx),
0xC4: ("CPY", self.formataszeropage),
0xC5: ("CMP", self.formataszeropage),
0xC6: ("DEC", self.formataszeropage),
0xC8: ("INY", self.formatasempty),
0xC9: ("CMP", self.formatasimmediate),
0xCA: ("DEX", self.formatasempty),
0xCC: ("CPY", self.formatasabsolute),
0xCD: ("CMP", self.formatasabsolute),
0xCE: ("DEC", self.formatasabsolute),
0xD0: ("BNE", self.formatasbranch),
0xD1: ("CMP", self.formatasindirecty),
0xD5: ("CMP", self.formataszeropagex),
0xD6: ("DEC", self.formataszeropagex),
0xD8: ("CLD", self.formatasempty),
0xD9: ("CMP", self.formatasabsolutey),
0xDA: ("PHX", self.formatasempty),
0xDD: ("CMP", self.formatasabsolutex),
0xDE: ("DEC", self.formatasabsolutex),
0xE0: ("CPX", self.formatasimmediate),
0xE1: ("SBC", self.formatasindirectx),
0xE4: ("CPX", self.formataszeropage),
0xE5: ("SBC", self.formataszeropage),
0xE6: ("INC", self.formataszeropage),
0xE8: ("INX", self.formatasempty),
0xE9: ("SBC", self.formatasimmediate),
0xEA: ("NOP", self.formatasempty),
0xEC: ("CPX", self.formatasabsolute),
0xED: ("SBC", self.formatasabsolute),
0xEE: ("INC", self.formatasabsolute),
0xF0: ("BEQ", self.formatasbranch),
0xF1: ("SBC", self.formatasindirecty),
0xF5: ("SBC", self.formataszeropagex),
0xF6: ("INC", self.formataszeropagex),
0xF8: ("SED", self.formatasempty),
0xF9: ("SBC", self.formatasabsolutey),
0xFA: ("PLX", self.formatasempty),
0xFD: ("SBC", self.formatasabsolutex),
0xFE: ("INC", self.formatasabsolutex),
0xFF: (".SYS", self.formatasimmediate)
}
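# Hedged usage sketch (file reading and output handling come from MFCBase,
# which is assumed here and not shown):
#
#     dis = Disassembler('program.hex', outfile, 0x0600, True, True)
#     dis.disassemble()
#
# With counterinfile=True, an input line such as "0600 A9 10" sets the program
# counter to $0600 and disassembles to "0600 LDA #$10" when includecounter is on.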
| 2.9375
| 3
|
cover.py
|
NaikAayush/bpl-hassio-component
| 4
|
12776440
|
import logging
import socket
import threading
import datetime
import time
import math
from . import BPLMonitor, BPLCurtain, DATA_DOMAIN
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
# We only want this platform to be set up via discovery.
if discovery_info is None:
return
sensors = hass.data[DATA_DOMAIN]['sensors']
sensors_to_add = []
    for s in sensors:
        if isinstance(s, BPLCurtain):
sensors_to_add.append(s)
add_entities(sensors_to_add)
| 2.5
| 2
|
Codefights/arcade/intro/level-11/49.lineEncoding/Python/test.py
|
RevansChen/online-judge
| 7
|
12776441
|
# Python3
from solution1 import lineEncoding as f
qa = [
('aabbbc', '2a3bc'),
('abbcabb', 'a2bca2b'),
('abcd', 'abcd'),
('zzzz', '4z'),
('wwwwwwwawwwwwww', '7wa7w'),
('ccccccccccccccc', '15c'),
('qwertyuioplkjhg', 'qwertyuioplkjhg'),
('ssiiggkooo', '2s2i2gk3o'),
('adfaaa', 'adf3a'),
('bbjaadlkjdl', '2bj2adlkjdl')
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()
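# For reference, a minimal sketch of the function under test (solution1 is not
# included here): run-length encode, omitting the count when a run has length 1.
from itertools import groupby

def lineEncoding_sketch(s):
    out = []
    for ch, grp in groupby(s):
        n = len(list(grp))  # run length of this character
        out.append((str(n) if n > 1 else '') + ch)
    return ''.join(out)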
| 2.84375
| 3
|
autobazaar/__main__.py
|
csala/AutoBazaar
| 87
|
12776442
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""AutoBazaar Command Line Module."""
import argparse
import gc
import json
import os
import shutil
import socket
import sys
import traceback
import warnings
from datetime import datetime
import cloudpickle
import pandas as pd
from mit_d3m import metrics
from mit_d3m.dataset import D3MDS
from mit_d3m.db import get_db
from mit_d3m.stats import get_stats
from mit_d3m.utils import logging_setup, make_abs
import autobazaar
from autobazaar.search import TUNERS, PipelineSearcher
from autobazaar.utils import encode_score, make_keras_picklable
warnings.filterwarnings(action='ignore')
VERSION = autobazaar.get_version()
def _load_targets(datasets_dir, dataset, problem):
score_phase = 'SCORE'
if problem:
score_phase += '_' + problem
score_dir = os.path.join(datasets_dir, dataset, score_phase)
csv_path = os.path.join(score_dir, 'targets.csv')
if not os.path.exists(csv_path):
csv_path = os.path.join(score_dir, 'dataset_SCORE', 'tables', 'learningData.csv')
return pd.read_csv(csv_path, index_col='d3mIndex')
def _get_metric(problem_path):
problem_schema = os.path.join(problem_path, 'problemDoc.json')
with open(problem_schema, 'r') as f:
problem_doc = json.load(f)
problem_metrics = problem_doc['inputs']['performanceMetrics']
if len(problem_metrics) > 1:
raise Exception("Wrong number of metrics")
return metrics.METRICS_DICT[problem_metrics[0]['metric']]
def _get_dataset_paths(datasets_dir, dataset, phase, problem):
if problem:
full_phase = phase + '_' + problem
else:
full_phase = phase
root_dir = os.path.join(datasets_dir, dataset, full_phase)
dataset_path = os.path.join(root_dir, 'dataset_' + phase)
problem_path = os.path.join(root_dir, 'problem_' + phase)
return dataset_path, problem_path
def _search_pipeline(dataset, problem, template, input_dir, output_dir,
budget, checkpoints, splits, db, tuner_type, test_id):
dataset_path, problem_path = _get_dataset_paths(input_dir, dataset, 'TRAIN', problem)
d3mds = D3MDS(dataset_path, problem_path)
searcher = PipelineSearcher(
output_dir,
cv_splits=splits,
db=db,
tuner_type=tuner_type,
test_id=test_id
)
return searcher.search(d3mds, template, budget=budget, checkpoints=checkpoints)
def _test_pipeline(dataset, problem, pipeline_id, input_dir, output_dir):
dataset_path, problem_path = _get_dataset_paths(input_dir, dataset, 'TEST', problem)
pipeline_path = os.path.join(output_dir, '{}.pkl'.format(pipeline_id))
with open(pipeline_path, 'rb') as pipeline_pkl:
pipeline = cloudpickle.load(pipeline_pkl)
print('Executing best pipeline {}'.format(pipeline))
d3mds = D3MDS(dataset_path, problem_path)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
predictions = pipeline.predict(d3mds)
return predictions
def _score_predictions(dataset, problem, predictions, input_dir):
dataset_path, problem_path = _get_dataset_paths(input_dir, dataset, 'TEST', problem)
metric = _get_metric(problem_path)
predictions = predictions.set_index('d3mIndex')
targets = _load_targets(input_dir, dataset, problem)[predictions.columns]
if len(targets.columns) > 1 or len(predictions.columns) > 1:
raise Exception("I don't know how to handle these")
if any(targets.index != predictions.index):
raise Exception("Different indexes cannot be compared")
targets = targets.iloc[:, 0]
predictions = predictions.iloc[:, 0]
score = encode_score(metric, targets, predictions)
print("Score: {}".format(score))
summary = {'predictions': predictions, 'targets': targets}
print(pd.DataFrame(summary).describe())
return score
def _format_exception(e):
error = '{}'.format(e.__class__.__name__)
str_e = str(e)
if str_e:
error += ' - ' + str_e
return error
def _box_print(message):
length = len(message) + 10
print(length * '#')
print('#### {} ####'.format(message))
print(length * '#')
def _insert_test(args, dataset):
insert_ts = datetime.utcnow()
document = {
'test_id': args.test_id,
'dataset': dataset,
'timeout': args.timeout,
'checkpoints': args.checkpoints,
'budget': args.budget,
'template': args.template,
'status': 'running',
'insert_ts': insert_ts,
'update_ts': insert_ts,
'version': VERSION,
'hostname': socket.gethostname(),
'tuner_type': args.tuner_type,
'splits': args.splits,
}
args.db.tests.insert_one(document)
def _update_test(args, dataset, error, step):
query = {
'test_id': args.test_id,
'dataset': dataset
}
update = {
'$set': {
'status': 'error' if error else 'done',
'error': error,
'step': step,
'update_ts': datetime.utcnow()
}
}
args.db.tests.update_one(query, update)
def _insert_test_result(args, result):
document = result.copy()
document['test_id'] = args.test_id
document['insert_ts'] = datetime.utcnow()
args.db.test_results.insert_one(document)
def _score_dataset(dataset, args):
start_ts = datetime.utcnow()
if args.db:
_insert_test(args, dataset)
result_base = {
'dataset': dataset,
'score': None,
'elapsed': None,
'iterations': None,
'error': None,
'step': None,
'load_time': None,
'trivial_time': None,
'cv_time': None,
'cv_score': None,
'rank': None
}
results = []
step = None
error = None
try:
step = 'SEARCH'
_box_print('Searching {}'.format(dataset))
# cleanup
if not args.keep:
shutil.rmtree(args.output, ignore_errors=True)
search_results = _search_pipeline(
dataset, args.problem, args.template, args.input, args.output, args.budget,
args.checkpoints, args.splits, args.db, args.tuner_type, args.test_id
)
gc.collect()
for search_result in search_results or []:
result = result_base.copy()
result.update(search_result)
results.append(result)
pipeline = result['pipeline']
try:
step = 'TEST'
_box_print('Executing {}'.format(dataset))
predictions = _test_pipeline(dataset, args.problem, pipeline,
args.input, args.output)
step = 'SCORE'
_box_print('Scoring {}'.format(dataset))
score = _score_predictions(dataset, args.problem,
predictions, args.input)
result['score'] = score
gc.collect()
except Exception as e:
error = _format_exception(e)
print("Scoring pipeline {} for dataset {} failed on step {} with error {}"
.format(pipeline, dataset, step, error))
traceback.print_exc()
result['error'] = error
result['step'] = step
if args.db:
_insert_test_result(args, result)
except Exception as e:
error = _format_exception(e)
print("Dataset {} failed on step {} with error {}".format(dataset, step, error))
traceback.print_exc()
result_base['step'] = step
result_base['error'] = error
result_base['elapsed'] = (datetime.utcnow() - start_ts).total_seconds()
results.append(result_base)
if args.db:
_update_test(args, dataset, error, step)
return results
def _prepare_search(args):
make_keras_picklable()
if not args.datasets and not args.all:
print('Please provide at least one dataset name or add the --all option')
sys.exit(1)
args.datasets = _get_datasets(args)
if args.db:
args.db = get_db(
config=args.db_config,
database=args.db_name,
host=args.db_host,
port=args.db_port,
user=args.db_user,
            password=args.db_password
)
if args.checkpoints:
args.checkpoints = [int(c) for c in args.checkpoints.split(',')]
elif args.timeout:
args.checkpoints = [args.timeout]
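    # e.g. --checkpoints 60,300,900 -> [60, 300, 900]; with only --timeout 600 -> [600]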
if args.test_id is None:
args.test_id = datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
def _score_datasets(args):
if args.report and os.path.exists(args.report):
report = pd.read_csv(args.report)
else:
report = pd.DataFrame(columns=['dataset'])
for dataset, row in args.datasets.iterrows():
dataset_score = report[report.dataset == dataset]
if dataset_score.empty or dataset_score.score.isnull().values[0]:
if not dataset_score.empty:
# clean-up
report = report[report.dataset != dataset].copy()
scores = _score_dataset(dataset, args)
if scores:
scores = pd.DataFrame(scores)
scores = scores.merge(pd.DataFrame([row]), left_on='dataset', right_index=True)
report = report.append(scores, ignore_index=True, sort=False)
report = report.reindex(REPORT_COLUMNS, axis=1)
if args.report:
report.to_csv(args.report, index=False)
return report
def _search(args):
_prepare_search(args)
print("{} - Processing Datasets: {}".format(args.test_id, args.datasets.index.values))
report = _score_datasets(args)
report = report.reindex(REPORT_COLUMNS, axis=1)
columns = REPORT_COLUMNS[1:]
print(report.set_index('dataset').to_string(columns=columns))
def _get_datasets(args):
if args.all:
datasets = [
d for d in os.listdir(args.input)
if os.path.isdir(os.path.join(args.input, d))
]
else:
datasets = args.datasets
exclude = getattr(args, 'exclude', None) or []
datasets = [dataset for dataset in datasets if dataset not in exclude]
try:
summary = get_stats(datasets, args.input)
except KeyError:
print("No matching datasets found")
sys.exit(1)
summary = summary.set_index('dataset').reindex(datasets)
summary = summary[~summary.data_modality.isnull()]
for field in ['data_modality', 'task_type', 'task_subtype']:
value = getattr(args, field)
if value:
summary = summary[summary[field] == value]
if summary.empty:
print("No matching datasets found")
sys.exit(1)
return summary
REPORT_COLUMNS = [
'dataset',
'pipeline',
'score',
'rank',
'cv_score',
'metric',
'data_modality',
'task_type',
'task_subtype',
'elapsed',
'iterations',
'load_time',
'trivial_time',
'cv_time',
'error',
'step'
]
def _list(args):
args.all = True
datasets = _get_datasets(args)
datasets = datasets.reset_index().sort_values('dataset').set_index('dataset')
columns = [
'data_modality', 'task_type', 'task_subtype', 'metric', 'size_human', 'train_samples'
]
datasets = datasets.reindex(columns, axis=1)
if args.report:
print("Storing datasets as {}".format(args.report))
datasets[columns].to_csv(args.report, index=True)
else:
print(datasets.to_string(columns=columns, index=True))
class ArgumentParser(argparse.ArgumentParser):
def convert_arg_line_to_args(self, arg_line):
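        # Allow several whitespace-separated arguments per line in @files,
        # e.g. `autobazaar @args.txt` where args.txt holds "-v search --all".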
return arg_line.split()
def _path_type(string):
try:
return make_abs(string)
except ValueError:
error = "Not a valid path: '{0}'.".format(string)
raise argparse.ArgumentTypeError(error)
def _get_parser():
# Logging
logging_args = ArgumentParser(add_help=False)
logging_args.add_argument('-v', '--verbose', action='count', default=0)
logging_args.add_argument('-l', '--logfile')
# Report
report_args = ArgumentParser(add_help=False)
report_args.add_argument('-r', '--report', type=_path_type,
                             help='Store results in the given CSV file.')
# Dataset Selection
dataset_args = ArgumentParser(add_help=False)
dataset_args.add_argument('-i', '--input', default='input', type=_path_type,
help='Input datasets folder. Defaults to `input`.')
dataset_args.add_argument('-o', '--output', type=_path_type,
help='Output pipelines folder. Defaults to `output`.',
default='output')
dataset_args.add_argument('-p', '--problem', default='',
help='Problem suffix. Only needed if the dataset has more than one.')
dataset_args.add_argument('-M', '--data-modality', type=str,
help='Only process datasets of the given Data Modality.')
    dataset_args.add_argument('-T', '--task-type', type=str,
                              help='Only process datasets of the given Task Type.')
    dataset_args.add_argument('-S', '--task-subtype', type=str,
                              help='Only process datasets of the given Task Subtype.')
# Search Configuration
search_args = ArgumentParser(add_help=False)
search_args.add_argument('-b', '--budget', type=int,
                             help='If given, maximum number of tuning iterations to perform.')
search_args.add_argument('-s', '--splits', type=int, default=5,
help='Number of Cross Validation Folds. Defaults to 5')
search_args.add_argument('-c', '--checkpoints',
help=('Comma separated list of time checkpoints in seconds where '
'the best pipeline so far will be dumped and stored.'))
search_args.add_argument('-t', '--timeout', type=int,
help='Timeout in seconds. Ignored if checkpoints are given.')
search_args.add_argument('-u', '--tuner-type', default='gp', choices=TUNERS.keys(),
help='Type of tuner to use. Defaults to "gp"')
search_args.add_argument('--template',
help='Template to use. If not given, use the most appropriate one.')
search_args.add_argument('-e', '--exclude', nargs='+',
help='Exclude these datasets. Useful in combination with --all.')
search_args.add_argument('-a', '--all', action='store_true',
help='Process all the datasets found in the input folder.')
search_args.add_argument('-k', '--keep', action='store_true',
help='Keep previous results in the output folder.')
search_args.add_argument('--test-id', help='test_id associated with this run.')
search_args.add_argument('datasets', nargs='*',
                             help='Datasets to process. Ignored if --all is used.')
# Backend configuration
db_args = ArgumentParser(add_help=False)
db_args.add_argument('--db', action='store_true',
help='Use a MongoDB backend to store the results.')
    db_args.add_argument('--db-config', help='MongoDB configuration JSON file.')
db_args.add_argument('--db-host', default='localhost')
db_args.add_argument('--db-port', default=27017, type=int)
db_args.add_argument('--db-name', default='autobazaar')
db_args.add_argument('--db-user')
db_args.add_argument('--db-password')
parser = ArgumentParser(
prog='autobazaar',
description='AutoBazaar Experiments Suite',
fromfile_prefix_chars='@',
parents=[logging_args]
)
parser.add_argument('--version', action='version',
version='%(prog)s {version}'.format(version=VERSION))
subparsers = parser.add_subparsers(title='command', help='Command to execute')
parser.set_defaults(command=None)
list_ = subparsers.add_parser('list', parents=[logging_args, dataset_args, report_args],
help='List the available datasets that match the conditions.')
list_.set_defaults(command=_list)
search_parents = [
logging_args,
dataset_args,
search_args,
report_args,
db_args
]
search_ = subparsers.add_parser('search', parents=search_parents,
help='Search the best pipeline for the given datasets.')
search_.set_defaults(command=_search)
return parser
def main():
parser = _get_parser()
args = parser.parse_args()
if not args.command:
parser.print_help()
parser.exit()
logging_setup(args.verbose, args.logfile)
gc.enable()
args.command(args)
if __name__ == '__main__':
main()
| 2.046875
| 2
|
bag.py
|
dineshrajpurohit/ds_al
| 0
|
12776443
|
from linked_list import Linked_List, Node
import random
class Bag(Linked_List):
def __init__(self):
Linked_List.__init__(self)
def add(self, item):
"""Stores in a random location in the Linked list"""
if self.count == 0:
random_location = 0
else:
random_location = random.randint(0, self.count - 1)
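            # randint is inclusive, so any existing position 0..count-1 can be chosen.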
self.insert(Node(item), random_location)
def is_empty(self):
return self.count == 0
def size(self):
return self.count
def test_bag():
print "\n\nBag implementation"
bag = Bag()
print "Is Bag empty? ", bag.is_empty()
bag.add("A")
bag.add("B")
bag.add("C")
bag.add("X")
print "Is Bag empty? ", bag.is_empty()
print bag.size()
print bag
if __name__ == '__main__':
test_bag()
| 3.796875
| 4
|
library/Custom/python_2_template.py
|
sarafanshul/KACTL
| 4
|
12776444
|
<gh_stars>1-10
from __future__ import division, print_function
import bisect
import math
import heapq
import itertools
import sys
from collections import deque
from atexit import register
from collections import Counter
from functools import reduce
sys.setrecursionlimit(10000000)
if sys.version_info[0] < 3:
from io import BytesIO as stream
else:
from io import StringIO as stream
if sys.version_info[0] < 3:
class dict(dict):
"""dict() -> new empty dictionary"""
def items(self):
"""D.items() -> a set-like object providing a view on D's items"""
return dict.iteritems(self)
def keys(self):
"""D.keys() -> a set-like object providing a view on D's keys"""
return dict.iterkeys(self)
def values(self):
"""D.values() -> an object providing a view on D's values"""
return dict.itervalues(self)
input = raw_input
range = xrange
filter = itertools.ifilter
map = itertools.imap
zip = itertools.izip
def sync_with_stdio(sync=True):
"""Set whether the standard Python streams are allowed to buffer their I/O.
Args:
sync (bool, optional): The new synchronization setting.
"""
global input, flush
if sync:
flush = sys.stdout.flush
else:
sys.stdin = stream(sys.stdin.read())
input = lambda: sys.stdin.readline().rstrip('\r\n')
sys.stdout = stream()
register(lambda: sys.__stdout__.write(sys.stdout.getvalue()))
# Usage is the same as in Python 3; all compatibility cases above have been handled.
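# Typical setup: sync_with_stdio(False) slurps stdin once and buffers stdout,
# flushing it to the real stream at interpreter exit via atexit.register above.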
| 2.3125
| 2
|
arrays/tests/test_remove_element.py
|
ahcode0919/python-ds-algorithms
| 0
|
12776445
|
from arrays.remove_element import remove_element
def test_remove_element():
arr = [3, 2, 2, 3]
length = remove_element(arr, 3)
assert length == 2
assert arr == [2, 2, 2, 3]
arr = [1]
length = remove_element(arr, 1)
assert length == 0
assert arr == [1]
arr = [2, 2, 3, 3]
length = remove_element(arr, 3)
assert length == 2
assert arr == [2, 2, 3, 3]
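# For context, a minimal two-pointer sketch consistent with the assertions
# above (the real arrays.remove_element implementation is not shown here):
def remove_element_sketch(nums, val):
    k = 0  # next write position for kept elements
    for x in nums:
        if x != val:
            nums[k] = x
            k += 1
    return k  # new logical length; elements at index >= k are left in place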
| 3.671875
| 4
|
tests/components/sleepiq/conftest.py
|
MrDelik/core
| 3
|
12776446
|
"""Common methods for SleepIQ."""
from __future__ import annotations
from collections.abc import Generator
from unittest.mock import MagicMock, create_autospec, patch
from asyncsleepiq import (
SleepIQActuator,
SleepIQBed,
SleepIQFoundation,
SleepIQLight,
SleepIQPreset,
SleepIQSleeper,
)
import pytest
from homeassistant.components.sleepiq import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
BED_ID = "123456"
BED_NAME = "Test Bed"
BED_NAME_LOWER = BED_NAME.lower().replace(" ", "_")
SLEEPER_L_ID = "98765"
SLEEPER_R_ID = "43219"
SLEEPER_L_NAME = "SleeperL"
SLEEPER_R_NAME = "Sleeper R"
SLEEPER_L_NAME_LOWER = SLEEPER_L_NAME.lower().replace(" ", "_")
SLEEPER_R_NAME_LOWER = SLEEPER_R_NAME.lower().replace(" ", "_")
PRESET_L_STATE = "Watch TV"
PRESET_R_STATE = "Flat"
SLEEPIQ_CONFIG = {
CONF_USERNAME: "<EMAIL>",
CONF_PASSWORD: "password",
}
@pytest.fixture
def mock_bed() -> MagicMock:
"""Mock a SleepIQBed object with sleepers and lights."""
bed = create_autospec(SleepIQBed)
bed.name = BED_NAME
bed.id = BED_ID
bed.mac_addr = "12:34:56:78:AB:CD"
bed.model = "C10"
bed.paused = False
sleeper_l = create_autospec(SleepIQSleeper)
sleeper_r = create_autospec(SleepIQSleeper)
bed.sleepers = [sleeper_l, sleeper_r]
sleeper_l.side = "L"
sleeper_l.name = SLEEPER_L_NAME
sleeper_l.in_bed = True
sleeper_l.sleep_number = 40
sleeper_l.pressure = 1000
sleeper_l.sleeper_id = SLEEPER_L_ID
sleeper_r.side = "R"
sleeper_r.name = SLEEPER_R_NAME
sleeper_r.in_bed = False
sleeper_r.sleep_number = 80
sleeper_r.pressure = 1400
sleeper_r.sleeper_id = SLEEPER_R_ID
bed.foundation = create_autospec(SleepIQFoundation)
light_1 = create_autospec(SleepIQLight)
light_1.outlet_id = 1
light_1.is_on = False
light_2 = create_autospec(SleepIQLight)
light_2.outlet_id = 2
light_2.is_on = False
bed.foundation.lights = [light_1, light_2]
return bed
@pytest.fixture
def mock_asyncsleepiq_single_foundation(
mock_bed: MagicMock,
) -> Generator[MagicMock, None, None]:
"""Mock an AsyncSleepIQ object with a single foundation."""
with patch("homeassistant.components.sleepiq.AsyncSleepIQ", autospec=True) as mock:
client = mock.return_value
client.beds = {BED_ID: mock_bed}
actuator_h = create_autospec(SleepIQActuator)
actuator_f = create_autospec(SleepIQActuator)
mock_bed.foundation.actuators = [actuator_h, actuator_f]
actuator_h.side = "R"
actuator_h.side_full = "Right"
actuator_h.actuator = "H"
actuator_h.actuator_full = "Head"
actuator_h.position = 60
actuator_f.side = None
actuator_f.actuator = "F"
actuator_f.actuator_full = "Foot"
actuator_f.position = 10
preset = create_autospec(SleepIQPreset)
mock_bed.foundation.presets = [preset]
preset.preset = PRESET_R_STATE
preset.side = None
preset.side_full = None
yield client
@pytest.fixture
def mock_asyncsleepiq(mock_bed: MagicMock) -> Generator[MagicMock, None, None]:
"""Mock an AsyncSleepIQ object with a split foundation."""
with patch("homeassistant.components.sleepiq.AsyncSleepIQ", autospec=True) as mock:
client = mock.return_value
client.beds = {BED_ID: mock_bed}
actuator_h_r = create_autospec(SleepIQActuator)
actuator_h_l = create_autospec(SleepIQActuator)
actuator_f = create_autospec(SleepIQActuator)
mock_bed.foundation.actuators = [actuator_h_r, actuator_h_l, actuator_f]
actuator_h_r.side = "R"
actuator_h_r.side_full = "Right"
actuator_h_r.actuator = "H"
actuator_h_r.actuator_full = "Head"
actuator_h_r.position = 60
actuator_h_l.side = "L"
actuator_h_l.side_full = "Left"
actuator_h_l.actuator = "H"
actuator_h_l.actuator_full = "Head"
actuator_h_l.position = 50
actuator_f.side = None
actuator_f.actuator = "F"
actuator_f.actuator_full = "Foot"
actuator_f.position = 10
preset_l = create_autospec(SleepIQPreset)
preset_r = create_autospec(SleepIQPreset)
mock_bed.foundation.presets = [preset_l, preset_r]
preset_l.preset = PRESET_L_STATE
preset_l.side = "L"
preset_l.side_full = "Left"
preset_r.preset = PRESET_R_STATE
preset_r.side = "R"
preset_r.side_full = "Right"
yield client
async def setup_platform(
hass: HomeAssistant, platform: str | None = None
) -> MockConfigEntry:
"""Set up the SleepIQ platform."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
data=SLEEPIQ_CONFIG,
unique_id=SLEEPIQ_CONFIG[CONF_USERNAME].lower(),
)
mock_entry.add_to_hass(hass)
if platform:
with patch("homeassistant.components.sleepiq.PLATFORMS", [platform]):
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
return mock_entry
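# Example use in a test body (the platform name is illustrative):
#
#     entry = await setup_platform(hass, "light")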
| 2.171875
| 2
|
qcloudsdkdfw/DescribeInstancesOfSecurityGroupRequest.py
|
f3n9/qcloudcli
| 0
|
12776447
|
<filename>qcloudsdkdfw/DescribeInstancesOfSecurityGroupRequest.py
# -*- coding: utf-8 -*-
from qcloudsdkcore.request import Request
class DescribeInstancesOfSecurityGroupRequest(Request):
def __init__(self):
super(DescribeInstancesOfSecurityGroupRequest, self).__init__(
'dfw', 'qcloudcliV1', 'DescribeInstancesOfSecurityGroup', 'dfw.api.qcloud.com')
def get_alias(self):
return self.get_params().get('alias')
def set_alias(self, alias):
self.add_param('alias', alias)
def get_limit(self):
return self.get_params().get('limit')
def set_limit(self, limit):
self.add_param('limit', limit)
def get_offset(self):
return self.get_params().get('offset')
def set_offset(self, offset):
self.add_param('offset', offset)
def get_sgId(self):
return self.get_params().get('sgId')
def set_sgId(self, sgId):
self.add_param('sgId', sgId)
def get_vagueIp(self):
return self.get_params().get('vagueIp')
def set_vagueIp(self, vagueIp):
self.add_param('vagueIp', vagueIp)
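# Hedged usage sketch (request signing and sending live in qcloudsdkcore and
# are not shown here):
#
#     req = DescribeInstancesOfSecurityGroupRequest()
#     req.set_sgId('sg-xxxxxxxx')
#     req.set_limit(20)
#     req.set_offset(0)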
| 2.15625
| 2
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC_2/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
| 0
|
12776448
|
<gh_stars>0
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0615522,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.251035,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.364373,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.189927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.328886,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.188625,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.707438,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.131872,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.75365,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0688379,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00688502,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0715316,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0509189,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.14037,
'Execution Unit/Register Files/Runtime Dynamic': 0.057804,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.189713,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.537473,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.95913,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000131425,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000131425,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000113736,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.36269e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000731455,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00110804,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00128637,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0489497,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.11362,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.119108,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.166255,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.48556,
'Instruction Fetch Unit/Runtime Dynamic': 0.336707,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.141075,
'L2/Runtime Dynamic': 0.0366513,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.30829,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04115,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0670072,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0670072,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.626,
'Load Store Unit/Runtime Dynamic': 1.43862,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.165228,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.330457,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0586401,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0607512,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.193593,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0195491,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.453469,
'Memory Management Unit/Runtime Dynamic': 0.0803003,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 20.0214,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.240161,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0126018,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0960641,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.348827,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.20023,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0283133,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.224927,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.147891,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.056089,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0904695,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.045666,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.192225,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0414757,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.1673,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0279397,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00235263,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0278161,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0173991,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0557559,
'Execution Unit/Register Files/Runtime Dynamic': 0.0197517,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0656807,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.170502,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.01278,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.01195e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.01195e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.4907e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.34928e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00024994,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000365086,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000385987,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0167262,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.06393,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0389778,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0568097,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.33408,
'Instruction Fetch Unit/Runtime Dynamic': 0.113265,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0431929,
'L2/Runtime Dynamic': 0.0133415,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.94103,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.356665,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.022773,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0227729,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.04856,
'Load Store Unit/Runtime Dynamic': 0.491746,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0561544,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.112308,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0199294,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0205755,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0661511,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00639738,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.256495,
'Memory Management Unit/Runtime Dynamic': 0.0269728,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.4391,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0734969,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00342502,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0274434,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.104365,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.76247,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0215805,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.219639,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.131279,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0526619,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0849416,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0428757,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.180479,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0401025,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.12402,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0248013,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00220887,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.023453,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.016336,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0482543,
'Execution Unit/Register Files/Runtime Dynamic': 0.0185449,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0548052,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.155662,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.979702,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.35133e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.35133e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.77511e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.45326e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000234668,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000359445,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000422522,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0157042,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.99892,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0386381,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0533385,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.26591,
'Instruction Fetch Unit/Runtime Dynamic': 0.108463,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0431776,
'L2/Runtime Dynamic': 0.0123726,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.89103,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.331189,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0211555,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0211555,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.99093,
'Load Store Unit/Runtime Dynamic': 0.456677,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0521659,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.104332,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0185138,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0191604,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0621092,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0063403,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.250022,
'Memory Management Unit/Runtime Dynamic': 0.0255007,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.2635,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0652409,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00316993,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0258901,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.094301,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.67702,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0209712,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.219161,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.128627,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0509425,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0821684,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0414758,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.174587,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0385432,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.11593,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0243004,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00213676,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0226775,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0158026,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.046978,
'Execution Unit/Register Files/Runtime Dynamic': 0.0179394,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0530192,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.151148,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.968212,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.29935e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.29935e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.73035e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.43622e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000227006,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000350296,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000417352,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0151915,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.96631,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0376411,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0515971,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.23172,
'Instruction Fetch Unit/Runtime Dynamic': 0.105197,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0411216,
'L2/Runtime Dynamic': 0.0120555,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.87156,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.321777,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0205255,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0205255,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.96848,
'Load Store Unit/Runtime Dynamic': 0.443528,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0506125,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.101225,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0179625,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0185781,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0600816,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00617672,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.247047,
'Memory Management Unit/Runtime Dynamic': 0.0247549,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.1938,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0639232,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00307632,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0250125,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0920121,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.64576,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 8.52743213010311,
'Runtime Dynamic': 8.52743213010311,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.453471,
'Runtime Dynamic': 0.173055,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 60.3713,
'Peak Power': 93.4836,
'Runtime Dynamic': 9.45853,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 59.9179,
'Total Cores/Runtime Dynamic': 9.28548,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.453471,
'Total L3s/Runtime Dynamic': 0.173055,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 1.585938
| 2
|
mail.py
|
robDaglio/dev_mail
| 0
|
12776449
|
#!/usr/bin/env python
import os
import smtplib

from email import encoders
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
class Simple:
    """Plain-text email sent over an implicit-TLS SMTP connection."""

    def __init__(self, sender, recipient, password, subject, text):
        self.sender = sender
        self.password = password
        self.recipient = recipient
        self.subject = subject
        self.text = text

    def send_mail(self):
        # Minimal RFC 5322 message: a Subject header, a blank line, the body.
        message = 'Subject: {}\n\n{}'.format(self.subject, self.text)
        # Port 465 is implicit TLS; the context manager closes the connection.
        with smtplib.SMTP_SSL('smtp.gmail.com', 465) as server:
            server.ehlo()
            server.login(self.sender, self.password)
            server.sendmail(self.sender, self.recipient, message)

class Html_Message(Simple):
    """Email with both a plain-text and an HTML body (multipart/alternative)."""

    def __init__(self, sender, recipient, password, subject, text, html):
        Simple.__init__(self, sender, recipient, password, subject, text)
        self.html = html

    def send_mail(self):
        SMTP_INFO = {
            'host': 'smtp.gmail.com',
            'port': 587,
            'username': self.sender,
            'password': self.password
        }
        SENDER_NAME = 'SYSTEM'
        message = MIMEMultipart('alternative')
        message['From'] = f"{SENDER_NAME} <{SMTP_INFO['username']}>"
        message['To'] = self.recipient
        message['Subject'] = self.subject
        # Attach the plain-text body first: clients render the last
        # 'alternative' part they support, so HTML wins when available.
        message.attach(MIMEText(self.text, 'plain'))
        message.attach(MIMEText(self.html, 'html'))
        # Open a plaintext connection, upgrade it to TLS, log in, and send.
        with smtplib.SMTP(SMTP_INFO['host'], SMTP_INFO['port']) as smtp:
            smtp.starttls()
            smtp.login(SMTP_INFO['username'], SMTP_INFO['password'])
            smtp.send_message(message)

class Attachment(Simple):
    """Plain-text email with one or more file attachments."""

    def __init__(self, sender, recipient, password, subject, text, attachments):
        # Attachments are passed as a list of file paths.
        Simple.__init__(self, sender, recipient, password, subject, text)
        self.attachments = attachments

    def send_mail(self):
        SMTP_INFO = {
            'host': 'smtp.gmail.com',
            'port': 587,
            'username': self.sender,
            'password': self.password
        }
        SENDER_NAME = 'SYSTEM'
        # Default subtype 'mixed': one body part plus attachment parts.
        message = MIMEMultipart()
        message['From'] = f"{SENDER_NAME} <{SMTP_INFO['username']}>"
        message['To'] = self.recipient
        message['Subject'] = self.subject
        message.attach(MIMEText(self.text, 'plain'))
        for f in self.attachments:
            # Read each file in binary mode and send it as
            # application/octet-stream, which clients treat as a download.
            with open(f, 'rb') as attached_file:
                part = MIMEBase('application', 'octet-stream')
                part.set_payload(attached_file.read())
            # Base64-encode the binary payload so it survives 7-bit transports.
            encoders.encode_base64(part)
            part.add_header(
                'Content-Disposition',
                f'attachment; filename={os.path.basename(f)}',
            )
            message.attach(part)
        # Open a plaintext connection, upgrade it to TLS, log in, and send.
        with smtplib.SMTP(SMTP_INFO['host'], SMTP_INFO['port']) as smtp:
            smtp.starttls()
            smtp.login(SMTP_INFO['username'], SMTP_INFO['password'])
            smtp.send_message(message)

class Fancy(Simple):
    """Email with plain-text and HTML bodies plus file attachments."""

    def __init__(self, sender, recipient, password, subject, text, attachments, html):
        # Attachments are passed as a list of file paths.
        Simple.__init__(self, sender, recipient, password, subject, text)
        self.attachments = attachments
        self.html = html

    def send_mail(self):
        SMTP_INFO = {
            'host': 'smtp.gmail.com',
            'port': 587,
            'username': self.sender,
            'password': self.password
        }
        SENDER_NAME = 'SYSTEM'
        # Attachments need a 'mixed' container; the text/html pair lives in a
        # nested 'alternative' part so clients pick one body and keep the files.
        message = MIMEMultipart('mixed')
        message['From'] = f"{SENDER_NAME} <{SMTP_INFO['username']}>"
        message['To'] = self.recipient
        message['Subject'] = self.subject
        body = MIMEMultipart('alternative')
        body.attach(MIMEText(self.text, 'plain'))
        body.attach(MIMEText(self.html, 'html'))
        message.attach(body)
        for f in self.attachments:
            # Read each file in binary mode and send it as
            # application/octet-stream, which clients treat as a download.
            with open(f, 'rb') as attached_file:
                part = MIMEBase('application', 'octet-stream')
                part.set_payload(attached_file.read())
            # Base64-encode the binary payload so it survives 7-bit transports.
            encoders.encode_base64(part)
            part.add_header(
                'Content-Disposition',
                f'attachment; filename={os.path.basename(f)}',
            )
            message.attach(part)
        # Open a plaintext connection, upgrade it to TLS, log in, and send.
        with smtplib.SMTP(SMTP_INFO['host'], SMTP_INFO['port']) as smtp:
            smtp.starttls()
            smtp.login(SMTP_INFO['username'], SMTP_INFO['password'])
            smtp.send_message(message)
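

# A minimal usage sketch (not part of the original module): the addresses,
# password, and file name below are placeholders, and actually sending
# requires valid Gmail credentials with SMTP access enabled.
if __name__ == '__main__':
    mail = Fancy(
        sender='sender@example.com',
        recipient='recipient@example.com',
        password='app-password-here',
        subject='Report',
        text='See the attached report.',
        attachments=['report.txt'],
        html='<p>See the attached <b>report</b>.</p>',
    )
    mail.send_mail()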
| 3.21875
| 3
|
rollbar/examples/flask/app.py
|
arthurio/pyrollbar
| 177
|
12776450
|
# NOTE: pyrollbar requires both `Flask` and `blinker` packages to be installed first
from flask import Flask
from flask import got_request_exception

import rollbar
import rollbar.contrib.flask

app = Flask(__name__)


@app.before_first_request
def init_rollbar():
    rollbar.init('ACCESS_TOKEN', environment='development')
    # send exceptions from `app` to rollbar, using flask's signal system.
    got_request_exception.connect(rollbar.contrib.flask.report_exception, app)


@app.route('/')
def root():
    # `foo` is intentionally undefined: requesting '/' raises a NameError,
    # which the signal handler above reports to Rollbar.
    foo()
    return '<html><body>Hello World</body></html>'


if __name__ == '__main__':
    app.run()
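
# A quick way to exercise the example locally (a sketch, not from the source):
# replace 'ACCESS_TOKEN' with a real Rollbar project token, then run
#   $ python app.py
#   $ curl http://127.0.0.1:5000/
# The request fails with a NameError, and the handler reports it to Rollbar.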
| 2.359375
| 2
|