content stringlengths 5 1.05M |
|---|
import numpy as np
import json
import AllData as ad
def get_speed_data(data_d, speed, ordered_params):
    """Extract parameter values and per-trial speeds for one target speed.

    Args:
        data_d: dict keyed by speed-as-string; each entry holds an
            "out_norm" dict mapping parameter key -> list of trial values,
            plus a "speed" list of per-trial speed records (None for a
            missing trial).
        speed: target speed; converted with str() to index ``data_d``.
        ordered_params: parameter keys, in the order values are stacked.

    Returns:
        (vals, speeds): ``np.ndarray`` of parameter values (one row per
        parameter) and ``np.ndarray`` of the first speed sample of each
        trial (None kept where the trial is missing).
    """
    # The JSON dict is keyed by the string form of the speed.
    key = str(speed)
    assert key in data_d
    speed_data = data_d[key]["out_norm"]
    # Removed unused locals from the original (num_params, num_trials).
    vals = [speed_data[p] for p in ordered_params]
    # Keep only the first speed sample per trial; preserve None placeholders.
    speeds = [sp[0] if sp is not None else None for sp in speed_data["speed"]]
    return np.asarray(vals), np.asarray(speeds)
if __name__ == "__main__":
    # data stored in dictionary of format:
    # d["<speed>"][file - "OptiResults" or "out_norm"]["<var#>" (or metrics)] = [5 trials] or metric value;
    data = dict()
    with open("dict.txt", "r") as f:
        data = json.load(f)
    print("Read in most recent 'dict.txt' successfully!")
    # Parameter meta data: (LaTeX label, unit) per optimized parameter.
    param_meta = [(r'$\tau$', "[s]"),
                  (r'$\theta_{trunk}$', "[rad]"),
                  (r'$\theta_{hip}$', "[rad]"),
                  (r'$k_{HFL_1}$', "[.]"),
                  (r'$k_{HFL_2}$', "[.]"),
                  (r'$k_{HAM}$', "[.]"),
                  (r'$G_{sol}$', "[.]"),
                  (r'$G_{sol_{ta}}$', "[.]"),
                  (r'$G_{gas}$', "[.]"),
                  (r'$G_{vas}$', "[.]"),
                  (r'$k_\theta$', "[rad]")]
    metrics = ["energy", "speed", "period", "length", "one leg stance phase",
               "double support", "tot flight"]
    # Ordered parameters (keys of data) - regular params's keys are 0-indexed
    ord_params = [str(i) for i in range(len(param_meta))]
    # names of the variables
    names = ['tau', 'theta_trunk', 'theta_hip', 'k_HFL1', 'k_HFL2',
             'k_HAM', 'G_sol', 'G_sol_ta', 'G_gas', 'G_vas', 'k_theta']
    # plot factors for unit conversions
    plot_fac = [1.0] * len(names)
    # labels for plot
    labels = param_meta
    # approximation order
    approx_order = [1] * len(names)
    # parameter bounds (min, max) in the same order as ``names``
    bounds = np.array([
        [0.010, 0.15],   # tau
        [0.0, 0.25],     # theta_trunk
        [0.005, 0.9],    # theta_hip
        [0.3, 8.1],      # k_HFLrun1
        [1.0, 12.0],     # k_HFLrun2
        [0.3, 7.0],      # k_HAMrun
        [0.60, 5.0],     # G_sol
        [0.4, 7.0],      # G_sol_ta
        [0.0, 20.0],     # G_gas
        [0.82, 5.0],     # G_vas
        [1.0, 15.0]      # k_theta
    ])
    # xlim values
    plot_xlim = np.array([1.2, 1.8])
    # ylim values - TODO: not used
    plot_ylim = np.array([
        [1.4, 2.2],
        [3.0, 6.0],
        [1.0, 3.0],
        [0.03, 0.13],
        [1.0, 4.5],
        [0.4, 1.0],
        [0.0, 0.12],
        [6.0, 16.0],
        [2.1, 3.1],
        [7.0, 11.0],
        [73.0, 92.0]
    ])
    # reference speed
    x_star = 1.6
    # p-value threshold
    p_thres = 0.05
    # output folder
    output_folder = '~/BIOROB/coman_matthew_new/workR/Optis_2.0/more'
    all_data = ad.AllData(names, plot_fac, labels, approx_order, bounds,
                          x_star, p_thres, plot_xlim, plot_ylim, output_folder)
    data_130, speed_130 = get_speed_data(data, 1.3, ord_params)
    # Fixed: the original used python2 ``print data_130`` statements, which
    # are a SyntaxError under python3 (the rest of this file uses print()).
    print(data_130)
    print(speed_130)
    data_135, speed_135 = get_speed_data(data, 1.35, ord_params)
    data_140, speed_140 = get_speed_data(data, 1.4, ord_params)
    data_145, speed_145 = get_speed_data(data, 1.45, ord_params)
    data_150, speed_150 = get_speed_data(data, 1.5, ord_params)
    data_155, speed_155 = get_speed_data(data, 1.55, ord_params)
    data_160, speed_160 = get_speed_data(data, 1.6, ord_params)
    data_165, speed_165 = get_speed_data(data, 1.65, ord_params)
    data_170, speed_170 = get_speed_data(data, 1.7, ord_params)
    # add data
    all_data.add_data(data_130, speed_130, 1.30)
    all_data.add_data(data_135, speed_135, 1.35)
    all_data.add_data(data_140, speed_140, 1.40)
    all_data.add_data(data_145, speed_145, 1.45)
    all_data.add_data(data_150, speed_150, 1.50)
    all_data.add_data(data_155, speed_155, 1.55)
    all_data.add_data(data_160, speed_160, 1.60)
    all_data.add_data(data_165, speed_165, 1.65)
    all_data.add_data(data_170, speed_170, 1.70)
    # option to save the graphs
    flag_save = 0
    all_data.flag_save = flag_save
    # polynomial order
    if not flag_save:
        print('')
    all_data.lack_of_fit_012('tau')
    all_data.lack_of_fit_012('theta_trunk')
    all_data.lack_of_fit_012('theta_hip')
    all_data.lack_of_fit_012('k_HFL1')
    all_data.lack_of_fit_012('k_HFL2')
    all_data.lack_of_fit_012('k_HAM')
    all_data.lack_of_fit_012('G_sol')
    all_data.lack_of_fit_012('G_sol_ta')
    all_data.lack_of_fit_012('G_gas')
    all_data.lack_of_fit_012('G_vas')
    all_data.lack_of_fit_012('k_theta')
|
""" inherits common methods """
from .base import BaseModel
class Categorymodel(BaseModel):
    """In-memory store for categories and their products (inherits common
    behavior from BaseModel)."""

    def __init__(self):
        # Flat lists backing the accessors below.
        self.category = []
        self.products = []

    def get_categories(self):
        """Return the list of all stored categories."""
        return self.category

    def add_category(self, data):
        """Append one category entry (returns None, like list.append)."""
        return self.category.append(data)

    def add_products(self, data):
        """Append one product entry."""
        self.products.append(data)

    def get_product_in_cat(self):
        """Return the list of all stored products."""
        return self.products

    def drop(self):
        """Empty both stores in place; used by tests to reset state."""
        del self.category[:]
        del self.products[:]


# Module-level singleton shared by the application.
categoryModel = Categorymodel()
|
# Original intcode program (Advent of Code day 2); positions 1 and 2 are the
# "noun"/"verb" inputs that main() overwrites before each run.
initcode_orig =[1,12,2,3,1,1,2,3,1,3,4,3,1,5,0,3,2,9,1,19,1,19,6,23,2,6,23,27,2,27,9,31,1,5,31,35,1,35,10,39,2,39,9,43,1,5,43,
                47,2,47,10,51,1,51,6,55,1,5,55,59,2,6,59,63,2,63,6,67,1,5,67,71,1,71,9,75,2,75,10,79,1,79,5,83,1,10,83,87,1,
                5,87,91,2,13,91,95,1,95,10,99,2,99,13,103,1,103,5,107,1,107,13,111,2,111,9,115,1,6,115,119,2,119,6,123,1,123,
                6,127,1,127,9,131,1,6,131,135,1,135,2,139,1,139,10,0,99,2,0,14,0]
# Working copy, reset from initcode_orig before each grid-search run.
initcode = []
def decode(count):
    """Execute one intcode instruction starting at index ``count``.

    Returns 0 after a successful add/multiply, 1 on halt (opcode 99), and
    the current position for any unknown opcode (error sentinel).
    """
    global initcode
    op = initcode[count]
    if op == 99:
        return 1
    if op in (1, 2):
        # Operands are position-mode: the three cells after the opcode hold
        # the addresses of the two sources and the destination.
        lhs = initcode[initcode[count + 1]]
        rhs = initcode[initcode[count + 2]]
        dest = initcode[count + 3]
        initcode[dest] = lhs + rhs if op == 1 else lhs * rhs
        return 0
    # NOTE(review): returning ``count`` as the error sentinel collides with
    # the 0/1 status codes when count is 0 or 1 — kept to preserve behavior.
    return count
def loop():
    """Run the intcode program from position 0 until it halts or errors."""
    pc = 0
    while True:
        try:
            status = decode(pc)
        except IndexError:
            # Instruction or operand ran off the end of the program.
            print("Index out of range at: " + str(pc))
            return
        if status == 1:
            print("System Halt")
            return
        if status != 0:
            # Unknown opcode reported by decode().
            print("1202 program alarm\n Got unexpected OP-code: " + str(status))
            print("Exit on: " + str(pc))
            return
        pc += 4
def main():
    """Search all (noun, verb) inputs in 0..100 for output 19690720."""
    global initcode
    for noun in range(101):
        for verb in range(101):
            # Fresh program copy for every attempt.
            initcode = initcode_orig.copy()
            initcode[1] = noun
            initcode[2] = verb
            loop()
            if initcode[0] == 19690720:
                print((100 * noun) + verb)
                return


main()
|
import pygame
class Component:
    """A rectangular UI element that can report clicks and its bounds."""

    def __init__(self, position, size):
        # Top-left corner and (width, height).
        self.position = position
        self.size = size

    def is_clicked(self, event):
        """Return True if ``event`` is a mouse-button release inside this
        component's rectangle (borders excluded)."""
        mouse = pygame.mouse.get_pos()
        if event.type != pygame.MOUSEBUTTONUP:
            return False
        left, top = self.position[0], self.position[1]
        inside_x = left < mouse[0] < left + self.size[0]
        inside_y = top < mouse[1] < top + self.size[1]
        return bool(inside_x and inside_y)

    def get_dimensions(self):
        """Return position concatenated with size, e.g. (x, y, w, h)."""
        return self.position + self.size
|
#!/usr/bin/env python3
# Script to remove files older than three months
# Import serious stuff
import os
import shutil
import argparse
from datetime import datetime
import pdb
# Generate list of files to be deleted
def generate_list(start_path, oldfile_age):
    """Walk ``start_path`` and collect paths of files whose age in whole
    days (by modification time) exceeds ``oldfile_age``."""
    now = datetime.now()
    stale = []
    for root, _dirs, filenames in os.walk(start_path):
        for name in filenames:
            path = os.path.join(root, name)
            modified = datetime.fromtimestamp(os.path.getmtime(path))
            # Strictly greater-than: a file exactly ``oldfile_age`` days old
            # is kept.
            if (now - modified).days > oldfile_age:
                stale.append(path)
    return stale
# Remove files if --remove-files is passed
def remove_files(
        files_to_remove,
        remove_files,
        disk_usage_limit,
        disk_usage,
        oldfile_age):
    """Delete ``files_to_remove`` when removal is enabled and disk usage is
    over the limit; otherwise only report.

    Args:
        files_to_remove: list of file paths (from generate_list).
        remove_files: bool flag enabling actual deletion.
        disk_usage_limit: percentage threshold for deletion.
        disk_usage: current disk usage percentage.
        oldfile_age: age threshold in days (used only for the report text).
    """
    # Delete older files only if disk usage > disk_usage_limit %
    if disk_usage > disk_usage_limit and remove_files:
        for file_path in files_to_remove:
            try:
                print("[Info]: Removing", file_path)
                os.remove(file_path)
            # Narrowed from a bare ``except``: only trap filesystem errors so
            # real bugs (and KeyboardInterrupt) are not silently swallowed.
            except OSError:
                print("[Warning]:", file_path, "could not be removed!")
    elif disk_usage < disk_usage_limit and remove_files:
        # NOTE(review): disk_usage exactly equal to the limit falls through
        # to the listing branch below — confirm that is intended.
        print("Disk usage below", str(int(disk_usage)) + "%")
        print("No files will be deleted...")
    elif files_to_remove:
        print("No files will be deleted...")
        print("Printing files older than", oldfile_age, "day(s)")
        for file_path in files_to_remove:
            print(file_path)
# The main function
def main(parser):
    """Parse CLI args, compute disk usage for the target path, and run the
    listing/removal pass."""
    # Parse the arguments
    arguments = parser.parse_args()
    # Calculate disk usage as a percentage of the filesystem holding ``path``
    disk_usage = shutil.disk_usage(arguments.path)
    disk_usage = (disk_usage.used / disk_usage.total) * 100
    # Find files older than the configured age (default three months)
    files_to_remove = generate_list(arguments.path, arguments.oldfile_age)
    # Do the file removal, for real!
    remove_files(
        files_to_remove,
        arguments.remove_files,
        arguments.disk_usage_limit,
        disk_usage,
        arguments.oldfile_age
    )
if __name__ == "__main__":
    # CLI definition; defaults match the script's stated policy
    # (90 days old, 80% disk usage).
    parser = argparse.ArgumentParser(
        description="Remove files older than 3 months if disk usage > 80%"
    )
    parser.add_argument(
        "--path",
        type=str,
        required=True,
        help="Path where cleanup is to be done"
    )
    parser.add_argument(
        "--remove-files",
        default=False,
        action="store_true",
        help="Enable removal of files"
    )
    parser.add_argument(
        "--oldfile-age",
        type=int,
        default=90,
        help="Files older than oldfile-age will be deleted, default: 90 days"
    )
    parser.add_argument(
        "--disk-usage-limit",
        type=int,
        default=80,
        # "%%": argparse %-formats option help strings, so the original lone
        # "%" raised "ValueError: incomplete format" when --help was rendered.
        help="Removal files only if disk-usage limit is reached,default: 80%%"
    )
    # Call the main function
    main(parser)
|
import unittest
import vimdoc
from vimdoc.block import Block
from vimdoc import error
from vimdoc import module
class TestVimModule(unittest.TestCase):
    """Tests for vimdoc section merging and section-ordering rules."""

    def test_section(self):
        """A single merged section is emitted unchanged."""
        plugin = module.VimPlugin('myplugin')
        main_module = module.Module('myplugin', plugin)
        intro = Block(vimdoc.SECTION)
        intro.Local(name='Introduction', id='intro')
        main_module.Merge(intro)
        main_module.Close()
        self.assertEqual([intro], list(main_module.Chunks()))

    def test_duplicate_section(self):
        """Merging a second section with an already-used id must raise."""
        plugin = module.VimPlugin('myplugin')
        main_module = module.Module('myplugin', plugin)
        intro = Block(vimdoc.SECTION)
        intro.Local(name='Introduction', id='intro')
        main_module.Merge(intro)
        intro2 = Block(vimdoc.SECTION)
        intro2.Local(name='Intro', id='intro')
        with self.assertRaises(error.DuplicateSection) as cm:
            main_module.Merge(intro2)
        self.assertEqual(('Duplicate section intro defined.',), cm.exception.args)

    def test_default_section_ordering(self):
        """Sections should be ordered according to documented built-in ordering."""
        plugin = module.VimPlugin('myplugin')
        main_module = module.Module('myplugin', plugin)
        intro = Block(vimdoc.SECTION)
        intro.Local(name='Introduction', id='intro')
        commands = Block(vimdoc.SECTION)
        commands.Local(name='Commands', id='commands')
        about = Block(vimdoc.SECTION)
        about.Local(name='About', id='about')
        # Merge in arbitrary order.
        main_module.Merge(commands)
        main_module.Merge(about)
        main_module.Merge(intro)
        main_module.Close()
        self.assertEqual([intro, commands, about], list(main_module.Chunks()))

    def test_manual_section_ordering(self):
        """Sections should be ordered according to explicitly configured order."""
        plugin = module.VimPlugin('myplugin')
        main_module = module.Module('myplugin', plugin)
        intro = Block(vimdoc.SECTION)
        intro.Local(name='Introduction', id='intro')
        # Configure explicit order.
        intro.Global(order=['commands', 'about', 'intro'])
        commands = Block(vimdoc.SECTION)
        commands.Local(name='Commands', id='commands')
        about = Block(vimdoc.SECTION)
        about.Local(name='About', id='about')
        # Merge in arbitrary order.
        main_module.Merge(commands)
        main_module.Merge(about)
        main_module.Merge(intro)
        main_module.Close()
        self.assertEqual([commands, about, intro], list(main_module.Chunks()))

    def test_partial_ordering(self):
        """Always respect explicit order and prefer built-in ordering.
        Undeclared built-in sections will be inserted into explicit order according
        to default built-in ordering. The about section should come after custom
        sections unless explicitly ordered."""
        plugin = module.VimPlugin('myplugin')
        main_module = module.Module('myplugin', plugin)
        intro = Block(vimdoc.SECTION)
        intro.Local(name='Introduction', id='intro')
        # Configure explicit order.
        intro.Global(order=['custom1', 'intro', 'custom2'])
        commands = Block(vimdoc.SECTION)
        commands.Local(name='Commands', id='commands')
        about = Block(vimdoc.SECTION)
        about.Local(name='About', id='about')
        custom1 = Block(vimdoc.SECTION)
        custom1.Local(name='Custom1', id='custom1')
        custom2 = Block(vimdoc.SECTION)
        custom2.Local(name='Custom2', id='custom2')
        # Merge in arbitrary order.
        for section in [commands, custom2, about, intro, custom1]:
            main_module.Merge(section)
        main_module.Close()
        self.assertEqual([custom1, intro, commands, custom2, about],
                         list(main_module.Chunks()))
|
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.template import loader
from django.urls import reverse_lazy
@login_required
def index(request):
    """Render the core index page (login required)."""
    context = {
        'var': 'foo',
    }
    template = loader.get_template('core/index.html')
    return HttpResponse(template.render(context, request))
def login_view(request):
    """Authenticate a user from POSTed credentials and start a session.

    GET: render the login form, redirecting users who are already logged in.
    POST: validate the credentials; on success optionally extend the session
    to two weeks ("remember me") and redirect to ``?next=`` or the index.
    Error messages (Spanish) are exposed via ``data_context["error"]``.
    """
    data_context = {
        "error": None,
    }
    valid = False
    if request.method == "POST":
        username = request.POST["username"]
        password = request.POST["password"]
        next_page = request.GET.get('next', None)
        if username == '' or password == '':
            data_context["error"] = 'El Usuario y la Contraseña deben ser ingresados.'
        else:
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    if request.POST.get('remember_me', None):
                        request.session.set_expiry(1209600)  # 2 weeks
                    if next_page:
                        return HttpResponseRedirect(next_page)
                    else:
                        return HttpResponseRedirect(reverse_lazy("index"))
                else:
                    data_context["error"] = 'El Usuario esta deshabilitado.'
            else:
                # Return an 'invalid login' error message.
                data_context["error"] = 'El Usuario o Contraseña es incorrecta.'
    else:
        # Fixed: ``is_authenticated`` is a property since Django 1.10 (this
        # file already imports from ``django.urls``, i.e. 1.10+); calling it
        # raises TypeError on modern Django.
        if request.user.is_authenticated:
            return HttpResponseRedirect(reverse_lazy("index"))
        valid = True
    data_context["valid"] = valid
    return render(request, "login.html", data_context)
def logout_view(request):
    """Terminate the session and send the user back to the login page."""
    login_url = reverse_lazy("login")  # lazy: no resolution side effect here
    logout(request)
    return HttpResponseRedirect(login_url)
|
# pylint:disable=import-outside-toplevel
"""Registry of custom Gym environments."""
import importlib
import gym
from .utils import wrap_if_needed
def filtered_gym_env_ids():
    """
    Return environment ids in Gym registry for which all dependencies are installed.
    """
    specs = set(gym.envs.registry.all())

    def _drop(substring):
        # Discard every spec whose entry point mentions ``substring``.
        specs.difference_update({s for s in specs if substring in s.entry_point})

    if importlib.util.find_spec("atari_py") is None:
        _drop("atari")
    if importlib.util.find_spec("mujoco_py") is None:
        _drop("mujoco")
        _drop("robotics")
    if importlib.util.find_spec("Box2D") is None:
        _drop("box2d")
    return {s.id for s in specs}
# Ids of all currently-registered, importable Gym environments.
IDS = filtered_gym_env_ids()
# kwarg trick from:
# https://github.com/satwikkansal/wtfpython#-the-sticky-output-function
# (the ``i=i`` default binds each id at definition time, avoiding the
# late-binding closure pitfall)
ENVS = {
    i: wrap_if_needed(lambda config, i=i: gym.make(i, **config.get("kwargs", {})))
    for i in IDS
}
def register_external_library_environments(library_name):
    """Conveniency function for adding external environments to the global registry."""
    # Silently skip libraries that are not installed.
    if importlib.util.find_spec(library_name) is None:
        return
    # Importing the library registers its environments with Gym.
    importlib.import_module(library_name)
    new_ids = filtered_gym_env_ids() - IDS
    for name in new_ids:
        @wrap_if_needed
        def _env_maker(config, env_id=name):
            # ``env_id=name`` binds per-iteration (late-binding workaround);
            # re-importing ensures registration inside worker processes.
            importlib.import_module(library_name)
            kwargs = config.get("kwargs", {})
            return gym.make(env_id, **kwargs)
        ENVS[name] = _env_maker
    IDS.update(new_ids)
# Built-in raylab environments. Each maker imports its module lazily so the
# heavy dependencies are only loaded when the env is actually created.
@wrap_if_needed
def _cartpole_stateless_maker(_):
    from raylab.envs.environments.cartpole_stateless import CartPoleStateless
    return CartPoleStateless()


@wrap_if_needed
def _navigation_maker(config):
    from raylab.envs.environments.navigation import NavigationEnv
    return NavigationEnv(config)


@wrap_if_needed
def _reservoir_maker(config):
    from raylab.envs.environments.reservoir import ReservoirEnv
    return ReservoirEnv(config)


@wrap_if_needed
def _hvac_maker(config):
    from raylab.envs.environments.hvac import HVACEnv
    return HVACEnv(config)


ENVS.update(
    {
        "CartPoleStateless": _cartpole_stateless_maker,
        "Navigation": _navigation_maker,
        "Reservoir": _reservoir_maker,
        "HVAC": _hvac_maker,
    }
)
# Optional third-party environment suites (no-ops when not installed).
register_external_library_environments("gym_cartpole_swingup")
register_external_library_environments("gym_industrial")
register_external_library_environments("pybullet_envs")
|
### sorting
class Solution:
    def sortArrayByParity(self, A: "List[int]") -> "List[int]":
        """Return A with all even numbers before all odd ones.

        The annotations are quoted because ``List`` is never imported in
        this file; unquoted, the ``def`` raises NameError at class-creation
        time. ``sorted`` is stable, so relative order within each parity
        class is preserved.
        """
        return sorted(A, key=lambda x: x % 2)
### two pass
class Solution:
    def sortArrayByParity(self, A: "List[int]") -> "List[int]":
        """Two-pass variant: evens (in original order) then odds.

        Annotations are quoted because ``List`` is never imported in this
        file; unquoted, the ``def`` raises NameError at class-creation time.
        """
        return [i for i in A if not i % 2] + [i for i in A if i % 2]
|
import os.path
import random
import ConfigFile
initialized = False
dists = {}
class Distribution:
    """A list of advance probabilities sampled by getCount()."""

    def __init__(self, probs):
        # Clamp each probability into [0, 1].
        self.probabilities = [min(max(p, 0), 1) for p in probs]
        # Guarantee getCount() terminates: if the list is empty, or its final
        # entry is certain (>= 1), append a terminating 0 probability.
        if ((not self.probabilities) or (self.probabilities[-1] >= 1)):
            self.probabilities.append(0)

    def getCount(self):
        """Sample a count: keep advancing while successive draws succeed.

        NOTE(review): once ``count`` runs past the end of the list, ``prob``
        keeps the last list value (< 1 by construction), producing a
        geometric tail — presumably intentional; confirm.
        """
        count = 0
        prob = 0
        while (True):
            if (count < len(self.probabilities)):
                prob = self.probabilities[count]
            if (random.random() < prob):
                count += 1
            else:
                return count
def init():
    """Load the named probability distributions from data/dists.cfg into the
    module-level ``dists`` dict. Idempotent: subsequent calls are no-ops."""
    global initialized
    if initialized:
        return
    configPath = os.path.join(os.path.dirname(__file__), "data", "dists.cfg")
    configDict = ConfigFile.readFile(configPath)
    for distName in configDict.keys():
        # Skip non-string entries; isinstance is the idiomatic check and
        # (unlike the original ``type(...) != type("")``) tolerates str
        # subclasses.
        if not isinstance(configDict[distName], str):
            continue
        # Whitespace-separated floats; skip names with no usable values.
        probs = [float(x) for x in configDict[distName].split() if x]
        if not probs:
            continue
        dists[distName] = Distribution(probs)
    initialized = True
#!/usr/bin/env python3
from wx_explore.common.models import (
Source,
SourceField,
Location,
Timezone,
)
from wx_explore.common import metrics
from wx_explore.common.db_utils import get_or_create
from wx_explore.web.core import db
# Forecast model sources to seed; upserted below via get_or_create.
sources = [
    Source(
        short_name='hrrr',
        name='HRRR 2D Surface Data (Sub-Hourly)',
        src_url='http://www.nco.ncep.noaa.gov/pmb/products/hrrr/',
        last_updated=None,
    ),
    Source(
        short_name='nam',
        name='North American Model',
        src_url='https://www.nco.ncep.noaa.gov/pmb/products/nam/',
        last_updated=None,
    ),
    Source(
        short_name='gfs',
        name='Global Forecast System',
        src_url='https://www.nco.ncep.noaa.gov/pmb/products/gfs/',
        last_updated=None,
    ),
]
# Replace each in-memory Source with its persisted (or pre-existing) row so
# ``src.id`` is available below.
for i, s in enumerate(sources):
    sources[i] = get_or_create(s)
# Extra SourceField kwargs per metric name, splatted into SourceField(...)
# below (index short name/level plus optional message selectors).
metric_meta = {
    '2m Temperature': {
        'idx_short_name': 'TMP',
        'idx_level': '2 m above ground',
        'selectors': {
            'name': '2 metre temperature',
        },
    },
    'Visibility': {
        'idx_short_name': 'VIS',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'vis',
        },
    },
    'Rain': {
        'idx_short_name': 'CRAIN',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'crain',
            'stepType': 'instant',
        },
    },
    'Ice': {
        'idx_short_name': 'CICEP',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'cicep',
            'stepType': 'instant',
        },
    },
    'Freezing Rain': {
        'idx_short_name': 'CFRZR',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'cfrzr',
            'stepType': 'instant',
        },
    },
    'Snow': {
        'idx_short_name': 'CSNOW',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'csnow',
            'stepType': 'instant',
        },
    },
    'Composite Reflectivity': {
        'idx_short_name': 'REFC',
        'idx_level': 'entire atmosphere',
        'selectors': {
            'shortName': 'refc',
        },
    },
    '2m Humidity': {
        'idx_short_name': 'SPFH',
        'idx_level': '2 m above ground',
        'selectors': {
            'name': 'Specific humidity',
            'typeOfLevel': 'heightAboveGround',
            'level': 2,
        },
    },
    'Surface Pressure': {
        'idx_short_name': 'PRES',
        'idx_level': 'surface',
        'selectors': {
            'name': 'Surface pressure',
        },
    },
    '10m Wind U-component': {
        'idx_short_name': 'UGRD',
        'idx_level': '10 m above ground',
    },
    '10m Wind V-component': {
        'idx_short_name': 'VGRD',
        'idx_level': '10 m above ground',
    },
    '10m Wind Speed': {
        'idx_short_name': 'WIND',
        'idx_level': '10 m above ground',
        'selectors': {
            'shortName': 'wind',
            'typeOfLevel': 'heightAboveGround',
            'level': 10,
        },
    },
    '10m Wind Direction': {
        'idx_short_name': 'WDIR',
        'idx_level': '10 m above ground',
        'selectors': {
            'shortName': 'wdir',
            'typeOfLevel': 'heightAboveGround',
            'level': 10,
        },
    },
    'Gust Speed': {
        'idx_short_name': 'GUST',
        'idx_level': 'surface',
        'selectors': {
            'shortName': 'gust',
        },
    },
    'Cloud Cover': {
        'idx_short_name': 'TCDC',
        'idx_level': 'entire atmosphere',
        'selectors': {
            'shortName': 'tcc',
            'typeOfLevel': 'atmosphere',
        },
    },
}
# Create one SourceField per (source, metric) pair using the metadata above.
for src in sources:
    for metric in metrics.ALL_METRICS:
        get_or_create(SourceField(
            source_id=src.id,
            metric_id=metric.id,
            **metric_meta[metric.name],
        ))
# customization
# NAM's cloud-cover field only needs the short-name selector.
nam_cloud_cover = SourceField.query.filter(
    SourceField.source.has(short_name='nam'),
    SourceField.metric == metrics.cloud_cover,
).first()
nam_cloud_cover.selectors = {'shortName': 'tcc'}
db.session.commit()
###
# Locations
###
import csv
from shapely import wkt
from shapely.geometry import Point
locs = []
# Zip-code file: tab-separated; assumes columns 1/2/3 are zip/name/region and
# 9/10 are lat/lon — TODO confirm against the data source.
with open("data/zipcodes/US.txt", encoding="utf8") as f:
    rd = csv.reader(f, delimiter='\t', quotechar='"')
    for row in rd:
        # Skip rows without a region name.
        if not row[3]:
            continue
        name = row[2] + ', ' + row[3] + ' (' + row[1] + ')'
        lat = float(row[9])
        lon = float(row[10])
        locs.append(Location(
            name=name,
            location=wkt.dumps(Point(lon, lat)),
        ))
# World-cities file: comma-separated; assumes columns 0/7 are city/country
# and 2/3 are lat/lon, 9 is population — TODO confirm.
with open("data/cities/worldcities.csv", encoding="utf8") as f:
    f.readline()  # skip header line
    rd = csv.reader(f)
    for row in rd:
        name = row[0] + ', ' + row[7]
        lat = float(row[2])
        lon = float(row[3])
        population = None
        if row[9]:
            # Population may be written as a float string (e.g. "12345.0").
            population = int(float(row[9]))
        locs.append(Location(
            name=name,
            location=wkt.dumps(Point(lon, lat)),
            population=population,
        ))
db.session.add_all(locs)
db.session.commit()
###
# Timezones
###
import geoalchemy2
import os
import osgeo.ogr
import requests
import shutil
import tempfile
import zipfile
# Download the timezone-boundary shapefile release, extract it to a temp
# dir, and load every timezone polygon into the Timezone table.
with tempfile.TemporaryDirectory() as tmpdir:
    with tempfile.TemporaryFile() as tmpf:
        with requests.get('https://github.com/evansiroky/timezone-boundary-builder/releases/download/2020a/timezones-with-oceans.shapefile.zip', stream=True) as resp:
            shutil.copyfileobj(resp.raw, tmpf)
        with zipfile.ZipFile(tmpf) as z:
            z.extractall(tmpdir)
    # The archive contains a 'dist' directory holding the shapefile.
    shapefile = osgeo.ogr.Open(os.path.join(tmpdir, 'dist'))
    layer = shapefile.GetLayer(0)
    tzs = []
    for feature in (layer.GetFeature(i) for i in range(layer.GetFeatureCount())):
        tzs.append(Timezone(
            name=feature.GetField("tzid"),
            # Promote to MULTI* so mixed polygon/multipolygon rows share a type.
            geom=geoalchemy2.functions.ST_Multi(feature.GetGeometryRef().ExportToWkt()),
        ))
    db.session.add_all(tzs)
    db.session.commit()
|
import argparse
import sys
from configparser import ConfigParser
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.session import Session
class OracleDBManager(object):
    """Lazily-connected SQLAlchemy wrapper around one connection string."""

    def __init__(
        self,
        connection_string: str
    ) -> None:
        self.connection_string = connection_string
        # Engine/metadata/session factory are created on first connect().
        self.engine = None
        self.meta = None
        self.session_factory = None
        self.connect()

    def connect(self, debug=False) -> None:
        """Create the engine, metadata, and session factory once; later
        calls are no-ops. ``debug=True`` enables SQL echo (first call only)."""
        if self.engine is None:
            if not debug:
                self.engine = create_engine(
                    self.connection_string,
                    echo=False
                )
            else:
                self.engine = create_engine(
                    self.connection_string,
                    echo=True
                )
        if self.meta is None:
            self.meta = MetaData()
            self.meta.bind = self.engine
        if self.session_factory is None:
            self.session_factory = sessionmaker(bind=self.engine)

    def create_session(self) -> Session:
        """Return a new ORM session (connecting first if needed)."""
        self.connect()
        return self.session_factory()

    def reverse_table(self, table_name: str) -> Table:
        """Reflect ``table_name`` from the database, cached in ``self.meta``.

        NOTE(review): ``autoload=True`` and ``MetaData.bind`` are removed in
        SQLAlchemy 1.4/2.0 (use ``autoload_with=engine``) — confirm the
        pinned SQLAlchemy version.
        """
        self.connect()
        if table_name in self.meta.tables.keys():
            return self.meta.tables[table_name]
        return Table(table_name, self.meta, autoload=True)
def create_HBase_tables(table, conn='sqlite:///HBase.db'):
    """Build HBase ``create table`` DDL statements, one per row of the
    entity metadata table ``table`` reached via connection string ``conn``."""
    manager = OracleDBManager(conn)
    entity_table = manager.reverse_table(table)
    session = manager.create_session()
    rows = session.query(entity_table).all()
    session.close()
    statements = []
    for row in rows:
        fields = row._asdict()
        statements.append(
            "create table '{}', 'details'".format(fields['Entity_Name'])
        )
    return statements
def create_HBase_data(table, conn='sqlite:///HBase.db'):
    """Build HBase ``put`` statements from the attribute metadata table
    ``table`` (ordered by attribute number) via connection string ``conn``."""
    manager = OracleDBManager(conn)
    attr_table = manager.reverse_table(table)
    session = manager.create_session()
    rows = session.query(attr_table).order_by('Attr_Nr').all()
    session.close()
    statements = []
    for row in rows:
        fields = row._asdict()
        statements.append(
            "put '{}', '1', 'details:{}', '1'".format(
                fields['Table_Name'], fields['Attr_Name'])
        )
    return statements
# CLI: pick exactly one target store (HBase or Cassandra) and generate DDL
# using table/connection settings from config.ini.
config = ConfigParser()
config.read('config.ini')
parser = argparse.ArgumentParser(description='Generate DDL to load data from any RDBMS to HBase or Cassandra')
parser.add_argument('--hbase', dest='hbase', action='store_true',
                    default=False,
                    help='Generate DDL for HBase')
parser.add_argument('--cassandra', dest='cassandra', action='store_true',
                    default=False,
                    help='Generate DDL for Cassandra')
# parser.add_argument('--debug', dest='debug', action='store_true',
#                     default=False,
#                     help='Show verbose information')
args = vars(parser.parse_args())
# DEBUG = config['DEFAULT']['DEBUG']
if len(sys.argv) == 1:
    print("Error: no options passed")
    parser.print_help()
elif args['cassandra'] and args['hbase']:
    print("Error: Only one of HBase or Cassandra is allowed at a time")
    parser.print_help()
elif args['hbase']:
    # NOTE(review): ``result`` is overwritten by the second call and never
    # printed or written anywhere — confirm the intended output handling.
    result = create_HBase_tables(
        config['TABLES']['ENTITY'],
        config['DATABASE']['CONNECTION_STRING']
    )
    result = create_HBase_data(
        config['TABLES']['ATTRIBUTES'],
        config['DATABASE']['CONNECTION_STRING']
    )
elif args['cassandra']:
    # Cassandra generation not implemented yet.
    pass
|
from time import sleep
from selenium import webdriver
from subprocess import Popen
URL = 'http://www.specialeffect.org.uk/gameblast'
def write_to_text_file(file_name='', amount_to_write=''):
    """Overwrite ``file_name`` with the string ``amount_to_write``.

    The ``with`` block closes the file on exit; the original's explicit
    ``file.close()`` inside the block was redundant (and shadowed the
    ``file`` builtin name on py2).
    """
    print('Writing to text file: {}'.format(file_name))
    with open(file_name, 'w') as out_file:
        out_file.write(amount_to_write)
if __name__ == '__main__':
    # Poll the fundraising page and push the donation total to a text file
    # (uploaded via send_file.bat) whenever it changes.
    # NOTE(review): ``find_element_by_class_name`` was removed in Selenium 4
    # (use ``find_element(By.CLASS_NAME, ...)``) — confirm the pinned version.
    print('Opening Firefox')
    driver = webdriver.Firefox()
    driver.get(URL)
    previous_donation_amount = driver.find_element_by_class_name('FundsRaised__total').text
    print('Caching the current amount: {}'.format(previous_donation_amount))
    write_to_text_file('gameblast.txt', previous_donation_amount)
    # Upload the file by using the batch file, even when it starts to update after it stops for a while
    Popen('send_file.bat')
    while True:
        print('Accessing: {}'.format(URL))
        driver.get(URL)
        print('Waiting for page to load')
        sleep(5)  # Wait for the page to load fully
        # Wrong title means the page failed to load; restart the browser.
        if not driver.title == 'gameblast16':
            print('Cycling browser')
            driver.close()
            sleep(60)
            driver = webdriver.Firefox()
            continue
        donation_amount = driver.find_element_by_class_name('FundsRaised__total').text
        print('Current Donation amount: {}'.format(donation_amount))
        if not donation_amount == previous_donation_amount:
            write_to_text_file('gameblast.txt', donation_amount)
            print('Uploading file')
            # Upload the file by using the batch file
            Popen('send_file.bat')
            previous_donation_amount = donation_amount
        else:
            print('Amount unchanged, not writing to file')
        print('Holding for next cycle')
        sleep(50)
|
#
# @lc app=leetcode id=1365 lang=python3
#
# [1365] How Many Numbers Are Smaller Than the Current Number
#
# @lc code=start
class Solution:
    def smallerNumbersThanCurrent(self, nums: "List[int]") -> "List[int]":
        """For each element, count how many elements are strictly smaller.

        The original was O(n^2) (a fresh double scan per element) and used
        the un-imported ``List`` name (NameError at class-creation time);
        this version sorts once (O(n log n)) and quotes the annotations so
        they are not evaluated.
        """
        first_rank = {}
        for rank, value in enumerate(sorted(nums)):
            # A value's first index in sorted order equals the number of
            # elements strictly smaller than it (duplicates share a rank).
            first_rank.setdefault(value, rank)
        return [first_rank[value] for value in nums]
# @lc code=end
|
#!/usr/bin/env python
import codecs
import csv
import datetime
import getpass
import optparse
import pymysql
import sys
# Command-line options for the loader (python2-era optparse).
parser = optparse.OptionParser(
    './load_antolin_db.py [-d <dbname>] [-u user] [-p password] csv-file')
parser.add_option('-d', '--database', dest='database', default='spils',
                  help='The database to store the data in.')
parser.add_option('-u', '--username', dest='username', default='gssb',
                  help='The username for the database.')
parser.add_option('-p', '--password', dest='password',
                  help='The password for the database.')
# Parameterized INSERT for one antolin book row (11 columns).
insert_sql = (
    u"INSERT INTO antolin "
    u"(author, title, publisher, isbn10, isbn10_formatted, isbn13, "
    u"isbn13_formatted, book_id, available_since, grade, num_read) "
    u"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
def main(argv=sys.argv[1:]):
    """Load the antolin CSV (semicolon-separated) into MySQL, skipping bad
    rows and duplicate ISBN-13s. (Python 2 code: print statements,
    ``reader.next()``, bytes ``.decode('utf-8')``.)"""
    # NOTE(review): the default argument is evaluated once at import time;
    # harmless here because it is only read.
    opts, args = parser.parse_args(argv)
    if not opts.password:
        opts.password = getpass.getpass()
    conn = pymysql.connect(
        user=opts.username,
        password=opts.password,
        database=opts.database,
        charset='utf8')
    cur = conn.cursor()
    filename = args[0]
    print "reading antolin CSV file", filename
    csv_io = open(filename, 'r')
    reader = csv.reader(csv_io, delimiter=";", quotechar='"')
    # Drop header row.
    reader.next()
    seen = []
    duplicates = 0
    for idx, row in enumerate(reader):
        # Progress dot every 100 rows.
        if idx % 100 == 0:
            sys.stdout.write('.')
            sys.stdout.flush()
        (author, title, publisher, isbn10, since, grade, read,
         isbn13, isbn10f, isbn13f, bookid) = row
        # ``since`` is a dd.mm.yyyy date.
        day, month, year = [int(p) for p in since.split('.')]
        # Some entries are bad.
        if len(isbn13) != 13:
            continue
        if len(isbn13f) != 17:
            isbn13f = None
        if len(isbn10) != 10:
            isbn10 = None
        if len(isbn10f) != 13:
            isbn10f = None
        # NOTE(review): ``seen`` is a list, so this membership test is O(n)
        # per row — a set would be faster for large files.
        if isbn13 in seen:
            duplicates += 1
            continue
        seen.append(isbn13)
        try:
            cur.execute(
                insert_sql,
                (author.decode('utf-8'),
                 title.decode('utf-8'),
                 publisher.decode('utf-8'),
                 isbn10, isbn10f, isbn13, isbn13f, bookid,
                 datetime.date(year, month, day),
                 grade, read
                )
            )
        except Exception as err:
            # NOTE(review): debugger left in the error path — any insert
            # failure drops into an interactive pdb session.
            import pdb; pdb.set_trace()
    conn.commit()
    print
    print u'Books loaded:', len(seen)
    print u'Duplicates:', duplicates


if __name__ == '__main__':
    main()
|
import torch
from torch_geometric.nn import ARMAConv
def test_arma_conv():
    """Smoke-test ARMAConv: repr, output shapes, optional edge weights, and
    the shared_weights variant."""
    in_channels, out_channels = (16, 32)
    num_stacks, num_layers = 8, 4
    # Small undirected star graph: node 0 connected to nodes 1-3.
    edge_index = torch.tensor([[0, 0, 0, 1, 2, 3], [1, 2, 3, 0, 0, 0]])
    num_nodes = edge_index.max().item() + 1
    edge_weight = torch.rand(edge_index.size(1))
    x = torch.randn((num_nodes, in_channels))
    conv = ARMAConv(
        in_channels, out_channels, num_stacks, num_layers, dropout=0.25)
    assert conv.__repr__() == 'ARMAConv(16, 32, num_stacks=8, num_layers=4)'
    # Output shape must be (nodes, out_channels) with or without weights.
    assert conv(x, edge_index).size() == (num_nodes, out_channels)
    assert conv(x, edge_index, edge_weight).size() == (num_nodes, out_channels)
    conv = ARMAConv(
        in_channels, out_channels, num_stacks, num_layers, shared_weights=True)
    assert conv(x, edge_index).size() == (num_nodes, out_channels)
    assert conv(x, edge_index, edge_weight).size() == (num_nodes, out_channels)
|
import unittest
from unittest.mock import MagicMock
import shutil
import glob
import os
import os.path
import logging
from PIL import Image
from Psd2Png import Psd2Png
class TestPsd2PngBase(unittest.TestCase):
    # Before every test, delete any folders generated by previous runs.
    # (translated from the original Japanese comment)
    def setUp(self):
        self.logger = logging.getLogger("TEST")
        for rmPath in glob.glob("tests/testPsd/*"):
            if os.path.isdir(rmPath):
                shutil.rmtree(rmPath)
class TestInputError(TestPsd2PngBase):
    """Constructor must both log an ERROR (Japanese message) and raise on
    bad input files."""

    def test_PsdFileIsNotFound(self):
        # Missing file: expect FileNotFoundError plus a "file not found" log.
        with self.assertLogs("TEST",level=logging.ERROR) as cm:
            with self.assertRaises(FileNotFoundError):
                Psd2Png(os.path.join("tests","testPsd","NotExist.psd"),self.logger)
        self.assertIn("ERROR:TEST:指定されたファイルが見つかりません\n"+os.path.join("tests","testPsd","NotExist.psd"),cm.output)

    def test_FileIsNotPsd(self):
        # Non-psd content behind a .psd name: expect OSError plus a log.
        with self.assertLogs("TEST",level=logging.ERROR) as cm:
            with self.assertRaises(OSError):
                Psd2Png(os.path.join("tests","testPsd","PngFile.psd"),self.logger)
        self.assertIn("ERROR:TEST:指定されたファイルはpsdではありません\n"+os.path.join("tests","testPsd","PngFile.psd"),cm.output)
class TestMakeOutputPaths(TestPsd2PngBase):
    """SetOutputPaths must derive one output path per layer, handling
    encodings, missing names, duplicates, invalid characters, and groups."""

    def testSimpleMultiLayer(self):
        p2p=Psd2Png(os.path.join("tests","testPsd","SimpleMultiLayer.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","SimpleMultiLayer","red"),
                                          os.path.join("tests","testPsd","SimpleMultiLayer","blue"),
                                          os.path.join("tests","testPsd","SimpleMultiLayer","green"),
                                          os.path.join("tests","testPsd","SimpleMultiLayer","alpha")])

    def testLayerNamecp932(self):
        # cp932-encoded layer name must survive as-is.
        p2p=Psd2Png(os.path.join("tests","testPsd","LayerNamecp932.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","LayerNamecp932","あ")])

    def testLayerNameUtf8(self):
        # utf-8-encoded layer name must survive as-is.
        p2p=Psd2Png(os.path.join("tests","testPsd","LayerNameUtf8.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","LayerNameUtf8","あ")])

    def testLayerNameIsNone(self):
        # Nameless layer falls back to the psd file's base name.
        p2p=Psd2Png(os.path.join("tests","testPsd","LayerNameIsNone.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","LayerNameIsNone","LayerNameIsNone")])

    def testLayerNameIsDuplication(self):
        # Duplicate names are disambiguated with appended underscores.
        p2p=Psd2Png(os.path.join("tests","testPsd","LayerNameDuplication.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","LayerNameDuplication","test__"),
                                          os.path.join("tests","testPsd","LayerNameDuplication","test"),
                                          os.path.join("tests","testPsd","LayerNameDuplication","test_"),
                                          os.path.join("tests","testPsd","LayerNameDuplication","test___")])

    def testBadLayerName(self):
        # Invalid filename characters are stripped/replaced; fully invalid
        # names fall back to layerN.
        p2p=Psd2Png(os.path.join("tests","testPsd","BadLayerName.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","BadLayerName","test"),
                                          os.path.join("tests","testPsd","BadLayerName","test2"),
                                          os.path.join("tests","testPsd","BadLayerName","test3"),
                                          os.path.join("tests","testPsd","BadLayerName","test4"),
                                          os.path.join("tests","testPsd","BadLayerName","test5"),
                                          os.path.join("tests","testPsd","BadLayerName","test6"),
                                          os.path.join("tests","testPsd","BadLayerName","test7"),
                                          os.path.join("tests","testPsd","BadLayerName","layer0"),
                                          os.path.join("tests","testPsd","BadLayerName","layer1")])

    def testGroup(self):
        # Group hierarchy becomes nested output directories.
        p2p=Psd2Png(os.path.join("tests","testPsd","Group.psd"),self.logger)
        p2p.SetOutputPaths()
        self.assertEqual(p2p.outputPaths,[os.path.join("tests","testPsd","Group","layer0","NoneGroupName"),
                                          os.path.join("tests","testPsd","Group","BadGroupName","badlayer"),
                                          os.path.join("tests","testPsd","Group","日本語グループ","日本語レイヤー"),
                                          os.path.join("tests","testPsd","Group","NoGroup"),
                                          os.path.join("tests","testPsd","Group","SimpleGroup","Layer1"),
                                          os.path.join("tests","testPsd","Group","SimpleGroup","NestedGroup","NestedLayer")])
class TestOutputPng(TestPsd2PngBase):
    """End-to-end checks that OutputPng writes the expected PNG files.

    Fix over the original: every Image.open() now uses a context manager.
    The original rebound ``im`` several times and closed only the last
    handle, leaking open file handles (ResourceWarning, and locked files
    on Windows which would break the directory cleanup in setUp).
    """

    def _assert_pixel(self, path, expected):
        # Assert the file exists and pixel (0, 0) has the given colour,
        # closing the image handle deterministically.
        self.assertTrue(os.path.isfile(path))
        with Image.open(path) as im:
            self.assertEqual(im.getpixel((0, 0)), expected)

    def testSimpleMultiLayer(self):
        # Each layer becomes one RGBA png with its solid colour.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "SimpleMultiLayer.psd"), self.logger)
        p2p.OutputPng()
        base = os.path.join("tests", "testPsd", "SimpleMultiLayer")
        self._assert_pixel(os.path.join(base, "red.png"), (255, 0, 0, 255))
        self._assert_pixel(os.path.join(base, "green.png"), (0, 255, 0, 255))
        self._assert_pixel(os.path.join(base, "blue.png"), (0, 0, 255, 255))
        self._assert_pixel(os.path.join(base, "alpha.png"), (0, 0, 0, 0))

    def testGroup(self):
        # Nested groups become nested output directories.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "Group.psd"), self.logger)
        p2p.OutputPng()
        base = os.path.join("tests", "testPsd", "Group")
        for rel in [("layer0", "NoneGroupName.png"),
                    ("BadGroupName", "badlayer.png"),
                    ("日本語グループ", "日本語レイヤー.png"),
                    ("NoGroup.png",),
                    ("SimpleGroup", "Layer1.png"),
                    ("SimpleGroup", "NestedGroup", "NestedLayer.png")]:
            self.assertTrue(os.path.isfile(os.path.join(base, *rel)))

    def testLayerNameIsNone(self):
        # Nameless layer falls back to the psd base name (RGB, no alpha).
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.OutputPng()
        self._assert_pixel(
            os.path.join("tests", "testPsd", "LayerNameIsNone", "LayerNameIsNone.png"),
            (255, 0, 0))

    def testLayerOffset(self):
        # Offset layers keep their position on the full canvas.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "offset.psd"), self.logger)
        p2p.OutputPng()
        base = os.path.join("tests", "testPsd", "offset")
        self.assertTrue(os.path.isfile(os.path.join(base, "bgi.png")))
        self.assertTrue(os.path.isfile(os.path.join(base, "offset.png")))
        with Image.open(os.path.join(base, "offset.png")) as im:
            # Only the centre pixel is blue; the 1px border is transparent.
            for x in range(3):
                for y in range(3):
                    expected = (0, 0, 255, 255) if (x, y) == (1, 1) else (0, 0, 0, 0)
                    self.assertEqual(im.getpixel((x, y)), expected)
        with Image.open(os.path.join(base, "bgi.png")) as im:
            # The background layer is solid red everywhere.
            for x in range(3):
                for y in range(3):
                    self.assertEqual(im.getpixel((x, y)), (255, 0, 0, 255))

    def testSimpleMultiLayerOutputPathsOverride(self):
        # Pre-setting outputPaths redirects each layer to a custom location.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "SimpleMultiLayer.psd"), self.logger)
        base = os.path.join("tests", "testPsd", "override")
        p2p.outputPaths = [os.path.join(base, "test1"),
                           os.path.join(base, "test2"),
                           os.path.join(base, "test3"),
                           os.path.join(base, "test4")]
        p2p.OutputPng()
        self._assert_pixel(os.path.join(base, "test1.png"), (255, 0, 0, 255))
        self._assert_pixel(os.path.join(base, "test2.png"), (0, 0, 255, 255))
        self._assert_pixel(os.path.join(base, "test3.png"), (0, 255, 0, 255))
        self._assert_pixel(os.path.join(base, "test4.png"), (0, 0, 0, 0))

    def testLayerNameIsNoneOutputPathsOverride(self):
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.outputPaths = [os.path.join("tests", "testPsd", "override", "test1")]
        p2p.OutputPng()
        self._assert_pixel(
            os.path.join("tests", "testPsd", "override", "test1.png"),
            (255, 0, 0))
class TestOverideDir(TestPsd2PngBase):
    """Overwrite behaviour when the output directory already exists.

    setUp performs one conversion so every test starts with an existing
    output folder containing LayerNameIsNone.png.

    Fixes over the original: Image handles are closed via context
    managers, and the file deliberately held open in testPermissionError
    is released after the test with addCleanup (it previously leaked).
    """

    def setUp(self):
        super().setUp()
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.OutputPng()
        self._assert_output_intact()

    def _assert_output_intact(self):
        # The converted PNG exists and keeps its expected red pixel.
        out_png = os.path.join("tests", "testPsd", "LayerNameIsNone", "LayerNameIsNone.png")
        self.assertTrue(os.path.isfile(out_png))
        with Image.open(out_png) as im:
            self.assertEqual(im.getpixel((0, 0)), (255, 0, 0))

    def testForceOveride(self):
        # Default behaviour: an existing output dir is silently overwritten.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.OutputPng()
        self._assert_output_intact()

    def testDontOveride(self):
        # forceOveride=False: conversion aborts with FileExistsError.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.forceOveride = False
        with self.assertLogs("TEST", level=logging.ERROR) as cm:
            with self.assertRaises(FileExistsError):
                p2p.OutputPng()
        # "Output aborted because the file already exists"
        self.assertIn("ERROR:TEST:既にファイルが存在するため、出力を中断しました", cm.output)

    def testOverideAlertYes(self):
        # updateAlert=True and the user answers "yes": overwrite proceeds.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.ShowUpdateAlert = MagicMock(return_value="yes")
        p2p.forceOveride = False
        p2p.updateAlert = True
        p2p.OutputPng()
        p2p.ShowUpdateAlert.assert_called()
        self._assert_output_intact()

    def testOverideAlertNo(self):
        # updateAlert=True and the user answers "no": conversion aborts.
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        p2p.ShowUpdateAlert = MagicMock(return_value="no")
        p2p.forceOveride = False
        p2p.updateAlert = True
        with self.assertLogs("TEST", level=logging.ERROR) as cm:
            with self.assertRaises(FileExistsError):
                p2p.OutputPng()
        p2p.ShowUpdateAlert.assert_called()
        # "File overwrite was cancelled"
        self.assertIn("ERROR:TEST:ファイルの上書きがキャンセルされました", cm.output)

    def testPermissionError(self):
        # Hold a file open inside the output dir so its removal fails
        # (Windows refuses to delete in-use files).
        p2p = Psd2Png(os.path.join("tests", "testPsd", "LayerNameIsNone.psd"), self.logger)
        fw = open(os.path.join("tests", "testPsd", "LayerNameIsNone", "test"), "w")
        # Fix: the original never closed this handle; keep it open for the
        # duration of the test, then close it during cleanup.
        self.addCleanup(fw.close)
        with self.assertLogs("TEST", level=logging.ERROR) as cm:
            with self.assertRaises(PermissionError):
                p2p.OutputPng()
        # "<dir> could not be deleted because another process is using it"
        self.assertIn("ERROR:TEST:" + os.path.join("tests", "testPsd", "LayerNameIsNone") + "はほかのプロセスで使用中のため、削除できませんでした", cm.output)
import serial #requires python-serial to be installed on system
import time
# Serial link configuration.
# NOTE(review): despite its name, ``arduino_port`` is the *baud rate*
# (second positional argument of serial.Serial), not a port number —
# confirm before renaming.
arduino_location = '/dev/ttyACM0'
arduino_port = 9600
# Opened at import time: importing this module requires the device to exist.
arduino = serial.Serial(arduino_location, arduino_port)
def get_temp():
    """Read one line from the Arduino and return it parsed as a float."""
    line = arduino.readline()
    return float(line.decode())
def turn_relay_on():
    """Switch the relay on by sending the byte b'1' to the Arduino."""
    arduino.write(b'1')
    print("relay on")
def turn_relay_off():
    """Switch the relay off by sending the byte b'0' to the Arduino."""
    arduino.write(b'0')
    print("relay off")
# Run test code if invoked and not included
if __name__ == '__main__':
    # Smoke test: print a single temperature reading from the device.
    print(get_temp())
|
# Generated by Django 2.1.7 on 2019-03-20 08:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make BasicFields.float_field nullable."""
    dependencies = [
        ('examples', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='basicfields',
            name='float_field',
            field=models.FloatField(null=True),
        ),
    ]
|
from __future__ import unicode_literals
import re
from .util import (
extract_user_from_cookies,
FileProgress,
options,
parse_date,
sql_filter,
TableSizeProgressBar,
)
from .sources import get_requests_from_db
@options([], requires_db=True)
def action_load_requestlog(args, config, db, wdb):
    """ Creates analysis_requestlog database table based off requestlog table """
    # Full rebuild on every run: drop and recreate the analysis copy.
    wdb.drop_table('analysis_requestlog')
    wdb.execute('''CREATE TABLE analysis_requestlog (
        id int PRIMARY KEY auto_increment,
        access_time int,
        ip_address varchar(255),
        request_url text,
        cookies text,
        user_agent text,
        deleted boolean NOT NULL,
        method varchar(10));
    ''')
    # We need a second cursor due to parallel db access
    cur2 = wdb.db.cursor(buffered=True)
    # Copy every raw request row; 'deleted' starts at 0 and is flipped
    # later by action_cleanup_requestlog.  A single commit at the end
    # keeps the bulk insert in one transaction.
    for r in get_requests_from_db(db):
        sql = '''INSERT INTO analysis_requestlog
            SET access_time = %s,
                ip_address = %s,
                request_url = %s,
                cookies = %s,
                user_agent = %s,
                method = '',
                deleted = 0;
        '''
        cur2.execute(
            sql,
            (r.time, r.ip, r.path, r.cookies, r.user_agent))
    wdb.commit()
@options([], requires_db=True)
def action_cleanup_requestlog(args, config, db, wdb):
    """ Remove unneeded requests, or ones we created ourselves """
    # Both dates must be configured; re-raise with a clearer message.
    try:
        start_date = parse_date(config['startdate'])
        end_date = parse_date(config['enddate'])
    except KeyError as ke:
        raise KeyError('Missing key %s in configuration' % ke.args[0])
    # Rows are soft-deleted (deleted=1), never physically removed.
    wdb.execute(
        '''UPDATE analysis_requestlog SET deleted=1
            WHERE access_time < %s
            OR access_time > %s''',
        (start_date, end_date))
    wdb.commit()
    print('Deleted %d rows due to date constraints' % wdb.affected_rows())
    # Known bot / monitoring / benchmarking user agents.
    wdb.execute(
        '''UPDATE analysis_requestlog SET deleted=1
            WHERE user_agent RLIKE
            'GoogleBot|Pingdom|ApacheBench|bingbot|YandexBot|SISTRIX Crawler';
        ''')
    wdb.commit()
    print('Deleted %d rows due to UA constraints' % wdb.affected_rows())
    # Requests without cookies cannot be tied to a user session.
    wdb.execute(
        '''UPDATE analysis_requestlog SET deleted=1
            WHERE cookies is NULL;
        ''')
    wdb.commit()
    print('Deleted %d rows that do not have cookies' % wdb.affected_rows())
    # Convenience view over the surviving (non-deleted) rows.
    wdb.execute(
        '''CREATE OR REPLACE VIEW analysis_requestlog_undeleted AS
            SELECT * FROM analysis_requestlog WHERE NOT deleted''')
    wdb.commit()
@options(requires_db=True)
def action_annotate_requests(args, config, db, wdb):
    """ Filter out the interesting requests to HTML pages and copy all the
    information we got with them (for example duration) into one row"""
    bar = TableSizeProgressBar(
        db, 'analysis_requestlog_undeleted',
        'Collecting request information')
    # One annotation row per interesting request, keyed by request_id.
    wdb.recreate_table('analysis_request_annotations', '''
        id int PRIMARY KEY auto_increment,
        request_id int,
        user_sid varchar(64),
        duration int,
        detail_json TEXT,
        INDEX (request_id),
        INDEX (user_sid)
    ''')

    # Lightweight per-request record; __slots__ keeps memory low when the
    # log is large.
    class RequestInfo(object):
        __slots__ = 'request_id', 'access_time', 'latest_update', 'user_sid'

        def __init__(self, request_id, access_time, user_sid):
            self.request_id = request_id
            self.access_time = access_time
            self.user_sid = user_sid
            self.latest_update = None

        def __str__(self):
            return '%d %s' % (self.access_time, self.user_sid)

    def write_request(key, ri):
        # Persist one annotation row for a finished request occurrence.
        ip, user_agent, request_url = key
        wdb.execute(
            '''INSERT INTO analysis_request_annotations
                SET request_id=%s, user_sid=%s
            ''', (ri.request_id, ri.user_sid))
    # Key: (ip, user_agent, request_url), value: RequestInfo
    requests = {}
    # NOTE(review): is_stats is unused in this function — looks like a
    # leftover (see the commented-out assert below); confirm before removing.
    is_stats = re.compile(r'/+(?:i/[^/]+/)?stats/')
    is_static = re.compile(r'''(?x)
        /favicon\.ico|
        /images/|
        /fanstatic/|
        /stylesheets/|
        /robots\.txt|
        /javascripts|
        # Technically not static, but very close
        /admin|
        /i/[^/]+/instance/[^/]+/settings
    ''')
    write_count = 0
    # Stream requests in chronological order; a repeated (ip, UA, url) key
    # flushes the previous occurrence before recording the new one.
    db.execute(
        '''SELECT id, access_time as atime,
            ip_address, user_agent, request_url, cookies
        FROM analysis_requestlog_undeleted
        ORDER BY access_time ASC
        ''')
    for req in db:
        bar.next()
        request_id, atime, ip, user_agent, request_url, cookies = req
        if is_static.match(request_url):
            continue  # Skip
        #assert '/stats' not in request_url
        key = (ip, user_agent, request_url)
        cur = requests.get(key)
        if cur is not None:
            write_request(key, cur)
            del requests[key]
            write_count += 1
        user = extract_user_from_cookies(cookies, None)
        requests[key] = RequestInfo(request_id, atime, user)
    bar.finish()
    # Flush everything still pending at end of stream.
    print('Writing out %d requests (already wrote out %d inline) ...' % (
        len(requests), write_count))
    for key, ri in requests.items():
        write_request(key, ri)
    # Combined view joining raw log rows with their annotations.
    wdb.execute('''CREATE OR REPLACE VIEW analysis_requestlog_combined AS
        SELECT analysis_requestlog_undeleted.*,
            analysis_request_annotations.user_sid as user_sid,
            analysis_request_annotations.duration as duration,
            analysis_request_annotations.detail_json as detail_json
        FROM analysis_requestlog_undeleted, analysis_request_annotations
        WHERE analysis_requestlog_undeleted.id = analysis_request_annotations.request_id
    ''')
@options()
def action_user_classification(args, config, db, wdb):
    """Print basic contribution counts: proposals, comments, votes, votings.

    All queries exclude the admin user (id 1), soft-deleted rows where
    applicable, and anything outside the configured date window.

    NOTE(review): sibling actions are decorated @options([], requires_db=True)
    while this one takes db/wdb but uses @options() — confirm the decorator
    defaults still provide the database connections.
    """
    start_date = parse_date(config['startdate'])
    end_date = parse_date(config['enddate'])
    # Restrict every query to the configured analysis window.
    time_q = "create_time >= FROM_UNIXTIME(%d) AND create_time <= FROM_UNIXTIME(%d)" % (
        start_date, end_date)
    # 1 = admin (i.e. created by us)
    where_q = ' WHERE creator_id != 1 AND delete_time IS NULL AND ' + time_q + sql_filter('proposal', config)
    proposal_authors = db.simple_query(
        'SELECT COUNT(delegateable.id) FROM delegateable' + where_q + ' AND type="proposal"')[0]
    # Fix: the proposal count was computed but never reported (every other
    # count below is printed).
    print('%d proposals' % proposal_authors)
    where_q = ' WHERE creator_id != 1 AND delete_time IS NULL AND ' + time_q + sql_filter('comment', config)
    comment_count = db.simple_query(
        'SELECT COUNT(*) FROM comment ' + where_q)[0]
    print('%d comments' % comment_count)
    # Both vote queries share one filter; build it once (the original built
    # the identical string twice).
    where_q = ' WHERE user_id != 1 AND ' + time_q + sql_filter('vote', config)
    raw_vote_count = db.simple_query(
        'SELECT COUNT(*) FROM vote ' + where_q)[0]
    print('%d votes' % raw_vote_count)
    # A "voting" is one distinct (user, poll) pair, i.e. revotes collapse.
    vote_count = db.simple_query(
        'SELECT COUNT(DISTINCT user_id, poll_id) FROM vote ' + where_q)[0]
    print('%d votings' % vote_count)
|
import numpy as np
from gpucsl.pc.pc import GaussianPC
from gpucsl.pc.helpers import correlation_matrix_of
from gpucsl.pc.kernel_management import Kernels
from tests.equality import check_graph_equality, graph_equality_isomorphic
from .fixtures.input_data import Fixture, input_data
import pytest
import networkx as nx
from .asserts import assert_pmax_valid
from typing import List
import cupy as cp
from .test_gaussian_device_manager import MockDevice
# Significance level shared by every CI test in this module.
alpha = 0.05
@pytest.mark.parametrize("input_data", ["coolingData"], indirect=True)
def test_pc(input_data: Fixture):
    """End-to-end GaussianPC run on coolingData against the expected graph."""
    samples = input_data.samples
    max_level = 3
    pc = GaussianPC(samples, max_level, alpha).set_distribution_specific_options()
    (result, _) = pc.execute()
    directed = result.directed_graph
    # The discovered CPDAG must be isomorphic to the reference graph.
    assert check_graph_equality(
        input_data.expected_graph, directed, graph_equality_isomorphic
    )
    assert_pmax_valid(result.pmax, directed)
    print(nx.adjacency_matrix(directed).todense())
@pytest.mark.timeout(3.0)
@pytest.mark.parametrize("devices", [[0], [0, 1]])
@pytest.mark.parametrize("input_data", ["coolingData"], indirect=True)
def test_pc_interface(monkeypatch, input_data: Fixture, devices: List[int]):
    """Smoke test of the GaussianPC public interface on 1 and 2 devices.

    CUDA device discovery is mocked so the multi-device case runs on any
    host; the assertions only check the result structure, not the graph.
    """
    # Pretend two CUDA devices exist regardless of the actual machine.
    monkeypatch.setattr(cp.cuda, "Device", MockDevice)
    monkeypatch.setattr(cp.cuda.runtime, "getDeviceCount", lambda: 2)
    n_devices = len(devices)
    data = input_data.samples
    max_level = 3
    correlation_matrix = correlation_matrix_of(data)
    # Fix: the original compiled one extra kernel set on a bare
    # Kernels.for_gaussian_ci(...) line and discarded the result; only the
    # per-device list below is needed.
    kernels = [
        Kernels.for_gaussian_ci(data.shape[1], n_devices, max_level)
        for _ in range(n_devices)
    ]
    pc = GaussianPC(
        data,
        max_level,
        alpha,
        kernels=kernels,
    ).set_distribution_specific_options(
        correlation_matrix=correlation_matrix, devices=devices
    )
    result = pc.execute()
    res = result.result
    # Runtimes must be populated and positive; the output must be a
    # directed graph with separation sets and pmax filled in.
    assert result.runtime > 0
    assert res.discover_skeleton_runtime > 0
    assert res.edge_orientation_runtime > 0
    assert res.directed_graph.is_directed()
    assert len(res.separation_sets) > 0
    assert res.pmax is not None
    assert res.discover_skeleton_kernel_runtime > 0
# Measures runtime. Only run this test manually with
# pytest tests/test_pc.py::test_pc_runtime_with_optional_args
# because if other tests run before it, kernels will already be compiled.
# Compare results with test_pc_runtime_without_supplied_kernels.
@pytest.mark.skip(reason="Only run this test manually (comment this line out)")
@pytest.mark.parametrize("input_data", ["coolingData"], indirect=True)
def test_pc_runtime_with_supplied_kernels(input_data):
    """Manual benchmark: full PC runtime with kernels pre-built up front."""
    data = input_data.samples
    max_level = 3
    correlation_matrix = correlation_matrix_of(data)
    # Single-device kernel set supplied explicitly.
    kernels = [Kernels.for_gaussian_ci(data.shape[1], 1, max_level)]
    pc = GaussianPC(
        data,
        max_level,
        alpha,
        kernels,
    ).set_distribution_specific_options(correlation_matrix=correlation_matrix)
    (_, full_runtime) = pc.execute()
    print(f"pc duration: {full_runtime}")
    # Intentional failure so pytest shows the captured runtime output.
    assert False
# Measures runtime. Only run this test manually with
# pytest tests/test_pc.py::test_pc_runtime_without_supplied_kernels
# because if other tests run before it, kernels will already be compiled.
# Compare results with test_pc_runtime_with_supplied_kernels.
@pytest.mark.skip(reason="Only run this test manually (comment this line out)")
@pytest.mark.parametrize("input_data", ["coolingData"], indirect=True)
def test_pc_runtime_without_supplied_kernels(input_data):
    """Manual benchmark: full PC runtime with kernels compiled lazily."""
    # Fix: every sibling test reads the Fixture via attribute access
    # (input_data.samples); the original used input_data["samples"],
    # which fails on a non-subscriptable fixture object.
    data = input_data.samples
    max_level = 3
    correlation_matrix = correlation_matrix_of(data)
    pc = GaussianPC(
        data,
        max_level,
        alpha,
    ).set_distribution_specific_options(correlation_matrix=correlation_matrix)
    (_, full_runtime) = pc.execute()
    print(f"pc duration: {full_runtime}")
    # Intentional failure so pytest shows the captured runtime output.
    assert False
|
from prepare_mesh_lib.file_reader2 import FileReader
def main():
    """Interactively convert one raw mesh file into prepared mesh data.

    Reads the full file name from stdin, then runs the FileReader
    pipeline: nodes, elements, constraints/sets and loads are each
    parsed and then prepared in turn.
    """
    divider = '-' * 25
    print(divider)
    print('Put full filename to parse mesh from:')
    filename = input()  # Full name of the file to parse.
    mesh_dir = 'prepared_meshes'  # Destination folder for finished meshes.
    raw_mesh_dir = 'raw_meshes'  # Folder holding the raw (unprepared) meshes.
    # Result subfolder: file name minus its last two characters.
    # NOTE(review): assumes a one-letter extension such as "name.k" — confirm.
    new_dir = filename[:-2]
    reader = FileReader(filename, mesh_dir, raw_mesh_dir, new_dir)
    reader.make_directory()
    reader.parse_nodes()
    reader.prepare_nodes()
    reader.parse_elements()
    reader.prepare_elements()
    reader.parse_constraints_and_sets()
    reader.prepare_constraints()
    reader.parse_loads()
    reader.prepare_loads()
    print(divider)
    print('Done')


if __name__ == '__main__':
    main()
|
# encoding: utf-8
from miniworld.model.singletons.Singletons import singletons
from miniworld.model.spatial.MovementPattern.ReplayMovementPattern import ReplayMovementPattern
__author__ = "Patrick Lampe"
__email__ = "uni at lampep.de"
class ReplayNode():
    """Node whose movement is replayed from a recorded trace.

    Wraps a ReplayMovementPattern fed from the spatial singleton's
    file_path; each step() advances one recorded position.
    """
    def __init__(self, node_id):
        # Movement pattern replaying the coordinates recorded for node_id.
        self.crnt_movement_pattern = ReplayMovementPattern(singletons.spatial_singleton.file_path, node_id)
    def step(self):
        # Advance the replay to the next recorded position.
        self.crnt_movement_pattern.walk()
    def get_lat(self):
        # Latitude of the current replayed position.
        return self.crnt_movement_pattern.get_lat()
    def get_lon(self):
        # Longitude of the current replayed position.
        return self.crnt_movement_pattern.get_lon()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains functions and classes related with meta system
"""
import copy
import pprint
import logging
import traceback
import maya.cmds
from tpDcc.dccs.maya.core import common, attribute as attr_utils, name as name_utils, shape as shape_utils
logger = logging.getLogger('tpDcc-dccs-maya')
class MetaAttributeValidator(attr_utils.AttributeValidator):
    """
    Utility class that contains functions to check if a DCC node attributes are valid or not
    All helpers accept either plain Maya node names or meta node wrappers
    (objects exposing a ``meta_node`` attribute) and unwrap the latter
    before delegating to the base AttributeValidator.
    """
    @staticmethod
    def meta_node_string(arg):
        """
        Returns given argument meta node if possible
        :param arg: variant, meta node wrapper or plain value
        :return: arg, unwrapped meta_node when available, otherwise unchanged
        """
        try:
            arg = arg.meta_node
        except Exception:
            # Not a meta node wrapper (or its meta_node access failed);
            # use the value as-is.
            pass
        return arg
    @staticmethod
    def meta_node_string_list(string_list):
        """
        Returns list of arguments with their meta nodes if possible
        :param string_list: list, values to unwrap
        :return: list
        """
        string_list = MetaAttributeValidator.list_arg(string_list)
        result = list()
        for obj in string_list:
            try:
                obj = obj.meta_node
            except Exception:
                # Plain value; keep unchanged.
                pass
            result.append(obj)
        return result
    @staticmethod
    def shape_arg(node=None, types=None, single_return=False):
        """
        Returns arg if args is a Maya shape node else returns shapes of given node
        :param node: variant, value to valid as a Maya shape node
        :param types: valid types if you want validation
        :param single_return: True if you only want to return first
        :return: bool
        """
        # Unwrap a meta node wrapper before delegating to the base class.
        try:
            node = node.meta_node
        except Exception:
            pass
        return attr_utils.AttributeValidator.shape_arg(node=node, types=types, single_return=single_return)
    @staticmethod
    def is_component(arg=None):
        """
        Returns whether given node is a component or not
        :param arg: str, node (or meta node wrapper) to check
        :return: bool
        """
        arg = MetaAttributeValidator.meta_node_string(arg)
        return attr_utils.AttributeValidator.is_component(arg)
class MetaAttributeUtils(object):
    """
    Utility class that contains functions to work with meta nodes attributes
    """
    # Canonical attribute type -> accepted aliases.
    # NOTE(review): validate_attr_type_name returns the FIRST key whose
    # aliases match, so shared aliases ('vector'/'vec' in double3 and
    # float3, 'm' in message and multi) depend on dict ordering — stable
    # on Python 3.7+ keyword-argument dicts; confirm behaviour if this
    # ever runs on an older interpreter.
    attr_types = dict(message=('message', 'msg', 'm'),
                      double=('float', 'fl', 'f', 'doubleLinear', 'doubleAngle', 'double', 'd'),
                      string=('string', 's', 'str'), long=('long', 'int', 'i', 'integer'), short=('short', 'shrt'),
                      bool=('bool', 'b', 'boolean'), enum=('enum', 'options', 'e'),
                      double3=('double3', 'd3', 'vector', 'vec', 'v'),
                      float3=('vector', 'vec'), multi=('multi', 'm'))
# region Public Functions
@classmethod
def validate_attr_type_name(cls, attr_type):
"""
Validates an attribute type by converting the given attribute type to a valid one if possible
:param attr_type: str, attribute type
:return: variant, str(validated type) || bool
"""
for option in cls.attr_types.keys():
if attr_type == option:
return option
if attr_type in cls.attr_types.get(option):
return option
return False
@classmethod
def validate_attr_type_match(cls, type_a, type_b):
"""
Returns True if bot attribute types given match or False otherwise
:param type_a: str
:param type_b: str
:return: bool
"""
if type_a == type_b:
return True
for o in cls.attr_types.keys():
if type_a in cls.attr_types.get(o) and type_b in cls.attr_types.get(o):
return True
return False
# endregion
# region Abstract Functions
@staticmethod
def validate_attribute(*args):
"""
Validates given attribute and check if the given attributes is valid or not
:param args:
"""
if len(args) == 1:
# logger.debug('|Attribute Validation| >> single argument')
if issubclass(type(args[0]), dict):
# logger.debug('|Attribute Validation| >> dict argument')
if args[0].get('combined'):
# logger.debug('|Attribute Validation| >> passed validating arg, returning it ...')
return args[0]
raise ValueError('Given argument is not a valid dictionary: {}'.format(args[0]))
elif type(args[0]) in [list, tuple] and len(args[0]) == 2:
# logger.debug('|Attribute Validation| >> list argument')
if hasattr(args[0][0], 'meta_node'):
obj = args[0][0].meta_node
else:
obj = args[0][0]
attr = args[0][1]
combined = '{0}.{1}'.format(obj, attr)
elif '.' in args[0]:
# logger.debug('|Attribute Validation| >> string argument')
obj = args[0].split('.')[0]
attr = '.'.join(args[0].split('.')[1:])
combined = args[0]
else:
raise ValueError('Invalid attribute argument: {}'.format(args))
else:
# logger.debug('|Attribute Validation| >> multi argument')
if hasattr(args[0], 'meta_node'):
args[0] = args[0].meta_node
combined = '{0}.{1}'.format(args[0], args[1])
obj = args[0]
attr = args[1]
return {'node': obj, 'obj': obj, 'attr': attr, 'combined': combined}
    @staticmethod
    def get_type(*args):
        """
        Returns given attribute type as reported by maya.cmds.getAttr
        :param args: variant, any form accepted by validate_attribute
        :return: variant, str(type name) || False when the query fails
        """
        attr_dict = MetaAttributeUtils.validate_attribute(*args)
        try:
            return maya.cmds.getAttr(attr_dict['combined'], type=True)
        except Exception as e:
            # Attribute may not exist or be unqueryable; log and signal failure.
            logger.error('|Attribute Type Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
            return False
    @staticmethod
    def convert_type(node=None, attr=None, attr_type=None, *args):
        """
        Attempts to convert an existing attribute type from one type to another
        Enums are stored to string as 'option1;option2'
        Strings with a ';' will split to enum options during conversion
        The attribute is deleted and rebuilt; its value, lock state and
        incoming/outgoing connections are captured first and restored after
        :param node: str, node name, 'node.attr' string, or validated dict
        :param attr: str, attribute name (or the target type when node
            already contains the attribute)
        :param attr_type: str, target type
        :return: bool
        """
        _attr_type = attr_type
        # Flexible call forms: when node is 'obj.attr' (or a validated
        # dict), the second positional argument is the target type.
        if '.' in node or issubclass(type(node), dict):
            attr_dict = MetaAttributeUtils.validate_attribute(node)
            _attr_type = attr
        else:
            attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
        combined = attr_dict['combined']
        _attr_type = MetaAttributeUtils.validate_attr_type_name(_attr_type)
        _type_current = MetaAttributeUtils.validate_attr_type_name(MetaAttributeUtils.get_type(attr_dict))
        logger.debug(
            '|Convert Attribute Type| >> attr: {0} | type: {1} | target_type: {2}'.format(
                combined, _type_current, _attr_type))
        # Nothing to do when the attribute already has the target type.
        if _attr_type == _type_current:
            logger.debug('|Convert Attribute Type| >> {} already target type'.format(combined))
            return True
        # ============================================== Data Gathering
        # Temporarily unlock so the attribute can be deleted; relocked below.
        lock = False
        if MetaAttributeUtils.is_locked(attr_dict):
            lock = True
            maya.cmds.setAttr(combined, lock=False)
        # Capture connections so they can be re-established on the new attr.
        _driver = MetaAttributeUtils.get_driver(attr_dict, skip_conversion_nodes=True)
        _driven = MetaAttributeUtils.get_driven(attr_dict, skip_conversion_nodes=True)
        _enum = 'off', 'on'
        if _type_current == 'enum':
            _enum = MetaAttributeUtils.get_enum(attr_dict).split(':')
        # Capture the current value in a type-appropriate way.
        if _type_current == 'message':
            _data = MetaAttributeUtils.get_message(attr_dict)
        elif _type_current == 'enum':
            _data = MetaAttributeUtils.get_enum(attr_dict)
        else:
            _data = MetaAttributeUtils.get(attr_dict)
        MetaAttributeUtils.delete(attr_dict)
        # ============================================== Data Rebuild
        # Enum targets: derive options from the stored string if delimited.
        if _attr_type == 'enum':
            if _data:
                if MetaAttributeValidator.string_arg(_data):
                    for o in [":", ",", ";"]:
                        if o in _data:
                            _enum = _data.split(o)
                            break
        MetaAttributeUtils.add(attr_dict, _attr_type, enum_options=_enum)
        # Coerce the captured value into the new type and set it back.
        if _data is not None:
            logger.debug('|Convert Attribute Type| >> Data Setting: {}'.format(_data))
            try:
                if _attr_type == 'string':
                    if _type_current == 'message':
                        # Message data is a list of nodes; store as CSV.
                        _data = ','.join(_data)
                    else:
                        _data = str(_data)
                elif _attr_type == 'double':
                    _data = float(_data)
                elif _attr_type == 'long':
                    _data = int(_data)
            except Exception as e:
                logger.error(
                    '|Convert Attribute Type| >> Failed to convert data: {0} | type: {1} | err: {2}'.format(
                        _data, _attr_type, e))
            try:
                MetaAttributeUtils.set(attr_dict, value=_data)
            except Exception as e:
                logger.error(
                    '|Convert Attribute Type| >> Failed to set back data buffer {0} | data: {1} | err: {2}'.format(
                        combined, _data, e))
        # Restore the incoming connection (not meaningful for message attrs).
        if _driver and _type_current != 'message':
            logger.debug('|Convert Attribute Type| >> Driver: {}'.format(_driver))
            try:
                MetaAttributeUtils.connect(_driver, combined)
            except Exception as e:
                logger.debug(
                    '|Convert Attribute Type| >> Failed to connect {0} >> {1} | err: {2}'.format(_driver, combined, e))
        # Restore all outgoing connections.
        if _driven:
            logger.debug('|Convert Attribute Type| >> Driven: {}'.format(_driven))
            for c in _driven:
                logger.debug('|Convert Attribute Type| >> driven: {}'.format(c))
                try:
                    MetaAttributeUtils.connect(combined, c)
                except Exception as e:
                    logger.debug(
                        '|Convert Attribute Type| >> Failed to connect {0} >> {1} | err: {2}'.format(combined, c, e))
        if lock:
            maya.cmds.setAttr(combined, lock=True)
        return True
    @staticmethod
    def get(*args, **kwargs):
        """
        Get attribute value for the given DCC node
        :param args: variant, any form accepted by validate_attribute
        :param kwargs: passed through to maya.cmds.getAttr
        :return: variant, attribute value || None when the type query fails
        """
        attr_dict = MetaAttributeUtils.validate_attribute(*args)
        combined = attr_dict['combined']
        obj = attr_dict['obj']
        attr = attr_dict['attr']
        if kwargs:
            # NOTE(review): intent looks like "default sl=True unless
            # silent was requested", but with ``or`` the flag is forced on
            # whenever either key is missing — confirm whether this should
            # be ``and``.
            if not kwargs.get('sl') or not kwargs.get('silent'):
                kwargs['sl'] = True
        logger.debug('|Attribute Getter| >> arg: {}'.format(args))
        if kwargs:
            logger.debug('|Attribute Getter| >> kwargs: {}'.format(kwargs))
        # Indexed (multi) attributes: report connections instead of values.
        if '[' in attr:
            logger.debug('|Attribute Getter| >> Indexed Attribute')
            return maya.cmds.listConnections(combined)
        try:
            attr_type = maya.cmds.getAttr(combined, type=True)
        except Exception as e:
            logger.debug(
                '|Attribute Getter| >> {0} failed to return type. Exists: {1}'.format(combined, maya.cmds.objExists(
                    combined)))
            return None
        # Compound data types: report connections rather than raw data.
        if attr_type in ['TdataCompound']:
            return maya.cmds.listConnections(combined)
        if maya.cmds.attributeQuery(attr, node=obj, msg=True):
            # Message attributes resolve to the connected node(s).
            return MetaAttributeUtils.get_message(message_attr=attr_dict)
        elif attr_type == 'double3':
            # Vector attribute: collect each child component individually.
            return [maya.cmds.getAttr(obj + '.' + arg) for arg in
                    maya.cmds.attributeQuery(attr, node=obj, listChildren=True)]
        else:
            return maya.cmds.getAttr(combined, **kwargs)
@staticmethod
def get_driver(node, attr=None, get_node=False, skip_conversion_nodes=False, long_names=True):
"""
Get the driver of an attribute if exists
:param node: str
:param attr: str
:param get_node: bool, True if you want the DAG node or the attribute(s) or False otherwise
:param skip_conversion_nodes: bool, True if you want conversion nodes included in query or False otherwise
:param long_names: bool, True if you want the data returned name wise
:return: str, driver attr
"""
if attr is None:
attr_dict = MetaAttributeUtils.validate_attribute(node)
else:
attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
combined = attr_dict['combined']
if not MetaAttributeUtils.is_connected(attr_dict):
return False
if get_node:
connections = maya.cmds.listConnections(combined, skipConversionNodes=skip_conversion_nodes,
destination=False, source=True, plugs=False)
if not connections:
parent = MetaAttributeUtils.get_parent(attr_dict)
if parent:
logger.debug('|Driver Attribute Getter| >> Parent Attribute Check: {}'.format(parent))
return MetaAttributeUtils.get_driver(attr_dict['node'], parent, get_node=get_node,
skip_conversion_nodes=skip_conversion_nodes,
long_names=long_names)
return False
if long_names:
return name_utils.get_long_name(obj=connections[0])
else:
return name_utils.get_short_name(obj=connections[0])
else:
if maya.cmds.connectionInfo(combined, isDestination=True):
connections = maya.cmds.listConnections(combined, skipConversionNodes=skip_conversion_nodes,
destination=False, source=True, plugs=True)
if not connections:
connections = [maya.cmds.connectionInfo(combined, sourceFromDestination=True)]
if connections:
if skip_conversion_nodes and MetaAttributeValidator.get_maya_type(
node=connections) == 'unitConversion':
parent = MetaAttributeUtils.get_parent(attr_dict)
if parent:
logger.debug('|Driver Attribute Getter| >> Parent Attribute Check: {}'.format(parent))
return MetaAttributeUtils.get_driver(attr_dict['node'], parent, get_node=get_node,
skip_conversion_nodes=skip_conversion_nodes,
long_names=long)
if long_names:
return name_utils.get_long_name(obj=connections[0])
else:
return name_utils.get_short_name(obj=connections[0])
return False
return False
@staticmethod
def get_driven(node, attr=None, get_node=False, skip_conversion_nodes=False, long_names=True):
"""
Get attributes driven by an attribute
:param node: str
:param attr: str
:param get_node: bool, True if you want the DAG node or the attribute(s) or False otherwise
:param skip_conversion_nodes: bool, True if you want conversion nodes included in query or False otherwise
:param long_names: bool, True if you want the data returned name wise
:return: str, driven attrs
"""
if attr is None:
attr_dict = MetaAttributeUtils.validate_attribute(node)
else:
attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
combined = attr_dict['combined']
if get_node:
connections = maya.cmds.listConnections(combined, skipConversionNodes=skip_conversion_nodes,
destination=True, source=False, plugs=False)
if not connections:
return False
if long_names:
return [name_utils.get_long_name(o) for o in connections]
else:
return [name_utils.get_short_name(o) for o in connections]
else:
if maya.cmds.connectionInfo(combined, isSource=True):
connections = maya.cmds.listConnections(combined, skipConversionNodes=skip_conversion_nodes,
destination=True, source=False, plugs=True)
if not connections:
connections = maya.cmds.connectionInfo(combined, destinationFromSource=True)
if connections:
connections_list = list()
for cnt in connections:
if long_names:
connections_list.append(name_utils.get_long_name(cnt))
else:
connections_list.append(name_utils.get_short_name(cnt))
return connections_list
return False
return False
@staticmethod
def add(obj, attr=None, attr_type=None, enum_options=['off', 'on'], *args, **kwargs):
"""
Add a new attribute to the given object
:param obj: str, object to add attribute to
:param attr: str, attribute name
:param attr_type: str, valid type
:param enum_options: list<str>, list of option for enum attribute types
:return: str, added attribute name
"""
try:
if enum_options is None:
enum_options = ['off', 'on']
if '.' in obj or issubclass(type(obj), dict):
attr_dict = MetaAttributeUtils.validate_attribute(obj)
attr_type = attr
else:
attr_dict = MetaAttributeUtils.validate_attribute(obj, attr)
combined = attr_dict['combined']
node = attr_dict['node']
attr_name = attr_dict['attr']
if maya.cmds.objExists(combined):
raise ValueError('{} already exists!'.format(combined))
_type = MetaAttributeUtils.validate_attr_type_name(attr_type=attr_type)
assert _type is not False, '"{}" is not a valid attribute type'.format(attr_type)
if _type == 'string':
maya.cmds.addAttr(node, ln=attr_name, dt='string', *args, **kwargs)
elif _type == 'double':
maya.cmds.addAttr(node, ln=attr_name, at='float', *args, **kwargs)
elif _type == 'long':
maya.cmds.addAttr(node, ln=attr_name, at='long', *args, **kwargs)
elif _type == 'double3':
maya.cmds.addAttr(node, ln=attr_name, at='double3', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'X'), p=attr_name, at='double', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'Y'), p=attr_name, at='double', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'Z'), p=attr_name, at='double', *args, **kwargs)
elif _type == 'enum':
if type(enum_options) in [list, tuple]:
enum_options = '%s' % (':'.join(enum_options))
maya.cmds.addAttr(node, ln=attr_name, at='enum', en=enum_options, *args, **kwargs)
maya.cmds.setAttr((node + '.' + attr_name), e=True, keyable=True)
elif _type == 'bool':
maya.cmds.addAttr(node, ln=attr_name, at='bool', *args, **kwargs)
maya.cmds.setAttr((node + '.' + attr_name), edit=True, channelBox=True)
elif _type == 'message':
maya.cmds.addAttr(node, ln=attr_name, at='message', *args, **kwargs)
elif _type == 'float3':
maya.cmds.addAttr(node, ln=attr_name, at='float3', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'X'), p=attr_name, at='float', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'Y'), p=attr_name, at='float', *args, **kwargs)
maya.cmds.addAttr(node, ln=(attr_name + 'Z'), p=attr_name, at='float', *args, **kwargs)
else:
raise ValueError('Unknown attribute type: {}'.format(attr_type))
return combined
except Exception as e:
raise Exception(traceback.format_exc())
# logger.error(str(e))
    @staticmethod
    def set(node, attr=None, value=None, lock=False, **kwargs):
        """
        Sets an existing attribute of the given object
        :param node: str, object to set attribute of (also accepts 'node.attr' or a validated dict,
            in which case the second positional argument is treated as the value)
        :param attr: str, attribute name
        :param value: variant
        :param lock: bool, True if the attribute must be locked after setting it or False otherwise
        :param kwargs: forwarded to maya.cmds.setAttr
        :return: None
        """
        # support set('node.attr', value) shorthand: second positional arg is the value
        if '.' in node or issubclass(type(node), dict):
            attr_dict = MetaAttributeUtils.validate_attribute(node)
            if value is None and attr is not None:
                value = attr
        else:
            attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
        combined = attr_dict['combined']
        obj = attr_dict['node']
        attr_name = attr_dict['attr']
        was_locked = False
        logger.debug('|Attribute Setter| >> attr: {0} | value: {1} | lock: {2}'.format(combined, value, lock))
        if kwargs:
            logger.debug('|Attribute Setter| >> kwargs: {0}'.format(kwargs))
        attr_type = maya.cmds.getAttr(combined, type=True)
        valid_type = MetaAttributeUtils.validate_attr_type_name(attr_type=attr_type)
        # temporarily unlock so the set can go through; lock state is restored at the end
        if MetaAttributeUtils.is_locked(combined):
            was_locked = True
            maya.cmds.setAttr(combined, lock=False)
        # un-keyed attributes lose any incoming connection before being set
        if not MetaAttributeUtils.is_keyed(attr_dict):
            if MetaAttributeUtils.break_connection(attr_dict):
                logger.warning('|Attribute Setter| >> Broken connection: {}'.format(combined))
        current = MetaAttributeUtils.get(combined)
        # no-op when the value is already current (lock state still restored)
        if current == value:
            logger.debug('|Attribute Setter| >> Already has a value: {}'.format(combined))
            if was_locked:
                MetaAttributeUtils.set_lock(attr_dict, arg=True)
            return
        children = MetaAttributeUtils.get_children(attr_dict)
        if children:
            # compound attribute: fan the value(s) out across the child plugs
            if MetaAttributeValidator.is_list_arg(value):
                if len(children) != len(value):
                    raise ValueError(
                        'Must have matching len for value and children. Children: {0} | Value: {1}'.format(
                            children, value))
            else:
                # scalar value: replicate it for every child
                value = [value for i in range(len(children))]
            for i, child in enumerate(children):
                maya.cmds.setAttr('{0}.{1}'.format(obj, child), value[i], **kwargs)
        elif valid_type == 'long':
            # float() first so string floats like '1.5' coerce cleanly to int
            maya.cmds.setAttr(combined, int(float(value)), **kwargs)
        elif valid_type == 'string':
            maya.cmds.setAttr(combined, str(value), type='string', **kwargs)
        elif valid_type == 'double':
            maya.cmds.setAttr(combined, float(value), **kwargs)
        elif valid_type == 'message':
            MetaAttributeUtils.set_message(obj, attr_name, value)
        elif valid_type == 'enum':
            # an 'a:b:c' style string redefines the enum options rather than setting a value
            if MetaAttributeValidator.string_arg(value) and ':' in value:
                maya.cmds.addAttr(combined, edit=True, en=value, **kwargs)
            else:
                enum_values = MetaAttributeUtils.get_enum(attr_dict).split(':')
                if value in enum_values:
                    # value given by option name: translate to its index
                    maya.cmds.setAttr(combined, enum_values.index(value), **kwargs)
                elif value is not None and value <= len(enum_values):
                    # NOTE(review): both branches below are identical; the <= bound also
                    # allows an index one past the option count — confirm intent
                    maya.cmds.setAttr(combined, value, **kwargs)
                else:
                    maya.cmds.setAttr(combined, value, **kwargs)
        else:
            maya.cmds.setAttr(combined, value, **kwargs)
        if was_locked or lock:
            maya.cmds.setAttr(combined, lock=True)
        return
@staticmethod
def delete(*args):
"""
Deletes given attribute from the given node
:param args: dict, validated argument
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
combined = attr_dict['combined']
try:
if maya.cmds.objExists(combined):
if MetaAttributeUtils.get_parent(attr_dict):
raise ValueError('{0} is a child attribute, try deleting parent attr first: {1}'.format(
combined, MetaAttributeUtils.get_parent(attr_dict)))
try:
maya.cmds.setAttr(combined, lock=False)
except Exception:
pass
try:
MetaAttributeUtils.break_connection(combined)
except Exception:
pass
driven_attr = MetaAttributeUtils.get_driven(attr_dict) or []
for plug in driven_attr:
logger.warning('|Attribute Deletion| >> [{0}] | Breaking out plug: {1}'.format(combined, plug))
MetaAttributeUtils.disconnect(combined, plug)
maya.cmds.deleteAttr(combined)
return True
return False
except Exception as e:
pprint.pprint(vars())
raise Exception(e)
@staticmethod
def get_children(*args):
"""
Get children of a given attribute
:param args: dict, validated argument
:return: list, children attrs || status (bool)
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], listChildren=True) or []
except Exception as e:
logger.error('|Attribute Children Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def get_parent(*args):
"""
Get parent of a given attribute
:param args: dict, validated argument
:return: list, parent attrs || status (bool)
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
parents = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], listParent=True) or []
if parents:
return parents[0]
return parents
except Exception as e:
logger.error('|Attribute Parent Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def get_siblings(*args):
"""
Get siblings of a given attribute
:param args: dict, validated argument
:return: list, sibling attrs || status (bool)
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], listSiblings=True) or []
except Exception as e:
logger.error('|Attribute Siblings Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def get_family_dict(*args):
"""
Gets family dictionary of a given attribute
:param args: dict, validated argument
:return: dict
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
obj = attr_dict['obj']
attr = attr_dict['attr']
return_dict = {}
attrs = maya.cmds.attributeQuery(attr, node=obj, listParent=True)
if attrs is not None:
return_dict['parent'] = attrs[0]
attrs = maya.cmds.attributeQuery(attr, node=obj, listChildren=True)
if attrs is not None:
return_dict['children'] = attrs
attrs = maya.cmds.attributeQuery(attr, node=obj, listSiblings=True)
if attrs is not None:
return_dict['siblings'] = attrs
if return_dict:
return return_dict
return False
@staticmethod
def get_numeric_attribute_state(*args):
"""
Returns a dictionary of max, min, range, soft and default settings of a given numeric attribute
:param args: dict, validated argument
:return: dict
default
min
max
softMin
softMax
range
softRange
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
combined = attr_dict['combined']
obj = attr_dict['obj']
attr = attr_dict['attr']
data_dict = dict()
numeric = MetaAttributeUtils.is_numeric(attr_dict)
if not numeric and MetaAttributeUtils.get_children(attr_dict):
return {}
else:
try:
data_dict['min'] = MetaAttributeUtils.get_min(attr_dict)
except Exception:
data_dict['min'] = False
logger.debug('{0}.{1} failed to query min value'.format(obj, attr))
try:
data_dict['max'] = MetaAttributeUtils.get_max(attr_dict)
except Exception:
data_dict['max'] = False
logger.debug('{0}.{1} failed to query max value'.format(obj, attr))
try:
data_dict['default'] = MetaAttributeUtils.get_default(attr_dict)
except Exception:
data_dict['default'] = False
logger.debug('{0}.{1} failed to query default value'.format(obj, attr))
try:
data_dict['softMax'] = MetaAttributeUtils.get_soft_max(attr_dict)
except Exception:
data_dict['softMax'] = False
logger.debug('{0}.{1} failed to query soft max value'.format(obj, attr))
try:
data_dict['softMin'] = MetaAttributeUtils.get_soft_min(attr_dict)
except Exception:
data_dict['softMin'] = False
logger.debug('{0}.{1} failed to query soft min value'.format(obj, attr))
try:
data_dict['range'] = MetaAttributeUtils.get_range(attr_dict)
except Exception:
data_dict['range'] = False
logger.debug('{0}.{1} failed to query range value'.format(obj, attr))
try:
data_dict['softRange'] = MetaAttributeUtils.get_soft_range(attr_dict)
except Exception:
data_dict['softRange'] = False
logger.debug('{0}.{1} failed to query soft range value'.format(obj, attr))
return attr_dict
    @staticmethod
    def get_attribute_state(*args):
        """
        Returns a dictionary of locked, keyable and hidden states of the given attribute
        Always contains: type, locked, keyable, dynamic, hidden, numeric. Adds 'enum'
        for dynamic enums, numeric state keys for numeric types, and
        readable/writable/storable/usedAsColor for dynamic attributes
        :param args: dict, validated argument
        :return: dict
        """
        attr_dict = MetaAttributeUtils.validate_attribute(*args)
        combined = attr_dict['combined']
        obj = attr_dict['obj']
        attr = attr_dict['attr']
        # user defined attrs on the node; [] when listAttr returns None
        obj_attrs = maya.cmds.listAttr(obj, userDefined=True) or []
        data_dict = {'type': maya.cmds.getAttr(combined, type=True),
                     'locked': maya.cmds.getAttr(combined, lock=True),
                     'keyable': maya.cmds.getAttr(combined, keyable=True)
                     }
        # 'dynamic' here means user defined
        dynamic = False
        if attr in obj_attrs:
            dynamic = True
        data_dict['dynamic'] = dynamic
        # non-keyable attrs report hidden state via the channel box flag,
        # keyable attrs via attributeQuery
        hidden = not maya.cmds.getAttr(combined, channelBox=True)
        if data_dict.get('keyable'):
            hidden = maya.cmds.attributeQuery(attr, node=obj, hidden=True)
        data_dict['hidden'] = hidden
        # enum options are only queryable through addAttr on dynamic attrs
        if data_dict.get('type') == 'enum' and dynamic is True:
            data_dict['enum'] = maya.cmds.addAttr(combined, query=True, en=True)
        numeric = True
        if data_dict.get('type') in ['string', 'message', 'enum', 'bool']:
            numeric = False
        data_dict['numeric'] = numeric
        if numeric:
            # merge in min/max/default/soft-range data
            numeric_dict = MetaAttributeUtils.get_numeric_attribute_state(attr_dict)
            data_dict.update(numeric_dict)
        if dynamic:
            data_dict['readable'] = maya.cmds.addAttr(combined, query=True, r=True)
            data_dict['writable'] = maya.cmds.addAttr(combined, query=True, w=True)
            data_dict['storable'] = maya.cmds.addAttr(combined, query=True, s=True)
            data_dict['usedAsColor'] = maya.cmds.addAttr(combined, query=True, usedAsColor=True)
        return data_dict
    @staticmethod
    def copy_to(from_object, from_attr, to_object=None, to_attr=None, convert_to_match=True, values=True,
                in_connection=False, out_connections=False, keep_source_connections=True, copy_settings=True,
                driven=None):
        """
        Copy an attribute from one object to another. If the target attribute already exists,
        its value is copied over (converting the type when allowed); if it does not, it is created.
        :param from_object: str, object with attributes to copy
        :param from_attr: str, source attribute
        :param to_object: str, object we want to copy attributes to (defaults to from_object)
        :param to_attr: str, name of the attribute to copy to (defaults to from_attr)
        :param convert_to_match: bool, whether to automatically convert attribute if they need to be
        :param values: bool, whether to validate the source type before copying its value
        :param in_connection: bool, reproduce the source's incoming connection on the target
        :param out_connections: bool, reproduce the source's outgoing connections from the target
        :param keep_source_connections: bool, keeps connections on source
            (NOTE(review): not referenced anywhere in this body — confirm intent)
        :param copy_settings: bool, copy the attribute state of the from_attr (keyable, lock, and hidden)
        :param driven: str, 'target' connects source>target, 'source' connects target>source
        :return: bool
        """
        attr_dict = MetaAttributeUtils.validate_attribute(from_object, from_attr)
        combined = attr_dict['combined']
        node = attr_dict['node']
        to_obj = to_object
        to_attr_name = to_attr
        # default target: same object and/or same attribute name as the source
        if to_obj is None:
            logger.debug('|Attributes Copy| >> No to_object specified. Using from_object:{}'.format(from_object))
            to_obj = from_object
        if to_attr_name is None:
            logger.debug('|Attributes Copy| >> No to_attr specified. Using from-attr:{}'.format(from_attr))
            to_attr_name = from_attr
        attr_dict_target = MetaAttributeUtils.validate_attribute(to_obj, to_attr_name)
        if combined == attr_dict_target['combined']:
            raise ValueError('Cannot copy to itself')
        logger.debug('|Attributes Copy| >> source: {}'.format(combined))
        logger.debug('|Attributes Copy| >> target: {0} | {1}'.format(to_obj, to_attr_name))
        dict_source_flags = MetaAttributeUtils.get_attribute_state(attr_dict)
        # bail out early when asked for values but the source type is not copyable
        if values and not MetaAttributeUtils.validate_attr_type_name(dict_source_flags['type']):
            logger.warning(
                '|Attributes Copy| >> {0} is a {1} attribute and not valid for copying'.format(attr_dict['combined'],
                                                                                              dict_source_flags[
                                                                                                  'type']))
            return False
        # snapshot the source value and its incoming/outgoing connections
        _driver = MetaAttributeUtils.get_driver(attr_dict, skip_conversion_nodes=True)
        _driven = MetaAttributeUtils.get_driven(attr_dict, skip_conversion_nodes=True)
        _data = MetaAttributeUtils.get(attr_dict)
        logger.debug('|Attributes Copy| >> data: {}'.format(_data))
        logger.debug('|Attributes Copy| >> driver: {}'.format(_driver))
        logger.debug('|Attributes Copy| >> driven: {}'.format(_driven))
        if maya.cmds.objExists(attr_dict_target['combined']):
            # existing target: make sure the types match, converting when allowed
            dict_target_flags = MetaAttributeUtils.get_attribute_state(attr_dict_target)
            if not MetaAttributeUtils.validate_attr_type_name(dict_target_flags['type']):
                logger.warning('|Attributes Copy| >> {0} may not copy correctly. Type did not validate'.format(
                    attr_dict_target['combined']))
            if not MetaAttributeUtils.validate_attr_type_match(dict_source_flags['type'], dict_target_flags['type']):
                if dict_target_flags['dynamic'] and convert_to_match:
                    logger.debug('Attributes Copy| >> {} not the correct type, trying to convert it'.format(
                        attr_dict_target['combined']))
                    MetaAttributeUtils.convert_type(attr_dict_target, dict_source_flags['type'])
                else:
                    raise Exception(
                        '|Attributes Copy| >> {} not the correct type. Conversion is necessary '
                        'and convert_to_match is disabled'.format(
                            attr_dict_target['combined']))
        else:
            # missing target: create it with the source's type
            MetaAttributeUtils.add(attr_dict_target, dict_source_flags['type'])
        if _data is not None:
            try:
                MetaAttributeUtils.set(attr_dict_target, value=_data)
            except Exception as e:
                logger.debug('|Attributes Copy| >> Failed to set back data buffer {0} | data: {1} | err: {2}'.format(
                    attr_dict_target['combined'], _data, e))
        # reproduce the incoming connection on the target (message attrs excluded)
        if _driver and in_connection:
            if dict_source_flags['type'] != 'message':
                logger.debug('|Attributes Copy| >> Current Driver: {}'.format(_driver))
                try:
                    MetaAttributeUtils.connect(_driver, attr_dict_target['combined'])
                except Exception as e:
                    logger.error(
                        '|Attributes Copy| >> Failed to connect {0} >> {1} | err: {2}'.format(
                            _driver, attr_dict_target['combined'], e))
        # reproduce the outgoing connections from the target, skipping a self-connection
        if _driven and out_connections:
            logger.debug('|Attributes Copy| >> Current Driven: {}'.format(_driven))
            for c in _driven:
                dict_driven = MetaAttributeUtils.validate_attribute(c)
                if dict_driven['combined'] != attr_dict_target['combined']:
                    logger.debug('|Attributes Copy| >> driven: {}'.format(c))
                    try:
                        MetaAttributeUtils.connect(attr_dict_target['combined'], c)
                    except Exception as e:
                        logger.error(
                            '|Attributes Copy| >> Failed to connect {0} >> {1} | err: {2}'.format(
                                _driven, attr_dict_target['combined'], e))
        if copy_settings:
            # mirror enum options, numeric limits/defaults and channel-box state
            if dict_source_flags.get('enum'):
                maya.cmds.addAttr(attr_dict_target['combined'], e=True, at='enum', en=dict_source_flags['enum'])
            if dict_source_flags['numeric']:
                children = MetaAttributeUtils.get_children(attr_dict)
                if children:
                    # compound numeric: copy limits per child plug
                    for child in children:
                        dict_child = MetaAttributeUtils.get_attribute_state(node, child)
                        _buffer = '{0}.{1}'.format(attr_dict_target['node'], child)
                        if dict_child['default']:
                            maya.cmds.addAttr(_buffer, edit=True, dv=dict_child['default'])
                        if dict_child['max']:
                            maya.cmds.addAttr(_buffer, edit=True, maxValue=dict_child['max'])
                        if dict_child['min']:
                            maya.cmds.addAttr(_buffer, edit=True, minValue=dict_child['min'])
                        if dict_child['softMax']:
                            maya.cmds.addAttr(_buffer, edit=True, softMaxValue=dict_child['softMax'])
                        if dict_child['softMin']:
                            maya.cmds.addAttr(_buffer, edit=True, softMinValue=dict_child['softMin'])
                else:
                    if dict_source_flags['default']:
                        maya.cmds.addAttr(attr_dict_target['combined'], edit=True, dv=dict_source_flags['default'])
                    if dict_source_flags['max']:
                        maya.cmds.addAttr(attr_dict_target['combined'], edit=True, maxValue=dict_source_flags['max'])
                    if dict_source_flags['min']:
                        maya.cmds.addAttr(attr_dict_target['combined'], edit=True, minValue=dict_source_flags['min'])
                    if dict_source_flags['softMax']:
                        maya.cmds.addAttr(attr_dict_target['combined'], edit=True,
                                          softMaxValue=dict_source_flags['softMax'])
                    if dict_source_flags['softMin']:
                        maya.cmds.addAttr(attr_dict_target['combined'], edit=True,
                                          softMinValue=dict_source_flags['softMin'])
            maya.cmds.setAttr(attr_dict_target['combined'], edit=True, channelBox=not dict_source_flags['hidden'])
            maya.cmds.setAttr(attr_dict_target['combined'], edit=True, keyable=dict_source_flags['keyable'])
            maya.cmds.setAttr(attr_dict_target['combined'], edit=True, lock=dict_source_flags['locked'])
        # optional driving connection between source and target
        if driven == 'target':
            try:
                MetaAttributeUtils.connect(attr_dict, attr_dict_target)
            except Exception as e:
                logger.error(
                    '|Attributes Copy| >> Failed to connect source to target {0} >> {1} | err: {2}'.format(
                        combined, attr_dict_target['combined'], e))
        elif driven == 'source':
            try:
                MetaAttributeUtils.connect(attr_dict_target, attr_dict)
            except Exception as e:
                logger.error('|Attributes Copy| >> Failed to connect target to source {0} >> {1} | err: {2}'.format(
                    attr_dict_target['combined'], combined, e))
        # restore the lock last so the earlier edits could go through
        if dict_source_flags['locked']:
            maya.cmds.setAttr(attr_dict_target['combined'], lock=True)
        return True
@staticmethod
def is_connected(*args):
"""
Returns true if a given attribute is connected to another one
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
combined = attr_dict['combined']
if maya.cmds.connectionInfo(combined, isDestination=True):
return True
return False
@staticmethod
def connect(from_attr, to_attr, lock=False, **kwargs):
"""
Connects attributes. Handles locks on source or end automatically
:param from_attr: str
:param to_attr: str
:param lock: bool
:param kwargs:
:return: bool
"""
from_attr_dict = MetaAttributeUtils.validate_attribute(from_attr)
from_combined = from_attr_dict['combined']
to_attr_dict = MetaAttributeUtils.validate_attribute(to_attr)
to_combined = to_attr_dict['combined']
logger.debug('|Attribute Connection| >> Connecting {0} to {1}'.format(from_combined, to_combined))
assert from_combined != to_combined, 'Cannot connect an attribute to itself'
was_locked = False
if maya.cmds.objExists(to_combined):
if maya.cmds.getAttr(to_combined, lock=True):
was_locked = True
maya.cmds.setAttr(to_combined, lock=False)
MetaAttributeUtils.break_connection(to_attr_dict)
maya.cmds.connectAttr(from_combined, to_combined, **kwargs)
if was_locked or lock:
maya.cmds.setAttr(to_combined, lock=True)
return True
@staticmethod
def disconnect(from_attr, to_attr):
"""
Disconnects attributes. Handles locks on source or end automatically
:param from_attr: str, node.attribute
:param to_attr: attribute type dependant
:return: bool
"""
from_attr_dict = MetaAttributeUtils.validate_attribute(from_attr)
from_combined = from_attr_dict['combined']
to_attr_dict = MetaAttributeUtils.validate_attribute(to_attr)
to_combined = to_attr_dict['combined']
driven_lock = False
if maya.cmds.getAttr(to_combined, lock=True):
driven_lock = True
maya.cmds.setAttr(to_combined, lock=False)
source_lock = False
if maya.cmds.getAttr(from_combined, lock=True):
source_lock = True
maya.cmds.setAttr(from_combined, lock=False)
maya.cmds.disconnectAttr(from_combined, to_combined)
if driven_lock:
maya.cmds.setAttr(to_combined, lock=True)
if source_lock:
maya.cmds.setAttr(from_combined, lock=True)
return True
    @staticmethod
    def break_connection(*args):
        """
        Breaks the incoming connection of a given attribute. Handles locks on source or end automatically
        :param args: dict, validated argument
        :return: the broken source plug(s) || False when nothing was connected
        """
        attr_dict = MetaAttributeUtils.validate_attribute(*args)
        combined = attr_dict['combined']
        obj = attr_dict['obj']
        attr = attr_dict['attr']
        driven_attr = combined
        family = dict()
        # message attributes: also break every outgoing destination plug first
        if MetaAttributeUtils.get_type(attr_dict) == 'message':
            logger.debug('|Attribute Break Connection| >> message')
            dst = maya.cmds.listConnections(
                combined, skipConversionNodes=False, destination=True, source=False, plugs=True)
            if dst:
                for child_attr in dst:
                    logger.debug('|Attribute Break Connection| >> Disconnecting attr {}'.format(child_attr))
                    MetaAttributeUtils.disconnect(driven_attr, child_attr)
        if maya.cmds.connectionInfo(combined, isDestination=True):
            source_connections = maya.cmds.listConnections(
                combined, skipConversionNodes=False, destination=False, source=True, plugs=True)
            if not source_connections:
                # no direct source plug: the connection may live on a parent compound,
                # so gather the family and ask connectionInfo instead
                family = MetaAttributeUtils.get_family_dict(attr_dict)
                source_connections = maya.cmds.connectionInfo(combined, sourceFromDestination=True)
            else:
                source_connections = source_connections[0]
            if not source_connections:
                logger.warning('|Attribute Break Connection| >> No source for "{0}.{1}" found!'.format(obj, attr))
                return False
            logger.debug('|Attribute Break Connection| >> Source Connections: {}'.format(source_connections))
            if family and family.get('parent'):
                # the actual driven plug is the parent compound attribute
                logger.debug('|Attribute Break Connection| >> Attribute Family: {}'.format(family))
                driven_attr = '{0}.{1}'.format(obj, family.get('parent'))
            logger.debug(
                '|Attribute Break Connection| >> Breaking {0} >>> to >>> {1}'.format(source_connections, driven_attr))
            MetaAttributeUtils.disconnect(from_attr=source_connections, to_attr=driven_attr)
            return source_connections
        return False
@staticmethod
def has_attr(*args):
"""
Returns True if the given attribute exists or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
if maya.cmds.objExists(attr_dict['combined']):
return True
return False
except Exception as e:
logger.error('|Has Attribute| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
return False
@staticmethod
def get_name_long(*args):
"""
Get the long name of an attribute
:param args: dict, validated argument
:return: str
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], longName=True) or False
except Exception:
if maya.cmds.objExists(attr_dict['combined']):
return attr_dict['attr']
else:
raise RuntimeError(
'|Long Attribute Name Getter| >> Attribute does nost exists: {}'.format(attr_dict['combined']))
@staticmethod
def is_locked(*args):
"""
Returns True if the given attribute is locked or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.getAttr(attr_dict['combined'], lock=True)
except Exception as e:
logger.error('|Attribute Locker| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def set_lock(node, attr=None, arg=None):
"""
Set the lock status of an attribute
:param node: str
:param attr: str
:param arg: bool
:return:
"""
if '.' in node or issubclass(type(node), dict):
attr_dict = MetaAttributeUtils.validate_attribute(node)
if attr_dict is None and attr is not None:
attr_dict = attr
else:
attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
combined = attr_dict['combined']
obj = attr_dict['node']
children = MetaAttributeUtils.get_children(attr_dict)
if children:
for i, child in enumerate(children):
maya.cmds.setAttr('{0}.{1}'.format(obj, child), lock=arg)
else:
maya.cmds.setAttr(combined, lock=arg)
@staticmethod
def is_hidden(*args):
"""
Returns True if the given attribute is hidden or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
hidden = not maya.cmds.getAttr(attr_dict['combined'], channelBox=True)
if MetaAttributeUtils.is_keyed(attr_dict):
hidden = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], hidden=True)
return hidden
    @staticmethod
    def set_hidden(node, attr=None, arg=None):
        """
        Set the hidden status of the given attribute
        :param node: str, node name, 'node.attr' string or validated attribute dict
        :param attr: str, attribute name (or the hidden state when node is 'node.attr')
        :param arg: bool, True to hide the attribute, False to show it
        """
        # support set_hidden('node.attr', state) shorthand
        if '.' in node or issubclass(type(node), dict):
            attr_dict = MetaAttributeUtils.validate_attribute(node)
            if arg is None and attr is not None:
                arg = attr
        else:
            attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
        combined = attr_dict['combined']
        obj = attr_dict['node']
        children = MetaAttributeUtils.get_children(attr_dict)
        if arg:
            # hiding: compound attrs are handled child by child; keyed plugs are
            # made non-keyable before the channel-box flag is cleared
            if children:
                for child in children:
                    child_attr_dict = MetaAttributeUtils.validate_attribute(node, child)
                    if not MetaAttributeUtils.is_hidden(child_attr_dict):
                        if MetaAttributeUtils.is_keyed(child_attr_dict):
                            MetaAttributeUtils.set_keyable(child_attr_dict, arg=False)
                        maya.cmds.setAttr(child_attr_dict['combined'], e=True, channelBox=False)
            elif not MetaAttributeUtils.is_hidden(attr_dict):
                if MetaAttributeUtils.is_keyed(attr_dict):
                    MetaAttributeUtils.set_keyable(attr_dict, arg=False)
                maya.cmds.setAttr(combined, e=True, channelBox=False)
        else:
            # showing: only touch plugs that are currently hidden
            if children:
                for child in children:
                    child_attr_dict = MetaAttributeUtils.validate_attribute(node, child)
                    if MetaAttributeUtils.is_hidden(child_attr_dict):
                        maya.cmds.setAttr(child_attr_dict['combined'], e=True, channelBox=True)
            elif MetaAttributeUtils.is_hidden(attr_dict):
                maya.cmds.setAttr(combined, e=True, channelBox=True)
@staticmethod
def get_keyed(node):
"""
Returns list of keyed attributes
:param node: dict, validated argument
:return: list<attributes>
"""
result = list()
for attr in maya.cmds.listAttr(node, keyable=True):
if MetaAttributeUtils.is_keyed(node, attr):
result.append(attr)
return result
@staticmethod
def is_keyed(*args):
""""
Returns True if the given attribute is keyable or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if maya.cmds.keyframe(attr_dict['combined'], query=True):
return True
return False
    @staticmethod
    def set_keyable(node, attr=None, arg=None):
        """
        Set the keyable status of the given attribute
        :param node: str, node name, 'node.attr' string or validated attribute dict
        :param attr: str, attribute name (or the keyable state when node is 'node.attr')
        :param arg: bool, keyable state to apply
        """
        # support set_keyable('node.attr', state) shorthand
        if '.' in node or issubclass(type(node), dict):
            attr_dict = MetaAttributeUtils.validate_attribute(node)
            if arg is None and attr is not None:
                arg = attr
        else:
            attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
        combined = attr_dict['combined']
        obj = attr_dict['node']
        children = MetaAttributeUtils.get_children(attr_dict)
        if children:
            # compound attribute: apply per child plug
            for i, child in enumerate(children):
                if not arg:
                    # snapshot the hidden state: clearing keyable can change it
                    hidden = MetaAttributeUtils.is_hidden(obj, child)
                maya.cmds.setAttr('{0}.{1}'.format(obj, child), e=True, keyable=arg)
                if not arg and MetaAttributeUtils.is_hidden(obj, child) != hidden:
                    # restore the pre-change hidden state
                    MetaAttributeUtils.set_hidden(obj, child, hidden)
        else:
            if not arg:
                hidden = MetaAttributeUtils.is_hidden(attr_dict)
            maya.cmds.setAttr(combined, e=True, keyable=arg)
            if not arg and MetaAttributeUtils.is_hidden(attr_dict) != hidden:
                MetaAttributeUtils.set_hidden(attr_dict, hidden)
@staticmethod
def get_enum(*args):
"""
Returns enum attribute
:param args: dict, validated argument
:return: variant, str || bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if MetaAttributeUtils.get_type(attr_dict) == 'enum':
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], listEnum=True)[0]
return False
@staticmethod
def is_multi(*args):
"""
:Check if the given attribute is a valid Maya multi attribute
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
if not MetaAttributeUtils.is_dynamic(attr_dict):
return False
return maya.cmds.addAttr(attr_dict['combined'], query=True, m=True)
except Exception as e:
logger.error(('|Is Multi Attribute| >> {0} | {1}'.format(attr_dict['combined'], e)))
return False
@staticmethod
def is_dynamic(*args):
"""
Returns True if the given attribute is dynamic or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
user_defined = maya.cmds.listAttr(attr_dict['obj'], userDefined=True) or []
if attr_dict['attr'] in user_defined:
return True
return False
@staticmethod
def is_numeric(*args):
"""
Returns if an attribute is numeric or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if maya.cmds.getAttr(attr_dict['combined'], type=True) in ['string', 'message', 'enum', 'bool']:
return False
return True
@staticmethod
def is_readable(*args):
"""
Returns if an attribute is readable or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if not MetaAttributeUtils.is_dynamic(attr_dict):
return False
return maya.cmds.addAttr(attr_dict['combined'], query=True, r=True) or False
@staticmethod
def is_writable(*args):
"""
Returns if an attribute is writable or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if not MetaAttributeUtils.is_dynamic(attr_dict):
return False
return maya.cmds.addAttr(attr_dict['combined'], query=True, w=True) or False
@staticmethod
def is_storable(*args):
"""
Returns True if an attribute is storable or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if not MetaAttributeUtils.is_dynamic(attr_dict):
return False
return maya.cmds.addAttr(attr_dict['combined'], query=True, s=True) or False
@staticmethod
def is_used_as_color(*args):
"""
Returns True if an attribute is used as color or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if not MetaAttributeUtils.is_dynamic(attr_dict):
return False
return maya.cmds.addAttr(attr_dict['combined'], query=True, usedAsColor=True) or False
@staticmethod
def is_user_defined(*args):
"""
Returns True if an attribute is user defined or False otherwise
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
if MetaAttributeUtils.get_name_long(attr_dict) in maya.cmds.listAttr(attr_dict['node'], userDefined=True):
return True
return False
@staticmethod
def get_default(*args):
"""
Returns the default value of the given integer attribute
:param args: dict, validated argument
:return: dict
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
combined = attr_dict['combined']
node = attr_dict['node']
attr = attr_dict['attr']
if not MetaAttributeUtils.is_dynamic(attr_dict):
long_attr = MetaAttributeUtils.get_name_long(attr_dict)
if long_attr in ['translateX', 'translateY', 'translateZ', 'translate',
'rotateX', 'rotateY', 'rotateZ', 'rotate',
'scaleX', 'scaleY', 'scaleZ', 'scale']:
if 'scale' in long_attr:
if long_attr == 'scale':
return [1.0, 1.0, 1.0]
return 1.0
else:
if long_attr in ['rotate', 'translate']:
return [0.0, 0.0, 0.0]
return 0.0
return False
if type(maya.cmds.addAttr(combined, query=True, defaultValue=True)) is int or float:
result = maya.cmds.attributeQuery(attr, node=node, listDefault=True)
if result is not False:
if len(result) == 1:
return result[0]
return result
return False
@staticmethod
def get_max(*args):
"""
Returns the maximum value of the given integer attribute
:param args: dict, validated argument
:return: variant, float || bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
if maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], maxExists=True):
result = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], maximum=True)
if result is not False:
if len(result) == 1:
return result[0]
return result
except Exception as e:
logger.error('|Max Attribute Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
return False
@staticmethod
def get_min(*args):
"""
Returns the minimum value of the given integer attribute
:param args: dict, validated argument
:return: variant, float || bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
if maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], minExists=True):
result = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], minimum=True)
if result is not False:
if len(result) == 1:
return result[0]
return result
except Exception as e:
logger.error('|Min Attribute Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
return False
@staticmethod
def get_range(*args):
"""
Returns the range of the given integer attribute
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], range=True) or False
except Exception as e:
logger.error('|Integer Range Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def get_soft_range(*args):
"""
Returns the soft range of the given integer attribute
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], softRange=True) or False
except Exception as e:
logger.error('|Integer Range Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
@staticmethod
def get_soft_max(*args):
"""
Returns the soft maximum value of the given integer attribute
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
result = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], softMin=True)
if result is not False:
if len(result) == 1:
return result[0]
return result
except Exception as e:
logger.error('|Integer Soft Max Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
return False
@staticmethod
def get_soft_min(*args):
"""
Returns the soft minimum value of the given integer attribute
:param args: dict, validated argument
:return: bool
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
try:
result = maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], softMax=True)
if result is not False:
if len(result) == 1:
return result[0]
return result
except Exception as e:
logger.error('|Integer Soft Max Getter| >> {0} | {1}'.format(attr_dict['combined'], e))
return False
return False
@staticmethod
def get_nice_name(*args):
"""
Returns the nice name of the given attribute
:param args: dict, validated argument
:return: str
"""
attr_dict = MetaAttributeUtils.validate_attribute(*args)
return maya.cmds.attributeQuery(attr_dict['attr'], node=attr_dict['node'], niceName=True) or False
@staticmethod
def rename_nice_name(node=None, attr=None, name=None):
"""
Set the nice name of the given attribute
:param node: str
:param attr: str
:param name: str, new name. If None, assumes obj argument is combined and uses attr. If False, clears the nice
name
:return: bool
"""
if name is None:
name = attr
attr_dict = MetaAttributeUtils.validate_attribute(node)
else:
attr_dict = MetaAttributeUtils.validate_attribute(node, attr)
lock = MetaAttributeUtils.is_locked(attr_dict)
if lock:
MetaAttributeUtils.set_lock(attr_dict, False)
if name:
maya.cmds.addAttr(attr_dict['combined'], edit=True, niceName=name)
elif not name:
maya.cmds.addAttr(attr_dict['combined'], edit=True, niceName=attr_dict['attr'])
if lock:
MetaAttributeUtils.set_lock(attr_dict, True)
return MetaAttributeUtils.get_nice_name(attr_dict)
    @staticmethod
    def get_message(message_holder, message_attr=None, data_attr=None, data_key=None, simple=False):
        """
        Returns the target(s) stored on a message attribute, optionally resolving extra
        sub-attribute data stored on a companion string attribute
        :param message_holder: str or dict, node holding the message attribute, a combined
            'node.attr' string, or an already validated attribute dict
        :param message_attr: str, message attribute name (when message_holder is just a node);
            treated as the data attribute name when message_holder is the combined form
        :param data_attr: str, name of the string attribute holding extra data
        :param data_key: variant, key into the extra data dict; defaults to message_attr
        :param simple: bool, if True returns the raw connection list without extra data lookup
        :return: variant, list of node (or node.attr) names or None
        """
        from tpDcc.dccs.maya.meta import metanode
        data = data_attr
        # Accept either (node, attr) or a combined 'node.attr' / validated dict as first argument.
        # In the combined form the second positional argument becomes the data attribute name.
        if '.' in message_holder or issubclass(type(message_holder), dict):
            attr_dict = MetaAttributeUtils.validate_attribute(message_holder)
            data = message_attr
        else:
            attr_dict = MetaAttributeUtils.validate_attribute(message_holder, message_attr)
        combined = attr_dict['combined']
        if data_key is None:
            # NOTE(review): in the combined-holder form message_attr (and thus data_key) may be
            # None here - confirm callers pass an explicit data_key in that case
            data_key = message_attr
        else:
            data_key = str(data_key)
        logger.debug('|Message Getter| >> {0} || data_attr: {1} | data_Key: {2}'.format(combined, data, data_key))
        if not maya.cmds.objExists(combined):
            logger.debug('|Message Getter| >> {0} | No attribute exists'.format(combined))
            return None
        dict_type = MetaAttributeUtils.get_type(attr_dict)
        if dict_type in ['string']:
            # String attributes are used here as 'special' message carriers; resolve via their
            # single plug-level connection and keep only the node part
            logger.debug('|Message Getter| >> Special Message Attr ...')
            msg_buffer = maya.cmds.listConnections(combined, p=True)
            if msg_buffer and len(msg_buffer) == 1:
                msg_buffer = [msg_buffer[0].split('.')[0]]
            else:
                raise ValueError('Invalid message {}'.format(msg_buffer))
        else:
            msg_buffer = maya.cmds.listConnections(combined, destination=True, source=True, shapes=True)
            # Connections through a reference node are re-queried without shapes
            if msg_buffer and maya.cmds.objectType(msg_buffer[0]) == 'reference':
                msg_buffer = maya.cmds.listConnections(combined, destination=True, source=True)
        if MetaAttributeUtils.is_multi(attr_dict):
            # Multi message: return every connected node as-is
            logger.debug('|Message Getter| >> Multimessage')
            if msg_buffer:
                return msg_buffer
            return None
        else:
            logger.debug('|Message Getter| >> Single Message')
            if simple:
                return msg_buffer
            data = 'MsgData'
            if data_attr:
                data = data_attr
            if '.' in data or issubclass(type(data), dict):
                attr_dict_data = MetaAttributeUtils.validate_attribute(data)
            else:
                attr_dict_data = MetaAttributeUtils.validate_attribute(message_holder, data)
            if msg_buffer is not None:
                if maya.cmds.objExists(msg_buffer[0]) and not maya.cmds.objectType(msg_buffer[0]) == 'reference':
                    meta_node = metanode.MetaNode(attr_dict['node'])
                    if meta_node.has_attr(attr_dict_data['attr']):
                        dict_buffer = meta_node.__getattribute__(attr_dict_data['attr']) or {}
                        if dict_buffer.get(data_key):
                            # Rebuild the stored 'node.attr' target from the extra data dict
                            logger.debug('|Message Getter| >> Extra Message Data Found')
                            return [msg_buffer[0] + '.' + dict_buffer.get(data_key)]
                    return msg_buffer
                else:
                    # Target missing or referenced: attempt to repair the message link
                    return attr_utils.repair_message_to_reference_target(combined)
                    # return repairMessageToReferencedTarget(storageObject, messageAttr)
            return None
@staticmethod
def set_message(message_holder, message_attr, message, data_attr=None, data_key=None, simple=False,
connect_back=None):
"""
:param message_holder:
:param message_attr:
:param message:
:param data_attr:
:param data_key:
:param simple:
:param connect_back:
:return:
"""
from tpDcc.dccs.maya.meta import metanode
try:
attr_dict = MetaAttributeUtils.validate_attribute(message_holder, message_attr)
combined = attr_dict['combined']
mode = 'reg'
messaged_node = None
messaged_extra = None
dict_data_attr = None
if data_attr is None:
data_attr = '{}_datdict'.format(message_attr)
multi = False
if maya.cmds.objExists(combined) and maya.cmds.addAttr(combined, query=True, m=True):
multi = True
if not message:
logger.debug('|Message Setter| >> MultiMessage delete')
MetaAttributeUtils.delete(combined)
MetaAttributeUtils.add(combined, 'message', m=True, im=False)
return True
if issubclass(type(message), list) or multi:
def store_message_multi(message_nodes, holder_dict):
for n in message_nodes:
try:
MetaAttributeUtils.connect((n + '.message'), holder_dict['combined'], next_available=True)
except Exception as e:
logger.warning('|Message Setter| >> {0} failed: {1}'.format(n, e))
if len(message) > 1 or multi:
if maya.cmds.objExists(combined):
if not MetaAttributeUtils.get_type(combined) == 'message':
logger.warning('|Message Setter| >> Not a message attribute. Converting ...')
MetaAttributeUtils.delete(attr_dict)
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=True, im=False)
store_message_multi(message, attr_dict)
return True
_buffer = MetaAttributeUtils.get_message(combined, data_attr)
if not maya.cmds.addAttr(combined, query=True, m=True):
logger.warning(('|Message Setter| >> Not a multi message attribute. Converting ...'))
MetaAttributeUtils.delete(attr_dict)
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=True, im=False)
store_message_multi(message, attr_dict)
return True
else:
logger.debug('|Message Setter| >> Multimesssage')
message_long = [name_utils.get_long_name(m) for m in message]
if _buffer and [name_utils.get_long_name(m) for m in _buffer] == message_long:
logger.debug('|Message Setter| >> Message match. Good to go')
return True
else:
logger.debug('|Message Setter| >> Messages do not match')
connections = MetaAttributeUtils.get_driven(combined)
if connections:
for c in connections:
MetaAttributeUtils.break_connection(c)
MetaAttributeUtils.delete(attr_dict)
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=True, im=False)
store_message_multi(message, attr_dict)
else:
logger.debug('|Message Setter| >> New Attribute ...')
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=True, im=False)
store_message_multi(message, attr_dict)
return True
else:
if message:
message = message[0]
if not message:
MetaAttributeUtils.break_connection(attr_dict)
return True
elif '.' in message:
if MetaAttributeValidator.is_component(message):
list_msg = MetaAttributeValidator.get_component(message)
messaged_node = list_msg[1]
if simple:
message = list_msg[1]
logger.debug('|Message Setter| >> Simle. Using {0} | {1}'.format(message, list_msg))
else:
mode = 'comp'
logger.debug('|Message Setter| >> ComponentMessage: {}'.format(list_msg))
messaged_extra = list_msg[0]
else:
dict_msg = MetaAttributeUtils.validate_attribute(message)
messaged_node = dict_msg['node']
if simple:
message = dict_msg['node']
logger.debug('|Message Setter| >> Simle. Using {0} | {1}'.format(message, dict_msg))
else:
mode = 'attr'
logger.debug('|Message Setter| >> AttrMessage: {}'.format(dict_msg))
messaged_extra = dict_msg['attr']
elif MetaAttributeValidator.is_shape(message):
mode = 'shape'
messaged_node = message
else:
messaged_node = message
message_long = name_utils.get_long_name(message)
_data_attr = 'MsgData_'
if data_attr is not None:
_data_attr = data_attr
if data_key is None:
data_key = message_attr
else:
data_key = str(data_key)
logger.debug(
'|Message Setter| >> mode: {0} | data_attr: {1} | data_key: {2}'.format(mode, _data_attr, data_key))
logger.debug(
'|Message Setter| >> message_holder: {0} | message_attr: {1}'.format(message_holder, message_attr))
logger.debug(
'|Message Setter| >> messaged_node: {0} | messaged_extra: {1} | message_long: {2}'.format(
messaged_node, messaged_extra, message_long))
if messaged_extra:
if '.' in _data_attr:
dict_data_attr = MetaAttributeUtils.validate_attribute(data_attr)
else:
dict_data_attr = MetaAttributeUtils.validate_attribute(message_holder, _data_attr)
def store_message(msg_node, msg_extra, holder_dict, data_attr_dict=None, data_key=None, mode=None):
if mode not in ['shape']:
MetaAttributeUtils.connect((msg_node + '.message'), holder_dict['combined'])
if msg_extra:
logger.debug('|Message Setter| >> {0}.{1} stored to: {2}'.format(
msg_node, msg_extra, holder_dict['combined']))
if not maya.cmds.objExists(data_attr_dict['combined']):
MetaAttributeUtils.add(data_attr_dict['node'], data_attr_dict['attr'], 'string')
if MetaAttributeUtils.get_type(data_attr_dict['combined']) != 'string':
raise ValueError(
'DataAttr must be string. {0} is type {1}'.format(
data_attr_dict['combined'], MetaAttributeUtils.get_type(data_attr_dict['combined'])))
meta_node = metanode.MetaNode(attr_dict['node'])
dict_buffer = meta_node.__getattribute__(data_attr_dict['attr']) or {}
dict_buffer[data_key] = messaged_extra
logger.debug('|Message Setter| >> buffer: {}'.format(dict_buffer))
meta_node.__setattr__(data_attr_dict['attr'], dict_buffer)
return True
logger.debug('|Message Setter| >> "{0}" stored to: "{1}"'.format(msg_node, combined))
return True
if mode == 'shape':
MetaAttributeUtils.copy_to(messaged_node, 'viewName', message_holder, message_attr, driven='target')
store_message(messaged_node, messaged_extra, attr_dict, dict_data_attr, 'shape')
return True
if maya.cmds.objExists(combined):
if not MetaAttributeUtils.get_type(combined) == 'message':
logger.warning('|Message Setter| >> Not a message attribute. Converting ...')
MetaAttributeUtils.delete(attr_dict)
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=False)
store_message(messaged_node, messaged_extra, attr_dict, dict_data_attr, data_key)
return True
_buffer = MetaAttributeUtils.get_message(combined, data_attr, data_key=data_key, simple=simple)
if not maya.cmds.addAttr(combined, query=True, m=True):
logger.debug('|Message Setter| >> MessageSimple')
if _buffer and name_utils.get_long_name(_buffer[0]) == message_long:
logger.debug('|Message Setter| >> Message match. Good to go')
return True
else:
MetaAttributeUtils.break_connection(attr_dict)
store_message(messaged_node, messaged_extra, attr_dict, dict_data_attr, data_key)
else:
logger.debug('|Message Setter| >> MultiMessage')
if _buffer and name_utils.get_long_name(_buffer[0]) == message_long:
logger.ebug('|Message Setter| >> Message match. Good to go')
return True
else:
connections = MetaAttributeUtils.get_driven(combined)
if connections:
for c in connections:
MetaAttributeUtils.break_connection(c)
MetaAttributeUtils.delete(attr_dict)
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=False)
store_message(messaged_node, messaged_extra, attr_dict, dict_data_attr, data_key)
else:
logger.debug('|Message Setter| >> New Attribute')
MetaAttributeUtils.add(message_holder, message_attr, 'message', m=False)
store_message(messaged_node, messaged_extra, attr_dict, dict_data_attr, data_key)
return True
except Exception as e:
raise Exception(traceback.format_exc())
class MetaDataListUtils(object):
    """
    Utility functions to work with sequentially indexed attributes (dataLists): attr_0, attr_1, ...
    """

    @staticmethod
    def get_sequential_attr_dict(node, attr=None):
        """
        Returns a dictionary of sequential user defined attributes
        :param node: str, name of the node we want to get attributes of
        :param attr: str, base name of the sequential attributes (attr_0, attr_1, ...)
        :return: dict, index (int) -> attribute name
        """
        result = dict()
        user_attrs = maya.cmds.listAttr(node, userDefined=True) or list()
        for a in user_attrs:
            if '_' in a:
                split_attr = a.split('_')
                split_index = split_attr[-1]
                split_str = ('_').join(split_attr[:-1])
                if str(attr) == split_str:
                    try:
                        result[int(split_index)] = a
                    except Exception:
                        # Suffix is not an integer: not part of this dataList
                        logger.debug('|get_sequential_attr_dict| >> {}.{} failed to int | int: {}'.format(
                            name_utils.get_short_name(node), attr, split_index))
        return result

    @staticmethod
    def get_next_available_sequential_attr_index(node, attr=None):
        """
        Returns next available attribute index in sequence
        :param node: str
        :param attr: str, base name of the sequential attributes
        :return: variant, int (first free index) or False if none found within 100
        """
        exists = False
        count = 0
        while not exists and count < 100:
            a = '{}_{}'.format(attr, count)
            logger.debug('|get_next_available_sequential_attr_index| >> {}'.format(a))
            if MetaAttributeUtils.has_attr(node, a):
                count += 1
            else:
                exists = True
        # BUGFIX: the return was previously inside the loop, so the first iteration always
        # returned (even when the index was taken) and the False branch was unreachable
        if exists:
            return count
        return False

    @staticmethod
    def data_list_purge(node=None, attr=None, data_attr=None):
        """
        Purges a dataList if it exists in the given node
        :param node: str
        :param attr: str, name of the dataList attribute
        :param data_attr: str, extra data attribute name; defaults to '<attr>_datdict'
        :return: bool
        """
        from tpDcc.dccs.maya.meta import metanode
        fn_name = 'data_list_purge'
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node, attr)
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        for k in attrs_dict.keys():
            str_attr = attrs_dict[k]
            MetaAttributeUtils.delete(node, str_attr)
            logger.debug('|{}| >> Removed: {}.{}'.format(fn_name, node, str_attr))
        # Best effort: also remove the companion extra-data attribute if present
        try:
            mn = metanode.MetaNode(node)
            if mn.has_attr(data_attr):
                MetaAttributeUtils.delete(node, data_attr)
                logger.debug('|{}| >> Removed: {}.{}'.format(fn_name, node, data_attr))
        except Exception:
            pass
        return True

    @staticmethod
    def data_list_exists(node=None, attr=None, mode=None, data_attr=None):
        """
        Checks if a dataList attr exists in the given node
        :param node: str
        :param attr: str, base name of the dataList attribute
        :param mode: str, 'message' to check message links; anything else checks values
        :param data_attr: str, extra data attribute name; defaults to '<attr>_datdict'
        :return: bool
        """
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node, attr)
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        for k in attrs_dict.keys():
            str_attr = attrs_dict[k]
            if mode == 'message':
                if MetaAttributeUtils.get_message(node, str_attr, data_attr):
                    return True
            elif MetaAttributeUtils.get(node, str_attr) is not None:
                return True
        return False

    @staticmethod
    def data_list_connect(node=None, attr=None, data=None, mode=None, data_attr=None):
        """
        Multimessage data is not ordered by default. Using this function we can add handle multiMessage lists
        through indexes
        :param node: str, node to add dataList attr
        :param attr: str, name of the dataList attribute
        :param data: data to store, in order
        :param mode: str, 'message' to store message links; anything else stores values
        :param data_attr: str, extra data attribute name; defaults to '<attr>_datdict'
        :return: bool
        """
        from tpDcc.dccs.maya.meta import metanode
        fn_name = 'data_list_connect'
        data_list = MetaAttributeValidator.list_arg(data)
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        logger.info("|{0}| >> node: {1} | attr: {2} | mode: {3}".format(fn_name, node, attr, mode))
        logger.info("|{0}| >> data | len: {1} | list: {2}".format(fn_name, len(data_list), data_list))
        # Remember outgoing connections of existing indexed attrs so they can be rewired
        attrs_list = MetaDataListUtils.data_list_get_attrs(node=node, attr=attr)
        driven_dict = dict()
        for i, a in enumerate(attrs_list):
            driven = MetaAttributeUtils.get_driven(node=node, attr=a)
            if driven:
                driven_dict[i] = driven
            else:
                driven_dict[i] = False
        MetaDataListUtils.data_list_purge(node=node, attr=attr)
        # NOTE(review): instantiating the meta node looks intentional (side effect) - confirm
        meta_node = metanode.MetaNode(node)
        if mode == 'message':
            MetaMessageListUtils.message_list_connect(node=node, attr=attr, data=data_list, data_attr=data_attr)
        else:
            for i, d in enumerate(data_list):
                attr_str = '{}_{}'.format(attr, i)
                # BUGFIX: each indexed attribute now stores its own element (d); previously
                # the whole list (data) was stored on every index
                MetaDataListUtils.store_info(node, attr_str, d, mode)
                plug = driven_dict.get(i)
                if plug:
                    for p in plug:
                        try:
                            MetaAttributeUtils.connect('{}.{}'.format(node, attr_str), p)
                        except Exception as e:
                            logger.warning(
                                "|{0}| >> Failed to reconnect {1} | driven: {2} | err: {3}".format(fn_name, attr_str, p,
                                                                                                   e))
        return True

    @staticmethod
    def data_list_get(node=None, attr=None, mode=None, data_attr=None, cull=False, as_meta=True):
        """
        Returns the values stored in a dataList
        :param node: str
        :param attr: str, base name of the dataList attribute
        :param mode: str, 'message' to resolve message links; anything else reads values
        :param data_attr: str, extra data attribute name; defaults to '<attr>_datdict'
        :param cull: bool, if True removes empty/False entries from the result
        :param as_meta: bool, if True returns meta node instances instead of names
        :return: list
        """
        from tpDcc.dccs.maya.meta import metanode
        fn_name = 'data_list_get'
        if mode is not None:
            _mode = MetaAttributeUtils.validate_attr_type_name(mode)
        else:
            _mode = mode
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        logger.debug('|{}| >> node: {} | attr: {} | mode: {} | cull: {}'.format(fn_name, node, attr, _mode, cull))
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node, attr)
        return_list = list()
        for k in attrs_dict.keys():
            if _mode == 'message':
                res = MetaAttributeUtils.get_message(node, attrs_dict[k], data_attr, k) or False
                if res:
                    res = res[0]
            else:
                try:
                    res = MetaAttributeUtils.get(node, attrs_dict[k])
                except Exception as e:
                    logger.warning('|{}| >> {}.{} failed" || err: {}'.format(fn_name, node, attrs_dict[k], e))
                    res = None
            if issubclass(type(res), list):
                # Flatten lists of nodes; keep raw lists (e.g. vectors) as single entries
                if _mode == 'message' or maya.cmds.objExists(res[0]):
                    return_list.extend(res)
                else:
                    return_list.append(res)
            else:
                return_list.append(res)
        if cull:
            return_list = [o for o in return_list if o]
        if return_list.count(False) == len(return_list):
            return list()
        if as_meta:
            return_list = metanode.validate_obj_list_arg(return_list, none_valid=True)
        return return_list

    @staticmethod
    def data_list_get_attrs(node=None, attr=None):
        """
        Get the attributes of a dataList
        :param node: str
        :param attr: str, base name for the data list (becomes attr_0, attr_1, etc)
        :return: list, indexed attribute names
        """
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node=node, attr=attr)
        return [attrs_dict[i] for i in attrs_dict.keys()]

    @staticmethod
    def data_list_index(node=None, attr=None, data=None, mode=None, data_attr=None):
        """
        Index a value in a given dataList
        :param node: str
        :param attr: str, base name for the dataList
        :param data: str, data to index
        :param mode: str, what kind of data to be looking for
        :param data_attr: str, extra data attribute name
        :return: int, index of the data
        :raises ValueError: if the data is not found or found more than once
        """
        fn_name = 'data_list_index'
        logger.debug('|{}| >> node: {} | attr: {} | data: {} | mode: {}'.format(fn_name, node, attr, data, mode))
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        data_list = MetaDataListUtils.data_list_get(node=node, attr=attr, mode=mode, data_attr=data_attr, cull=False)
        index = None
        if mode == 'message':
            # Compare long names so short/long name forms match
            long_list = [name_utils.get_long_name(o) for o in data_list]
            long_str = name_utils.get_long_name(data)
            if long_str in long_list:
                index = long_list.index(long_str)
        elif data in data_list:
            if data_list.count(data) > 1:
                raise ValueError('More that one entry!')
            else:
                index = data_list.index(data)
        if index is None:
            logger.info(
                '|{}| >> Data not found! node: {} | attr: {} | data: {} | mode: {}'.format(fn_name, node, attr, data,
                                                                                           mode))
            logger.info('|{}| >> values ....'.format(fn_name))
            for i, v in enumerate(data_list):
                logger.info('idx: {} | {}'.format(i, v))
            raise ValueError('Data not found!')
        return index

    @staticmethod
    def data_list_append(node=None, attr=None, data=None, mode=None, data_attr=None):
        """
        Append data to a dataList
        :param node: str
        :param attr: str, base name for the dataList
        :param data: data to append
        :param mode: str, 'message' to store a message link
        :param data_attr: str, extra data attribute name
        :return: int, index the data was stored at
        """
        fn_name = 'data_list_append'
        # FIX: the last logged value is now the mode (the label previously said 'mode'
        # while data_attr was passed)
        logger.debug('|{}| >> node: {} | attr: {} | data: {} | mode: {}'.format(fn_name, node, attr, data, mode))
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        data_list = MetaDataListUtils.data_list_get(node, attr, mode, data_attr, False)
        data_len = len(data_list)
        index = data_len
        if mode == 'message':
            MetaAttributeUtils.set_message(node, '{}_{}'.format(attr, index), data, data_attr, data_key=index)
        else:
            MetaDataListUtils.store_info(node, '{}_{}'.format(attr, index), data)
        return index

    @staticmethod
    def data_list_remove(node=None, attr=None, data=None, mode=None, data_attr=None):
        """
        Removes data from a dataList
        :param node: str
        :param attr: str, base name for the dataList
        :param data: data to remove
        :param mode: str, 'message' to match message links
        :param data_attr: str, extra data attribute name
        :return: bool, True if anything was removed
        """
        fn_name = 'data_list_remove'
        # FIX: log label/value mismatch (mode was labeled but data_attr was passed)
        logger.debug('|{}| >> node: {} | attr: {} | data: {} | mode: {}'.format(fn_name, node, attr, data, mode))
        data = MetaAttributeValidator.list_arg(data)
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node=node, attr=attr)
        action = False
        if mode == 'message':
            data = MetaAttributeValidator.obj_string_list(args_list=data, called_from=fn_name)
            data_long_list = [name_utils.get_long_name(o) for o in data]
            for i in attrs_dict.keys():
                # FIX: honor the data_attr argument instead of a hard-coded '<attr>_datdict'
                o_msg = MetaAttributeUtils.get_message(node, attrs_dict[i], data_attr, data_key=i)
                if o_msg and name_utils.get_long_name(o_msg) in data_long_list:
                    logger.debug(
                        '|{}| >> removing | idx: {} | attr: {} | value: {}'.format(fn_name, i, attrs_dict[i], o_msg))
                    MetaAttributeUtils.delete(node, attrs_dict[i])
                    action = True
        else:
            attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node=node, attr=attr)
            for i in attrs_dict.keys():
                value = MetaAttributeUtils.get(node, attrs_dict[i])
                if value in data:
                    logger.debug(
                        '|{}| >> removing | idx: {} | attr: {} | value: {}'.format(fn_name, i, attrs_dict[i], value))
                    MetaAttributeUtils.delete(node, attrs_dict[i])
                    action = True
        return action

    @staticmethod
    def data_list_remove_by_index(node=None, attr=None, indices=None):
        """
        Removes dataList message elements by their indices
        :param node: str
        :param attr: str, base name for the dataList attribute
        :param indices: list<int>, indices you want to remove
        :return: bool
        """
        fn_name = 'data_list_remove_by_index'
        indices = MetaAttributeValidator.list_arg(indices)
        attrs_dict = MetaDataListUtils.get_sequential_attr_dict(node=node, attr=attr)
        logger.debug('|{}| >> node: {} | attr: {} | indices: {}'.format(fn_name, node, attr, indices))
        for i in attrs_dict.keys():
            if i in indices:
                logger.warning('|{}| >> removing... | idx: {} | attr: {}'.format(fn_name, i, attrs_dict[i]))
                MetaAttributeUtils.delete(node, attrs_dict[i])
        return True

    @staticmethod
    def data_list_clean(node=None, attr=None, mode=None, data_attr=None):
        """
        Removes dead data from a dataList and reconnects the data that is still in the scene
        :param node: str
        :param attr: str, base name for the dataList attribute
        :param mode: str, what kind of data to be looking for
        :param data_attr: str, extra data attribute name
        :return: bool
        """
        # FIX: fn_name was mislabeled 'data_list_remove' in the logs
        fn_name = 'data_list_clean'
        logger.debug('|{}| >> node: {} | attr: {} | mode: {}'.format(fn_name, node, attr, mode))
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        data_list = MetaDataListUtils.data_list_get(node=node, attr=attr, mode=mode, data_attr=data_attr, cull=True)
        if mode == 'message':
            return MetaMessageListUtils.message_list_connect(node=node, attr=attr, data=data_list, data_attr=data_attr)
        else:
            return MetaDataListUtils.data_list_connect(node=node, attr=attr, data=data_list, mode=mode,
                                                       data_attr=data_attr)

    @staticmethod
    def store_info(node=None, attr=None, data=None, attr_type=None, lock=True):
        """
        Stores information to an attribute (supports: message, doubleArray, json dicts, etc)
        :param node: str
        :param attr: str, base name for the dataList
        :param data: data to add
        :param attr_type: variant (if not given, will be picked best guess)
        :param lock: bool
        :return: bool
        """
        from tpDcc.dccs.maya.meta import metanode
        try:
            fn_name = 'store_info'
            data = MetaAttributeValidator.list_arg(data)
            if attr_type is None:
                # Best-guess the attribute type from the data
                _meta_node = False
                try:
                    data = [o.meta_node for o in data]
                    _meta_node = True
                    attr_type = 'message'
                    logger.debug('|{}| >> meta node no art passed...'.format(fn_name))
                except Exception:
                    pass
                if not _meta_node:
                    if len(data) == 3:
                        attr_type = 'double3'
                    elif len(data) > 3:
                        attr_type = 'doubleArray'
            logger.debug(
                "|{}| >> node: {} | attr: {} | data: {} | attrType: {}".format(fn_name, node, attr, data, attr_type))
            # STORE DATA
            # BUGFIX: previously compared attr_type == ['message', ...] (never True), so
            # message data always fell through to the default branch
            if attr_type in ['message', 'msg', 'messageSimple']:
                logger.debug('|{}| >> message...'.format(fn_name))
                MetaAttributeUtils.set_message(message_holder=node, message_attr=attr, message=data)
            elif attr_type in ['double3']:
                logger.debug('|{}| >> list...'.format(fn_name))
                meta_node = metanode.MetaNode(node=node)
                if meta_node.has_attr(attr=attr):
                    try:
                        MetaAttributeUtils.set(node=node, attr=attr, value=data)
                    except Exception:
                        # Incompatible existing attribute: rebuild it with the new data
                        logger.warning(
                            '|{}| >> removing... | node: {} | attr: {} | value: {}'.format(fn_name, node, attr,
                                                                                           meta_node.__getattribute__(
                                                                                               attr)))
                        MetaAttributeUtils.delete(node=node, attr=attr)
                        meta_node.add_attribute(attr=attr, value=data, attr_type=attr_type)
                else:
                    meta_node.add_attribute(attr=attr, value=data, attr_type=attr_type)
            else:
                logger.debug('|{}| >> default...'.format(fn_name))
                meta_node = metanode.MetaNode(node=node)
                if meta_node.has_attr(attr=attr):
                    try:
                        MetaAttributeUtils.set(node=node, attr=attr, value=data[0])
                    except Exception:
                        logger.warning(
                            '|{}| >> removing... | node: {} | attr: {} | value: {}'.format(fn_name, node, attr,
                                                                                           meta_node.__getattribute__(
                                                                                               attr)))
                        MetaAttributeUtils.delete(node=node, attr=attr)
                        meta_node.add_attribute(attr=attr, value=data[0], attr_type=attr_type)
                else:
                    meta_node.add_attribute(attr=attr, value=data[0], attr_type=attr_type)
            if lock:
                MetaAttributeUtils.set_lock(node, attr, lock)
            return True
        except Exception as e:
            raise Exception(e)
class MetaMessageListUtils(object):
    """
    Convenience wrappers around MetaDataListUtils with the mode fixed to 'message'
    """

    @staticmethod
    def message_list_purge(node=None, attr=None, data_attr=None):
        """
        Purges a messageList if it exists in the given node
        :param node: str
        :param attr: str, name of the messageList attribute
        :param data_attr: str, extra data attribute name
        :return: bool
        """
        return MetaDataListUtils.data_list_purge(node, attr, data_attr)

    @staticmethod
    def message_list_exists(node=None, attr=None, data_attr=None):
        """
        Checks if a messageList attribute exists in the given node
        :param node: str
        :param attr: str, name of the messageList attribute
        :param data_attr: str, extra data attribute name
        :return: bool
        """
        return MetaDataListUtils.data_list_exists(node, attr, 'message', data_attr)

    @staticmethod
    def message_list_get(node=None, attr=None, data_attr=None, cull=False, as_meta=True):
        """
        Returns the nodes stored in a messageList
        :param node: str
        :param attr: str, name of the messageList attribute
        :param data_attr: str, extra data attribute name
        :param cull: bool, if True removes empty entries from the result
        :param as_meta: bool, if True returns meta node instances
        :return: list
        """
        return MetaDataListUtils.data_list_get(node, attr, 'message', data_attr, cull, as_meta)

    @staticmethod
    def message_list_connect(node=None, attr=None, data=None, connect_back=None, data_attr=None):
        """
        Multimessage data is not ordered by default. Using this function we can handle
        multiMessage lists through indexes
        :param node: str, node to add messageList attr
        :param attr: str, name of the messageList attribute
        :param data: nodes to store, in order
        :param connect_back: str, optional attribute on each stored node messaged back to node
        :param data_attr: str, extra data attribute name
        :return: bool
        """
        from tpDcc.dccs.maya.meta import metanode
        fn_name = 'message_list_connect'
        data = MetaAttributeValidator.meta_node_string_list(data)
        if data_attr is None:
            data_attr = '{}_datdict'.format(attr)
        logger.debug(
            '|{}| >> node: {} | attr: {} | connect_back: {} | data_attr: {}'.format(fn_name, node, attr, connect_back,
                                                                                    data_attr))
        logger.debug('|{}| >> data | len: {} | list: {}'.format(fn_name, len(data), data))
        MetaMessageListUtils.message_list_purge(node, attr)
        # TODO: Doing this call we force that Instanced meta nodes have same number of arguments
        # TODO: as MetaNode class. Find a fix to this
        meta_node = metanode.MetaNode(node)
        for index, item in enumerate(data):
            indexed_attr = '{}_{}'.format(attr, index)
            MetaAttributeUtils.set_message(node, indexed_attr, item, data_attr, index)
            if connect_back is not None:
                back_node = item.split('.')[0] if '.' in item else item
                MetaAttributeUtils.set_message(back_node, connect_back, node, simple=True)
        return True

    @staticmethod
    def message_list_set(node=None, attr=None, data=None, connect_back=None, data_attr=None):
        """Alias of message_list_connect"""
        return MetaMessageListUtils.message_list_connect(node, attr, data, connect_back, data_attr)

    @staticmethod
    def message_list_get_attrs(node=None, attr=None):
        """Returns the indexed attribute names of a messageList"""
        return MetaDataListUtils.data_list_get_attrs(node, attr)

    @staticmethod
    def message_list_index(node=None, attr=None, data=None, data_attr=None):
        """Returns the index of the given node inside a messageList"""
        validated = MetaAttributeValidator.meta_node_string(data)
        return MetaDataListUtils.data_list_index(node, attr, validated, 'message', data_attr)

    @staticmethod
    def message_list_append(node=None, attr=None, data=None, connect_back=None, data_attr=None):
        """Appends a node to a messageList and returns the index used"""
        validated = MetaAttributeValidator.meta_node_string(data)
        result = MetaDataListUtils.data_list_append(node, attr, validated, 'message', data_attr)
        if connect_back is not None:
            MetaAttributeUtils.set_message(validated, connect_back, node, data_attr)
        return result

    @staticmethod
    def message_list_remove(node=None, attr=None, data=None, data_attr=None):
        """Removes the given node from a messageList"""
        validated = MetaAttributeValidator.meta_node_string(data)
        return MetaDataListUtils.data_list_remove(node, attr, validated, 'message', data_attr)

    @staticmethod
    def message_list_remove_by_index(node=None, attr=None, indices=None):
        """Removes messageList entries by their indices"""
        return MetaDataListUtils.data_list_remove_by_index(node, attr, indices)

    @staticmethod
    def message_list_clean(node=None, attr=None, data_attr=None):
        """Removes dead entries from a messageList and reconnects the live ones"""
        return MetaDataListUtils.data_list_clean(node, attr, 'message', data_attr)
class MetaTransformUtils(object):
@staticmethod
def get_rotate_pivot(node=None):
"""
Returns the world space rotate pivot of a given node
:param node: str, node to query
:return: variant, list | euclid.Vector3
"""
node = MetaAttributeValidator.meta_node_string(node)
result = maya.cmds.xform(node, query=True, ws=True, rp=True)
logger.debug('|{}| >> [{}] = {}'.format('get_rotate_pivot', node, result))
return result
@staticmethod
def get_scale_pivot(node=None):
"""
Returns the world space scale pivot of a given node
:param node: str, node to query
:return: list | euclid.Vector3
"""
node = MetaAttributeValidator.meta_node_string(node)
result = maya.cmds.xform(node, query=True, ws=True, sp=True)
logger.debug('|{}| >> [{}] = {}'.format('get_scale_pivot', node, result))
return result
@staticmethod
def get_parent(node=None, full_path=True):
"""
Get parent of the given node
:param node: str, object to get parents of
:param full_path: bool, whether you want long names or not
:return: list<str>
"""
node = MetaAttributeValidator.meta_node_string(node)
logger.debug('|Parent Getter| >> node: {}'.format(node))
parents = maya.cmds.listRelatives(node, parent=True, type='transform', fullPath=full_path) or False
if parents:
return parents[0]
return False
@staticmethod
def set_parent(node=None, parent=False):
"""
Parente transform and returns new names
:param node: str, object to modify hierarhcy
:param parent: str, parent node or False/None for parent to world (unparent)
:return: str, new name
"""
node = MetaAttributeValidator.meta_node_string(node)
if parent:
parent = MetaAttributeValidator.meta_node_string(parent)
logger.debug('|Parent Setter| >> node: {}'.format(node))
logger.debug('|Parent Setter| >> parent: {}'.format(parent))
parents = maya.cmds.listRelatives(node, parent=True, type='transform')
if parent:
try:
return maya.cmds.parent(node, parent)[0]
except Exception as e:
logger.debug('|Parent Setter| >> Failed to parent "{}" to "{}" | error: {}'.format(node, parent, e))
return node
else:
if parents:
return maya.cmds.parent(node, world=True)[0]
else:
return node
@staticmethod
def get_parents(node=None, full_path=True):
"""
Get all parents of a given node where the last parent is the top of the hierarchy
:param node: str, object to check
:param full_path: bool, whether you want long names or not
:return: list<str>
"""
node = MetaAttributeValidator.meta_node_string(node)
list_parents = list()
tmp_obj = node
no_parent = False
while not no_parent:
tmp_parent = maya.cmds.listRelatives(tmp_obj, allParents=True, fullPath=True)
if tmp_parent:
if len(tmp_parent) > 1:
raise ValueError(
'Do not know what to do with multiple parents ... {0} | {1}'.format(node, tmp_parent))
list_parents.append(tmp_parent[0])
tmp_obj = tmp_parent[0]
else:
no_parent = True
if not full_path:
return [name_utils.get_short_name(o) for o in list_parents]
return list_parents
@staticmethod
def get_children(node=None, full_path=False):
"""
Get the immediate children of a given node
:param node: object to check
:param full_path: bool, whether you want long names or not
:return: list<str>
"""
node = MetaAttributeValidator.meta_node_string(node)
return maya.cmds.listRelatives(node, children=True, type='transform', fullPath=full_path) or []
@staticmethod
def get_descendents(node=None, full_path=False):
"""
Get all children of a given node
:param node: str, object to check
:param full_path: bool, whether you want long names or not
:return: list<str>
"""
node = MetaAttributeValidator.meta_node_string(node)
return maya.cmds.listRelatives(node, allDescendents=True, type='transform', fullPath=full_path) or []
@staticmethod
def get_shapes(node=None, full_path=False, intermediates=False, non_intermediates=True):
"""
Get shapes of a given node
:param node: object to check
:param full_path: bool, whether you want long names or not
:param intermediates: bool, list intermediate shapes
:param non_intermediates: bool, list non intermediate shapes
:return: list<str>
"""
node = MetaAttributeValidator.meta_node_string(node)
return shape_utils.get_shapes(node, intermediates=intermediates, non_intermediates=non_intermediates,
full_path=full_path) or []
def snap(self, node=None, target=None, position=True, rotation=True, rotate_axis=False, rotate_order=False,
scale_pivot=False, pivot='rp', space='w', mode='xform'):
"""
Function that snaps source object to target
:param source:
:param target:
:param position:
:param rotation:
:param rotate_axis:
:param rotate_order:
:param scale_pivot:
:param pivot:
:param space:
:param mode:
:return:
"""
fn_name = 'snap'
node = node.meta_node
node = MetaAttributeValidator.meta_node_string(node)
target = MetaAttributeValidator.meta_node_string(target)
pivot = MetaAttributeValidator.kwargs_from_dict(pivot, common.PIVOT_ARGS, none_valid=False,
called_from=__name__ + fn_name + '>> validate pivot')
space = MetaAttributeValidator.kwargs_from_dict(space, common.SPACE_ARGS, none_valid=False,
called_from=__name__ + fn_name + '>> validate space')
logger.debug(
'|{}| >> obj: {} | target: {} | pivot: {} | space: {} | mode: {}'.format(fn_name, node, target, pivot,
space, mode))
logger.debug(
'|{}| >> position: {} | rotation: {} | rotate_axis: {} | rotate_order: {}'.format(fn_name, position,
rotation, rotate_axis,
rotate_order))
kwargs = {'ws': False, 'os': False}
if space == 'world':
kwargs['ws'] = True
else:
kwargs['os'] = True
if position:
kwargs_move = copy.copy(kwargs)
if pivot == 'sp':
kwargs_move['spr'] = True
else:
kwargs_move['rpr'] = True
if pivot == 'closestPoint':
logger.debug('|{}| <<< closestPoint >>>'.format(fn_name))
target_type = MetaAttributeValidator.get_maya_type(target)
dst = None
|
from pk_classifier.bootstrap import split_train_val_test
# Ten dummy "feature" values, one per sample.
FEATURES_SAMPLE = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
# Perfectly balanced labels (alternating 0/1, so 50% positives) with a unique pmid per sample.
LABELS_SAMPLE = {'label': [0, 1, 0, 1, 0, 1, 0, 1, 0, 1], 'pmid': [10, 11, 12, 13, 14, 15, 16, 17, 18, 19]}
def test_split_train_val_test():
    """test_size=0.2 over 10 samples: 6 train / 2 dev / 2 test, all stratified."""
    x_train, x_dev, x_test, y_train, y_dev, \
        y_test, pmids_train, pmids_val, pmids_test = split_train_val_test(features=FEATURES_SAMPLE,
                                                                          labels=LABELS_SAMPLE, test_size=0.2,
                                                                          seed=123)
    # test sizes
    assert len(x_train) == len(y_train) == 6
    assert len(x_dev) == len(y_dev) == 2
    assert len(x_test) == len(y_test) == 2
    # test proportions (stratification)
    initial_proportion = sum(LABELS_SAMPLE['label']) / len(LABELS_SAMPLE['label'])  # proportion of samples with label 1
    assert sum(y_train) / len(y_train) == initial_proportion
    assert sum(y_dev) / len(y_dev) == initial_proportion
    # BUG FIX: this line used to re-check y_dev; the test split was never verified.
    assert sum(y_test) / len(y_test) == initial_proportion
def test_split_train_val_test_2():
    """test_size=0.4 over 10 samples: 2 train / 4 dev / 4 test, all stratified."""
    x_train, x_dev, x_test, y_train, y_dev, \
        y_test, pmids_train, pmids_val, pmids_test = split_train_val_test(features=FEATURES_SAMPLE,
                                                                          labels=LABELS_SAMPLE, test_size=0.4,
                                                                          seed=123)
    # test sizes
    assert len(x_train) == len(y_train) == 2
    assert len(x_dev) == len(y_dev) == 4
    assert len(x_test) == len(y_test) == 4
    # test proportions (stratification)
    initial_proportion = sum(LABELS_SAMPLE['label']) / len(LABELS_SAMPLE['label'])  # proportion of samples with label 1
    assert sum(y_train) / len(y_train) == initial_proportion
    assert sum(y_dev) / len(y_dev) == initial_proportion
    # BUG FIX: this line used to re-check y_dev; the test split was never verified.
    assert sum(y_test) / len(y_test) == initial_proportion
|
from core import Asset
from core import GridBot
from datetime import datetime
from core import grid_bot_optimization
def test_answer():
    """Optimizing a grid bot over one BTC price history should yield a GridBot instance."""
    price_history = Asset("btc-usd_2021-01-01_2021-03-31.csv")
    best = grid_bot_optimization([price_history], 100, 10, 10)
    assert isinstance(best, GridBot)
|
import base64
import datetime
import io
import os
from os import listdir
import dash
from dash.dependencies import Input, Output, State
import dash_core_components as dcc
import dash_html_components as html
import dash_table
import ntpath
import csv
import pandas as pd
# from extra import find_pfds_csv
from pfd import find_pfds_csv
from components import Header, make_dash_table, get_menu
# Folder containing the CSV datasets that can be uploaded/selected.
# BUG FIX: this constant used to be assigned twice (identically); the
# redundant second assignment was removed.
DATA_FOLDER = "./data"

# important link https://dash.plot.ly/datatable/interactivity
# external CSS stylesheets
external_stylesheets = [
    'https://codepen.io/chriddyp/pen/bWLwgP.css',
    {
        'href': 'https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css',
        'rel': 'stylesheet',
        'integrity': 'sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO',
        'crossorigin': 'anonymous'
    }
]

app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# Callbacks reference components that only exist after the layout factory runs.
app.config['suppress_callback_exceptions'] = True
app.title = 'ANMAT'

# Module-level state shared between callbacks.
gdf = None        # DataFrame currently loaded from the selected dataset
gresults = None   # results of the most recent PFD discovery run
param_dict = dict()
param_dict["results_main_dir"] = "../Results/"
def read_table(tab_name):
    """
    Read a fully-quoted CSV file into a DataFrame of strings.
    Tries the default (UTF-8) encoding first and falls back to ISO-8859-1
    for legacy files.
    :param tab_name: str, path of the csv file to read
    :return: pandas.DataFrame, or None when the file cannot be decoded
    """
    read_kwargs = dict(filepath_or_buffer=tab_name, dtype=object, delimiter=',', low_memory=False,
                       quoting=csv.QUOTE_ALL, doublequote=True)
    try:
        # UnicodeDecodeError is a ValueError subclass, so decode failures land here.
        df = pd.read_csv(**read_kwargs)
    except ValueError:
        try:
            df = pd.read_csv(encoding="ISO-8859-1", **read_kwargs)
        except Exception:
            # Narrowed from a bare `except:`: still best-effort, but no longer
            # swallows KeyboardInterrupt/SystemExit.
            print("Error reading csv file .. file encoding is not recognizable")
            return None
    return df
def get_csv_files(folder):
    """Return the names of all '.csv' files directly inside folder."""
    return [entry for entry in listdir(folder) if entry.endswith('.csv')]
def dynamic_page():
    """
    Build the page layout: a control row (dataset dropdown, upload control,
    three text inputs for the discovery parameters and the discovery button)
    followed by the dataset preview area and the results area.
    Assigned to ``app.layout`` as a callable, so it is re-evaluated on every
    page load and the dropdown options stay in sync with DATA_FOLDER.
    :return: dash_html_components.Div, root of the page
    """
    return html.Div([
        Header(),
        html.Div([
            # Dropdown listing the csv files currently present in DATA_FOLDER.
            html.Div([
                dcc.Dropdown(
                    id='uploaded-datasets',
                    options=[{'label': i, 'value': i} for i in get_csv_files(DATA_FOLDER)],
                    placeholder='Select a Dataset',
                )
            ],style={
                'width': '220px',
                'display': 'inline-block',
                'margin-left': '25px',
            }
            ),
            # Single-file upload control; handled by update_output_data.
            html.Div([
                dcc.Upload(
                    id='upload-data',
                    children = html.Div([
                        html.Button(' Upload ', className='fa fa-upload')
                    ],style={
                        'backgroundColor':'green',
                        'color':'white',
                        'margin-left': '5px',
                    }),
                    multiple=False
                ),
            ]),
            # Discovery parameter: minimum support.
            html.Div([
                dcc.Input(
                    placeholder='Enter the Min Support',
                    type='text',
                    value='',
                    id='MSupport'
                )
            ],style={
                'width': '200',
                'display': 'inline-block',
                'margin-left': '5px',
            }),
            # Discovery parameter: allowed violations.
            html.Div([
                dcc.Input(
                    placeholder='Enter the Allowed Violations',
                    type='text',
                    value='',
                    id='Delta'
                )
            ],style={
                'width': '200',
                'display': 'inline-block',
                'margin-left': '5px',
            }),
            # Discovery parameter: minimum coverage (interpreted as a percentage).
            html.Div([
                dcc.Input(
                    placeholder='Enter the Min. Coverage',
                    type='text',
                    value='',
                    id='Coverage'
                )
            ],style={
                'width': '200px',
                'display': 'inline-block',
                'margin-left': '5px',
            }),
            # Kicks off PFD discovery (see update_output_discovery).
            html.Button('PFD Discovery', className='fa', id='button',
                        style={
                            'backgroundColor':'green',
                            'color':'white',
                            'width': '200',
                            'flow':'right',
                            'margin-left': '15px',
                        }),
        ], className="row",
            style={
                'width': '100%',
                'height':'50px',
                'borderWidth': '1px',
                'borderRadius': '5px',
                'textAlign': 'center',
                'margin-left': '25px',
                'margin-top': '10px',
            }),
        # Filled by the upload callback (kept empty on success).
        html.Div(id='output-data-upload'),
        # Scrollable preview of the currently selected dataset.
        html.Div(id='output-data-dropdown',
                 style={
                     'width': '100%',
                     'height': '440px',
                     'borderWidth': '1px',
                     'borderRadius': '5px',
                     'textAlign': 'center',
                     'margin-left': '50px',
                     'margin-right': '25px',
                     'overflowY': 'scroll',
                 }),
        html.Hr(),  # horizontal line
        # Discovery results (tabs) are rendered here.
        html.Div(id = 'output-results',
                 style={
                     'width': '100%',
                     'height': '200px',
                     'borderWidth': '1px',
                     'borderRadius': '5px',
                     'textAlign': 'left',
                     'margin-left': '25px',
                     'margin-right': '25px',
                     'margin-top': '40px'
                 }),
        # Footer()
    ], className='body')
# Assign the factory itself (not its result) so Dash rebuilds the page on every load.
app.layout = dynamic_page
def upload_contents(contents, filename):
    """
    Decode an uploaded file and persist it as CSV inside DATA_FOLDER.
    CSV uploads are saved under their original name; Excel uploads
    (.xls/.xlsx) are converted and saved with a '.csv' extension.
    :param contents: str, data-URL payload of the form '<content type>,<base64 data>'
    :param filename: str, original name of the uploaded file
    :return: dash_html_components.Div (always empty; errors are printed)
    """
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)
    new_file_name = ''
    try:
        df = None
        if filename.endswith('csv'):
            # Assume that the user uploaded a CSV file
            df = pd.read_csv(
                io.StringIO(decoded.decode('utf-8')))
        elif 'xls' in filename:
            # Assume that the user uploaded an excel file
            df = pd.read_excel(io.BytesIO(decoded))
            if filename.endswith('xls'):
                new_file_name = filename.replace('.xls', '.csv')
            if filename.endswith('xlsx'):
                new_file_name = filename.replace('.xlsx', '.csv')
        # BUG FIX: df could be undefined for unsupported extensions, and the
        # converted '.csv' name (new_file_name) was computed but never used,
        # so Excel uploads were saved with an .xls/.xlsx name but CSV content.
        if df is not None:
            filename2 = os.path.join(DATA_FOLDER, new_file_name or filename)
            df.to_csv(filename2, sep=',', encoding='latin', index=False, quoting=csv.QUOTE_ALL,
                      doublequote=True)
    except Exception as e:
        print(e)
        return html.Div([
            ''
        ])
    return html.Div([
        ''
    ])
def parse_contents(filename):
    """
    Load a CSV dataset from DATA_FOLDER into the global ``gdf`` and render it
    as a paginated DataTable.
    :param filename: str, name of a csv file inside DATA_FOLDER
    :return: dash_html_components.Div
    """
    global gdf
    df = None
    try:
        if filename.endswith('csv'):
            # Assume that the user uploaded a CSV file
            filename2 = os.path.join(DATA_FOLDER, filename)
            df = read_table(filename2)
            gdf = df
    except Exception as e:
        print(e)
        return html.Div([
            'There was an error processing this file.'
        ])
    # BUG FIX: df used to be referenced while undefined for non-csv names
    # (NameError outside the try); this also covers read_table() returning
    # None on undecodable files.
    if df is None:
        return html.Div([
            'There was an error processing this file.'
        ])
    return html.Div([
        dash_table.DataTable(
            data=df.to_dict('records'),
            columns=[{'name': i, 'id': i} for i in df.columns],
            css=[{
                'selector': '.dash-cell div.dash-cell-value',
                'rule': 'display: inline; white-space: inherit; overflow: inherit; text-overflow: inherit;'
            }],
            style_cell={
                'whiteSpace': 'no-wrap',
                'overflow': 'hidden',
                'textOverflow': 'ellipsis',
                'maxWidth': 0,
                'textAlign': 'left'
            },
            style_cell_conditional=[{
                'if': {'row_index': 'odd'},
                'backgroundColor': 'rgb(248, 248, 248)'
            }],
            style_header={
                'backgroundColor': 'white',
                'fontWeight': 'bold'
            },
            style_table={
                'max_rows_in_viewport': 15,
                'maxHeight': '400px',
                'overflowY': 'scroll'
            },
            pagination_settings={
                "current_page": 0,
                "page_size": 50,
            },
        ),
        # html.Hr(),  # horizontal line
    ], className='ui-grid-resize-columns ui-grid-pagination')
@app.callback([Output('output-data-upload', 'children'),
               Output('uploaded-datasets', 'options'),
               Output('uploaded-datasets', 'value')],
              [Input('upload-data', 'contents')],
              [State('upload-data', 'filename'),
               State('upload-data', 'last_modified')])
def update_output_data(content, fname, modified):
    """
    Handle a file upload: persist the file, refresh the dataset dropdown
    options and pre-select the uploaded file. Without an upload, only the
    dropdown options are refreshed.
    """
    options = [{'label': name, 'value': name} for name in get_csv_files(DATA_FOLDER)]
    if not content:
        return html.Div(['']), options, ''
    return upload_contents(content, fname), options, fname
@app.callback(Output('output-data-dropdown', 'children'),
              [Input('uploaded-datasets', 'value')])
def output_dropdown(fname):
    """
    Render a preview table for the dataset selected in the dropdown.
    :param fname: str, selected csv file name ('' or None when nothing is selected)
    """
    # Removed an unused recomputation of the dropdown options that was done here.
    if fname:
        return parse_contents(fname)
    return html.Div([''])
@app.callback(
    Output('output-results', 'children'),
    [Input('button', 'n_clicks')],
    [State('uploaded-datasets', 'value'),
     State('MSupport', 'value'),
     State('Delta', 'value'),
     State('Coverage', 'value')])
def update_output_discovery(n_clicks, fname, conf, delta, min_coverage):
    """
    Run PFD discovery on the selected dataset with the given parameters and
    render the results tabs; otherwise report which parameter is missing.
    BUG FIX: the 'data file is missing' and 'minimum support is missing'
    messages were built but never returned, so users always saw the generic
    'Something goes wrong' fallback instead.
    """
    global gresults
    if fname and conf and delta and min_coverage:
        param_dict["tab_name"] = os.path.join(DATA_FOLDER, fname)
        # Coverage is entered as a percentage; discovery expects a fraction.
        param_dict["min_acceptable_coverage"] = float(min_coverage) / 100.0
        param_dict["confidence_K"] = float(conf)
        param_dict["allowed_noise_delta"] = float(delta)
        gresults = find_pfds_csv(param_dict)
        return html.Div([
            dcc.Tabs(
                id="tabs-with-classes",
                value='patt',
                parent_className='custom-tabs',
                className='tab',
                children=[
                    dcc.Tab(
                        label='Patterns',
                        value='patt',
                        className='custom-tab',
                        selected_className='custom-tab--selected'
                    ),
                    dcc.Tab(
                        label='PFDs',
                        value='pfds',
                        className='custom-tab',
                        selected_className='custom-tab--selected'
                    ),
                    dcc.Tab(
                        label='Violations',
                        value='vio', className='custom-tab',
                        selected_className='custom-tab--selected'
                    ),
                ]),
            html.Div(id='tabs-content-classes')
        ])
    if not n_clicks:
        # Initial render: the button has not been pressed yet, show nothing.
        return html.Div([''])
    if not fname:
        return html.Div(['The data file is missing'])
    if not conf:
        return html.Div(['The minimum support is missing'])
    if not delta:
        return html.Div(['The allowed violations is missing'])
    return html.Div(['The min_coverage is missing'])
@app.callback(Output('tabs-content-classes', 'children'),
              [Input('tabs-with-classes', 'value')],
              [State('uploaded-datasets', 'value')])
def render_content(tab, tab_name):
    """
    Render the body of the selected results tab:
    'patt' -> patterns per attribute, 'pfds' -> discovered dependencies,
    'vio' -> dependencies whose violations can be inspected.
    Reads the discovery results from the global ``gresults`` and the loaded
    dataset from ``gdf``.
    Cleanup: removed dead `for i in range(len(data)): data.remove(data[0])`
    loops that ran right after `data = []`, an unused `att_names` local and a
    redundant `.clear()` on a freshly created dict.
    """
    global gdf, gresults
    if tab == 'patt':
        if gresults:
            df_details = gresults['df_details']
            # Map attribute name -> whether tokens or n-grams were extracted for it.
            tok_or_ngrams = dict()
            for d in df_details.keys():
                tok_or_ngrams[df_details[d]['att_name']] = df_details[d]['tg_vs_ng']
            data = []
            cols = ['Attributes', 'Tokens or n-Grams']
            gms = gresults['patterns']
            for k in tok_or_ngrams.keys():
                new_k = gdf.columns.get_loc(k)
                if new_k in gms.keys():
                    data.append([k, tok_or_ngrams[k]])
                else:
                    # Attribute produced no patterns (e.g. numerical column).
                    data.append([k, '----'])
            att_df = pd.DataFrame(data, columns=cols)
            return html.Div([
                dash_table.DataTable(
                    data=att_df.to_dict('records'),
                    columns=[{'name': i, 'id': i} for i in att_df.columns],
                    id='patterns-table',
                    row_selectable="single",
                    css=[{
                        'selector': '.dash-cell div.dash-cell-value',
                        'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                    }],
                    style_cell={
                        'whiteSpace': 'no-wrap',
                        'overflow': 'hidden',
                        'textOverflow': 'ellipsis',
                        'maxWidth': '500px',
                        'textAlign': 'left',
                        'font-size': '150%',
                    },
                    style_cell_conditional=[{
                        'if': {'row_index': 'odd'},
                        'backgroundColor': 'rgb(248, 248, 248)'
                    }],
                    style_header={
                        'backgroundColor': 'white',
                        'fontWeight': 'bold'
                    },
                    style_table={
                        'max_rows_in_viewport': 15,
                        'maxHeight': '400px',
                        'maxWidth': '600px',
                        'overflowY': 'scroll',
                        'margin-left': '20px',
                    }
                ),
                html.Div(id='patterns-container', className="six columns"),
            ], className="row ")
    elif tab == 'pfds':
        if gresults:
            pfds = gresults['pfds']
            cols = ['Determinant', 'Dependent']
            data = [[pfd['det'], pfd['dep']] for pfd in pfds]
            pfds_df = pd.DataFrame(data, columns=cols)
            return html.Div([
                dash_table.DataTable(
                    data=pfds_df.to_dict('records'),
                    columns=[{'name': i, 'id': i} for i in pfds_df.columns],
                    id='pfds-table',
                    row_selectable="single",
                    row_deletable=True,
                    css=[{
                        'selector': '.dash-cell div.dash-cell-value',
                        'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                    }],
                    style_cell={
                        'whiteSpace': 'no-wrap',
                        'overflow': 'hidden',
                        'textOverflow': 'ellipsis',
                        'maxWidth': '500px',
                        'textAlign': 'left',
                        'font-size': '150%',
                    },
                    style_cell_conditional=[{
                        'if': {'row_index': 'odd'},
                        'backgroundColor': 'rgb(248, 248, 248)'
                    }],
                    style_header={
                        'backgroundColor': 'white',
                        'fontWeight': 'bold'
                    },
                    style_table={
                        'max_rows_in_viewport': 15,
                        'maxHeight': '400px',
                        'maxWidth': '600px',
                        'overflowY': 'scroll',
                        'margin-left': '20px',
                    }
                ),
                html.Div(id='pfds-container', className="six columns"),
                # Hidden sink used by show_removed_rows to sync row deletions.
                html.Div(id='pfds-container-hidden', style={'display': 'none'}),
            ], className="row ")
    elif tab == 'vio':
        if gresults:
            pfds = gresults['pfds']
            cols = ['Determinant', 'Dependent']
            data = [[pfd['det'], pfd['dep']] for pfd in pfds]
            pfds_df = pd.DataFrame(data, columns=cols)
            return html.Div([
                dash_table.DataTable(
                    data=pfds_df.to_dict('records'),
                    columns=[{'name': i, 'id': i} for i in pfds_df.columns],
                    id='vios-table',
                    row_selectable="single",
                    css=[{
                        'selector': '.dash-cell div.dash-cell-value',
                        'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                    }],
                    style_cell={
                        'whiteSpace': 'no-wrap',
                        'overflow': 'hidden',
                        'textOverflow': 'ellipsis',
                        'maxWidth': '500px',
                        'textAlign': 'left',
                        'font-family': 'Times New Roman',
                        'font-size': '150%',
                    },
                    style_cell_conditional=[{
                        'if': {'row_index': 'odd'},
                        'backgroundColor': 'rgb(248, 248, 248)'
                    }],
                    style_header={
                        'backgroundColor': 'white',
                        'fontWeight': 'bold'
                    },
                    style_table={
                        'max_rows_in_viewport': 15,
                        'maxHeight': '400px',
                        'maxWidth': '600px',
                        'overflowY': 'scroll',
                        'margin-left': '20px',
                    }
                ),
                html.Div(id='vios-container', className="six columns"),
            ], className="row ")
    # No results yet (or unknown tab): render nothing instead of returning None.
    return html.Div([''])
@app.callback(
    Output('patterns-container', "children"),
    [Input('patterns-table', "derived_virtual_data"),
     Input('patterns-table', "derived_virtual_selected_rows")])
def update_graphs_patterns(rows, derived_virtual_selected_rows):
    """
    Show the pattern/frequency table of the attribute selected in the
    patterns table, or an explanation when the attribute was skipped.
    On first render `derived_virtual_data` and `derived_virtual_selected_rows`
    are None (Dash fires callbacks when components are created), so an empty
    Div is returned until the user selects a row.
    NOTE(review): the selected row index is used directly as a key into
    gresults['patterns'] and as a column index into gdf — this assumes the
    patterns table rows are ordered exactly like the dataframe columns;
    confirm against how render_content builds the table.
    """
    global gresults, gdf
    # print(derived_virtual_selected_rows)
    if not(derived_virtual_selected_rows):
        derived_virtual_selected_rows = []
        return html.Div([
            html.H4('')
        ], className="six columns")
    else:
        gms = gresults['patterns']
        if derived_virtual_selected_rows[0] in gms.keys():
            req_gms = gms[derived_virtual_selected_rows[0]]
            patt_df = pd.DataFrame(req_gms, columns=['patterns', 'frequency'])
            return html.Div([
                dash_table.DataTable(
                    data=patt_df.to_dict('records'),
                    columns=[{'name': i, 'id': i} for i in patt_df.columns],
                    id='patterns-freq-table',
                    css=[{
                        'selector': '.dash-cell div.dash-cell-value',
                        'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                    }],
                    style_cell={
                        'whiteSpace': 'no-wrap',
                        'overflow': 'hidden',
                        'textOverflow': 'ellipsis',
                        'maxWidth': '600px',
                        'textAlign':'left',
                        'font-size': '150%',
                        'font-family': 'Times New Roman'
                    },
                    style_cell_conditional=[{
                        'if': {'row_index': 'odd'},
                        'backgroundColor': 'rgb(248, 248, 248)'
                    }],
                    style_header={
                        'backgroundColor': 'white',
                        'fontWeight': 'bold'
                    },
                    style_table={
                        'max_rows_in_viewport':15,
                        'maxHeight': '400px',
                        'maxWidth':'800px',
                        'overflowY': 'scroll',
                        'margin-left': '20px',
                        # 'border': 'thin lightgrey solid',
                    }
                ),
                # html.H3(gdf.columns[derived_virtual_selected_rows] + ' Just for test')
            ])
        else:
            # Attribute had no extracted patterns (numerical column).
            text = '(' + gdf.columns[derived_virtual_selected_rows[0]] + ') has been ignored because it represents '
            text += 'a numerical quantity '
            return html.Div([
                # html.H3('The selected attribute is ( ' + derived_virtual_selected_rows[0] + ' )')
                html.H3(text)
            ], className="six columns")
@app.callback(
    Output('pfds-container', "children"),
    [Input('pfds-table', "derived_virtual_data"),
     Input('pfds-table', "derived_virtual_selected_rows")])
def update_graphs_pfds(rows, derived_virtual_selected_rows):
    """
    Show the tableau (pattern pairs plus affected-tuple counts) of the PFD
    selected in the PFDs table.
    On first render the derived_virtual_* properties are None (Dash fires
    callbacks when components are created), so an empty Div is returned
    until a row is selected.
    Cleanup: removed a dead clearing loop over a freshly created empty list.
    """
    global gresults, gdf
    if not derived_virtual_selected_rows:
        return html.Div([
            html.H3('')
        ], className="six columns")
    pfds = gresults['pfds']
    req_pfd = pfds[derived_virtual_selected_rows[0]]
    # One row per tableau entry: (determinant pattern, dependent pattern, #tuples).
    data = []
    for tp in req_pfd['tableau']:
        ((a, b), c) = tp
        data.append((a, b, len(c)))
    cols = ['Determinant Pattern', 'Dependent Pattern', '# affected tuples']
    tableau_df = pd.DataFrame(data, columns=cols)
    return html.Div([
        dash_table.DataTable(
            data=tableau_df.to_dict('records'),
            columns=[{'name': i, 'id': i} for i in tableau_df.columns],
            id='pfds-tableau-table',
            css=[{
                'selector': '.dash-cell div.dash-cell-value',
                'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
            }],
            style_cell={
                'whiteSpace': 'no-wrap',
                'overflow': 'hidden',
                'textOverflow': 'ellipsis',
                'maxWidth': '600px',
                'textAlign': 'left',
                'font-size': '150%',
                'font-family': 'Times New Roman',
            },
            style_cell_conditional=[{
                'if': {'row_index': 'odd'},
                'backgroundColor': 'rgb(248, 248, 248)'
            }],
            style_header={
                'backgroundColor': 'white',
                'fontWeight': 'bold'
            },
            style_table={
                'max_rows_in_viewport': 15,
                'maxHeight': '400px',
                'maxWidth': '800px',
                'overflowY': 'scroll',
                'margin-left': '20px',
            }
        ),
    ])
@app.callback(Output('pfds-container-hidden', 'children'),
              [Input('pfds-table', 'data_previous')],
              [State('pfds-table', 'data')])
def show_removed_rows(previous, current):
    """
    Sync row deletions made in the PFDs table back into ``gresults['pfds']``.
    Diffs the previous table data against the current data and drops the
    matching dependency from the global results.
    """
    global gresults
    if previous is None:
        # BUG FIX: PreventUpdate was instantiated but never raised, so the
        # callback silently fell through and returned None on first render.
        raise dash.exceptions.PreventUpdate
    for row in previous:
        if row not in current:
            # BUG FIX: the comparisons used row['Determinant'][0] /
            # row['Dependent'][0] (first character only), so deletions almost
            # never matched; compare the full attribute names instead.
            for jj in range(len(gresults['pfds'])):
                if gresults['pfds'][jj]['det'] == row['Determinant'] and \
                        gresults['pfds'][jj]['dep'] == row['Dependent']:
                    gresults['pfds'].remove(gresults['pfds'][jj])
                    break
    return html.Div([
        html.H3(""),
    ])
@app.callback(
    Output('vios-container', "children"),
    [Input('vios-table', "derived_virtual_data"),
     Input('vios-table', "derived_virtual_selected_rows")])
def update_graphs_vios(rows, derived_virtual_selected_rows):
    """
    Show the violating tuples of the PFD selected in the violations table,
    projected onto its determinant and dependent columns.
    On first render the derived_virtual_* properties are None (Dash fires
    callbacks when components are created), so an empty Div is returned
    until a row is selected.
    Cleanup: removed a dead clearing loop over a freshly created empty list.
    """
    global gresults, gdf
    if not derived_virtual_selected_rows:
        return html.Div([
            html.H3('')
        ], className="six columns")
    pfds = gresults['pfds']
    req_pfd = pfds[derived_virtual_selected_rows[0]]
    if len(req_pfd['vios']) > 0:
        vios_df = req_pfd['vios']
        det_name = ''
        dep_name = ''
        # Resolve the actual column labels of the determinant and the dependent.
        for col in vios_df.columns:
            if col == req_pfd['det']:
                det_name = col
            if col == req_pfd['dep']:
                dep_name = col
        cols = [det_name, dep_name]
        data = vios_df[cols]
        prjected_vios_df = pd.DataFrame(data, columns=cols)
        return html.Div([
            dash_table.DataTable(
                data=prjected_vios_df.to_dict('records'),
                columns=[{'name': i, 'id': i} for i in prjected_vios_df.columns],
                id='vios-values-table',
                row_selectable="single",
                css=[{
                    'selector': '.dash-cell div.dash-cell-value',
                    'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                }],
                style_cell={
                    'whiteSpace': 'no-wrap',
                    'overflow': 'hidden',
                    'textOverflow': 'ellipsis',
                    'maxWidth': '600px',
                    'textAlign': 'left',
                    'font-size': '150%',
                    'font-family': 'Times New Roman',
                },
                style_cell_conditional=[
                    {'if': {'row_index': 'odd'},
                     'backgroundColor': 'rgb(248, 248, 248)',},
                    {'if': {'column_id': det_name},
                     'backgroundColor': 'white',
                     'color': '#3D9970',},
                    {'if': {'column_id': dep_name},
                     'backgroundColor': 'white',
                     'color': '#9D3D70',},
                ],
                style_header={
                    'backgroundColor': 'white',
                    'fontWeight': 'bold',
                },
                style_table={
                    'max_rows_in_viewport': 15,
                    'maxHeight': '400px',
                    'overflowY': 'scroll',
                    'margin-left': '20px',
                }
            ),
            html.H3('Select a violation to see its context'),
            html.Hr(),
            html.Div(id='vios-explain', className="twelve columns"),
        ])
    return html.Div([
        html.H3(""),
    ])
@app.callback(
    Output('vios-explain', "children"),
    [Input('vios-values-table', "derived_virtual_data"),
     Input('vios-values-table', "derived_virtual_selected_rows"),
     Input('vios-table', "derived_virtual_data"),
     Input('vios-table', "derived_virtual_selected_rows")])
def update_graphs_vios_w_details(rows_vio, derived_virtual_selected_rows_vio, rows_pfds, derived_virtual_selected_rows_pfd):
    """Show the dataframe rows that put the selected violation in context.

    Fires when both a PFD row ('vios-table') and a violating value pair
    ('vios-values-table') are selected.  Finds the tableau group that
    contains the selected violation, pulls the matching rows from the
    global dataframe ``gdf``, and renders them in a DataTable with the
    determinant/dependent columns colored and the rows equal to the
    selected value pair highlighted in lightblue.

    Returns an empty placeholder Div until both selections exist, or when
    the selection index is stale/out of range.
    """
    # When the table is first rendered, `derived_virtual_data` and
    # `derived_virtual_selected_rows` are `None`: unsupplied Dash properties
    # are always None and dependent callbacks run on first render.
    global gresults, gdf
    if not(derived_virtual_selected_rows_pfd):
        # No PFD selected yet.
        return html.Div([
            html.H3('')
        ], className="six columns")
    elif not(derived_virtual_selected_rows_vio):
        # No violation selected yet.
        return html.Div([
            html.H3('')
        ], className="six columns")
    else:
        pfds = gresults['pfds']
        comp_data = rows_vio[derived_virtual_selected_rows_vio[0]]
        req_pfd = pfds[derived_virtual_selected_rows_pfd[0]]
        if len(req_pfd['vios']) == 0:
            return html.Div([html.H3('')])
        # Guard against a stale selection index from a previous table state.
        if not(derived_virtual_selected_rows_vio[0] < len(req_pfd['vios'].index.tolist())):
            return html.Div([html.H3('')])
        req_vio = int(req_pfd['vios'].index.tolist()[derived_virtual_selected_rows_vio[0]])
        # Bug fix: removed dead no-op loops that iterated over just-emptied
        # lists (one of them referenced an undefined name `rec_idx` and
        # would have raised NameError had it ever executed), plus the dead
        # assignments immediately preceding the early returns above.
        req_idx = []
        det_name = ''
        dep_name = ''
        vios_df = req_pfd['vios']
        # Resolve the determinant/dependent column names of this PFD.
        for col in vios_df.columns:
            if col == req_pfd['det']:
                det_name = col
            if col == req_pfd['dep']:
                dep_name = col
        # Find the tableau group containing the selected violation.  `rule`
        # ends up as the matched (lhs, rhs) pair thanks to the break; if no
        # group matches, it is the last rule examined and req_idx stays [].
        rule = None
        for tp in req_pfd['tableau']:
            ((a, b), c) = tp
            rule = (a,b)
            if req_vio in c:
                req_idx = c
                break
        req_vio_df = gdf.loc[req_idx]
        if not (det_name in comp_data.keys()) or not (dep_name in comp_data.keys()):
            return html.Div([html.H3("")])
        # Row positions (within the context table) whose det/dep values equal
        # the selected pair — these get highlighted below.
        mylist = []
        req_vio_df_idx = req_vio_df.index.tolist()
        for kk in range(len(req_vio_df_idx)):
            jj = req_vio_df_idx[kk]
            if (req_vio_df[det_name][jj] == comp_data[det_name]) and (req_vio_df[dep_name][jj] == comp_data[dep_name]):
                mylist.append(kk)
        aa = ''
        bb = ''
        if rule:
            (aa, bb) = rule
        return html.Div([
            html.H3("Violation(s) in context: [ " + aa + ' ==> ' + bb + ' ]'),
            dash_table.DataTable(
                data=req_vio_df.to_dict('records'),
                columns=[{'name': i, 'id': i} for i in req_vio_df.columns],
                # id='vios-explain-table',
                # row_selectable="single",
                css=[{
                    'selector': '.dash-cell div.dash-cell-value',
                    'rule': 'display: inline; white-space: inherit; margin-left: 20px; overflow: inherit; text-overflow: inherit;'
                }],
                style_cell={
                    'whiteSpace': 'no-wrap',
                    'overflow': 'hidden',
                    'textOverflow': 'ellipsis',
                    'maxWidth': '600px',
                    'textAlign':'left',
                    'font-size': '150%',
                    'font-family': 'Times New Roman',
                },
                style_cell_conditional=[
                    {
                        'if': {'row_index': x,
                               'column_id': det_name,
                               },
                        'backgroundColor': 'lightblue',} for x in mylist
                ]+[
                    {
                        'if': {'row_index': x,
                               'column_id': dep_name,
                               },
                        'backgroundColor': 'lightblue',} for x in mylist
                ]+[
                    {'if': {'column_id': det_name,},
                     'color': '#3D9970',},
                    {'if': {'column_id': dep_name,},
                     'color': '#9D3D70',},
                ],
                style_header={
                    'backgroundColor': 'white',
                    'fontWeight': 'bold'
                },
                style_table={
                    'max_rows_in_viewport':15,
                    'maxHeight': '400px',
                    # 'maxWidth':'1000px',
                    'overflowY': 'scroll',
                    'margin-left': '20px',
                    # 'border': 'thin lightgrey solid',
                }
            ),
        ])
# else:
# return html.Div([
# # html.H3('The selected attribute is ( ' + derived_virtual_selected_rows[0] + ' )')
# html.H3(""),
# ])
# # # # # # # # #
# detail the way that external_css and external_js work and link to alternative method locally hosted
# # # # # # # # #
# Third-party stylesheets for the Dash app; the local /assets/style.css is
# appended last.
external_css = ["https://cdnjs.cloudflare.com/ajax/libs/normalize/7.0.0/normalize.min.css",
                "https://cdnjs.cloudflare.com/ajax/libs/skeleton/2.0.4/skeleton.min.css",
                "//fonts.googleapis.com/css?family=Raleway:400,300,600",
                "https://codepen.io/bcd/pen/KQrXdb.css",
                "https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css",
                "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css",
                "https://www.w3schools.com/w3css/4/w3.css",
                "/assets/style.css"]
for css in external_css:
    app.css.append_css({"external_url": css})

# External JavaScript (jQuery and Bootstrap) registered with the app.
external_js = ["https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js",
               "https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js",
               ]
for js in external_js:
    app.scripts.append_script({"external_url": js})

# Entry point: serve on all interfaces with debug enabled.
# NOTE(review): debug=True on 0.0.0.0 exposes the reloader/debugger to the
# network — disable for production deployments.
if __name__ == '__main__':
    app.run_server(debug=True, host='0.0.0.0')
|
"""
Name: Reissner-Nordstrom Electro-Vacuum
References:
- Reissner, Ann. Phys., v50, p106, (1916)
- Stephani (13.21) p158
Coordinates: Spherical
Symmetry: Spherical
"""
from sympy import diag, sin, symbols

# Coordinate chart (t, r, theta, phi) and the two solution parameters,
# M (mass) and Q (charge).
coords = symbols("t r theta phi", real=True)
variables = symbols("M Q", constant=True)
functions = ()  # this solution has no free functions
t, r, th, ph = coords
M, Q = variables

# Lapse-like factor 1 - 2M/r + Q^2/r^2; the metric is diagonal with
# signature (-, +, +, +).
expr = 1 - 2 * M / r + Q ** 2 / r ** 2
metric = diag(-expr, 1 / expr, r ** 2, r ** 2 * sin(th) ** 2)
|
from django.apps import AppConfig
class StagedoorConfig(AppConfig):
    """Django application configuration for the ``stagedoor`` app."""

    name = "stagedoor"
|
from pyoperant.errors import * |
#!/usr/bin/env python
import logging, logging.config
import numpy as np
import yaml
import cv2
import os
from operator import attrgetter, itemgetter
from collections import namedtuple
# The FLANN Index enums aren't exposed in the OpenCV Python bindings. We create
# our own in accordance with:
# https://github.com/Itseez/opencv/blob/7d4d28605087ec2d3878f9467aea313a2acdfd49/
# modules/flann/include/opencv2/flann/defines.h#L81
FLANN_INDEX_LINEAR, FLANN_INDEX_KDTREE, FLANN_INDEX_KMEANS, FLANN_INDEX_COMPOSITE, \
FLANN_INDEX_KDTREE_SINGLE, FLANN_INDEX_HIERARCHICAL, FLANN_INDEX_LSH = range(7)

# Polyline color used when drawing the tracked object's outline.
# NOTE(review): OpenCV expects BGR channel order — confirm (60, 255, 20) is
# the intended hue.
NEON_GREEN = 60, 255, 20
BASE_DIR = os.path.dirname(__file__)
# Module-level flag: True while the user is dragging a selection rectangle
# (mutated by mouseCallback/drawImage below).
drawing = False

# Initialize logger from configuration file
# NOTE(review): yaml.load without an explicit Loader is deprecated/unsafe on
# untrusted input — prefer yaml.safe_load if logging.yml needs no custom tags.
with open(os.path.join(BASE_DIR, 'logging.yml')) as infile:
    logging.config.dictConfig(yaml.load(infile))
logger = logging.getLogger(__name__)
class Subject(namedtuple('Subject', ['image', 'keypoints', 'descriptors'])):
    """Record bundling an image with its detected keypoints and descriptors."""
    pass
class Tracker:
    """Bundles the feature detector, descriptor extractor and matcher used
    for tracking.

    Components not supplied fall back to defaults: a SIFT detector, the
    detector itself as extractor, and a brute-force matcher.
    """

    def __init__(self, detector=None, extractor=None, matcher=None):
        # Bug fix: the original only assigned the attributes inside the
        # `is None` branches, so explicitly supplied components were
        # silently dropped (AttributeError on any later access).
        self.detector = detector if detector is not None else cv2.SIFT()
        self.extractor = extractor if extractor is not None else self.detector
        self.matcher = matcher if matcher is not None else cv2.BFMatcher()
def corners(im):
    """Return the four corner pixel coordinates of image *im*.

    Order matters when drawing a closed polygon; corners are returned
    clockwise from the top-left:

        0 --- 1
        |     |
        3 --- 2
    """
    height, width = im.shape[:2]
    top_left = (0, 0)
    top_right = (width - 1, 0)
    bottom_right = (width - 1, height - 1)
    bottom_left = (0, height - 1)
    return [top_left, top_right, bottom_right, bottom_left]
# mouse callback function
def mouseCallback(event, x, y, flags, param):
    """Handle mouse events on the 'Tracking' window.

    Maintains the module-level drag state (sx, sy, ex, ey, drawing): a left
    button press starts a rectangle selection, mouse movement extends it,
    and releasing the button crops the selected region of the current frame
    into a separate 'Crop' window.
    """
    logger.debug(param)
    global sx, sy, ex, ey, drawing, query_img, train_imgs, train_keypoints_lst, train_descriptors_lst
    if event == cv2.EVENT_LBUTTONDOWN:
        # Begin a new selection anchored at the press point.
        sx, sy, ex, ey, drawing = x, y, x, y, True
    if event == cv2.EVENT_LBUTTONUP:
        drawing = False
        cv2.namedWindow('Crop')
        # Ignore degenerate (zero-area) selections.
        if (sx != x and sy != y):
            # Normalize so (sx, sy) is the top-left corner of the rectangle.
            if (sx > x): sx, x = x, sx
            if (sy > y): sy, y = y, sy
            cv2.imshow('Crop', query_img[sy:y, sx:x])
            # new_train_img = np.array(query_img[sy:y, sx:x])
            # train_imgs.append(new_train_img)
            # train_keypoints, train_descriptors = detector.detectAndCompute(new_train_img, mask=None)
            # train_keypoints_lst.append(train_keypoints)
            # train_descriptors_lst.append(train_descriptors)
    if event == cv2.EVENT_MOUSEMOVE:
        if drawing:
            # Track the moving corner of the in-progress selection.
            ex, ey = x,y
def drawImage(img):
    """Display *img* in the 'Tracking' window, overlaying the in-progress
    selection rectangle while the user is dragging."""
    global sx, sy, ex, ey, drawing
    if not drawing:
        cv2.imshow('Tracking', img)
        return
    # Draw the rectangle on a copy so the source frame stays untouched.
    overlay = np.copy(img)
    cv2.rectangle(overlay, (sx, sy), (ex, ey), (255, 0, 0), 2)
    cv2.imshow('Tracking', overlay)
if __name__ == '__main__':
    import argparse

    # Argument Parsing
    # NOTE(review): this script is Python 2 — the `print` statement below and
    # the list-returning map() calls would both fail under Python 3.
    parser = argparse.ArgumentParser(
        description = 'COMP9517 Project (Part 1)',
        version = '1.0'
    )
    # Positional (required) arguments
    parser.add_argument('input_video',
        type=str,
        action="store",
        help='Input video')
    # Optional arguments
    parser.add_argument('--image-filenames',
        '-i',
        default=[],
        nargs='*',
        action="store",
        help='Filename of input images')
    parser.add_argument('--output-video-file',
        '-o',
        type=str,
        action="store",
        help='Output video to specified file instead of displaying in window')
    # parse arguments, which by default is sys.argv
    args = parser.parse_args()
    logger.debug('Received arguments: {0}'.format(args))
    logger.debug('Reading training images: {0}'.format(args.image_filenames))
    # Initialize training images as those specified
    # through command line arguments
    train_imgs = map(cv2.imread, args.image_filenames)
    logger.debug('Read {0} training images of dimensions: {1}' \
        .format(len(train_imgs), map(lambda img: img.shape, train_imgs)))
    detector = cv2.SIFT()
    # Generalized version
    # detector = cv2.FeatureDetector_create('SIFT')
    # The method `compute` requires the image and detected keypoints
    # and returns a pair consisting of keypoints and descriptors,
    # where descriptors is a len(keypoints)x128 array (a keypoint descriptor
    # is an 128 element vector.)
    # Note that the keypoints returned by `compute` may not be same
    # as the input keypoints: "Keypoints for which a descriptor cannot
    # be computed are removed and the remaining ones may be reordered.
    # Sometimes new keypoints can be added, for example: SIFT duplicates
    # a keypoint with several dominant orientations (for each orientation)."
    train_keypoints_lst = map(detector.detect, train_imgs)
    train_keypoints_lst, train_descriptors_lst = zip(*map(detector.compute, train_imgs, train_keypoints_lst))
    # equivalent to
    # zip(*map(detector.detectAndCompute, train_imgs, [None for _ in train_imgs]))
    # Note that `map(detector.compute, train_imgs, train_keypoints)` returns
    # a list of pairs consisting of keypoints and descriptors for each image,
    # whereas we actually want a separate list of keypoints and list of descriptors.
    # So we need to apply some sort of inverse `zip` function to the result of the
    # `map`. Recall that `zip` is actually its own inverse. E.g.
    # zip(['a', 'b', 'c'], [1, 4, 6, 8, 9]) -> [('a', 1), ('b', 4), ('c', 6)]
    # zip(('a', 1), ('b', 4), ('c', 6)) -> [('a', 'b', 'c'), (1, 4, 6, 8, 9)]
    logger.debug('Detected {0} keypoints (resp.) in each image'.format(map(len, train_keypoints_lst)))
    # The VideoCapture class initialization is overloaded
    # to accommodate video filenames or device numbers
    # If the string argument provided can be cast to an
    # integer, we interpret it as device number, otherwise
    # it is a video filename.
    try:
        input_video = int(args.input_video)
    except ValueError:
        input_video = args.input_video
    logger.debug("Reading video file/device: {0}".format(input_video))
    cap = cv2.VideoCapture(input_video)
    if not cap.isOpened():
        logger.error("Couldn't read video file/device: {0}".format(input_video))
        exit(1)
    if args.output_video_file is not None:
        # TODO:
        # out = cv2.VideoWriter(filename=args.output_video_file)
        pass
    cv2.namedWindow('Tracking', cv2.WINDOW_AUTOSIZE)
    cv2.setMouseCallback('Tracking', mouseCallback, param={'a': 3, 'b': 7})
    # By default, uses L2-norm with no cross-checking
    # matcher = cv2.BFMatcher(cv2.NORM_HAMMING)
    matcher = cv2.FlannBasedMatcher(indexParams=dict(algorithm=FLANN_INDEX_KDTREE, trees=5), searchParams={})
    # matcher = cv2.FlannBasedMatcher(indexParams=dict(algorithm=FLANN_INDEX_LSH, table_number=6, key_size=12, multi_probe_level=1), searchParams={})
    detector2 = cv2.ORB()  # NOTE(review): never used below
    # Main capture loop: match each frame against the first training image
    # and outline the detected object.
    while True:
        ret, query_img = cap.read()
        if not ret: break
        query_keypoints, query_descriptors = detector.detectAndCompute(query_img, mask=None)
        # TODO: Loop over all train_keypoints here and decide
        # whether and how to display all of them. Only working
        # with the first one for right now
        # TODO: Dynamically add training images, find keypoints/decriptors, etc.
        # based on user mouse selection here
        train_img, train_keypoints, train_descriptors = train_imgs[0], train_keypoints_lst[0], train_descriptors_lst[0]
        # list of pairs of best and second best match
        top_matches = matcher.knnMatch(query_descriptors, train_descriptors, k=2)
        # logger.debug('Found {0} matches'.format(len(top_matches)))
        # filter matches: keep only those whose best match is clearly better
        # than the second best (ratio test)
        matches = [a for a, b in filter(lambda m: len(m) == 2, top_matches) if a.distance < 0.75*b.distance]
        # logger.debug('Retained {0} matches'.format(len(matches)))
        # NOTE(review): cv2.getCaptureProperty does not exist in the cv2
        # bindings — this line likely should be cap.get(...); confirm, it will
        # raise AttributeError as written.
        print cv2.getCaptureProperty(cap, cv2.CV_CAP_PROP_FOURCC)
        # cv2.VideoWriter('out.mp4', cv2.CV_FOURCC('P','I','M','1'), 20, )
        # TODO: Get rid of magic number here
        if len(matches) > 10:
            # Estimate the homography from training image to current frame
            # and draw the transformed training-image outline.
            src_pts = np.float32(map(lambda m: train_keypoints[m.trainIdx].pt, matches))
            dst_pts = np.float32(map(lambda m: query_keypoints[m.queryIdx].pt, matches))
            H, mask = cv2.findHomography(src_pts, dst_pts, method=cv2.RANSAC, ransacReprojThreshold=1)
            # logger.debug(mask.ravel())
            train_img_corners = np.float32(corners(train_img)).reshape(-1, 1, 2)
            transformed_train_img_corners = cv2.perspectiveTransform(train_img_corners, H)
            cv2.polylines(query_img, [np.int32(transformed_train_img_corners)], \
                isClosed=True, color=NEON_GREEN, thickness=2, lineType=cv2.CV_AA)
        # logger.debug('Detected {0} keypoints in d'.format(len(query_keypoints)))
        # cv2.imshow("Tracking", cv2.drawKeypoints(query_img, query_keypoints))
        # cv2.imshow("Tracking", query_img)
        drawImage(query_img)
        # Exit on any key press.
        if cv2.waitKey(1) >= 0: break
    cap.release()
|
# Transliteration table: 256 pinyin/romanized readings for one 0x100-codepoint
# block of CJK ideographs, indexed by the low byte of the codepoint.
# '[?] ' marks characters with no known transliteration.
data = (
'Ku ', # 0x00
'Ke ', # 0x01
'Tang ', # 0x02
'Kun ', # 0x03
'Ni ', # 0x04
'Jian ', # 0x05
'Dui ', # 0x06
'Jin ', # 0x07
'Gang ', # 0x08
'Yu ', # 0x09
'E ', # 0x0a
'Peng ', # 0x0b
'Gu ', # 0x0c
'Tu ', # 0x0d
'Leng ', # 0x0e
'[?] ', # 0x0f
'Ya ', # 0x10
'Qian ', # 0x11
'[?] ', # 0x12
'An ', # 0x13
'[?] ', # 0x14
'Duo ', # 0x15
'Nao ', # 0x16
'Tu ', # 0x17
'Cheng ', # 0x18
'Yin ', # 0x19
'Hun ', # 0x1a
'Bi ', # 0x1b
'Lian ', # 0x1c
'Guo ', # 0x1d
'Die ', # 0x1e
'Zhuan ', # 0x1f
'Hou ', # 0x20
'Bao ', # 0x21
'Bao ', # 0x22
'Yu ', # 0x23
'Di ', # 0x24
'Mao ', # 0x25
'Jie ', # 0x26
'Ruan ', # 0x27
'E ', # 0x28
'Geng ', # 0x29
'Kan ', # 0x2a
'Zong ', # 0x2b
'Yu ', # 0x2c
'Huang ', # 0x2d
'E ', # 0x2e
'Yao ', # 0x2f
'Yan ', # 0x30
'Bao ', # 0x31
'Ji ', # 0x32
'Mei ', # 0x33
'Chang ', # 0x34
'Du ', # 0x35
'Tuo ', # 0x36
'Yin ', # 0x37
'Feng ', # 0x38
'Zhong ', # 0x39
'Jie ', # 0x3a
'Zhen ', # 0x3b
'Feng ', # 0x3c
'Gang ', # 0x3d
'Chuan ', # 0x3e
'Jian ', # 0x3f
'Pyeng ', # 0x40
'Toride ', # 0x41
'Xiang ', # 0x42
'Huang ', # 0x43
'Leng ', # 0x44
'Duan ', # 0x45
'[?] ', # 0x46
'Xuan ', # 0x47
'Ji ', # 0x48
'Ji ', # 0x49
'Kuai ', # 0x4a
'Ying ', # 0x4b
'Ta ', # 0x4c
'Cheng ', # 0x4d
'Yong ', # 0x4e
'Kai ', # 0x4f
'Su ', # 0x50
'Su ', # 0x51
'Shi ', # 0x52
'Mi ', # 0x53
'Ta ', # 0x54
'Weng ', # 0x55
'Cheng ', # 0x56
'Tu ', # 0x57
'Tang ', # 0x58
'Que ', # 0x59
'Zhong ', # 0x5a
'Li ', # 0x5b
'Peng ', # 0x5c
'Bang ', # 0x5d
'Sai ', # 0x5e
'Zang ', # 0x5f
'Dui ', # 0x60
'Tian ', # 0x61
'Wu ', # 0x62
'Cheng ', # 0x63
'Xun ', # 0x64
'Ge ', # 0x65
'Zhen ', # 0x66
'Ai ', # 0x67
'Gong ', # 0x68
'Yan ', # 0x69
'Kan ', # 0x6a
'Tian ', # 0x6b
'Yuan ', # 0x6c
'Wen ', # 0x6d
'Xie ', # 0x6e
'Liu ', # 0x6f
'Ama ', # 0x70
'Lang ', # 0x71
'Chang ', # 0x72
'Peng ', # 0x73
'Beng ', # 0x74
'Chen ', # 0x75
'Cu ', # 0x76
'Lu ', # 0x77
'Ou ', # 0x78
'Qian ', # 0x79
'Mei ', # 0x7a
'Mo ', # 0x7b
'Zhuan ', # 0x7c
'Shuang ', # 0x7d
'Shu ', # 0x7e
'Lou ', # 0x7f
'Chi ', # 0x80
'Man ', # 0x81
'Biao ', # 0x82
'Jing ', # 0x83
'Qi ', # 0x84
'Shu ', # 0x85
'Di ', # 0x86
'Zhang ', # 0x87
'Kan ', # 0x88
'Yong ', # 0x89
'Dian ', # 0x8a
'Chen ', # 0x8b
'Zhi ', # 0x8c
'Xi ', # 0x8d
'Guo ', # 0x8e
'Qiang ', # 0x8f
'Jin ', # 0x90
'Di ', # 0x91
'Shang ', # 0x92
'Mu ', # 0x93
'Cui ', # 0x94
'Yan ', # 0x95
'Ta ', # 0x96
'Zeng ', # 0x97
'Qi ', # 0x98
'Qiang ', # 0x99
'Liang ', # 0x9a
'[?] ', # 0x9b
'Zhui ', # 0x9c
'Qiao ', # 0x9d
'Zeng ', # 0x9e
'Xu ', # 0x9f
'Shan ', # 0xa0
'Shan ', # 0xa1
'Ba ', # 0xa2
'Pu ', # 0xa3
'Kuai ', # 0xa4
'Dong ', # 0xa5
'Fan ', # 0xa6
'Que ', # 0xa7
'Mo ', # 0xa8
'Dun ', # 0xa9
'Dun ', # 0xaa
'Dun ', # 0xab
'Di ', # 0xac
'Sheng ', # 0xad
'Duo ', # 0xae
'Duo ', # 0xaf
'Tan ', # 0xb0
'Deng ', # 0xb1
'Wu ', # 0xb2
'Fen ', # 0xb3
'Huang ', # 0xb4
'Tan ', # 0xb5
'Da ', # 0xb6
'Ye ', # 0xb7
'Sho ', # 0xb8
'Mama ', # 0xb9
'Yu ', # 0xba
'Qiang ', # 0xbb
'Ji ', # 0xbc
'Qiao ', # 0xbd
'Ken ', # 0xbe
'Yi ', # 0xbf
'Pi ', # 0xc0
'Bi ', # 0xc1
'Dian ', # 0xc2
'Jiang ', # 0xc3
'Ye ', # 0xc4
'Yong ', # 0xc5
'Bo ', # 0xc6
'Tan ', # 0xc7
'Lan ', # 0xc8
'Ju ', # 0xc9
'Huai ', # 0xca
'Dang ', # 0xcb
'Rang ', # 0xcc
'Qian ', # 0xcd
'Xun ', # 0xce
'Lan ', # 0xcf
'Xi ', # 0xd0
'He ', # 0xd1
'Ai ', # 0xd2
'Ya ', # 0xd3
'Dao ', # 0xd4
'Hao ', # 0xd5
'Ruan ', # 0xd6
'Mama ', # 0xd7
'Lei ', # 0xd8
'Kuang ', # 0xd9
'Lu ', # 0xda
'Yan ', # 0xdb
'Tan ', # 0xdc
'Wei ', # 0xdd
'Huai ', # 0xde
'Long ', # 0xdf
'Long ', # 0xe0
'Rui ', # 0xe1
'Li ', # 0xe2
'Lin ', # 0xe3
'Rang ', # 0xe4
'Ten ', # 0xe5
'Xun ', # 0xe6
'Yan ', # 0xe7
'Lei ', # 0xe8
'Ba ', # 0xe9
'[?] ', # 0xea
'Shi ', # 0xeb
'Ren ', # 0xec
'[?] ', # 0xed
'Zhuang ', # 0xee
'Zhuang ', # 0xef
'Sheng ', # 0xf0
'Yi ', # 0xf1
'Mai ', # 0xf2
'Ke ', # 0xf3
'Zhu ', # 0xf4
'Zhuang ', # 0xf5
'Hu ', # 0xf6
'Hu ', # 0xf7
'Kun ', # 0xf8
'Yi ', # 0xf9
'Hu ', # 0xfa
'Xu ', # 0xfb
'Kun ', # 0xfc
'Shou ', # 0xfd
'Mang ', # 0xfe
'Zun ', # 0xff
)
|
import numpy as np
from scipy.integrate import odeint
from sgp4.api import Satrec
from astropy.time import Time
import astropy
from astropy import units as u
from astropy.coordinates import EarthLocation, ITRS, FK5, CartesianDifferential, CartesianRepresentation
# Astropy gained the TEME frame in version 4.1; import it when available.
# Bug fix: the original used a strict '>' so version 4.1 itself skipped this
# import, while the rest of this module gates TEME usage on '< 4.1' — the
# mismatched comparisons caused a NameError at exactly astropy 4.1.
if float(astropy.__version__[0:3]) >= 4.1:
    from astropy.coordinates import TEME
from orbitdeterminator.doppler.utils.constants import *
from orbitdeterminator.doppler.utils.utils import *
import json
def get_satellite_sgp4(tle, epoch_start, epoch_end, step):
    """Propagate a satellite with SGP4 over the given epoch window.

    # TODO: Different start/end Julian Days

    Args:
        tle (array): two-line element string array.
        epoch_start (astropy.time.Time): starting epoch.
        epoch_end (astropy.time.Time): ending epoch.
        step (float): step in Julian Day fractions.

    Returns:
        e (np.ndarray): vector of SGP4 error codes.
        r (np.ndarray): vector of satellite positions (TEME).
        v (np.ndarray): vector of satellite velocities (TEME).
        jd (np.ndarray): vector of Julian Day numbers.
        fr (np.ndarray): vector of Julian Day fractions.
    """
    sat = Satrec.twoline2rv(tle[0], tle[1])
    # Sample the day fractions; every sample shares the start epoch's
    # whole Julian Day number.
    fr = np.arange(epoch_start.jd2, epoch_end.jd2, step)
    jd = np.full(fr.shape[0], epoch_start.jd1)
    e, r, v = sat.sgp4_array(jd, fr)
    return e, r, v, jd, fr
def get_satellite(tle, epoch_start, epoch_end, step, frame='itrs'):
    """ Auxiliary function to get satellite coordinates in the specified frame
    (TEME, FK5 or ITRS), propagated using SGP4 with given Two-Line Element (TLE).
    Coordinates are returned as a numpy array.

    Args:
        tle (array): two-line element string array.
        epoch_start (astropy.time.Time): starting epoch.
        epoch_end (astropy.time.Time): ending epoch.
        step (float): step in Julian Day fractions.
        frame (str): frame (teme, fk5 or itrs). Defaults to 'itrs'.

    Returns:
        x_sat (np.ndarray): satellite positions/velocities in the requested frame (6, n).
        t (astropy.time.core.Time): corresponding times.

    Raises:
        ValueError: if `frame` is not one of 'teme', 'fk5' or 'itrs'.
    """
    # Bug fix: the `step` argument was previously ignored — the call always
    # hard-coded 1.0/86400.0 regardless of what the caller requested.
    _, r, v, jd, fr = get_satellite_sgp4(tle, epoch_start, epoch_end, step)
    t = Time(jd + fr, format='jd')
    r_teme = CartesianRepresentation(r[:,0], r[:,1], r[:,2], unit=u.km)
    v_teme = CartesianDifferential(v[:,0], v[:,1], v[:,2], unit=u.km/u.s)

    # Temporary workaround until astropy version 4.1 that supports TEME
    astropy_version = float(astropy.__version__[0:3])
    if astropy_version < 4.1:
        print(f"Warning: astropy version {astropy_version} < 4.1, treating SGP4 output (TEME) as FK5")
        eci = FK5(r_teme.with_differentials(v_teme), obstime=t)
        frame = 'fk5'
    else:
        eci = TEME(r_teme.with_differentials(v_teme), obstime=t)

    # Coordinate frame transformations
    if frame=='teme':
        x_sat = np.array([eci.x.value, eci.y.value, eci.z.value,
                          eci.v_x.value, eci.v_y.value, eci.v_z.value])
    elif frame=='fk5':
        # If the astropy version < 4.1, `eci` is already the FK5 approximation
        if astropy_version < 4.1:
            x_sat = np.array([eci.x.value, eci.y.value, eci.z.value,
                              eci.v_x.value, eci.v_y.value, eci.v_z.value])
        # If the astropy version >= 4.1, transform TEME to FK5
        else:
            fk5 = eci.transform_to(FK5(obstime=t))
            x_sat = np.array([fk5.x.value, fk5.y.value, fk5.z.value,
                              fk5.v_x.value, fk5.v_y.value, fk5.v_z.value])
    elif frame=='itrs':
        itrs = eci.transform_to(ITRS(obstime=t))
        x_sat = np.array([itrs.x.value, itrs.y.value, itrs.z.value,
                          itrs.v_x.value, itrs.v_y.value, itrs.v_z.value])
    else:
        # Previously an unknown frame fell through to an UnboundLocalError.
        raise ValueError("frame must be one of 'teme', 'fk5' or 'itrs', got {}".format(frame))
    return x_sat, t
def get_site(lat, lon, height, obstime, frame='teme'):
    """ Auxiliary function to obtain site coordinates in ITRS, TEME or FK5 frame.

    Args:
        lat (float): latitude (degrees).
        lon (float): longitude (degrees).
        height (float): altitude (m).
        obstime (astropy.time.Time): time array (n, ).
        frame (str): frame (teme, itrs or fk5). Defaults to 'teme'.

    Returns:
        x_obs (np.ndarray): array with site positions in the requested frame (6, n).

    Raises:
        ValueError: if `frame` is not one of 'teme', 'itrs' or 'fk5'.
    """
    v = np.zeros(obstime.shape[0])  # zero site velocity placeholder

    # Switch to FK5 if astropy version doesn't support TEME frame
    if float(astropy.__version__[0:3]) < 4.1:
        frame='fk5'

    if frame == 'itrs':
        site = EarthLocation(lat=lat*u.deg, lon=lon*u.deg, height=height*u.m)
        site_itrs_temp = site.get_itrs(obstime=obstime)
        # NOTE(review): unlike the teme/fk5 branches below, this branch does
        # not apply the *1e3 scaling — confirm the output units are consistent
        # across frames.
        x_obs = np.array([site_itrs_temp.x.value, site_itrs_temp.y.value, site_itrs_temp.z.value,
                          v, v, v])
    elif frame == 'teme':
        # Need some workaround conversions for TEME frame: build an ITRS
        # state with zero velocity, then transform to TEME.
        site = EarthLocation(lat=lat*u.deg, lon=lon*u.deg, height=height/1e3*u.km)
        site_itrs_temp = site.get_itrs(obstime=obstime)
        r_itrs = site_itrs_temp.cartesian
        v_itrs = CartesianDifferential(v, v, v, unit=u.km/u.s)
        site_itrs = ITRS(r_itrs.with_differentials(v_itrs), obstime=obstime)
        site_teme = site_itrs.transform_to(TEME(obstime=obstime))
        x_obs = np.array([site_teme.x.value, site_teme.y.value, site_teme.z.value,
                          site_teme.v_x.value, site_teme.v_y.value, site_teme.v_z.value])*1e3 # Meters
    elif frame == 'fk5':
        # Same workaround via an explicit CartesianRepresentation rebuild.
        # TODO: Check units for FK5(m/km)
        site = EarthLocation(lat=lat*u.deg, lon=lon*u.deg, height=height/1e3*u.km)
        site_itrs_temp = site.get_itrs(obstime=obstime)
        r_itrs = CartesianRepresentation(
            site_itrs_temp.data.xyz.value[0,:],
            site_itrs_temp.data.xyz.value[1,:],
            site_itrs_temp.data.xyz.value[2,:], unit=u.km)
        v_itrs = CartesianDifferential(v, v, v, unit=u.km/u.s)
        site_itrs = ITRS(r_itrs.with_differentials(v_itrs), obstime=obstime)
        site_fk5 = site_itrs.transform_to(FK5(obstime=obstime))
        x_obs = np.array([site_fk5.x.value, site_fk5.y.value, site_fk5.z.value,
                          site_fk5.v_x.value, site_fk5.v_y.value, site_fk5.v_z.value])*1e3 # Meters
    else:
        # Previously an unknown frame fell through to an UnboundLocalError.
        raise ValueError("frame must be one of 'teme', 'itrs' or 'fk5', got {}".format(frame))
    return x_obs
def get_x_sat_odeint_stm(x_0, t):
    """Propagate the satellite state and its state transition matrix.

    Integrates the two-body dynamics together with the STM (initialized to
    the identity) using scipy's odeint.

    Args:
        x_0 (np.ndarray): initial conditions (6, 1).
        t (np.ndarray): time array (n,).

    Returns:
        x_sat_orbdyn_stm (np.ndarray): propagated satellite states (6, n).
        Phi (np.ndarray): corresponding state transition matrices (6, 6, n).
    """
    n_state = x_0.shape[0]
    # Stack the state and the flattened identity STM into one initial vector.
    augmented_0 = np.concatenate([x_0.squeeze(), np.eye(n_state).flatten()])
    augmented = odeint(orbdyn_2body_stm, augmented_0, t, args=(MU,)).T
    x_sat_orbdyn_stm = augmented[:6, ]
    Phi = augmented[6:, ].reshape((n_state, n_state, t.shape[0]))
    return x_sat_orbdyn_stm, Phi
def get_6_oe_from_tle(tle):
    """Extract six classical orbital elements from a TLE.

    Used when generating possible orbital configurations.

    Args:
        tle (list[str]): Two-line element set.

    Returns:
        oe (np.ndarray): eccentricity, semi-major axis, inclination,
            right ascension of the ascending node, argument of perigee
            and mean anomaly.
    """
    sat = Satrec.twoline2rv(tle[0], tle[1])
    elements = (
        sat.ecco,   # Eccentricity
        sat.a,      # Semi-major axis
        sat.inclo,  # Inclination
        sat.nodeo,  # Right ascension of the ascending node
        sat.argpo,  # Argument of perigee
        sat.mo,     # Mean anomaly
    )
    return np.array(elements)
def get_example_scenario(id=0, frame='teme'):
    """ Auxiliary function to obtain example scenario variables.
    Scenario 1 or 2 works.

    Args:
        id (int): Scenario id (0-3).
        frame (str): frame (teme or itrs). Defaults to 'teme'.

    Returns:
        x_0 (np.ndarray): initial satellite position.
        t_sec (np.ndarray): time array (seconds, starting at 0).
        x_sat_orbdyn_stm (np.ndarray): odeint propagated position of the satellite.
        x_obs_multiple (np.ndarray): multiple observer positions (6, n, n_stations).
        f_downlink (float): downlink frequency of the satellite.
    """
    # Downlink frequencies per scenario (presumably MHz — TODO confirm units).
    f_downlink = [435.103, 145.980, 137.620, 435.103]
    epoch_start = [Time('2020-05-27 23:46:00'), Time('2020-06-25 06:30:00'), Time('2020-07-01 05:00:00'),
                   Time('2020-05-27 23:46:00')]
    epoch_end = [Time('2020-05-27 23:50:00'), Time('2020-06-25 06:37:00'), Time('2020-07-01 05:45:00'),
                 Time('2020-05-27 23:50:00')]
    # NOTE(review): dict.fromkeys shares ONE list across all keys; harmless
    # here only because every key is immediately reassigned below.
    tle = dict.fromkeys(range(4), [])
    # Scenario 0 - FALCONSAT-3, Sites: Atlanta, Jacksonville, Charlotte
    tle[0] = [ '1 30776U 07006E 20146.24591950 .00002116 00000-0 57170-4 0 9998',
               '2 30776 35.4350 68.4822 0003223 313.1473 46.8985 15.37715972733265']
    # Scenario 1 - FOX-1A (AO-85), Sites: Santiago, La Serena, ~La Silla
    tle[1] = [ '1 40967U 15058D 20175.33659500 +.00000007 +00000+0 +20124-4 0 687',
               '2 40967 64.7742 112.9087 0170632 72.3744 289.5913 14.76130447162443']
    # Scenario 2 -
    tle[2] = [ '1 40069U 14037A 20182.71359025 -.00000046 00000-0 -19083-5 0 9997',
               '2 40069 98.5008 219.7482 0004702 237.2338 122.8403 14.20673317310092']
    # Scenario 3 = Scenario 1, 4 stations
    tle[3] = [ '1 30776U 07006E 20146.24591950 .00002116 00000-0 57170-4 0 9998',
               '2 30776 35.4350 68.4822 0003223 313.1473 46.8985 15.37715972733265']

    # Propagate at 1-second steps over the scenario's epoch window.
    x_sat, t = get_satellite(tle[id], epoch_start[id], epoch_end[id], 1.0/86400.0, frame=frame)
    # Set first position (km -> m)
    x_0 = np.expand_dims(x_sat[:,0] * 1e3, axis=1)
    t_sec = t.to_value('unix')
    t_sec -= t_sec[0]
    # Propagate in order to get range rate measurements
    x_sat_orbdyn_stm, _ = get_x_sat_odeint_stm(x_0, t_sec)

    # Set observer position
    # Ids 0, 1, 2 - batch
    if id==0:
        x_obs_1 = get_site(33.7743331, -84.3970209, 288, obstime=t, frame=frame) # Atlanta
        x_obs_2 = get_site(30.3449153, -81.8231881, 100, obstime=t, frame=frame) # Jacksonville
        x_obs_3 = get_site(35.2030728, -80.9799098, 100, obstime=t, frame=frame) # Charlotte
        x_obs_multiple = np.transpose(np.concatenate([[x_obs_1], [x_obs_2], [x_obs_3]]), (1,2,0))
    elif id==1:
        x_obs_1 = get_site(-33.43, -70.61, 500, obstime=t, frame=frame) # Santiago
        x_obs_2 = get_site(-30.02, -70.70, 700, obstime=t, frame=frame) # Vicuna
        x_obs_3 = get_site(-28.92, -70.58, 2000, obstime=t, frame=frame) # ~La Silla
        x_obs_multiple = np.transpose(np.concatenate([[x_obs_1], [x_obs_2], [x_obs_3]]), (1,2,0))
    elif id==2:
        # TODO: Fix
        # NOTE(review): these coordinates are in the UK and France; the
        # original trailing labels (Santiago/Vicuna/La Silla) were copied
        # from id==1 and look stale — confirm intended sites.
        x_obs_1 = get_site(51.1483578, -1.4384458, 100, obstime=t, frame=frame)
        x_obs_2 = get_site(44.075, 5.5346, 50, obstime=t, frame=frame)
        x_obs_3 = get_site(48.835, 2.280, 50, obstime=t, frame=frame)
        x_obs_multiple = np.transpose(np.concatenate([[x_obs_1], [x_obs_2], [x_obs_3]]), (1,2,0))
    # TDoA simulation
    elif id==3:
        x_obs_1 = get_site(33.7743331, -84.3970209, 288, obstime=t, frame=frame) # Atlanta
        x_obs_2 = get_site(30.3449153, -81.8231881, 100, obstime=t, frame=frame) # Jacksonville
        x_obs_3 = get_site(35.2030728, -80.9799098, 100, obstime=t, frame=frame) # Charlotte
        x_obs_4 = get_site(36.1755204, -86.8595446, 100, obstime=t, frame=frame) # Test
        x_obs_multiple = np.transpose(np.concatenate([[x_obs_1], [x_obs_2], [x_obs_3], [x_obs_4]]), (1,2,0))
    return x_0, t_sec, x_sat_orbdyn_stm, x_obs_multiple, f_downlink[id]
def parse_json_data(filename:str):
    """ Temporary function to process the data from json file (end of project simulation.)

    Loads per-station observations, finds the time window common to all
    stations (latest start to earliest end), and trims every series to it.

    Args:
        filename (str): path to the file that contains simulation data for the final evaluation

    Returns:
        data_json (dict): raw parsed JSON.
        data_trunc (dict): trimmed, aligned arrays (tle, station_name,
            gpstime_unix, doppler, range [m], station_pos, station_vel)
            plus station count n_s and measurement count n_m.
    """
    # NOTE(review): the file handle is never closed — a `with open(...)`
    # block would be safer.
    json_file = open(filename)
    data_json = json.load(json_file)

    n_s = len(data_json['observation']) # Number of stations
    t_start = np.zeros(n_s) # First observation time (per station)
    t_end = np.zeros(n_s) # Last observation time (per station)
    t_start_idx = np.zeros(n_s, dtype=int)
    t_end_idx = np.zeros(n_s, dtype=int)

    # Temporary workaround
    for i, d in zip(range(n_s), data_json['observation']):
        print(f"{len(d['data']['doppler'])}, {d['data']['gpstime_unix'][0]}, {d['data']['gpstime_unix'][-1]}" )
        # Get start and end times
        t_start[i] = d['data']['gpstime_unix'][0]
        t_end[i] = d['data']['gpstime_unix'][-1]

    # Interval common to all stations: latest start, earliest end
    t_start_max = np.max(t_start)
    t_end_min = np.min(t_end)

    # Per-station indices of the common window boundaries.
    # NOTE(review): np.argwhere assumes the boundary timestamps appear exactly
    # once in every station's series — confirm for real data, otherwise the
    # scalar assignment below raises.
    for i, d in zip(range(n_s), data_json['observation']):
        temp = np.array(d['data']['gpstime_unix'])
        t_start_idx[i] = np.argwhere(temp==t_start_max)
        t_end_idx[i] = np.argwhere(temp==t_end_min)
    diff = t_end_idx - t_start_idx
    print(f"Start indices: \t{t_start_idx}")
    print(f"End indices: \t{t_end_idx}")
    print(f"Difference: \t{diff}")
    print(t_start_max, t_end_min)

    #t_start_idx = np
    #n_m = len(data_json['observation'][0]['data']['gpstime_unix']) # Number of measurements (first station)
    # NOTE(review): assumes every station yields the same window length as
    # station 0 (diff entries equal) — confirm.
    n_m = int(diff[0])

    data_tle = [] # Satellite
    data_station_name = [] # Station name
    data_gpstime_unix = np.zeros((n_m, n_s)) # Time
    data_range = np.zeros((n_m, n_s))
    data_doppler = np.zeros((n_m, n_s)) # Mesurement (?)
    data_station_pos = np.zeros((3, n_m, n_s)) # Position
    data_station_vel = np.zeros((3, n_m, n_s)) # Velocity
    #data_station_vec = np.zeros((6, n_m, n_s)) # State vector

    # Temporary: slice every per-station series to the common window.
    for i, d in zip(range(n_s), data_json['observation']):
        #print (f"{i} {d['station']}")
        data_tle.append([d['orbit']['tle1'], d['orbit']['tle2']])
        data_station_name.append(d['station'])
        temp_data_doppler = np.array(d['data']['doppler'])
        temp_data_range = np.array(d['data']['range'])
        temp_gpstime_unix = np.array(d['data']['gpstime_unix'])
        temp_station_pos = np.array(d['data']['station_pos']).T
        temp_station_vel = np.array(d['data']['station_vel']).T
        data_doppler[:,i] = temp_data_doppler[t_start_idx[i]:t_end_idx[i]]
        data_range[:,i] = temp_data_range[t_start_idx[i]:t_end_idx[i]]
        data_gpstime_unix[:,i] = temp_gpstime_unix[t_start_idx[i]:t_end_idx[i]]
        data_station_pos[:,:,i] = temp_station_pos[:,t_start_idx[i]:t_end_idx[i]]
        data_station_vel[:,:,i] = temp_station_vel[:,t_start_idx[i]:t_end_idx[i]]
    #data_station_vec[0:3, :, :] = data_station_pos
    #data_station_vec[3:6, :, :] = data_station_vel

    # Return dictionary
    # Measurements are trimmed to start and end at the same time.
    data_trunc = dict()
    data_trunc['tle'] = data_tle
    data_trunc['station_name'] = data_station_name
    data_trunc['gpstime_unix'] = data_gpstime_unix
    data_trunc['doppler'] = data_doppler
    data_trunc['range'] = data_range * 1e3 # Temp conversion from km to m
    data_trunc['station_pos'] = data_station_pos
    data_trunc['station_vel'] = data_station_vel
    data_trunc['n_s'] = n_s
    data_trunc['n_m'] = n_m
    return data_json, data_trunc
def get_site_temp(data_station_pos, obstime):
    """Convert per-station geodetic positions to TEME state vectors.

    Args:
        data_station_pos (np.ndarray): (3, n_m, n_s) array whose rows are
            used as latitude (deg), longitude (deg) and height (m).
        obstime (astropy.time.Time): observation times (n_m,).

    Returns:
        x_obs (np.ndarray): (6, n_m, n_s) station positions/velocities in
        TEME, scaled by 1e3 (velocities are zero).
    """
    _, n_m, n_s = data_station_pos.shape
    x_obs = np.zeros((6, n_m, n_s))
    v = np.zeros(obstime.shape[0])  # zero site velocity placeholder
    for i in range(n_s):
        site = EarthLocation(lat=data_station_pos[0,:,i]*u.deg, lon=data_station_pos[1,:,i]*u.deg, height = data_station_pos[2,:,i]/1000*u.km)
        site_itrs_temp = site.get_itrs(obstime=obstime)
        # Rebuild the ITRS position with km units attached so the zero
        # velocity differentials can be added.
        # NOTE(review): verify site_itrs_temp.data.xyz.value really is in km.
        r_itrs = CartesianRepresentation(
            site_itrs_temp.data.xyz.value[0,:],
            site_itrs_temp.data.xyz.value[1,:],
            site_itrs_temp.data.xyz.value[2,:], unit=u.km)
        v_itrs = CartesianDifferential(v, v, v, unit=u.km/u.s)
        site_itrs = ITRS(r_itrs.with_differentials(v_itrs), obstime=obstime)
        site_teme = site_itrs.transform_to(TEME(obstime=obstime))
        x_obs_temp = np.array([site_teme.x.value, site_teme.y.value, site_teme.z.value,
                               site_teme.v_x.value, site_teme.v_y.value, site_teme.v_z.value])*1e3
        x_obs[:,:,i] = x_obs_temp
    return x_obs
|
#!/usr/bin/env python3
import argparse
import os
import re
import sys
from typing import IO, List, Optional
from colors import COLORS, Style
from system_info import get_system_info
def main() -> None:
    """Render the system-info screen: an ASCII-art logo beside info lines.

    Flags:
      --hide-logo        render info lines only
      --set-logo-file    use an explicit logo file instead of searching
      --extra-logos-dir  extra directories searched before the bundled one
      --list-logos       print every available logo and exit
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--hide-logo", action="store_true")
    parser.add_argument("--set-logo-file")
    parser.add_argument("--extra-logos-dir", action="append", default=[])
    parser.add_argument("--list-logos", action="store_true")
    args = parser.parse_args()
    # Extra dirs come first so they take precedence over the bundled "logos".
    logos_search_dirs: List[str] = []
    logos_search_dirs.extend(args.extra_logos_dir)
    logos_search_dirs.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "logos"))
    if args.list_logos:
        for logo_dir in logos_search_dirs:
            try:
                for entry in os.scandir(logo_dir):
                    try:
                        # BUG FIX: os.path.join(logo_dir, entry) duplicated the
                        # directory prefix for relative dirs, because a
                        # DirEntry's fspath is already "<logo_dir>/<name>".
                        logo_file = open(entry.path)
                    except IsADirectoryError:
                        continue
                    with logo_file:
                        print()
                        print(logo_file.name)
                        print()
                        for line in logo_file.read().splitlines():
                            print(render_logo_line(line))
                        print()
                        print()
            except IOError as e:
                print(e, file=sys.stderr)
                continue
        return
    logo_id, info_lines = get_system_info()
    logo_lines: List[str] = []
    logo_line_widths: List[int] = []
    logo_width = 0
    logo_file: Optional[IO[str]] = None
    if args.set_logo_file:
        logo_file = open(args.set_logo_file)
    elif not args.hide_logo:
        for logo_dir in logos_search_dirs:
            try:
                logo_file = open(os.path.join(logo_dir, logo_id))
            except FileNotFoundError:
                continue
            except IOError as e:
                print(e, file=sys.stderr)
                continue
            else:
                # BUG FIX: stop at the first match so the extra dirs actually
                # override the bundled one and earlier handles are not leaked.
                break
    if logo_file is not None:
        with logo_file:
            logo_lines = logo_file.read().splitlines()
    if len(logo_lines) > 0:
        # Widths are measured on the de-styled text so padding ignores the
        # invisible color escape sequences.
        logo_line_widths = [len(render_logo_line(line, remove_styling=True)) for line in logo_lines]
        logo_width = max(logo_line_widths)
    print()
    for line_index in range(0, max(len(logo_lines), len(info_lines))):
        output_line: List[str] = []
        output_line.append(" ")
        if logo_width > 0:
            logo_line_width = 0
            if line_index < len(logo_lines):
                logo_line = logo_lines[line_index]
                logo_line_width = logo_line_widths[line_index]
                output_line.append(render_logo_line(logo_line))
            # Pad to the widest logo line so the info column stays aligned.
            output_line.append(" " * (logo_width - logo_line_width + 1))
            output_line.append(" ")
        if line_index < len(info_lines):
            info_line = info_lines[line_index]
            output_line.append(info_line)
        print("".join(output_line))
    print()
LOGO_LINE_TEMPLATE_RE = re.compile(r"{(\d+)}")

def render_logo_line(line: str, remove_styling: bool = False) -> str:
    """Expand ``{N}`` color placeholders in a logo line.

    With ``remove_styling=True`` the placeholders are stripped instead,
    yielding the plain text (useful for measuring display width).
    """
    if remove_styling:
        return LOGO_LINE_TEMPLATE_RE.sub("", line)

    def replace_placeholder(match: "re.Match[str]") -> str:
        return COLORS[int(match.group(1))]

    rendered = LOGO_LINE_TEMPLATE_RE.sub(replace_placeholder, line)
    return Style.BRIGHT + rendered + Style.RESET_ALL
if __name__ == "__main__":
    # Script entry point.
    main()
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example gRPC Python-using application's common code elements."""
from tests.testing.proto import requests_pb2
from tests.testing.proto import services_pb2
# Fully-qualified service and short method names shared by the client- and
# server-side tests of grpc_testing.
SERVICE_NAME = 'tests_of_grpc_testing.FirstService'
UNARY_UNARY_METHOD_NAME = 'UnUn'
UNARY_STREAM_METHOD_NAME = 'UnStre'
STREAM_UNARY_METHOD_NAME = 'StreUn'
STREAM_STREAM_METHOD_NAME = 'StreStre'
# Canned request/response messages; the distinct prime field values let a
# test assert exactly which message travelled through the stub.
UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2)
ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3)
UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5)
ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7)
UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11)
STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13)
STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17)
STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19)
STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23)
TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2
# Fixtures for the abort-handling tests.
ABORT_REQUEST = requests_pb2.Up(first_up_field=42)
ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43)
ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50)
ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51)
ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52)
# Seconds a server may wait on an endless request stream before timing out.
INFINITE_REQUEST_STREAM_TIMEOUT = 0.2
|
# ##################################################################################################
# Copyright (c) 2020. HuiiBuh #
# This file (test_auth_flows.py) is part of AsyncSpotify which is released under MIT. #
# You are not allowed to use this code or this file for another project without #
# linking to the original source. #
# ##################################################################################################
import pytest
from async_spotify import SpotifyApiClient
from async_spotify.authentification.authorization_flows import AuthorizationCodeFlow
from async_spotify.authentification.authorization_flows.client_credentials_flow import ClientCredentialsFlow
from async_spotify.spotify_errors import SpotifyError
from conftest import TestDataTransfer
class TestAuthFlows:
    """Integration tests for the Spotify authorization flows.

    These tests hit the real Spotify API; credentials and cookies are read
    from the environment via ``load_from_env`` and ``TestDataTransfer``.
    """

    @pytest.mark.asyncio
    async def test_authorization_code_flow(self, api: SpotifyApiClient):
        """Full authorization-code flow: cookie -> code -> token."""
        # NOTE(review): the injected `api` fixture is immediately shadowed by
        # a locally-built client — confirm the fixture is still needed.
        auth_code_flow = AuthorizationCodeFlow()
        auth_code_flow.load_from_env()
        auth_code_flow.scopes = TestDataTransfer.scopes
        api = SpotifyApiClient(auth_code_flow)
        code = await api.get_code_with_cookie(TestDataTransfer.cookies)
        await api.get_auth_token_with_code(code)

    @pytest.mark.asyncio
    async def test_inability_to_get_token_with_client_credentials(self, api: SpotifyApiClient):
        """A client built for the code flow must reject the credentials grant."""
        auth_code_flow = AuthorizationCodeFlow()
        auth_code_flow.load_from_env()
        api = SpotifyApiClient(auth_code_flow)
        with pytest.raises(SpotifyError):
            await api.get_auth_token_with_client_credentials()

    @pytest.mark.asyncio
    async def test_client_credentials(self):
        """Client-credentials flow yields a token usable for an API call."""
        client_credentials = ClientCredentialsFlow()
        client_credentials.load_from_env()
        client_credentials.scopes = TestDataTransfer.scopes
        api = SpotifyApiClient(client_credentials)
        await api.get_auth_token_with_client_credentials()
        await api.create_new_client()
        resp = await api.albums.get_one('03dlqdFWY9gwJxGl3AREVy')
        assert isinstance(resp, dict)

    @pytest.mark.asyncio
    async def test_inability_to_get_token_client_credential(self):
        """The cookie/code path must fail under the credentials flow."""
        client_credentials = ClientCredentialsFlow()
        client_credentials.load_from_env()
        client_credentials.scopes = TestDataTransfer.scopes
        api = SpotifyApiClient(client_credentials)
        with pytest.raises(SpotifyError):
            await api.get_code_with_cookie(TestDataTransfer.cookies)

    def test_client_valid(self):
        """`valid` flips to False once a required field is cleared."""
        c = ClientCredentialsFlow()
        c.load_from_env()
        v = c.valid
        assert v
        c.application_secret = None
        v = c.valid
        assert not v

    def test_invalid_access(self):
        """Unknown keys raise KeyError for both item set and get."""
        c = ClientCredentialsFlow()
        with pytest.raises(KeyError):
            c["will_error"] = "test"
        with pytest.raises(KeyError):
            v = c["will_error"]
|
'''
Configure Seaborn through a RC file, similarly to how Matplotlib does it.
To load the configuration from the RC file, simply import this package after
seaborn:
>>> import seaborn as sns
>>> import seaborn_rc
To reload the rc:
>>> seaborn_rc.load()
The RC file used is the first found in the following list:
- `$PWD/seabornrc`
- `$SEABORNRC/seabornrc`
- `$SNSCONFIGDIR/seabornrc`
- On Linux,
- `$HOME/.seaborn/seabornrc`
- `$XDG_CONFIG_HOME/seaborn/seabornrc` (if
$XDG_CONFIG_HOME is defined)
- `$HOME/.config/seaborn/seabornrc` (if
$XDG_CONFIG_HOME is not defined)
- On other platforms,
- `$HOME/.seaborn/seabornrc` if `$HOME` is defined.
'''
# Importing the package applies the RC settings immediately; call load()
# again later to re-read the file (see the module docstring above).
from .seaborn_rc import load, get_fname
load()
|
# Copyright 2017 Covata Limited or its affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import shutil
import tempfile
import pytest
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from covata.delta.keystore import FileSystemKeyStore
@pytest.fixture(scope="function")
def temp_directory():
    """Yield a fresh temporary directory, removed again at teardown.

    Uses plain ``pytest.fixture``: ``pytest.yield_fixture`` was deprecated
    and removed in pytest 6.2, and ordinary fixtures have supported
    ``yield`` since pytest 3.0.
    """
    directory = tempfile.mkdtemp()
    yield directory
    shutil.rmtree(directory)
@pytest.fixture(scope="function")
def key_store(temp_directory):
    """A FileSystemKeyStore rooted in a throwaway directory per test."""
    return FileSystemKeyStore(temp_directory, "passphrase")
@pytest.fixture(scope="session")
def private_key():
    """One shared 4096-bit RSA key for the whole session.

    Session-scoped because 4096-bit key generation is slow; presumably the
    tests never mutate the key, so sharing is safe — verify if new tests
    are added.
    """
    return rsa.generate_private_key(public_exponent=65537,
                                    key_size=4096,
                                    backend=default_backend())
@pytest.fixture(scope="session")
def key2bytes():
    """Return a converter serializing RSA keys to transportable text.

    Private keys become PEM/PKCS8 text; public keys become a
    base64-encoded DER SubjectPublicKeyInfo string; anything else
    yields None.
    """
    def convert(key):
        if isinstance(key, rsa.RSAPrivateKey):
            return key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.PKCS8,
                encryption_algorithm=serialization.NoEncryption())
        if isinstance(key, rsa.RSAPublicKey):
            der_bytes = key.public_bytes(
                encoding=serialization.Encoding.DER,
                format=serialization.PublicFormat.SubjectPublicKeyInfo)
            return base64.b64encode(der_bytes).decode(encoding='utf-8')
        return None
    return convert
|
import asyncio
import datetime
from nonebot import get_driver, logger, on_command, require
from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message
from nonebot.params import CommandArg
from nonebot.permission import SUPERUSER
from utils.config_util import SubManager, SubList
from utils.utils import get_diff_days_2_now, send_group_msg
from ..weather import Weather
from .data_source import StatusCode, cs_manager, s_config
# Scheduler from nonebot_plugin_apscheduler; all cron jobs below hang off it.
course_sub = require("nonebot_plugin_apscheduler").scheduler
# Maps datetime.weekday()+1 (1=Monday ... 7=Sunday) to the Chinese numeral.
week_table = {1: '一', 2: '二', 3: '三', 4: '四', 5: '五', 6: '六', 7: '日'}
# Group ids that receive administrative notifications.
super_group = get_driver().config.super_group
@course_sub.scheduled_job("cron", day_of_week='4', hour='19', minute='35', second='00')
async def update():
    """Weekly scheduled course-table refresh (Friday 19:35).

    Forces a refresh via ``cs_manager`` and notifies every superuser group
    about success or failure.
    """
    code = await cs_manager.refresh_data(True)
    if code.code == 0:
        logger.success("课表定时更新成功")
        # Renamed loop variable from `id` to avoid shadowing the builtin.
        for group_id in super_group:
            await send_group_msg(group_id, "课表更新成功~")
    else:
        logger.warning("课表定时更新失败")
        for group_id in super_group:
            await send_group_msg(group_id, "课表更新失败,请检查日志~")
c_schedule_sub = SubList.add('每日课表', SubManager('cs_sub'))
@course_sub.scheduled_job("cron", day_of_week='0-4', hour='07', minute='10', second='00')
async def run():
    """
    Push the daily course table to subscribed groups (weekdays 07:10).
    TODO: to be improved
        refine the on/off switch
    """
    # Skip entirely outside the semester.
    if not s_config.is_begin():
        return
    if not c_schedule_sub.get_status():
        logger.info("每日课表提醒已被关闭")
        return
    # Fetch today's weather for the configured city.
    city = s_config.get_location()
    w_daily = await Weather.daily(city)
    data = w_daily['daily'][0]
    # weekday()+1 maps Monday..Sunday to 1..7 for week_table lookup.
    weekday = datetime.datetime.now().weekday() + 1
    # Academic week number, counted from the configured semester start date.
    week = get_diff_days_2_now(s_config.get_start_date()) // 7 + 1
    msg = f'早上好!\n今天是周{week_table.get(weekday)},本学期第 {week} 周\n==========\n' + cs_manager.get_cs_today()
    # Append the weather summary.
    msg += f'\n============\n{city} 日间天气:\n{data["textDay"]},{data["tempMin"]}~{data["tempMax"]}℃'
    group_id = c_schedule_sub.get_groups()
    for g_id in group_id:
        # Throttle sends to avoid flooding the bot API.
        await asyncio.sleep(5)
        if await send_group_msg(g_id, msg):
            logger.success(f'向 {g_id} 发送今日课表')
        else:
            logger.warning(f'向 {g_id} 发送课表失败')
# Command matchers; mutating commands are restricted to superusers.
cs_select = on_command("查课表", priority=5)
cs_select_week = on_command("本周课表", priority=5)
cs_update = on_command("添加课表", priority=5, permission=SUPERUSER)
cs_delete = on_command("删除课表", priority=5, permission=SUPERUSER)
cs_refresh = on_command("更新课表", priority=5, permission=SUPERUSER)
cs_black_list = on_command("课表黑名单", priority=5, permission=SUPERUSER)
@cs_select.handle()
async def _(event: GroupMessageEvent):
    """Reply with today's course table."""
    await cs_select.finish(cs_manager.get_cs_today())
@cs_select_week.handle()
async def _(event: GroupMessageEvent):
    """Reply with the full course table for the current week."""
    await cs_select_week.finish(cs_manager.get_cs_week())
@cs_update.handle()
async def _(event: GroupMessageEvent, par: Message = CommandArg()):
    """Add a course entry; with no arguments, reply with the usage text."""
    if not par:
        await cs_update.finish("参数格式:\n课程名称 第几节 教室 开始周次 结束周次\n中间用空格分隔\nEG: 体育与健康 30506 操场 4 13")
    else:
        fields = par.extract_plain_text().split(' ')
        if len(fields) != 5:
            await cs_update.finish('参数有误!')
        result = await cs_manager.update_data(*fields)
        await cs_update.finish(result.errmsg)
@cs_delete.handle()
async def _(event: GroupMessageEvent, par: Message = CommandArg()):
    """Delete a sub-table by 1-based index; with no arguments, list them."""
    if not par:
        await cs_delete.finish(cs_manager.get_sub_table_name_list())
    else:
        param = par.extract_plain_text()
        tables = cs_manager.get_sub_table_list()
        # Short-circuit keeps int(param) safe: only evaluated when digits.
        if not param.isdigit() or int(param) not in range(1, len(tables) + 1):
            await cs_delete.finish("参数有误~")
        else:
            result = cs_manager.del_data(int(param))
            await cs_delete.finish(result.errmsg)
@cs_refresh.handle()
async def _(event: GroupMessageEvent):
    """Re-fetch the course data and report the outcome message."""
    result = await cs_manager.refresh_data()
    await cs_refresh.finish(result.errmsg)
@cs_black_list.handle()
async def _(event: GroupMessageEvent, par: Message = CommandArg()):
    """Show the blacklist (no arguments) or add an entry to it."""
    if not par:
        # Falls back to the placeholder when the list is empty/falsy.
        reply = cs_manager.get_black_list() or '黑名单为空~'
        await cs_black_list.finish(reply)
    else:
        entry = par.extract_plain_text()
        result = cs_manager.add_black_list(entry)
        await cs_black_list.finish(result.errmsg)
|
# Pandas Data Operation
import pandas as pd
import numpy as np
def make_df(cols, ind):
    """Quickly make a DataFrame whose cells are "<col><index>" strings."""
    cells = {col: ['%s%s' % (col, i) for i in ind] for col in cols}
    return pd.DataFrame(cells, ind)
def concatenation():
    """Demonstrate NumPy and pandas concatenation behaviour via print-outs.

    Updated for current pandas: ``pd.concat(..., join_axes=...)`` (removed
    in pandas 1.0) is replaced by ``reindex``, and ``DataFrame.append``
    (removed in pandas 2.0) by ``pd.concat``. Two mislabelled print lines
    ('ignore_index=sTrue', and 'x'/'y' for df5/df6) are also corrected.
    """
    x = [1, 2, 3]
    y = [4, 5, 6]
    z = [7, 8, 9]
    print('np.concatenate([x, y, z]) \r\n', np.concatenate([x, y, z]))
    x = [[1, 2],
         [3, 4]]
    print('np.concatenate([x, x], axis=1) \r\n', np.concatenate([x, x], axis=1))
    ser1 = pd.Series(['A', 'B', 'C'], index=[1, 2, 3])
    ser2 = pd.Series(['D', 'E', 'F'], index=[3, 5, 6])
    print(pd.concat([ser1, ser2]))
    df1 = make_df('AB', [1, 2])
    df2 = make_df('AB', [3, 4])
    print('df1 \r\n', df1)
    print('df2 \r\n', df2)
    print(pd.concat([df1, df2]))
    df3 = make_df('AB', [0, 1])
    df4 = make_df('CD', [0, 1])
    print('df3 \r\n', df3)
    print('df4 \r\n', df4)
    print(pd.concat([df3, df4], axis=1))  # can't use the intuitive axis='col'.
    # Duplicate indices
    x = make_df('AB', [0, 1])
    y = make_df('AB', [2, 3])
    y.index = x.index  # make duplicate indices!
    print('x \r\n', x)
    print('y \r\n', y)
    print('Duplicate indices \r\n', pd.concat([x, y]))
    # Catching the repeats as an error
    try:
        pd.concat([x, y], verify_integrity=True)
    except ValueError as e:
        print("ValueError:", e)
    # Ignoring the index (label typo 'sTrue' fixed)
    print('ignore_index=True \r\n', pd.concat([x, y], ignore_index=True))
    # Adding MultiIndex keys
    print('keys=[x,y] \r\n', pd.concat([x, y], keys=['x', 'y']))
    # Concatenation with joins
    df5 = make_df('ABC', [1, 2])
    df6 = make_df('BCD', [3, 4])
    print('df5 \r\n', df5)
    print('df6 \r\n', df6)
    print('concat \r\n', pd.concat([df5, df6]))
    print('join=inner \r\n', pd.concat([df5, df6], join='inner'))
    # join_axes was removed in pandas 1.0; reindex reproduces the result.
    print('reindex(columns=df5.columns) \r\n',
          pd.concat([df5, df6]).reindex(columns=df5.columns))
    # DataFrame.append was removed in pandas 2.0; pd.concat replaces it.
    print('append \r\n', pd.concat([df1, df2]))
    print('df1 \r\n', df1)
def join():
    """Demonstrate pandas merge/join behaviour via print-outs.

    Covers one-to-one, many-to-one and many-to-many merges, explicit key
    columns (``on``/``left_on``/``right_on``), index-based merges, outer/left
    joins and suffix handling. One print label is corrected: the many-to-one
    result label previously said ``pd.merge(df3, df5)`` although ``df4`` is
    merged.
    """
    df1 = pd.DataFrame({'employee': ['Bob', 'Jake', 'Lisa', 'Sue'],
                        'group': ['Accounting', 'Engineering', 'Engineering', 'HR']})
    df2 = pd.DataFrame({'employee': ['Lisa', 'Bob', 'Jake', 'Sue'],
                        'hire_date': [2004, 2008, 2012, 2014]})
    df3 = pd.merge(df1, df2)
    print('df1: \r\n', df1, '\r\ndf2: \r\n', df2, '\r\ndf3 = pd.merge(df1, df2): \r\n', df3)
    # Many-to-one joins
    df4 = pd.DataFrame({'group': ['Accounting', 'Engineering', 'HR'],
                        'supervisor': ['Carly', 'Guido', 'Steve']})
    df34 = pd.merge(df3, df4)
    print('df4: \r\n', df4, '\r\npd.merge(df3, df4): \r\n', df34)  # label fixed (was df5)
    # Many-to-many joins
    df5 = pd.DataFrame({'group': ['Accounting', 'Accounting',
                                  'Engineering', 'Engineering', 'HR', 'HR'],
                        'skills': ['math', 'spreadsheets', 'coding', 'linux',
                                   'spreadsheets', 'organization']})
    df15 = pd.merge(df1, df5)
    print('df5: \r\n', df5, '\r\npd.merge(df1, df5): \r\n', df15)
    pd.merge(df1, df2, on='employee')  # explicit key column; same result as df3
    # The left_on and right_on keywords
    df6 = pd.DataFrame({'name': ['Bob', 'Jake', 'Lisa', 'Sue'],
                        'salary': [70000, 80000, 120000, 90000]})
    df7 = pd.merge(df1, df6, left_on="employee", right_on="name")
    print('df6: \r\n', df6, '\r\n pd.merge(df1, df6,left_on=employee, right_on=name): \r\n', df7)
    print(df7.drop('name', axis=1))  # The result has a redundant column that we can drop
    # The left_index and right_index keywords
    df1a = df1.set_index('employee')
    df2a = df2.set_index('employee')
    print('\r\n df1a: \r\n', df1a, 'df2a: \r\n', df2a)
    m1 = pd.merge(df1a, df2a, left_index=True, right_index=True)
    print('merge(df1a, df2a, left_index=True, right_index=True) : \r\n', m1)
    df1a.join(df2a)  # join() performs a merge that defaults to joining on indices
    # mix indices and columns
    m2 = pd.merge(df1a, df6, left_index=True, right_on='name')
    print('merge(df1a, df6, left_index=True, right_on=name) : \r\n', m2)
    df8 = pd.DataFrame({'name': ['Peter', 'Paul', 'Mary'],
                        'food': ['fish', 'beans', 'bread']},
                       columns=['name', 'food'])
    df9 = pd.DataFrame({'name': ['Mary', 'Joseph'],
                        'drink': ['wine', 'beer']},
                       columns=['name', 'drink'])
    print('df8: \r\n', df8, '\r\ndf9: \r\n', df9)
    print('pd.merge(df8, df9) \r\n', pd.merge(df8, df9))
    print('pd.merge(df8, df9, how=outer) \r\n', pd.merge(df8, df9, how='outer'))
    print('pd.merge(df8, df9, how=left) \r\n', pd.merge(df8, df9, how='left'))
    df10 = pd.DataFrame({'name': ['Bob', 'Jake', 'Lisa', 'Sue'],
                         'rank': [1, 2, 3, 4]})
    df11 = pd.DataFrame({'name': ['Bob', 'Jake', 'Lisa', 'Sue'],
                         'rank': [3, 1, 4, 2]})
    print('pd.merge(df10, df11,on="name") \r\n', pd.merge(df10, df11, on="name"))
    print('pd.merge(df10, df11, on=name, suffixes=[_L, _R]) \r\n', pd.merge(df10, df11, on="name", suffixes=["_L", "_R"]))
if __name__ == '__main__':
    # Print library versions, then run the merge/join demo; the
    # concatenation demo is kept available but disabled.
    print('Numpy Version:', np.__version__)
    print('Pandas Version:', pd.__version__)
    # concatenation()
    join()
|
import tokenize
import token
from StringIO import StringIO
def fixLazyJsonWithComments (in_text):
    """Turn "lazy" JSON into strict JSON, stripping // and /* */ comments.

    Besides removing comments this quotes bare identifiers, rewrites
    single-quoted strings as double-quoted ones, and drops trailing
    commas before '}' or ']'.
    """
    out_tokens = []
    token_stream = tokenize.generate_tokens(StringIO(in_text).readline)
    in_line_comment = False
    in_block_comment = False
    prev_value = ''

    for tok_type, tok_value, _, _, _ in token_stream:
        # Inside a // comment: discard everything to the end of the line.
        if in_line_comment:
            if tok_type in (token.NEWLINE, tokenize.NL):
                in_line_comment = False
            continue
        # Inside a /* ... */ comment: discard until the closing '*/'.
        if in_block_comment:
            if prev_value == '*' and tok_value == '/':
                in_block_comment = False
            prev_value = tok_value
            continue

        if tok_type == token.NAME:
            # Quote bare identifiers, but keep JSON literal words as-is.
            if tok_value not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']:
                tok_type = token.STRING
                tok_value = u'"%s"' % tok_value
        elif tok_type == token.STRING:
            # Rewrite single-quoted strings, escaping embedded double quotes.
            if tok_value.startswith("'"):
                tok_value = u'"%s"' % tok_value[1:-1].replace('"', '\\"')
        elif tok_type == token.OP and tok_value in ('}', ']'):
            # Drop a trailing comma immediately before a closing bracket.
            if out_tokens and out_tokens[-1][1] == ',':
                out_tokens.pop()
        elif tok_value == "//":
            in_line_comment = True
            continue
        elif prev_value == '/' and tok_value == '*':
            # The '/' already emitted belongs to the comment opener.
            out_tokens.pop()
            in_block_comment = True
            continue

        out_tokens.append((tok_type, tok_value))
        prev_value = tok_value

    return tokenize.untokenize(out_tokens)
# Generated by Django 3.2.1 on 2021-05-20 21:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for marketApp: Acct, Category, User, Shop, Product.

    Auto-generated by Django 3.2.1; the operation bodies must stay in sync
    with the recorded migration state, so only comments are added here.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Standalone lookup table referenced by User.
        migrations.CreateModel(
            name='Acct',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('typeName', models.CharField(max_length=20)),
            ],
        ),
        # Created without its FK; `theUser` is attached via AddField below
        # because User does not exist yet at this point.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('catName', models.CharField(max_length=45)),
            ],
        ),
        # NOTE(review): password is a plain CharField(45) — looks like
        # hashing is handled elsewhere or missing; confirm.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('firstName', models.CharField(max_length=45)),
                ('lastName', models.CharField(max_length=45)),
                ('email', models.EmailField(max_length=254, unique=True)),
                ('username', models.CharField(max_length=45)),
                ('password', models.CharField(max_length=45)),
                ('userCreatedAt', models.DateTimeField(auto_now_add=True)),
                ('userUpdatedAt', models.DateTimeField(auto_now=True)),
                ('acct', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='users', to='marketApp.acct')),
            ],
        ),
        migrations.CreateModel(
            name='Shop',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shopName', models.CharField(max_length=45)),
                ('shopDescription', models.TextField()),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shops', to='marketApp.user')),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('itemName', models.CharField(max_length=45)),
                ('itemDescription', models.TextField()),
                ('itemPrice', models.CharField(max_length=45)),
                ('itemImg', models.CharField(max_length=255)),
                ('itemCount', models.CharField(max_length=45)),
                ('categories', models.ManyToManyField(related_name='products', to='marketApp.Category')),
                ('shop', models.ManyToManyField(related_name='theProducts', to='marketApp.Shop')),
                ('theOwner', models.ManyToManyField(related_name='ownerProducts', to='marketApp.User')),
            ],
        ),
        # Deferred FK from Category to User (see note above).
        migrations.AddField(
            model_name='category',
            name='theUser',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='categories', to='marketApp.user'),
        ),
    ]
|
# ---------------------------------------------------------------
# visual_spatial.py
# Set-up time: 2020/4/28 下午8:46
# Copyright (c) 2020 ICT
# Licensed under The MIT License [see LICENSE for details]
# Written by Kenneth-Wong (Wenbin-Wang) @ VIPL.ICT
# Contact: wenbin.wang@vipl.ict.ac.cn [OR] nkwangwenbin@gmail.com
# ---------------------------------------------------------------
from __future__ import division
import torch
import torch.nn as nn
from torch.nn.modules.utils import _pair
from mmcv.cnn import normal_init, kaiming_init
from mmdet import ops
from mmdet.ops import ConvModule
from mmdet.core import force_fp32
from mmdet.core.utils import enumerate_by_image
from ..registry import RELATION_ROI_EXTRACTORS
from mmdet.models.relation_heads.approaches import PointNetFeat
import numpy as np
@RELATION_ROI_EXTRACTORS.register_module
class VisualSpatialExtractor(nn.Module):
"""Extract RoI features from a single level feature map.
If there are mulitple input feature levels, each RoI is mapped to a level
according to its scale.
Args:
roi_layer (dict): Specify RoI layer type and arguments.
out_channels (int): Output channels of RoI layers.
featmap_strides (int): Strides of input feature maps.
finest_scale (int): Scale threshold of mapping to level 0.
"""
    def __init__(self,
                 bbox_roi_layer,
                 in_channels,
                 featmap_strides,
                 roi_out_channels=256,
                 fc_out_channels=1024,
                 finest_scale=56,
                 mask_roi_layer=None,
                 with_avg_pool=False,
                 with_visual_bbox=True,
                 with_visual_mask=False,
                 with_visual_point=False,
                 with_spatial=False,
                 separate_spatial=False,
                 gather_visual='sum',
                 conv_cfg=None,
                 norm_cfg=dict(type='BN', requires_grad=True)):
        """Build the visual (bbox/mask/point) and optional spatial branches.

        Args:
            bbox_roi_layer (dict): config of the RoI layer for box features
                (its 'out_size' defaults to 7).
            in_channels (int): channels of the input feature maps.
            featmap_strides (list[int]): stride of each input feature level.
            roi_out_channels (int): channels produced by the RoI layers.
            fc_out_channels (int): width of the FC heads.
            finest_scale (int): scale threshold for FPN level mapping.
            mask_roi_layer (dict | None): RoI layer config for mask features;
                required when ``with_visual_mask`` is True.
            with_avg_pool (bool): average-pool RoI features instead of
                flattening them before the FC heads.
            with_visual_bbox/with_visual_mask/with_visual_point (bool):
                which visual branches to build (at least one of bbox/mask).
            with_spatial (bool): also build the 2-channel spatial conv branch.
            separate_spatial (bool): give the spatial branch its own FC head.
            gather_visual (str): how to merge multiple visual heads
                ('sum', 'prod' or 'cat').
            conv_cfg / norm_cfg: conv/norm configs forwarded to ConvModule.
        """
        super(VisualSpatialExtractor, self).__init__()
        self.roi_feat_size = _pair(bbox_roi_layer.get('out_size', 7))
        self.roi_feat_area = self.roi_feat_size[0] * self.roi_feat_size[1]
        self.in_channels = in_channels
        self.roi_out_channels = roi_out_channels
        self.fc_out_channels = fc_out_channels
        self.featmap_strides = featmap_strides
        self.finest_scale = finest_scale
        self.fp16_enabled = False
        self.with_avg_pool = with_avg_pool
        self.with_visual_bbox = with_visual_bbox
        self.with_visual_mask = with_visual_mask
        self.with_visual_point = with_visual_point
        self.with_spatial = with_spatial
        self.separate_spatial = separate_spatial
        self.gather_visual = gather_visual
        # NOTE: do not inculde the visual_point_head
        self.num_visual_head = int(self.with_visual_bbox) + int(self.with_visual_mask)
        if self.num_visual_head == 0:
            raise ValueError('There must be at least one visual head. ')

        in_channels = self.in_channels
        if self.with_avg_pool:
            self.avg_pool = nn.AvgPool2d(self.roi_feat_size)
        else:
            # Flattened RoI features feed the FC heads directly.
            in_channels *= self.roi_feat_area

        # set some caches (filled by union_roi_forward, read via properties)
        self._union_rois = None
        self._pair_rois = None

        # build visual head: extract visual features.
        if self.with_visual_bbox:
            assert bbox_roi_layer is not None
            self.bbox_roi_layers = self.build_roi_layers(bbox_roi_layer, featmap_strides)
            self.visual_bbox_head = nn.Sequential(*[
                nn.Linear(in_channels, self.fc_out_channels),
                nn.ReLU(inplace=True),
                nn.Linear(self.fc_out_channels, self.fc_out_channels),
                nn.ReLU(inplace=True)])
        if self.with_visual_mask:
            assert mask_roi_layer is not None
            self.mask_roi_layers = self.build_roi_layers(mask_roi_layer, featmap_strides)
            self.visual_mask_head = nn.Sequential(*[
                nn.Linear(in_channels, self.fc_out_channels),
                nn.ReLU(inplace=True),
                nn.Linear(self.fc_out_channels, self.fc_out_channels),
                nn.ReLU(inplace=True)])
        if self.with_visual_point:
            # TODO: build the point feats extraction head.
            self.pointFeatExtractor = PointNetFeat()

        # Fusion head used when both bbox and mask branches are active;
        # 'cat' doubles the input width, 'sum'/'prod' keep it.
        if self.num_visual_head > 1:
            gather_in_channels = self.fc_out_channels * 2 if self.gather_visual == 'cat' else self.fc_out_channels
            self.gather_visual_head = nn.Sequential(*[
                nn.Linear(gather_in_channels, self.fc_out_channels),
                nn.ReLU(inplace=True)])

        # build spatial_head: 2-channel rasterized box-pair input.
        if self.with_spatial:
            self.spatial_size = self.roi_feat_size[0] * 4 - 1
            # NOTE(review): ConvModule order is ('conv','act','norm') — norm
            # after activation is unusual; presumably intentional, confirm.
            self.spatial_conv = nn.Sequential(*[
                ConvModule(2,
                           self.in_channels // 2,
                           kernel_size=7,
                           stride=2,
                           padding=3,
                           conv_cfg=conv_cfg,
                           norm_cfg=norm_cfg,
                           order=('conv', 'act', 'norm')),
                nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
                ConvModule(self.in_channels // 2,
                           self.roi_out_channels,
                           kernel_size=3,
                           stride=1,
                           padding=1,
                           conv_cfg=conv_cfg,
                           norm_cfg=norm_cfg,
                           order=('conv', 'act', 'norm'))])
            if self.separate_spatial:
                self.spatial_head = nn.Sequential(*[nn.Linear(in_channels, self.fc_out_channels),
                                                    nn.ReLU(inplace=True),
                                                    nn.Linear(self.fc_out_channels, self.fc_out_channels),
                                                    nn.ReLU(inplace=True)])
    @property
    def num_inputs(self):
        """int: Input feature map levels."""
        return len(self.featmap_strides)
    @property
    def union_rois(self):
        # Cached union boxes (x1,y1,x2,y2) from the last union_roi_forward.
        return self._union_rois
    @property
    def pair_rois(self):
        # Cached head/tail box pairs from the last union_roi_forward.
        return self._pair_rois
    def init_weights(self):
        """Initialize the FC heads (kaiming) and spatial convs (normal).

        The PointNet branch is deliberately left untouched. The order of the
        init calls is kept as-is so RNG draws stay reproducible.
        """
        if self.with_visual_bbox:
            for m in self.visual_bbox_head:
                if isinstance(m, nn.Linear):
                    kaiming_init(m, distribution='uniform', a=1)
        if self.with_visual_mask:
            for m in self.visual_mask_head:
                if isinstance(m, nn.Linear):
                    kaiming_init(m, distribution='uniform', a=1)
        if self.with_visual_point:
            pass
            # for the pointNet head, just leave it there, do not
        if self.num_visual_head > 1:
            for m in self.gather_visual_head:
                if isinstance(m, nn.Linear):
                    kaiming_init(m, distribution='uniform', a=1)
        if self.with_spatial:
            for m in self.spatial_conv:
                if isinstance(m, ConvModule):
                    normal_init(m.conv, std=0.01)
            if self.separate_spatial:
                for m in self.spatial_head:
                    if isinstance(m, nn.Linear):
                        kaiming_init(m, distribution='uniform', a=1)
def build_roi_layers(self, layer_cfg, featmap_strides):
cfg = layer_cfg.copy()
layer_type = cfg.pop('type')
assert hasattr(ops, layer_type)
layer_cls = getattr(ops, layer_type)
roi_layers = nn.ModuleList(
[layer_cls(spatial_scale=1 / s, **cfg) for s in featmap_strides])
return roi_layers
def map_roi_levels(self, rois, num_levels):
"""Map rois to corresponding feature levels by scales.
- scale < finest_scale * 2: level 0
- finest_scale * 2 <= scale < finest_scale * 4: level 1
- finest_scale * 4 <= scale < finest_scale * 8: level 2
- scale >= finest_scale * 8: level 3
Args:
rois (Tensor): Input RoIs, shape (k, 5).
num_levels (int): Total level number.
Returns:
Tensor: Level index (0-based) of each RoI, shape (k, )
"""
scale = torch.sqrt(
(rois[:, 3] - rois[:, 1] + 1) * (rois[:, 4] - rois[:, 2] + 1))
target_lvls = torch.floor(torch.log2(scale / self.finest_scale + 1e-6))
target_lvls = target_lvls.clamp(min=0, max=num_levels - 1).long()
return target_lvls
def roi_rescale(self, rois, scale_factor):
cx = (rois[:, 1] + rois[:, 3]) * 0.5
cy = (rois[:, 2] + rois[:, 4]) * 0.5
w = rois[:, 3] - rois[:, 1] + 1
h = rois[:, 4] - rois[:, 2] + 1
new_w = w * scale_factor
new_h = h * scale_factor
x1 = cx - new_w * 0.5 + 0.5
x2 = cx + new_w * 0.5 - 0.5
y1 = cy - new_h * 0.5 + 0.5
y2 = cy + new_h * 0.5 - 0.5
new_rois = torch.stack((rois[:, 0], x1, y1, x2, y2), dim=-1)
return new_rois
    def roi_forward(self, roi_layers, feats, rois, masks=None, roi_scale_factor=None):
        """Extract RoI features from single- or multi-level feature maps.

        With one level, the single RoI layer is applied directly. With FPN
        features, each RoI is routed to the level chosen by map_roi_levels
        and the results are scattered back into one output tensor.
        ShapeAwareRoIAlign layers additionally consume per-RoI masks.
        """
        if len(feats) == 1:
            if roi_layers[0].__class__.__name__ == 'ShapeAwareRoIAlign':
                assert masks is not None
                roi_feats = roi_layers[0](feats[0], rois, masks)
            else:
                roi_feats = roi_layers[0](feats[0], rois)
        else:
            out_size = roi_layers[0].out_size
            num_levels = self.num_inputs
            target_lvls = self.map_roi_levels(rois, num_levels)
            roi_feats = feats[0].new_zeros(
                rois.size(0), self.roi_out_channels, *out_size)
            if roi_scale_factor is not None:
                assert masks is None  # not applicated for shape-aware roi align
                rois = self.roi_rescale(rois, roi_scale_factor)
            for i in range(num_levels):
                inds = target_lvls == i
                if inds.any():
                    rois_ = rois[inds, :]
                    if roi_layers[i].__class__.__name__ == 'ShapeAwareRoIAlign':
                        # Subset the mask list to the RoIs mapped to level i.
                        masks_ = [masks[idx] for idx in torch.nonzero(inds).view(-1)]
                        roi_feats_t = roi_layers[i](feats[i], rois_, masks_)
                    else:
                        roi_feats_t = roi_layers[i](feats[i], rois_)
                    roi_feats[inds] = roi_feats_t
        return roi_feats
    def single_roi_forward(self, feats, rois, masks=None, points=None, roi_scale_factor=None):
        """Extract and fuse per-RoI visual features.

        Runs the enabled bbox/mask RoI branches, projects each through its FC
        head, and fuses them per ``gather_visual`` ('cat'/'sum'/'prod') when
        both are active. Point features, if enabled, are returned separately
        and are NOT fused.

        Returns:
            (roi_feats,) or (roi_feats, roi_feats_point, trans_matrix) when
            the point branch is active.
        """
        roi_feats_bbox, roi_feats_mask, roi_feats_point = None, None, None
        # 1. Use the visual and spatial head to extract roi features.
        if self.with_visual_bbox:
            roi_feats_bbox = self.roi_forward(self.bbox_roi_layers, feats, rois, masks, roi_scale_factor)
        if self.with_visual_mask:
            roi_feats_mask = self.roi_forward(self.mask_roi_layers, feats, rois, masks, roi_scale_factor)
        if self.with_visual_point:
            # input: (N_entity, Ndim(2), N_point)
            # output: (N_entity, feat_dim(1024))
            roi_feats_point, trans_matrix, _ = self.pointFeatExtractor(torch.stack(points).transpose(2, 1))

        roi_feats_result = []
        # gather the visual features, do not include the features from points
        for roi_feats, head in ((roi_feats_bbox, getattr(self, 'visual_bbox_head', None)),
                                (roi_feats_mask, getattr(self, 'visual_mask_head', None))):
            if head is not None:
                # Flatten (N, C, H, W) -> (N, C*H*W) before the FC head.
                roi_feats_result.append(head(roi_feats.view(roi_feats.size(0), -1)))
        if self.num_visual_head > 1:
            if self.gather_visual == 'cat':
                roi_feats_result = torch.cat(roi_feats_result, dim=-1)
            elif self.gather_visual == 'sum':
                roi_feats_result = torch.stack(roi_feats_result).sum(0)
            elif self.gather_visual == 'prod':
                roi_feats_result = torch.stack(roi_feats_result).prod(0)
            else:
                raise NotImplementedError(
                    'The gathering operation {} is not implemented yet.'.format(self.gather_visual))
            roi_feats = self.gather_visual_head(roi_feats_result)
        else:
            roi_feats = roi_feats_result[0]
        if self.with_visual_point:
            return (roi_feats, roi_feats_point, trans_matrix)
        else:
            return (roi_feats,)
def union_roi_forward(self, feats, img_metas, rois, rel_pair_idx, masks=None, points=None, roi_scale_factor=None):
    """Extract features for the union region of each subject/object pair.

    Args:
        feats: multi-level feature maps; ``feats[0]`` is (num_images, C, H, W).
        img_metas: per-image meta dicts; ``img_shape[:2]`` is (height, width).
        rois: (num_obj, 5) boxes laid out as (batch_idx, x1, y1, x2, y2).
        rel_pair_idx: per-image (num_rel_i, 2) pair indices, local to each image.
        masks: optional per-object masks (used when ``self.with_visual_mask``).
        points: optional per-object point sets (used when ``self.with_visual_point``).
        roi_scale_factor: optional rescale factor forwarded to ``roi_forward``.

    Returns:
        ``(roi_feats, ...)`` — gathered visual features, plus spatial and/or
        point features depending on configuration.
    """
    assert self.with_spatial
    num_images = feats[0].size(0)
    assert num_images == len(rel_pair_idx)
    # Convert per-image pair indices into global indices over the batch rois.
    rel_pair_index = []
    im_inds = rois[:, 0]
    acc_obj = 0
    for i, s, e in enumerate_by_image(im_inds):
        num_obj_i = e - s
        rel_pair_idx_i = rel_pair_idx[i].clone()
        rel_pair_idx_i[:, 0] += acc_obj
        rel_pair_idx_i[:, 1] += acc_obj
        acc_obj += num_obj_i
        rel_pair_index.append(rel_pair_idx_i)
    rel_pair_index = torch.cat(rel_pair_index, 0)
    # prepare the union rois: smallest box enclosing both head and tail
    head_rois = rois[rel_pair_index[:, 0], :]
    tail_rois = rois[rel_pair_index[:, 1], :]
    head_rois_int = head_rois.cpu().numpy().astype(np.int32)
    tail_rois_int = tail_rois.cpu().numpy().astype(np.int32)
    union_rois = torch.stack([head_rois[:, 0],
                              torch.min(head_rois[:, 1], tail_rois[:, 1]),
                              torch.min(head_rois[:, 2], tail_rois[:, 2]),
                              torch.max(head_rois[:, 3], tail_rois[:, 3]),
                              torch.max(head_rois[:, 4], tail_rois[:, 4])], -1)
    self._union_rois = union_rois[:, 1:]
    self._pair_rois = torch.cat((head_rois[:, 1:], tail_rois[:, 1:]), dim=-1)
    # OPTIONAL: paste head/tail masks onto a canvas covering the union box
    union_masks = None
    if masks is not None and self.with_visual_mask:
        union_rois_int = union_rois.cpu().numpy().astype(np.int32)
        union_heights = union_rois_int[:, 4] - union_rois_int[:, 2] + 1
        union_widths = union_rois_int[:, 3] - union_rois_int[:, 1] + 1
        union_masks = []
        for i, pair_idx in enumerate(rel_pair_index.cpu().numpy()):
            head_mask, tail_mask = masks[pair_idx[0]], masks[pair_idx[1]]
            union_mask = torch.zeros(union_heights[i], union_widths[i]).to(head_mask)
            # shift both masks into the union box's local coordinate frame
            base_x, base_y = union_rois_int[i, 1], union_rois_int[i, 2]
            union_mask[(head_rois_int[i, 2] - base_y):(head_rois_int[i, 4] - base_y + 1),
                       (head_rois_int[i, 1] - base_x):(head_rois_int[i, 3] - base_x + 1)] = head_mask
            union_mask[(tail_rois_int[i, 2] - base_y):(tail_rois_int[i, 4] - base_y + 1),
                       (tail_rois_int[i, 1] - base_x):(tail_rois_int[i, 3] - base_x + 1)] = tail_mask
            union_masks.append(union_mask)
    # OPTIONAL: concatenate head and tail point sets per relation
    union_points = None
    if points is not None and self.with_visual_point:
        union_points = []
        for i, pair_idx in enumerate(rel_pair_index.cpu().numpy()):
            head_points, tail_points = points[pair_idx[0]], points[pair_idx[1]]
            pts = torch.cat((head_points, tail_points), dim=0)
            union_points.append(pts)
    roi_feats_bbox, roi_feats_mask, roi_feats_point, rect_feats = None, None, None, None
    # 1. Use the visual and spatial head to extract roi features.
    if self.with_visual_bbox:
        roi_feats_bbox = self.roi_forward(self.bbox_roi_layers, feats, union_rois, union_masks, roi_scale_factor)
    if self.with_visual_mask:
        roi_feats_mask = self.roi_forward(self.mask_roi_layers, feats, union_rois, union_masks, roi_scale_factor)
    if self.with_visual_point:
        roi_feats_point, trans_matrix, _ = self.pointFeatExtractor(torch.stack(union_points, dim=0).transpose(2, 1))
    # rect_feats: rasterize each box pair onto a (spatial_size, spatial_size) grid
    num_rel = len(rel_pair_index)
    dummy_x_range = torch.arange(self.spatial_size).to(rel_pair_index.device).view(1, 1, -1).expand(num_rel,
                                                                                                    self.spatial_size,
                                                                                                    self.spatial_size)
    dummy_y_range = torch.arange(self.spatial_size).to(rel_pair_index.device).view(1, -1, 1).expand(num_rel,
                                                                                                    self.spatial_size,
                                                                                                    self.spatial_size)
    size_list = [np.array(img_meta['img_shape'][:2]).reshape(1, -1) for img_meta in img_metas]
    img_input_sizes = np.empty((0, 2), dtype=np.float32)
    for img_id in range(len(rel_pair_idx)):
        # renamed from num_rel to avoid clobbering the batch total used above
        num_rel_i = len(rel_pair_idx[img_id])
        img_input_sizes = np.vstack((img_input_sizes, np.tile(size_list[img_id], (num_rel_i, 1))))
    img_input_sizes = torch.from_numpy(img_input_sizes).to(rois)
    # resize bbox to the scale rect_size; columns (1, 3) are x coordinates
    # (scaled by width = size[:, 1]), columns (2, 4) are y (scaled by height)
    head_proposals = head_rois.clone()
    head_proposals[:, 1::2] *= self.spatial_size / img_input_sizes[:, 1:2]
    head_proposals[:, 2::2] *= self.spatial_size / img_input_sizes[:, 0:1]
    tail_proposals = tail_rois.clone()
    tail_proposals[:, 1::2] *= self.spatial_size / img_input_sizes[:, 1:2]
    tail_proposals[:, 2::2] *= self.spatial_size / img_input_sizes[:, 0:1]
    head_rect = ((dummy_x_range >= head_proposals[:, 1].floor().view(-1, 1, 1).long()) &
                 (dummy_x_range <= head_proposals[:, 3].ceil().view(-1, 1, 1).long()) &
                 (dummy_y_range >= head_proposals[:, 2].floor().view(-1, 1, 1).long()) &
                 (dummy_y_range <= head_proposals[:, 4].ceil().view(-1, 1, 1).long())).float()
    # BUGFIX: tail_rect previously used column 2 (y1) as the x upper bound and
    # column 3 (x2) as the y lower bound. Use the same column layout as
    # head_rect: x in [col 1, col 3], y in [col 2, col 4].
    tail_rect = ((dummy_x_range >= tail_proposals[:, 1].floor().view(-1, 1, 1).long()) &
                 (dummy_x_range <= tail_proposals[:, 3].ceil().view(-1, 1, 1).long()) &
                 (dummy_y_range >= tail_proposals[:, 2].floor().view(-1, 1, 1).long()) &
                 (dummy_y_range <= tail_proposals[:, 4].ceil().view(-1, 1, 1).long())).float()
    rect_input = torch.stack((head_rect, tail_rect), dim=1)  # (num_rel, 2, rect_size, rect_size)
    rect_feats = self.spatial_conv(rect_input)
    # gather the different visual features and spatial features
    if self.separate_spatial:  # generally, it is False
        roi_feats_result = []
        for roi_feats, head in ((roi_feats_bbox, getattr(self, 'visual_bbox_head', None)),
                                (roi_feats_mask, getattr(self, 'visual_mask_head', None))):
            if head is not None:
                roi_feats_result.append(head(roi_feats.view(roi_feats.size(0), -1)))
        if self.num_visual_head > 1:
            if self.gather_visual == 'cat':
                roi_feats_result = torch.cat(roi_feats_result, dim=-1)
            elif self.gather_visual == 'sum':
                roi_feats_result = torch.stack(roi_feats_result).sum(0)
            elif self.gather_visual == 'prod':
                roi_feats_result = torch.stack(roi_feats_result).prod(0)
            else:
                raise NotImplementedError(
                    'The gathering operation {} is not implemented yet.'.format(self.gather_visual))
            roi_feats = self.gather_visual_head(roi_feats_result)
        else:
            roi_feats = roi_feats_result[0]
        # spatial features are returned separately instead of being fused
        roi_feats_spatial = self.spatial_head(rect_feats)
        if self.with_visual_point:
            return (roi_feats, roi_feats_spatial, roi_feats_point, trans_matrix)
        else:
            return (roi_feats, roi_feats_spatial)
    else:
        roi_feats_result = []
        for roi_feats, head in ((roi_feats_bbox, getattr(self, 'visual_bbox_head', None)),
                                (roi_feats_mask, getattr(self, 'visual_mask_head', None))):
            if head is not None:
                # fuse spatial rect features into the visual map before the head
                roi_feats_result.append(head((roi_feats + rect_feats).view(roi_feats.size(0), -1)))
        if self.num_visual_head > 1:
            if self.gather_visual == 'cat':
                roi_feats_result = torch.cat(roi_feats_result, dim=-1)
            elif self.gather_visual == 'sum':
                roi_feats_result = torch.stack(roi_feats_result).sum(0)
            elif self.gather_visual == 'prod':
                roi_feats_result = torch.stack(roi_feats_result).prod(0)
            else:
                raise NotImplementedError(
                    'The gathering operation {} is not implemented yet.'.format(self.gather_visual))
            roi_feats = self.gather_visual_head(roi_feats_result)
        else:
            roi_feats = roi_feats_result[0]
        if self.with_visual_point:
            return (roi_feats, roi_feats_point, trans_matrix)
        else:
            return (roi_feats,)
@force_fp32(apply_to=('feats',), out_fp16=True)
def forward(self, feats, img_metas, rois, rel_pair_idx=None, masks=None, points=None, roi_scale_factor=None):
    """Dispatch to union-pair or single-object RoI feature extraction.

    With no rois, returns an empty (0, fc_out_channels) tensor on the same
    device/dtype as feats[0]. When ``self.with_spatial`` is set, relation
    pair indices are required and the union branch is taken.
    """
    if rois.shape[0] == 0:
        # nothing to extract; keep the output shape contract for callers
        return torch.from_numpy(np.empty((0, self.fc_out_channels))).to(feats[0])
    if self.with_spatial:
        assert rel_pair_idx is not None
        return self.union_roi_forward(feats, img_metas, rois, rel_pair_idx, masks, points, roi_scale_factor)
    else:
        return self.single_roi_forward(feats, rois, masks, points, roi_scale_factor)
|
def DitBonjour():
    """Print the greeting "Bonjour" to stdout."""
    greeting = "Bonjour"
    print(greeting)
def DivBy2(x):
    """Return half of *x* (true division; integers yield floats)."""
    half = x / 2
    return half
|
import random
from os import path
from tempfile import TemporaryDirectory
import numpy as np
import torch
from deepsnap.batch import Batch
from deepsnap.dataset import GraphDataset
from graphgym.config import assert_cfg, cfg
from graphgym.model_builder import create_model
from graphgym.utils.device import auto_select_device
from PySpice.Spice.Netlist import Node
from PySpice.Spice.Parser import SpiceParser
from spice_completion.datasets import PrototypeLinkDataset
from spice_completion.datasets import helpers as h
from torch.utils.data import DataLoader
# Index of the SPICE Node type within the helper's component-type vocabulary.
SPICE_NODE_INDEX = h.component_types.index(Node)
# Directory containing this script; used to resolve bundled data files.
script_dir = path.dirname(path.realpath(__file__))
def local_file(name):
    """Resolve *name* relative to this script's own directory."""
    resolved = path.join(script_dir, name)
    return resolved
def without_lines(filename, fn):
    """Return the text of *filename* with every line matching *fn* removed."""
    with open(filename, "r") as handle:
        kept = (line for line in handle if not fn(line))
        return "".join(kept)
# Load config file, dropping lines that start with "param" — presumably
# graphgym search-space placeholders the cfg parser cannot read; TODO confirm.
config_str = without_lines(
    local_file("config.yaml"), lambda line: line.startswith("param")
)
config = cfg.load_cfg(config_str)
cfg.merge_from_other_cfg(config)
assert_cfg(cfg)
# Set Pytorch environment
torch.set_num_threads(cfg.num_threads)
out_dir_parent = cfg.out_dir
# Seed every RNG for reproducible inference.
random.seed(cfg.seed)
np.random.seed(cfg.seed)
torch.manual_seed(cfg.seed)
auto_select_device()
# Set learning environment: build the model and restore trained weights.
# dim_in=29 is the node feature width — presumably matches the dataset's
# feature encoding; verify against PrototypeLinkDataset.
model = create_model(dim_in=29, dim_out=2)
ckpt = torch.load(local_file("model.ckpt"))
model.load_state_dict(ckpt["model_state"])
# Normalization statistics saved at training time, applied to new inputs.
mean = np.load(local_file("mean.npy"))
stddev = np.load(local_file("stddev.npy"))
def node(index, components):
    """Return the component stored at position *index*."""
    selected = components[index]
    return selected
def component(index, edges):
    """Describe one component as a single-element list of {type, pins}."""
    pin_names = [edge.name for edge in edges]
    descriptor = {
        "type": h.component_index_name(index),
        "pins": pin_names,
    }
    return [descriptor]
def get_proto_node_edges(node_features):
    """Build candidate edges from every prototype node to every SPICE node.

    The last feature column flags prototype nodes; SPICE nodes are those with
    a nonzero entry at SPICE_NODE_INDEX. Returns a (2, P*S) long tensor of
    (proto_idx, spice_node_idx) pairs — the full cartesian product.
    """
    # (P, 1) indices of prototype-flagged rows
    proto_nodes = node_features[:, -1].nonzero()
    spice_nodes = (node_features[:, SPICE_NODE_INDEX].nonzero()).flatten()[
        0:-1
    ]  # last node is proto node (remove it!)
    # broadcast (P, 1) over (P, S) so each proto index repeats S times
    proto_idx = (
        torch.ones((proto_nodes.shape[0], spice_nodes.shape[0])) * proto_nodes
    ).flatten()
    # repeat the spice indices once per prototype to pair with proto_idx
    spice_node_idx = spice_nodes.repeat(proto_nodes.shape[0])
    return torch.stack([proto_idx.long(), spice_node_idx])
def interpret_results(edges, node_features, probs, components):
    """Turn per-edge link probabilities into component suggestions.

    Args:
        edges: (2, E) tensor of (proto_idx, node_idx) candidate pairs.
        node_features: node feature matrix; columns before the last one-hot
            encode the node type (values > 0.99 count as set).
        probs: per-edge probabilities aligned with *edges*.
        components: list of circuit components indexed by node id.

    Returns:
        List of (component_description, mean_probability) pairs, one per
        prototype, keeping only each prototype's top-2 most probable edges.
    """
    proto_ids = edges[0]
    # argmax-style recovery of each node's one-hot type column
    node_types = (node_features[:, 0:-1] > 0.99).nonzero()[:, 1]
    proto_edges = {}
    for (i, p_id) in enumerate(proto_ids):
        # type 0 means "unknown"; skip prototypes we cannot name
        is_unknown_type = node_types[p_id] == 0
        if is_unknown_type:
            continue
        node_id = edges[1][i].item()
        prob = probs[i].item()
        p_id = p_id.item()
        if p_id not in proto_edges:
            proto_edges[p_id] = []
        proto_edges[p_id].append((node_id, prob))
    # keep the two highest-probability edges per prototype; zip(*...) splits
    # the (node_id, prob) pairs into parallel (node_ids, probs) tuples
    proto_top_edges = [
        (k, zip(*sorted(edges, key=lambda e: -e[1])[0:2]))
        for (k, edges) in proto_edges.items()
    ]
    protos_w_probs = [
        (
            component(node_types[k], (node(e, components) for e in edges)),
            sum(probs) / len(probs),
        )
        for (k, (edges, probs)) in proto_top_edges
    ]
    return protos_w_probs
def analyze(circuit):
    """Serialize *circuit* to a temporary netlist file and analyze it."""
    with TemporaryDirectory() as tmp_dir:
        netlist_path = path.join(tmp_dir, "circuit.net")
        with open(netlist_path, "w") as out:
            out.write(str(circuit))
        return analyze_file(netlist_path, circuit)
def analyze_file(filename, circuit):
    """Run the trained link-prediction model on a netlist file.

    Builds a PrototypeLinkDataset from the single netlist, converts it to a
    deepsnap batch, scores every (prototype, spice-node) candidate edge, and
    returns component suggestions via ``interpret_results``.
    """
    netlists = [filename]
    # normalize with the training-time statistics loaded at module scope
    dataset = PrototypeLinkDataset(netlists, mean=mean, std=stddev, train=False)
    graphs = h.to_deepsnap(dataset)
    ds_dataset = GraphDataset(
        graphs,
        task=cfg.dataset.task,
        edge_train_mode=cfg.dataset.edge_train_mode,
        edge_message_ratio=cfg.dataset.edge_message_ratio,
        edge_negative_sampling_ratio=cfg.dataset.edge_negative_sampling_ratio,
        minimum_node_per_graph=0,
    )
    loader = DataLoader(
        ds_dataset, batch_size=1, collate_fn=Batch.collate(), pin_memory=False
    )
    # a single netlist means a single batch
    batch = next(iter(loader))
    batch.to(torch.device(cfg.device))
    # un-normalize so the type one-hots are readable by get_proto_node_edges
    node_features = dataset.unnormalize(batch.node_feature.cpu())
    batch.edge_label_index = get_proto_node_edges(node_features)
    logits, _ = model(batch)
    probs = torch.sigmoid(logits)
    return interpret_results(
        batch.edge_label_index, node_features, probs, h.components(circuit)
    )
if __name__ == "__main__":
    import json
    import sys
    # Usage: python <script> <netlist-file>; prints suggestions as JSON.
    filename = sys.argv[1]
    contents = next(h.valid_netlist_sources([filename]))
    parser = SpiceParser(source=contents)
    print(json.dumps(analyze_file(filename, parser.build_circuit())))
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Ramon van der Winkel.
# All rights reserved.
# Licensed under BSD-3-Clause-Clear. See LICENSE file for details.
from django.test import TestCase
from Functie.models import maak_functie
from NhbStructuur.models import NhbRegio, NhbVereniging
from Sporter.models import Sporter
from Wedstrijden.models import WedstrijdLocatie
from .models import KalenderWedstrijd
from TestHelpers.e2ehelpers import E2EHelpers
class TestKalenderMaand(E2EHelpers, TestCase):
    """ unit tests for the Kalender application """
    url_kalender = '/kalender/'
    url_kalender_maand = '/kalender/pagina-%s-%s/'  # year, month
    url_kalender_vereniging = '/kalender/vereniging/'
    url_kalender_info = '/kalender/%s/info/'        # wedstrijd_pk
    def setUp(self):
        """ test case initialisation: admin account, one sporter, two clubs """
        self.account_admin = self.e2e_create_account_admin()
        self.account_admin.is_BB = True
        self.account_admin.save()
        sporter = Sporter(
                    lid_nr=100000,
                    voornaam='Ad',
                    achternaam='de Admin',
                    geboorte_datum='1966-06-06',
                    sinds_datum='2020-02-02',
                    account=self.account_admin)
        sporter.save()
        # create a test club
        self.nhbver1 = NhbVereniging(
                            ver_nr=1000,
                            naam="Grote Club",
                            regio=NhbRegio.objects.get(regio_nr=112))
        self.nhbver1.save()
        self.functie_hwl = maak_functie('HWL Ver 1000', 'HWL')
        self.functie_hwl.nhb_ver = self.nhbver1
        self.functie_hwl.accounts.add(self.account_admin)
        self.functie_hwl.save()
        self.nhbver2 = NhbVereniging(
                            ver_nr=1001,
                            naam="Kleine Club",
                            regio=NhbRegio.objects.get(regio_nr=112))
        self.nhbver2.save()
    @staticmethod
    def _maak_externe_locatie(ver):
        # add an external location and attach it to the club
        locatie = WedstrijdLocatie(
                        baan_type='E',      # external location
                        naam='Test locatie')
        locatie.save()
        locatie.verenigingen.add(ver)
        return locatie
    def test_basic(self):
        # month as a number
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 1))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        self.assert_template_used(resp, ('kalender/overzicht-maand.dtl', 'plein/site_layout.dtl'))
        # illegal month numbers: below and above the valid 1..12 range
        resp = self.client.get(self.url_kalender_maand % (2020, 0))
        self.assert404(resp)
        # BUGFIX: this second request also used month 0, duplicating the
        # previous check; test the upper bound instead
        resp = self.client.get(self.url_kalender_maand % (2020, 13))
        self.assert404(resp)
        # month as text
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 'mrt'))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        # month as text
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 'maart'))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        # illegal month text
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 'xxx'))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        # illegal year
        # NOTE(review): this uses a perfectly valid year/month (2020, 'maart')
        # and so does not actually test an illegal year — confirm intent
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 'maart'))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        # wrap-around in December for 'next'
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 12))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
        # wrap-around in January for 'prev'
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_maand % (2020, 1))
        self.assertEqual(resp.status_code, 200)     # 200 = OK
        self.assert_html_ok(resp)
    def test_wedstrijd(self):
        # become HWL
        self.e2e_login_and_pass_otp(self.account_admin)
        self.e2e_wissel_naar_functie(self.functie_hwl)
        # create a match and session
        self._maak_externe_locatie(self.nhbver1)
        resp = self.client.post(self.url_kalender_vereniging, {'nieuwe_wedstrijd': 'ja'})
        self.assert_is_redirect(resp, self.url_kalender_vereniging)
        self.assertEqual(1, KalenderWedstrijd.objects.count())
        wedstrijd = KalenderWedstrijd.objects.all()[0]
        # accept the match so it is shown on the calendar
        wedstrijd.status = 'A'
        wedstrijd.save()
        self.client.logout()
        # fetch the month page that carries the match
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender)
        self.assertEqual(resp.status_code, 302)     # redirect to the right month page
        url = resp.url
        with self.assert_max_queries(20):
            resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assert_html_ok(resp)
        # cancel the match; the page must still render
        wedstrijd.status = 'X'
        wedstrijd.save()
        with self.assert_max_queries(20):
            resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assert_html_ok(resp)
    def test_info(self):
        # become HWL
        self.e2e_login_and_pass_otp(self.account_admin)
        self.e2e_wissel_naar_functie(self.functie_hwl)
        # create a match and session
        self._maak_externe_locatie(self.nhbver1)
        resp = self.client.post(self.url_kalender_vereniging, {'nieuwe_wedstrijd': 'ja'})
        self.assert_is_redirect(resp, self.url_kalender_vereniging)
        self.assertEqual(1, KalenderWedstrijd.objects.count())
        wedstrijd = KalenderWedstrijd.objects.all()[0]
        # fetch the info page of the match
        url = self.url_kalender_info % wedstrijd.pk
        with self.assert_max_queries(20):
            resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assert_html_ok(resp)
        # non-existent match
        with self.assert_max_queries(20):
            resp = self.client.get(self.url_kalender_info % 999999)
        self.assert404(resp)
        self.e2e_assert_other_http_commands_not_supported(url)
# end of file
|
from common.mongo.bson_c import json_util
from model import merchant
# Fetch the first 10 merchants that have a non-empty 'support' reference,
# populating support -> users and, nested, users.cityId -> cities
# (presumably $lookup-style joins; confirm against merchant.populates).
merchants = merchant.populates(filter={'support': {'$nin': [None, '']}},
                               projection={'_id': 1, 'name': 1, 'sname': 1, 'support': 1},
                               sort=[('_id', 1)],
                               skip=0,
                               limit=10,
                               pop_fields={
                                   'user': {
                                       'from': 'users',
                                       'local_field': 'support',
                                       'foreign_field': '_id',
                                       'projection': {'_id': 1, 'phone': 1},
                                       'pop_fields': {
                                           'city': {
                                               'from': 'cities',
                                               'local_field': 'cityId',
                                               'foreign_field': '_id',
                                               'projection': {'_id': 1, 'name': 1},
                                               'as': 'city'
                                           }
                                       }
                                   }
                               })
# json_util handles BSON types (ObjectId, dates) that plain json cannot
for m in merchants:
    print(json_util.dumps(m))
|
from random import randint
def key(_arg):
    """Return a uniformly random integer in [0, 2**64).

    Intended as a sort key that shuffles its input; the argument is ignored.
    Measured outgoing correlation between original and sorted order < 1%.
    Alternatives tried previously:
      * `hash(arg)` 17% correlation
      * `id(arg)`   98% correlation
    """
    upper = 2 ** 64
    return randint(0, upper - 1)
|
# Generated by Django 3.0.5 on 2020-06-06 23:36
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the obsolete NetworkBayesianRatingConfiguration model."""
    dependencies = [
        ('trainings', '0004_auto_20200601_2052'),
    ]
    operations = [
        # Irreversible: deletes the model's table and all its rows.
        migrations.DeleteModel(
            name='NetworkBayesianRatingConfiguration',
        ),
    ]
|
"""Include single scripts with doc string, code, and image
Use case
--------
There is an "examples" directory in the root of a repository,
e.g. 'include_doc_code_img_path = "../examples"' in conf.py
(default). An example is a file ("an_example.py") that consists
of a doc string at the beginning of the file, the example code,
and, optionally, an image file (png, jpg) ("an_example.png").
Configuration
-------------
In conf.py, set the parameter
fancy_include_path = "../examples"
to wherever the included files reside.
Usage
-----
The directive
.. fancy_include:: an_example.py
will display the doc string formatted with the first line as a
heading, a code block with line numbers, and the image file.
"""
import io
import os.path as op
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
from sphinx.util.nodes import nested_parse_with_titles
from docutils import nodes
class IncludeDirective(Directive):
    """Sphinx directive rendering an example script as heading + image + code.

    Expects the target file to start with a triple-quoted docstring; the first
    docstring line becomes a section heading.
    """
    required_arguments = 1   # the example filename, relative to the config path
    optional_arguments = 0
    def run(self):
        path = self.state.document.settings.env.config.fancy_include_path
        full_path = op.join(path, self.arguments[0])
        with io.open(full_path, "r") as myfile:
            text = myfile.read()
        # add reference label so other pages can :ref:`example_<name>`
        # ([:-3] strips the ".py" extension)
        name = op.basename(full_path)[:-3]
        rst = [".. _example_{}:".format(name),
               "",
               ]
        # add docstring: split on the triple quotes; source[1] is the
        # docstring body, source[2] everything after it (the code)
        source = text.split('"""')
        doc = source[1].split("\n")
        doc.insert(1, "~" * len(doc[0]))  # make title heading
        code = source[2].split("\n")
        for line in doc:
            rst.append(line)
        # image: use a sibling .png/.jpg with the same basename, if present
        for ext in [".png", ".jpg"]:
            image_path = full_path[:-3] + ext
            if op.exists(image_path):
                break
        else:
            image_path = ""
        if image_path:
            rst.append(".. figure:: {}".format(image_path))
            rst.append("")
        # download file link for the raw script
        rst.append(":download:`{}<{}>`".format(
            op.basename(full_path), full_path))
        # code block with line numbers; body must be indented under the directive
        rst.append("")
        rst.append(".. code-block:: python")
        rst.append("   :linenos:")
        rst.append("")
        for line in code:
            rst.append("   {}".format(line))
        rst.append("")
        vl = ViewList(rst, "fakefile.rst")
        # Create a node.
        node = nodes.section()
        node.document = self.state.document
        # Parse the rst.
        nested_parse_with_titles(self.state, vl, node)
        return node.children
def setup(app):
    """Register the fancy_include directive and its config value with Sphinx."""
    # register the directive first, then the path option it reads
    app.add_directive('fancy_include', IncludeDirective)
    app.add_config_value('fancy_include_path', "../examples", 'html')
    return {'version': '0.1'}  # identifies the version of our extension
|
from setuptools import setup
from setuptools import find_packages
# Package metadata for the gcncc distribution.
setup(name='gcncc',
      version='0.2',
      description='Graph Convolutional Network for Clustering and Classification',
      author='Omar Maddouri',
      author_email='omar.maddouri@gmail.com',
      url='https://github.com/omarmaddouri',
      download_url='...',
      license='MIT',
      install_requires=['keras'],
      extras_require={
          # BUGFIX: 'json' was listed here, but json is part of the standard
          # library and has no PyPI distribution, so
          # `pip install gcncc[model_saving]` would fail resolving it.
          'model_saving': ['h5py'],
      },
      package_data={'gcncc': ['README.md']},
      packages=find_packages())
from .metrics import precision_at_k, dcg_score_at_k, ndcg_score_at_k
# Importing from the submodule also binds `metrics` as an attribute of this
# package; drop it so only the three functions are part of the public API.
# NOTE(review): `del metrics` only succeeds when this file is the package's
# __init__.py — confirm.
del metrics
|
# Django settings used only for running the test suite (in-memory sqlite,
# logging disabled, throwaway SECRET_KEY).
import logging
import os
import sys
# Repository root: two directory levels above this file.
PROJECT_ROOT = os.path.abspath(os.path.split(os.path.split(__file__)[0])[0])
# silence all log output during test runs
logging.disable(logging.CRITICAL)
ROOT_URLCONF = 'urls'
STATIC_URL = '/static/'
STATIC_ROOT = '%s/staticserve' % PROJECT_ROOT
STATICFILES_DIRS = (
    ('global', '%s/static' % PROJECT_ROOT),
)
UPLOADS_DIR_NAME = 'uploads'
MEDIA_URL = '/%s/' % UPLOADS_DIR_NAME
MEDIA_ROOT = os.path.join(PROJECT_ROOT, '%s' % UPLOADS_DIR_NAME)
# environment flags; only IS_TEST is computed, from the command line
IS_DEV = False
IS_STAGING = False
IS_PROD = False
IS_TEST = 'test' in sys.argv or 'test_coverage' in sys.argv
# in-memory sqlite keeps the test database fast and disposable
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
MIDDLEWARE = [
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware'
]
# Adding for backwards compatibility for Django 1.8 tests
MIDDLEWARE_CLASSES = MIDDLEWARE
TEMPLATE_CONTEXT_PROCESSORS = [
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.static',
    "allauth.account.context_processors.account",
    "allauth.socialaccount.context_processors.socialaccount",
]
# avoid deprecation warnings during tests
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            # insert your TEMPLATE_DIRS here
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': TEMPLATE_CONTEXT_PROCESSORS,
        },
    },
]
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.SessionAuthentication',
        'dj_rest_auth.jwt_auth.JWTCookieAuthentication',
    )
}
# emit JUnit-style XML results into test-results/
TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
TEST_OUTPUT_DIR = 'test-results'
INSTALLED_APPS = [
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.humanize',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.sitemaps',
    'django.contrib.staticfiles',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.facebook',
    'allauth.socialaccount.providers.twitter',
    'rest_framework',
    'rest_framework.authtoken',
    'dj_rest_auth',
    'dj_rest_auth.registration',
    'rest_framework_simplejwt.token_blacklist'
]
# test-only key; never use a hard-coded SECRET_KEY in production settings
SECRET_KEY = "38dh*skf8sjfhs287dh&^hd8&3hdg*j2&sd"
ACCOUNT_ACTIVATION_DAYS = 1
SITE_ID = 1
AUTHENTICATION_BACKENDS = (
    # Needed to login by username in Django admin, regardless of `allauth`
    'django.contrib.auth.backends.ModelBackend',
    # `allauth` specific authentication methods, such as login by e-mail
    'allauth.account.auth_backends.AuthenticationBackend',
)
|
from django.shortcuts import render
# Create your views here.
from django.views.decorators.cache import cache_page
from services.blog.blog_service import BlogService
from services.blog.category_service import CategoryService
# @cache_page(300)
def index(request):
    """
    Home page: renders top-level categories plus the newest and hottest blogs.
    :param request: the incoming HttpRequest
    :return: rendered index.html response
    """
    # each service returns (result, message); messages are currently unused
    top_categorys, msg = CategoryService.get_top_category()
    new_blogs, msg1 = BlogService.get_new_blog()
    hot_blogs, msg2 = BlogService.get_hot_blog()
    params = dict(top_categorys=top_categorys, new_blogs=new_blogs, hot_blogs=hot_blogs)
    return render(request, 'index.html', params)
|
from unittest import TestCase
from .problem_6_18_spiral_ordering import *
class TestSolution(TestCase):
    """Unit tests for the spiral-ordering helpers and the full solution."""
    def testTopPrinting(self):
        # top row of a 1x3 matrix, left to right
        self.assertEqual([1, 2, 3], print_top([[1, 2, 3]], 0, 0, 3))
    def testBottomPrinting(self):
        # bottom row, right to left
        self.assertEqual([3, 2, 1], print_bottom([[1, 2, 3]], 0, 0, 3))
    def testRightPrinting(self):
        self.assertEqual([3, 6], print_right_side([[1, 2, 3], [4, 5, 6]], 0, 2, 2))
    def testRightPrinting2(self):
        # NOTE(review): identical call to testRightPrinting but a different
        # expected value — one of these expectations looks wrong; confirm
        self.assertEqual([6], print_right_side([[1, 2, 3], [4, 5, 6]], 0, 2, 2))
    def testLeftPrinting(self):
        self.assertEqual([4], print_left_side([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 0, 2, 0))
    def testRecursion(self):
        # full spiral traversal of a 3x3 matrix
        array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        self.assertEqual([1, 2, 3, 6, 9, 8, 7, 4, 5], solution(array))
    def testRecursion2(self):
        # full spiral traversal of a 4x4 matrix
        array = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]
        self.assertEqual([1, 2, 3, 4, 8, 12, 16, 15, 14, 13, 9, 5, 6, 7, 11, 10], solution(array))
|
"""
@author: Deniz Altinbuken, Emin Gun Sirer
@note: Queue proxy
@copyright: See LICENSE
"""
from concoord.clientproxy import ClientProxy
class Queue:
    """Client-side queue proxy: every operation is delegated to a ConCoord
    replica set through a ClientProxy, so method calls block until the
    replicated command completes."""
    def __init__(self, bootstrap, timeout=60, debug=False, token=None):
        # bootstrap names the replica(s) to connect to; all other arguments
        # are forwarded verbatim to ClientProxy
        self.proxy = ClientProxy(bootstrap, timeout, debug, token)
    def __concoordinit__(self):
        # re-initialize the replicated object on the server side
        return self.proxy.invoke_command('__init__')
    def append(self, item):
        """Append *item* to the replicated queue."""
        return self.proxy.invoke_command('append', item)
    def remove(self):
        """Remove and return the next element from the replicated queue."""
        return self.proxy.invoke_command('remove')
    def get_size(self):
        """Return the number of elements in the replicated queue."""
        return self.proxy.invoke_command('get_size')
    def get_queue(self):
        """Return the full contents of the replicated queue."""
        return self.proxy.invoke_command('get_queue')
    def __str__(self):
        return self.proxy.invoke_command('__str__')
|
from selenium import webdriver
from bs4 import BeautifulSoup
import re
from tmdbv3api import TMDb, TV, Season, Movie
from module.api_key import MY_API_KEY
# Module-wide TMDb client, authenticated with the user's API key.
tmdb = TMDb()
tmdb.api_key = MY_API_KEY
# set to 1 for verbose tracing throughout this module
debug=0
def get_ep_list(id, season):
    """Return a dict {episode_number: sanitized_title} for one season of a
    TMDb show, or None if *id* is falsy or the show/season is not found."""
    if not id:
        return None
    ep_dict = dict()
    season_api = Season()
    # Characters that are illegal in file names, each expected with
    # surrounding spaces. FIX: raw string so `\\` matches a literal backslash
    # (the old non-raw literal produced a broken escape); compiled once,
    # outside the loop.
    illegal_char = re.compile(r"( \< | \> | \: | \" | \/ | \\ | \| | \? | \*)")
    try:
        # FIX: fetch the season details once instead of twice (two API calls)
        details = season_api.details(id, season)
        tv = TV()
        name = tv.details(id)['name']
        for ep in details['episodes']:
            nb_ep = int(ep['episode_number'])
            # fall back to a generic title when TMDb has none
            if ep['name']:
                title = ep['name']
            else:
                title = 'Episode ' + str(nb_ep)
            if debug:
                print(ep['name'])
                print('title ', title)
            # FIX: the old code first built a plain-concatenation `res` and
            # immediately overwrote it; only the formatted version is kept
            res = "{tv_name} - S{s_nb:02}E{ep_nb:02} - {tv_title}".format(tv_name=name, s_nb=int(season), ep_nb=int(nb_ep), tv_title=title[:30])
            if debug:
                print('*'*20, '\n', res, '\n', '*'*20, '\n')
            title = illegal_char.sub(" ", title)
            ep_dict.setdefault(nb_ep, title)
            if debug:
                print(ep_dict)
    except IndexError as error:
        # NOTE(review): only IndexError is handled; network/API errors from
        # tmdbv3api would still propagate — confirm that is intended
        print("Tv Show Not Found - error : ", error)
        return None
    return ep_dict
def get_best_matched_id(tv_show_name):
    """Return the TMDb id of the best match for *tv_show_name*, or None."""
    tv = TV()
    # FIX: search once and reuse the result (the old code issued the same
    # network query twice)
    results = tv.search(tv_show_name)
    if results:
        return results[0]['id']
    return None
def get_best_matched_name(tv_show_name):
    """Return the TMDb display name of the best match, or None if no match."""
    tv = TV()
    # FIX: the old code indexed search(...)[0] BEFORE checking the result was
    # non-empty, raising IndexError for unknown shows; it also searched twice.
    results = tv.search(tv_show_name)
    if results:
        return results[0]['name']
    return None
def mvdbquery_str(tv_show_name):
    """Return the MovieDB search URL for *tv_show_name*.

    NOTE(review): relies on a module-level ``base`` URL that is not defined
    in this file — confirm it is provided elsewhere.
    """
    words_lower = [word.lower() for word in tv_show_name.split()]
    if debug:
        for word in words_lower:
            print("Word ", word)
    # FIX: the manual loop that appended word+'+' and then the last word is
    # exactly "+".join, which also no longer crashes on an empty name
    query_str = base + """/search/tv?query=""" + "+".join(words_lower)
    if debug:
        print("Query str : ", query_str)
    return query_str
if __name__=="__main__":
    if debug:
        # smoke-test against a fixed set of shows, including invalid names
        set_w = ["86 Eighty Six",
                 "Walking Dead",
                 "Kaguya Sama",
                 "Lucifer",
                 "Flash",
                 "Non-existing show",
                 "Aie Caramba",
                 "Shadow-test"
                 ]
        # set_w = ["Walking Dead"]
        for tv_show in set_w:
            # called twice — presumably to observe API/caching behaviour; confirm
            print(get_ep_list(get_best_matched_id(tv_show), 1))
            print(get_ep_list(get_best_matched_id(tv_show), 1))
    else:
        # interactive mode: prompt for a show and season number
        tv_show = str(input("Type the TV Show Name : "))
        s = int(input("Type Season : "))
        print("TV Show : ", tv_show, ", season : ", s)
        print(get_best_matched_name(tv_show))
        get_ep_list(get_best_matched_id(tv_show), s)
|
# Copyright (C) [2021] by Cambricon, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# pylint: disable=missing-docstring, invalid-name, too-many-locals
"""A multi-platform code link example test for BANGPy TCP."""
import numpy as np
import pytest
import bangpy
from bangpy import tcp
from bangpy.common import utils, load_op_by_type
from bangpy.platform.bang_config import ALIGN_LENGTH, TARGET
from bangpy.tcp.runtime import TaskType
from add import DTYPES, KERNEL_NAME, TARGET_LIST
@pytest.mark.parametrize(
    "shape",
    [
        (2048,),
        (4096,),
        (6144,),
    ],
)
@pytest.mark.parametrize(
    "dtype", DTYPES,
)
def test_add(target, shape, dtype):
    """Check the BANGPy element-wise add kernel against NumPy on device 0."""
    # skip silently on platforms the kernel was not built for
    if target not in TARGET_LIST:
        return
    data_in0 = np.random.uniform(low=-10, high=10, size=shape)
    data_in1 = np.random.uniform(low=-10, high=10, size=shape)
    # reference result, computed after casting to the kernel's dtype
    data_out = data_in0.astype(dtype.as_numpy_dtype) + data_in1.astype(
        dtype.as_numpy_dtype
    )
    dev = bangpy.device(0)
    # set I/O data
    data_in0_dev = bangpy.Array(data_in0.astype(dtype.as_numpy_dtype), dev)
    data_in1_dev = bangpy.Array(data_in1.astype(dtype.as_numpy_dtype), dev)
    data_out_dev = bangpy.Array(np.zeros(data_out.shape, dtype.as_numpy_dtype), dev)
    # load the pre-built kernel for this dtype and launch it
    f1 = load_op_by_type(KERNEL_NAME, dtype.name)
    f1(data_in0_dev, data_in1_dev, data_out_dev)
    bangpy.assert_allclose(
        data_out_dev.numpy(), data_out.astype(dtype.as_numpy_dtype)
    )
|
# Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import torch
from parameterized import parameterized
from monai.transforms import AffineGrid
# Each case: (AffineGrid constructor kwargs, call kwargs, expected grid).
# The expected value's container type (np.ndarray vs torch.Tensor) must match
# the "as_tensor_output" flag; numeric values were captured from a reference run.
TEST_CASES = [
    [
        {"as_tensor_output": False, "device": torch.device("cpu:0")},
        {"spatial_size": (2, 2)},
        np.array([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1.0, 1.0], [1.0, 1.0]]]),
    ],
    [
        {"as_tensor_output": True, "device": None},
        {"spatial_size": (2, 2)},
        torch.tensor([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1.0, 1.0], [1.0, 1.0]]]),
    ],
    # With an explicit identity "grid" and no transform params, the grid
    # passes through unchanged (only the container type may convert).
    [{"as_tensor_output": False, "device": None}, {"grid": np.ones((3, 3, 3))}, np.ones((3, 3, 3))],
    [{"as_tensor_output": True, "device": torch.device("cpu:0")}, {"grid": np.ones((3, 3, 3))}, torch.ones((3, 3, 3))],
    [{"as_tensor_output": False, "device": None}, {"grid": torch.ones((3, 3, 3))}, np.ones((3, 3, 3))],
    [
        {"as_tensor_output": True, "device": torch.device("cpu:0")},
        {"grid": torch.ones((3, 3, 3))},
        torch.ones((3, 3, 3)),
    ],
    # 2D rotate + scale applied to a ones grid.
    [
        {
            "rotate_params": (1.0, 1.0),
            "scale_params": (-20, 10),
            "as_tensor_output": True,
            "device": torch.device("cpu:0"),
        },
        {"grid": torch.ones((3, 3, 3))},
        torch.tensor(
            [
                [[-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, -19.2208]],
                [[-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264]],
                [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
            ]
        ),
    ],
    # 3D rotate + scale applied to a ones grid.
    [
        {
            "rotate_params": (1.0, 1.0, 1.0),
            "scale_params": (-20, 10),
            "as_tensor_output": True,
            "device": torch.device("cpu:0"),
        },
        {"grid": torch.ones((4, 3, 3, 3))},
        torch.tensor(
            [
                [
                    [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],
                    [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],
                    [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],
                ],
                [
                    [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],
                    [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],
                    [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],
                ],
                [
                    [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],
                    [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],
                    [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],
                ],
                [
                    [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],
                    [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],
                    [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],
                ],
            ]
        ),
    ],
]
class TestAffineGrid(unittest.TestCase):
    """Check AffineGrid values and the tensor-vs-ndarray return type."""

    @parameterized.expand(TEST_CASES)
    def test_affine_grid(self, input_param, input_data, expected_val):
        transform = AffineGrid(**input_param)
        actual = transform(**input_data)
        # The output container type must match the expectation exactly.
        self.assertEqual(torch.is_tensor(actual), torch.is_tensor(expected_val))
        if torch.is_tensor(actual):
            actual = actual.cpu().numpy()
            expected_val = expected_val.cpu().numpy()
        np.testing.assert_allclose(actual, expected_val, rtol=1e-4, atol=1e-4)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
from PyQt5.QtWidgets import QDialog
from UI.Layouts.EvolutionParametersDialog import Ui_EvolutionParametersDialog
class EvolutionParametersDialog(QDialog):
    """Modal dialog collecting the parameters of an evolutionary run."""

    def __init__(self, parent = None):
        QDialog.__init__(self, parent)
        self.ui = Ui_EvolutionParametersDialog()
        self.ui.setupUi(self)

    def getParentCount(self):
        """Number of parents per generation."""
        return self.ui.spinBoxParentCount.value()

    def getDepthSearch(self):
        """Whether depth search is enabled."""
        return bool(self.ui.radioButtonDepthSearchYes.isChecked())

    def getMutationMethod(self):
        """Name of the selected mutation method."""
        return self.ui.comboBoxMutator.currentText()

    def getProbability(self):
        """Mutation probability as a fraction in [0, 1]."""
        return (self.ui.spinBoxProbability.value() / 100)

    def getElitismFactor(self):
        """Elitism factor from the spin box."""
        return self.ui.spinBoxElitism.value()

    def getTournamentSize(self):
        """Tournament size from the spin box."""
        return self.ui.spinBoxTournamentSize.value()

    @staticmethod
    def getEvolutionParameters(parent = None):
        """Show the dialog and return a 7-tuple:
        (parentCount, depthSearch, mutationMethod, probability,
         elitismFactor, tournamentSize, accepted).
        """
        evolutionParametersDialog = EvolutionParametersDialog(parent)
        accepted = evolutionParametersDialog.exec_()
        if accepted == QDialog.Rejected:
            # BUG FIX: the rejected path previously returned a 6-tuple while
            # the accepted path returned a 7-tuple, breaking uniform
            # unpacking by callers.
            return (None, None, None, None, None, None, False)
        parentCount = evolutionParametersDialog.getParentCount()
        depthSearch = evolutionParametersDialog.getDepthSearch()
        mutationMethod = evolutionParametersDialog.getMutationMethod()
        probability = evolutionParametersDialog.getProbability()
        elitismFactor = evolutionParametersDialog.getElitismFactor()
        tournamentSize = evolutionParametersDialog.getTournamentSize()
        return (parentCount, depthSearch, mutationMethod, probability, elitismFactor, tournamentSize, True)
|
from datasets.decorators import OrderDataset
from datasets.utils import FullDatasetBase
from torchvision.datasets import cifar
from torchvision import transforms
from torch.utils.data import Dataset
from torch.utils.data.dataset import Subset
class CIFAR100(FullDatasetBase):
    """CIFAR-100 dataset description: normalization stats, shape and loaders."""

    mean = (0.5071, 0.4865, 0.4409)
    std = (0.2673, 0.2564, 0.2762)
    img_shape = (3, 32, 32)
    num_classes = 100
    name = "cifar100"

    def gen_train_transforms(self):
        """Training pipeline: random crop + horizontal flip before the test transforms."""
        base_transform, target_tf = self.gen_test_transforms()
        augmented = transforms.Compose([
            transforms.RandomCrop(32, padding=4, fill=128),
            transforms.RandomHorizontalFlip(),
            base_transform,
        ])
        return augmented, target_tf

    def gen_train_datasets(self, transform=None, target_transform=None) -> Dataset:
        """Training split (downloaded on demand)."""
        return cifar.CIFAR100(root="~/.cache/torch/data", train=True, download=True,
                              transform=transform, target_transform=target_transform)

    def gen_val_datasets(self, transform=None, target_transform=None) -> Dataset:
        """Validation split; reuses the official test split."""
        return cifar.CIFAR100(root="~/.cache/torch/data", train=False, download=True,
                              transform=transform, target_transform=target_transform)

    def gen_test_datasets(self, transform=None, target_transform=None) -> Dataset:
        """Test split."""
        return cifar.CIFAR100(root="~/.cache/torch/data", train=False, download=True,
                              transform=transform, target_transform=target_transform)

    @staticmethod
    def is_dataset_name(name: str):
        """Match spellings such as 'cifar100', 'CIFAR-100', 'Cifar_100'."""
        import re
        return re.match("(Cifar|cifar|CIFAR)([-_])*100$", name)
class OrderedCIFAR100VAL(CIFAR100):
    """CIFAR-100 variant carving a validation set out of the training split:
    samples 0..39999 become the train set, 40000..49999 the validation set,
    both wrapped in OrderDataset."""

    name = "cifar100_withval"

    def _ordered_train_subset(self, indices, transform, target_transform):
        # Shared loader for both splits of the (ordered) training data.
        base = cifar.CIFAR100(root="~/.cache/torch/data", train=True, download=True,
                              transform=transform, target_transform=target_transform)
        return Subset(OrderDataset(base), indices)

    def gen_train_datasets(self, transform=None, target_transform=None) -> Dataset:
        return self._ordered_train_subset(list(range(40000)), transform, target_transform)

    def gen_val_datasets(self, transform=None, target_transform=None) -> Dataset:
        return self._ordered_train_subset(list(range(40000, 50000)), transform, target_transform)

    @staticmethod
    def is_dataset_name(name: str):
        """Match spellings such as 'ocifar100val', 'OrderCIFAR-100VAL'."""
        import re
        return re.match("(Order|order|o|O)(Cifar|cifar|CIFAR)([-_])*100(val|VAL)$", name)
|
# coding: utf-8
import os
# JSON template: taken from the DEFAULT_TEMPLATE env var, falling back to a
# built-in default when the variable is unset or empty.
DEFAULT_TEMPLATE = os.environ.get("DEFAULT_TEMPLATE") or '''{"id": $count, "ts": "$ts"}'''
|
"""Data model and functions for Tapis profiles
"""
from tapis_cli.commands.taccapis.v2 import SERVICE_VERSION
from tapis_cli.commands.taccapis import TapisModel
from tapis_cli.display import Verbosity
from tapis_cli.search import argtype, argmod
__all__ = ['Profile', 'API_NAME', 'SERVICE_VERSION']
API_NAME = 'profiles'
class Profile(TapisModel):
    """Model of a Tapis user profile.

    Declares, via SEARCH_ARGS, which profile fields are displayed at each
    verbosity level and which of them are searchable.
    """
    # Identifier type used by the profiles service.
    service_id_type = 'User'
    SEARCH_ARGS = [
        # JSON_field, type, verbosity, mods_allowed, default_mod, choices, override_option, searchable
        ("first_name", argtype.STRING, Verbosity.BRIEF, [argmod.EQUALS],
         argmod.DEFAULT, None, None, True),
        ("last_name", argtype.STRING, Verbosity.BRIEF, [argmod.EQUALS],
         argmod.DEFAULT, None, None, True),
        ("full_name", argtype.STRING, Verbosity.RECORD_VERBOSE,
         [argmod.EQUALS], argmod.DEFAULT, None, None, False),
        ("email", argtype.STRING, Verbosity.BRIEF, [argmod.EQUALS],
         argmod.DEFAULT, None, None, True),
        ("mobile_phone", argtype.STRING, Verbosity.RECORD, [argmod.EQUALS],
         argmod.DEFAULT, None, None, False),
        ("phone", argtype.STRING, Verbosity.RECORD, [argmod.EQUALS],
         argmod.DEFAULT, None, None, False),
        ("username", argtype.STRING, Verbosity.BRIEF, [argmod.EQUALS],
         argmod.DEFAULT, None, None, True),
        ("uid", argtype.INTEGER, Verbosity.RECORD_VERBOSE, [argmod.EQUALS],
         argmod.DEFAULT, None, None, False),
        ("nonce", argtype.STRING, Verbosity.RECORD_VERBOSE, [argmod.EQUALS],
         argmod.DEFAULT, None, None, False),
        ("status", argtype.STRING, Verbosity.RECORD_VERBOSE, [argmod.EQUALS],
         argmod.DEFAULT, None, None, False),
        ("create_time", argtype.DATETIME, Verbosity.RECORD_VERBOSE,
         [argmod.EQUALS], argmod.DEFAULT, None, None, False),
    ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 19 19:05:05 2022
@author: Alexander Southan
"""
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from pyRandomWalk import random_walk
# Circular box confining the random walks: radius r, centre (x_c, y_c).
limits = {'r': 0.75, 'x_c': 0.5, 'y_c': 0.5}
# Generate two 200-step random walks confined to the circle.
random_walks = random_walk(
    step_number=200, number_of_walks=2, limits=limits, wall_mode='exclude',
    step_length=0.5, box_shape='circle')
# Query the coordinate sets used for the plots.
all_coords = random_walks.get_coords('all')
walk_coords = random_walks.get_coords('walk_points')
end_coords = random_walks.get_coords('end_points')
# Plot: end points in green, walk points and path in black/grey per walk.
fig1, ax1 = plt.subplots()
for curr_walk in range(random_walks.number_of_walks):
    # NOTE(review): ms='10' passes the marker size as a string; presumably
    # matplotlib coerces it -- confirm, otherwise use ms=10.
    ax1.plot(end_coords[curr_walk][:, 0], end_coords[curr_walk][:, 1],
             ls='none', marker='o', ms='10', c='g')
    ax1.plot(walk_coords[curr_walk][:, 0], walk_coords[curr_walk][:, 1],
             ls='none', marker='o', c=['k', 'grey'][curr_walk % 2])
    ax1.plot(all_coords[curr_walk][:, 0], all_coords[curr_walk][:, 1],
             c=['k', 'grey'][curr_walk % 2])
ax1.set_xlim([-0.6, 1.6])
ax1.set_ylim([-0.6, 1.6])
# Draw the confining circle as a dashed outline.
box = patches.Circle((limits['x_c'], limits['y_c']), radius=limits['r'],
                     linewidth=1, edgecolor='k', facecolor='none', ls='--')
ax1.add_patch(box)
ax1.set_aspect('equal', adjustable='box')
fig1.set_facecolor('grey')
fig1.savefig('plot walks with circular wall exclusion.png', dpi=600)
|
# -*- coding: utf-8 -*-
"""
Useful utility decorators.
"""
class renamed:
    """Decorator marking a function as renamed; calling the old name warns
    and delegates to the new function.

    ```
    @pg.renamed(newname, '1.2')
    def oldname(args, kwargs):
        pass
    ```

    Parameters
    ----------
    newFunc : callable
        Function that replaces the decorated one.
    removed : str
        Version in which the old name will be removed.
    """
    def __init__(self, newFunc, removed=''):
        self.newFunc = newFunc
        self.removed = removed

    def __call__(self, func):
        import functools

        # FIX: preserve the old function's name/docstring so the warning
        # message and introspection tools see the decorated name.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            import pygimli as pg
            pg.warning(func.__name__ + ' had been renamed to ' +
                       self.newFunc.__name__ +
                       ' and will be removed in: ' + self.removed)
            return self.newFunc(*args, **kwargs)
        return wrapper
import functools
def singleton(cls):
    """Turn *cls* into a singleton: every call yields the same instance."""
    @functools.wraps(cls)
    def get_instance(*args, **kwargs):
        # Lazily create the single instance on the first call only.
        if get_instance.instance is None:
            get_instance.instance = cls(*args, **kwargs)
        return get_instance.instance
    get_instance.instance = None
    return get_instance
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from mailing.utils import html_to_text
class HtmlToTextTestCase(TestCase):
    """Tests for mailing.utils.html_to_text: tag stripping, attribute
    handling, link/image rendering, script removal, newline preservation
    and malformed-HTML tolerance."""

    def test_no_html(self):
        html = "Ceci est du HTML sans balises"
        text = "Ceci est du HTML sans balises"
        self.assertEqual(html_to_text(html), text)

    def test_simple_html(self):
        html = "<p>Ceci est du <b>HTML</b> simple</p>"
        text = "Ceci est du HTML simple"
        self.assertEqual(html_to_text(html), text)

    def test_html_with_attributes(self):
        # Link hrefs are kept, rendered in parentheses after the anchor text.
        html = (
            '<p style="line-height:20px;color:blue">Ceci est du '
            '<a href="http://example.com/" target=_blank>HTML</a>.</p>'
        )
        text = (
            "Ceci est du HTML (http://example.com/)."
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_html_with_attributes_and_apostrophe(self):
        # Same as above but with single-quoted attribute values.
        html = (
            "<p style='line-height:20px;color:blue'>Ceci est du "
            "<a href='http://example.com/' target=_blank>HTML</a>.</p>"
        )
        text = (
            'Ceci est du HTML (http://example.com/).'
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_linefeed(self):
        # Newlines between block elements are preserved as-is.
        html = (
            "<p>Ceci\n"
            "est un paragraphe</p>\n\n"
            "<p>Ceci est un autre paragraphe</p>\n"
            "<p>Un autre paragraphe</p>\n\n\n"
            "<p>Et encore un autre paragraphe</p>"
        )
        text = (
            "Ceci\n"
            "est un paragraphe\n\n"
            "Ceci est un autre paragraphe\n"
            "Un autre paragraphe\n\n\n"
            "Et encore un autre paragraphe"
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_script_tag(self):
        # Script contents are dropped entirely.
        html = (
            "<p>Ceci est un paragraphe</p>\n\n"
            "<script>Et ceci est un script</script>"
        )
        text = (
            "Ceci est un paragraphe\n\n"
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_script_tag_with_atributes(self):
        # Nested/attributed script tags are also dropped.
        html = (
            "<p>Ceci est un paragraphe</p>\n\n"
            "<script type='text/javascript'>console.log('Et ceci est un "
            "<script>script</script>');</script>\n\n"
            "<p>Un autre paragraphe</p>"
        )
        text = (
            "Ceci est un paragraphe\n\n\n\n"
            "Un autre paragraphe"
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_picture(self):
        # Images render as their alt text (empty when no alt attribute).
        html = (
            '<p>Une image : <img src="https://example.com/example.jpg" '
            'alt="Example"></p>\n\n'
            '<p>Une autre image : <img '
            'src="https://example.com/example.png"/></p>\n\n'
            '<a href="https://example.com/my-ads"><img alt="Voir mes '
            'annonces" src="https://example.com/view-ads.png"></a>\n\n'
            '<img alt="Toto" />'
        )
        text = (
            "Une image : Example\n\n"
            "Une autre image : \n\n"
            "Voir mes annonces (https://example.com/my-ads)\n\n"
            "Toto"
        )
        self.assertMultiLineEqual(html_to_text(html), text)

    def test_wrong_html(self):
        # Improperly nested tags must not break conversion.
        html = "<p>Coucou <b>ça <i>va</b> ?</i>"
        text = "Coucou ça va ?"
        self.assertEqual(html_to_text(html), text)

    def test_link_without_anchor(self):
        html = "<a>Je suis un faux lien</a>"
        text = "Je suis un faux lien"
        self.assertEqual(html_to_text(html), text)

    def test_link_equal_text(self):
        # No duplicate URL when the anchor text equals the href.
        html = "<a href='https://github.com/'>https://github.com/</a>"
        text = "https://github.com/"
        self.assertEqual(html_to_text(html), text)
|
import streamlit as st
# SETTING PAGE CONFIG TO WIDE MODE
#st.set_page_config(layout="wide")
#lottie_book = load_lottieurl('https://assets4.lottiefiles.com/temp/lf20_aKAfIn.json')
def load_page(df_metadata_complete):
    """Render the Exploratory Data Analysis page header.

    NOTE(review): `st.beta_columns` is the pre-1.0 Streamlit API, so this
    file presumably targets an old Streamlit release -- confirm before
    upgrading (the modern call is `st.columns`).
    `df_metadata_complete` is currently unused in this visible portion.
    """
    # Narrow spacer columns pad the centered content column.
    row1_spacer1, row1_1, row1_spacer2 = st.beta_columns((0.01, 3.2, 0.01))
    with row1_1:
        st.markdown("# 📊 Exploratory Data Analysis")
# Angold4 20200613
import Complexity
def normal_power(x, n):
    """Compute x**n by repeated multiplication.

    Complexity: O(n) recursive calls.

    Raises:
        ValueError: if n is negative (previously this recursed until
        RecursionError).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    if n == 0:
        return 1
    else:
        return x * normal_power(x, n-1)
def power(x, n):
    """Compute x**n by recursive repeated squaring.

    Complexity: O(log n) recursive calls.

    Raises:
        ValueError: if n is negative (previously this recursed until
        RecursionError, since -1 // 2 == -1).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    if n == 0:
        return 1
    else:
        partial = power(x, n // 2)
        result = partial * partial
        if n % 2 == 1:
            # Odd exponent: one extra factor of x.
            result *= x
        return result
if __name__ == "__main__":
    from time import time

    # Compare the wall-clock time of the two implementations.
    start_time = time()
    print(power(5, 24))
    end_time = time()
    # BUG FIX: elapsed time is end - start (it was printed negated before).
    print(end_time - start_time)

    start_time = time()
    print(normal_power(5, 24))
    end_time = time()
    print(end_time - start_time)

    """
    Sample output:
    59604644775390625
    1.0967254638671875e-05
    59604644775390625
    7.152557373046875e-06
    """

    # Empirically measure and plot the growth of both implementations.
    np = Complexity.Complexity(normal_power)
    np.set_first_element(2)
    np.set_second_element(1)
    np.set_test_range(1, 300, 1)
    np.average(10000)
    np.draw()

    p = Complexity.Complexity(power)
    p.set_first_element(2)
    p.set_second_element(1)
    p.set_test_range(1, 300, 1)
    p.average(10000)
    p.draw()

    Complexity.Compare(np, p)
|
from flask import Flask, render_template, Response
import datetime
from Camera import VideoCamera
# Flask application serving the KittyTalk page and the MJPEG camera stream.
app = Flask(__name__)
@app.route("/")
def index():
now = datetime.datetime.now()
timeString = now.strftime('%I:%M:%S')
templateData = {
'title' : 'KittyTalk',
'time': timeString,
}
return render_template('KittyTalk.html', **templateData)
def gen(camera):
    """Yield an endless multipart MJPEG stream of frames from *camera*."""
    header = b'--frame\r\nContent-Type: image/jpeg\r\n\r\n'
    while True:
        # One multipart chunk per frame.
        yield header + camera.get_frame() + b'\r\n\r\n'
@app.route('/video_feed')
def video_feed():
    """Stream the camera as a multipart/x-mixed-replace MJPEG response."""
    return Response(gen(VideoCamera()),
        mimetype='multipart/x-mixed-replace; boundary=frame')
|
# -*- coding: utf-8 -*-
from urllib2 import urlopen
import urllib
import json
import base64
class BaiduRest:
    """Minimal client for Baidu's TTS and ASR REST APIs (Python 2 / urllib2)."""

    def __init__(self, cu_id, api_key, api_secert):
        # OAuth token endpoint.
        self.token_url = "https://openapi.baidu.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s"
        # Text-to-speech REST endpoint.
        self.getvoice_url = "http://tsn.baidu.com/text2audio?tex=%s&lan=zh&cuid=%s&ctp=1&tok=%s"
        # Speech-recognition REST endpoint.
        self.upvoice_url = 'http://vop.baidu.com/server_api'
        self.cu_id = cu_id
        self.getToken(api_key, api_secert)

    def getToken(self, api_key, api_secert):
        """Fetch and cache an OAuth access token for subsequent calls."""
        token_url = self.token_url % (api_key, api_secert)
        r_str = urlopen(token_url).read()
        token_data = json.loads(r_str)
        self.token_str = token_data['access_token']

    def getVoice(self, text, filename):
        """Synthesize *text* to speech and write the audio to *filename*."""
        # Python 2: pathname2url escapes the text for the URL; on Python 3
        # this would be urllib.parse.quote.
        get_url = self.getvoice_url % (urllib.pathname2url(text), self.cu_id, self.token_str)
        voice_data = urlopen(get_url).read()
        # FIX: close the output file deterministically.
        voice_fp = open(filename, 'wb+')
        try:
            voice_fp.write(voice_data)
        finally:
            voice_fp.close()

    def getText(self, filename):
        """Recognize speech in the WAV file *filename*; return the result list."""
        wav_fp = open(filename, 'rb')
        try:
            voice_data = wav_fp.read()
        finally:
            wav_fp.close()
        # Request parameters expected by the ASR endpoint.
        data = {
            'format': 'wav',
            'rate': 8000,
            'channel': 1,
            'cuid': self.cu_id,
            'token': self.token_str,
            'len': len(voice_data),
            'speech': base64.b64encode(voice_data).decode('utf-8'),
        }
        post_data = json.dumps(data)
        r_data = urlopen(self.upvoice_url, data=post_data).read()
        # BUG FIX: removed a leftover "import ipdb; ipdb.set_trace()"
        # debugger breakpoint that halted every call in production.
        return json.loads(r_data)['result']
if __name__ == "__main__":
# 我的api_key,供大家测试用,在实际工程中请换成自己申请的应用的key和secert
api_key = "SrhYKqzl3SE1URnAEuZ0FKdT"
api_secert = "hGqeCkaMPb0ELMqtRGc2VjWdmjo7T89d"
# 初始化
bdr = BaiduRest("test_python", api_key, api_secert)
# 将字符串语音合成并保存为out.mp3
bdr.getVoice("你好北京邮电大学!", "out.mp3")
# 识别test.wav语音内容并显示
a =bdr.getText("new.wav")
print(bdr.getText("new.wav"))
|
#!/usr/bin/env python
"""
"""
from .bpsf_keys import *
from .h5_file_ops import * |
x = input("player A: ")
y = input("player B: ")
def game(x,y):
    """Print the result of one rock-paper-scissors round between A (x) and B (y).

    Improvements over the original:
    - input is normalized (case/whitespace), so "Rock" and " rock " work;
      exact lowercase input behaves as before;
    - invalid input is detected first (previously two equal invalid moves,
      e.g. "foo" vs "foo", were reported as a tie).
    """
    a = x.strip().lower()
    b = y.strip().lower()
    valid = ("rock", "paper", "scissors")
    if a not in valid or b not in valid:
        print ("Please type rock, paper or scissors")
    elif a == b:
        print ("It is a tie")
    elif (a, b) in (("paper", "rock"), ("rock", "scissors"), ("scissors", "paper")):
        print ("A won")
    else:
        print ("B won")
# Play one round with the moves read above.
game(x,y)
|
import logging
from stocks import tsx
from stocks.tsx import TSX
# Log to a file, overwriting on each run; {}-style formatting.
logging.basicConfig(
    filename="logs.log",
    filemode="w",
    level=logging.INFO,
    format="{asctime} {levelname:<8} {message}",
    style='{'
)
# Date range for the historical price download.
start_date = "2015-01-01"
end_date = "2020-12-31"
logging.info(f"Initiating update_all_tickers_for using (start_date : {start_date}, end_date: {end_date}) ")
# Create a TSX object.
tsx = TSX()
test_db = 'TSX_Data.sqlite'
logging.info(f"Target database to store information: {test_db}) ")
# Extract all TSX ticker symbols from the TMX website and create an SQLite3
# database named TSX_Data.sqlite.
# NOTE(review): 'progess' below matches the library's (misspelled) keyword
# argument as called here -- confirm against TSX.update_all_tickers before
# renaming it.
tsx.update_all_tickers(test_db, progess=True)
logging.info(f"Extraction and storing of all ticker symbols completed")
# Loop over A-Z, downloading and storing price data for each letter's tickers.
logging.info(f"Looping through all letters to scrap data from tsx website ")
alph = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z']
for letter in alph:
    logging.info(f"get_yahoo_tickers_for_({letter}) ")
    stocks = tsx.get_yahoo_tickers_for(test_db, letter)
    logging.info(f"get_prices_from_yahoo_for : {stocks}")
    prices = tsx.get_prices_from_yahoo(stocks, start_date, end_date)
    print(prices)
    logging.info(f"save_prices_to_DB() ")
    success = tsx.save_prices_to_DB(test_db, prices)
    print(f"\n\nLetter {letter} save status : {success}\n\n")
    logging.info(f"Save prices to DB succesful for letter : {letter}")
# Remove any duplicate rows accumulated across runs, then release resources.
tsx.remove_duplicates(test_db)
logging.info(f"Disposing of tsx object ")
tsx.dispose()
|
# Copyright L.P.Klyne 2013
# Licenced under 3 clause BSD licence
import sys, logging , time , os , thread
import unittest
from EventHandlers.tests.DummyRouter import *
from EventHandlers.hvac_components import *
from EventLib.Event import *
from MiscLib.DomHelpers import *
from EventHandlers.EventRouterLoad import EventRouterLoader
import EventHandlers.tests.TestEventLogger as TestEventLogger
_log = None
dCfg = """<valve name="valve name" key="unique key" type="diverting">
<directions>
<direction name="Triangle Square (hot only)" key="TS" wbTarget ="10.100.100.102" cmd ="DO;1;N:" />
<direction name="Circle Square (cold only)" key="CS" wbTarget ="10.100.100.102" cmd ="DO;2;N:" />
</directions>
</valve>
"""
aCfg = """<valve name="valve name" key="unique key" type="diverting">
<directions>
<direction name="Triangle Square (hot only)" key="TS" wbTarget ="10.100.100.102" cmd ="AA;1;100:" />
<direction name="Circle Square (cold only)" key="CS" wbTarget ="10.100.100.102" cmd ="AA;1;0:" />
</directions>
</valve>
"""
testELogger = """<eventInterfaces><eventInterface module='EventHandlers.tests.TestEventLogger' name='TestEventLogger'>
<eventtype type="">
<eventsource source="" >
<event>
</event>
</eventsource>
</eventtype>
</eventInterface>
</eventInterfaces>
"""
class TestDigitalOutput(unittest.TestCase):
    """Tests for DigitalOutput event handling.

    Each test builds a DigitalOutput bound to a test event router and checks
    that turnOn / turnOff / turnOn(dwell) publish the expected events,
    including every pairwise command transition.

    Refactor: repeated construct/sleep/assert boilerplate is factored into
    _makeDo/_settle/_checkEvent; copy-pasted debug labels were corrected to
    name the test they belong to.
    """

    def setUp(self):
        self._log = logging.getLogger( "TestAnalogOuput" )
        self._log.debug( "\n\nsetUp" )
        eLogCfg = getDictFromXmlString(testELogger)
        self.loader = EventRouterLoader()
        self.loader.loadHandlers(eLogCfg)
        self.loader.start()
        self.router = self.loader.getEventRouter()
        self.triggered = False

    def tearDown(self):
        self._log.debug( "\n\nteardown" )

    def sendDelayedEvent(self, event, *args):
        """Publish *event* after a 2 s delay (for asynchronous scenarios)."""
        time.sleep(2)
        self.router.publish(event.getSource(), event)

    def turnDoOn(self, do):
        do.turnOn()
        self.triggered = True

    def turnDoOff(self, do):
        do.turnOff()
        self.triggered = True

    # ---- helpers shared by the tests below --------------------------------
    def _makeDo(self):
        """Create the DigitalOutput under test (node .100, output 2)."""
        return DigitalOutput('do/type', 'do/source', self.router,
                             '10.100.100.100', '2')

    def _settle(self):
        """Let the event router catch up, then dump the captured events."""
        time.sleep(0.2)
        TestEventLogger.logEvents()

    def _checkEvent(self, index, source, val=None):
        """Assert captured event *index* has type 'do/type', the given
        source and, when *val* is given, the matching dwell payload."""
        event = TestEventLogger._events[index]
        assert event.getType() == 'do/type'
        assert event.getSource() == source
        if val is not None:
            assert event.getPayload()['val'] == val

    def testConfig(self):
        self._log.debug( "\n\nTestConfig")
        do = self._makeDo()
        httpActionCfg = do.httpAction.__dict__['_typeConfig']
        self._log.debug(httpActionCfg)
        for attr in ('wbTarget', 'eType', 'eOnSource', 'eOffSource', 'onCmd',
                     'offCmd', 'doNum', 'nodeNum', 'eDwellSource', 'dwellCmd'):
            self._log.debug("%s : %s" % (attr, getattr(do, attr)))
        # Check the httpAction was configured properly; some indexing
        # gymnastics are required to reach the action URI.
        assert len(httpActionCfg['do/type']) == 3
        assert httpActionCfg['do/type']['do/source/off'][0][1][0]['uri'] == '/cfg.spi?com=DO;2;F:'
        assert httpActionCfg['do/type']['do/source/on'][0][1][0]['uri'] == '/cfg.spi?com=DO;2;N:'
        assert httpActionCfg['do/type']['do/source/dwell'][0][1][0]['uri'] == '/cfg.spi?com=DO;2;D;%(val)s:'
        assert do.wbTarget == '10.100.100.100'
        assert do.eType == 'do/type'
        assert do.eOnSource == 'do/source/on'
        assert do.eOffSource == 'do/source/off'
        assert do.localrouter == self.router
        assert do.onCmd == 'DO;2;N:'
        assert do.offCmd == 'DO;2;F:'
        assert do.doNum == '2'
        assert do.nodeNum == '100'
        assert do.eDwellSource == 'do/source/dwell'
        assert do.dwellCmd == 'DO;2;D;%(val)s:'

    def testSetDOOn(self):
        self._log.debug( "\n\ntestSetDOOn" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn()
        self._settle()
        self._checkEvent(0, 'do/source/on')

    def testSetDOOnDwell(self):
        self._log.debug( "\n\ntestSetDOOnDwell" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn(100)
        self._settle()
        self._checkEvent(0, 'do/source/dwell', val='100')

    def testSetDODwellFinish(self):
        self._log.debug( "\n\ntestSetDODwellFinish" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn()
        self._settle()
        self._checkEvent(0, 'do/source/on')

    def testSetDOOff(self):
        self._log.debug( "\n\ntestSetDOOff" )
        self.timeout = time.time() + 5
        do = self._makeDo()
        do.turnOff()
        self._settle()
        self._checkEvent(0, 'do/source/off')

    def testSetDOOnCorrect(self):
        self._log.debug( "\n\ntestSetDOOnCorrect" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        doEvent = makeEvent('http://id.webbrick.co.uk/events/webbrick/DO',
                            'webbrick/100/DO/2', {'state':'0'})
        do.turnOn()
        self.router.publish(doEvent.getSource(), doEvent)
        self._settle()
        self._checkEvent(0, 'do/source/on')
        self._checkEvent(2, 'do/source/on')

    def testSetDOOnDwellCorrect(self):
        self._log.debug( "\n\ntestSetDOOnDwellCorrect" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        doEvent = makeEvent('http://id.webbrick.co.uk/events/webbrick/DO',
                            'webbrick/100/DO/2', {'state':'0'})
        do.turnOn(10)
        self.router.publish(doEvent.getSource(), doEvent)
        self._settle()
        self._checkEvent(0, 'do/source/dwell', val='10')
        self._checkEvent(2, 'do/source/dwell', val='10')

    def testSetDOOffCorrect(self):
        self._log.debug( "\n\ntestSetDOOffCorrect" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        doEvent = makeEvent('http://id.webbrick.co.uk/events/webbrick/DO',
                            'webbrick/100/DO/2', {'state':'1'})
        do.turnOff()
        self.router.publish(doEvent.getSource(), doEvent)
        self._settle()
        self._checkEvent(0, 'do/source/off')
        self._checkEvent(2, 'do/source/off')

    # ---- every possible command transition --------------------------------
    def testOnOff(self):
        self._log.debug( "\n\ntestOnOff" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn()
        do.turnOff()
        self._settle()
        self._checkEvent(0, 'do/source/on')
        self._checkEvent(1, 'do/source/off')

    def testOffOn(self):
        self._log.debug( "\n\ntestOffOn" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOff()
        do.turnOn()
        self._settle()
        self._checkEvent(0, 'do/source/off')
        self._checkEvent(1, 'do/source/on')

    def testDwellDwell(self):
        self._log.debug( "\n\ntestDwellDwell" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn(10)
        do.turnOn(10)
        self._settle()
        self._checkEvent(0, 'do/source/dwell', val='10')
        self._checkEvent(1, 'do/source/dwell', val='10')

    def testDwellOff(self):
        self._log.debug( "\n\ntestDwellOff" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn(10)
        do.turnOff()
        self._settle()
        self._checkEvent(0, 'do/source/dwell', val='10')
        self._checkEvent(1, 'do/source/off')

    def testDwellOn(self):
        self._log.debug( "\n\ntestDwellOn" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn(10)
        do.turnOn()
        self._settle()
        self._checkEvent(0, 'do/source/dwell', val='10')
        self._checkEvent(1, 'do/source/on')

    def testOffDwell(self):
        self._log.debug( "\n\ntestOffDwell" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOff()
        do.turnOn(10)
        self._settle()
        self._checkEvent(0, 'do/source/off')
        self._checkEvent(1, 'do/source/dwell', val='10')

    def testOnDwell(self):
        self._log.debug( "\n\ntestOnDwell" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn()
        do.turnOn(10)
        self._settle()
        self._checkEvent(0, 'do/source/on')
        self._checkEvent(1, 'do/source/dwell', val='10')

    def testOnOn(self):
        self._log.debug( "\n\ntestOnOn" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOn()
        do.turnOn()
        self._settle()
        self._checkEvent(0, 'do/source/on')
        self._checkEvent(1, 'do/source/on')

    def testOffOff(self):
        self._log.debug( "\n\ntestOffOff" )
        self.timeout = time.time() + 15
        do = self._makeDo()
        do.turnOff()
        do.turnOff()
        self._settle()
        self._checkEvent(0, 'do/source/off')
        self._checkEvent(1, 'do/source/off')
from MiscLib import TestUtils
def getTestSuite(select="unit"):
testdict = {
"unit":
[ "testSetDOOn",
"testSetDOOnDwell",
"testSetDOOff",
"testConfig",
"testOnOff",
"testOffOn",
"testDwellDwell",
"testDwellOff",
"testDwellOn",
"testOffDwell",
"testOnDwell",
"testOnOn",
"testOffOff"
]
}
return TestUtils.getTestSuite(TestDigitalOutput,testdict,select=select)
if __name__ == "__main__":
TestUtils.runTests("TestDigitalOutput.log" , getTestSuite , sys.argv)
|
# Generated by Django 3.1.3 on 2020-11-30 14:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: products start out unavailable, and product
    # images gain an upload path plus a stock "no image" placeholder default.

    dependencies = [
        ('shop', '0001_initial'),
    ]
    operations = [
        # Products are now hidden until explicitly marked available.
        migrations.AlterField(
            model_name='product',
            name='available',
            field=models.BooleanField(default=False),
        ),
        # Images upload under shop/products/images with a placeholder default.
        migrations.AlterField(
            model_name='product',
            name='image',
            field=models.ImageField(default='shop/products/no_image.jpeg', upload_to='shop/products/images'),
        ),
    ]
|
"""
@author: Rossi
@time: 2021-01-26
"""
import json
import re
from Broca.utils import find_class, list_class
from Broca.task_engine.slot import Slot
from .event import UserUttered, BotUttered
from .skill import ConfirmSkill, FormSkill, ListenSkill, OptionSkill, Skill, UndoSkill, DeactivateFormSkill
class Agent:
    """A task-oriented dialogue agent.

    Combines a message parser, a conversation tracker store, a skill-picking
    policy, a set of named skills and slot definitions.  Messages flow through
    :meth:`handle_message`, which updates the tracker and runs skills chosen
    by the policy until no skill applies.
    """

    # Matches "skill_name" optionally followed by ':{json parameters}'.
    # Bug fix: this pattern is now a raw string -- the original non-raw
    # literal contained "\{", an invalid string escape (SyntaxWarning on
    # Python 3.12+).  The matched regex is unchanged.
    skill_pattern = re.compile(r"(?P<name>[a-zA-Z_0-9]+)?(:(?P<parameters>\{.+\})$)?")

    def __init__(self, name, parser, tracker_store, policy, intents, slots,
                 prompt_trigger=None):
        """
        :param name: agent identifier; also used to mark messages as processed.
        :param parser: NLU parser, or None to accept pre-parsed messages.
        :param tracker_store: per-sender conversation state storage.
        :param policy: picks the next skill name from a tracker.
        :param intents: mapping of intent name -> intent config.
        :param slots: list of Slot instances tracked per conversation.
        :param prompt_trigger: optional trigger used to prompt the user.
        """
        self.name = name
        self.parser = parser
        tracker_store.agent = self  # back-reference so trackers know their agent
        self.tracker_store = tracker_store
        self.policy = policy
        self.intents = intents
        self.skills = {}
        self.slots = slots
        self.prompt_trigger = prompt_trigger

    def set_script(self, script):
        """Attach the dialogue script and let the policy compile it."""
        self.script = script
        self.policy.parse_script(script, self)

    @classmethod
    def from_config_file(cls, config_file):
        """Build an agent from a JSON config file (see :meth:`from_config`)."""
        with open(config_file, encoding="utf-8") as fi:
            config = json.load(fi)
        return cls.from_config(config)

    @classmethod
    def from_config(cls, config):
        """Build an agent from a config dict, instantiating parser/store/policy
        classes by dotted name and appending the framework's internal slots."""
        parser_config = config.get("parser")
        if parser_config:
            parser_cls = find_class(parser_config["class"])
            parser = parser_cls.from_config(parser_config)
        else:
            parser = None
        tracker_store_config = config["tracker_store"]
        tracker_store_cls = find_class(tracker_store_config["class"])
        tracker_store = tracker_store_cls.from_config(tracker_store_config)
        policy_config = config["policy"]
        policy_cls = find_class(policy_config["class"])
        policy = policy_cls.from_config(policy_config)
        agent_name = config["agent_name"]
        intents = config["intents"]
        slots = []
        for slot_config in config["slots"]:
            slot_cls = find_class(slot_config["class"])
            slots.append(slot_cls.from_config(slot_config))
        # Internal slots used by the built-in confirm/option/form skills.
        slots.append(Slot("confirmed_slot"))
        slots.append(Slot("options_slot"))
        slots.append(Slot("option_slot"))
        slots.append(Slot("main_form"))
        slots.append(Slot("form_utterance"))
        prompt_trigger = config.get("prompt_trigger", None)
        return cls(agent_name, parser, tracker_store, policy, intents, slots, prompt_trigger)

    def can_handle_message(self, message):
        """Return True if the policy can pick a skill for *message*, without
        mutating the persisted tracker (works on a copy)."""
        self._parse_if_needed(message)
        uttered = UserUttered(message)
        tracker = self.tracker_store.get_tracker(message.sender_id)
        temp_tracker = tracker.copy()
        temp_tracker.update(uttered)
        self.listen(temp_tracker)
        skill_name = self.policy.pick_skill(temp_tracker)
        return skill_name is not None

    def handle_message(self, message):
        """Process *message*: run policy-picked skills until none applies and
        return the list of bot responses produced along the way.

        Falls back to the "help_skill" (if registered) when no skill matches.
        """
        self._parse_if_needed(message)
        uttered = UserUttered(message)
        tracker = self.tracker_store.get_tracker(message.sender_id)
        tracker.update(uttered)
        self.listen(tracker)
        skill_name = self.policy.pick_skill(tracker)
        responses = []
        if skill_name is not None:
            # Keep running skills as long as the policy keeps picking one.
            while skill_name is not None:
                skill_name, parameters = self._parse_skill_name(skill_name)
                skill = self.skills[skill_name]()
                for event in skill.perform(tracker, **parameters):
                    tracker.update(event)
                    if isinstance(event, BotUttered):
                        bot_message = event.bot_message
                        responses.append(bot_message)
                skill_name = self.policy.pick_skill(tracker)
        elif "help_skill" in self.skills:  # perform help skill
            help_skill = self.skills["help_skill"]()
            for event in help_skill.perform(tracker):
                tracker.update(event)
                if isinstance(event, BotUttered):
                    bot_message = event.bot_message
                    responses.append(bot_message)
        self.tracker_store.update_tracker(tracker)
        return responses

    def parse(self, message):
        """Parse *message* in place (no-op if already parsed by this agent)."""
        self._parse_if_needed(message)

    def _parse_if_needed(self, message):
        """Run the parser once per agent per message, tracked via the
        message's "processed_by" set."""
        processed_by = message.get("processed_by")
        if processed_by is None:
            processed_by = set()
            message.set("processed_by", processed_by)
        if self.name in processed_by:
            return
        if self.parser is not None:
            self.parser.parse(message)
        processed_by.add(self.name)

    def _parse_skill_name(self, skill_name):
        """Split 'name:{json}' into (name, parameters-dict).

        :raises RuntimeError: if *skill_name* does not match the pattern.
        """
        match = self.skill_pattern.match(skill_name)
        if not match:
            raise RuntimeError("invalid skill name")
        skill_name = match.group("name")
        parameters = match.group("parameters")
        if parameters:
            parameters = json.loads(parameters)
        else:
            parameters = {}
        return skill_name, parameters

    def add_skill(self, skill_cls):
        """Register a skill class; if it declares a trigger intent, merge its
        generated script into the agent's mappings and intent table."""
        skill = skill_cls()
        self.skills[skill.name] = skill_cls
        if skill.trigger_intent:
            mappings = self.script.get("mappings")
            script = skill.generate_script()
            mappings.update(script)
            self.intents[skill.trigger_intent] = {"name": skill.trigger_intent, "use_entities": []}

    def listen(self, tracker):
        """Apply the ListenSkill's events to *tracker*."""
        events = ListenSkill().perform(tracker)
        for event in events:
            tracker.update(event)

    def is_active(self, sender_id):
        """Return True if *sender_id* currently has an active form."""
        tracker = self.tracker_store.get_tracker(sender_id)
        return tracker.active_form is not None

    def load_skills(self, skill_module):
        """Register every concrete Skill subclass found in *skill_module*,
        plus the built-in undo/deactivate/confirm/option skills."""
        for cls in list_class(skill_module):
            if issubclass(cls, Skill) and cls not in [Skill, FormSkill]:
                self.add_skill(cls)
        self.add_skill(UndoSkill)
        self.add_skill(DeactivateFormSkill)
        self.add_skill(ConfirmSkill)
        self.add_skill(OptionSkill)

    def collect_intent_patterns(self):
        """Return [(intent-dict, patterns)] for all trigger-intent skills."""
        intent_patterns = []
        for skill_cls in self.skills.values():
            skill = skill_cls()
            if skill.trigger_intent and skill.intent_patterns:
                intent = {"name": skill.trigger_intent, "agent": self.name}
                intent_patterns.append((intent, skill.intent_patterns))
        return intent_patterns
|
from pyknow import *
class Counter(Fact):
    """Per-turbine counters: `counter` tracks consecutive abnormal readings,
    `counter_normal` tracks consecutive normal readings."""
    counter_normal=Field(int, default=0)
    counter=Field(int, default=0)
class Result(Fact):
    """Marker fact: a reading for WT_id has been classified and awaits the
    stop/start decision."""
    pass
class Input(Fact):
    """One turbine reading (WT_id, status, output_power, prediction, ...)."""
    pass
class Output(Fact):
    """Engine verdict for one reading (id, response, counter)."""
    def retrieve(self):
        # Plain-dict view consumed by ExpertSystem._returnstate.
        return self.as_dict()
class State(Fact):
    """Intermediate fact holding the power-band flag ("L_1".."L_7") chosen
    for the current reading."""
    pass
class ExpertSystem(KnowledgeEngine):
    """Rule engine that flags abnormal wind-turbine power production.

    For each reading, a salience-7 rule bins the predicted power into one of
    seven bands (L_1..L_7); salience-5 rules then compare the reported output
    against band-specific tolerance envelopes and bump either the abnormal
    counter or the normal counter; salience-2 rules decide whether to keep
    the turbine "active" or "stop" it once the abnormal counter reaches its
    maximum.
    """
    def __init__(self, n_count, n_normal):
        """
        :param n_count: abnormal readings tolerated before stopping a turbine.
        :param n_normal: consecutive normal readings needed to decay the
            abnormal counter by one.
        """
        super().__init__()
        self.returnv = dict()   # verdict of the most recent run()
        self.cnt = dict()       # per-turbine abnormal counter, kept across runs
        self.N_COUNTS = n_count
        self.N_NORMAL = n_normal
        self.reset()
    def evaluate(self, idn, data, prediction):
        """Classify one reading for turbine *idn* and return "active"/"stop".

        Declares the Input (and, for first-seen turbines, the Counter) facts,
        runs the engine and harvests the Output fact via `self.returnv`.
        """
        i = 0
        if idn not in list(self.cnt.keys()):
            i = 1  # first time we see this turbine: declare its Counter fact
            self.cnt[idn] = 0
        input_data = data.copy()
        input_data['WT_id'] = idn # we add to data the ID
        input_data['prediction'] = prediction # we add to data the prediction
        counter_data = dict()
        counter_data['WT_id'] = idn # we add to data the ID
        counter_data['counter'] = self.cnt[idn] # we add to data the counter
        counter_data['maxim'] = self.N_COUNTS # we add to data the counter
        counter_data['maxim_normal'] = self.N_NORMAL
        if i:
            self.declare(Counter(**counter_data))
        self.declare(Input(**input_data))
        self.returnv = dict()
        self.run()
        self.cnt[idn] = self.returnv['counter']
        return self.returnv['response']
    # Following rules set the upper and lower limits, depending on what the power output is.
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: pred <= 2500000),
          salience=7)
    def set_limit1(self, ID):
        self.declare(State(WT_id=ID, flag="L_1"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (2800000 >= pred > 2500000)),
          salience=7)
    def set_limit2(self, ID):
        self.declare(State(WT_id=ID, flag="L_2"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (5000000 >= pred > 2800000)),
          salience=7)
    def set_limit3(self, ID):
        self.declare(State(WT_id=ID, flag="L_3"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (5600000 >= pred > 5000000)),
          salience=7)
    def set_limit4(self, ID):
        self.declare(State(WT_id=ID, flag="L_4"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (7800000 >= pred > 5600000)),
          salience=7)
    def set_limit5(self, ID):
        self.declare(State(WT_id=ID, flag="L_5"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (8200000 >= pred > 7800000)),
          salience=7)
    def set_limit6(self, ID):
        self.declare(State(WT_id=ID, flag="L_6"))
    @Rule(Input(status='active',
                WT_id=MATCH.ID,
                prediction=MATCH.pred),
          TEST(lambda pred: (pred > 8200000)),
          salience=7)
    def set_limit7(self, ID):
        self.declare(State(WT_id=ID, flag="L_7"))
    # bad_state_N: output is outside band N's tolerance envelope.
    # Each rule increments the abnormal counter, zeroes the normal counter,
    # retracts the consumed Input/State facts and declares a Result.
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_1"),
          TEST(lambda out_p, pred: (out_p >= 1.2*(pred + 2e5)) or (out_p < 0.8*(pred - 2e5))),
          salience=5)
    def bad_state_1(self, c, f, i, s, ID, cn):
        print("Abnormal production1! {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # Rule for detecting abnormal power production
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_2"),
          TEST(lambda out_p, pred: (out_p >= (1.1*(pred + 2e5) + 0.2e6)) or (out_p < 0.8 * (pred - 2e5))),
          salience=5)
    def bad_state_2(self, c, f, i, s, ID, cn):
        print("Abnormal production2 {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # Rule for detecting abnormal power production
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_3"),
          TEST(lambda out_p, pred: (out_p >= (1.1 * (pred + 2e5)+0.2e6)) or (out_p < (1.12 * (pred - 2e5)-0.9e6))),
          salience=5)
    def bad_state_3(self, c, f, i, s, ID, cn):
        print("Abnormal production3! {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # Rule for detecting abnormal power production
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_4"),
          TEST(lambda out_p, pred: (out_p >= (0.76 * (pred + 2e5)+1.8e6)) or (out_p < (1.12*(pred - 2e5)-0.9e6))),
          salience=5)
    def bad_state_4(self, c, f, i, s, ID, cn):
        print("Abnormal production4! {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_5"),
          TEST(lambda out_p, pred: (out_p >= (0.65 * (pred + 2e5)+2.5e6)) or (out_p < (0.8 * (pred - 2e5)+0.8e6))),
          salience=5)
    def bad_state_5(self, c, f, i, s, ID, pred, out_p, cn):
        print('prediction: ' + str(pred))
        print('reported: ' + str(out_p))
        print("Abnormal production5! {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_6"),
          TEST(lambda out_p, pred: (out_p >= 7.7e6 or out_p < (0.8 * (pred - 2e5)+0.8e6))),
          salience=5)
    def bad_state_6(self, c, f, i, s, ID, cn):
        print("Abnormal production6! {}".format(c+1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          counter_normal=MATCH.cn),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_7"),
          TEST(lambda out_p, pred: (out_p >= 7.7e6 or out_p < 7.3e6)),
          salience=5)
    def bad_state_7(self, c, f, i, s, ID, cn):
        print("Abnormal production7! {}".format(c + 1))
        c += 1
        self.modify(f, counter=c, counter_normal=0)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # good_state_N: output is inside band N's tolerance envelope.
    # Each rule increments only the normal counter.
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_1"),
          TEST(lambda out_p, pred: (1.2 * (pred + 2e5)) > out_p >= (0.8 * (pred - 2e5))),
          salience=5)
    def good_state_1(self, f, i, s, ID, c):
        print("Normal production1! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_2"),
          TEST(lambda out_p, pred: (1.1 * (pred + 2e5)+0.2e6) > out_p >= (0.8 * (pred - 2e5))),
          salience=5)
    def good_state_2(self, f, i, s, ID, c):
        print("Normal production2! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_3"),
          TEST(lambda out_p, pred: (1.1 * (pred + 2e5)+0.2e6) > out_p >= (1.12 * (pred - 2e5)-0.9e6)),
          salience=5)
    def good_state_3(self, f, i, s, ID, c):
        print("Normal production3! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # Rule for detecting normal power production
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_4"),
          TEST(lambda out_p, pred: (0.76 * (pred + 2e5)+1.8e6) > out_p >= (1.12*(pred - 2e5)-0.9e6)),
          salience=5)
    def good_state_4(self, f, i, s, ID, c):
        print("Normal production4! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_5"),
          TEST(lambda out_p, pred: (0.65 * (pred + 2e5)+2.5e6) > out_p >= (0.8 * (pred-2e5)+0.8e6)),
          salience=5)
    def good_state_5(self, f, i, s, ID, c):
        print("Normal production5! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_6"),
          TEST(lambda out_p, pred: (7.7e6 > out_p >= 0.8 * (pred - 2e5)+0.8e6)),
          salience=5)
    def good_state_6(self, f, i, s, ID, c):
        print("Normal production6! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    @Rule(AS.i << Input(output_power=MATCH.out_p,
                        prediction=MATCH.pred,
                        WT_id=MATCH.ID,
                        status='active'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter_normal=MATCH.c),
          AS.s << State(WT_id=MATCH.ID,
                        flag="L_7"),
          TEST(lambda out_p, pred: (7.7e6 > out_p >= 7.3e6)),
          salience=5)
    def good_state_7(self, f, i, s, ID, c):
        print("Normal production7! {}".format(c))
        c+=1
        self.modify(f, counter_normal=c)
        self.retract(i)
        self.retract(s)
        self.declare(Result(WT_id=ID))
    # A 'stop' status reading resets the abnormal counter and reports "stop".
    @Rule(AS.i << Input(WT_id=MATCH.ID,
                        status='stop'),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c),
          salience=2)
    def reset_counter(self, f, ID, i, c):
        self.modify(f, counter=0)
        self.retract(i)
        self.declare(Output(id=ID, response="stop", counter=0))
    # Enough consecutive normal readings decay the abnormal counter by one.
    @Rule(AS.f <<Counter(counter_normal=MATCH.cn,
                         counter=MATCH.c,
                         maxim_normal=MATCH.m),
          TEST(lambda cn, c, m: ((cn>=m)&(c>0))),
          salience=2)
    def decrease_counter1(self, f, cn, c):
        c-=1
        self.modify(f, counter=c, counter_normal=0)
    @Rule(AS.f <<Counter(counter_normal=MATCH.cn,
                         counter=MATCH.c,
                         maxim_normal=MATCH.m),
          TEST(lambda cn, c, m: ((cn>=m)&(c==0))),
          salience=2)
    def decrease_counter2(self, f, cn):
        self.modify(f, counter_normal=0)
    # If we exceed the counter -> return stop
    @Rule(AS.r << Result(WT_id=MATCH.ID),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          maxim=MATCH.m),
          TEST(lambda c, m: c >= m),
          salience=2)
    def stop_turbine(self, ID, f, r, c):
        self.modify(f, counter=0)
        self.retract(r)
        self.declare(Output(id=ID, response="stop", counter=c))
    # If we don't exceed the counter -> return active
    @Rule(AS.r << Result(WT_id=MATCH.ID),
          AS.f << Counter(WT_id=MATCH.ID,
                          counter=MATCH.c,
                          maxim=MATCH.m),
          TEST(lambda c, m: c < m),
          salience=2)
    def start_turbine(self, ID, r, c):
        self.retract(r)
        self.declare(Output(id=ID, response="active", counter=c))
    # Lowest salience: harvest the Output fact into self.returnv for evaluate().
    @Rule(AS.out << Output(), salience=0)
    def _returnstate(self, out):
        self.returnv.update(**out.retrieve())
        self.retract(out)
    def get_return(self, key):
        """Look up *key* in the verdict of the most recent run."""
        return self.returnv.get(key)
|
import numpy
from keras import backend as K
import keras
import time
from keras.datasets import mnist
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Flatten
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.models import Sequential
from keras.utils import np_utils
# Channels-first (Theano-style) image layout, matching the (1, 28, 28)
# reshapes used by the models below.
K.set_image_dim_ordering('th')
import os
#os.environ["CUDA_VISIBLE_DEVICES"]="-1"
if 'tensorflow' == K.backend():
    import tensorflow as tf
    from keras.backend.tensorflow_backend import set_session
    # Grow GPU memory on demand instead of reserving it all up front,
    # and pin the session to GPU 0.
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    config.gpu_options.visible_device_list = "0"
    set_session(tf.Session(config=config))
# define the keras model
# define the keras model
def keras_model(n_classes=None):
    """Build and compile the larger CNN used by the benchmark loop.

    :param n_classes: number of softmax output classes.  Defaults to the
        module-level ``num_classes`` (set inside the training loop) so the
        existing ``keras_model()`` call keeps working; passing it explicitly
        removes the hidden dependency on that global.
    :return: a compiled ``Sequential`` model.
    """
    if n_classes is None:
        # Backward-compatible fallback to the global computed from the labels.
        n_classes = num_classes
    # create model
    network = Sequential()
    network.add(Conv2D(60, (5, 5), input_shape=(1, 28, 28), activation='relu'))
    network.add(MaxPooling2D(pool_size=(2, 2)))
    network.add(Conv2D(15, (3, 3), activation='relu'))
    network.add(MaxPooling2D(pool_size=(2, 2)))
    network.add(Dropout(0.2))
    network.add(Flatten())
    network.add(Dense(128, activation='relu'))
    network.add(Dropout(0.15))
    network.add(Dense(64, activation='relu'))
    network.add(Dense(n_classes, activation='softmax'))
    # Compile model
    network.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return network
def tutorial_model(n_classes=None):
    """Build and compile the smaller tutorial-style CNN (alternative model).

    :param n_classes: number of softmax output classes.  Defaults to the
        module-level ``num_classes`` for backward compatibility, mirroring
        ``keras_model``.
    :return: a compiled ``Sequential`` model.
    """
    if n_classes is None:
        # Backward-compatible fallback to the global computed from the labels.
        n_classes = num_classes
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 3),
                     activation='relu',
                     input_shape=(1, 28, 28)))
    model.add(Conv2D(64, (3, 3), activation='relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(n_classes, activation='softmax'))
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.Adadelta(),
                  metrics=['accuracy'])
    return model
# Benchmark loop: train and evaluate the CNN 50 times, reporting the running
# average of wall-clock time and test error.
# NOTE(review): the seed is re-fixed and MNIST is re-loaded on every
# iteration, so each run starts from identical initial conditions -- confirm
# this repetition is intended (e.g. to average GPU nondeterminism).
allruns=0;
allscore=0;
for x in range(1, 51):
    # fix random seed for reproducibility
    seed = 7
    numpy.random.seed(seed)
    tbCallBack = keras.callbacks.TensorBoard(log_dir='./Graph', histogram_freq=0, write_graph=True, write_images=True)
    # load data
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    # reshape to be [samples][pixels][width][height]
    X_train = X_train.reshape(X_train.shape[0], 1, 28, 28).astype('float32')
    X_test = X_test.reshape(X_test.shape[0], 1, 28, 28).astype('float32')
    # normalize inputs from 0-255 to 0-1
    X_train = X_train / 255
    X_test = X_test / 255
    # one hot encode outputs
    y_train = np_utils.to_categorical(y_train)
    y_test = np_utils.to_categorical(y_test)
    num_classes = y_test.shape[1]
    # Timing covers model construction, training and evaluation only.
    start = time.time()
    # build the model
    model = keras_model()
    # model = tutorial_model()
    # Fit the model
    model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=15, batch_size=200,callbacks=[tbCallBack])
    # Final evaluation of the model
    scores = model.evaluate(X_test, y_test, verbose=0)
    end = time.time()
    print("Large CNN Error: %.2f%%" % (100-scores[1]*100))
    allscore+=scores[1]
    run=end-start
    allruns +=run
    print("Avg Time "+ str(allruns/x) + "run " + str(x))
    print("Avg Error: %.2f%%" % (100 - (allscore/x) * 100))
# Average runtime over all completed iterations (x is the last loop value).
resultTime=allruns/x
|
import json
import uuid
from flask import jsonify, request
from flask_classy import FlaskView
from app.exceptions.exceptions import DomainError
from app.service.domain.service import Service as Service_Domain
from app.service.database.service import Service as Service_DB
from utils.celery.celery import start_search
from utils.response.response import Response
response = Response()
class BaseView(FlaskView):
    """REST endpoints of the e-mail search service."""
    route_base = '/api/getemail/'

    def get(self, token):
        """Return the stored answer for *token*, or a 404 response."""
        # NOTE(review): dumping the entire table to result.txt on every GET
        # looks like leftover debugging -- consider removing it.
        all_data = Service_DB.get_all()
        with open('result.txt', 'w') as file:
            file.write(str(all_data))
        result = Service_DB.get_one_by_token(token=token)
        if result:
            return result.answer
        return response.response_404()

    def post(self):
        """Validate the submitted URL, queue an async search, return its token.

        Responds 400 when the payload has no 'url' or the domain is invalid.
        """
        data_json = json.loads(request.data)
        url = data_json.get('url')
        if url is None:
            return response.response_400()
        service = Service_Domain(url)
        try:
            service.check_domain()
        except DomainError:
            return response.response_400()
        # Bug fix: pass the token as a string -- a raw uuid.UUID is not
        # serializable by Celery's default JSON serializer, so start_search
        # could fail to enqueue.
        _token = str(uuid.uuid4())
        start_search.delay(url, _token)
        return jsonify({'token': _token})
|
from assembler import Assembler
class Section:
    """One linked section (a slice of a data or code region)."""
    # Class-level defaults, shadowed by the instance attributes set in __init__.
    header = 0   # start offset of the section within its region
    length = 0   # section length
    name = ""    # section name, e.g. "ass1.data"
    type = ""    # section kind: "data" or "code"
    def __init__(self,header,length,name,type):
        # NOTE: parameter `type` shadows the builtin; name kept for
        # compatibility with keyword callers.
        self.header = header
        self.length = length
        self.name = name
        self.type = type
class Section_table:
    """Ordered collection of Sections; `header` is the running offset at
    which the next added section will start."""
    # Class-level defaults, shadowed by the instance attributes set in __init__.
    header = 0
    num = 0
    modules = dict()
    def __init__(self):
        self.header = 0          # next free offset
        self.num = 0             # NOTE(review): never incremented by add() -- confirm intended
        self.modules = dict()    # section name -> Section
    def add(self,length,name,type):
        # Place the new section at the current offset and advance it.
        sec = Section(self.header,length,name,type)
        self.header+=length
        self.modules[name] = sec
class Linker:
    """Links a main assembly file with one library file via the Assembler.

    ``pre_check`` performs symbol resolution; ``linkE`` produces a merged
    source ("mid.ast") and assembles it; ``linkH`` builds section tables.
    """

    def __init__(self,file_name_1,file_name_2):
        self.main_file=file_name_1
        self.linked_file = file_name_2
        # Bug fix: these used to be *class-level* mutable attributes
        # (U=[], D=[], E=[] and shared Section_table instances), so every
        # Linker instance appended into the same objects.  They are now
        # per-instance state.
        self.U=[]   # unresolved symbols of the main file
        self.D=[]   # defined symbols collected from both files
        self.E=[]   # files participating in the link, in order
        self.data_section_table = Section_table()
        self.code_sectopn_table = Section_table()  # attribute name kept as-is: linkH uses it
        self.linkable=False
        self.ass1 = Assembler(file_name_1)
        self.ass2 = Assembler(file_name_2)

    def pre_check(self):
        """Symbol resolution: assemble the main file, collect its unresolved
        symbols and check they are all defined by the linked file."""
        self.E.append(self.main_file)
        self.ass1.pre_ass()
        self.ass1.ass()
        if len(self.ass1.find_unsolved_symbol()) == 0:
            # No unresolved symbols: the main file alone is executable.
            self.ass1.output()
            print("无需链接,直接汇编生成可执行文件")
            self.linkable=True
            return
        for item in self.ass1.find_unsolved_symbol():
            self.U.append(item)
        for item in self.ass1.solved_symbol_table:
            self.D.append(item)
        self.E.append(self.linked_file)
        if len(self.ass2.find_unsolved_symbol()) !=0:
            print("链接文件中出现未定义的项,不符合约定,链接失败")
            return
        for item in self.ass2.solved_symbol_table:
            self.D.append(item)
        for item in self.U:
            if item not in self.D:
                print("链接过程中发现没有定义的全局变量,链接失败")
                return
        print("符号解析结束,未发现未定义符号,可以进行链接")
        self.linkable = True

    def linkE(self):
        """Source-level link: concatenate both files' .data and .code
        sections into "mid.ast" and assemble the result."""
        if self.linkable == False:
            print("需要进行符号解析")
            return  # bug fix: previously fell through and linked anyway
        data = 0
        code = 0  # bug fix: `code` was never initialized (an unused `text` was)
        datas = []
        codes = []
        for i in range(len(self.ass1.codes)):
            if ".data" in self.ass1.codes[i]:
                data = i
            if ".code" in self.ass1.codes[i]:
                code = i
        for j in range(data+1,code):
            datas.append(self.ass1.codes[j])
        for j in range(code+1,len(self.ass1.codes)):
            codes.append(self.ass1.codes[j])
        for i in range(len(self.ass2.codes)):
            if ".data" in self.ass2.codes[i]:
                data = i
            if ".code" in self.ass2.codes[i]:
                code = i
        for j in range(data+1,code):
            datas.append(self.ass2.codes[j])
        for j in range(code+1,len(self.ass2.codes)):
            codes.append(self.ass2.codes[j])
        # Context manager ensures the intermediate file is flushed and closed.
        with open("mid.ast","w") as w:
            w.write(".data\n")
            for item in datas:
                w.write(item+"\n")
            w.write(".text\n")
            for item in codes:
                w.write(item+"\n")
        result = Assembler("mid.ast")
        result.pre_ass()
        result.ass()
        result.output()
        print("代码生成结束")

    def linkH(self):
        """Build data/code section tables for both files."""
        if self.linkable == False:
            print("需要进行符号解析")
            return  # bug fix: same silent fall-through as linkE
        code = 0
        data = 0
        for i in range(len(self.ass1.codes)):
            if ".data" in self.ass1.codes[i]:
                data = i
            if ".code" in self.ass1.codes[i]:
                code = i
        self.data_section_table.add(len(self.ass1.data_smooth()),"ass1.data","data")
        self.code_sectopn_table.add((len(self.ass1.codes)-code-1)*4,"ass1.code","code")
        for i in range(len(self.ass2.codes)):
            if ".data" in self.ass2.codes[i]:
                data = i
            if ".code" in self.ass2.codes[i]:
                code = i
        self.data_section_table.add(len(self.ass2.data_smooth()), "ass2.data", "data")
        self.code_sectopn_table.add((len(self.ass2.codes)-code-1)*4, "ass2.code", "code")
def main():
    """Drive a sample link of test1.asm with test2.asm."""
    l = Linker("test1.asm","test2.asm")
    l.pre_check()

if __name__ == "__main__":
    # Bug fix: main() previously ran unconditionally on import; guard it so
    # the module can be imported without side effects.
    main()
|
import asyncio
from datetime import datetime, timedelta
# Cached short git commit hash; populated lazily by get_version().
version = None
async def get_cmd_output(cmd, do_strip=True):
    """Run *cmd* in a shell and return its decoded stdout.

    Surrounding whitespace is removed when *do_strip* is true (the default).
    """
    process = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE
    )
    raw, _ = await process.communicate()
    text = raw.decode('utf-8')
    return text.strip() if do_strip else text
async def get_version():
    """Return the short git commit hash, caching it in the module global."""
    global version
    if version is not None:
        return version
    version = await get_cmd_output('git rev-parse --short HEAD')
    return version
async def sleep_to_minute(freq):
    """Sleep until the next minute boundary that is a multiple of *freq*.

    Bug fix: the last check minute was computed as ``int(cur_time.minute / freq)``
    -- the quotient itself -- instead of the minute floored to a multiple of
    *freq* (``minute // freq * freq``).  E.g. with freq=15 at minute 47 the old
    code built a boundary at minute 3 and slept a negative/zero duration; the
    correct last boundary is minute 45, sleeping until minute 0 of the next hour.

    :param freq: check interval in minutes (should evenly divide 60).
    """
    cur_time = datetime.utcnow()
    last_minute = (cur_time.minute // freq) * freq
    last_check_time = datetime(cur_time.year, cur_time.month, cur_time.day, cur_time.hour, last_minute, 0)
    next_check_time = last_check_time + timedelta(minutes=freq)
    return await asyncio.sleep((next_check_time - datetime.utcnow()).total_seconds())
|
# -*- coding: utf-8 -*-
import glob
import os
import re
from zhon.hanzi import punctuation
if __name__ == "__main__":
output_path = "jieba1.txt"
fout = open(output_path, 'wb')
dic = {}
input_dir = "/Volumes/MyDisk/studio/ai/Tacotron/data_thchs30/"
trn_files = glob.glob(os.path.join(input_dir, "data", '*.trn'))
for trn in trn_files:
with open(trn,encoding='utf-8') as f:
basename = trn[:-4]
zhText = f.readline()
arr = zhText.split(" ")
print(zhText)
for text in arr:
if len(text.strip()) > 1:
dic[text] = text.strip()
for k,v in dic.items():
fout.write(f'{v}\n'.encode('utf-8')) |
#What's Your Name
#https://www.hackerrank.com/challenges/whats-your-name/problem
def print_full_name(a, b):
    """Print the HackerRank greeting for first name *a* and last name *b*."""
    greeting = "Hello {} {}! You just delved into python.".format(a, b)
    print(greeting)
|
import json
from phonepiece.config import phonepiece_config
from phonepiece.inventory import read_inventory
from phonepiece.grapheme import read_grapheme
from phonepiece.unit import read_unit
import numpy as np
def rec_read_cart_tree(tree_dict):
    """Recursively rebuild a Node (and its subtree) from its dict form."""
    node = Node(None)
    left_set = tree_dict['left_set']
    if left_set is not None:
        node.left_set = set(left_set)
    label = tree_dict['label']
    if label is not None:
        node.label = int(label)
    left_child = tree_dict['left']
    if left_child:
        node.left = rec_read_cart_tree(left_child)
    right_child = tree_dict['right']
    if right_child:
        node.right = rec_read_cart_tree(right_child)
    node.feature_idx = int(tree_dict['feature_idx'])
    return node
def read_cart_tree(path):
    """Load a serialized CART tree from the JSON file at *path*.

    :param path: path to a tree.json produced by :func:`write_cart_tree`.
    :return: a Tree instance with its id2node sub-trees reconstructed.
    """
    # Bug fix: json.load(open(path)) leaked the file handle; the context
    # manager closes it deterministically.  utf-8 matches write_cart_tree.
    with open(path, 'r', encoding='utf-8') as f:
        tree_dict = json.load(f)
    meta_dict = tree_dict['meta']
    id2node = {}
    for k, v in tree_dict['id2node'].items():
        id2node[int(k)] = rec_read_cart_tree(v)
    tree = Tree(id2node, meta_dict)
    return tree
def read_cart(lang_id):
    """Load the pretrained CART tree shipped for *lang_id* from the
    phonepiece data directory."""
    return read_cart_tree(phonepiece_config.data_path / 'cart' / lang_id / 'tree.json')
def build_cart_dict(node):
    """Serialize a Node subtree into the plain-dict form used on disk.

    `left_set` becomes a sorted int list (empty when unset); `label` becomes
    an int or None; children are serialized recursively.
    """
    serialized = {
        'left_set': sorted(map(int, list(node.left_set))) if node.left_set is not None else [],
        'label': int(node.label) if node.label is not None else None,
        'left': build_cart_dict(node.left) if node.left else None,
        'right': build_cart_dict(node.right) if node.right else None,
        'feature_idx': node.feature_idx,
    }
    return serialized
def write_cart_tree(tree, path):
    """Serialize *tree* (its sub-trees plus meta data) to *path* as JSON.

    :param tree: a Tree with `id2node`, `score`, `sample_size` and `lang_id`.
    :param path: destination file path.
    """
    id2node_dict = {}
    for k,v in tree.id2node.items():
        id2node_dict[int(k)] = build_cart_dict(v)
    # setup meta
    meta_dict = {
        'score': tree.score,
        'sample_size': int(tree.sample_size),
        'lang_id': tree.lang_id,
    }
    tree_dict = {'id2node': id2node_dict, 'meta': meta_dict}
    # Bug fix: the manual open/close pair leaked the handle on any exception;
    # json.dump with a context manager writes the same indented output.
    with open(path, 'w', encoding='utf-8') as w:
        json.dump(tree_dict, w, indent=4)
class Tree:
    """A CART tree mapping grapheme (character) contexts to phoneme ids.

    Holds one decision sub-tree per center-character id (`id2node`), plus the
    language's grapheme and phoneme inventories for id lookups.
    """
    def __init__(self, id2node, meta):
        # meta carries the language id and training statistics.
        self.meta = meta
        self.lang_id = self.meta['lang_id']
        self.score = self.meta['score']
        self.sample_size = self.meta['sample_size']
        # center-character id -> root Node of that character's sub-tree
        self.id2node = id2node
        self.char = read_grapheme(self.lang_id)
        self.phoneme = read_inventory(self.lang_id).phoneme
    def __len__(self):
        # Total number of leaves across all sub-trees.
        return sum(len(node) for node in self.id2node.values())
    def add_top_node(self, center_id, node):
        """Register *node* as the sub-tree for center character *center_id*."""
        self.id2node[center_id] = node
    def classify_units(self, chars):
        """Map a character context window to its predicted phoneme units."""
        # map char to the closest char
        chars = [self.char.get_nearest_unit(char) for char in chars]
        ids = self.char.get_ids(chars)
        phoneme_id = self.classify_ids(np.array(ids))
        #print(ids, '->', phone_id)
        phoneme_lst = self.phoneme.get_joint_unit(phoneme_id)
        return phoneme_lst
    def classify_ids(self, x):
        """Classify an id vector; x[2] is the center character's id.

        Falls back to the nearest-phoneme lookup when the center character
        has no trained sub-tree.
        """
        center_id = x[2]
        if center_id in self.id2node:
            return self.id2node[center_id].classify(x)
        else:
            char = self.char.get_joint_unit(center_id)[0]
            phone = self.phoneme.get_nearest_phoneme(char)
            return self.phoneme.get_id(phone)
class Node:
    """One decision node of a CART tree.

    An internal node routes on ``x[feature_idx] in left_set``; a leaf carries
    a ``label`` (and its children are None).
    """
    def __init__(self, config):
        self.config = config
        self.left_set = set()     # feature values routed to the left child
        self.feature_idx = -1     # index into the input vector tested here
        self.left = None
        self.right = None
        self.label = None         # set only on leaves

    def __len__(self):
        """Number of leaves in this subtree (a leaf counts as 1)."""
        return 1 if self.label is not None else len(self.left) + len(self.right)

    def classify(self, x):
        """Route feature vector *x* down the tree and return the leaf label."""
        current = self
        while current.label is None:
            if x[current.feature_idx] in current.left_set:
                current = current.left
            else:
                current = current.right
        return current.label
import numpy as np
import math
def shortestLRTime():
    """Prompt for inductance, resistor count and per-resistor resistance,
    then print the resulting LR time constant.

    Reads three floats from stdin and prints (henry / ohms) / count.
    The arithmetic is split into ``_lr_time_constant`` so it can be reused
    and tested apart from the interactive I/O.
    """
    henry = float(input('Henry: '))
    count = float(input('Count of resistors: '))
    ohms = float(input('ohms: '))
    print(_lr_time_constant(henry, ohms, count))

def _lr_time_constant(henry, ohms, count):
    """Return (henry / ohms) / count -- pure computation, no I/O."""
    return (henry / ohms) / count
if __name__ == "__main__":
    # Guard the interactive entry point so importing this module does not
    # block waiting on input().
    shortestLRTime()
# Initialize!
|
class Solution:
    def solve(self, n):
        """Return True if *n* is a sum of distinct factorials (1!..12!).

        Bug fix: the original referenced ``prod`` and ``bisect_right``
        without importing them, raising NameError at runtime.
        """
        from math import prod
        from bisect import bisect_right
        facts = [prod(range(1, i + 1)) for i in range(1, 13)]

        def pick(n, right_bound):
            # Try the largest usable factorial first; `right_bound` keeps the
            # chosen factorials strictly decreasing, hence distinct.
            if n == 0:
                return True
            right_bound = min(right_bound, bisect_right(facts, n))
            return any(n - facts[i] >= 0 and pick(n - facts[i], i)
                       for i in range(right_bound - 1, -1, -1))

        return pick(n, len(facts))
|
# Read n heights and decide whether the sequence strictly alternates
# (every interior element is a strict local minimum or maximum).
# Prints 1 when it alternates, 0 otherwise.
n = int(input())
h = [int(x) for x in input().split()]
# Exactly two equal heights can never alternate.
if n == 2 and h[0] == h[1]:
    pico = 0
else:
    pico = 1
    for i in range(1, n-1):
        # Interior element must be strictly below or strictly above both neighbors.
        if not ((h[i] < h[i-1] and h[i] < h[i+1]) or (h[i] > h[i-1] and h[i] > h[i+1])):
            pico = 0
            break
print(pico)
# Python
#
# This module implements the Main Markdown class.
#
# This file is part of mdutils. https://github.com/didix21/mdutils
#
# MIT License: (C) 2018 Dídac Coll
"""Module **mdutils**
The available features are:
* Create headers, up to 6 sub-levels.
* Auto generate a table of contents.
* Create List and sub-list.
* Create paragraph.
* Generate tables of different sizes.
* Insert Links.
* Insert Code.
* Place text anywhere using a marker.
"""
from mdutils.fileutils.fileutils import MarkDownFile
from mdutils.tools import tools
class MdUtils:
"""This class give some basic methods that helps the creation of Markdown files while you are executing a python
code.
The ``__init__`` variables are:
- **file_name:** it is the name of the Markdown file.
    - **author:** it is the author of the Markdown file.
- **header:** it is an instance of Header Class.
- **textUtils:** it is an instance of TextUtils Class.
- **title:** it is the title of the Markdown file. It is written with Setext-style.
- **table_of_contents:** it is the table of contents, it can be optionally created.
- **file_data_text:** contains all the file data that will be written on the markdown file.
"""
    def __init__(self, file_name, title="", author=""):
        """
        :param file_name: it is the name of the Markdown file.
        :type file_name: str
        :param title: it is the title of the Markdown file. It is written with Setext-style.
        :type title: str
        :param author: it is the author of the Markdown file.
        :type author: str
        """
        self.file_name = file_name
        self.author = author
        self.header = tools.Header()        # renders markdown headers
        self.textUtils = tools.TextUtils()  # inline text markup helpers
        self.title = self.header.choose_header(level=1, title=title, style='setext')
        self.table_of_contents = ""
        self.file_data_text = ""            # accumulated markdown body
        self._table_titles = []             # nested [title, [subtitles, ...]] data for the ToC
def create_md_file(self):
"""It creates a new Markdown file.
:return: return an instance of a MarkDownFile."""
md_file = MarkDownFile(self.file_name)
md_file.rewrite_all_file(data=self.title + self.table_of_contents + self.file_data_text)
return md_file
def read_md_file(self, file_name):
"""Reads a Markdown file and save it to global class `file_data_text`.
:param file_name: Markdown file's name that has to be read.
:type file_name: str
:return: optionally returns the file data content.
:rtype: str
"""
file_data = MarkDownFile().read_file(file_name)
self.file_data_text += file_data
return file_data
def new_header(self, level, title, style='atx', add_table_of_contents='y'):
""" Add a new header to the Markdown file.
:param level: Header level. *atx* style can take values 1 til 6 and *setext* style take values 1 and 2.
:type level: int
:param title: Header title.
:type title: str
:param style: Header style, can be ``'atx'`` or ``'setext'``. By default ``'atx'`` style is chosen.
:type style: str
:param add_table_of_contents: by default the atx and setext headers of level 1 and 2 are added to the
table of contents, setting this parameter to 'n'.
:type add_table_of_contents: str
The example below consist in creating two types Headers examples:
:Example:
>>> mdfile = MdUtils("Header_Example")
>>> print(mdfile.new_header(level=2, title='Header Level 2 Title', style='atx', add_table_of_contents='y'))
'\\n## Header Level 2 Title\\n'
>>> print(mdfile.new_header(level=2, title='Header Title', style='setext'))
'\\nHeader Title\\n-------------\\n'
"""
if add_table_of_contents == 'y':
self._add_new_item_table_of_content(level, title)
self.file_data_text += self.header.choose_header(level, title, style)
return self.header.choose_header(level, title, style)
def _add_new_item_table_of_content(self, level, item):
"""Automatically add new atx headers to the table of contents.
:param level: add til 2 levels. Only can take 1 or 2.
:type level: int
:param item: items to add.
:type item: list or str
"""
if level == 1:
self._table_titles.append(item)
self._table_titles.append([])
elif level == 2:
self._table_titles[-1].append(item)
self._table_titles[-1].append([])
def new_table_of_contents(self, table_title="Table of contents", depth=1, marker=''):
"""Table of contents can be created if Headers of 'atx' style have been defined.
This method allows to create a table of contents and define a title for it. Moreover, `depth` allows user to
define if headers of level 1 and 2 or only level 1 have to be placed on the table of contents.
If no marker is defined, the table of contents will be placed automatically after the file's title.
:param table_title: The table content's title, by default "Table of contents"
:type table_title: str
:param depth: allows to include Headers 1 and 2 or only Headers of level 1. Possible values 1 or 2.
:type depth: int
:param marker: allows to place the table of contents using a marker.
:type marker: str
:return: a string with the data is returned.
:rtype: str
"""
if marker:
self.table_of_contents = ""
marker_table_of_contents = self.header.choose_header(level=1, title=table_title, style='setext')
marker_table_of_contents += tools.TableOfContents().create_table_of_contents(self._table_titles, depth)
self.file_data_text = self.place_text_using_marker(marker_table_of_contents, marker)
else:
marker_table_of_contents = ""
self.table_of_contents += self.header.choose_header(level=1, title=table_title, style='setext')
self.table_of_contents += tools.TableOfContents().create_table_of_contents(self._table_titles, depth)
return self.table_of_contents + marker_table_of_contents
def new_table(self, columns, rows, text, text_align='center', marker=''):
"""This method takes a list of strings and creates a table.
Using arguments ``columns`` and ``rows`` allows to create a table of *n* columns and *m* rows. The
``columns * rows`` operations has to correspond to the number of elements of ``text`` list argument.
Moreover, ``argument`` allows to place the table wherever you want from the file.
:param columns: this variable defines how many columns will have the table.
:type columns: int
:param rows: this variable defines how many rows will have the table.
:type rows: int
:param text: it is a list containing all the strings which will be placed in the table.
:type text: list
:param text_align: allows to align all the cells to the ``'right'``, ``'left'`` or ``'center'``.
By default: ``'center'``.
:type text_align: str
:param marker: using ``create_marker`` method can place the table anywhere of the markdown file.
:type marker: str
:return: can return the table created as a string.
:rtype: str
:Example:
>>> from mdutils.tools.tools import Table
>>> text_list = ['List of Items', 'Description', 'Result', 'Item 1', 'Description of item 1', '10', 'Item 2', 'Description of item 2', '0']
>>> table = Table().new_table(columns=3, rows=3, text=text_list, text_align='center')
>>> print(repr(table))
'\\n|List of Items|Description|Result|\\n| :---: | :---: | :---: |\\n|Item 1|Description of item 1|10|\\n|Item 2|Description of item 2|0|\\n'
.. csv-table:: **Table result on Markdown**
:header: "List of Items", "Description", "Results"
"Item 1", "Description of Item 1", 10
"Item 2", "Description of Item 2", 0
"""
new_table = tools.Table()
text_table = new_table.create_table(columns, rows, text, text_align)
if marker:
self.file_data_text = self.place_text_using_marker(text_table, marker)
else:
self.file_data_text += text_table
return text_table
def new_paragraph(self, text='', bold_italics_code='', color='black', align=''):
"""Add a new paragraph to Markdown file. The text is saved to the global variable file_data_text.
:param text: is a string containing the paragraph text. Optionally, the paragraph text is returned.
:type text: str
:param bold_italics_code: bold_italics_code: using ``'b'``: **bold**, ``'i'``: *italics* and
``'c'``: ``inline_code``.
:type bold_italics_code: str
:param color: Can change text color. For example: ``'red'``, ``'green'``, ``'orange'``...
:type color: str
:param align: Using this parameter you can align text.
:type align: str
:return: ``'\\n\\n' + text``. Not necessary to take it, if only has to be written to
the file.
:rtype: str
"""
if bold_italics_code or color != 'black' or align:
self.file_data_text += '\n\n' + self.textUtils.text_format(text, bold_italics_code, color, align)
else:
self.file_data_text += '\n\n' + text
return self.file_data_text
def new_line(self, text='', bold_italics_code='', color='black', align=''):
"""Add a new line to Markdown file. The text is saved to the global variable file_data_text.
:param text: is a string containing the paragraph text. Optionally, the paragraph text is returned.
:type text: str
:param bold_italics_code: bold_italics_code: using ``'b'``: **bold**, ``'i'``: *italics* and
``'c'``: ``inline_code``..
:type bold_italics_code: str
:param color: Can change text color. For example: ``'red'``, ``'green'``, ``'orange'``...
:type color: str
:param align: Using this parameter you can align text.
:type align: str
:return: return a string ``'\\n' + text``. Not necessary to take it, if only has to be written to the
file.
:rtype: str
"""
if bold_italics_code or color != 'black' or align:
self.file_data_text += ' \n' + self.textUtils.text_format(text, bold_italics_code, color, align)
else:
self.file_data_text += ' \n' + text
return self.file_data_text
def write(self, text='', bold_italics_code='', color='black', align='', marker=''):
"""Write text in ``file_Data_text`` string.
:param text: a text a string.
:type text: str
:param bold_italics_code: bold_italics_code: using ``'b'``: **bold**, ``'i'``: *italics* and
``'c'``: ``inline_code``..
:type bold_italics_code: str
:param color: Can change text color. For example: ``'red'``, ``'green'``, ``'orange'``...
:type color: str
:param align: Using this parameter you can align text.
:type align: str
:param marker: allows to replace a marker on some point of the file by the text.
:type marker: str
"""
if bold_italics_code or color or align:
new_text = self.textUtils.text_format(text, bold_italics_code, color, align)
else:
new_text = text
if marker:
self.file_data_text = self.place_text_using_marker(new_text, marker)
else:
self.file_data_text += new_text
return new_text
def insert_code(self, code, language=''):
"""This method allows to insert a peace of code on a markdown file.
:param code: code string.
:type code: str
:param language: code language: python. c++, c#...
:type language: str
:return:
:rtype: str
"""
md_code = '\n\n' + self.textUtils.insert_code(code, language)
self.file_data_text += md_code
return md_code
def create_marker(self, text_marker):
"""This will add a marker to ``file_data_text`` and returns the marker result in order to be used whenever
you need.
Markers allows to place them to the string data text and they can be replaced by a peace of text using
``place_text_using_marker`` method.
:param text_marker: marker name.
:type text_marker: str
:return: return a marker of the following form: ``'##--[' + text_marker + ']--##'``
:rtype: str
"""
new_marker = '##--[' + text_marker + ']--##'
self.file_data_text += new_marker
return new_marker
def place_text_using_marker(self, text, marker):
"""It replace a previous marker created with ``create_marker`` with a text string.
This method is going to search for the ``marker`` argument, which has been created previously using
``create_marker`` method, in ``file_data_text`` string.
:param text: the new string that will replace the marker.
:type text: str
:param marker: the marker that has to be replaced.
:type marker: str
:return: return a new file_data_text with the replace marker.
:rtype: str
"""
return self.file_data_text.replace(marker, text)
|
"""
unittest for Astronomy
"""
import unittest
import Astronomy.formats
class testDatesTimes(unittest.TestCase):
    """Spot checks for the angle-format helpers in Astronomy.formats."""

    def test_dms_delimited_angle_to_rads(self):
        radians = Astronomy.formats.dms_delimited_angle_to_rads('''19d14'33.801860"''')
        self.assertEqual(radians, 0.33584886884199222)

    def test_hms_delimited_angle_to_rads(self):
        radians = Astronomy.formats.hms_delimited_angle_to_rads('00h01m08.621563s')
        self.assertEqual(radians, 0.0049903008842279899)

    def test_parse_dms_delimited_angle(self):
        parts = Astronomy.formats.parse_dms_delimited_angle('''19d14'33.801860"''')
        self.assertEqual(parts, ['19', '14', '33.801860'])

    def test_parse_hms_delimited_angle(self):
        parts = Astronomy.formats.parse_hms_delimited_angle('00h01m08.621563s')
        self.assertEqual(parts, ['00', '01', '08.621563'])


if __name__ == "__main__":
    unittest.main()
|
# %%
from enum import Enum, Flag, auto
from pandas.core.frame import DataFrame
class Band(Enum):
    """Photometric passband codes, matching the values stored in the
    light curve's ``ant_passband`` column."""
    R = 'R'
    G = 'g'
class AlertType(Flag):
    """Combinable alert/measurement categories: ``cand`` (candidate
    detection) plus ``ulim``/``llim`` (presumably upper/lower limits —
    TODO confirm).
    NOTE(review): extract_info() accepts an AlertType but never consults it.
    """
    cand = auto()
    ulim = auto()
    llim = auto()
# %%
# extract a band.
def extract_info(lightcurve: "DataFrame", band: "Band", alertType: "AlertType"):
    '''
    Extract the brightness time series of one passband from a light curve.

    :param lightcurve: light-curve table with at least the columns
        alert_id, ant_passband, ant_mjd, ant_mag and ant_magerr.
    :param band: passband whose ``value`` must equal ``ant_passband``.
    :param alertType: NOTE(review): currently unused — only rows whose
        alert_id starts with 'ztf_candidate' are kept regardless of this
        flag. Confirm whether ulim/llim rows should be selectable here.
    :return: DataFrame with columns ``mjd`` (shifted so the first epoch is
        0), ``mag`` and ``err``, re-indexed from 0.
    '''
    is_candidate = lightcurve['alert_id'].apply(
        lambda alert: alert.startswith('ztf_candidate'))
    in_band = lightcurve['ant_passband'].apply(
        lambda passband: passband == band.value)
    # .copy() so the in-place mjd shift below cannot trigger pandas'
    # chained-assignment (SettingWithCopyWarning) behavior.
    lc = lightcurve[is_candidate & in_band][
        ['ant_mjd', 'ant_mag', 'ant_magerr']
    ].copy()
    lc['ant_mjd'] -= lc['ant_mjd'].min()
    lc = lc.reset_index(drop=True)
    return lc.rename(columns={
        'ant_mjd': 'mjd',
        'ant_mag': 'mag',
        'ant_magerr': 'err'
    })
# %%
def get_date_range(lc):
    """Return the observed time span (max - min) of the 'ant_mjd' column."""
    mjd = lc['ant_mjd']
    return mjd.max() - mjd.min()
import numpy as np
from sklearn import model_selection, metrics
from .containers import Data
from .querystrategies import QueryStrategy, SimpleMargin
'''
>>> from sklearn.svm import NuSVC
>>> clf = NuSVC(nu= 0.46, probability=True)
>>> qs = LeastConfidence(model_change=False)
>>> learner = ActiveLearningModel(clf, qs)
>>> train_x, test_x, train_y, test_y = read_data(datafile)
>>> type(train_x)
<class 'numpy.ndarray'>
>>> scores = learner.run(train_x, test_x, train_y, test_y)
'''
class ActiveLearningModel(object):
    """Pool-based active learner wrapping a sklearn-style classifier.
    Maintains three pools: L (labeled), U (unlabeled) and T (test). Each
    iteration asks the query strategy for the most informative instance in U,
    moves it to L, refits on L, and records the AUC on T.
    """
    def __init__(self, classifier, query_strategy, U_proportion=0.9, random_state=None):
        '''
        :param sklearn.base.BaseEstimator classifier: Classifier to build the model.
        :param QueryStrategy query_strategy: QueryStrategy instance to use.
        :param float U_proportion: proportion of training data to be assigned
                                   the unlabeled set.
        :param int random_state: Sets the random_state parameter of train_test_split.
        '''
        self.__check_args(classifier, query_strategy, U_proportion)
        self.classifier = classifier
        self.query_strategy = query_strategy
        self.U_proportion = U_proportion
        self.random_state = random_state
        self.L = Data()  # Labeled data.
        self.U = Data()  # Unlabeled data.
        self.T = Data()  # Test data.
        self.classes = None  # Lazily discovered label set (see partial_train).
    def __check_args(self, classifier, query_strategy, U_proportion):
        """Validate constructor arguments; raise ValueError on bad input."""
        if not isinstance(query_strategy, QueryStrategy):
            raise ValueError("query_strategy must be an instance of QueryStrategy.")
        if not 0 < U_proportion < 1:
            raise ValueError("U_proportion must be in range (0,1) exclusive. Got {}."
                             .format(U_proportion))
        # SimpleMargin queries by distance to the decision boundary, so the
        # classifier must expose decision_function.
        if isinstance(query_strategy, SimpleMargin) and \
                not hasattr(classifier, "decision_function"):
            raise ValueError("{} compatible only with discriminative models."
                             .format(str(query_strategy)))
    def split_data(self, train_x, test_x, train_y, test_y):
        '''
        Splits data into unlabeled, labeled, and test sets
        according to self.U_proportion.
        :param np.array train_x: Training data features.
        :param np.array test_x: Test data features.
        :param np.array train_y: Training data labels.
        :param np.array test_y: Test data labels.
        '''
        U_size = int(np.ceil(self.U_proportion * train_x.shape[0]))
        if not 0 < U_size < train_x.shape[0]:
            raise ValueError("U_proportion must result in non-empty labeled and unlabeled sets.")
        if train_x.shape[0] - U_size <= 1:
            raise ValueError("U_proportion must result in a labeled set with > 1 members.")
        temp = model_selection.train_test_split(train_x, train_y,
                                                test_size=U_size,
                                                random_state=self.random_state)
        self.L.x, self.U.x, self.L.y, self.U.y = temp
        self.T.x = test_x
        self.T.y = test_y
    def update_labels(self):
        '''
        Gets the chosen index from the query strategy,
        adds the corresponding data point to L and removes
        it from U. Logs which instance is picked from U.
        :returns: chosen x and y, for use with partial_train()
        :rtype: tuple(numpy.ndarray, numpy.ndarray)
        '''
        index = self.query_strategy.query(self.U, self.L, self.classifier)
        chosen_x = self.U.x[index]
        chosen_y = np.array([self.U.y[index]])
        self.L.y = np.append(self.L.y, chosen_y, axis=0)
        self.L.x = np.vstack((self.L.x, chosen_x))
        self.U.x = np.delete(self.U.x, index, axis=0)
        self.U.y = np.delete(self.U.y, index, axis=0)
        return chosen_x.reshape(1, -1), chosen_y
    def train(self):
        '''
        Trains the classifier on L.
        '''
        self.classifier.fit(self.L.x, self.L.y)
    def partial_train(self, new_x, new_y):
        '''
        Given a subset of training examples, calls partial_fit.
        :param numpy.ndarray new_x: Feature array.
        :param numpy.ndarray new_y: Label array.
        '''
        # partial_fit needs the full label set up front; derive it once from U.
        if self.classes is None:
            self.classes = np.unique(self.U.y)
        self.classifier.partial_fit(new_x, new_y, classes=self.classes)
    def score(self):
        '''
        Computes Area Under the ROC Curve for the current classifier.
        :returns: AUC score.
        :rtype: float
        '''
        try:  # If the classifier is probabilistic.
            scores = self.classifier.predict_proba(self.T.x)[:, 1]
        except AttributeError:
            # Fall back to the margin for non-probabilistic models.
            scores = self.classifier.decision_function(self.T.x)
        auc = metrics.roc_auc_score(self.T.y, scores)
        return auc
    def _get_choice_order(self, ndraws):
        """Return the last ``ndraws`` members of L — i.e. the instances that
        were queried from U — in the order they were selected.
        :rtype: dict with keys 'x' and 'y'
        """
        mask = np.ones(self.L.y.shape, dtype=bool)
        L_0_index = self.L.y.shape[0] - ndraws
        mask[:L_0_index] = False
        choice_order = {'x': self.L.x[mask], 'y': self.L.y[mask]}
        return choice_order
    def run(self, train_x, test_x, train_y, test_y, ndraws=None):
        '''
        Run the active learning model. Saves AUC scores for
        each sampling iteration.
        :param np.array train_x: Training data features.
        :param np.array test_x: Test data features.
        :param np.array train_y: Training data labels.
        :param np.array test_y: Test data labels.
        :param int ndraws: Number of times to query the unlabeled set.
                           If None, query entire unlabeled set.
        :returns: AUC scores for each sampling iteration, plus the queried
                  instances in selection order ({'x': ..., 'y': ...}).
        :rtype: tuple(numpy.ndarray(shape=(ndraws, )), dict)
        '''
        # Populate L, U, and T
        self.split_data(train_x, test_x, train_y, test_y)
        if ndraws is None:
            ndraws = self.U.x.shape[0]
        scores = np.zeros(ndraws, dtype=np.float32)
        for i in range(ndraws):
            self.train()
            auc = self.score()
            scores[i] = auc
            x, y = self.update_labels()
        choice_order = self._get_choice_order(ndraws)
        return scores, choice_order
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection, models, migrations
import cityhallmonitor.models
def add_text_tsvector_index(apps, schema_editor):
    """Create a GIN index over MatterAttachment.text_vector for full-text search."""
    attachment_model = apps.get_model('cityhallmonitor', 'MatterAttachment')
    table = attachment_model._meta.db_table
    with connection.cursor() as cursor:
        statement = (
            "CREATE INDEX cityhallmonitor_matterattachment_text_vector_gin "
            "ON %s USING gin(text_vector)" % table
        )
        cursor.execute(statement)
class Migration(migrations.Migration):
    """Add full-text search support to MatterAttachment: a ``dc_id`` text
    field, the extracted ``text``, and a ``text_vector`` tsvector column
    whose GIN index is created by ``add_text_tsvector_index``."""
    dependencies = [
        ('cityhallmonitor', '0011_subscription_active'),
    ]
    operations = [
        migrations.AddField(
            model_name='matterattachment',
            name='dc_id',
            field=models.TextField(default='', blank=True),
        ),
        migrations.AddField(
            model_name='matterattachment',
            name='text',
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name='matterattachment',
            name='text_vector',
            field=cityhallmonitor.models.TsVectorField(null=True, editable=False, serialize=False),
        ),
        # Raw SQL step: AddField cannot express the GIN index itself.
        migrations.RunPython(add_text_tsvector_index),
    ]
|
# You are given two strings, str_1 and str_2, where str_2 is generated by randomly shuffling str_1 and then adding one letter at a random position.
# Write a function that returns the letter that was added to str_2.
# Examples:
# csFindAddedLetter(str_1 = "bcde", str_2 = "bcdef") -> "f"
# csFindAddedLetter(str_1 = "", str_2 = "z") -> "z"
# csFindAddedLetter(str_1 = "b", str_2 = "bb") -> "b"
# csFindAddedLetter(str_1 = "bf", str_2 = "bfb") -> "b"
# Notes:
# str_1 and str_2 both consist of only lowercase alpha characters.
# [execution time limit] 4 seconds (py3)
# [input] string str_1
# [input] string str_2
# [output] string
def csFindAddedLetter(str_1, str_2):
    """Return the single letter that was added to ``str_2``.

    ``str_2`` is a random shuffle of ``str_1`` plus exactly one extra
    letter, so the multiset difference Counter(str_2) - Counter(str_1)
    contains exactly that one character (with count 1).

    :param str_1: original string of lowercase letters (may be empty).
    :param str_2: shuffled copy of str_1 with one extra letter inserted.
    :return: the inserted letter.
    """
    # Local import keeps this self-contained snippet's header unchanged.
    from collections import Counter
    extra = Counter(str_2) - Counter(str_1)  # keeps only positive counts
    return next(iter(extra))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Created on May 15, 2019
@author: Robert BASOMINGERA
@Ajou University
This project is developed and tested with Python3.5 using pycharm on an Ubuntu 16.04 LTS machine
'''
from keras.applications.vgg19 import VGG19
# from keras.applications.vgg19 import decode_predictions
from keras.applications.vgg19 import preprocess_input
from sklearn.feature_selection import VarianceThreshold
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import Pipeline
from keras.preprocessing import image
from keras.models import Sequential
from keras.layers import Dense, Flatten, InputLayer
import numpy as np
import os
import pandas as pd
import random
import json
import keras
from sklearn.metrics import accuracy_score, log_loss, brier_score_loss, roc_auc_score
from keras.layers import Dropout
from keras.layers.convolutional import Conv1D
from keras.layers.convolutional import MaxPooling1D
# from PIL import Image
import matplotlib.pyplot as plt
DEBUG = False
JSON_LABEL_PATH = "./flowers.json"
# VGG19
vgg19_weights = './weights/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5'
vgg19_model = VGG19(weights=vgg19_weights, include_top=False)
def init():
    """No-op initialization hook, kept as a placeholder."""
def get_labels():
    """Return the numeric label id of every flower directory under ./data/."""
    # Listing ./data/ verifies the label directories actually exist on disk.
    return [labels_of_id[name]['id'] for name in os.listdir("./data/")]
def initialize_images_data(number_of_labels):
    """Split each label's image files into a training dict and a test set.

    Roughly a quarter of every label's images is sampled at random into the
    test set (and removed from the training list so the sets are disjoint).

    :param number_of_labels: how many entries of the global ``labels`` to use.
    :return: (train_files_by_label, test_files_by_label, expected_test_labels)
    """
    this_data = {}
    this_test_images = {}
    this_expected_labels = []
    for i in range(number_of_labels):
        this_data[labels[i]] = os.listdir("./data/" + id_of_labels[labels[i]] + "/")
        this_test_images[labels[i]] = []
        # Hold out len//4 randomly chosen images of this label for testing.
        for j in range(len(this_data[labels[i]]) // 4):
            # print(labels[i], len(data[labels[i]]))
            temp_flower = random.randint(0, len(this_data[labels[i]]) - 1)
            this_expected_labels.append(labels[i])
            this_test_images[labels[i]].append(this_data[labels[i]][temp_flower])
            # remove images added to the test images
            del this_data[labels[i]][temp_flower]
    return this_data, this_test_images, this_expected_labels
def get_json_label_id():
    """Load the flower-name -> metadata mapping from JSON_LABEL_PATH."""
    with open(JSON_LABEL_PATH) as label_file:
        return json.load(label_file)
def id_to_label_dict():
    """Invert the JSON label mapping: numeric id -> flower name."""
    json_label = get_json_label_id()
    return {json_label[name]['id']: name for name in json_label}
def load_image(img_path):
    """Load an image resized to VGG19's 224x224 input and return it as a
    preprocessed batch of one."""
    if DEBUG:
        print("Opening image:", img_path)
    pil_img = image.load_img(img_path, target_size=(224, 224))
    batch = np.expand_dims(image.img_to_array(pil_img), axis=0)
    return preprocess_input(batch)
def get_features(img_path, this_model):
    """Run this_model on the image at img_path and return the raw feature tensor."""
    return this_model.predict(load_image(img_path))
def extract_features():
    """Compute flattened VGG19 feature vectors for every training image,
    grouped by label (uses the module-level data/labels/vgg19_model)."""
    feats_by_label = {labels[idx]: [] for idx in range(nber_labels)}
    for label, flowers in data.items():
        base = "./data/" + id_of_labels[label]
        for flower in flowers:
            raw = get_features(base + "/" + flower, vgg19_model)
            # Flatten so each image contributes a single 1-D feature vector.
            feats_by_label[label].append(raw.flatten())
    return feats_by_label
init()
# Global lookup tables: numeric id -> flower name, and name -> metadata.
id_of_labels = id_to_label_dict()
labels_of_id = get_json_label_id()
labels = get_labels()
nber_labels = len(labels)
# initialize a dict of labels and the data set (train/test split of image files)
data, test_images, expected_labels = initialize_images_data(nber_labels)
features = extract_features()
# Convert the per-label feature dict into parallel training lists:
# trainY[i] is the (wrapped) label of the feature vector trainX[i].
trainY = list()  # labels
trainX = list()  # flattened VGG19 feature vectors
for label, feats in features.items():
    for i in range(len(feats)):
        trainY.append([label])
        # BUG FIX: the original appended the whole per-label list ``feats``
        # once per sample; each sample must contribute its own vector.
        trainX.append(feats[i])
# Compute feature vectors (and remember file paths) for the held-out images.
test_features = []
test_images_paths = []
for test_label, test_flowers in test_images.items():
    base = "./data/" + id_of_labels[test_label]
    for test_flower in test_flowers:
        flower_path = base + "/" + test_flower
        test_images_paths.append(flower_path)
        test_features.append(get_features(flower_path, vgg19_model).flatten())
# Build a 1-D convolutional classifier on top of the VGG19 features.
classifier = Sequential()
layer_stack = [
    Conv1D(filters=64, kernel_size=2, activation='relu', name='conv1D1'),
    Conv1D(filters=64, kernel_size=3, activation='relu', name='conv2'),
    Dropout(0.5),
    MaxPooling1D(pool_size=2),
    Dense(128, name='ds0', activation='relu'),
    Dropout(0.3),
    Dense(100, activation='relu', name='ds1'),
    Dropout(0.3),
    Dense(100, activation='softmax', name='ds2'),
    Flatten(),
    Dense(max(labels) + 1, name='ds3', activation='softmax'),
]
for layer in layer_stack:
    classifier.add(layer)
classifier.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
classifier.fit(trainX, trainY)
score = classifier.evaluate(test_features, expected_labels)
print(score)
# Wrap the fitted network in a sklearn pipeline behind a low-variance filter.
pipeline = Pipeline([('low_variance_filter', VarianceThreshold()), ('model', classifier)])
pipeline.fit(trainX, trainY)
predicted_labels = pipeline.predict(test_features)
print(predicted_labels)
# Detection / evaluation of the pipeline's predictions on the held-out images.
# BUG FIX: roc_auc_score returns a single float; the original unpacked it into
# (fpr, tpr, thresholds), which is roc_curve's return signature and raises a
# TypeError here. Keep the AUC as a scalar instead.
# NOTE(review): roc_auc_score expects scores/probabilities (and multi_class=...
# for more than two classes) — confirm this call matches the label encoding.
the_auc = roc_auc_score(expected_labels, predicted_labels)
the_score = accuracy_score(expected_labels, predicted_labels)
print("Accuracy: ", the_score * 100, "%")
f, ax = plt.subplots(1, len(test_images_paths))
for z in range(len(test_images_paths)):
    print(test_images_paths[z], predicted_labels[z])
    # ax[z].imshow(...) intentionally omitted: image display requires PIL.
# plt.show()
|
from django.shortcuts import render
from rest_framework import viewsets
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import detail_route
from rest_framework import status
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import Http404
from rest_framework import status
from cicerotwebapp import models
from django.contrib import auth
from cicerotapi import serializers as serial
from oauth2_provider.views.generic import ProtectedResourceView
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from rest_framework import permissions, routers, serializers, viewsets
from oauth2_provider.ext.rest_framework import TokenHasReadWriteScope, TokenHasScope
from django.db.models import Q
from django.contrib.auth.views import password_reset, password_reset_confirm
from fcm_django.models import FCMDevice
from django.http import Http404, HttpResponse
from django.core.mail import EmailMessage
from templated_email import send_templated_mail, get_templated_mail
from django.template.loader import render_to_string, get_template
from rest_framework.decorators import api_view, permission_classes
# Public, generic CRUD services: one ModelViewSet (list/retrieve/create/
# update/destroy) per domain model, each paired with its serializer.
class RegionViewSet(viewsets.ModelViewSet):
    queryset = models.Region.objects.all()
    serializer_class = serial.RegionSerializers
class ProvinciaViewSet(viewsets.ModelViewSet):
    queryset = models.Provincia.objects.all()
    serializer_class = serial.ProvinciaSerializers
class ComunaViewSet(viewsets.ModelViewSet):
    queryset = models.Comuna.objects.all()
    serializer_class = serial.ComunaSerializers
class TipoTagViewSet(viewsets.ModelViewSet):
    queryset = models.TipoTag.objects.all()
    serializer_class = serial.TipoTagSerializers
class TagViewSet(viewsets.ModelViewSet):
    queryset = models.Tag.objects.all()
    serializer_class = serial.TagSerializers
class EstadoTourViewSet(viewsets.ModelViewSet):
    queryset = models.EstadoTour.objects.all()
    serializer_class = serial.EstadoTourSerializers
class TipoTourViewSet(viewsets.ModelViewSet):
    queryset = models.TipoTour.objects.all()
    serializer_class = serial.TipoTourSerializers
class TipoGuiaViewSet(viewsets.ModelViewSet):
    queryset = models.TipoGuia.objects.all()
    serializer_class = serial.TipoGuiaSerializers
class RegistroGuiaViewSet(viewsets.ModelViewSet):
    queryset = models.RegistroGuia.objects.all()
    serializer_class = serial.RegistroGuiaSerializers
class GuiaViewSet(viewsets.ModelViewSet):
    queryset = models.Guia.objects.all()
    serializer_class = serial.GuiaSerializers
class TourViewSet(viewsets.ModelViewSet):
    queryset = models.Tour.objects.all()
    serializer_class = serial.TourSerializers
class HorarioViewSet(viewsets.ModelViewSet):
    queryset = models.Horario.objects.all()
    serializer_class = serial.HorarioSerializers
class PaisViewSet(viewsets.ModelViewSet):
    queryset = models.Pais.objects.all()
    serializer_class = serial.PaisSerializers
class TuristaViewSet(viewsets.ModelViewSet):
    queryset = models.Turista.objects.all()
    serializer_class = serial.TuristaSerializers
class ActividadViewSet(viewsets.ModelViewSet):
    queryset = models.Actividad.objects.all()
    serializer_class = serial.ActividadSerializers
class RolViewSet(viewsets.ModelViewSet):
    queryset = models.Rol.objects.all()
    serializer_class = serial.RolSerializers
class TextoSelectViewSet(viewsets.ModelViewSet):
    queryset = models.TextoSelect.objects.all()
    serializer_class = serial.TextoSelectSerializers
class EvaluacionViewSet(viewsets.ModelViewSet):
    queryset = models.Evaluacion.objects.all()
    serializer_class = serial.EvaluacionSerializers
class TipoMultimediaViewSet(viewsets.ModelViewSet):
    queryset = models.TipoMultimedia.objects.all()
    serializer_class = serial.TipoMultimediaSerializers
class MultimediaViewSet(viewsets.ModelViewSet):
    queryset = models.Multimedia.objects.all()
    serializer_class = serial.MultimediaSerializers
class StaffViewSet(viewsets.ModelViewSet):
    queryset = models.Staff.objects.all()
    serializer_class = serial.StaffSerializers
class FavoritoViewSet(viewsets.ModelViewSet):
    queryset = models.Favorito.objects.all()
    serializer_class = serial.FavoritoSerializers
class SuscripcionViewSet(viewsets.ModelViewSet):
    queryset = models.Suscripcion.objects.all()
    serializer_class = serial.SuscripcionSerializers
class TransaccionViewSet(viewsets.ModelViewSet):
    queryset = models.Transaccion.objects.all()
    serializer_class = serial.TransaccionSerializers
class InstanciaTourViewSet(viewsets.ModelViewSet):
    queryset = models.InstanciaTour.objects.all()
    serializer_class = serial.InstanciaTourSerializers
class InscripcionViewSet(viewsets.ModelViewSet):
    queryset = models.Inscripcion.objects.all()
    serializer_class = serial.InscripcionSerializers
class TipoServicioViewSet(viewsets.ModelViewSet):
    queryset = models.TipoServicio.objects.all()
    serializer_class = serial.TipoServicioSerializers
class ServicioTourViewSet(viewsets.ModelViewSet):
    queryset = models.ServicioTour.objects.all()
    serializer_class = serial.ServicioTourSerializers
class ComentarioViewSet(viewsets.ModelViewSet):
    queryset = models.Comentario.objects.all()
    serializer_class = serial.ComentarioSerializers
# FCM push-notification device registrations (fcm_django model).
class FCMDeviceViewSet(viewsets.ModelViewSet):
    serializer_class = serial.FCMDeviceSerializers
    queryset = FCMDevice.objects.all()
|
from SOC.models import BTW
import numpy as np
import pytest
def test_boundary_shape():
    """A lattice of linear size 10 gains a one-cell boundary on every side."""
    model = BTW(10)
    padded = 10 + 2  # interior plus one boundary cell per edge
    assert model.values.shape == (padded, padded)
    assert model.L_with_boundary == padded
def test_run():
    """Smoke test: driving the model for 10 steps completes without raising."""
    model = BTW(10)
    model.run(10)
def test_deterministic_result():
    """One extra grain on a uniform lattice of 3s topples to a known pattern."""
    model = BTW(5, save_every=1)
    # Fill the whole padded lattice with 3, then drop a single grain at the
    # centre to trigger an avalanche.
    model.values[...] = 3
    model.values[3, 3] += 1
    model.topple_dissipate()
    expected = np.array([
        [3, 4, 4, 4, 4, 4, 3],
        [4, 1, 3, 3, 3, 1, 4],
        [4, 3, 1, 3, 1, 3, 4],
        [4, 3, 3, 0, 3, 3, 4],
        [4, 3, 1, 3, 1, 3, 4],
        [4, 1, 3, 3, 3, 1, 4],
        [3, 4, 4, 4, 4, 4, 3],
    ])
    np.testing.assert_allclose(model.values, expected)
|
from DateTime import DateTime
class Flight:
    """A scheduled flight with a mutable seat inventory.

    ``departure_time`` is expected to expose ``Get_string()`` and
    ``Get_printable_string()`` (presumably the project's DateTime helper —
    TODO confirm against callers).
    """

    def __init__(self, id, source, destination, departure_time, airfare, number_of_seats_available):
        self.iD = id
        self.source = source
        self.destination = destination
        self.departure_time = departure_time
        self.airfare = airfare
        self.number_of_seats_available = number_of_seats_available
        # Cached compact serialization; rebuilt on demand by Get_string().
        self.stringValue = ''

    def UpdateStringValue(self):
        """Rebuild the compact serialized form cached in ``stringValue``."""
        self.stringValue = (
            str(self.iD)
            + self.source
            + self.destination
            + self.departure_time.Get_string()
            + str(self.airfare)
            + str(self.number_of_seats_available)
        )

    def Get_string(self):
        """Return the compact serialized form, refreshed on every call."""
        self.UpdateStringValue()
        return self.stringValue

    def Get_printable_string(self):
        """Return a human-readable multi-line description of the flight."""
        result = '----------------------------------------------------------------------------------------------------\n'
        result += f'Flight {self.iD} from {self.source} to {self.destination} will be departing on {self.departure_time.Get_printable_string()}\n'
        result += f'Airfare: {self.airfare}\n'
        result += f'Number of seats available: {self.number_of_seats_available}\n'
        return result

    def CheckSeatAvailabilityReservation(self, n_of_seats_to_reserve):
        """Return True when the requested number of seats can be reserved."""
        return n_of_seats_to_reserve <= self.number_of_seats_available

    def UpdateAvailabilityReservation(self, n_of_seats_to_reserve):
        """Deduct the reserved seats from the available count.

        No validation is performed here — callers are expected to check
        availability first, otherwise the count can go negative.
        """
        self.number_of_seats_available -= n_of_seats_to_reserve
from amuse.community import *
from amuse.test.amusetest import TestWithMPI
from .interface import vaderInterface
from .interface import vader
class vaderInterfaceTests(TestWithMPI):
    """Smoke tests for the vader viscous-disk community-code interface."""

    def test1(self):
        """The code starts, builds a Keplerian grid, and shuts down cleanly."""
        disk = vader()
        disk.initialize_code()
        # 128-cell grid spanning 0.1-10 AU around a 1 MSun central object
        # (boolean flag semantics per the vader interface — TODO confirm).
        disk.initialize_keplerian_grid(128, True, 0.1 | units.AU, 10. | units.AU, 1. | units.MSun)
        disk.stop()
|
import cv2
import numpy as np

# Path to the input image.
img_path = r"C:\Users\evan\Documents\GitHub\OCR-Project\sample.jpg"

# Read the image from disk.
img_raw = cv2.imread(img_path)

# Let the user draw one or more regions of interest interactively.
# selectROIs returns an array of boxes, each as (x, y, width, height).
ROIs = cv2.selectROIs("Select Rois", img_raw)

# Print the rectangles of the selected ROIs.
print(ROIs)

# Crop each selected ROI from the raw image and save every crop under a
# distinct file name ("crop0.jpeg", "crop1.jpeg", ...).
for crop_number, rect in enumerate(ROIs):
    x, y, w, h = rect
    # Crop the ROI from the original image: rows span the y range,
    # columns span the x range.
    img_crop = img_raw[y:y + h, x:x + w]
    # Show the cropped image in its own window.
    cv2.imshow("crop" + str(crop_number), img_crop)
    # Save the cropped image.
    cv2.imwrite("crop" + str(crop_number) + ".jpeg", img_crop)

# Hold the windows open until a key is pressed, then clean up.
cv2.waitKey(0)
cv2.destroyAllWindows()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.