content
stringlengths 5
1.05M
|
|---|
#!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
    """Application factory: construct and return the Flask application."""
    application = Flask(__name__)
    return application
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "myDatabase"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
QUERIES_FILENAME = 'queries'
@app.route("/")
def home():
    """Render the index page listing every saved query from the queries file."""
    with open(QUERIES_FILENAME, 'r') as queries_file:
        entries = json.load(queries_file)
        rows = [(entry["name"],
                 entry["database"],
                 entry["description"],
                 entry["query"])
                for entry in entries]
    return render_template('file.html', results=rows)
@app.route("/mongo")
def mongo():
    """Execute a raw MongoDB expression taken from the `query` URL parameter."""
    # e.g. /mongo?query=collection.find({}) evaluates mongodb.collection.find({}).
    query = request.args.get("query")
    # SECURITY: eval() of request input is arbitrary code execution.
    # Tolerable only as a local single-user teaching tool; never expose publicly.
    results = eval('mongodb.'+query)
    results = json_util.dumps(results, sort_keys=True, indent=4)
    # Render results only for read ("find") queries; writes just acknowledge.
    if "find" in query:
        return render_template('mongo.html', results=results)
    else:
        return "ok"
@app.route("/postgres")
def postgres():
    """Execute a raw SQL statement taken from the `query` URL parameter."""
    query = request.args.get("query")
    # SECURITY: executing request input verbatim is SQL injection by design.
    # Tolerable only as a local single-user teaching tool; never deploy this.
    # NOTE(review): `postgresdb` is only defined when the commented-out
    # psycopg2 section near the top of the file is enabled; as shipped this
    # route raises NameError.
    cursor = postgresdb.cursor()
    cursor.execute(query)
    # Materialize each row as a plain list for the template.
    results = [[a for a in result] for result in cursor]
    print(results)
    return render_template('postgres.html', results=results)
@app.route("/example")
def example():
return render_template('example.html')
if __name__ == "__main__":
app.run()
|
import time
import pytube
from .log import config_logger
from .step import Step
from pytube import YouTube
from multiprocessing import Process
from yt_concate.settings import VIDEOS_DIR
from threading import Thread
class DownloadVideos(Step):
    """Pipeline step: download every YouTube video referenced by `data`.

    `data` is an iterable of found-objects, each carrying a `.yt` attribute
    (presumably with `.url` and `.id` -- confirm against upstream steps).
    """

    _NUM_THREADS = 4  # worker thread count; downloads are I/O bound

    def process(self, data, inputs, utils):
        """Download all referenced videos concurrently and return `data`.

        Bug fixed: the original passed ``target=self.downloadvideos(data, utils)``,
        which CALLS the function immediately and hands ``Thread`` its return
        value (None) -- so the work ran serially, four times over. We now pass
        the callable plus args, and shard the de-duplicated video set across
        threads so each video is fetched exactly once.
        """
        logging = config_logger()
        start = time.time()
        yt_items = list({found.yt for found in data})
        # Round-robin shard so no two threads fetch the same video.
        shards = [yt_items[i::self._NUM_THREADS] for i in range(self._NUM_THREADS)]
        threads = [Thread(target=self._download_shard, args=(shard, utils))
                   for shard in shards if shard]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        end = time.time()
        logging.debug(f'總共費時{end-start}')
        return data

    @staticmethod
    def _download_shard(yt_items, utils):
        """Download each video in `yt_items`, skipping files already on disk."""
        logging = config_logger()
        logging.info('videos to download={}'.format(len(yt_items)))
        for yt in yt_items:
            url = yt.url
            if utils.video_file_exists(yt):
                logging.info(f'found existing video file for {url}, skipping')
                continue
            try:
                # Bug fixed: `logging.info('downloading', url)` passed `url` as a
                # %-format argument with no placeholder, so it never appeared.
                logging.info('downloading %s', url)
                YouTube(url).streams.first().download(output_path=VIDEOS_DIR, filename=yt.id)
            except pytube.exceptions.RegexMatchError:
                logging.warning('downloading error {}'.format(url))

    @staticmethod
    def downloadvideos(data, utils):
        """Backward-compatible entry point: download every unique video in `data`."""
        DownloadVideos._download_shard(list({found.yt for found in data}), utils)
|
# Generated by Django 3.1.3 on 2020-11-08 05:52
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Tighten user.phone_number: positive big integer with a minimum value of 100."""

    dependencies = [
        ("user", "0004_auto_20201108_0004"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="phone_number",
            # PositiveBigIntegerField + MinValueValidator(100) rejects
            # negatives and values below 100 at form/model validation time.
            field=models.PositiveBigIntegerField(
                validators=[django.core.validators.MinValueValidator(100)],
                verbose_name="Phone Number",
            ),
        ),
    ]
|
import torch
import warnings
from torch.nn import Parameter
class WeightDrop(torch.nn.Module):
    """Apply DropConnect-style dropout to named weight matrices of a module.

    Each parameter listed in `weights` is re-registered on this wrapper as
    `<name>_raw`; on every forward pass a freshly masked copy is written back
    into the wrapped module under the original name, so one dropout mask is
    shared by the whole forward call.
    """

    def __init__(self, module, weights, dropout=0):
        """
        Args:
            module: wrapped torch module (e.g. an LSTM or Linear).
            weights: names of parameters on `module` to apply dropout to.
                NOTE(review): this list is mutated in place for bidirectional
                modules -- callers sharing the list will observe the change.
            dropout: dropout probability for the weight mask.
        """
        super(WeightDrop, self).__init__()
        self.module = module
        self.weights = weights
        # Bidirectional RNNs name reverse-direction weights with a
        # `_reverse` suffix; include them automatically.
        if hasattr(module, "bidirectional") and module.bidirectional:
            self.weights.extend(
                [weight + "_reverse" for weight in self.weights])
        self.dropout = dropout
        # Move each target parameter to `<name>_raw` on this wrapper; the
        # original slot is overwritten with a masked copy in _setweights().
        for name_w in self.weights:
            w = getattr(self.module, name_w)
            self.register_parameter(name_w + '_raw', Parameter(w.data))

    def _setweights(self):
        """Rebuild each dropped weight from its `_raw` parameter."""
        for name_w in self.weights:
            raw_w = getattr(self, name_w + '_raw')
            w = None
            # Mask has shape (1, cols) and is broadcast down the rows by
            # expand_as, so an entire column is kept or dropped together.
            mask = torch.ones(1, raw_w.size(1))
            if raw_w.is_cuda: mask = mask.cuda()
            mask = torch.nn.functional.dropout(
                mask, p=self.dropout, training=self.training)
            w = mask.expand_as(raw_w) * raw_w
            # NOTE(review): writes a plain tensor (not a Parameter) into
            # _parameters; accepted by the torch versions this targets --
            # confirm on current torch releases.
            self.module._parameters[name_w] = w

    def forward(self, *args):
        """Refresh the dropped weights, then delegate to the wrapped module."""
        self._setweights()
        with warnings.catch_warnings():
            # Ignore lack of flattening warning
            warnings.simplefilter("ignore")
            return self.module.forward(*args)
if __name__ == '__main__':
# Input is (seq, batch, input)
x = torch.autograd.Variable(torch.randn(2, 1, 10)).cuda()
h0 = None
###
print('Testing WeightDrop')
print('=-=-=-=-=-=-=-=-=-=')
###
print('Testing WeightDrop with Linear')
lin = WeightDrop(torch.nn.Linear(10, 10), ['weight'], dropout=0.9)
lin.cuda()
run1 = [x.sum() for x in lin(x).data]
run2 = [x.sum() for x in lin(x).data]
print('All items should be different')
print('Run 1:', run1)
print('Run 2:', run2)
assert run1[0] != run2[0]
assert run1[1] != run2[1]
print('---')
###
print('Testing WeightDrop with LSTM')
wdrnn = WeightDrop(
torch.nn.LSTM(10, 10, bidirectional=False), ['weight_hh_l0'],
dropout=0.9)
wdrnn.cuda()
run1 = [x.sum() for x in wdrnn(x, h0)[0].data]
run2 = [x.sum() for x in wdrnn(x, h0)[0].data]
# This is not true in bidirectional rnns or if batch_first
print('First timesteps should be equal, all others should differ')
print('Run 1:', run1)
print('Run 2:', run2)
# First time step, not influenced by hidden to hidden weights, should be equal
assert run1[0] == run2[0]
# Second step should not
assert run1[1] != run2[1]
print('---')
|
class Constants:
    """String constants for the Kai SDK WebSocket protocol.

    Values are the literal JSON keys / message-type strings exchanged with
    the Kai WebSocket server; names must stay stable for external callers.
    """

    # NOTE(review): 'WebSocker' typo kept -- the attribute name is public API.
    WebSockerURL = "ws://localhost:2203"
    Success = "success"
    Type = "type"
    SetCapabilities = "setCapabilities"
    ErrorCode = "errorCode"
    Error = "error"
    Message = "message"
    Gesture = "gesture"
    GestureData = "gestureData"
    FingerShortcutData = "fingerShortcutData"
    Fingers = "fingers"
    FingerPositionalData = "fingerPositionalData"
    PYRData = "pyrData"
    QuaternionData = "quaternionData"
    W = "w"
    X = "x"
    Y = "y"
    Z = "z"
    Quaternion = "quaternion"
    Accelerometer = "accelerometer"
    Gyroscope = "gyroscope"
    ModuleSecret = "moduleSecret"
    Data = "data"
    IncomingData = "incomingData"
    Authentication = "authentication"
    Authenticated = "authenticated"
    KaiID = "kaiId"
    ForegroundProcess = "foregroundProcess"
    ModuleId = "moduleId"
    ListConnectedKais = "listConnectedKais"
    KaiConnected = "kaiConnected"
    Kais = "kais"
    Hand = "hand"
    Default = "default"
    DefaultKai = "defaultKai"
    DefaultLeftKai = "defaultLeftKai"
    DefaultLeft = "defaultLeft"
    DefaultRightKai = "defaultRightKai"
    DefaultRight = "defaultRight"
    Yaw = "yaw"
    Pitch = "pitch"
    Roll = "roll"
    LinearFlickData = "linearFlickData"
    Flick = "flick"
    AccelerometerData = "accelerometerData"
    GyroscopeData = "gyroscopeData"
    MagnetometerData = "magnetometerData"
    Magnetometer = "magnetometer"
    KaiSerialNumber = "kaiSerialNumber"
    # Fixed: stray trailing semicolon removed (inconsistent with the rest).
    GetSDKVersion = "getSDKVersion"
|
from __future__ import absolute_import
import re
import unittest
from sqlbuilder.mini import P, Q, compile
__all__ = ('TestMini', 'TestMiniQ')
class TestCase(unittest.TestCase):
maxDiff = None
class TestMini(TestCase):
def test_mini(self):
sql = [
'SELECT', [
'author.id', 'author.first_name', 'author.last_name'
],
'FROM', [
'author', 'INNER JOIN', ['book as b', 'ON', 'b.author_id = author.id']
],
'WHERE', [
'b.status', '==', P('new')
],
'ORDER BY', [
'author.first_name', 'author.last_name'
]
]
# Let change query
sql[sql.index('SELECT') + 1].append('author.age')
self.assertEqual(
compile(sql),
('SELECT author.id, author.first_name, author.last_name, author.age FROM author INNER JOIN book as b ON b.author_id = author.id WHERE b.status == %s ORDER BY author.first_name, author.last_name', ['new'])
)
def test_mini_precompiled(self):
sql = [
'SELECT', [
'author.id', 'author.first_name', 'author.last_name'
],
'FROM', [
'author', 'INNER JOIN', ['book as b', 'ON', 'b.author_id = author.id']
],
'WHERE', [
'b.status == %(status)s'
],
'ORDER BY', [
'author.first_name', 'author.last_name'
]
]
# Let change query
sql[sql.index('SELECT') + 1].append('author.age')
sql_str = compile(sql)[0]
self.assertEqual(
(sql_str, {'status': 'new'}),
('SELECT author.id, author.first_name, author.last_name, author.age FROM author INNER JOIN book as b ON b.author_id = author.id WHERE b.status == %(status)s ORDER BY author.first_name, author.last_name', {'status': 'new'})
)
class TestMiniQ(TestCase):
def setUp(self):
self._sql = [
'SELECT', [
'author.id', 'author.first_name', 'author.last_name'
],
'FROM', [
'author', 'INNER JOIN', [
'(', 'SELECT', [
'book.title'
],
'FROM', [
'book'
],
')', 'AS b', 'ON', 'b.author_id = author.id'
],
],
'WHERE', [
'b.status', '==', P('new')
],
'ORDER BY', [
'author.first_name', 'author.last_name'
]
]
def test_mini_q(self):
sql = Q(self._sql)
sql.prepend_child(
['FROM', 'INNER JOIN', 'SELECT'],
['book.id', 'book.pages']
)
sql.append_child(
['FROM', 'INNER JOIN', 'SELECT'],
['book.date']
)
sql.insert_after(
['FROM', 'INNER JOIN', (list, 1), ],
['WHERE', ['b.pages', '>', P(100)]]
)
sql.insert_before(
['FROM', 'INNER JOIN', 'WHERE', 'b.pages'],
['b.pages', '<', P(500), 'AND']
)
sql.append_child(
['FROM', 'INNER JOIN', (lambda i, item, collection: item == 'SELECT')],
['book.added_by_callable']
)
sql.append_child(
['FROM', 'INNER JOIN', ('SELECT', 0)],
['book.added_by_tuple']
)
sql.append_child(
['FROM', enumerate, 'SELECT'],
['book.added_by_each']
)
sql.append_child(
['FROM', 'INNER JOIN', 1],
['book.added_by_index']
)
sql.append_child(
['FROM', 'INNER JOIN', re.compile("^SELECT$")],
['book.added_by_re']
)
self.assertEqual(
compile(sql),
('SELECT author.id, author.first_name, author.last_name FROM author INNER JOIN ( SELECT book.id, book.pages, book.title, book.date, book.added_by_callable, book.added_by_tuple, book.added_by_each, book.added_by_index, book.added_by_re FROM book WHERE b.pages < %s AND b.pages > %s ) AS b ON b.author_id = author.id WHERE b.status == %s ORDER BY author.first_name, author.last_name', [500, 100, 'new'])
)
|
def add(lst):
    """Given a non-empty list of integers lst. add the even elements that are at odd indices..
    Examples:
    add([4, 2, 6, 7]) ==> 2
    Example solution:
    # line 1
    even_sum = 0
    # line 2
    for i in range(len(lst)):
    # line 3
        if i % 2 == 0 and lst[i] % 2 == 0:
    # line 4
            even_sum += lst[i]
    # line 5
    return even_sum
    """
    # Please print out which line of the above program contains an error. E.g. if the bug is on line 4 then print 4
    # END OF CONTEXT
    # Line 3 tests `i % 2 == 0` (EVEN indices) while the spec asks for ODD
    # indices, so the defect is on line 3.
    print("3")
    # END OF SOLUTION
# END OF SOLUTION
def check(candidate):
    """Assert that `candidate([])` prints exactly "3" and nothing else."""
    import io
    from contextlib import redirect_stdout
    # Capture everything the candidate prints.
    f = io.StringIO()
    with redirect_stdout(f):
        candidate([])
    out = f.getvalue().strip('\n')
    assert "3" == out
    # Redundant belt-and-braces: confirm the output equals no other digit.
    for i in range(0, 10):
        if i != 3:
            assert str(i) != out
if __name__ == '__main__':
check(add)
|
# Compare the fractions A/B and C/D and print the winner (or DRAW).
A, B, C, D = map(int, input().split())
# Bug fixed: the original compared `A / B == C / D` with float division,
# which can misjudge equality/order for large integers due to rounding.
# Cross-multiplication compares exactly in integer arithmetic.
# Assumes B and D are positive (typical contest constraints) so the
# inequality direction is preserved -- TODO confirm against the problem statement.
lhs = A * D
rhs = C * B
if lhs == rhs:
    print("DRAW")
elif lhs < rhs:
    print("TAKAHASHI")
else:
    print("AOKI")
|
import os
from PyQt4.QtCore import QSettings
import distutils
from distutils import util
#https://github.com/qgis/QGIS/blob/f38856e7381519431f828cc890bc8b33a8f2a544/src/gui/qgsmaptoolidentify.cpp#L413-L427
#min area in canvas units <- selected display projection
#e.g. WebMerc 100km2 = 100000000m2
min_area= 100000000
sel_features = []
lyr = iface.activeLayer()
if lyr is None:
raise Exception('select layer in TOC')
print 'min_area:', min_area
print 'layer:', lyr.name()
displayAreaUnits = QgsUnitTypes.distanceToAreaUnit(iface.mapCanvas().mapUnits())
print 'displayAreaUnits:', displayAreaUnits
settings = QSettings()
baseUnit = bool(distutils.util.strtobool(settings.value( "/qgis/measure/keepbaseunit", False )))
print 'baseUnit:', baseUnit
ellipsoid = QgsProject.instance().readEntry( "Measure", "/Ellipsoid", GEO_NONE )
print 'ellipsoid:', ellipsoid
calc = QgsDistanceArea()
calc.setEllipsoidalMode(iface.mapCanvas().hasCrsTransformEnabled())
calc.setEllipsoid( ellipsoid[0] )
src_srs = lyr.crs().srsid()
print 'src_srs:', src_srs
calc.setSourceCrs(src_srs)
#calc.setSourceCrs(3857)
#feats = lyr.selectedFeatures() if lyr.selectedFeatureCount() > 0 else lyr.getFeatures()
feats = lyr.getFeatures()
for feat in feats:
area = calc.measureArea(feat.geometry())
#print feat.id(), ':', area
area = calc.convertAreaMeasurement(area, displayAreaUnits)
#print feat.id(), ':', area
if area >= min_area:
sel_features.append(feat.id())
#print as readable string
#area = QgsDistanceArea.formatArea(area, 3, displayAreaUnits, baseUnit)
#print feat.id(), ':', area
lyr.setSelectedFeatures(sel_features)
print 'feats to select:', len(sel_features)
print 'selectedFeatureCount():', lyr.selectedFeatureCount()
|
"""
Author: shikechen
Function: Use requests lib to get AQI data
Version: 5.1
Date: 2019/5/13
"""
import requests
def get_html_text(url):
    """Fetch `url` over HTTP and return the response body as text (30 s timeout)."""
    response = requests.get(url, timeout=30)
    return response.text
def main():
    """Prompt for a city's pinyin name and print its AQI scraped from pm25.in."""
    city_pinyin = input('Input city\'s pinyin:')
    url = 'http://pm25.in/' + city_pinyin
    url_text = get_html_text(url)
    # Markup anchor that immediately precedes the AQI number on the page.
    # NOTE(review): whitespace-sensitive screen scraping -- any markup change
    # on pm25.in breaks this (find() returns -1 and the slice yields garbage).
    aqi_div = '''<div class="span12 data">
<div class="span1">
<div class="value">
'''
    index = url_text.find(aqi_div)
    begin_index = index + len(aqi_div)
    # Assumes the AQI value is exactly two characters -- TODO confirm for 3-digit AQI.
    end_index = begin_index + 2
    aqi_val = url_text[begin_index: end_index]
    print('AQI value is {}'.format(aqi_val))
|
# Entry-point script: build model G from the scraped items file and run it.
from model import G

data_path = 'items.json'  # input dataset consumed by G -- presumably scraper output; confirm
model = G(data_path)
model.main()
|
import pytest
from predictionserver.app.attributeapp_autogen import create_attribute_app
from predictionserver.app.localapp import create_local_app_process, kill_local_app_process
@pytest.fixture
def attribute_client():
    """Yield a test client for the attribute app backed by a live local process.

    Code after `yield` is pytest fixture teardown: the spawned app process is
    killed after each test that used the fixture, pass or fail.
    """
    app = create_attribute_app()
    process = create_local_app_process(app=app)
    yield app.test_client()
    kill_local_app_process(process=process)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
from django.db import models, migrations
from django.contrib.contenttypes.models import ContentType
from taiga.base.utils.contenttypes import update_all_contenttypes
def create_notifications(apps, schema_editor):
    """Copy milestone-watcher rows into the generic notifications_watched table."""
    # Ensure content types exist so the 'milestone' lookup below succeeds.
    update_all_contenttypes(verbosity=0)
    # Raw bulk INSERT..SELECT: one watched-row per (milestone, user) pair.
    # The interpolated content_type_id comes from a trusted ORM lookup, not
    # from user input, so string formatting is safe here.
    sql="""
    INSERT INTO notifications_watched (object_id, created_date, content_type_id, user_id, project_id)
    SELECT milestone_id AS object_id, now() AS created_date, {content_type_id} AS content_type_id, user_id, project_id
    FROM milestones_milestone_watchers INNER JOIN milestones_milestone ON milestones_milestone_watchers.milestone_id = milestones_milestone.id""".format(content_type_id=ContentType.objects.get(model='milestone').id)
    cursor = connection.cursor()
    cursor.execute(sql)
class Migration(migrations.Migration):
    """Move milestone watchers into notifications_watched, then drop the old M2M field."""

    dependencies = [
        ('notifications', '0004_watched'),
        ('milestones', '0001_initial'),
    ]

    operations = [
        # Data migration runs first so watcher rows are copied before the
        # milestone.watchers M2M field (and its join table) disappears.
        migrations.RunPython(create_notifications),
        migrations.RemoveField(
            model_name='milestone',
            name='watchers',
        ),
    ]
|
# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""TBR Matched Markets: utilities.
"""
import collections
import heapq
from typing import Any, Dict, List, TypeVar
DictKey = TypeVar('DictKey', str, int, float)
class HeapDict:
    """A dictionary of priority queues of a given limited size.

    Each dictionary key points to a separate queue that has a fixed maximum
    size. Upon pushing an item in a queue, the smallest item will be discarded
    if the maximum size is exceeded. Hence each queue stores the largest items
    that have been pushed in.

    Each item must be sortable; an item of arbitrary class can be used if it
    features a custom __lt__ method.

    Example:
      h = HeapDict(1)  # Keep only the largest item.
      h.push(10, 0.5)
      h.push(10, 1.0)
      h.push(20, 1.0)
      h.push(20, 2.0)
      h.get_result()  # Returns {10: [1.0], 20: [2.0]}.
    """

    def __init__(self, size: int):
        """Initialize a HeapDict.

        Args:
          size: Maximum size of each heap (priority queue).
        """
        self._size = size
        self._result = collections.defaultdict(list)

    def push(self, key: 'DictKey', item: 'Any'):
        """Push an item into the queue associated with the key.

        Args:
          key: A dictionary key, string, integer, or float.
          item: Any object. The queue corresponding to the key will be sorted
            based on this object.
        """
        queue = self._result[key]
        if len(queue) < self._size:
            heapq.heappush(queue, item)
        else:
            # Queue is full: push the new item, then pop the smallest, keeping
            # only the largest `size` items seen so far.
            heapq.heappushpop(queue, item)
        # Fixed: dropped the original's redundant `self._result[key] = queue` --
        # `queue` is the live list the defaultdict already holds.

    def get_result(self) -> 'Dict[DictKey, List[Any]]':
        """Return a copy of the dictionary, each queue sorted in descending order.

        Returns:
          A dictionary with the sorted lists as values, largest values first.
        """
        result = {}
        for key, q in self._result.items():
            result[key] = heapq.nlargest(len(q), q)
        return result
|
# MIT License
#
# Copyright (C) IBM Corporation 2018
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import unittest
import keras.backend as k
import numpy as np
import tensorflow as tf
from art.attacks import SpatialTransformation
from art.utils import load_dataset, master_seed
from art.utils_test import get_classifier_tf, get_classifier_kr, get_classifier_pt, get_iris_classifier_kr
logger = logging.getLogger('testLogger')
BATCH_SIZE = 10
NB_TRAIN = 100
NB_TEST = 10
class TestSpatialTransformation(unittest.TestCase):
"""
A unittest class for testing Spatial attack.
"""
@classmethod
def setUpClass(cls):
(x_train, y_train), (x_test, y_test), _, _ = load_dataset('mnist')
cls.x_train = x_train[:NB_TRAIN]
cls.y_train = y_train[:NB_TRAIN]
cls.x_test = x_test[:NB_TEST]
cls.y_test = y_test[:NB_TEST]
def setUp(self):
master_seed(1234)
def test_tfclassifier(self):
"""
First test with the TensorFlowClassifier.
:return:
"""
# Build TensorFlowClassifier
tfc, sess = get_classifier_tf()
# Attack
attack_st = SpatialTransformation(tfc, max_translation=10.0, num_translations=3, max_rotation=30.0,
num_rotations=3)
x_train_adv = attack_st.generate(self.x_train)
self.assertAlmostEqual(x_train_adv[0, 8, 13, 0], 0.49004024, delta=0.01)
self.assertAlmostEqual(attack_st.fooling_rate, 0.72, delta=0.01)
self.assertEqual(attack_st.attack_trans_x, 3)
self.assertEqual(attack_st.attack_trans_y, 3)
self.assertEqual(attack_st.attack_rot, 30.0)
x_test_adv = attack_st.generate(self.x_test)
self.assertAlmostEqual(x_test_adv[0, 14, 14, 0], 0.013572651, delta=0.01)
sess.close()
@unittest.skipIf(tf.__version__[0] == '2', reason='Skip unittests for TensorFlow v2 until Keras supports TensorFlow'
' v2 as backend.')
def test_krclassifier(self):
"""
Second test with the KerasClassifier.
:return:
"""
# Build KerasClassifier
krc = get_classifier_kr()
# Attack
attack_st = SpatialTransformation(krc, max_translation=10.0, num_translations=3, max_rotation=30.0,
num_rotations=3)
x_train_adv = attack_st.generate(self.x_train)
self.assertAlmostEqual(x_train_adv[0, 8, 13, 0], 0.49004024, delta=0.01)
self.assertAlmostEqual(attack_st.fooling_rate, 0.72, delta=0.01)
self.assertEqual(attack_st.attack_trans_x, 3)
self.assertEqual(attack_st.attack_trans_y, 3)
self.assertEqual(attack_st.attack_rot, 30.0)
x_test_adv = attack_st.generate(self.x_test)
self.assertAlmostEqual(x_test_adv[0, 14, 14, 0], 0.013572651, delta=0.01)
k.clear_session()
def test_ptclassifier(self):
"""
Third test with the PyTorchClassifier.
:return:
"""
# Build PyTorchClassifier
ptc = get_classifier_pt()
x_train = np.swapaxes(self.x_train, 1, 3).astype(np.float32)
x_test = np.swapaxes(self.x_test, 1, 3).astype(np.float32)
# Attack
attack_st = SpatialTransformation(ptc, max_translation=10.0, num_translations=3, max_rotation=30.0,
num_rotations=3)
x_train_adv = attack_st.generate(x_train)
print('abs(x_train_adv[0, 0, 13, :]', abs(x_train[0, 0, 13, :]))
print('abs(x_train_adv[0, 0, 13, :]', abs(x_train_adv[0, 0, 13, :]))
self.assertAlmostEqual(x_train_adv[0, 0, 13, 7], 0.287, delta=0.01)
self.assertAlmostEqual(attack_st.fooling_rate, 0.82, delta=0.01)
self.assertEqual(attack_st.attack_trans_x, 0)
self.assertEqual(attack_st.attack_trans_y, 3)
self.assertEqual(attack_st.attack_rot, -30.0)
x_test_adv = attack_st.generate(x_test)
self.assertLessEqual(abs(x_test_adv[0, 0, 14, 14] - 0.008591662), 0.01)
@unittest.skipIf(tf.__version__[0] == '2', reason='Skip unittests for TensorFlow v2 until Keras supports TensorFlow'
' v2 as backend.')
def test_failure_feature_vectors(self):
attack_params = {"max_translation": 10.0, "num_translations": 3, "max_rotation": 30.0, "num_rotations": 3}
classifier, _ = get_iris_classifier_kr()
attack = SpatialTransformation(classifier=classifier)
attack.set_params(**attack_params)
data = np.random.rand(10, 4)
# Assert that value error is raised for feature vectors
with self.assertRaises(ValueError) as context:
attack.generate(data)
self.assertIn('Feature vectors detected.', str(context.exception))
def test_classifier_type_check_fail_classifier(self):
# Use a useless test classifier to test basic classifier properties
class ClassifierNoAPI:
pass
classifier = ClassifierNoAPI
with self.assertRaises(TypeError) as context:
_ = SpatialTransformation(classifier=classifier)
self.assertIn('For `SpatialTransformation` classifier must be an instance of '
'`art.classifiers.classifier.Classifier`, the provided classifier is instance of '
'(<class \'object\'>,).', str(context.exception))
if __name__ == '__main__':
unittest.main()
|
"""
Django settings for trailerplan_auth_py project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import datetime
import os
from datetime import timedelta
from pathlib import Path
from django.core.wsgi import get_wsgi_application
from django.conf import settings
# NOTE(review): disables Django's async-unsafe guard globally; confirm this
# is intended only for local development.
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed in source; load it from an environment
# variable before deploying.
SECRET_KEY = '*+bng539#9_^=_a3jgu&_#p#b6e_*ca647sfdy)vv3^e_%2vs-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): dead assignment -- ALLOWED_HOSTS is unconditionally
# reassigned to ['localhost'] further down this settings file.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'corsheaders',
'rest_framework',
'rest_framework.authtoken',
'app_python.apps.AppPythonConfig',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'corsheaders.middleware.CorsPostCsrfMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ALLOWED_HOSTS = ['localhost']
# If this is used then `CORS_ORIGIN_WHITELIST` will not have any effect
CORS_ORIGIN_ALLOW_ALL = False
CORS_ALLOW_CREDENTIALS = True
CORS_ORIGIN_WHITELIST = [
'http://localhost:4200',
]
# If this is used, then not need to use `CORS_ORIGIN_ALLOW_ALL = True`
CORS_ORIGIN_REGEX_WHITELIST = [
'http://localhost:4200',
'http://localhost:8000',
]
ROOT_URLCONF = 'trailerplan_auth_py.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'trailerplan_auth_py.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'trailerplan_db',
'USER': 'postgres',
'PASSWORD': 'P@55w*rD',
'HOST': '127.0.0.1',
'PORT': '5432',
'OPTIONS': {
'options': '-c search_path=trailerplan_schema'
}
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Customization of the django user model
AUTH_USER_MODEL = 'app_python.User'
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAdminUser',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
}
JWT_AUTH = {
'JWT_ENCODE_HANDLER':
'rest_framework_jwt.utils.jwt_encode_handler',
'JWT_DECODE_HANDLER':
'rest_framework_jwt.utils.jwt_decode_handler',
'JWT_PAYLOAD_HANDLER':
'rest_framework_jwt.utils.jwt_payload_handler',
'JWT_PAYLOAD_GET_USER_ID_HANDLER':
'rest_framework_jwt.utils.jwt_get_user_id_from_payload_handler',
'JWT_RESPONSE_PAYLOAD_HANDLER':
'rest_framework_jwt.utils.jwt_response_payload_handler',
'JWT_SECRET_KEY': settings.SECRET_KEY,
'JWT_GET_USER_SECRET_KEY': None,
'JWT_PUBLIC_KEY': None,
'JWT_PRIVATE_KEY': None,
'JWT_ALGORITHM': 'HS256',
'JWT_VERIFY': True,
'JWT_VERIFY_EXPIRATION': True,
'JWT_LEEWAY': 0,
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=300),
'JWT_AUDIENCE': None,
'JWT_ISSUER': None,
'JWT_ALLOW_REFRESH': True,
'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),
'JWT_AUTH_HEADER_PREFIX': 'JWT',
'JWT_AUTH_COOKIE': True,
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '{levelname} {asctime} {module} {message}',
'style': '{',
},
'simple': {
'format': '{levelname} {message}',
'style': '{',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'verbose'
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'app_python': {
'handlers': ['console'],
'level': 'INFO',
}
},
}
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 31 14:32:51 2019
@author: Steve O'Hagan
"""
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from IPython.display import SVG, display
from rdkit import Chem
from time import time
import pickle
#Import Keras objects
from keras.models import Model
from keras.layers import Input, Masking
from keras.layers import Dense, Bidirectional
from keras.layers import GRU, TimeDistributed
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau
from keras.utils.vis_utils import model_to_dot
#%%
class SmilesUtil():
@staticmethod
def cmpSmiles(s1,s2):
s1=s1.strip()
s2=s2.strip()
mx=max(len(s1),len(s2))
s1=s1.ljust(mx)
s2=s2.ljust(mx)
hit=sum([x==y for x,y in zip(s1,s2)])
return hit/mx
@staticmethod
def isGood(smi):
m = Chem.MolFromSmiles(smi)
return m is not None
def __init__(self,dat):
#Find unique chars in smiles
#takes 580s for 6 million items
self.smiCodes=set()
self.smiLen = 0
for i,p in dat.iterrows():
s = p.Molecule
if i % 1000 == 0:
print(i,s)
self.smiLen = max(self.smiLen,len(s))
s = s.ljust(self.smiLen)
self.smiCodes.update(list(s))
self.smiCodes = sorted(list(self.smiCodes))
self.codeLen=len(self.smiCodes)
self.code2int = dict((c,i) for i,c in enumerate(self.smiCodes))
self.int2code = dict((i,c) for i,c in enumerate(self.smiCodes))
def to_OH(self,dat):
rowCount,_= np.shape(dat)
xs=np.zeros((rowCount,self.smiLen,self.codeLen),'f')
for i,p in dat.iterrows():
inP=list(p.Molecule.ljust(self.smiLen))
for j,c in enumerate(inP):
xs[i,j,self.code2int[c]] = 1.0
return xs
def oh2Smiles(self,oh):
rslt = map(self.reverseSS,oh)
return list(rslt)
def reverseSS(self,x):
if (np.ndim(x)==3):
x=np.reshape(x,(self.smiLen,self.codeLen))
xx=pd.DataFrame(x)
xx.columns=self.smiCodes
xx=list(xx.idxmax(axis=1))
s = "".join(xx)
return s.strip()
class AE4Smiles:
def __init__(self,smiObj,LATENT=4,RNN=16):
self.LATENT = LATENT
self.RNN = RNN
self.smiObj=smiObj
inputs = Input(shape=(smiObj.smiLen,smiObj.codeLen,))
x = Masking()(inputs)
x = Bidirectional(GRU(RNN,dropout=0.2, return_sequences=True))(x)
x = TimeDistributed(Dense(LATENT*2,activation='relu'))(x)
enc = TimeDistributed(Dense(LATENT,activation='relu'))(x)
x = TimeDistributed(Dense(LATENT*2,activation='relu'))(enc)
x = TimeDistributed(Dense(smiObj.codeLen,activation='sigmoid'))(x)
#inp2 = Input(shape=(smiLen,LATENT,))
self.aen = Model(inputs,x)
#decoder = Model(inp2,x)
self.encoder = Model(inputs,enc)
self.aen.compile(optimizer='adam', loss='binary_crossentropy',metrics=['acc'])
def aeTrain(self,name,sTrain,sValid, EPOCHS=1,BATCH=64):
self.model_save = name + '_SAE' + str(self.LATENT) + '_E' + str(EPOCHS) + '_R' + str(self.RNN)+'.hdf5'
print(self.model_save)
self.EPOCHS = EPOCHS
self.BATCH = BATCH
if not os.path.isfile(self.model_save):
checkpointer = ModelCheckpoint(filepath = self.model_save, verbose = 1, save_best_only = True)
reduce_lr = ReduceLROnPlateau(monitor = 'val_loss', factor = 0.2, patience = 3, min_lr = 0.0001)
self.aen.fit(sTrain, sTrain, shuffle = True, epochs = EPOCHS, batch_size = BATCH,
callbacks = [checkpointer, reduce_lr], validation_data = (sValid,sValid))
self.aen.load_weights(self.model_save)
def plotm(model):
    """Display a Keras model's layer graph inline as SVG (IPython/Jupyter)."""
    display(SVG(model_to_dot(model,show_shapes=True).create(prog='dot', format='svg')))
def getSOH():
    """Load the 6M-SMILES dataset and its SmilesUtil encoder.

    The encoder is pickled next to the CSV on the first run and reused on
    later runs.  Returns (DataFrame, SmilesUtil); prints the load time.
    """
    csv_path = 'data/6MSmiles.csv'
    dat = pd.read_csv(csv_path)
    cache_path = 'data/6MSmiles.pkl'
    start = time()
    if os.path.exists(cache_path):
        print('Loading SMILES codes.')
        with open(cache_path, 'rb') as fh:
            su = pickle.load(fh)
    else:
        print('Calculating SMILES codes.')
        su = SmilesUtil(dat)
        with open(cache_path, 'wb') as fh:
            pickle.dump(su, fh)
    elapsed = time() - start
    print('Time:', elapsed)
    return dat, su
#%%
if __name__ == "__main__":
    # Train a small autoencoder on 25k molecules and score reconstruction
    # quality on a held-out test slice.
    dat,su = getSOH()
    kk = 25000  # training-set size
    trnDat = dat[0:kk]
    #2k from end & reindex
    vldDat = dat.iloc[-2000:]
    vldDat = vldDat.reset_index(drop=True)
    tstDat = dat.iloc[-4000:-2000]
    tstDat = tstDat.reset_index(drop=True)
    del dat  # free the full 6M-row frame before one-hot expansion
    trd = su.to_OH(trnDat)
    vld = su.to_OH(vldDat)
    tsd = su.to_OH(tstDat)
    nn = AE4Smiles(su,LATENT=1)
    plotm(nn.aen)
    nn.aeTrain('25k',trd,vld,EPOCHS=16)
    yTest = nn.aen.predict(tsd)
    #%%
    # Metrics: exact string matches ("perfect"), chemically valid outputs
    # ("good" — presumably RDKit-parsable; see SmilesUtil.isGood), and mean
    # per-character match percentage.
    sm = 0.0
    perfect = 0
    good = 0.0
    nr = len(tsd)
    st = su.oh2Smiles(tsd)
    sy = su.oh2Smiles(yTest)
    for x,y in zip(st,sy):
        hit=su.cmpSmiles(x,y)
        if hit >= 1.0:
            perfect+=1
        if su.isGood(y):
            good+=1
        #print(hit,su.isGood(y))
        print(x)
        print(y,flush=True)
        sm=sm+100.0*hit
    print(f'Perfect: {100*perfect/nr:.2f}, Good:{100*good/nr:.2f}, Match:{sm/nr:.2f}')
    tenc = nn.encoder.predict(tsd)
    #%%
    # Plot training curves when available; history is absent when cached
    # weights were loaded, hence the (overly broad) bare except.
    try:
        h = nn.aen.history.history
        plt.plot(h["acc"], label="acc")
        plt.plot(h["val_acc"], label="Val_acc")
        #plt.yscale("log")
        plt.legend()
    except:
        pass
|
import secure
__all__ = ("set_secure_headers",)
# Angular requires default-src 'self'; style-src 'self' 'unsafe-inline';
# https://angular.io/guide/security#content-security-policy
# Content-Security-Policy: self-hosted scripts, Google Fonts allowed for
# fonts/styles, inline styles permitted (required by Angular, per the
# comment above).
csp_policy = (
    secure.ContentSecurityPolicy()
    .default_src("'self'")
    .font_src("'self'", "fonts.gstatic.com")
    .style_src("'self'", "'unsafe-inline'", "fonts.googleapis.com")
)
# Secure 0.3.0 mistakenly has a semicolon in the default value,
# which is fixed in an unreleased version.
# Workaround: set the full Permissions-Policy header value verbatim,
# denying every listed feature to all origins (empty allowlists).
permissions_policy = secure.PermissionsPolicy()
permissions_policy.value = (
    "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), "
    "camera=(), clipboard-read=(), clipboard-write=(), cross-origin-isolated=(), "
    "display-capture=(), document-domain=(), encrypted-media=(), "
    "execution-while-not-rendered=(), execution-while-out-of-viewport=(), "
    "fullscreen=(), gamepad=(), geolocation=(), gyroscope=(), magnetometer=(), "
    "microphone=(), midi=(), navigation-override=(), payment=(), "
    "picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), "
    "speaker-selection=(), sync-xhr=(), usb=(), web-share=(), "
    "xr-spatial-tracking=()"
)
# Bundle of all policies; applied to each response by set_secure_headers.
secure_headers = secure.Secure(csp=csp_policy, permissions=permissions_policy)
def set_secure_headers(get_response):
    """Django middleware factory that stamps the module's security headers
    onto every outgoing response."""
    def middleware(request):
        response = get_response(request)
        # Mutates the response in place, adding CSP/Permissions-Policy etc.
        secure_headers.framework.django(response)
        return response

    return middleware
|
# Generated by Django 2.2.9 on 2020-01-18 03:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: updates metadata (help_text, related_name) on the
    # Game.loser / Game.winner foreign keys.
    # NOTE(review): on_delete=CASCADE on these nullable fields means deleting
    # a user deletes their games — confirm SET_NULL was not intended.

    dependencies = [
        ('ifthen', '0002_move_guid'),
    ]

    operations = [
        migrations.AlterField(
            model_name='game',
            name='loser',
            field=models.ForeignKey(blank=True, help_text='Loser of the game', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='lost_games', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='game',
            name='winner',
            field=models.ForeignKey(blank=True, help_text='Winner of the game', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='won_games', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
from turtle import *
from enum import Enum
class Direction(Enum):
    """Corner orientation for square_dir: Up/Down crossed with Left/Right."""
    UL = 0  # up-left
    UR = 1  # up-right
    DL = 2  # down-left
    DR = 3  # down-right
def square(size):
    """Draw a square of side *size* with left turns, ending back at the
    starting position and heading.

    Replaces four copy-pasted fd/lt pairs with a loop (identical drawing).
    """
    for _ in range(4):
        fd(size)
        lt(90)
def square_dir(size, direction):
    """Draw a square of side *size* oriented by *direction*.

    Each direction maps to a (move, turn) pair reproducing the original
    unrolled sequences exactly:
      UL: fd/lt   DL: fd/rt   DR: bk/lt   UR: bk/rt
    An unrecognized direction draws nothing (as before).  The turtle ends
    back at its starting position and heading.
    """
    strokes = {
        Direction.UL: (fd, lt),
        Direction.DL: (fd, rt),
        Direction.DR: (bk, lt),
        Direction.UR: (bk, rt),
    }
    pair = strokes.get(direction)
    if pair is None:
        return  # preserve the original's silent no-op for unknown values
    move, turn = pair
    for _ in range(4):
        move(size)
        turn(90)
def line():
    """Draw one 80-px stroke along the current heading, return to its base,
    then shift the pen 1 px to the right (perpendicular) for the next stroke."""
    fd(80)
    bk(80)
    rt(90)
    fd(1)
    lt(90)
def reset():
    """Rewind the pen 255 px left (undoing the 255 one-pixel steps of a band)
    and drop it 80 px down to start the next gradient band; leaves the
    heading at 90."""
    seth(180)
    fd(255)
    seth(90)
    bk(80)
def draw_gra_lot():
    """Draw eight 255-step colour-gradient bands, one below the other.

    Each band is 255 vertical strokes whose pen colour is a function of the
    step index i (0..254).  The original had eight copy-pasted while-loops;
    they are folded into one helper driven by a list of colour functions.
    Band order, colours (including the duplicated 4th/6th gradient) and the
    absence of a reset() after the last band are preserved.
    """
    def band(color_of):
        # One gradient band: colour each of the 255 strokes by its index.
        for i in range(255):
            pencolor(*color_of(i))
            line()

    seth(90)
    fd(320)
    gradients = [
        lambda i: (i, 0, 255 - i),
        lambda i: (i, round(i / 2), 255 - i),
        lambda i: (i, i, i),
        lambda i: (255, round(i / 2), 255 - i),
        lambda i: (255, i, 255),
        lambda i: (255, round(i / 2), 255 - i),  # intentionally same as band 4
        lambda i: (i, i, 255 - i),
        lambda i: (255 - i, round(i / 2), round(i / 2)),
    ]
    for idx, gradient in enumerate(gradients):
        band(gradient)
        if idx < len(gradients) - 1:  # original performs no reset after the last band
            reset()
if __name__== "__main__":
    # Configure the canvas and draw all gradient bands in one batch.
    Screen().screensize(1000, 1000)
    colormode(255)  # accept RGB components as 0-255 integers
    speed(1000)     # speed() clamps out-of-range values; harmless with tracer(0)
    tracer(0, 0)    # disable animation; nothing is shown until update()
    up()
    down()
    draw_gra_lot()
    up()
    update()  # flush the buffered drawing to the screen
    #ht()
|
from django.conf.urls import patterns, include, url
from gallery import views
# URL routes for the gallery app.
# `patterns()` was deprecated in Django 1.8 and removed in 1.10; a plain list
# of url() entries (with no prefix) is the supported, behaviour-identical form.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r"^(?P<gallery_id>\d+)/$", views.detail, name='detail'),
    url(r"^(?P<gallery_id>\d+)/delete/$", views.delete, name='delete'),
    url(r"^new/$", views.upload, name='upload'),
]
|
from selenium import webdriver
from .driver import browser
import unittest
class JdTest(unittest.TestCase):
    """Selenium test case base: opens a fresh browser for each test and
    quits it afterwards.  `browser()` comes from the local .driver module."""
    def setUp(self):
        self.driver = browser()
        self.driver.implicitly_wait(30)  # seconds to poll for elements
        self.driver.maximize_window()
    def tearDown(self):
        self.driver.quit()
|
from .docker import *
import os
# Enable the Sentry (raven) Django integration on top of the docker settings.
INSTALLED_APPS.append('raven.contrib.django.raven_compat')
RAVEN_CONFIG = {
    # Fails fast with KeyError if SENTRY_DSN is missing from the environment.
    'dsn': os.environ["SENTRY_DSN"],
    'release': os.environ.get("APP_GIT_COMMIT", "no-git-commit-available")
}
|
import numpy as np
import pandas as pd
from sklearn.cluster import DBSCAN as skDBSCAN
from cuml import DBSCAN as cumlDBSCAN
import cudf
import os
from collections import OrderedDict
import argparse
import datetime
from azureml.core.run import Run
## GPU execution
def gpu_load_data(fname, ncols):
    """Read the mortgage CSV into a cudf DataFrame.

    Columns are named feature_0..feature_{ncols-1}, all float64; the header
    row is skipped.  (Lists replace the original OrderedDict — read_csv was
    only ever handed list(keys()) / list(values()).)
    """
    names = ["feature_{0}".format(i) for i in range(ncols)]
    dtypes = ["float64"] * ncols
    print(fname)
    return cudf.read_csv(fname, names=names, delimiter=',', dtype=dtypes, skiprows=1)
def run_gpu_workflow(fname, ncols, eps, min_samples):
    """Cluster the CSV at *fname* with cuML DBSCAN on the GPU.

    The fitted model is discarded — this function exists purely so the
    caller can time the end-to-end load + fit.
    """
    mortgage_cudf = gpu_load_data(fname, ncols)
    clustering_cuml = cumlDBSCAN(eps = eps,min_samples = min_samples)
    dbscan_gpu = clustering_cuml.fit(mortgage_cudf)
## CPU execution
def cpu_load_data(fname):
    """Read the mortgage CSV into a pandas DataFrame (first row is the header)."""
    print(fname)
    frame = pd.read_csv(fname, header=0)
    return frame
def run_cpu_workflow(fname, eps, min_samples):
    """Cluster the CSV at *fname* with scikit-learn DBSCAN on the CPU.

    As with the GPU path, the fitted model is discarded; only timing matters.
    """
    frame = cpu_load_data(fname)
    model = skDBSCAN(eps=eps, min_samples=min_samples)
    model.fit(frame)
def main():
    """Parse CLI flags, run DBSCAN on CPU or GPU, and log metrics to Azure ML.

    Flags:
      --data_dir      directory containing mortgage.csv
      --gpu           non-zero selects the cuML GPU path
      --ncols         number of feature columns to read (GPU path only)
      --eps           DBSCAN neighborhood radius (int; default 3)
      --min_samples   DBSCAN core-point threshold (default 2)
    """
    parser = argparse.ArgumentParser("RAPIDS_DBSCAN")
    parser.add_argument("--data_dir", type=str, help="Location of data")
    parser.add_argument("--gpu", type=int, help="Use GPU?", default=0)
    parser.add_argument("--ncols", type=int, help="How many columns?", default=128)
    # BUG FIX: the help strings below were copy-pasted from --ncols
    # ("How many columns?"); they now describe the actual parameters.
    parser.add_argument("--eps", type=int, help="DBSCAN eps (neighborhood radius)", default=3)
    parser.add_argument("--min_samples", type=int, help="DBSCAN min_samples threshold", default=2)
    parser.add_argument('-f', type=str, default='')  # added for notebook execution scenarios
    args = parser.parse_args()
    data_dir = args.data_dir
    gpu = args.gpu
    ncols = args.ncols
    eps = args.eps
    min_samples = args.min_samples

    run = Run.get_context()
    run.log("Running on GPU?", gpu)
    run.log("ncols", ncols)
    run.log("eps", eps)
    run.log("min_samples", min_samples)

    print("Running DBSCAN on {0}...".format('GPU' if gpu else 'CPU'))
    t1 = datetime.datetime.now()
    # os.path.join avoids doubled/missing separators from raw string concat
    fname = os.path.join(data_dir, "mortgage.csv")
    if gpu:
        run_gpu_workflow(fname, ncols, eps, min_samples)
    else:
        run_cpu_workflow(fname, eps, min_samples)
    t2 = datetime.datetime.now()
    print("Total DBSCAN Time on {0}: {1}".format('GPU' if gpu else 'CPU', str(t2-t1)))
    run.log("Total runtime", t2-t1)


if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
from flask import Blueprint
# Blueprint for all /data routes; templates and static files are resolved
# relative to this package.
data_mod = Blueprint('data', __name__, url_prefix='/data', template_folder='./templates', static_folder='static')
# Imported last so the views module can import data_mod without a circular
# import; importing it registers the routes as a side effect.
from . import views
|
import torch
class DiscreteLowerboundModel(torch.nn.Module):
    """Dequantization lower bound on discrete data: log p(v) - log q(u|x),
    where v = x + u and u is noise sampled from q(u|x)."""

    def __init__(self, model_pv, model_qu_x, model_context):
        super(DiscreteLowerboundModel, self).__init__()
        self.model_pv = model_pv        # density model over dequantized v
        self.model_qu_x = model_qu_x    # dequantization-noise posterior q(u|x)
        self.model_context = model_context  # optional context network on x

    def forward(self, x):
        """Return the per-sample lower bound for a batch x."""
        x = x.float()
        context = self.model_context(x) if self.model_context is not None else None
        noise, log_qu = self.model_qu_x.sample(context, n_samples=x.size(0))
        dequantized = x + noise
        log_pv = self.model_pv(dequantized, context=None)
        return log_pv - log_qu

    def sample(self, n_samples):
        """Draw n_samples from p(v); the log-density is discarded."""
        samples, _ = self.model_pv.sample(context=None, n_samples=n_samples)
        return samples
class DiscreteLowerboundAugmentedModel(torch.nn.Module):
    """Dequantization lower bound with an auxiliary augmentation variable a:
    log p(v, a) - log q(u|x) - log q(a|v), where v = x + u."""
    def __init__(self, model_pva, model_qu_x, model_qa_v,
                 model_context_x, model_context_v):
        super(DiscreteLowerboundAugmentedModel, self).__init__()
        self.model_pva = model_pva          # joint density over [v, a]
        self.model_qu_x = model_qu_x        # dequantization-noise posterior q(u|x)
        self.model_qa_v = model_qa_v        # augmentation posterior q(a|v)
        self.model_context_x = model_context_x
        self.model_context_v = model_context_v
        # Number of data channels in v; recorded lazily on the first forward().
        self.v_channels = None
    def forward(self, x):
        """Return the per-sample augmented lower bound for a batch x."""
        x = x.float()
        context_x = self.model_context_x(x)
        u, log_qu = self.model_qu_x.sample(context_x, n_samples=x.size(0))
        v = x + u
        if self.v_channels is None:
            self.v_channels = v.size(1)  # remember the data/augmentation split
        context_v = self.model_context_v(v)
        a, log_qa_v = self.model_qa_v.sample(context_v, n_samples=x.size(0))
        va = torch.cat([v, a], dim=1)  # model p(v, a) over the concatenation
        log_pva = self.model_pva(va, context=None)
        return log_pva - log_qu - log_qa_v
    def sample(self, n_samples):
        """Sample [v, a] jointly and return only the data channels of v.

        NOTE(review): if sample() is called before any forward(), v_channels
        is still None and v[:, :None] silently returns ALL channels
        (including the augmentation ones) — confirm whether that is intended.
        """
        v, log_pv = self.model_pva.sample(context=None, n_samples=n_samples)
        return v[:, :self.v_channels]
class PredictiveDiscreteLowerboundModel(torch.nn.Module):
    """Conditional dequantization lower bound: log p(v|x) - log q(u|y),
    where v = y + u and u is noise sampled from q(u|y)."""

    def __init__(self, model_pv_x, model_qu_y, model_context_y, model_context_x):
        super(PredictiveDiscreteLowerboundModel, self).__init__()
        self.model_pv_x = model_pv_x        # conditional density p(v|x)
        self.model_qu_y = model_qu_y        # dequantization-noise posterior q(u|y)
        self.model_context_y = model_context_y  # optional context network on y
        self.model_context_x = model_context_x  # optional context network on x

    def forward(self, x, y):
        """Return the per-sample lower bound on log p(y|x)."""
        if self.model_context_x is not None:
            context_x = self.model_context_x(x)
        else:
            context_x = None
        if self.model_context_y is not None:
            context_y = self.model_context_y(y)
        else:
            context_y = None
        # BUG FIX: the original read self.model_qu_x, an attribute this class
        # never sets (__init__ stores model_qu_y), so forward() always raised
        # AttributeError.  The y-conditioned noise model is the correct one.
        u, log_pu = self.model_qu_y.sample(context_y, n_samples=x.size(0))
        v = y.float() + u
        log_pv_x = self.model_pv_x(v, context=context_x)
        return log_pv_x - log_pu

    def sample(self, x, n_samples):
        """Draw n_samples from p(v|x); the log-density is discarded."""
        if self.model_context_x is not None:
            context_x = self.model_context_x(x)
        else:
            context_x = None
        v, log_pv = self.model_pv_x.sample(
            context=context_x, n_samples=n_samples)
        return v
|
# Copyright 2006-2012 Mark Diekhans
# Copyright sebsauvage.net
# FIXME cant this be replaced with https://pypi.python.org/pypi/sqlitedict/
"""
Code from:
http://sebsauvage.net/python/snyppets/index.html#dbdict
A dictionnary-like object for LARGE datasets
Python dictionnaries are very efficient objects for fast data access. But when data is too large to fit in memory, you're in trouble.
Here's a dictionnary-like object which uses a SQLite database and behaves like a dictionnary object:
- You can work on datasets which do not fit in memory. Size is not limited
by memory, but by disk. Can hold up to several tera-bytes of data (thanks
to SQLite).
- Behaves like a dictionnary (can be used in place of a dictionnary object
in most cases.)
- Data persists between program runs.
- ACID (data integrity): Storage file integrity is assured. No half-written
data. It's really hard to mess up data.
- Efficient: You do not have to re-write a whole 500 Gb file when changing
only one item. Only the relevant parts of the file are changed.
- You can mix several key types (you can do d["foo"]=bar and d[7]=5468)
(You can't to this with a standard dictionnary.)
- You can share this dictionnary with other languages and systems (SQLite
databases are portable, and the SQlite library is available on a wide
range of systems/languages, from mainframes to PDA/iPhone, from Python to
Java/C++/C#/perl...)
Modified by markd:
- renamed dbdict -> DbDict
- include key name in KeyError exceptions
- specify name of file, not the dictName in imported code that didn't allow
specifying the directory.
- added table option to allow storing multiple dictionaries in table
- add truncate constructor option
"""
# FIXME: can this be replaced with something in pypi, is it really needed
from collections import UserDict
from sqlite3 import dbapi2 as sqlite
class DbDict(UserDict):
    ''' DbDict, a dictionnary-like object for large datasets (several
    Tera-bytes) backed by an SQLite database'''

    def __init__(self, db_filename, table="data", truncate=False):
        """Open (or create) the backing database.

        db_filename: path of the SQLite file; ":memory:" gives a transient dict.
        table:       table name, allowing several dicts in one file.
        truncate:    drop any existing contents of the table first.

        NOTE: UserDict.__init__ is deliberately not called — a self.data dict
        would shadow the database; all dict-protocol methods below go
        straight to SQLite instead.
        """
        self.db_filename = db_filename
        # Identifiers cannot be bound parameters, so the table name is
        # interpolated into the SQL; callers must pass a trusted name.
        self.table = table
        self.con = sqlite.connect(self.db_filename)
        if truncate:
            self.con.execute("drop table if exists {}".format(self.table))
        self.con.execute("create table if not exists {} (key PRIMARY KEY,value)".format(self.table))

    def __getitem__(self, key):
        """Return the stored value, raising KeyError (naming the key) if absent."""
        row = self.con.execute("select value from {} where key=?".format(self.table),
                               (key,)).fetchone()
        if not row:
            raise KeyError(str(key))
        return row[0]

    def __setitem__(self, key, item):
        """Insert or update a key; committed immediately for durability."""
        if self.con.execute("select key from {} where key=?".format(self.table),
                            (key,)).fetchone():
            self.con.execute("update {} set value=? where key=?".format(self.table),
                             (item, key))
        else:
            self.con.execute("insert into {} (key,value) values (?,?)".format(self.table),
                             (key, item))
        self.con.commit()

    def __delitem__(self, key):
        """Remove a key, raising KeyError if it is not present."""
        if self.con.execute("select key from {} where key=?".format(self.table),
                            (key,)).fetchone():
            self.con.execute("delete from {} where key=?".format(self.table),
                             (key,))
            self.con.commit()
        else:
            raise KeyError(str(key))

    def keys(self):
        """Return all keys as a materialized list (not a lazy view)."""
        return [row[0] for row in self.con.execute("select key from {}".format(self.table)).fetchall()]

    # BUG FIX: the inherited UserDict implementations of __len__/__iter__/
    # __contains__ read self.data, which is never initialized here (see
    # __init__), so len(d), iteration and `key in d` raised AttributeError.
    # Provide SQLite-backed implementations with the expected semantics.
    def __len__(self):
        """Return the number of stored keys."""
        return self.con.execute("select count(*) from {}".format(self.table)).fetchone()[0]

    def __iter__(self):
        """Iterate over the stored keys."""
        return iter(self.keys())

    def __contains__(self, key):
        """Membership test without raising KeyError."""
        return self.con.execute("select key from {} where key=?".format(self.table),
                                (key,)).fetchone() is not None
|
# Look up the CODATA entry (value, unit, uncertainty) for the reduced proton
# Compton wavelength.  NOTE(review): `constants` is presumably scipy.constants
# imported elsewhere, and the bare expression discards its result — confirm
# this fragment's intent.
constants.physical_constants["proton Compton wavelength over 2 pi"]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['Instance']
class Instance(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_ip_v4: Optional[pulumi.Input[str]] = None,
access_ip_v6: Optional[pulumi.Input[str]] = None,
admin_pass: Optional[pulumi.Input[str]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
availability_zone_hints: Optional[pulumi.Input[str]] = None,
block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceBlockDeviceArgs']]]]] = None,
config_drive: Optional[pulumi.Input[bool]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
flavor_name: Optional[pulumi.Input[str]] = None,
force_delete: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
image_name: Optional[pulumi.Input[str]] = None,
key_pair: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
network_mode: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]]] = None,
personalities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstancePersonalityArgs']]]]] = None,
power_state: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
scheduler_hints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSchedulerHintArgs']]]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
stop_before_destroy: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
user_data: Optional[pulumi.Input[str]] = None,
vendor_options: Optional[pulumi.Input[pulumi.InputType['InstanceVendorOptionsArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Create a Instance resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access_ip_v4: The first detected Fixed IPv4 address.
:param pulumi.Input[str] access_ip_v6: The first detected Fixed IPv6 address.
:param pulumi.Input[str] admin_pass: The administrative password to assign to the server.
Changing this changes the root password on the existing server.
:param pulumi.Input[str] availability_zone: The availability zone in which to create
the server. Conflicts with `availability_zone_hints`. Changing this creates
a new server.
:param pulumi.Input[str] availability_zone_hints: The availability zone in which to
create the server. This argument is preferred to `availability_zone`, when
scheduling the server on a
[particular](https://docs.openstack.org/nova/latest/admin/availability-zones.html)
host or node. Conflicts with `availability_zone`. Changing this creates a
new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceBlockDeviceArgs']]]] block_devices: Configuration of block devices. The block_device
structure is documented below. Changing this creates a new server.
You can specify multiple block devices which will create an instance with
multiple disks. This configuration is very flexible, so please see the
following [reference](https://docs.openstack.org/nova/latest/user/block-device-mapping.html)
for more information.
:param pulumi.Input[bool] config_drive: Whether to use the config_drive feature to
configure the instance. Changing this creates a new server.
:param pulumi.Input[str] flavor_id: The flavor ID of
the desired flavor for the server. Changing this resizes the existing server.
:param pulumi.Input[str] flavor_name: The name of the
desired flavor for the server. Changing this resizes the existing server.
:param pulumi.Input[bool] force_delete: Whether to force the OpenStack instance to be
forcefully deleted. This is useful for environments that have reclaim / soft
deletion enabled.
:param pulumi.Input[str] image_id: (Optional; Required if `image_name` is empty and not booting
from a volume. Do not specify if booting from a volume.) The image ID of
the desired image for the server. Changing this creates a new server.
:param pulumi.Input[str] image_name: (Optional; Required if `image_id` is empty and not booting
from a volume. Do not specify if booting from a volume.) The name of the
desired image for the server. Changing this creates a new server.
:param pulumi.Input[str] key_pair: The name of a key pair to put on the server. The key
pair must already be created and associated with the tenant's account.
Changing this creates a new server.
:param pulumi.Input[Mapping[str, Any]] metadata: Metadata key/value pairs to make available from
within the instance. Changing this updates the existing server metadata.
:param pulumi.Input[str] name: The human-readable
name of the network. Changing this creates a new server.
:param pulumi.Input[str] network_mode: Special string for `network` option to create
the server. `network_mode` can be `"auto"` or `"none"`.
Please see the following [reference](https://docs.openstack.org/api-ref/compute/?expanded=create-server-detail#id11) for more information. Conflicts with `network`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstancePersonalityArgs']]]] personalities: Customize the personality of an instance by
defining one or more files and their contents. The personality structure
is described below.
:param pulumi.Input[str] power_state: Provide the VM state. Only 'active' and 'shutoff'
are supported values. *Note*: If the initial power_state is the shutoff
the VM will be stopped immediately after build and the provisioners like
remote-exec or files are not supported.
:param pulumi.Input[str] region: The region in which to create the server instance. If
omitted, the `region` argument of the provider is used. Changing this
creates a new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSchedulerHintArgs']]]] scheduler_hints: Provide the Nova scheduler with hints on how
the instance should be launched. The available hints are described below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: An array of one or more security group names
to associate with the server. Changing this results in adding/removing
security groups from the existing server. *Note*: When attaching the
instance to networks using Ports, place the security groups on the Port
and not the instance. *Note*: Names should be used and not ids, as ids
trigger unnecessary updates.
:param pulumi.Input[bool] stop_before_destroy: Whether to try stop instance gracefully
before destroying it, thus giving chance for guest OS daemons to stop correctly.
If instance doesn't stop within timeout, it will be destroyed anyway.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the instance. Changing this
updates the existing instance tags.
:param pulumi.Input[str] user_data: The user data to provide when launching the instance.
Changing this creates a new server.
:param pulumi.Input[pulumi.InputType['InstanceVendorOptionsArgs']] vendor_options: Map of additional vendor-specific options.
Supported options are described below.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['access_ip_v4'] = access_ip_v4
__props__['access_ip_v6'] = access_ip_v6
__props__['admin_pass'] = admin_pass
__props__['availability_zone'] = availability_zone
__props__['availability_zone_hints'] = availability_zone_hints
__props__['block_devices'] = block_devices
__props__['config_drive'] = config_drive
__props__['flavor_id'] = flavor_id
__props__['flavor_name'] = flavor_name
__props__['force_delete'] = force_delete
__props__['image_id'] = image_id
__props__['image_name'] = image_name
__props__['key_pair'] = key_pair
__props__['metadata'] = metadata
__props__['name'] = name
__props__['network_mode'] = network_mode
__props__['networks'] = networks
__props__['personalities'] = personalities
__props__['power_state'] = power_state
__props__['region'] = region
__props__['scheduler_hints'] = scheduler_hints
__props__['security_groups'] = security_groups
__props__['stop_before_destroy'] = stop_before_destroy
__props__['tags'] = tags
__props__['user_data'] = user_data
__props__['vendor_options'] = vendor_options
__props__['all_metadata'] = None
__props__['all_tags'] = None
super(Instance, __self__).__init__(
'openstack:compute/instance:Instance',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access_ip_v4: Optional[pulumi.Input[str]] = None,
access_ip_v6: Optional[pulumi.Input[str]] = None,
admin_pass: Optional[pulumi.Input[str]] = None,
all_metadata: Optional[pulumi.Input[Mapping[str, Any]]] = None,
all_tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
availability_zone_hints: Optional[pulumi.Input[str]] = None,
block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceBlockDeviceArgs']]]]] = None,
config_drive: Optional[pulumi.Input[bool]] = None,
flavor_id: Optional[pulumi.Input[str]] = None,
flavor_name: Optional[pulumi.Input[str]] = None,
force_delete: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
image_name: Optional[pulumi.Input[str]] = None,
key_pair: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
network_mode: Optional[pulumi.Input[str]] = None,
networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]]] = None,
personalities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstancePersonalityArgs']]]]] = None,
power_state: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
scheduler_hints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSchedulerHintArgs']]]]] = None,
security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
stop_before_destroy: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
user_data: Optional[pulumi.Input[str]] = None,
vendor_options: Optional[pulumi.Input[pulumi.InputType['InstanceVendorOptionsArgs']]] = None) -> 'Instance':
"""
Get an existing Instance resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access_ip_v4: The first detected Fixed IPv4 address.
:param pulumi.Input[str] access_ip_v6: The first detected Fixed IPv6 address.
:param pulumi.Input[str] admin_pass: The administrative password to assign to the server.
Changing this changes the root password on the existing server.
:param pulumi.Input[Sequence[pulumi.Input[str]]] all_tags: The collection of tags assigned on the instance, which have
been explicitly and implicitly added.
:param pulumi.Input[str] availability_zone: The availability zone in which to create
the server. Conflicts with `availability_zone_hints`. Changing this creates
a new server.
:param pulumi.Input[str] availability_zone_hints: The availability zone in which to
create the server. This argument is preferred to `availability_zone`, when
scheduling the server on a
[particular](https://docs.openstack.org/nova/latest/admin/availability-zones.html)
host or node. Conflicts with `availability_zone`. Changing this creates a
new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceBlockDeviceArgs']]]] block_devices: Configuration of block devices. The block_device
structure is documented below. Changing this creates a new server.
You can specify multiple block devices which will create an instance with
multiple disks. This configuration is very flexible, so please see the
following [reference](https://docs.openstack.org/nova/latest/user/block-device-mapping.html)
for more information.
:param pulumi.Input[bool] config_drive: Whether to use the config_drive feature to
configure the instance. Changing this creates a new server.
:param pulumi.Input[str] flavor_id: The flavor ID of
the desired flavor for the server. Changing this resizes the existing server.
:param pulumi.Input[str] flavor_name: The name of the
desired flavor for the server. Changing this resizes the existing server.
:param pulumi.Input[bool] force_delete: Whether to force the OpenStack instance to be
forcefully deleted. This is useful for environments that have reclaim / soft
deletion enabled.
:param pulumi.Input[str] image_id: (Optional; Required if `image_name` is empty and not booting
from a volume. Do not specify if booting from a volume.) The image ID of
the desired image for the server. Changing this creates a new server.
:param pulumi.Input[str] image_name: (Optional; Required if `image_id` is empty and not booting
from a volume. Do not specify if booting from a volume.) The name of the
desired image for the server. Changing this creates a new server.
:param pulumi.Input[str] key_pair: The name of a key pair to put on the server. The key
pair must already be created and associated with the tenant's account.
Changing this creates a new server.
:param pulumi.Input[Mapping[str, Any]] metadata: Metadata key/value pairs to make available from
within the instance. Changing this updates the existing server metadata.
:param pulumi.Input[str] name: The human-readable
name of the network. Changing this creates a new server.
:param pulumi.Input[str] network_mode: Special string for `network` option to create
the server. `network_mode` can be `"auto"` or `"none"`.
Please see the following [reference](https://docs.openstack.org/api-ref/compute/?expanded=create-server-detail#id11) for more information. Conflicts with `network`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceNetworkArgs']]]] networks: An array of one or more networks to attach to the
instance. The network object structure is documented below. Changing this
creates a new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstancePersonalityArgs']]]] personalities: Customize the personality of an instance by
defining one or more files and their contents. The personality structure
is described below.
:param pulumi.Input[str] power_state: Provide the VM state. Only 'active' and 'shutoff'
are supported values. *Note*: If the initial power_state is the shutoff
the VM will be stopped immediately after build and the provisioners like
remote-exec or files are not supported.
:param pulumi.Input[str] region: The region in which to create the server instance. If
omitted, the `region` argument of the provider is used. Changing this
creates a new server.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceSchedulerHintArgs']]]] scheduler_hints: Provide the Nova scheduler with hints on how
the instance should be launched. The available hints are described below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_groups: An array of one or more security group names
to associate with the server. Changing this results in adding/removing
security groups from the existing server. *Note*: When attaching the
instance to networks using Ports, place the security groups on the Port
and not the instance. *Note*: Names should be used and not ids, as ids
trigger unnecessary updates.
:param pulumi.Input[bool] stop_before_destroy: Whether to try stop instance gracefully
before destroying it, thus giving chance for guest OS daemons to stop correctly.
If instance doesn't stop within timeout, it will be destroyed anyway.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: A set of string tags for the instance. Changing this
updates the existing instance tags.
:param pulumi.Input[str] user_data: The user data to provide when launching the instance.
Changing this creates a new server.
:param pulumi.Input[pulumi.InputType['InstanceVendorOptionsArgs']] vendor_options: Map of additional vendor-specific options.
Supported options are described below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["access_ip_v4"] = access_ip_v4
__props__["access_ip_v6"] = access_ip_v6
__props__["admin_pass"] = admin_pass
__props__["all_metadata"] = all_metadata
__props__["all_tags"] = all_tags
__props__["availability_zone"] = availability_zone
__props__["availability_zone_hints"] = availability_zone_hints
__props__["block_devices"] = block_devices
__props__["config_drive"] = config_drive
__props__["flavor_id"] = flavor_id
__props__["flavor_name"] = flavor_name
__props__["force_delete"] = force_delete
__props__["image_id"] = image_id
__props__["image_name"] = image_name
__props__["key_pair"] = key_pair
__props__["metadata"] = metadata
__props__["name"] = name
__props__["network_mode"] = network_mode
__props__["networks"] = networks
__props__["personalities"] = personalities
__props__["power_state"] = power_state
__props__["region"] = region
__props__["scheduler_hints"] = scheduler_hints
__props__["security_groups"] = security_groups
__props__["stop_before_destroy"] = stop_before_destroy
__props__["tags"] = tags
__props__["user_data"] = user_data
__props__["vendor_options"] = vendor_options
return Instance(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="accessIpV4")
    def access_ip_v4(self) -> pulumi.Output[str]:
        """The first detected Fixed IPv4 address."""
        return pulumi.get(self, "access_ip_v4")
    @property
    @pulumi.getter(name="accessIpV6")
    def access_ip_v6(self) -> pulumi.Output[str]:
        """The first detected Fixed IPv6 address."""
        return pulumi.get(self, "access_ip_v6")
    @property
    @pulumi.getter(name="adminPass")
    def admin_pass(self) -> pulumi.Output[Optional[str]]:
        """
        The administrative password assigned to the server.
        Changing this changes the root password on the existing server.
        """
        return pulumi.get(self, "admin_pass")
    @property
    @pulumi.getter(name="allMetadata")
    def all_metadata(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        The aggregate of metadata on the instance. NOTE(review): by analogy
        with `all_tags`, presumably includes entries added both explicitly
        and implicitly by the provider -- confirm against provider docs.
        """
        return pulumi.get(self, "all_metadata")
    @property
    @pulumi.getter(name="allTags")
    def all_tags(self) -> pulumi.Output[Sequence[str]]:
        """
        The collection of tags assigned on the instance, both explicitly
        and implicitly added.
        """
        return pulumi.get(self, "all_tags")
    @property
    @pulumi.getter(name="availabilityZone")
    def availability_zone(self) -> pulumi.Output[str]:
        """
        The availability zone in which to create the server.
        Conflicts with `availability_zone_hints`. Changing this creates
        a new server.
        """
        return pulumi.get(self, "availability_zone")
    @property
    @pulumi.getter(name="availabilityZoneHints")
    def availability_zone_hints(self) -> pulumi.Output[Optional[str]]:
        """
        The availability zone in which to create the server. This argument
        is preferred to `availability_zone` when scheduling the server on a
        [particular](https://docs.openstack.org/nova/latest/admin/availability-zones.html)
        host or node. Conflicts with `availability_zone`. Changing this
        creates a new server.
        """
        return pulumi.get(self, "availability_zone_hints")
    @property
    @pulumi.getter(name="blockDevices")
    def block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceBlockDevice']]]:
        """
        Configuration of block devices. The block_device structure is
        documented below. Changing this creates a new server.
        You can specify multiple block devices, which will create an instance
        with multiple disks. This configuration is very flexible, so please
        see the following
        [reference](https://docs.openstack.org/nova/latest/user/block-device-mapping.html)
        for more information.
        """
        return pulumi.get(self, "block_devices")
    @property
    @pulumi.getter(name="configDrive")
    def config_drive(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to use the config_drive feature to configure the instance.
        Changing this creates a new server.
        """
        return pulumi.get(self, "config_drive")
    @property
    @pulumi.getter(name="flavorId")
    def flavor_id(self) -> pulumi.Output[str]:
        """
        The flavor ID of the desired flavor for the server.
        Changing this resizes the existing server.
        """
        return pulumi.get(self, "flavor_id")
    @property
    @pulumi.getter(name="flavorName")
    def flavor_name(self) -> pulumi.Output[str]:
        """
        The name of the desired flavor for the server.
        Changing this resizes the existing server.
        """
        return pulumi.get(self, "flavor_name")
    @property
    @pulumi.getter(name="forceDelete")
    def force_delete(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether the OpenStack instance should be forcefully deleted.
        This is useful for environments that have reclaim / soft deletion
        enabled.
        """
        return pulumi.get(self, "force_delete")
    @property
    @pulumi.getter(name="imageId")
    def image_id(self) -> pulumi.Output[str]:
        """
        (Optional; Required if `image_name` is empty and not booting from a
        volume. Do not specify if booting from a volume.) The image ID of the
        desired image for the server. Changing this creates a new server.
        """
        return pulumi.get(self, "image_id")
    @property
    @pulumi.getter(name="imageName")
    def image_name(self) -> pulumi.Output[str]:
        """
        (Optional; Required if `image_id` is empty and not booting from a
        volume. Do not specify if booting from a volume.) The name of the
        desired image for the server. Changing this creates a new server.
        """
        return pulumi.get(self, "image_name")
    @property
    @pulumi.getter(name="keyPair")
    def key_pair(self) -> pulumi.Output[Optional[str]]:
        """
        The name of a key pair to put on the server. The key pair must
        already be created and associated with the tenant's account.
        Changing this creates a new server.
        """
        return pulumi.get(self, "key_pair")
    @property
    @pulumi.getter
    def metadata(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        Metadata key/value pairs to make available from within the instance.
        Changing this updates the existing server metadata.
        """
        return pulumi.get(self, "metadata")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The human-readable name of the server. Changing this creates a new
        server. NOTE(review): the generated doc said "network"; this getter
        belongs to the compute instance -- confirm wording against upstream.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="networkMode")
    def network_mode(self) -> pulumi.Output[Optional[str]]:
        """
        Special string for the `network` option to create the server.
        `network_mode` can be `"auto"` or `"none"`. Please see the following
        [reference](https://docs.openstack.org/api-ref/compute/?expanded=create-server-detail#id11)
        for more information. Conflicts with `network`.
        """
        return pulumi.get(self, "network_mode")
    @property
    @pulumi.getter
    def networks(self) -> pulumi.Output[Sequence['outputs.InstanceNetwork']]:
        """
        An array of one or more networks to attach to the instance. The
        network object structure is documented below. Changing this creates
        a new server.
        """
        return pulumi.get(self, "networks")
    @property
    @pulumi.getter
    def personalities(self) -> pulumi.Output[Optional[Sequence['outputs.InstancePersonality']]]:
        """
        Customize the personality of an instance by defining one or more
        files and their contents. The personality structure is described
        below.
        """
        return pulumi.get(self, "personalities")
    @property
    @pulumi.getter(name="powerState")
    def power_state(self) -> pulumi.Output[Optional[str]]:
        """
        Provide the VM state. Only 'active' and 'shutoff' are supported
        values. *Note*: If the initial power_state is shutoff, the VM will be
        stopped immediately after build, and provisioners like remote-exec or
        files are not supported.
        """
        return pulumi.get(self, "power_state")
    @property
    @pulumi.getter
    def region(self) -> pulumi.Output[str]:
        """
        The region in which to create the server instance. If omitted, the
        `region` argument of the provider is used. Changing this creates a
        new server.
        """
        return pulumi.get(self, "region")
    @property
    @pulumi.getter(name="schedulerHints")
    def scheduler_hints(self) -> pulumi.Output[Optional[Sequence['outputs.InstanceSchedulerHint']]]:
        """
        Provide the Nova scheduler with hints on how the instance should be
        launched. The available hints are described below.
        """
        return pulumi.get(self, "scheduler_hints")
    @property
    @pulumi.getter(name="securityGroups")
    def security_groups(self) -> pulumi.Output[Sequence[str]]:
        """
        An array of one or more security group names to associate with the
        server. Changing this results in adding/removing security groups from
        the existing server. *Note*: When attaching the instance to networks
        using Ports, place the security groups on the Port and not the
        instance. *Note*: Names should be used and not ids, as ids trigger
        unnecessary updates.
        """
        return pulumi.get(self, "security_groups")
    @property
    @pulumi.getter(name="stopBeforeDestroy")
    def stop_before_destroy(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether to try to stop the instance gracefully before destroying it,
        thus giving guest OS daemons a chance to stop correctly. If the
        instance doesn't stop within the timeout, it is destroyed anyway.
        """
        return pulumi.get(self, "stop_before_destroy")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A set of string tags for the instance. Changing this updates the
        existing instance tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="userData")
    def user_data(self) -> pulumi.Output[Optional[str]]:
        """
        The user data to provide when launching the instance.
        Changing this creates a new server.
        """
        return pulumi.get(self, "user_data")
    @property
    @pulumi.getter(name="vendorOptions")
    def vendor_options(self) -> pulumi.Output[Optional['outputs.InstanceVendorOptions']]:
        """
        Map of additional vendor-specific options. Supported options are
        described below.
        """
        return pulumi.get(self, "vendor_options")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
"""
# Copyright (c) 2018 Works Applications Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Paper: Adaptive Learning Rate via Covariance Matrix Based Preconditioning for Deep Neural Networks
Link: https://www.ijcai.org/proceedings/2017/0267.pdf
Base: RMSProp
"""
import torch
from torch.optim import Optimizer
class SDProp(Optimizer):
    """SDProp: adaptive learning rate via covariance-matrix-based preconditioning.

    Paper: https://www.ijcai.org/proceedings/2017/0267.pdf (IJCAI 2017).
    The update follows the RMSProp structure: an exponential moving average of
    squared gradients normalizes each step; optionally the estimate is centered
    (variance instead of raw second moment) and/or combined with momentum.

    Arguments:
        params (iterable): parameters to optimize, or dicts defining groups.
        lr (float, optional): learning rate (default: 1e-2).
        alpha (float, optional): smoothing constant of the squared-gradient
            moving average (default: 0.99).
        eps (float, optional): term added to the denominator for numerical
            stability (default: 1e-8).
        gamma (float, optional): decay rate from the paper. NOTE: validated
            but currently unused by :meth:`step`; kept for interface
            compatibility (default: 0.99).
        weight_decay (float, optional): L2 penalty (default: 0).
        momentum (float, optional): momentum factor (default: 0).
        centered (bool, optional): if True, normalize by the centered second
            moment (variance) of the gradient (default: False).
    """

    def __init__(self, params, lr=1e-2, alpha=0.99, eps=1e-8, gamma=0.99,
                 weight_decay=0, momentum=0, centered=False):
        if not 0.0 <= lr:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if not 0.0 <= eps:
            raise ValueError("Invalid epsilon value: {}".format(eps))
        if not 0.0 <= momentum:
            raise ValueError("Invalid momentum value: {}".format(momentum))
        if not 0.0 <= weight_decay:
            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
        if not 0.0 <= alpha:
            raise ValueError("Invalid alpha value: {}".format(alpha))
        if not 0 < gamma <= 1:
            raise ValueError("Invalid gamma value: {}".format(gamma))
        defaults = dict(lr=lr, momentum=momentum, alpha=alpha, eps=eps, centered=centered,
                        weight_decay=weight_decay, gamma=gamma)
        super(SDProp, self).__init__(params, defaults)

    def __setstate__(self, state):
        # Backfill options that may be missing from checkpoints saved by
        # older versions of this optimizer.
        super(SDProp, self).__setstate__(state)
        for group in self.param_groups:
            group.setdefault('momentum', 0)
            group.setdefault('centered', False)

    def step(self, closure=None):
        """Performs a single optimization step.

        Arguments:
            closure (callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                if grad.is_sparse:
                    raise RuntimeError('SDProp does not support sparse gradients')
                state = self.state[p]
                # Lazy state initialization on first step for this parameter.
                if len(state) == 0:
                    state['step'] = 0
                    state['square_avg'] = torch.zeros_like(p.data)
                    if group['momentum'] > 0:
                        state['momentum_buffer'] = torch.zeros_like(p.data)
                    if group['centered']:
                        state['grad_avg'] = torch.zeros_like(p.data)
                square_avg = state['square_avg']
                alpha = group['alpha']
                state['step'] += 1
                if group['weight_decay'] != 0:
                    # Modern keyword overload; the positional scalar-first
                    # form (grad.add(wd, p.data)) was deprecated and removed.
                    grad = grad.add(p.data, alpha=group['weight_decay'])
                # square_avg <- alpha * square_avg + (1 - alpha) * grad^2
                square_avg.mul_(alpha).addcmul_(grad, grad, value=1 - alpha)
                if group['centered']:
                    grad_avg = state['grad_avg']
                    grad_avg.mul_(alpha).add_(grad, alpha=1 - alpha)
                    # Centered: normalize by sqrt of variance estimate.
                    avg = square_avg.addcmul(grad_avg, grad_avg, value=-1).sqrt().add_(group['eps'])
                else:
                    avg = square_avg.sqrt().add_(group['eps'])
                if group['momentum'] > 0:
                    buf = state['momentum_buffer']
                    buf.mul_(group['momentum']).addcdiv_(grad, avg)
                    p.data.add_(buf, alpha=-group['lr'])
                else:
                    p.data.addcdiv_(grad, avg, value=-group['lr'])
        return loss
|
#!/usr/bin/env python
#===============================================================================
# Copyright (c) 2014 Geoscience Australia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither Geoscience Australia nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
"""
abstract_bandstack.py - interface for the bandstack class.
Different types of dataset will have different versions of this
class, obtained by sub-classing and overriding the abstract methods.
It is the responsibility of the stack_bands method of the dataset
object to instantiate the correct subclass.
"""
from __future__ import absolute_import
from osgeo import gdal
from agdc.abstract_ingester import AbstractBandstack
from collections import OrderedDict
class ModisBandstack(AbstractBandstack):
    """Modis subclass of AbstractBandstack class.

    Wraps a MODIS dataset's bands as a single GDAL VRT so that the ingester
    can reproject them to tile coordinates as one stack.
    """
    def __init__(self, dataset, band_dict):
        """The bandstack allows for the construction of a list, or stack, of
        bands from the given dataset.

        :param dataset: the dataset object the bands belong to; its
            metadata_dict feeds the superclass and add_metadata.
        :param band_dict: mapping of file number -> band source info.
        """
        super(ModisBandstack, self).__init__(dataset.metadata_dict)
        #Order the band_dict by the file number key
        self.dataset = dataset
        self.band_dict = \
            OrderedDict(sorted(band_dict.items(), key=lambda t: t[0]))
        # Populated later by buildvrt / list_source_files.
        self.source_file_list = None
        self.nodata_list = None
        self.vrt_name = None
        self.vrt_band_stack = None
    def buildvrt(self, temp_dir):
        """Given a dataset_record and corresponding dataset, build the vrt that
        will be used to reproject the dataset's data to tile coordinates.

        NOTE(review): no VRT is actually constructed here; the dataset's
        pre-existing _vrt_file is reused for both the name and the stack, and
        temp_dir is unused -- confirm this is intentional for MODIS.
        """
        self.vrt_name = self.dataset._vrt_file
        self.vrt_band_stack = self.dataset._vrt_file
        self.add_metadata(self.vrt_name)
    def list_source_files(self):
        """Given the dictionary of band source information, form a list
        of scene file names from which a vrt can be constructed. Also return a
        list of nodata values for use by add_metadata.

        No-op for MODIS: the pre-built VRT already references its sources.
        """
        pass
    def get_vrt_name(self, vrt_dir):
        """Use the dataset's metadata to form the vrt file name.

        NOTE(review): vrt_dir is ignored; returns the name cached by
        buildvrt(), so this is only valid after buildvrt() has run.
        """
        #dataset_basename = os.path.basename(self.dataset_mdd['dataset_path'])
        #return os.path.join(vrt_dir, dataset_basename)
        return self.vrt_name
    def add_metadata(self, vrt_filename):
        """Add metadata to the VRT.

        Stamps satellite/sensor/time/path metadata onto the VRT dataset and
        records per-band nodata values in self.nodata_list.
        """
        band_stack_dataset = gdal.Open(vrt_filename)
        assert band_stack_dataset, 'Unable to open VRT %s' % vrt_filename
        band_stack_dataset.SetMetadata(
            {'satellite': self.dataset_mdd['satellite_tag'].upper(),
             'sensor': self.dataset_mdd['sensor_name'].upper(),
             'start_datetime': self.dataset_mdd['start_datetime'].isoformat(),
             'end_datetime': self.dataset_mdd['end_datetime'].isoformat(),
             'path': '%03d' % self.dataset_mdd['x_ref']}
            )
        self.nodata_list = []
        for band_info in self.band_dict.values():
            band_number = band_info['tile_layer']
            band = band_stack_dataset.GetRasterBand(band_number)
            self.nodata_list.append(band.GetNoDataValue())
            if band.GetNoDataValue() is not None:
                # Re-assert the existing nodata value -- presumably to force
                # it to be written into the VRT; TODO confirm necessity.
                band.SetNoDataValue(band.GetNoDataValue())
        band_stack_dataset.FlushCache()
|
'''OpenGL extension NV.vertex_program1_1
This module customises the behaviour of the
OpenGL.raw.GL.NV.vertex_program1_1 to provide a more
Python-friendly API
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.NV.vertex_program1_1 import *
### END AUTOGENERATED SECTION
|
#!/usr/bin/env python
"""
Module that performs extraction. For usage, refer to documentation for the class
'Extractor'. This module can also be executed directly,
e.g. 'extractor.py <input> <output>'.
"""
import argparse
import hashlib
import multiprocessing
import os
import shutil
import tempfile
import traceback
import magic
import binwalk
class Extractor(object):
    """
    Class that extracts kernels and filesystems from firmware images, given an
    input file or directory and output directory.

    Work items are fanned out over a multiprocessing.Pool; instances are
    therefore pickled (see __getstate__), which constrains what state they
    may hold.
    """

    # Directories that define the root of a UNIX filesystem, and the
    # appropriate threshold condition
    UNIX_DIRS = ["bin", "etc", "dev", "home", "lib", "mnt", "opt", "root",
                 "run", "sbin", "tmp", "usr", "var"]
    UNIX_THRESHOLD = 4

    # Lock to prevent concurrent access to visited set. Unfortunately, must be
    # static because it cannot be pickled or passed as instance attribute.
    visited_lock = multiprocessing.Lock()

    def __init__(self, indir, outdir=None, rootfs=True, kernel=True,
                 numproc=True, server=None, brand=None):
        """
        :param indir: input firmware update file or directory.
        :param outdir: output directory for extracted tarballs (optional).
        :param rootfs: whether to attempt root filesystem extraction.
        :param kernel: whether to attempt kernel extraction.
        :param numproc: truthy -> use a multiprocessing worker pool.
        :param server: hostname of the SQL server, or None for no database.
        :param brand: firmware brand; inferred from the path when None.
        """
        # Input firmware update file or directory
        self._input = os.path.abspath(indir)
        # Output firmware directory
        self.output_dir = os.path.abspath(outdir) if outdir else None

        # Whether to attempt to extract kernel
        self.do_kernel = kernel

        # Whether to attempt to extract root filesystem
        self.do_rootfs = rootfs

        # Brand of the firmware
        self.brand = brand

        # Hostname of SQL server
        self.database = server

        # Worker pool.
        self._pool = multiprocessing.Pool() if numproc else None

        # Set containing MD5 checksums of visited items
        self.visited = set()

        # List containing tagged items to extract as 2-tuple: (tag [e.g. MD5],
        # path)
        self._list = list()

    def __getstate__(self):
        """
        Eliminate attributes that should not be pickled.

        The pool itself and the pending work list cannot cross the process
        boundary; workers only need the configuration attributes.
        """
        self_dict = self.__dict__.copy()
        del self_dict["_pool"]
        del self_dict["_list"]
        return self_dict

    @staticmethod
    def io_dd(indir, offset, size, outdir):
        """
        Given a path to a target file, extract size bytes from specified offset
        to given output file.
        """
        if not size:
            return

        with open(indir, "rb") as ifp:
            with open(outdir, "wb") as ofp:
                ifp.seek(offset, 0)
                ofp.write(ifp.read(size))

    @staticmethod
    def magic(indata, mime=False):
        """
        Performs file magic while maintaining compatibility with different
        libraries.

        The file-bindings flavor exposes magic.open(); python-magic does not
        (AttributeError), so fall back to its magic.Magic interface and alias
        from_file so both paths expose a uniform .file(path) call.
        """
        try:
            if mime:
                mymagic = magic.open(magic.MAGIC_MIME_TYPE)
            else:
                mymagic = magic.open(magic.MAGIC_NONE)
            mymagic.load()
        except AttributeError:
            mymagic = magic.Magic(mime)
            mymagic.file = mymagic.from_file
        return mymagic.file(indata)

    @staticmethod
    def io_md5(target):
        """
        Performs MD5 with a block size of 64kb.

        Streams the file so arbitrarily large images fit in memory.
        """
        blocksize = 65536
        hasher = hashlib.md5()

        with open(target, 'rb') as ifp:
            buf = ifp.read(blocksize)
            while buf:
                hasher.update(buf)
                buf = ifp.read(blocksize)
            return hasher.hexdigest()

    @staticmethod
    def io_rm(target):
        """
        Attempts to recursively delete a directory.
        """
        shutil.rmtree(target, ignore_errors=False, onerror=Extractor._io_err)

    @staticmethod
    def _io_err(function, path, excinfo):
        """
        Internal function used by '_rm' to print out errors.
        """
        print(("!! %s: Cannot delete %s!\n%s" % (function, path, excinfo)))

    @staticmethod
    def io_find_rootfs(start, recurse=True):
        """
        Attempts to find a Linux root directory.

        Returns a 2-tuple (found, path); on failure path is the original
        start directory.
        """
        # Recurse into single directory chains, e.g. jffs2-root/fs_1/.../
        path = start
        while (len(os.listdir(path)) == 1 and
               os.path.isdir(os.path.join(path, os.listdir(path)[0]))):
            path = os.path.join(path, os.listdir(path)[0])

        # count number of unix-like directories
        count = 0
        for subdir in os.listdir(path):
            if subdir in Extractor.UNIX_DIRS and \
                os.path.isdir(os.path.join(path, subdir)):
                count += 1

        # check for extracted filesystem, otherwise update queue
        if count >= Extractor.UNIX_THRESHOLD:
            return (True, path)

        # in some cases, multiple filesystems may be extracted, so recurse to
        # find best one
        if recurse:
            for subdir in os.listdir(path):
                if os.path.isdir(os.path.join(path, subdir)):
                    res = Extractor.io_find_rootfs(os.path.join(path, subdir),
                                                   False)
                    if res[0]:
                        return res

        return (False, start)

    def extract(self):
        """
        Perform extraction of firmware updates from input to tarballs in output
        directory using a thread pool.
        """
        if os.path.isdir(self._input):
            for path, _, files in os.walk(self._input):
                for item in files:
                    self._list.append(os.path.join(path, item))
        elif os.path.isfile(self._input):
            self._list.append(self._input)
        else:
            # Unreadable input: report it; the work list stays empty so the
            # map below is a no-op.
            print("!! Cannot read file: %s" % (self._input,))

        if self.output_dir and not os.path.isdir(self.output_dir):
            os.makedirs(self.output_dir)

        if self._pool:
            self._pool.map(self._extract_item, self._list)
        else:
            for item in self._list:
                self._extract_item(item)

    def _extract_item(self, path):
        """
        Wrapper function that creates an ExtractionItem and calls the extract()
        method.
        """
        ExtractionItem(self, path, 0).extract()
class ExtractionItem(object):
"""
Class that encapsulates the state of a single item that is being extracted.
"""
# Maximum recursion breadth and depth
RECURSION_BREADTH = 5
RECURSION_DEPTH = 3
    def __init__(self, extractor, path, depth, tag=None):
        """
        :param extractor: parent Extractor holding global configuration.
        :param path: file to analyze.
        :param depth: current recursion depth (0 for a top-level item).
        :param tag: pre-assigned filename tag; generated when None.
        """
        # Temporary directory
        self.temp = None

        # Recursion depth counter
        self.depth = depth

        # Reference to parent extractor object
        self.extractor = extractor

        # File path
        self.item = path

        # Database connection
        if self.extractor.database:
            # Imported lazily so the module works without psycopg2 when no
            # database server is configured.
            import psycopg2
            self.database = psycopg2.connect(database="firmware",
                                             user="firmadyne",
                                             password="firmadyne",
                                             host=self.extractor.database)
        else:
            self.database = None

        # Checksum
        self.checksum = Extractor.io_md5(path)

        # Tag
        self.tag = tag if tag else self.generate_tag()

        # Output file path and filename prefix
        self.output = os.path.join(self.extractor.output_dir, self.tag) if \
            self.extractor.output_dir else None

        # Status, with terminate indicating early termination for this item
        self.terminate = False
        self.status = None
        self.update_status()
    def __del__(self):
        """
        Close the database connection and remove the temporary working
        directory. NOTE(review): cleanup relies on __del__ being invoked,
        which CPython does not guarantee at interpreter exit.
        """
        if self.database:
            self.database.close()

        if self.temp:
            self.printf(">> Cleaning up %s..." % self.temp)
            Extractor.io_rm(self.temp)
def printf(self, fmt):
"""
Prints output string with appropriate depth indentation.
"""
print(("\t" * self.depth + fmt))
def generate_tag(self):
"""
Generate the filename tag.
"""
if not self.database:
return os.path.basename(self.item) + "_" + self.checksum
try:
image_id = None
cur = self.database.cursor()
if self.extractor.brand:
brand = self.extractor.brand
else:
brand = os.path.relpath(self.item).split(os.path.sep)[0]
cur.execute("SELECT id FROM brand WHERE name=%s", (brand, ))
brand_id = cur.fetchone()
if not brand_id:
cur.execute("INSERT INTO brand (name) VALUES (%s) RETURNING id",
(brand, ))
brand_id = cur.fetchone()
if brand_id:
cur.execute("SELECT id FROM image WHERE hash=%s",
(self.checksum, ))
image_id = cur.fetchone()
if not image_id:
cur.execute("INSERT INTO image (filename, brand_id, hash) \
VALUES (%s, %s, %s) RETURNING id",
(os.path.basename(self.item), brand_id[0],
self.checksum))
image_id = cur.fetchone()
self.database.commit()
except BaseException:
traceback.print_exc()
self.database.rollback()
finally:
if cur:
cur.close()
if image_id:
self.printf(">> Database Image ID: %s" % image_id[0])
return str(image_id[0]) if \
image_id else os.path.basename(self.item) + "_" + self.checksum
def get_kernel_status(self):
"""
Get the flag corresponding to the kernel status.
"""
return self.status[0]
def get_rootfs_status(self):
"""
Get the flag corresponding to the root filesystem status.
"""
return self.status[1]
    def update_status(self):
        """
        Updates the status flags using the tag to determine completion status.

        A stage counts as done when its output file already exists, or
        trivially when that stage is disabled. Completed stages are mirrored
        into the database when one is configured. Returns get_status().
        """
        kernel_done = os.path.isfile(self.get_kernel_path()) if \
            self.extractor.do_kernel and self.output else \
            not self.extractor.do_kernel
        rootfs_done = os.path.isfile(self.get_rootfs_path()) if \
            self.extractor.do_rootfs and self.output else \
            not self.extractor.do_rootfs

        self.status = (kernel_done, rootfs_done)

        if self.database and kernel_done and self.extractor.do_kernel:
            self.update_database("kernel_extracted", "True")
        if self.database and rootfs_done and self.extractor.do_rootfs:
            self.update_database("rootfs_extracted", "True")

        return self.get_status()
def update_database(self, field, value):
"""
Update a given field in the database.
"""
ret = True
if self.database:
try:
cur = self.database.cursor()
cur.execute("UPDATE image SET " + field + "='" + value +
"' WHERE id=%s", (self.tag, ))
self.database.commit()
except BaseException:
ret = False
traceback.print_exc()
self.database.rollback()
finally:
if cur:
cur.close()
return ret
def get_status(self):
"""
Returns True if early terminate signaled, extraction is complete,
otherwise False.
"""
return True if self.terminate or all(i for i in self.status) else False
def get_kernel_path(self):
"""
Return the full path (including filename) to the output kernel file.
"""
return self.output + ".kernel" if self.output else None
def get_rootfs_path(self):
"""
Return the full path (including filename) to the output root filesystem
file.
"""
return self.output + ".tar.gz" if self.output else None
    def extract(self):
        """
        Perform the actual extraction of firmware updates, recursively. Returns
        True if extraction complete, otherwise False.

        Skips items that are already complete, exceed the recursion depth, were
        visited before (by MD5), or are blacklisted; otherwise runs each
        analysis pass in order until the item's status is complete.
        """
        self.printf("\n" + self.item.encode("utf-8", "replace").decode("utf-8"))

        # check if item is complete
        if self.get_status():
            self.printf(">> Skipping: completed!")
            return True

        # check if exceeding recursion depth
        if self.depth > ExtractionItem.RECURSION_DEPTH:
            self.printf(">> Skipping: recursion depth %d" % self.depth)
            return self.get_status()

        # check if checksum is in visited set
        self.printf(">> MD5: %s" % self.checksum)
        with Extractor.visited_lock:
            if self.checksum in self.extractor.visited:
                self.printf(">> Skipping: %s..." % self.checksum)
                return self.get_status()
            else:
                self.extractor.visited.add(self.checksum)

        # check if filetype is blacklisted
        if self._check_blacklist():
            return self.get_status()

        # create working directory
        self.temp = tempfile.mkdtemp()

        try:
            self.printf(">> Tag: %s" % self.tag)
            self.printf(">> Temp: %s" % self.temp)
            self.printf(">> Status: Kernel: %s, Rootfs: %s, Do_Kernel: %s, \
                Do_Rootfs: %s" % (self.get_kernel_status(),
                                  self.get_rootfs_status(),
                                  self.extractor.do_kernel,
                                  self.extractor.do_rootfs))

            # Passes are ordered from most to least specific.
            for analysis in [self._check_archive, self._check_encryption, self._check_firmware,
                             self._check_kernel, self._check_rootfs,
                             self._check_compressed]:
                # Move to temporary directory so binwalk does not write to input
                os.chdir(self.temp)

                # Update status only if analysis changed state
                if analysis():
                    if self.update_status():
                        self.printf(">> Skipping: completed!")
                        return True
        except Exception:
            traceback.print_exc()

        # Falls through (implicitly returning None, i.e. falsy) when no pass
        # completed the item; an exception explicitly returns False.
        return False
def _check_blacklist(self):
"""
Check if this file is blacklisted for analysis based on file type.
"""
# First, use MIME-type to exclude large categories of files
filetype = Extractor.magic(self.item.encode("utf-8", "surrogateescape"),
mime=True)
if any(s in filetype for s in ["application/x-executable",
"application/x-dosexec",
"application/x-object",
"application/pdf",
"application/msword",
"image/", "text/", "video/"]):
self.printf(">> Skipping: %s..." % filetype)
return True
# Next, check for specific file types that have MIME-type
# 'application/octet-stream'
filetype = Extractor.magic(self.item.encode("utf-8", "surrogateescape"))
if any(s in filetype for s in ["executable", "universal binary",
"relocatable", "bytecode", "applet"]):
self.printf(">> Skipping: %s..." % filetype)
return True
# Finally, check for specific file extensions that would be incorrectly
# identified
if self.item.endswith(".dmg"):
self.printf(">> Skipping: %s..." % (self.item))
return True
return False
    def _check_archive(self):
        """
        If this file is an archive, recurse over its contents, unless it matches
        an extracted root filesystem. Thin wrapper: archives and compressed
        files share the _check_recursive code path.
        """
        return self._check_recursive("archive")
    def _check_encryption(self):
        """
        Detect and decrypt D-Link "SHRS" encrypted firmware images.

        Reads the 4-byte magic; on a match, shells out to dd + openssl to
        decrypt the payload into the temp directory and returns True so the
        decrypted file is picked up by later passes.

        NOTE(review): the shell command interpolates self.item and self.temp
        unquoted; a path containing spaces or shell metacharacters would break
        (or alter) the command -- consider shlex.quote/subprocess.
        """
        header = b""
        with open(self.item, "rb") as f:
            header = f.read(4)

        if header == b"SHRS":
            print(">>>> Found D-Link encrypted firmware in %s!" % (self.item))
            # Source: https://github.com/0xricksanchez/dlink-decrypt
            command = 'dd if=%s skip=1756 iflag=skip_bytes status=none | openssl aes-128-cbc -d -nopad -nosalt -K "c05fbf1936c99429ce2a0781f08d6ad8" -iv "67c6697351ff4aec29cdbaabf2fbe346" --nosalt -in /dev/stdin -out %s > /dev/null 2>&1' % (self.item, os.path.join(self.temp, "dlink_decrypt"))
            os.system(command)
            return True
        return False
    def _check_firmware(self):
        """
        If this file is of a known firmware type, directly attempt to extract
        the kernel and root filesystem.

        Two container formats are recognized from binwalk signature results:
        uImage headers (kernel payload follows the 64-byte header) and
        TP-Link/TRX images (kernel and rootfs located by offset/length fields
        parsed out of the signature description).
        """
        for module in binwalk.scan(self.item, "-y", "header", "--run-as=root", "--preserve-symlinks",
                                   signature=True, quiet=True):
            for entry in module.results:
                # uImage
                if "uImage header" in entry.description:
                    if not self.get_kernel_status() and \
                        "OS Kernel Image" in entry.description:
                        # Payload starts right after the 64-byte uImage header.
                        kernel_offset = entry.offset + 64
                        kernel_size = 0

                        # Parse "image size: NNN" out of the description.
                        for stmt in entry.description.split(','):
                            if "image size:" in stmt:
                                kernel_size = int(''.join(
                                    i for i in stmt if i.isdigit()), 10)

                        # Sanity-check the size against the file length before
                        # carving and recursing on the carved kernel.
                        if kernel_size != 0 and kernel_offset + kernel_size \
                            <= os.path.getsize(self.item):
                            self.printf(">>>> %s" % entry.description)

                            tmp_fd, tmp_path = tempfile.mkstemp(dir=self.temp)
                            os.close(tmp_fd)
                            Extractor.io_dd(self.item, kernel_offset,
                                            kernel_size, tmp_path)
                            kernel = ExtractionItem(self.extractor, tmp_path,
                                                    self.depth, self.tag)

                            return kernel.extract()

                    # elif "RAMDisk Image" in entry.description:
                    #     self.printf(">>>> %s" % entry.description)
                    #     self.printf(">>>> Skipping: RAMDisk / initrd")
                    #     self.terminate = True
                    #     return True

                # TP-Link or TRX
                elif not self.get_kernel_status() and \
                    not self.get_rootfs_status() and \
                    "rootfs offset: " in entry.description and \
                    "kernel offset: " in entry.description:
                    kernel_offset = 0
                    kernel_size = 0
                    rootfs_offset = 0
                    rootfs_size = 0

                    # Parse the offset/length fields from the description.
                    for stmt in entry.description.split(','):
                        if "kernel offset:" in stmt:
                            kernel_offset = int(stmt.split(':')[1], 16)
                        elif "kernel length:" in stmt:
                            kernel_size = int(stmt.split(':')[1], 16)
                        elif "rootfs offset:" in stmt:
                            rootfs_offset = int(stmt.split(':')[1], 16)
                        elif "rootfs length:" in stmt:
                            rootfs_size = int(stmt.split(':')[1], 16)

                    # compute sizes if only offsets provided
                    if kernel_offset != rootfs_size and kernel_size == 0 and \
                        rootfs_size == 0:
                        kernel_size = rootfs_offset - kernel_offset
                        rootfs_size = os.path.getsize(self.item) - rootfs_offset

                    # ensure that computed values are sensible
                    if (kernel_size > 0 and kernel_offset + kernel_size \
                        <= os.path.getsize(self.item)) and \
                        (rootfs_size != 0 and rootfs_offset + rootfs_size \
                        <= os.path.getsize(self.item)):
                        self.printf(">>>> %s" % entry.description)

                        # Carve and recurse on the kernel region...
                        tmp_fd, tmp_path = tempfile.mkstemp(dir=self.temp)
                        os.close(tmp_fd)
                        Extractor.io_dd(self.item, kernel_offset, kernel_size,
                                        tmp_path)
                        kernel = ExtractionItem(self.extractor, tmp_path,
                                                self.depth, self.tag)
                        kernel.extract()

                        # ...then carve and recurse on the rootfs region.
                        tmp_fd, tmp_path = tempfile.mkstemp(dir=self.temp)
                        os.close(tmp_fd)
                        Extractor.io_dd(self.item, rootfs_offset, rootfs_size,
                                        tmp_path)
                        rootfs = ExtractionItem(self.extractor, tmp_path,
                                                self.depth, self.tag)
                        rootfs.extract()

                        return self.update_status()
        return False
    def _check_kernel(self):
        """
        If this file contains a kernel version string, assume it is a kernel.
        Only Linux kernels are currently extracted.

        The detected version string is recorded to the database; a Linux
        kernel file is copied verbatim to the output kernel path (or, with no
        output directory, kernel extraction is simply marked satisfied).
        """
        if not self.get_kernel_status():
            for module in binwalk.scan(self.item, "-y", "kernel", "--run-as=root", "--preserve-symlinks",
                                       signature=True, quiet=True):
                for entry in module.results:
                    if "kernel version" in entry.description:
                        self.update_database("kernel_version",
                                             entry.description)
                        if "Linux" in entry.description:
                            if self.get_kernel_path():
                                shutil.copy(self.item, self.get_kernel_path())
                            else:
                                # No output directory: stop looking for a
                                # kernel from here on.
                                self.extractor.do_kernel = False
                            self.printf(">>>> %s" % entry.description)
                            return True
                        # VxWorks, etc
                        else:
                            self.printf(">>>> Ignoring: %s" % entry.description)
                            return False
                return False
        return False
    def _check_rootfs(self):
        """
        If this file contains a known filesystem type, extract it.

        Returns True when a Linux root filesystem is found (archived to
        self.output as a gzipped tarball when an output path is set);
        False otherwise.
        """
        if not self.get_rootfs_status():
            # work-around issue with binwalk signature definitions for ubi
            for module in binwalk.scan(self.item, "-e", "-r", "-y",
                                       "filesystem", "-y", "ubi", "--run-as=root", "--preserve-symlinks",
                                       signature=True, quiet=True):
                # Only the first signature hit per module is printed.
                for entry in module.results:
                    self.printf(">>>> %s" % entry.description)
                    break
                if module.extractor.directory:
                    unix = Extractor.io_find_rootfs(module.extractor.directory)
                    if not unix[0]:
                        return False
                    self.printf(">>>> Found Linux filesystem in %s!" % unix[1])
                    if self.output:
                        # Archive the extracted tree for downstream analysis.
                        shutil.make_archive(self.output, "gztar",
                                            root_dir=unix[1])
                    else:
                        # No output path configured: stop rootfs extraction.
                        self.extractor.do_rootfs = False
                    return True
        return False
    def _check_compressed(self):
        """
        If this file appears to be compressed, decompress it and recurse over
        its contents.

        Thin wrapper delegating to _check_recursive with the "compressed"
        binwalk signature family.
        """
        return self._check_recursive("compressed")
    # treat both archived and compressed files using the same pathway. this is
    # because certain files may appear as e.g. "xz compressed data" but still
    # extract into a root filesystem.
    def _check_recursive(self, fmt):
        """
        Unified implementation for checking both "archive" and "compressed"
        items.

        Extracts with binwalk using signature family ``fmt``. If the
        extraction yields a Linux root filesystem, archives it (or disables
        further rootfs extraction) and returns True. Otherwise queues each
        extracted file as a new ExtractionItem one depth level deeper, up to
        RECURSION_BREADTH items. Returns True on completion/early
        termination, False when nothing conclusive was found.
        """
        desc = None
        # perform extraction
        for module in binwalk.scan(self.item, "-e", "-r", "-y", fmt, "--run-as=root", "--preserve-symlinks",
                                   signature=True, quiet=True):
            # Only the first result's description is recorded and printed.
            for entry in module.results:
                # skip cpio/initrd files since they should be included with
                # kernel
                # if "cpio archive" in entry.description:
                #     self.printf(">> Skipping: cpio: %s" % entry.description)
                #     self.terminate = True
                #     return True
                desc = entry.description
                self.printf(">>>> %s" % entry.description)
                break
            if module.extractor.directory:
                unix = Extractor.io_find_rootfs(module.extractor.directory)
                # check for extracted filesystem, otherwise update queue
                if unix[0]:
                    self.printf(">>>> Found Linux filesystem in %s!" % unix[1])
                    if self.output:
                        shutil.make_archive(self.output, "gztar",
                                            root_dir=unix[1])
                    else:
                        self.extractor.do_rootfs = False
                    return True
                else:
                    count = 0
                    self.printf(">> Recursing into %s ..." % fmt)
                    for root, _, files in os.walk(module.extractor.directory):
                        # sort both descending alphabetical and increasing
                        # length
                        files.sort()
                        files.sort(key=len)
                        # handle case where original file name is restored; put
                        # it to front of queue
                        if desc and "original file name:" in desc:
                            orig = None
                            for stmt in desc.split(","):
                                if "original file name:" in stmt:
                                    orig = stmt.split("\"")[1]
                            if orig and orig in files:
                                files.remove(orig)
                                files.insert(0, orig)
                        for filename in files:
                            if count > ExtractionItem.RECURSION_BREADTH:
                                self.printf(">> Skipping: recursion breadth %d"\
                                    % ExtractionItem.RECURSION_BREADTH)
                                self.terminate = True
                                return True
                            else:
                                new_item = ExtractionItem(self.extractor,
                                                          os.path.join(root,
                                                                       filename),
                                                          self.depth + 1,
                                                          self.tag)
                                if new_item.extract():
                                    # check that we are actually done before
                                    # performing early termination. for example,
                                    # we might decide to skip on one subitem,
                                    # but we still haven't finished
                                    if self.update_status():
                                        return True
                            count += 1
        return False
def main():
    """Parse command-line arguments and run the firmware Extractor.

    Positional arguments select the input image (or directory of images) and
    the output directory; flags disable rootfs/kernel extraction, disable
    parallelism, and supply the SQL host and firmware brand.
    """
    parser = argparse.ArgumentParser(description="Extracts filesystem and \
        kernel from Linux-based firmware images")
    parser.add_argument("input", action="store", help="Input file or directory")
    parser.add_argument("output", action="store", nargs="?", default="images",
                        help="Output directory for extracted firmware")
    # Bug fix: the option string was "-sql " (trailing space), which users
    # could not type literally and which only matched through argparse's
    # prefix abbreviation.
    parser.add_argument("-sql", dest="sql", action="store", default=None,
                        help="Hostname of SQL server")
    parser.add_argument("-nf", dest="rootfs", action="store_false",
                        default=True, help="Disable extraction of root \
        filesystem (may decrease extraction time)")
    parser.add_argument("-nk", dest="kernel", action="store_false",
                        default=True, help="Disable extraction of kernel \
        (may decrease extraction time)")
    parser.add_argument("-np", dest="parallel", action="store_false",
                        default=True, help="Disable parallel operation \
        (may increase extraction time)")
    parser.add_argument("-b", dest="brand", action="store", default=None,
                        help="Brand of the firmware image")
    result = parser.parse_args()
    extract = Extractor(result.input, result.output, result.rootfs,
                        result.kernel, result.parallel, result.sql,
                        result.brand)
    extract.extract()


if __name__ == "__main__":
    main()
|
import turtle
import random

# Riley the turtle draws a five-pointed star whose pen color is picked from a
# randomly chosen "mood".
riley = turtle.Turtle()
riley.width(5)
mood = random.choice(["happy", "sad", "angry", "party"])
if mood == "happy":
    riley.color("yellow")
elif mood == "sad":
    riley.color("blue")
elif mood == "angry":
    riley.color("red")
elif mood == "party":
    riley.color("magenta")
else:
    # Unreachable with the four choices above; kept as a safe fallback.
    riley.color("gray")
# Five segments turning 144 degrees each traces the {5/2} star polygon.
for side in range(5):
    riley.forward(100)
    riley.right(144)
# Keep the drawing window open until the user presses Enter.
input()
|
from __future__ import print_function, division
import numpy as np
from numpy.random import RandomState
def estimate_mean_std(vals, esttype):
    """
    Estimate the mean and standard deviation of ``vals``.

    :param vals: array-like of sample values
    :param esttype: 'robust' uses the median and the scaled median absolute
        deviation (1.4826 * MAD, consistent with a Gaussian); 'mle' uses the
        sample mean and standard deviation
    :return: (mean, std) tuple
    :raises ValueError: if ``esttype`` is not 'robust' or 'mle' (previously
        an unknown value fell through to an opaque UnboundLocalError on the
        return statement)
    """
    if esttype == 'robust':
        mean = np.median(vals)
        std = 1.4826 * np.median(np.abs(vals - mean))
    elif esttype == 'mle':
        mean = np.mean(vals)
        std = np.std(vals - mean)
    else:
        raise ValueError("unknown estimator type: {0!r}".format(esttype))
    return mean, std
class CryoDataset:
    """
    Pairs a cryo-EM image stack with its CTF stack and provides noise
    statistics, normalization, and train/test minibatch partitioning.
    """

    def __init__(self, imgstack, ctfstack):
        self.imgstack = imgstack
        self.ctfstack = ctfstack
        # Every image must have a corresponding CTF entry.
        assert self.imgstack.get_num_images() == self.ctfstack.get_num_images()
        self.N = self.imgstack.get_num_pixels()
        self.pixel_size = self.imgstack.get_pixel_size()

    def compute_noise_statistics(self):
        """Estimate noise/data/signal variance and print an SNR summary."""
        # self.mleDC_est = self.estimate_dc()
        self.noise_var = self.imgstack.estimate_noise_variance()
        self.data_var = self.imgstack.compute_variance()
        # NOTE(review): the estimated noise variance is immediately replaced
        # by 1.0, discarding the estimate above -- preserved as-is; confirm
        # this is intentional.
        self.noise_var = 1.0
        print('Dataset noise profile')
        print(' Noise: {0:.3g}'.format(np.sqrt(self.noise_var)))
        print(' Data: {0:.3g}'.format(np.sqrt(self.data_var)))
        # assert self.data_var > self.noise_var
        self.signal_var = self.data_var - self.noise_var
        print(' Signal: {0:.3g}'.format(np.sqrt(self.signal_var)))
        print(' Signal-to-Noise Ratio: {0:.1f}% ({1:.1f}dB)'.format(100 * self.signal_var / self.noise_var, 10 * np.log10(self.signal_var / self.noise_var)))

    def normalize_dataset(self):
        """Rescale images and CTFs so the noise variance becomes 1.0."""
        self.imgstack.scale_images(1.0 / np.sqrt(self.noise_var))
        self.ctfstack.scale_ctfs(1.0 / np.sqrt(self.noise_var))
        self.data_var = self.data_var / self.noise_var
        self.signal_var = self.signal_var / self.noise_var
        self.noise_var = 1.0

    def divide_dataset(self, minibatch_size, testset_size, partition, num_partitions, seed):
        """
        Shuffle the dataset, split off a test set, select this process's
        partition of the training indices, and carve them into minibatches.

        :param minibatch_size: requested number of images per minibatch
        :param testset_size: number of held-out test images, or None
        :param partition: zero-based index of this partition
        :param num_partitions: total number of partitions
        :param seed: RNG seed for the permutations
        """
        self.rand = RandomState(seed)
        self.N_D = self.imgstack.get_num_images()
        self.idxs = self.rand.permutation(self.N_D)
        print('Dividing dataset of {0} images with minisize of {1}'.format(self.N_D, minibatch_size))
        if testset_size is not None:
            print(' Test Images: {0}'.format(testset_size))
            self.test_idxs = self.idxs[0:testset_size]
            self.train_idxs = self.idxs[testset_size:]
        else:
            self.train_idxs = self.idxs
            self.test_idxs = []
        if num_partitions > 1:
            print(' Partition: {0} of {1}'.format(partition + 1, num_partitions))
            N_D = len(self.train_idxs)
            # Bug fix: use integer floor division. True division produced a
            # float partition size and made the slice below raise TypeError
            # on Python 3.
            partSz = N_D // num_partitions
            self.train_idxs = self.train_idxs[partition *
                                              partSz:(partition + 1) * partSz]
        self.N_D_Test = len(self.test_idxs)
        self.N_D_Train = len(self.train_idxs)
        # Spread the remainder over trailing batches so every image is used:
        # numRegBatches batches of real_minisize images followed by N_Rem
        # batches of real_minisize + 1 images.
        numBatches = int(np.floor(float(self.N_D_Train) / minibatch_size))
        real_minisize = int(np.floor(float(self.N_D_Train) / numBatches))
        N_Rem = self.N_D_Train - real_minisize * numBatches
        numRegBatches = numBatches - N_Rem
        batchInds = [(real_minisize * i, real_minisize * (i + 1))
                     for i in range(numRegBatches)] + \
                    [(real_minisize * numRegBatches + (real_minisize + 1) * i,
                      min(real_minisize * numRegBatches + (real_minisize + 1) * (i + 1), self.N_D_Train))
                     for i in range(N_Rem)]
        self.batch_idxs = np.array(batchInds)
        self.N_batches = self.batch_idxs.shape[0]
        self.batch_order = self.rand.permutation(self.N_batches)
        batch_sizes = self.batch_idxs[:, 1] - self.batch_idxs[:, 0]
        print(' Train Images: {0}'.format(self.N_D_Train))
        print(' Minibatches: {0}'.format(self.N_batches))
        print(' Batch Size Range: {0} - {1}'.format(batch_sizes.min(), batch_sizes.max()))
        self.minibatch_size = minibatch_size
        self.testset_size = testset_size
        self.partition = partition
        self.num_partitions = num_partitions
        self.reset_minibatches(True)

    def get_dc_estimate(self):
        """Return the cached DC estimate (set elsewhere; see estimate_dc)."""
        return self.mleDC_est

    def estimate_dc(self, esttype='robust'):
        """Estimate the mean DC component across images, CTF-corrected.

        :return: (mleDC, mleDC_std, mleDC_est_std) where the last value is
            the standard error of the mean estimate.
        """
        N = self.N
        obs = []
        ctf_dcs = {}
        zeros = np.zeros((1, 2))
        for img_i, img in enumerate(self.imgstack):
            ctf_i = self.ctfstack.get_ctf_idx_for_image(img_i)
            if ctf_i not in ctf_dcs:
                # Cache each CTF's DC response; many images share a CTF.
                ctf_dcs[ctf_i] = self.ctfstack.get_ctf(ctf_i).compute(zeros)
            obs.append(np.mean(img) * np.sqrt(float(N)) / ctf_dcs[ctf_i])
        obs = np.array(obs)
        mleDC, mleDC_std = estimate_mean_std(obs, esttype)
        mleDC_est_std = mleDC_std / np.sqrt(len(obs))
        return mleDC, mleDC_std, mleDC_est_std

    def set_datasign(self, datasign):
        """Resolve and apply the data sign ('auto', 1 or -1) to the CTFs."""
        mleDC, _, mleDC_est_std = self.get_dc_estimate()
        datasign_est = 1 if mleDC > 2 * mleDC_est_std else - \
            1 if mleDC < -2 * mleDC_est_std else 0
        print('Estimated DC Component: {0:.3g} +/- {1:.3g}'.format(mleDC, mleDC_est_std))
        if datasign == 'auto':
            if datasign_est == 0:
                print(' WARNING: estimated DC component has large variance, detected sign could be wrong.')
                datasign = np.sign(mleDC)
            else:
                datasign = datasign_est
        else:
            if datasign_est * datasign < 0:
                print(' WARNING: estimated DC component and specified datasign disagree; be sure this is correct!')
        if datasign != 1:
            print(' Using negative datasign')
            assert datasign == -1
            self.ctfstack.flip_datasign()
        else:
            print(' Using positive datasign')
            assert datasign == 1

    def reset_minibatches(self, epochReset=True):
        """Reset minibatch bookkeeping; optionally reset epoch counters too."""
        self.curr_batch = None
        self.epoch_frac = 0
        if epochReset:
            self.epoch = 0
            self.data_visits = 0

    def get_testbatch(self):
        """Return the whole held-out test set as a single batch dict."""
        miniidx = self.test_idxs
        ret = {'img_idxs': miniidx,
               'ctf_idxs': self.ctfstack.get_ctf_idx_for_image(miniidx),
               'N_M': len(miniidx), 'test_batch': True}
        return ret

    def get_next_minibatch(self, shuffle_minibatches):
        """Return the next training minibatch, reshuffling at epoch turnover."""
        if self.curr_batch is None:
            self.curr_batch = 1
            batchInd = 0
            newepoch = False
        else:
            batchInd = self.curr_batch
            self.curr_batch = (self.curr_batch + 1) % self.N_batches
            newepoch = batchInd == 0
        if newepoch:
            if shuffle_minibatches:
                self.batch_order = self.rand.permutation(self.N_batches)
            self.epoch = self.epoch + 1
            self.epoch_frac = 0
        batch_id = self.batch_order[batchInd]
        startI = self.batch_idxs[batch_id, 0]
        endI = self.batch_idxs[batch_id, 1]
        miniidx = self.train_idxs[startI:endI]
        self.data_visits += endI - startI
        self.epoch_frac += float(endI - startI) / self.N_D_Train
        ret = {'img_idxs': miniidx,
               'ctf_idxs': self.ctfstack.get_ctf_idx_for_image(miniidx),
               'N_M': len(miniidx), 'id': batch_id, 'epoch': self.epoch + self.epoch_frac,
               'num_batches': self.N_batches, 'newepoch': newepoch, 'test_batch': False}
        return ret

    def get_epoch(self, frac=False):
        """Return the current epoch count (fractional when frac=True)."""
        if self.epoch is None:  # Data not yet loaded
            return 0
        if frac:
            return self.epoch + self.epoch_frac
        else:
            return self.epoch
|
class Solution:
    """LeetCode 1025 -- Divisor Game."""

    def divisorGame(self, N: int) -> bool:
        """
        Return True iff the first player wins the divisor game starting at N.

        The original DP (dp[odd i] = not dp[i-1]; dp[even i] = dp[i-1] ^
        dp[i-2], with dp[2] = True) reduces by induction to "even N wins".
        The table construction and the leftover debug print() calls are
        removed; behavior for every N >= 1 is unchanged.
        """
        return N % 2 == 0
|
# -*- coding:utf-8 -*-
'''
#Thanks to Osama Arafa's Camera_SwitchMenu, Help me learned to call ops in ops
#Thanks xVan for helping me with the camera'view undo option
#Thanks Bookyakuno for helping me add keymap (https://blenderartists.org/t/keymap-for-addons/685544/19?u=atticus_lv)
'''
# Add-on metadata read by Blender's add-on manager.
bl_info = {
    "name": "Smart Scene Manager",
    "author": "Atticus",
    "version": (0, 1, 7,1),
    "blender": (2, 83, 2),
    "location": "3D View > Object mode > Shortcut 'F' / Side Menu > Edit",
    "description": "An elegant way to set up your scene",
    "doc_url": "https://atticus-lv.github.io/SSM_document/",
    "category": "Object",
}
from .Ui_Menu import *
from .Ops_Cam import *
from .Ops_Move_obj import *
from .Ops_Extra import *
from .Ops_ModifyCam import *
from .Ops_Light import *
from .Ops_ListManager import *
from .Ops_Materials import *
from .Ui_Translations import *
from .Props import *
import bpy
import rna_keymap_ui
from bpy.props import *
from .Ops_Extra import CN_ON
# Side panels whose tab category can be relocated from the add-on
# preferences (see update_categort below).
panels = (
    SSM_PT_SideMenu, SSM_PT_SideObjInfo,
)
def update_categort(self, context):
    """Re-register the side panels under the user-chosen sidebar tab.

    Used as the update callback of AddonPreferences.category. (The name
    "categort" is a historical typo; kept because the property references
    this function by name.)
    """
    message = "Smart Scene Manager : Updating Panel locations has failed"
    try:
        # Unregister any currently registered panels first.
        for panel in panels:
            if "bl_rna" in panel.__dict__:
                bpy.utils.unregister_class(panel)
        # Re-register each panel with the new tab category.
        for panel in panels:
            panel.bl_category = context.preferences.addons[__name__].preferences.category
            bpy.utils.register_class(panel)
    except Exception as e:
        print("\n[{}]\n{}\n\nError:\n{}".format(__name__, message, e))
        pass
#
# ___ ____ ____ ____
# |__] |__/ |___ |___
# | | \ |___ |
#
#
class AddonPreferences(bpy.types.AddonPreferences):
    """Preferences UI for Smart Scene Manager.

    The draw() method renders one of three tabs (Menu / Properties / Keymap)
    selected by the ``settings`` enum.
    """
    bl_idname = __package__
    #change draw
    # Which preferences tab is currently shown.
    settings:EnumProperty(
        name="enumprop_pref",
        items=[('MENU', 'Menu', ''), ('PROPERTIES', 'Properties', ''), ('KEYMAP', 'Keymap', '')],
        default='MENU'
    )
    # Sidebar tab name; changing it re-registers the panels (update_categort).
    category: StringProperty(name="Tab Category",
                             description="Choose a name for the category of the panel",
                             default="Edit",
                             update=update_categort
                             )
    # list manager
    list_type:EnumProperty(
        items=[('IMAGE', 'Image', 'Use image manager',"TEXTURE",0), ('MAT', 'Materials', 'Use material manager',"MATERIAL_DATA",1)],
        default='MAT'
    )
    # NOTE(review): "tpye" is a typo kept for compatibility with existing
    # references to this property name.
    filter_tpye: EnumProperty(
        items=[('NONE', 'None', ''), ('FAKE', 'Fake User', ''), ('NOUSER', 'No User', '')],
        default='NONE'
    )
    LIST_MG: BoolProperty(name="List Manager", default=False)
    Picker_mode: BoolProperty(name="Picker Mode", default=False)
    # menu setting
    PIE_MENU:BoolProperty(name="Enable Pie Menu", default=True)
    SIDE_MENU:BoolProperty(name="Enable Side Menu", default=True)
    SM_sub:BoolProperty(name="Object Data",default=True)
    # Side Panel
    openCam: BoolProperty(default=False)
    openMove: BoolProperty(default=False)
    #light group
    LG: BoolProperty(name="LightGroup", default=False)
    #export setting
    tempdir:StringProperty(name="Temppath",description="Temp Path")
    usecustom:BoolProperty(name="Use Custom",default = False)
    usedate: BoolProperty(name="Date", default=True)
    usetime: BoolProperty(name="Time", default=True)
    usenum: BoolProperty(name="Time", default=True)
    useactiveN: BoolProperty(name="ActiveName", default=True)
    #popwindow setting
    usePop: BoolProperty(name="Use Popup Window",default=False)
    RX: IntProperty(name="Resolution X",default= 1200)
    RY: IntProperty(name="Resolution Y",default= 800)
    #cam setting
    camlens:IntProperty(name="Focal Length",default= 50)
    useCamName:BoolProperty(name="Use Name",default=True)
    #transPSR
    Tp:BoolProperty(name="Location",default=True)
    Ts:BoolProperty(name="Scale",default=False)
    Tr:BoolProperty(name="Rotation",default=False)
    #D2F
    OM:BoolProperty(name="Only Mesh",default=False)

    def draw(self, context):
        # CNON selects Chinese (index 0) vs English (index 1) labels below.
        CNON = CN_ON(context)
        layout = self.layout
        col = layout.column()
        row = col.row(align=True)
        row.prop(self, 'settings', expand=True)
        col.separator()
        def drawmenu(col):
            # "Menu" tab: side menu, pie menu and popup-window options.
            pref = context.preferences.addons[__package__].preferences
            col = col.box()
            row = col.row(align=True)
            id = 0 if CNON else 1
            T_list = [
                ["使用侧边栏菜单","Use Side Menu"],
                ['中文手册','Update'],
                ["https://atticus-lv.gitee.io/ssm_document/","https://atticus-lv.github.io/SSM_document/changelog/"],
                ["物体信息面板","Object Info "],
                ["选项卡位置","Tab Category"],
                ["使用饼菜单","Use Pie"],
                ["材质拾取(使用弹窗","Mat Picker(Use pop window)"],
            ]
            row.prop(self, "SIDE_MENU", text='', icon='PRESET')
            row.label(text=T_list[0][id])
            sub = row.row()
            sub.scale_x = 0.5
            sub.operator('wm.url_open',text=T_list[1][id],icon = 'URL').url = T_list[2][id]
            if pref.SIDE_MENU:
                box = col.box()
                row = box.row(align=True)
                row.label(text=T_list[3][id], icon='OBJECT_DATA')
                row.prop(self, "SM_sub", text="")
                row = box.row(align=True)
                row.prop(self, "category", text=T_list[4][id])
            row = col.row(align=True)
            row.prop(self, "PIE_MENU",text='', icon='ANTIALIASED')
            row.label(text=T_list[5][id])
            row = col.row(align=True)
            row.prop(self, 'usePop', text="", icon='TOPBAR')
            row.label(text=T_list[6][id])
            if pref.usePop :
                row = col.row(align=True)
                row.prop(self, "RX")
                row.prop(self, "RY")
            row = col.row(align=True)
            row.prop(self, 'LG', text='', icon="GROUP")
            row.label(text="Light Group (Experimental)")
        def drawProperties(col):
            # "Properties" tab: camera, transform-transfer and export options.
            id = 0 if CNON else 1
            T_list = [
                ["添加相机","Add Cam"],
                ["使用名字","Use Name"],
                ["位置转移","TransPSR"],
            ]
            col.label(text=T_list[0][id], icon='VIEW_CAMERA')
            row = col.row(align=True)
            row.separator();row.separator();row.separator()
            row.prop(self, "camlens")
            row.label(text="mm")
            # NOTE(review): T_list[1][id] is a display string, not a property
            # identifier; this likely should be
            # row.prop(self, "useCamName", text=T_list[1][id]) -- verify in
            # Blender before changing.
            row.prop(self, T_list[1][id],text = "使用名字")
            #trans psr
            col.label(text=T_list[2][id], icon='CON_LOCLIKE')
            row = col.row(align=True)
            row.separator()
            row.separator()
            row.separator()
            row.prop(self, "Tp",icon ="ORIENTATION_GLOBAL")
            row.prop(self, "Ts",icon = "ORIENTATION_LOCAL")
            row.prop(self, "Tr",icon = "ORIENTATION_GIMBAL")
            row.separator()
            row.separator()
            row.separator()
            #export
            col.label(text="Export", icon='FOLDER_REDIRECT')
            row = col.row(align=True)
            row.prop(self,"tempdir", text="Output Path")
            row.operator('buttons.directory_browse',icon ='FILEBROWSER',text = '')
            row.prop(self, "usecustom", text="",icon ='CHECKMARK')
            row = col.row(align=False)
            row.label(text="Name",)
            row.prop(self, "useactiveN", text="Active Object")
            row.prop(self, "usenum", text="Count")
            row.prop(self, "usedate", text="Date")
            row.prop(self, "usetime", text="Time")
        def drawKeymap(col):
            # "Keymap" tab: show the add-on's shortcuts from the user config.
            col = col.box()
            col = col.column()
            # col.label(text="Keymap", icon="KEYINGSET")
            wm = bpy.context.window_manager
            kc = wm.keyconfigs.user
            old_km_name = ""
            get_kmi_l = []
            # Match each registered addon keymap item against the user's
            # keyconfig so edits are shown/applied on the user copy.
            for km_add, kmi_add in addon_keymaps:
                for km_con in kc.keymaps:
                    if km_add.name == km_con.name:
                        km = km_con
                        break
                for kmi_con in km.keymap_items:
                    if kmi_add.idname == kmi_con.idname:
                        if kmi_add.name == kmi_con.name:
                            get_kmi_l.append((km, kmi_con))
            # Deduplicate while preserving discovery order.
            get_kmi_l = sorted(set(get_kmi_l), key=get_kmi_l.index)
            for km, kmi in get_kmi_l:
                if not km.name == old_km_name:
                    col.label(text=str(km.name), icon="DOT")
                    pass
                col.context_pointer_set("keymap", km)
                rna_keymap_ui.draw_kmi([], kc, km, kmi, col, 0)
                old_km_name = km.name
        #Excute
        if context.preferences.addons[__package__].preferences.settings == 'MENU':
            drawmenu(col)
        if context.preferences.addons[__package__].preferences.settings == 'PROPERTIES':
            col = col.box()
            drawProperties(col)
        if context.preferences.addons[__package__].preferences.settings == 'KEYMAP':
            drawKeymap(col)
#
# ____ _ ____ ____ ____
# | | |__| [__ [__
# |___ |___ | | ___] ___]
#
#
# All classes (un)registered in bulk by register()/unregister() below.
classes = (
    # Preferences
    AddonPreferences,
    # Props
    SSM_CameraProps, SSM_LightProps,
    # Menu
    SSM_MT_PieMenu, SSM_MT_Select, SSM_MT_AlignMenu, SSM_PT_SideMenu, SSM_PT_SideObjInfo,
    # Ops_Cam
    ActiveCam, FlipCam, AddViewCam, focusPicker,
    # Ops_ModifyCam
    CamSetEV, SSM_PT_CameraSwitcher, CamList, SceneSetEV,
    # Ops_Move_obj
    OBJECT_OT_Drop2floor, OBJECT_OT_TransPSR, OBJECT_OT_LookAT,
    PANEL_PT_TransPanel, PANEL_PT_D2fPanel,
    # Ops_Extra
    ExportObj, Translater, LightCheck,
    # light group
    ToggleLightGroup, SSM_OT_CreatLightGroup, SSM_OT_RenameLightGroup, SSM_OT_SoloGroup, SSM_OT_ResetGroupSolo,
    RemoveLightGroup,SSM_OT_SoloSingle,ActiveLight,
    # Image Manager
    IMAGE_OT_Remove, SSM_UL_ImageList, SSM_OT_Pack, SSM_OT_RemoveUnused,PopImageEditor,
    #mat
    SSM_OT_Remove_By_Filter, SSM_OT_Material_Picker, SSM_PickerMatProps, SSM_UL_MatList,SSM_OT_Remove_Single_Mat,
    PICKER_OT_AddMat, PICKER_OT_RemoveMat, PICKER_OT_clearList,PICKER_UL_MatList,
    PopShaderEditor,
)
#
# ____ ____ ____ _ ____ ___ ____ ____
# |__/ |___ | __ | [__ | |___ |__/
# | \ |___ |__] | ___] | |___ | \
#
#
# (keymap, keymap_item) pairs registered by this add-on, kept so they can be
# removed again in removeKeymap() and listed in the preferences Keymap tab.
addon_keymaps = []
def addKeymap():
    """Register the add-on's shortcuts: F (pie menu) and A in Mesh mode."""
    wm = bpy.context.window_manager
    if wm.keyconfigs.addon:
        km = wm.keyconfigs.addon.keymaps.new(name='3D View', space_type='VIEW_3D')
        kmi = km.keymap_items.new('wm.call_menu_pie', 'F', 'PRESS')
        kmi.properties.name = "SSM_MT_PieMenu"
        addon_keymaps.append((km, kmi))
        # NOTE(review): binding plain 'A' in the Mesh keymap shadows
        # Blender's default select-all shortcut -- confirm this is intended.
        km = wm.keyconfigs.addon.keymaps.new(name='Mesh')
        kmi = km.keymap_items.new('wm.call_menu_pie', 'A', 'PRESS')
        kmi.properties.name = "SSM_MT_Select"
        addon_keymaps.append((km, kmi))
def removeKeymap():
    """Remove every shortcut previously registered by addKeymap()."""
    wm = bpy.context.window_manager
    kc = wm.keyconfigs.addon
    if kc:
        for km, kmi in addon_keymaps:
            km.keymap_items.remove(kmi)
    addon_keymaps.clear()
def register():
    """Register classes, properties, keymaps, menu entries and translations."""
    for cls in classes:
        bpy.utils.register_class(cls)
    PropsADD()
    #add keymap
    addKeymap()
    # add menu
    bpy.types.VIEW3D_MT_camera_add.append(add_cam_menu)
    # translate
    bpy.app.translations.register(__name__, Ui_Translations.translations_dict)
def unregister():
    """Undo register() in roughly reverse order."""
    # translate
    bpy.app.translations.unregister(__name__)
    # remove menu
    bpy.types.VIEW3D_MT_camera_add.remove(add_cam_menu)
    for cls in classes:
        bpy.utils.unregister_class(cls)
    removeKeymap()
    Proposremove()
# Allow running the file directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
|
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo.config import cfg
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.cisco.l3 import hosting_device_drivers
LOG = logging.getLogger(__name__)

# Length mgmt port UUID to be part of VM's config drive filename
CFG_DRIVE_UUID_START = 24
CFG_DRIVE_UUID_LEN = 12

# oslo.config options for this driver, registered under [hosting_devices].
CSR1KV_HD_DRIVER_OPTS = [
    cfg.StrOpt('csr1kv_configdrive_template', default='csr1kv_cfg_template',
               help=_("CSR1kv configdrive template file.")),
]

cfg.CONF.register_opts(CSR1KV_HD_DRIVER_OPTS, "hosting_devices")
class CSR1kvHostingDeviceDriver(hosting_device_drivers.HostingDeviceDriver):
    """Hosting device driver that builds config-drive files for CSR1kv VMs."""

    def hosting_device_name(self):
        return "CSR1kv"

    def create_config(self, context, mgmtport):
        """Render the CSR1kv config-drive contents from the template.

        Substitutes the management port's IP, netmask and gateway (plus a
        hard-coded DNS server) into the template file referenced by
        configuration.

        :param context: request context passed through to the core plugin
        :param mgmtport: management port dict with at least one fixed IP
        :return: mapping of config-drive filename to rendered contents
        :raises IOError: re-raised when the template cannot be read
        """
        mgmt_ip = mgmtport['fixed_ips'][0]['ip_address']
        subnet_data = self._core_plugin.get_subnet(
            context, mgmtport['fixed_ips'][0]['subnet_id'],
            ['cidr', 'gateway_ip', 'dns_nameservers'])
        netmask = str(netaddr.IPNetwork(subnet_data['cidr']).netmask)
        params = {'<ip>': mgmt_ip, '<mask>': netmask,
                  '<gw>': subnet_data['gateway_ip'],
                  '<name_server>': '8.8.8.8'}
        try:
            cfg_template_filename = (
                cfg.CONF.general.templates_path + "/" +
                cfg.CONF.hosting_devices.csr1kv_configdrive_template)
            vm_cfg_data = ''
            with open(cfg_template_filename, 'r') as cfg_template_file:
                # insert proper instance values in the template
                for line in cfg_template_file:
                    tokens = line.strip('\n').split(' ')
                    line = ' '.join(map(lambda x: params.get(x, x),
                                        tokens)) + '\n'
                    vm_cfg_data += line
            return {'iosxe_config.txt': vm_cfg_data}
        except IOError as e:
            # Bug fix: the two adjacent string literals previously joined as
            # "Trying toclean up." (missing space between them).
            LOG.error(_('Failed to create config file: %s. Trying to '
                        'clean up.'), str(e))
            # NOTE(review): delete_configdrive_files is not defined in this
            # class; presumably inherited or provided elsewhere -- verify.
            self.delete_configdrive_files(context, mgmtport)
            raise

    @property
    def _core_plugin(self):
        return manager.NeutronManager.get_plugin()
|
# coding=utf8
from setuptools import setup

# Packaging metadata for the plugml machine-learning framework.
setup(
    name="plugml",
    version="0.2.4",
    description="easy-to-use and highly modular machine learning framework",
    long_description="easy-to-use and highly modular machine learning framework based on scikit-learn with postgresql data bindings",
    url="https://github.com/mkraemer67/plugml",
    author="Martin Krämer",
    author_email="mkraemer.de@gmail.com",
    license="Apache",
    packages=["plugml"],
    # Minimum runtime dependencies as declared by the author.
    install_requires=[
        "nltk>=3.0.0",
        "numpy>=1.9.0",
        "psycopg2>=2.5.0",
        "scikit-learn>=0.15.0",
        "scipy>=0.14.0"
    ],
    include_package_data=True,
    zip_safe=False
)
|
class User(object):
    """Simple value object holding a user's id and name.

    The Java-style accessors are preserved for backward compatibility with
    existing callers.
    """

    def __init__(self, id=None, name=None) -> None:
        self.id = id
        self.name = name

    def setId(self, id):
        self.id = id

    def getId(self):
        return self.id

    def setName(self, name):
        # Bug fix: previously this assigned to self.id, silently clobbering
        # the id while leaving the name unchanged.
        self.name = name

    def getName(self):
        return self.name
|
# hst.py Demo/test for Horizontal Slider class for Pyboard TFT GUI
# Adapted for (and requires) uasyncio V3
# Released under the MIT License (MIT). See LICENSE.
# Copyright (c) 2016-2020 Peter Hinch
import uasyncio as asyncio
import pyb
from tft.driver.constants import *
from tft.driver.tft_local import setup
from tft.driver.ugui import Screen
from tft.widgets.dial import Dial
from tft.widgets.label import Label
from tft.widgets.buttons import Button, ButtonList
from tft.widgets.horiz_slider import HorizSlider
from tft.widgets.meter import Meter
from tft.widgets.led import LED
from tft.fonts import font14
from tft.fonts import font10
# CALLBACKS
# cb_end occurs when user stops touching the control
def to_string(val):
    """Format a 0..1 slider value as a resistance reading in ohms."""
    ohms = val * 10
    return '{:3.1f} ohms'.format(ohms)
class HorizontalSliderScreen(Screen):
    """Demo screen: a master slider driving two slaves, two meters, an LED,
    labels showing each slider's value, and enable/quit buttons."""

    def __init__(self):
        super().__init__()
        # Common args for the labels
        labels = { 'width' : 70,
                   'fontcolor' : WHITE,
                   'border' : 2,
                   'fgcolor' : RED,
                   'bgcolor' : (0, 40, 0),
                   }
        # Common arguments for all three sliders
        table = {'fontcolor' : WHITE,
                 'legends' : ('0', '5', '10'),
                 'cb_end' : self.slide_release,
                 }
        self.led = LED((420, 0), border = 2)
        meter1 = Meter((320, 0), font=font10, legends=('0','5','10'), pointercolor = YELLOW, fgcolor = GREEN)
        meter2 = Meter((360, 0), font=font10, legends=('0','5','10'), pointercolor = YELLOW)
        btnquit = Button((390, 240), font = font14, callback = self.quit, fgcolor = RED,
                         text = 'Quit', shape = RECTANGLE, width = 80, height = 30)
        # Create labels
        x = 230
        lstlbl = []
        for n in range(3):
            lstlbl.append(Label((x, 40 + 60 * n), font = font10, **labels))
        # Sliders
        x = 10
        self.slave1 = HorizSlider((x, 100), font = font10, fgcolor = GREEN, cbe_args = ('Slave1',),
                                  cb_move = self.slave_moved, cbm_args = (lstlbl[1],), **table)
        self.slave2 = HorizSlider((x, 160), font = font10, fgcolor = GREEN, cbe_args = ('Slave2',),
                                  cb_move = self.slave_moved, cbm_args = (lstlbl[2],), **table)
        master = HorizSlider((x, 40), font = font10, fgcolor = YELLOW, cbe_args = ('Master',),
                             cb_move = self.master_moved, slidecolor=RED, border = 2,
                             cbm_args = (lstlbl[0],), value=0.5, **table)
        # On/Off toggle: enable/disable
        bs = ButtonList(self.cb_en_dis)
        # Widgets that the Disable/Enable toggle greys out.
        self.lst_en_dis = [self.slave1, btnquit]
        button = bs.add_button((280, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
                               fgcolor = GREEN, shape = RECTANGLE, text = 'Disable', args = (True,))
        button = bs.add_button((280, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
                               fgcolor = RED, shape = RECTANGLE, text = 'Enable', args = (False,))
        # Tasks test meters
        self.reg_task(self.test_meter(meter1))
        self.reg_task(self.test_meter(meter2))

    # cb_end callback: fires when the user stops touching a slider.
    def slide_release(self, slider, control_name):
        print('{} returned {}'.format(control_name, slider.value()))

    # Master slider drives both slaves, its label, and the LED threshold.
    def master_moved(self, slider, label):
        val = slider.value()
        self.slave1.value(val)
        self.slave2.value(val)
        label.value(to_string(val))
        self.led.value(val > 0.8)

    # Either slave has had its slider moved (by user or by having value altered)
    def slave_moved(self, slider, label):
        val = slider.value()
        if val > 0.8:
            slider.color(RED)
        else:
            slider.color(GREEN)
        label.value(to_string(val))

    def quit(self, button):
        Screen.shutdown()

    # Grey out (or restore) the widgets controlled by the toggle button.
    def cb_en_dis(self, button, disable):
        for item in self.lst_en_dis:
            item.greyed_out(disable)

    # Meters move linearly between random values
    async def test_meter(self, meter):
        oldvalue = 0
        await asyncio.sleep(0)
        while True:
            val = pyb.rng() / 2**30
            steps = 20
            delta = (val - oldvalue) / steps
            for _ in range(steps):
                oldvalue += delta
                meter.value(oldvalue)
                await asyncio.sleep_ms(100)
def test():
    """Initialize the TFT display and hand control to the demo screen."""
    print('Test TFT panel...')
    setup()
    Screen.change(HorizontalSliderScreen)

# Hardware demo: runs immediately on import.
test()
|
import config
import features
import colorama
import sys
import os
import json
from datetime import datetime
from mutagen.easyid3 import EasyID3
# Short alias for the path-normalization helper used throughout this module.
# NOTE(review): the name shadows the common numpy alias "np"; numpy is not
# used here, but keep this in mind when reading.
np = features.get_norm_path

colorama.init()
# ANSI style shortcuts for colored console output.
c_reset = colorama.Style.RESET_ALL
c_red = colorama.Fore.RED
c_green = colorama.Fore.GREEN
c_bright = colorama.Style.BRIGHT
c_dim = colorama.Style.DIM
def select_files():
    """
    Select files that need to be edited
    :return: list of files that need to be edited; working_dir - directory where these files are placed
    """
    files = []
    print(f'{c_bright}Enter the absolute or relative path to directory: {c_reset}', end='')
    working_dir = np(input())
    if not os.path.exists(working_dir):
        # Abort the whole program on a bad path.
        print(f'{c_red}err: {c_reset}incorrect path. Try again.')
        exit(1)
    # Collect only files with an .mp3 extension from the chosen directory.
    for file in os.listdir(working_dir):
        if file.split('.')[-1] == 'mp3':
            files.append(np(f'{working_dir}/{file}'))
    return files, working_dir
def ask_user(file: str, default: dict, ignore: set, leave_copy: bool = False):
    """
    Ask the user for new metadata values
    :param file: the file to edit
    :param default: predefined metadata values
    :param ignore: other metadata values to leave unchanged
    :param leave_copy: bool, True, if you need to leave copyright information
    :return: dict with pairs 'metadata': 'value'; bool var: True, if you need to return to the prev iteration
    """
    file = np(file)
    file_title = os.path.split(file)[-1]
    text = config.LOCALE
    track = EasyID3(file)
    edited_md = dict()
    actual_data = set(track.keys())
    print(f'\n{c_green}{file_title}{c_reset}')
    # getting data from user and editing the metadata of the current file
    for data in text:
        if data in default:
            edited_md[data] = [default[data]]
        if data in ignore:
            # skip the iteration if the data in ignored or in default
            # (if the data in ignored, then they are in default also)
            continue
        # validate current value
        tmp = features.validate_data(track, data)
        print(f'{c_bright}{text[data]}{c_reset}{c_dim} ({tmp}): ', end='')
        usr_input = input()
        # Entering '^' aborts and asks the caller to go back one file.
        if usr_input == '^':
            return dict(), True
        # Empty input keeps the current (validated) value.
        edited_md[data] = [features.validate_input(data, usr_input)] if usr_input else [tmp]
    # leave information about the copyright holder
    if leave_copy:
        for data in config.COPYRIGHT:
            if data in actual_data:
                edited_md[data] = track[data][0]
    return edited_md, False
def set_defaults(title: bool, artist: bool, album: bool, number: bool, genre: bool, date: bool):
    """
    Ask the user for the values that need to be set for all files
    :param title: True, if you need to leave the title
    :param artist: True, if you need to leave the artist
    :param album: True, if you need to leave the album
    :param number: True, if you need to leave the number
    :param genre: True, if you need to leave the genre
    :param date: True, if you need to leave the date
    :return: default: dict with pairs 'metadata': 'predefined value';
             ignored: set with data that should be ignored in ask_user
    """
    default = dict()
    ignored = set()
    # Map CLI flags onto ID3 tag names.
    args = {'title': title,
            'artist': artist,
            'album': album,
            'tracknumber': number,
            'genre': genre,
            'date': date}
    for data in args:
        if args[data]:
            print(f'{c_bright}Set the {data} for all next tracks: {c_reset}', end='')
            default[data] = input()
            ignored.add(data)
    return default, ignored
def parse_log():
    """
    Parse the json file with information about the file metadata
    :return: dict: 'filename' : {'metadata': 'value'}
    """
    # find the later log file
    log_path = np(config.LOG_PATH)
    files = os.listdir(log_path)
    files = [file for file in files if file.split('.')[-1] == 'json']
    files = [np(os.path.join(log_path, file)) for file in files]
    files = [file for file in files if os.path.isfile(file)]
    # Default suggestion: the most recently created log, if any exists.
    log_file = np('<default file not found>' if not files else max(files, key=os.path.getctime))
    # ask the path to the log file
    print(f'{c_bright}Enter the absolute or relative path to the log file: {c_reset}{c_dim} ({log_file}): ', end='')
    usr_input = input()
    log_file = np(usr_input) if usr_input else log_file
    if not os.path.exists(log_file):
        # Abort the whole program if neither the default nor the given path exists.
        print(f'{c_red}err: {c_reset}The log file wasn\'t found. Make sure that the correct path is specified.')
        exit(1)
    # read log
    with open(log_file, 'r', encoding='utf-8') as read_file:
        return json.load(read_file)
def edit_files(files: dict, path: str, clear_all: bool, do_rename: bool):
    """
    Set, edit or delete the metadata of the selected files and rename them

    :param files: information from user about the metadata of each file
    :param path: the directory where these files are located
    :param clear_all: True, if you need to remove all the metadata
    :param do_rename: True, if you need to rename files in the form of artist-track_title
    :return: dict mapping old file names to new file names for renamed files
        (empty when do_rename is False)
    """
    renamed = dict()
    for file in files:
        current_path = np(os.path.join(path, file))
        # valid the path
        if not os.path.exists(current_path):
            print(f'{c_red}warn: {c_reset}{current_path} doesn\'t exist. Try to run again.')
            continue
        track = EasyID3(current_path)
        actual_data = set(files[file].keys())
        # set or edit metadata
        for data in files[file]:
            track[data] = files[file][data]
        # validate and leave unchanged some metadata
        if config.LEAVE_SOME_DATA:
            for i in config.LEAVE_THIS_DATA:
                if i in track.keys():
                    actual_data.add(i)
                    track[i] = [features.validate_data(track, i)]
        # delete ignored metadata
        # Bug fix: iterate over a snapshot of the keys. Deleting from the
        # mapping while iterating it directly can skip keys or raise a
        # RuntimeError.
        for del_data in list(track.keys()):
            if del_data not in actual_data or clear_all:
                del track[del_data]
        # save metadata and rename file
        track.save()
        if do_rename:
            file_name_tmp = features.get_new_filename(track['artist'][0], track['title'][0])
            os.rename(current_path, np(f'{path}/{file_name_tmp}'))
            renamed[file] = file_name_tmp
    return renamed
def create_logs(log: dict, renamed: dict):
    """
    Create a json file and save the log in it.

    :param log: the data to be saved (file name -> metadata dict)
    :param renamed: dict like this: {'old_file_name.mp3': 'new_file_name.mp3'}
    :return: None
    """
    # timestamped file name, e.g. 2021-01-31-12-00-00.json
    file_name = datetime.today().isoformat('-').replace(':', '-').split('.')[0] + '.json'
    log_path = np(config.LOG_PATH)
    log_file_path = os.path.join(log_path, file_name)
    # makedirs with exist_ok avoids the check-then-create race of isdir()+mkdir()
    os.makedirs(log_path, exist_ok=True)
    if renamed:
        # key the log by the new file names; files that were not renamed
        # (e.g. skipped because they no longer exist) keep their old name
        log = {renamed.get(i, i): log[i] for i in log}
    with open(log_file_path, 'w', encoding='utf-8') as write_file:
        json.dump(log, write_file, ensure_ascii=False)
def main():
    """
    Main process: parse CLI args, collect metadata for each selected mp3 file
    (interactively, by scanning, or from a previous log), apply it, and
    optionally write a log file.
    :return: None
    """
    # get the CLI arguments
    cli_parser = config.set_parser()
    namespace = cli_parser.parse_args(sys.argv[1:])
    scan_mode = namespace.scan
    # with --parse, reuse a previous run's log instead of asking the user again
    log = parse_log() if namespace.parse else dict()
    # set the local variables
    renamed_files = False
    mp3_files, path = select_files()
    default, ignored = set_defaults(namespace.title, namespace.artist, namespace.album, namespace.number,
                                    namespace.genre, namespace.date)
    if namespace.minimal:
        ignored.update({'tracknumber', 'date'})
    if namespace.copyright:
        ignored.update(config.COPYRIGHT)
    if not namespace.parse:
        cur_index = 0
        while cur_index < len(mp3_files):
            file = mp3_files[cur_index]
            # ask for information about each file, fill in the log, or return to prev iteration
            # (need_returns can only be True when ask_user() is reached, i.e. in
            # interactive mode; the delete/scan/auto-rename branches never go back)
            file_title = os.path.split(file)[-1]
            log[file_title], need_returns = (dict(), False) if namespace.delete else (dict(EasyID3(file)), False) \
                if (scan_mode or namespace.auto_rename) else ask_user(file, default, ignored, namespace.copyright)
            cur_index += -1 if need_returns else 1
    # edit the files
    if not scan_mode:
        renamed_files = edit_files(log, path, namespace.delete, (namespace.rename or namespace.auto_rename))
    # create log file
    if (namespace.log or scan_mode) and not namespace.parse:
        create_logs(log, renamed_files)
    print(f'{c_green}\nDone! Press [Enter] to exit')
    input()
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 04 10:26:49 2018
@author: Mostafa Meliani <melimostafa@gmail.com>
Multi-Fidelity co-Kriging: recursive formulation with autoregressive model of order 1 (AR1)
Partial Least Square decomposition added on highest fidelity level
KPLSK model combines PLS followed by a Kriging model in the initial dimension
Adapted on March 2020 by Nathalie Bartoli to the new SMT version
Adapted on January 2021 by Andres Lopez-Lopera to the new SMT version
"""
from smt.utils.kriging_utils import componentwise_distance
from smt.applications import MFKPLS
class MFKPLSK(MFKPLS):
    """Multi-fidelity KPLSK: MFKPLS followed by a Kriging step in the original dimension."""

    def _initialize(self):
        super(MFKPLSK, self)._initialize()
        declare = self.options.declare
        # Like KPLSK, MFKPLSK used only with "squar_exp" correlations.
        # "values"/"types" must be real tuples: ("squar_exp") is just the bare
        # string, so membership validation would accept any substring of it.
        declare(
            "corr",
            "squar_exp",
            values=("squar_exp",),
            desc="Correlation function type",
            types=(str,),
        )
        self.name = "MFKPLSK"

    def _componentwise_distance(self, dx, opt=0):
        """Componentwise distance: Kriging step (opt == 0) or PLS step (opt != 0)."""
        # Modif for KPLSK model
        if opt == 0:
            # Kriging step: distances in the original dimension
            d = componentwise_distance(dx, self.options["corr"], self.nx)
        else:
            # KPLS step: delegate to the PLS-projected distance
            d = super(MFKPLSK, self)._componentwise_distance(dx, opt)
        return d

    def _new_train(self):
        """
        Overrides KrgBased implementation.
        Trains the Multi-Fidelity model + PLS (done on the highest fidelity
        level) + Kriging in the initial dimension (MFKPLSK).
        """
        self._new_train_init()
        # save the options the per-level training mutates, restore them each level
        self.n_comp = self.options["n_comp"]
        theta0 = self.options["theta0"].copy()
        noise0 = self.options["noise0"].copy()
        for lvl in range(self.nlvl):
            self._new_train_iteration(lvl)
            self.options["n_comp"] = self.n_comp
            self.options["theta0"] = theta0
            self.options["noise0"] = noise0
        self._new_train_finalize(lvl)

    def _get_theta(self, i):
        # optimal_theta is filled during training, one entry per fidelity level
        return self.optimal_theta[i]
|
# coding=utf-8
# Author: Jianghan LI
# Question: 807.Max_Increase_to_Keep_City_Skyline
# Complexity: O(N)
# Date: 2018-03-24 0:11:06 - 0:20:48, 1 wrong try
class Solution(object):
    def maxIncreaseKeepingSkyline_loop(self, grid):
        """
        Explicit-loop version (kept under its own name: previously both
        versions shared one name, so this one was silently shadowed).
        :type grid: List[List[int]]
        :rtype: int
        """
        # skyline seen along rows and along columns;
        # materialize: map()/zip() are lazy iterators on Python 3
        row_max = list(map(max, grid))          # max of each row
        col_max = list(map(max, zip(*grid)))    # max of each column
        after = 0
        n = len(grid)
        for i in range(n):
            for j in range(n):
                # each cell may grow up to the smaller of its two skylines
                after += min(row_max[i], col_max[j])
        return after - sum(map(sum, grid))

    def maxIncreaseKeepingSkyline(self, grid):
        """
        Compact version: total of per-cell caps minus the current total.
        :type grid: List[List[int]]
        :rtype: int
        """
        # col must be a list: as an iterator it would be exhausted after the
        # first pass of the nested generator, giving a wrong sum on Python 3
        row, col = list(map(max, grid)), list(map(max, zip(*grid)))
        return sum(min(i, j) for i in row for j in col) - sum(map(sum, grid))
############ test case ###########
s = Solution()
# print(single_expr) behaves identically on Python 2 (parenthesized expression)
# and Python 3 (function call), unlike the bare print statement.
print(s.maxIncreaseKeepingSkyline([[3, 0, 8, 4], [2, 4, 5, 7], [9, 2, 6, 3], [0, 3, 1, 0]]))  # 35
print(s.maxIncreaseKeepingSkyline([[59, 88, 44], [3, 18, 38], [21, 26, 51]]))  # 117
|
import pytest
from optuna.study import create_study
from optuna.trial import Trial
from optuna.visualization.matplotlib import plot_optimization_history
@pytest.mark.parametrize("direction", ["minimize", "maximize"])
def test_plot_optimization_history(direction: str) -> None:
    """Smoke-test the matplotlib optimization-history plot in several scenarios."""
    # An empty study produces an empty figure.
    study = create_study(direction=direction)
    assert not plot_optimization_history(study).has_data()

    returns_by_trial = {0: 1.0, 1: 2.0, 2: 0.0}

    def objective(trial: Trial) -> float:
        return returns_by_trial.get(trial.number, 0.0)

    # With trials, the figure must contain data.
    # TODO(ytknzw): Add more specific assertion with the test case.
    study = create_study(direction=direction)
    study.optimize(objective, n_trials=3)
    assert plot_optimization_history(study).has_data()

    # Customized target.
    assert plot_optimization_history(study, target=lambda t: t.number).has_data()

    # Customized target name.
    assert plot_optimization_history(study, target_name="Target Name").has_data()

    # Failed trials are ignored, leaving the figure empty.
    def fail_objective(_: Trial) -> float:
        raise ValueError

    study = create_study(direction=direction)
    study.optimize(fail_objective, n_trials=1, catch=(ValueError,))
    assert not plot_optimization_history(study).has_data()
|
import numpy as np
import pandas as pd
from eppy import modeleditor
from eppy.modeleditor import IDF
import subprocess
import csv
def LHSample(D, bounds, N):
    """
    Latin hypercube sampling.

    :param D: number of parameters (dimensions)
    :param bounds: per-parameter [lower, upper] ranges (list of D pairs)
    :param N: number of strata / samples
    :return: (N, D) array of samples, or None if any lower bound exceeds its upper bound
    """
    # Validate the bounds up front so no sampling work is wasted on bad input
    # (previously the check ran only after all random draws were done).
    b = np.array(bounds)
    lower_bounds = b[:, 0]
    upper_bounds = b[:, 1]
    if np.any(lower_bounds > upper_bounds):
        print('范围出错')
        return None
    result = np.empty([N, D])
    temp = np.empty([N])
    d = 1.0 / N
    # One uniform draw per stratum of width 1/N, shuffled independently per dimension.
    for i in range(D):
        for j in range(N):
            temp[j] = np.random.uniform(
                low=j * d, high=(j + 1) * d, size=1)[0]
        np.random.shuffle(temp)
        for j in range(N):
            result[j, i] = temp[j]
    # Stretch the unit-cube samples into the requested bounds (in place).
    np.add(np.multiply(result,
                       (upper_bounds - lower_bounds),
                       out=result),
           lower_bounds,
           out=result)
    return result
def comp_data_reader(eso_file, yc_keys, xc_keys):
    """
    Parse an EnergyPlus .eso output file and collect meter/variable series.

    :param eso_file: path to the .eso file (comma-separated)
    :param yc_keys: meter names, matched against column 2 of the data dictionary
    :param xc_keys: variable names, matched against column 3 of the data dictionary
    :return: [meter_series, variable_series]; meter values are converted from J to kWh
    """
    with open(eso_file) as handle:
        rows = [row for row in csv.reader(handle, delimiter=',')]

    # Report numbers whose data-dictionary entry mentions each meter key.
    yc_numbers = [row[0]
                  for key in yc_keys
                  for row in rows
                  if len(row) > 3 and key in row[2]]

    # Data rows share the report number but do not repeat the key text.
    yc_values = []
    for idx, number in enumerate(yc_numbers):
        series = [float(row[1]) / 3600000  # J -> kWh
                  for row in rows
                  if len(row) > 3 and row[0] == number and yc_keys[idx] not in row[2]]
        yc_values.append(series)

    # Same procedure for the output variables, keyed on column 3.
    xc_numbers = [row[0]
                  for key in xc_keys
                  for row in rows
                  if len(row) > 3 and key in row[3]]

    xc_values = []
    for idx, number in enumerate(xc_numbers):
        series = [float(row[1])
                  for row in rows
                  if len(row) > 3 and row[0] == number and xc_keys[idx] not in row[3]]
        xc_values.append(series)

    return [yc_values, xc_values]
def datafield(yc_keys,xc_keys):
    """
    Run the reference building model against two AMY weather files (2011, 2012)
    and write the combined "field data" meter/variable series to CSV
    (DATAFIELD_Multi.csv and DATAFIELD_Single.csv).
    :param yc_keys: EnergyPlus meter names to report
    :param xc_keys: EnergyPlus variable names to report
    """
    iddfile = "./Energy+9.1.idd"
    IDF.setiddname(iddfile)
    idfname = "./RefBldgLargeOfficeNew2004_Chicago.idf"
    idf = IDF(idfname)
    # change the output variable and meter
    # output_frequency='Daily'
    output_frequency='Monthly'
    variable=[]
    for i in range(len(xc_keys)):
        variable1 = idf.newidfobject("Output:Variable".upper())
        variable1.Key_Value = '*'
        variable1.Variable_Name = xc_keys[i]
        variable1.Reporting_Frequency = output_frequency
        variable.append(variable1)
    # replace (not extend) the model's Output:Variable list with ours
    idf.idfobjects['Output:Variable'.upper()]=variable
    meter=[]
    for i in range(len(yc_keys)):
        meter1 = idf.newidfobject("Output:Meter".upper())
        meter1.Key_Name = yc_keys[i]
        meter1.Reporting_Frequency = output_frequency
        meter.append(meter1)
    idf.idfobjects['Output:Meter'.upper()]=meter
    # simulate a full calendar year
    idf.idfobjects['RUNPERIOD'][0].Begin_Month=1
    idf.idfobjects['RUNPERIOD'][0].Begin_Day_of_Month=1
    idf.idfobjects['RUNPERIOD'][0].End_Month=12
    idf.idfobjects['RUNPERIOD'][0].End_Day_of_Month=31
    idf.saveas('C:/Users/songc/Desktop/work file/Updated_Model.idf')
    idfname1 = 'C:/Users/songc/Desktop/work file/Updated_Model.idf' # This IDF file is updated at each iteration.
    # first simulation: 2011 weather file
    epwfile = './SPtMasterTable_52384_2011_amy.epw'
    subprocess.call(['C:/EnergyPlusV9-1-0/energyplus.exe', '-d', "C:/Users/songc/Desktop/work file/result_folder", '-w', epwfile, idfname1])
    eso_file='./result_folder/eplusout.eso'
    [ycoutput,xcoutput]=comp_data_reader(eso_file, yc_keys, xc_keys)
    yc_df1 = pd.DataFrame(ycoutput, index=yc_keys).T
    xc_df1 = pd.DataFrame(xcoutput, index=xc_keys).T
    # second simulation: same updated model, 2012 weather file
    epwfile2 = './SPtMasterTable_52384_2012_amy.epw'
    subprocess.call(['C:/EnergyPlusV9-1-0/energyplus.exe', '-d', "C:/Users/songc/Desktop/work file/result_folder", '-w', epwfile2, idfname1])
    eso_file='./result_folder/eplusout.eso'
    [ycoutput,xcoutput]=comp_data_reader(eso_file, yc_keys, xc_keys)
    yc_df2 = pd.DataFrame(ycoutput, index=yc_keys).T
    xc_df2 = pd.DataFrame(xcoutput, index=xc_keys).T
    # stack both years; "Multi" keeps all meters, "Single" keeps only the first one
    yc_df = pd.concat([yc_df1,yc_df2],axis=0)
    xc_df = pd.concat([xc_df1,xc_df2],axis=0)
    df = pd.concat([yc_df,xc_df],axis=1)
    df.to_csv('DATAFIELD_Multi.csv',index=False)
    df_single = pd.concat([yc_df.iloc[:,0],xc_df],axis=1)
    df_single.to_csv('DATAFIELD_Single.csv',index=False)
def datacomp(yc_keys,xc_keys,tc_keys):
    """
    Run the building model once per Latin-hypercube sample (module-level
    LHS_result), varying six calibration parameters, and write the combined
    "computer data" series plus the sampled parameter values to CSV
    (DATACOMP_Multi.csv and DATACOMP_Single.csv).
    :param yc_keys: EnergyPlus meter names to report
    :param xc_keys: EnergyPlus variable names to report
    :param tc_keys: column names for the sampled calibration parameters
    """
    iddfile = "./Energy+9.1.idd"
    IDF.setiddname(iddfile)
    yc_df = pd.DataFrame(columns=yc_keys)
    xc_df = pd.DataFrame(columns=xc_keys)
    tc_df = pd.DataFrame(columns=tc_keys)
    for n in range(len(LHS_result)):
        idfname = "./RefBldgLargeOfficeNew2004_Chicago.idf"
        idf = IDF(idfname)
        # change the output variable and meter
        # output_frequency='Daily'
        output_frequency='Monthly'
        variable=[]
        for i in range(len(xc_keys)):
            variable1 = idf.newidfobject("Output:Variable".upper())
            variable1.Key_Value = '*'
            variable1.Variable_Name = xc_keys[i]
            variable1.Reporting_Frequency = output_frequency
            variable.append(variable1)
        idf.idfobjects['Output:Variable'.upper()]=variable
        meter=[]
        for i in range(len(yc_keys)):
            meter1 = idf.newidfobject("Output:Meter".upper())
            meter1.Key_Name = yc_keys[i]
            meter1.Reporting_Frequency = output_frequency
            meter.append(meter1)
        idf.idfobjects['Output:Meter'.upper()]=meter
        # change the runperiod and other idf objects:
        # apply the n-th LHS sample to the six calibration parameters
        for i in range(len(idf.idfobjects['LIGHTS'])):
            idf.idfobjects['LIGHTS'][i].Watts_per_Zone_Floor_Area=LHS_result[n][0]
        for i in range(len(idf.idfobjects['ELECTRICEQUIPMENT'])):
            idf.idfobjects['ELECTRICEQUIPMENT'][i].Watts_per_Zone_Floor_Area=LHS_result[n][1]
        for i in range(len(idf.idfobjects['FAN:VARIABLEVOLUME'])):
            # idf.idfobjects['FAN:VARIABLEVOLUME'][i].Pressure_Rise=LHS_result[n][2]
            idf.idfobjects['FAN:VARIABLEVOLUME'][i].Fan_Total_Efficiency=LHS_result[n][2]
        for i in range(len(idf.idfobjects['ZONEINFILTRATION:DESIGNFLOWRATE'])):
            idf.idfobjects['ZONEINFILTRATION:DESIGNFLOWRATE'][i].Flow_per_Exterior_Surface_Area=LHS_result[n][3]
        for i in range(len(idf.idfobjects['CHILLER:ELECTRIC:REFORMULATEDEIR'])):
            idf.idfobjects['CHILLER:ELECTRIC:REFORMULATEDEIR'][i].Reference_COP=LHS_result[n][4]
        for i in range(len(idf.idfobjects['BOILER:HOTWATER'])):
            idf.idfobjects['BOILER:HOTWATER'][i].Nominal_Thermal_Efficiency=LHS_result[n][5]
        # for i in range(len(idf.idfobjects['SCHEDULE:COMPACT'])):
        #     if 'Building_Cooling_Sp_Schedule' in idf.idfobjects['SCHEDULE:COMPACT'][i].Name:
        #         idf.idfobjects['SCHEDULE:COMPACT'][i].Field_4=LHS_result[n][4]
        # simulate a full calendar year
        idf.idfobjects['RUNPERIOD'][0].Begin_Month=1
        idf.idfobjects['RUNPERIOD'][0].Begin_Day_of_Month=1
        idf.idfobjects['RUNPERIOD'][0].End_Month=12
        idf.idfobjects['RUNPERIOD'][0].End_Day_of_Month=31
        idf.saveas('C:/Users/songc/Desktop/work file/Updated_Model.idf')
        idfname1 = 'C:/Users/songc/Desktop/work file/Updated_Model.idf' # This IDF file is updated at each iteration.
        epwfile = './SPtMasterTable_52384_2011_amy.epw'
        subprocess.call(['C:/EnergyPlusV9-1-0/energyplus.exe', '-d', "C:/Users/songc/Desktop/work file/result_folder", '-w', epwfile, idfname1])
        eso_file='./result_folder/eplusout.eso'
        [ycoutput,xcoutput]=comp_data_reader(eso_file, yc_keys, xc_keys)
        # DataFrame.append was removed in pandas 2.0; pd.concat is the
        # drop-in replacement for row-wise accumulation
        yc_df = pd.concat([yc_df, pd.DataFrame(ycoutput, index=yc_keys).T], axis=0)
        xc_df = pd.concat([xc_df, pd.DataFrame(xcoutput, index=xc_keys).T], axis=0)
        # repeat the n-th parameter vector once per reported period
        tc_df = pd.concat([tc_df, pd.DataFrame(np.reshape(list(LHS_result[n])*len(ycoutput[0]),(len(ycoutput[0]),len(tc_keys))),columns=tc_keys)], axis=0)
    df = pd.concat([yc_df,xc_df,tc_df],axis=1)
    df.to_csv('DATACOMP_Multi.csv',index=False)
    df_single = pd.concat([yc_df.iloc[:,0],xc_df,tc_df],axis=1)
    df_single.to_csv('DATACOMP_Single.csv',index=False)
# ±20% ranges around the nominal values of the six calibrated parameters
# (in the order datacomp() applies them): lights W/m2, equipment W/m2,
# fan total efficiency, infiltration flow per exterior area, chiller COP,
# boiler thermal efficiency.
bounds=[[10.76*0.8,10.76*1.2],[10.76*0.8,10.76*1.2],[0.605*0.8,0.605*1.2],[0.000302*0.8,0.000302*1.2],[5.5*0.8,5.5*1.2],[0.78*0.8,0.78*1.2]]
LHS_result=LHSample(6,bounds,30)
# Alternative (smaller) parameter subsets kept for reference:
# bounds=[[10.76*0.8,10.76*1.2],[10.76*0.8,10.76*1.2],[0.605*0.8,0.605*1.2],[0.000302*0.8,0.000302*1.2]]
# LHS_result=LHSample(4,bounds,30)
# bounds=[[10.76*0.8,10.76*1.2],[10.76*0.8,10.76*1.2],[0.605*0.8,0.605*1.2]]
# LHS_result=LHSample(3,bounds,30)
# bounds=[[10.76*0.8,10.76*1.2],[10.76*0.8,10.76*1.2]]
# LHS_result=LHSample(2,bounds,30)
# Meters (yc) and weather variables (xc) to collect; alternatives kept for reference.
# yc_keys=['Electricity:Facility','InteriorLights:Electricity','Fans:Electricity','InteriorEquipment:Electricity']
yc_keys=['Electricity:Facility','InteriorLights:Electricity','InteriorEquipment:Electricity','Electricity:HVAC','Heating:Gas']
# yc_keys=['Electricity:Facility','InteriorEquipment:Electricity']
# yc_keys=['Electricity:Facility','Cooling:Electricity']
xc_keys=['Site Outdoor Air Drybulb Temperature','Site Outdoor Air Relative Humidity','Site Direct Solar Radiation Rate per Area']
tc_keys=['tc1','tc2','tc3','tc4','tc5','tc6']
# Generate the "field" data (fixed model, two weather years), then the
# "computer" data (one run per LHS sample).
datafield(yc_keys, xc_keys)
datacomp(yc_keys, xc_keys, tc_keys)
|
'''
Visualize steps of the calibration process to ensure everything went according to plan
'''
from matplotlib import pyplot as plt
from astropy.io import fits
from visualization import zscale #https://github.com/abostroem/utilities
overscan_size = 32 #pixels
unusable_bottom = 48//2 #pixels
def visualize_bias(biasfile, out_filename):
    """Plot every bias-frame extension side by side (overscan removed) and save the figure."""
    fig, ax_list = plt.subplots(nrows=1, ncols=12, sharey=True, figsize=[10, 7])
    ofile = fits.open(biasfile)
    object_name = ofile[0].header['OBJECT']
    fig.suptitle(object_name)
    # NOTE(review): with more than 13 HDUs, only every 3rd extension is kept —
    # presumably such files store 3 HDUs per amplifier; confirm against the data.
    if len(ofile) > 13:
        ofile = ofile[1::3]
    else:
        ofile = ofile[1:]
    extnum = 1
    for ax, ext in zip(ax_list, ofile):
        img = ext.data
        #remove overscan region
        # even/odd extensions carry the overscan on opposite sides; note the
        # even branch trims only half of the unusable bottom rows
        if extnum%2 == 0:
            img = img[unusable_bottom//2:, overscan_size:]
        else:
            img = img[unusable_bottom:, :-overscan_size]
        vmin, vmax = zscale(img)
        ax.imshow(img, cmap='bone', vmin=vmin, vmax=vmax)
        ax.set_xticks([])
        ax.set_title('EXT {}'.format(extnum))
        extnum+=1
    plt.subplots_adjust(wspace=0)
    plt.savefig(out_filename)
def visualize_flat(flatfile, out_filename):
    """Plot every flat-frame extension side by side with z-scaled limits and save the figure."""
    fig, axes = plt.subplots(nrows=1, ncols=12, sharey=True, figsize=[10, 7])
    hdulist = fits.open(flatfile)
    fig.suptitle(hdulist[0].header['OBJECT'])
    # skip the primary HDU; number extensions from 1 for the panel titles
    for ext_index, (ax, hdu) in enumerate(zip(axes, hdulist[1:]), start=1):
        frame = hdu.data
        lo, hi = zscale(frame)
        ax.imshow(frame, cmap='bone', vmin=lo, vmax=hi)
        ax.set_xticks([])
        ax.set_title('EXT {}'.format(ext_index))
    plt.subplots_adjust(wspace=0)
    plt.savefig(out_filename)
def visualize_science(sciencefile, out_filename, remove_overscan=False):
    """Plot every science-frame extension, optionally trimming overscan regions, and save the figure."""
    fig, axes = plt.subplots(nrows=1, ncols=12, sharey=True, figsize=[10, 7])
    hdulist = fits.open(sciencefile)
    fig.suptitle(hdulist[0].header['OBJECT'])
    for ext_index, (ax, hdu) in enumerate(zip(axes, hdulist[1:]), start=1):
        frame = hdu.data
        if remove_overscan is True:
            # overscan sits on opposite sides for even/odd extensions
            if ext_index % 2 == 0:
                frame = frame[:, overscan_size:]
            else:
                frame = frame[:, :-overscan_size]
            frame = frame[unusable_bottom:, :]
        lo, hi = zscale(frame)
        ax.imshow(frame, cmap='bone', vmin=lo, vmax=hi)
        ax.set_xticks([])
        ax.set_title('EXT {}'.format(ext_index))
    plt.savefig(out_filename)
def comp_to_science(biasfile, flatfile, sciencefile, out_filename, remove_overscan=False):
    """Plot bias, flat, and science extensions interleaved (BIA/FLT/SCI triplets) for comparison."""
    fig, ax_list = plt.subplots(nrows=1, ncols=36, sharey=True, figsize=[25, 7])
    #BIAS
    # every 3rd HDU of the bias file is drawn on every 3rd axis
    ofile = fits.open(biasfile)
    extnum = 1
    for ax, ext in zip(ax_list[0::3], ofile[1::3]):
        img = ext.data
        #remove overscan region
        # NOTE(review): both branches trim unusable_bottom//2 here, while
        # visualize_bias trims the full unusable_bottom for odd extensions —
        # confirm which behavior is intended.
        if extnum%2 == 0:
            img = img[unusable_bottom//2:, overscan_size:]
        else:
            img = img[unusable_bottom//2:, :-overscan_size]
        vmin, vmax = zscale(img)
        ax.imshow(img, cmap='bone', vmin=vmin, vmax=vmax)
        ax.set_xticks([])
        ax.set_title('BIA {}'.format(extnum))
        extnum+=1
    #FLAT
    ofile = fits.open(flatfile)
    extnum = 1
    for ax, ext in zip(ax_list[1::3], ofile[1:]):
        img = ext.data
        vmin, vmax = zscale(img)
        ax.imshow(img, cmap='bone', vmin=vmin, vmax=vmax)
        ax.set_xticks([])
        ax.set_title('FLT {}'.format(extnum))
        extnum += 1
    #Science
    ofile = fits.open(sciencefile)
    object_name = ofile[0].header['OBJECT']
    fig.suptitle(object_name)
    extnum=1
    for ax, ext in zip(ax_list[2::3], ofile[1:]):
        img = ext.data
        #remove overscan region
        if remove_overscan is True:
            if extnum%2 == 0:
                img = img[unusable_bottom:, overscan_size:]
            else:
                img = img[unusable_bottom:, :-overscan_size]
        vmin, vmax = zscale(img)
        ax.imshow(img, cmap='bone', vmin=vmin, vmax=vmax)
        ax.set_xticks([])
        ax.set_title('SCI {}'.format(extnum))
        extnum += 1
    plt.savefig(out_filename)
|
import unittest
from unittest import TestCase
from agents.meter import Meter
class TestMeter(TestCase):
    """Tests for agents.meter.Meter."""
    def test_get_latest_aggregate_consumption(self):
        # Two consecutive reads are expected to yield different values, so the
        # meter presumably advances through its readings on each call —
        # confirm against the Meter implementation.
        expected1 = 0.434
        expected2 = 0.561
        meter = Meter('MAC000002')
        actual1 = meter.get_latest_consumption()
        actual2 = meter.get_latest_consumption()
        self.assertEqual(expected1, actual1)
        self.assertEqual(expected2, actual2)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env mayapy
#
# Copyright 2020 Autodesk
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pxr import Sdf, Usd, Vt
import mayaUsd.schemas as mayaUsdSchemas
import mayaUsd.lib as mayaUsdLib
from maya import cmds
from maya import standalone
import os
import unittest
import fixturesUtils
class testUsdImportMayaReference(unittest.TestCase):
    """Tests that usdImport recreates Maya references described in a USD file."""
    @classmethod
    def setUpClass(cls):
        # Import once for all tests; the pre-created namespace is used below to
        # verify the merge-namespaces-on-clash behavior.
        inputPath = fixturesUtils.readOnlySetUpClass(__file__)
        cmds.namespace(add='unique_namespace_1')
        usdFile = os.path.join(inputPath, "UsdImportMayaReferenceTest", "MayaReference.usda")
        cmds.usdImport(file=usdFile, shadingMode=[['none', 'default'], ])
    @classmethod
    def tearDownClass(cls):
        standalone.uninitialize()
    def testImport(self):
        # the referenced rig must exist under its own namespace
        mayaReference = 'rig:cubeRig'
        self.assertTrue(cmds.objExists(mayaReference))
    def testMergeNamespacesOnClash(self):
        # the clashing import must have been merged into the existing namespace
        mayaReference = 'unique_namespace_1:cubeRig'
        self.assertTrue(cmds.objExists(mayaReference))
# Allow running this test module directly (verbose output).
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
import json
import os
from random import shuffle
import pytest
from django.core.management import call_command
from django_dynamic_fixture import get
from readthedocs.projects.constants import PUBLIC
from readthedocs.projects.models import HTMLFile, Project
from readthedocs.search.documents import PageDocument
from readthedocs.sphinx_domains.models import SphinxDomain
from .dummy_data import ALL_PROJECTS, PROJECT_DATA_FILES
@pytest.fixture
def es_index():
    """Recreate the Elasticsearch index for a test and drop it again afterwards."""
    call_command('search_index', '--delete', '-f')
    call_command('search_index', '--create')
    yield
    call_command('search_index', '--delete', '-f')
@pytest.fixture
def all_projects(es_index, mock_processed_json, db, settings):
    """Create all dummy projects with their HTML files and Sphinx domains,
    index them into Elasticsearch, and return them in shuffled order."""
    # auto-sync so created objects are indexed immediately
    settings.ELASTICSEARCH_DSL_AUTOSYNC = True
    projects_list = []
    for project_slug in ALL_PROJECTS:
        project = get(
            Project,
            slug=project_slug,
            name=project_slug,
            main_language_project=None,
            privacy_level=PUBLIC,
        )
        project.versions.update(privacy_level=PUBLIC)
        for file_basename in PROJECT_DATA_FILES[project.slug]:
            # file_basename in config are without extension so add html extension
            file_name = file_basename + '.html'
            version = project.versions.all()[0]
            html_file = get(
                HTMLFile,
                project=project,
                version=version,
                name=file_name,
                path=file_name,
                build=1,
            )
            # creating sphinx domain test objects
            file_path = get_json_file_path(project.slug, file_basename)
            if os.path.exists(file_path):
                with open (file_path) as f:
                    data = json.load(f)
                    domains = data['domains']
                    for domain_data in domains:
                        # role_name is "domain:type", e.g. "py:function"
                        domain_role_name = domain_data.pop('role_name')
                        domain, type_ = domain_role_name.split(':')
                        get(
                            SphinxDomain,
                            project=project,
                            version=version,
                            html_file=html_file,
                            domain=domain,
                            type=type_,
                            **domain_data
                        )
            # push the page into the search index
            PageDocument().update(html_file)
        projects_list.append(project)
    # shuffle so tests don't depend on creation order
    shuffle(projects_list)
    return projects_list
@pytest.fixture
def project(all_projects):
    """Return one project (arbitrary, since all_projects is shuffled)."""
    # Return a single project
    return all_projects[0]
def get_json_file_path(project_slug, basename):
    """Build the absolute path of a dummy-data json file for *project_slug*."""
    data_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
    return os.path.join(data_dir, project_slug, f'{basename}.json')
def get_dummy_processed_json(instance):
    """Load the processed-json fixture matching *instance*'s project and file name."""
    slug = instance.project.slug
    stem = os.path.splitext(instance.name)[0]
    path = get_json_file_path(slug, stem)
    if not os.path.exists(path):
        return None
    with open(path) as handle:
        return json.load(handle)
@pytest.fixture
def mock_processed_json(mocker):
    """Patch HTMLFile.get_processed_json to serve the on-disk dummy fixtures."""
    mocked_function = mocker.patch.object(HTMLFile, 'get_processed_json', autospec=True)
    mocked_function.side_effect = get_dummy_processed_json
|
import os
import configparser
# Repository root: two directory levels up from this file.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Shared ConfigParser instance, loaded from config.cfg at import time.
config = configparser.ConfigParser()
config.read(os.path.join(PROJECT_ROOT, 'config.cfg'))
|
from django.contrib import admin
from .models import Project, UserProject
# Expose both models in the Django admin with default ModelAdmin options.
admin.site.register(Project)
admin.site.register(UserProject)
|
import re
def get_octets(address: str):
    """
    Split a dotted-quad IPv4 string into four 8-bit binary strings.

    Prints and returns the octet list on success; prints the error and
    returns None when the address is malformed or an octet exceeds 255.
    """
    pattern = re.compile(r'\b(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})')
    match = pattern.fullmatch(address)
    try:
        if any(int(group) > 255 for group in match.groups()):
            raise ValueError('Not a Valid Ip address')
        octets = ['{:08b}'.format(int(group)) for group in match.groups()]
    except AttributeError as ae:
        # match is None: the string is not a dotted quad at all
        print(ae)
    except ValueError as ve:
        print(ve)
    else:
        print(octets)
        return octets
def un_mask(host: str, mask: str):
    """
    Compute the network id by AND-ing a host address with a subnet mask.

    :param host: dotted-quad host address
    :param mask: dotted-quad subnet mask
    :return: dotted-quad network id string (also printed; previously the
             result was only printed, never returned)
    """
    host, mask = get_octets(host), get_octets(mask)
    net = []
    # AND the two addresses bit by bit, one octet at a time
    for host_octet, mask_octet in zip(host, mask):
        bits = ''
        for h_bit, m_bit in zip(host_octet, mask_octet):
            bits += str(int(h_bit) & int(m_bit))
        net.append(int(bits, 2))
    network_id = '.'.join(str(octet) for octet in net)
    print(network_id)
    # return the result so callers can use it, not just read the console
    return network_id
# Demo input: malformed address (first octet has four digits, so the regex
# cannot match) — get_octets() will print an error and return None.
ip = '8780.36.78.0'
ips = '255.255.255.0'
get_octets(ip)
|
from bs4 import *
import urllib2
def page_parser(url):
    """Download *url* and return the concatenated text of all its <p> elements.

    NOTE: Python 2 code (print statement, urllib2).
    """
    print "downloading %s now" % url
    page = urllib2.urlopen(url).read()
    # no parser specified: BeautifulSoup falls back to its default HTML parser
    soup = BeautifulSoup(page)
    paragraphs = soup.find_all('p')
    text = [p.getText() for p in paragraphs]
    return " ".join(text)
# R manuals and Advanced R chapters to scrape into a single text corpus.
url_list = ["https://cran.r-project.org/doc/manuals/R-intro.html",
            "https://cran.r-project.org/doc/manuals/R-exts.html",
            "https://cran.r-project.org/doc/manuals/R-data.html",
            "https://cran.r-project.org/doc/manuals/R-lang.html",
            "https://cran.r-project.org/doc/manuals/R-admin.html",
            "http://adv-r.had.co.nz/Introduction.html",
            "http://adv-r.had.co.nz/Environments.html",
            "http://adv-r.had.co.nz/Functionals.html",
            "http://adv-r.had.co.nz/Performance.html",
            "http://adv-r.had.co.nz/memory.html",
            "http://adv-r.had.co.nz/Rcpp.html",
            "http://r-pkgs.had.co.nz/r.html"]
# Download every page and write one UTF-8 encoded blob.
text = [page_parser(url) for url in url_list]
text = " ".join(text).encode('utf-8')
# NOTE(review): "file" shadows the Python 2 builtin of the same name.
with open("input.txt", "w") as file:
    file.write(text)
|
# -*- coding:utf-8 -*-
import os
import re
import time
from datetime import timedelta
from .nVector import nVector
from mathutils import Vector, Matrix
from mathutils.geometry import intersect_sphere_sphere_2d
import math
import blf
import bgl
import bpy
import gpu
import bmesh
from gpu_extras.batch import batch_for_shader
from bpy_extras.io_utils import ImportHelper, ExportHelper
from bpy.types import (
Text,
Scene,
Panel,
Object,
Operator,
PropertyGroup,
AddonPreferences,
UIList,
)
from bpy.props import (
IntProperty,
BoolProperty,
EnumProperty,
FloatProperty,
StringProperty,
PointerProperty,
BoolVectorProperty,
CollectionProperty,
FloatVectorProperty
)
from bpy_extras.view3d_utils import (
region_2d_to_vector_3d,
region_2d_to_origin_3d
)
from nCNC.modules.serial import Serial
# from nCNC.pars.connection import NCNC_PR_Connection, NCNC_PT_Connection
# Finds the USB ports...
from nCNC.modules.serial.tools.list_ports import comports
# Blender add-on metadata, read by Blender's add-on manager.
bl_info = {
    "name": "nCNC",
    "description": "CNC Controls, G code operations",
    "author": "Manahter",
    "version": (0, 6, 5),
    "blender": (2, 90, 0),
    "location": "View3D",
    "category": "Generic",
    "warning": "Under development. Nothing is guaranteed",
    "doc_url": "https://github.com/manahter/nCNC/wiki",
    "tracker_url": "https://github.com/manahter/nCNC/issues"
}
# Serial Connecting Machine
dev = None
# Translation table mapping Turkish characters to ASCII equivalents.
tr_translate = str.maketrans("ÇĞİÖŞÜçğıöşü", "CGIOSUcgiosu")
"""
Eklenecek Özellikler;
* Objeyi, ToolPaths'a convert etmeden ekleyebilmelisin.
Mesela Vievportta Text oluşturduk, bunu convert etmeden, ToolPaths'a ekleyebilmeliyiz.
* Kod çizgileri görününce, included objeler görünmesin. (Vision'dan bu özellik aktifleştirilebilir olur)
* Toolpaths HeaderDraw'a Göster/Gizle Ekle -> Objeler için
* Sadece belli bir objenin yollarını (kodunu) göster/gizle özelliği ekle
* Koddaki hatalı kısımların çizgisi kırmızı olacak şekilde düzenle. Vision'a da eklenebilir
"""
class NCNC_Prefs(AddonPreferences):
    """Add-on preferences; persists the last used preset between sessions."""
    # This must match the addon name, use '__package__'
    # when defining this in a submodule of a python package.
    bl_idname = __name__
    # Name of the most recently used preset.
    last_preset: StringProperty()
# Module-level cache backing the NCNC_PR_Texts.texts EnumProperty items
# callback (see the devtalk reference inside texts_items).
text_editor_files = []
class NCNC_PR_Texts(PropertyGroup):
    """Scene-level property group tracking the G-code text datablocks
    (registered as Scene.ncnc_pr_texts)."""
    # Progress (0-100) shown while a text is being loaded/parsed.
    loading: IntProperty(
        name="Loading...",
        subtype="PERCENTAGE",
        default=0,
        min=0,
        max=100
    )
    def template_texts(self, layout, context=None):
        """Draw the text selector row (enum selector, name field, open/remove buttons)."""
        row = layout.row(align=True)
        # Show / Hide
        if context:
            context.scene.ncnc_pr_vision.prop_bool(row, "gcode")
        row.prop(self, "texts", text="", icon="TEXT", icon_only=True)
        if self.loading > 0:
            # while loading, show a progress slider instead of the controls
            # row = layout.row(align=True)
            row.prop(self, "loading", slider=True)
        else:
            if self.active_text:
                row.prop(self.active_text, "name", text="")
            row.operator("ncnc.textsopen", icon="FILEBROWSER", text=("" if self.active_text else "Open"))
            if self.active_text:
                row.operator("ncnc.textsremove", icon="X", text="")
                # row.operator("ncnc.textssave", icon="EXPORT", text="")
        return row
    def texts_items(self, context):
        """EnumProperty items callback: one entry per text datablock in the file."""
        # The reason we used different variables in between was that we got an error when the unicode character was
        # in the file name.
        # Reference:
        # https://devtalk.blender.org/t/enumproperty-and-string-encoding/7835
        text_editor_files.clear()
        text_editor_files.extend([(i.name, i.name, "") for i in bpy.data.texts])
        return text_editor_files
    def update_texts(self, context):
        # selecting an enum entry activates the matching text datablock
        self.active_text = bpy.data.texts[self.texts]
    last_texts = []
    texts: EnumProperty(
        items=texts_items,
        name="Texts",
        description="Select CNC code text",
        update=update_texts
    )
    def update_active_text(self, context):
        """Parse the newly activated text and sync it to any open text editors."""
        if not self.active_text:
            return
        if bpy.ops.ncnc.vision.poll():
            bpy.ops.ncnc.vision()
        self.active_text.ncnc_pr_text.load()
        for area in context.screen.areas:
            if area.type == "TEXT_EDITOR":
                area.spaces[0].text = self.active_text
        # self-assignment deliberately re-triggers the vision property's update callback
        context.scene.ncnc_pr_vision.gcode = context.scene.ncnc_pr_vision.gcode
    active_text: PointerProperty(
        type=Text,
        update=update_active_text
    )
    @property
    def code(self):
        # full G-code source of the selected text ("" when none is selected)
        return bpy.data.texts[self.texts].as_string() if self.texts else ""
    @classmethod
    def register(cls):
        Scene.ncnc_pr_texts = PointerProperty(
            name="NCNC_PR_Texts Name",
            description="NCNC_PR_Texts Description",
            type=cls)
    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_texts
class NCNC_OT_TextsOpen(Operator, ImportHelper):
    """File-browser operator that loads a G-code file into a new text datablock."""
    bl_idname = "ncnc.textsopen"
    bl_label = "Open GCode Text"
    bl_description = "Import a GCode file"
    bl_options = {'REGISTER'}
    # References:
    # https://docs.blender.org/api/current/bpy_extras.io_utils.html
    # https://sinestesia.co/blog/tutorials/using-blenders-filebrowser-with-python/
    # https://blender.stackexchange.com/questions/177742/how-do-i-create-a-text-datablock-and-populate-it-with-text-with-python
    # Restrict the file browser to common G-code extensions.
    filter_glob: StringProperty(
        default='*.text;*.txt;*.cnc;*.nc;*.tap;*.ngc;*.gc;*.gcode;*.ncnc;*.ncc',
        options={'HIDDEN'}
    )
    def execute(self, context):
        # copy the chosen file's contents into a new text datablock
        # (no explicit encoding: the platform default is used)
        with open(self.filepath, 'r') as f:
            txt = bpy.data.texts.new(os.path.basename(self.filepath))
            txt.write(f.read())
        # NOTE(review): texts_items is a bound-method reference here, so this
        # condition is always truthy — confirm whether calling it (or checking
        # for a non-empty item list) was intended.
        if context.scene.ncnc_pr_texts.texts_items:
            context.scene.ncnc_pr_texts.texts = txt.name
        return {'FINISHED'}
class NCNC_OT_TextsSave(Operator, ExportHelper):
    """File-browser operator that exports the active text datablock as a G-code file."""
    bl_idname = "ncnc.textssave"
    bl_label = "Export to GCode"
    bl_description = "Export a GCode file"
    bl_options = {'REGISTER'}
    # References:
    # https://docs.blender.org/api/current/bpy_extras.io_utils.html
    # https://blender.stackexchange.com/questions/150932/export-file-dialog-in-blender-2-80
    filter_glob: StringProperty(
        default='*.text;*.txt;*.cnc;*.nc;*.tap;*.ngc;*.gc;*.gcode;*.ncnc;*.ncc',
        options={'HIDDEN'}
    )
    # Default extension appended by ExportHelper.
    filename_ext = ".cnc"
    def execute(self, context):
        active = context.scene.ncnc_pr_texts.active_text
        if active:
            text = active.as_string()
            # binary write of ASCII bytes; encode() will raise on non-ASCII
            # characters in the G-code text
            with open(self.filepath, "wb") as f:
                f.write(text.encode("ASCII"))
            self.report({"INFO"}, "Exported")
        return {'FINISHED'}
class NCNC_OT_TextsRemove(Operator):
    bl_idname = "ncnc.textsremove"
    bl_label = "Remove Text File"
    bl_description = "Remove selected Text File"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        # delete the currently selected text datablock, if any
        selected = context.scene.ncnc_pr_texts.active_text
        if selected:
            bpy.data.texts.remove(selected)
        return {"FINISHED"}
# #################################
# #################################
# #################################
class NCNC_PR_Lines(PropertyGroup):
    """Holds one coordinate produced by NCNC_PR_TextLine.calc_lines()."""
    co: FloatVectorProperty()  # coordinate of one path point
class NCNC_PR_TextLine(PropertyGroup):
    """One parsed line of G-code.

    ``load()`` interprets the raw text, inherits modal state (distance mode,
    plane, units, move mode, feed) from the previous line, and caches the
    resulting motion segments in ``lines`` for drawing/streaming.
    """
    # Pre-computed polyline vertices for this line's motion.
    lines: CollectionProperty(type=NCNC_PR_Lines)
    # Line number within the owning text (0 is the synthetic start line).
    index: IntProperty()
    # True when this line produces actual tool motion.
    ismove: BoolProperty(default=False)
    # Raw line as read, including comments.
    code_full: StringProperty()
    # Uppercased line with parenthesised comments stripped.
    code: StringProperty()
    comment: StringProperty()
    # Modal words, inherited from the previous line when absent here:
    mode_distance: IntProperty(default=90)   # G90/G91 absolute/incremental
    mode_plane: IntProperty(default=17)      # G17/G18/G19 working plane
    mode_units: IntProperty(default=21)      # G20/G21 inches/millimeters
    mode_move: IntProperty(default=0)        # G0..G3 motion type
    # Values exactly as written in the code (before unit conversion).
    xyz_in_code: FloatVectorProperty()
    ijk_in_code: FloatVectorProperty()
    # Absolute target position / arc-center offset, in millimeters.
    xyz: FloatVectorProperty()
    ijk: FloatVectorProperty()
    r: FloatProperty()
    f: FloatProperty()
    # Path length of this line's motion, millimeters.
    length: FloatProperty(default=0)
    # G4 dwell time, seconds.
    pause: FloatProperty(default=0)
    error: BoolProperty(default=False)

    def get_estimated_time(self):
        # G0 rapids are assumed to run at 500 mm/min; other moves use the F word.
        f = 500 if self.mode_move == 0 else self.f
        return (self.length / (f * (1 if self.mode_units == 21 else 25.4))) * 60

    # Seconds needed to execute this line (read-only, derived).
    estimated_time: FloatProperty(get=get_estimated_time)

    def load(self, value: str):
        """Parse one raw G-code line and cache its motion segments."""
        ismove_xyz = False
        ismove_ijk = False
        ismove_r = False
        self.code_full = value
        # NOTE(review): for index 0 this wraps to lines[-1]; presumably line 0
        # is always the synthetic start line — confirm against NCNC_OT_Text.
        self.prev_line = self.id_data.ncnc_pr_text.lines[self.index - 1]

        # Strip (and keep) parenthesised comments.
        for i in re.findall(r'\([^()]*\)', value):
            self.comment += i
            value = value.replace(i, "")

        # Modal words G90/91, G17-19, G20/21, G0-3; absent words inherit
        # the previous line's value.
        value = value.upper()
        self.code = value
        for rex, key in [(r'G *(9[01])(?:\D|$)', "mode_distance"),
                         (r'G *(1[7-9])(?:\D|$)', "mode_plane"),
                         (r'G *(2[01])(?:\D|$)', "mode_units"),
                         (r'G *(0?[0-3])(?:\D|$)', "mode_move"),
                         ]:
            fn = re.findall(rex, value)
            # NOTE(review): setattr/getattr would do this without exec().
            exec(f"self.{key} = int(fn[0]) if {len(fn) == 1} else self.prev_line.{key}")

        if self.prev_line:
            self.xyz = self.prev_line.xyz

        # Target position words X.. Y.. Z.. (normalized to absolute mm).
        for j, v in enumerate("XYZ"):
            ps = re.findall(f'{v} *([+-]?\d*\.?\d*)', value)
            if len(ps) == 1 and re.sub("[+-.]", "", ps[0]).isdigit():
                ismove_xyz = True
                self.xyz_in_code[j] = float(ps[0])
                # G91: the written value is relative to the previous position.
                self.xyz[j] = float(ps[0]) + (self.xyz[j] if self.mode_distance == 91 else 0)
                # G20: convert inches to millimeters.
                self.xyz[j] *= (1 if self.mode_units == 21 else 25.4)

        # Arc-center offset words I.. J.. K..
        for j, v in enumerate("IJK"):
            ps = re.findall(f'{v} *([+-]?\d*\.?\d*)', value)
            if len(ps) == 1 and re.sub("[+-.]", "", ps[0]).isdigit():
                ismove_ijk = True
                self.ijk_in_code[j] = float(ps[0])
                self.ijk[j] = float(ps[0]) * (1 if self.mode_units == 21 else 25.4)

        # Feed rate F.. (inherit when absent).
        ps = re.findall('F *([+]?\d*\.?\d*)', value)
        if len(ps) == 1 and re.sub("[+.]", "", ps[0]).isdigit():
            self.f = float(ps[0])
        else:
            self.f = self.prev_line.f

        # Arc radius R.. — mutually exclusive with IJK.
        ps = re.findall('R *([+-]?\d*\.?\d*)', value)
        if len(ps) == 1 and re.sub("[+-.]", "", ps[0]).isdigit():
            ismove_r = True
            self.r = float(ps[0]) * (1 if self.mode_units == 21 else 25.4)
            if ismove_ijk:
                self.error = True

        # Dwell: G4 P<seconds>.
        ps = re.findall('G4 *P([+]?\d*\.?\d*)', value)
        if len(ps) == 1 and re.sub("[+.]", "", ps[0]).isdigit():
            self.pause = float(ps[0])

        # Linear moves need only XYZ; arcs additionally need IJK or R.
        if (ismove_xyz and self.mode_move in (0, 1)) or (ismove_xyz and ismove_ijk) or (ismove_xyz and ismove_r):
            self.ismove = True

        if self.ismove and not self.error:
            for i in self.calc_lines():
                a = self.lines.add()
                a.co = i

        # On error: fall back entirely to the previous line's state.
        if self.error:
            self.mode_distance = self.prev_line.mode_distance
            self.mode_plane = self.prev_line.mode_plane
            self.mode_units = self.prev_line.mode_units
            self.mode_move = self.prev_line.mode_move
            self.xyz = self.prev_line.xyz
            self.ismove = False
            self.f = self.prev_line.f
        return

    def calc_lines(self, step: int = 0):
        """Return the polyline vertices for this line's motion.

        G0/G1 yields the two segment endpoints; G2/G3 yields an arc
        tessellated via ``bmesh.ops.spin``. ``step`` overrides the automatic
        tessellation count. Returns [] (and sets ``error``) for an invalid arc.
        """
        mv = self.mode_move
        prev_xyz = Vector(self.prev_line.xyz)
        xyz = Vector(self.xyz)

        # Straight move: just the two endpoints.
        if mv in (0, 1):
            self.length = (prev_xyz - xyz).length
            return prev_xyz, xyz

        # If the R word is used, convert it to an IJK center offset first.
        # +R: short (minor) arc, -R: long (major) arc.
        if self.r:
            # Reference:
            # https://docs.blender.org/api/current/mathutils.geometry.html?highlight=intersect_sphere_sphere_2d#mathutils.geometry.intersect_sphere_sphere_2d
            r = abs(self.r)
            distance = round((xyz - prev_xyz).length / 2, 3)
            # Half-chord longer than the radius: no such circle exists.
            if distance > round(r, 3):
                self.error = True
                return []
            # Half-chord equals the radius: the center is the chord midpoint.
            elif distance == round(r, 3):
                ijk = (xyz + prev_xyz) / 2
            # Otherwise two candidate centers exist; pick one by direction
            # (G2/G3) and the sign of R.
            else:
                intersects = intersect_sphere_sphere_2d(prev_xyz[:2], r, xyz[:2], r)
                if mv == 3:
                    ijk = intersects[self.r > 0]
                else:
                    ijk = intersects[self.r < 0]
                ijk = Vector((*ijk[:], 0))
            ijk = ijk - prev_xyz
        else:
            ijk = Vector(self.ijk)

        center = prev_xyz + ijk
        bm = bmesh.new()

        # Warning: everything below is computed for the G17 (XY) plane only.
        # Adapting to the other planes is straightforward — note that the
        # vectors' Z components are zeroed below; adjust those for G18/G19.

        # From the CENTER to the CURRENT POINT
        v1 = prev_xyz - center
        v1.z = 0
        # From the CENTER to the POINT of DESTINATION
        v2 = xyz - center
        v2.z = 0

        try:
            # Start and end radii must match (within tolerance) for a valid arc.
            if abs(v1.length - v2.length) > 0.01:
                raise Exception
            # Angle between V1 and V2 (RADIANS)
            angle = v1.angle(v2)
        # NOTE(review): bare except also masks unexpected errors; it is kept
        # here as the established error-flagging path of this parser.
        except:
            self.error = True
            return []

        # v1.angle(v2) is at most 180°; use the cross product sign to detect
        # when the arc actually goes the long way around.
        if v1.cross(v2).z > 0 and mv == 2:
            angle = math.radians(360) - angle
        elif v1.cross(v2).z < 0 and mv == 3:
            angle = math.radians(360) - angle
        elif v1.cross(v2).z == 0:
            # Collinear: full circle with IJK, half circle with R.
            angle = math.radians(360 if not self.r else 180)

        self.length = angle * v1.length
        # Angle between V1 and V2 (DEGREES)
        angle_degrees = math.degrees(angle)

        # Pick a tessellation density based on the radius unless overridden.
        if step:
            pass
        elif v1.length < 10:
            step = math.ceil(angle_degrees / 10)
        elif v1.length < 50:
            step = math.ceil(angle_degrees / 5)
        else:
            step = math.ceil(angle_degrees / 2)

        # TODO: derive the spin axis from the active plane (G17/G18/G19);
        # currently hard-coded for G17.
        bmesh.ops.spin(bm,
                       geom=[bm.verts.new(prev_xyz)],
                       axis=(0, 0, (1 if mv == 2 else -1)),
                       # axis=(.7, 0, (1 if mv == 2 else -1)),
                       steps=step,
                       angle=-angle,
                       cent=center
                       )

        lines = []
        # Distribute the Z travel evenly over the arc (helix support).
        z_step = (xyz.z - prev_xyz.z) / step if step else 0
        for n, t in enumerate(bm.verts):
            x = round(t.co.x, 3)
            y = round(t.co.y, 3)
            z = round(t.co.z + n * z_step, 3)
            lines.append((prev_xyz.x, prev_xyz.y, prev_xyz.z))
            prev_xyz.x = x
            prev_xyz.y = y
            prev_xyz.z = z
        lines.append((prev_xyz.x, prev_xyz.y, prev_xyz.z))
        return lines
class NCNC_PR_Text(PropertyGroup):
    """Per-text G-code cache: parsed lines plus aggregate statistics."""
    # Modal run flags shared by all instances; only the last entry is live.
    isrun = []

    event: BoolProperty(default=False)
    event_selected: BoolProperty(default=False)
    last_cur_index: IntProperty()
    last_end_index: IntProperty()
    lines: CollectionProperty(
        type=NCNC_PR_TextLine,
        name="Objects",
        description="All Object Items Collection",
    )
    # Total Line
    count: IntProperty()
    # Milimeters
    distance_to_travel: FloatProperty()
    # Seconds
    estimated_time: FloatProperty()
    minimum: FloatVectorProperty()
    maximum: FloatVectorProperty()

    def event_control(self):
        """Refresh the cached cursor/selection range, then reparse if modified."""
        start = self.id_data.current_line_index + 1
        stop = self.id_data.select_end_line_index + 1
        start, stop = min(start, stop), max(start, stop) + 1
        changed = (start != self.last_cur_index) or (stop != self.last_end_index)
        if changed:
            self.last_cur_index = start
            self.last_end_index = stop
            self.event_selected = True
        self.load()

    def get_points(self):
        """Target coordinates of every motion line."""
        return [item.xyz for item in self.lines if item.ismove]

    def get_lines(self, move_mode=0):
        """Flattened segment points of all motion lines with the given G mode."""
        self.event = False
        collected = []
        for item in self.lines:
            if not item.ismove or item.mode_move != move_mode:
                continue
            collected.extend(seg.co[:] for seg in item.lines)
        return collected

    def get_selected(self):
        """Segment points for the currently selected text range."""
        self.event_selected = False
        # While a parse run is active the cache is incomplete.
        if self.isrun and self.isrun[-1]:
            return []
        total = len(self.lines)
        if not (total >= self.last_end_index > self.last_cur_index):
            # Degenerate placeholder segment when the range is invalid.
            return [(0, 0, 0), (0, 0, 0)]
        collected = []
        for idx in range(self.last_cur_index, self.last_end_index):
            item = self.lines[idx]
            if item.ismove:
                collected.extend(seg.co[:] for seg in item.lines)
        return collected

    def load(self):
        """Reparse the whole text if its content changed since the last run."""
        if not self.ismodified:
            return
        run_index = len(self.isrun)
        if run_index:
            # Cancel the run that is currently in flight.
            self.isrun[-1] = False
        self.isrun.append(True)
        # Reset caches and statistics before reparsing.
        self.lines.clear()
        self.count = 0
        self.distance_to_travel = 0
        self.estimated_time = 0
        self.minimum = (0, 0, 0)
        self.maximum = (0, 0, 0)
        bpy.ops.ncnc.gcode(text_name=self.id_data.name, run_index=run_index)
        self.prev_str = self.id_data.as_string()

    # Snapshot of the text content at the last successful parse.
    prev_str: StringProperty()

    def get_ismodified(self):
        return self.prev_str != self.id_data.as_string()

    ismodified: BoolProperty(get=get_ismodified)

    @classmethod
    def register(cls):
        Text.ncnc_pr_text = PointerProperty(
            name="NCNC_PR_Text Name",
            description="NCNC_PR_Text Description",
            type=cls)

    @classmethod
    def unregister(cls):
        del Text.ncnc_pr_text
class NCNC_OT_Text(Operator):
    """Modal operator that parses a text block's G-code line by line.

    Launched by ``NCNC_PR_Text.load()``. Lines are processed in batches per
    timer tick so the UI stays responsive; parse progress and the aggregate
    statistics (length, time, bounding box) are updated as it goes.
    """
    bl_idname = "ncnc.gcode"
    bl_label = "Gcode Read"
    bl_description = ""
    bl_options = {'REGISTER'}

    # Name of the text datablock to parse and this run's slot in isrun.
    text_name: StringProperty()
    run_index: IntProperty()

    code_lines = []   # raw lines still to be parsed
    last_index = 0    # how many lines have been parsed so far
    pr_txt = None     # NCNC_PR_Text of the target text
    delay = .1        # timer period, seconds

    # Added radius R value reading feature in G code.
    # Reference
    # https://www.bilkey.com.tr/online-kurs-kurtkoy/cnc/fanuc-cnc-programlama-kodlari.pdf
    # https://www.cnccookbook.com/cnc-g-code-arc-circle-g02-g03/
    # http://www.helmancnc.com/circular-interpolation-concepts-programming-part-2/
    # R explained: going from X0 to X10, R cannot lie in (-5, 5) because the
    # start-to-end distance is already 10, so the smallest valid R is 10/2 = 5.
    # A negative R follows the major (long) arc, a positive R the minor arc.
    #
    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event):
        self.pr_txt = bpy.data.texts[self.text_name].ncnc_pr_text
        context.window_manager.modal_handler_add(self)
        # Synthetic line 0 establishes the initial modal state.
        line_0 = self.pr_txt.lines.add()
        line_0.load("G0 G90 G17 G21 X0 Y0 Z0 F500")
        self.code_lines = self.pr_txt.id_data.as_string().splitlines()
        return self.timer_add(context)

    def timer_add(self, context):
        wm = context.window_manager
        self._timer = wm.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}

    def timer_remove(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
        return {'CANCELLED'}

    def modal(self, context, event):
        # Stop when a newer run has superseded this one.
        if not self.pr_txt.isrun[self.run_index]:
            return self.timer_remove(context)
        pr = self.pr_txt
        context.scene.ncnc_pr_texts.loading = (self.last_index / len(self.code_lines)) * 100
        # Parse bigger batches on timer ticks than on other events.
        loop_count = 100 if event.type == "TIMER" else 20
        for no, code in enumerate(self.code_lines[self.last_index:], start=self.last_index + 1):
            pr.event = True
            pr.event_selected = True
            self.last_index += 1
            pr.count = no
            l = pr.lines.add()
            l.index = no
            l.load(code)
            # Calc -> Total Length, Time
            if l.length:
                pr.distance_to_travel += l.length
                pr.estimated_time += l.estimated_time
            # Calc -> Total Pause Time
            if l.pause:
                pr.estimated_time += l.pause
            # Calc -> Min/Max X,Y,Z
            for j, v in enumerate(l.xyz):
                if pr.minimum[j] > v:
                    pr.minimum[j] = v
                if pr.maximum[j] < v:
                    pr.maximum[j] = v
            # Yield control back to the UI after each batch.
            if self.last_index % loop_count == 0:
                return {'PASS_THROUGH'}
        pr.event = True
        if context.area:
            context.area.tag_redraw()
        self.report({'INFO'}, "G-Code Loaded")
        self.pr_txt.isrun[self.run_index] = False
        context.scene.ncnc_pr_texts.loading = 0
        return self.timer_remove(context)
##################################
##################################
##################################
class NCNC_PR_Head(PropertyGroup):
    """Header toolbar state: four mutually exclusive tool-tab toggles."""

    def update_common(self, context, key):
        """Turn off every tool toggle except *key*, then re-apply settings.

        :param key: one of "scene", "gcode", "machine", "vision".
        """
        keys = ["scene", "gcode", "machine", "vision"]
        keys.remove(key)
        # setattr replaces the previous exec(): same effect (including the
        # Blender update callbacks firing), without dynamic code execution.
        for other in keys:
            setattr(self, f"tool_{other}", False)
        # Apply Scene Settings
        bpy.ops.ncnc.scene()
        pr_vis = context.scene.ncnc_pr_vision
        # Load recent settings for pr_vis
        pref = bpy.context.preferences.addons.get(__name__)
        if pref and pref.preferences.last_preset:
            pr_vis.presets = pref.preferences.last_preset
        # Self-assignment deliberately re-fires the properties' update callbacks.
        pr_vis.gcode = pr_vis.gcode
        pr_vis.dash = pr_vis.dash
        pr_vis.mill = pr_vis.mill

    def update_tool_scene(self, context):
        if self.tool_scene:
            self.update_common(context, "scene")

    def update_tool_machine(self, context):
        if self.tool_machine:
            self.update_common(context, "machine")

    def update_tool_vision(self, context):
        if self.tool_vision:
            self.update_common(context, "vision")

    def update_tool_gcode(self, context):
        if self.tool_gcode:
            self.update_common(context, "gcode")
            # Track Included Objects
            bpy.ops.ncnc.objects(start=True)
        else:
            # Cancel Track
            bpy.ops.ncnc.objects(start=False)

    tool_scene: BoolProperty(
        name="Scene Tools",
        description="Show/Hide regions",
        default=True,
        update=update_tool_scene
    )
    tool_machine: BoolProperty(
        name="Machine Tools",
        description="Show/Hide regions",
        default=False,
        update=update_tool_machine
    )
    tool_gcode: BoolProperty(
        name="G-code Generation Tools",
        description="Show/Hide regions",
        default=False,
        update=update_tool_gcode
    )
    tool_vision: BoolProperty(
        name="Vision Tools",
        description="Show/Hide regions",
        default=False,
        update=update_tool_vision
    )

    @classmethod
    def register(cls):
        Scene.ncnc_pr_head = PointerProperty(
            name="NCNC_PR_Head Name",
            description="NCNC_PR_Head Description",
            type=cls
        )

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_head
class NCNC_PT_Head(Panel):
    """Main nCNC panel: text selector plus streaming controls when connected."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "nCNC"
    bl_label = ""
    bl_idname = "NCNC_PT_head"

    def draw(self, context):
        pr_txs = context.scene.ncnc_pr_texts
        pr_con = context.scene.ncnc_pr_connection
        pr_com = context.scene.ncnc_pr_communication
        layout = self.layout
        layout.template_running_jobs()
        pr_txs.template_texts(layout, context=context)
        # Start/Pause/Resume/Stop controls only make sense while connected.
        if pr_con.isconnected:
            row = layout.row()
            if pr_com.run_mode == "stop":
                row.operator("ncnc.communicationrun", icon="PLAY", text="Start").action = "start"
            elif pr_com.run_mode == "pause":
                row.operator("ncnc.communicationrun", icon="PLAY", text="Resume").action = "resume"
                row.operator("ncnc.communicationrun", icon="SNAP_FACE", text="Stop").action = "stop"
            else:
                row.operator("ncnc.communicationrun", icon="PAUSE", text="Pause").action = "pause"
                row.operator("ncnc.communicationrun", icon="SNAP_FACE", text="Stop").action = "stop"

    def draw_header(self, context):
        # Tool-tab toggles (mutually exclusive, see NCNC_PR_Head).
        prop = context.scene.ncnc_pr_head
        row = self.layout.row(align=True)
        row.prop(prop, "tool_scene", text="", expand=True, icon="TOOL_SETTINGS")
        row.separator(factor=1)
        row.prop(prop, "tool_gcode", text="", expand=True, icon="COLOR_GREEN")
        row.prop(prop, "tool_machine", text="", expand=True, icon="PLUGIN")

    def draw_header_preset(self, context):
        self.layout.prop(context.scene.ncnc_pr_head, "tool_vision", text="", expand=True, icon="CAMERA_STEREO")
class NCNC_PT_HeadTextDetails(Panel):
    """Sub-panel showing parse statistics for the active G-code text."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "nCNC"
    bl_label = "GCode Details"
    bl_idname = "NCNC_PT_filedetails"
    bl_parent_id = "NCNC_PT_head"
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        # Visible only when a text file is selected.
        return context.scene.ncnc_pr_texts.active_text

    def draw(self, context):
        pr_txs = context.scene.ncnc_pr_texts
        if not pr_txs.active_text:
            return
        pr_txt = pr_txs.active_text.ncnc_pr_text
        layout = self.layout
        row = layout.row(align=True)
        # Left column: labels and per-axis min/max bounds.
        col1 = row.column()
        col1.alignment = "RIGHT"
        col1.label(text="Distance to Travel")
        col1.label(text="Estimated Time")
        col1.label(text="Total Line")
        for i in range(3):
            col1.label(text=f"{round(pr_txt.minimum[i], 1)} || {round(pr_txt.maximum[i], 1)}")
        # Right column: the corresponding values / axis names.
        col2 = row.column(align=False)
        col2.label(text=f"{int(pr_txt.distance_to_travel)} mm")
        col2.label(text=f"{timedelta(seconds=int(pr_txt.estimated_time))}")
        col2.label(text=f"{pr_txt.count}")
        for i in "XYZ":
            col2.label(text=i)
        row = layout.row()
        row.operator("ncnc.textssave", icon="EXPORT", text="Export")
class NCNC_PR_Scene(PropertyGroup):
    """Unit presets exposed as two linked checkboxes (mm / inches)."""

    def set_mm(self, val):
        """Setter: force the scene into metric millimeters."""
        settings = bpy.context.scene.unit_settings
        # Write each RNA field only when it actually differs.
        for attr, wanted in (("system", 'METRIC'), ("length_unit", 'MILLIMETERS')):
            if getattr(settings, attr) != wanted:
                setattr(settings, attr, wanted)

    def get_mm(self):
        """Getter: True while the scene measures lengths in millimeters."""
        return bpy.context.scene.unit_settings.length_unit == 'MILLIMETERS'

    mm: BoolProperty(
        name="Milimeters",
        set=set_mm,
        get=get_mm
    )

    def set_inc(self, val):
        """Setter: force the scene into imperial inches."""
        settings = bpy.context.scene.unit_settings
        for attr, wanted in (("system", 'IMPERIAL'), ("length_unit", 'INCHES')):
            if getattr(settings, attr) != wanted:
                setattr(settings, attr, wanted)

    def get_inc(self):
        """Getter: True while the scene measures lengths in inches."""
        return bpy.context.scene.unit_settings.length_unit == 'INCHES'

    inc: BoolProperty(
        name="Inches",
        set=set_inc,
        get=get_inc
    )

    @classmethod
    def register(cls):
        Scene.ncnc_pr_scene = PointerProperty(
            name="NCNC_PR_Head Name",
            description="NCNC_PR_Head Description",
            type=cls
        )

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_scene
class NCNC_OT_Scene(Operator):
    """Reset the workspace and/or apply the nCNC scene settings."""
    bl_idname = "ncnc.scene"
    bl_label = "NCNC Scene Settings"
    bl_description = "New: Deletes the objects and renewed the workspace\n" \
                     "Mod: Adjust scene settings for nCNC"
    bl_options = {'REGISTER', 'UNDO'}

    newscene: BoolProperty(
        name="New Scene",
        description="Deletes the objects and renewed the workspace",
        default=False
    )
    settings: BoolProperty(
        name="Apply nCNC Scene Settings",
        description="Adjust scene settings",
        default=True
    )

    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event=None):
        # "New": wipe all objects and start over with a demo bezier curve.
        if self.newscene:
            for i in bpy.data.objects:
                i.ncnc_pr_toolpathconfigs.included = False
            bpy.ops.object.select_all(action='SELECT')
            bpy.ops.object.delete(use_global=False, confirm=False)
            bpy.ops.curve.primitive_bezier_curve_add(radius=20, enter_editmode=False, location=(0, 0, 0))
            bpy.ops.view3d.view_all(center=True)
            # Include the demo curve and convert it right away.
            context.active_object.ncnc_pr_toolpathconfigs.included = True
            bpy.ops.ncnc.convert()
            self.report({'INFO'}, "Workspace has been renewed for nCNC")
            bpy.context.space_data.overlay.show_extra_edge_length = True
            bpy.ops.view3d.view_axis(type="TOP")
            self.report({'INFO'}, "Applied to nCNC Settings")
        # "Apply": normalize units, scale and clipping for CNC work.
        if self.settings:
            unit = context.scene.unit_settings
            spce = context.space_data
            prop = context.scene.ncnc_pr_scene
            # Re-assigning the active unit re-fires its setter, which
            # normalizes the scene's unit system (see NCNC_PR_Scene).
            if prop.inc:
                prop.inc = True
            else:
                prop.mm = True
            if unit.scale_length != 0.001:
                unit.scale_length = 0.001
            if spce.overlay.grid_scale != 0.001:
                spce.overlay.grid_scale = 0.001
            if spce.clip_end != 10000:
                spce.clip_end = 10000
        return {"FINISHED"}
class NCNC_PT_Scene(Panel):
    """Scene tab: workspace reset buttons and unit presets."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "nCNC"
    bl_label = "Scene"
    bl_idname = "NCNC_PT_scene"

    @classmethod
    def poll(cls, context):
        # Visible only while the Scene tool tab is active.
        return context.scene.ncnc_pr_head.tool_scene

    def draw(self, context):
        pr_scn = context.scene.ncnc_pr_scene
        row = self.layout.row(align=True)
        col1 = row.column()
        col1.alignment = "RIGHT"
        col1.label(text="Scene")
        col1.label(text="")
        col1.label(text="Units")
        col1.scale_x = 1
        col2 = row.column(align=False)
        col2.operator("ncnc.scene", text="New", icon="FILE_NEW").newscene = True
        col2.operator("ncnc.scene", text="Apply", icon="SETTINGS").settings = True  # "OPTIONS"
        col2.prop(pr_scn, "mm", text="Milimeters")
        col2.prop(pr_scn, "inc", text="Inches")
class nCompute:
    """Geometry helpers: circumcenter of three points via Cramer's rule.

    References;
    Circle Center;
    https://blender.stackexchange.com/questions/28239/how-can-i-determine-calculate-a-circumcenter-with-3-points
    """

    @classmethod
    def replace_col(cls, M, i, C):
        """Overwrite column *i* of matrix *M* (in place) with the values in *C*."""
        for row_index, value in enumerate(C):
            M[row_index][i] = value

    @classmethod
    def circle_center_(cls, B, C, N):
        """Circumcenter of triangle (origin, B, C) in the plane with normal N.

        Solves the 3x3 linear system by Cramer's rule: each coordinate is the
        determinant of the base matrix with one column replaced by the RHS.
        """
        base = Matrix([
            B, C, N
        ])
        rhs = [B.dot(B) * 0.5,
               C.dot(C) * 0.5,
               0]
        # Guard against a singular (degenerate) system.
        denominator = base.determinant() or 1
        coords = []
        for col_index in range(3):
            numerator = base.copy()
            cls.replace_col(numerator, col_index, rhs)
            coords.append(numerator.determinant() / denominator)
        return Vector(coords)

    @classmethod
    def circle_center(cls, A, B, C):
        """Circumcenter of the triangle A, B, C in 3D."""
        rel_b = B - A
        rel_c = C - A
        normal = rel_b.cross(rel_c)
        # Solve relative to A, then shift back.
        return A + cls.circle_center_(rel_b, rel_c, normal)
# my_icons_dir = os.path.join(os.path.dirname(__file__), "icons")
# icons = bpy.utils.previews.new()
# icons.load("my_icon", os.path.join(my_icons_dir, "auto.png"), 'IMAGE')
# row.prop( ... icon_value=icons["my_icon"].icon_id ...)
# Run a task whenever the scene is updated.
# This handler is called when the active object changes, or when anything on screen changes.
def convert_updated_objects(scene):
    """Depsgraph handler: report objects changed since the last update.

    NOTE(review): ``update_tag()`` appears to return None, so the condition
    effectively depends on ``update_from_editmode()`` alone — confirm against
    the Blender API. Calling ``update_tag()`` here also re-tags the object on
    every pass; verify this does not retrigger depsgraph updates.
    """
    updated_objects = []
    for o in scene.objects:
        if o.update_from_editmode() or o.update_tag():
            updated_objects.append(o)
    if(len(updated_objects) > 0):
        print("updated objects: %s"%updated_objects[0])
    # Operators cannot run inside this handler (Blender locks up), hence the
    # call below stays commented out; trigger the conversion from an operator
    # instead, i.e. after the add-on has finished loading.
    # bpy.ops.ncnc.convert(auto_call=True)


bpy.app.handlers.depsgraph_update_post.append(convert_updated_objects)
# #################################
# #################################
# #################################
class NCNC_PR_Convert(PropertyGroup):
    """Settings and UI template for the object-to-G-code conversion."""
    # Modal run flags shared by all conversions; only the last entry is live.
    isrun = []

    # Conversion progress shown while a run is active.
    loading: IntProperty(
        name="Loading...",
        subtype="PERCENTAGE",
        default=0,
        min=0,
        max=100
    )

    def update_overwrite(self, context):
        # Auto-convert only makes sense when the output text gets overwritten.
        if not self.overwrite:
            self.auto_convert = False

    overwrite: BoolProperty(
        name="Overwrite",
        default=True,
        description="Overwrite the last text",
        update=update_overwrite
    )
    auto_convert: BoolProperty(
        name="Auto Convert",
        default=False,
        description="On / Off"
    )

    def template_convert(self, layout, context=None):
        """Draw the convert-button row into *layout*; returns the row."""
        row = layout.row(align=True)
        row.prop(self, "overwrite",
                 icon_only=True,
                 icon=("RADIOBUT_ON" if self.overwrite else "RADIOBUT_OFF"),
                 invert_checkbox=self.overwrite)
        row.separator()
        row.operator("ncnc.convert",
                     text="Convert to G-Code" if not self.loading else "",
                     icon="COLOR_GREEN",
                     )
        # While converting, show the progress slider instead of the label.
        if self.loading:
            row.prop(self, "loading", slider=True)
        if self.overwrite:
            row.prop(self, "auto_convert",
                     icon_only=True,
                     icon=("ONIONSKIN_ON" if self.auto_convert else "ONIONSKIN_OFF"),
                     # invert_checkbox=self.auto_convert
                     )
        return row

    @classmethod
    def register(cls):
        Scene.ncnc_pr_convert = PointerProperty(
            name="NCNC_PR_Convert Name",
            description="NCNC_PR_Convert Description",
            type=cls
        )

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_convert
class NCNC_OT_Convert(Operator):
    """Modal operator that converts all included curve objects to G-code.

    Runs timer-driven so the UI stays responsive: each tick processes one
    object from ``scene.ncnc_pr_objects.items`` (on a temporary copy with
    transforms applied). The generated code accumulates in ``kodlar`` and is
    written to an internal text block named "nCNC" when finished.
    """
    bl_idname = "ncnc.convert"
    bl_label = "Convert"
    bl_description = "Convert included objects to Gcode"
    bl_options = {'REGISTER'}

    # if auto converting (depsgraph-triggered), auto_call must be True
    auto_call: BoolProperty(default=False)

    # NOTE(review): class-level (shared) state; invoke() resets it and only
    # one conversion is live at a time, so the sharing is tolerated here.
    kodlar = []                   # accumulated G-code lines
    shape = 0                     # current shape (object) counter
    block = 0                     # current block (spline) counter
    delay = .1                    # modal tick period, seconds
    _last_time = 0
    run_index = 0                 # this run's slot in pr_cvr.isrun
    last_index = 0                # index of the next object to process
    pr_txs = None
    first_point = None            # first XY position / safe Z of the job
    last_selected_object = None

    # TODO: make the inner Z-step loop modal as well, so conversion never
    # blocks the interface.

    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event):
        pr_cvr = context.scene.ncnc_pr_convert
        # Ignore depsgraph-triggered calls unless auto-convert is enabled.
        if self.auto_call and not pr_cvr.auto_convert:
            return {'CANCELLED'}
        # Cancel any previous run, then register this one.
        len_isrun = len(pr_cvr.isrun)
        if len_isrun:
            pr_cvr.isrun[-1] = False
        self.run_index = len_isrun
        pr_cvr.isrun.append(True)
        self.first_point = Vector((0, 0, 0))
        self.pr_obj = bpy.context.scene.ncnc_pr_objects
        self.pr_txs = bpy.context.scene.ncnc_pr_texts
        self.last_selected_object = context.active_object
        # Start a fresh G-code buffer.
        self.kodlar.clear()
        self.add_header(context)
        self.shape = 0
        context.window_manager.modal_handler_add(self)
        return self.timer_add(context)

    def timer_add(self, context):
        wm = context.window_manager
        self._timer = wm.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}

    def timer_remove(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
        return {'CANCELLED'}

    def modal(self, context, event):
        # Throttle to at most one object per `delay` seconds.
        if time.time() - self._last_time < self.delay:
            return {'PASS_THROUGH'}
        self._last_time = time.time()
        pr_cvr = context.scene.ncnc_pr_convert
        pr_cvr.loading = (self.last_index / len(self.pr_obj.items)) * 100
        if not pr_cvr.isrun[self.run_index] or (len(self.pr_obj.items) <= self.last_index):
            return self.finished(context)

        obj_orj = self.pr_obj.items[self.last_index]
        # Work on a copy so transform_apply etc. never touch the original.
        obj = obj_orj.obj.copy()
        obj.data = obj_orj.obj.data.copy()
        # The copy must be linked to a collection; when the default collection
        # was deleted, bpy.data.collections[0] would raise IndexError, so
        # create one on demand.
        if not len(bpy.data.collections):
            collection = bpy.data.collections.new("nCNC")
            bpy.context.scene.collection.children.link(collection)
        bpy.data.collections[0].objects.link(obj)
        self.last_index += 1

        # Make the copy the active, sole-selected object.
        bpy.ops.object.select_all(action='DESELECT')
        context.view_layer.objects.active = obj
        obj.select_set(True)
        # To avoid errors on 2D curves.
        obj.data.dimensions = "3D"
        # Reference:
        # https://blender.stackexchange.com/questions/75380/apply-transforms-on-object-copies-not-working
        bpy.ops.object.transform_apply(location=True, scale=True, rotation=True)
        if self.last_selected_object:
            # Restore the user's previous selection.
            bpy.ops.object.select_all(action='DESELECT')
            context.view_layer.objects.active = self.last_selected_object
            self.last_selected_object.select_set(True)

        if not obj or not obj.ncnc_pr_toolpathconfigs.included:
            # Remove the volatile copy.
            bpy.data.collections[0].objects.unlink(obj)
            bpy.data.objects.remove(obj)
            return {'PASS_THROUGH'}
        elif obj.type == 'CURVE':
            # The configurations of the object.
            conf = obj.ncnc_pr_toolpathconfigs
            self.dongu = []
            if conf.step > conf.depth:
                # Remove the volatile copy.
                bpy.data.collections[0].objects.unlink(obj)
                bpy.data.objects.remove(obj)
                return {'PASS_THROUGH'}
            # Steps in the Z axis -> 0.5, 1.0, 1.5, 2.0 ...
            self.dongu.extend([i * conf.step for i in range(1, int(conf.depth / conf.step + 1), )])
            # Add the remainder as a final partial Z step.
            if conf.depth % conf.step > 0.01:
                if len(self.dongu):
                    self.dongu.append(round(self.dongu[-1] + conf.depth % conf.step, conf.round_loca))
                else:
                    # NOTE(review): unreachable in practice (step <= depth
                    # guarantees a non-empty list above) and would raise
                    # IndexError if ever reached.
                    self.dongu.append(round(self.dongu[-1], conf.round_loca))
            self.block = 0
            self.shape += 1
            # Create initial configs of the shape -> Block x.0
            self.add_block(expand="1", enable="1")
            self.kodlar.append(f"{conf.plane} ( Plane Axis )")
            self.kodlar.append(f"S{conf.spindle} ( Spindle )")
            self.kodlar.append(f"( Safe Z : {conf.safe_z} )")
            self.kodlar.append(f"( Step Z : {conf.step} )")
            self.kodlar.append(f"( Total depth : {round(conf.depth, 3)} )")
            self.kodlar.append(f"( Feed Rate -mm/min- : {conf.feed} )")
            self.kodlar.append(f"( Plunge Rate -mm/min- : {conf.plunge} )")
            # Necessary calculations have been made;
            # G-code can now be created for the object.
            self.convert_gcode(obj)

        # Remove the volatile copy.
        bpy.data.collections[0].objects.unlink(obj)
        bpy.data.objects.remove(obj)
        return {'PASS_THROUGH'}

    def finished(self, context):
        """Write the collected G-code to the internal "nCNC" text block."""
        pr_cvr = context.scene.ncnc_pr_convert
        pr_cvr.isrun[self.run_index] = False
        pr_cvr.loading = 0
        self.add_footer()
        # Create the internal text file (optionally replacing the old one).
        file_name = "nCNC"
        if pr_cvr.overwrite and file_name in bpy.data.texts.keys():
            bpy.data.texts.remove(bpy.data.texts[file_name])
        str_codes = "\n".join(self.kodlar)
        mytext = bpy.data.texts.new(file_name)
        mytext.write(str_codes)
        self.pr_txs.texts = mytext.name
        self.report({"INFO"}, "Converted")
        return self.timer_remove(context)

    def add_header(self, context):
        """Emit the job prologue (spindle on, dwell, units)."""
        self.add_block(name="Header", expand="1", enable="1")
        self.kodlar.append("(Made in Blender by nCNC addons)")
        self.kodlar.append("M3 S1200")
        self.kodlar.append("G4 P1 (Pause 1 second)")
        self.kodlar.append("G21 (All units in mm)")
        # self.kodlar.append("G0 Z5")

    def add_footer(self):
        """Emit the job epilogue (retract, spindle off, park, end)."""
        self.add_block(name="Footer", expand="1", enable="1")
        self.kodlar.append(f"G0 Z{round(self.first_point.z, 3) or 5}")
        self.kodlar.append("M5")
        self.kodlar.append("G0 X0 Y0")
        self.kodlar.append("M2")
        self.kodlar.append("(Total Number of Lines : {})".format(len(self.kodlar)))

    def add_block(self, name=None, expand="0", enable="1"):
        """Emit a block header comment (named, or Shape<n>.<m> by default)."""
        self.kodlar.append("") if len(self.kodlar) > 0 else None
        self.kodlar.append("(Block-name: " + ("Shape{}.{})".format(self.shape, self.block) if not name else name + ")"))
        self.kodlar.append("(Block-expand: %s)" % expand)
        self.kodlar.append("(Block-enable: %s)" % enable)

    def convert_gcode(self, obj):
        """Convert each spline of *obj* once per Z step."""
        for i, subcurve in enumerate(obj.data.splines):
            self.block += 1
            self.add_block(expand="0", enable="1")  # start a new block header
            curvetype = subcurve.type
            for j, k in enumerate(self.dongu):
                self.z_adim = Vector((0, 0, k))
                if curvetype == 'NURBS':
                    # NURBS conversion is not implemented yet.
                    xl = []
                    yl = []
                # Convert a POLY-type spline.
                elif curvetype == 'POLY':
                    self.poly(obj, subcurve)
                # Convert a BEZIER-type spline; alternate direction per Z pass.
                elif curvetype == 'BEZIER':
                    # Fixed: `j % 2 is 1` compared an int by identity (a
                    # SyntaxWarning relying on CPython small-int caching).
                    self.bezier(obj, subcurve, reverse=j % 2 == 1)

    def bezier(self, obj, subcurve, reverse=False):
        """Tessellate one bezier spline into G1/G2/G3 moves.

        TODO: honor ``pref.as_line`` more strictly — emit G2/G3 only when
        arcs are wanted.
        """
        pref = obj.ncnc_pr_toolpathconfigs
        rc = pref.round_circ
        r = pref.round_loca
        z_safe = pref.safe_z
        max_z = -10000
        step_z = obj.ncnc_pr_toolpathconfigs.step
        nokta_sayisi = len(subcurve.bezier_points) - (0 if subcurve.use_cyclic_u else 1)
        nokta_list = []
        for j in range(nokta_sayisi):
            cycle_point = j == nokta_sayisi - 1 and subcurve.use_cyclic_u
            # Index of the segment's end point (wraps for cyclic curves).
            lp = 0 if cycle_point else j + 1
            # Segment start, its handles, and segment end (shifted down by Z step).
            m1 = subcurve.bezier_points[j].co - self.z_adim
            hr = subcurve.bezier_points[j].handle_right - self.z_adim
            hl = subcurve.bezier_points[lp].handle_left - self.z_adim
            m2 = subcurve.bezier_points[lp].co - self.z_adim
            # Probe three points between m1 and m2 to classify the segment:
            # if all probes lie on one line (or one circle), far fewer G-code
            # moves are needed than for a full tessellation.
            sorgula = [0.25, 0.5, 0.75]
            bak_merkez = []
            bak_dogru = []
            for i in sorgula:
                ps = nVector.bul_bezier_nokta_4p1t(i, m1, hr, hl, m2)
                bak_merkez.append(nVector.yuvarla_vector(rc, nCompute.circle_center(m1, ps, m2)))
                bak_dogru.append(nVector.bul_dogru_uzerindemi_3p(m1, m2, ps))
            # Case 1: the segment is a straight line.
            if False not in bak_dogru:
                if j == 0:
                    nokta_list.append(m1)
                nokta_list.append(nVector.bul_dogrunun_ortasi_2p(m1, m2))
                nokta_list.append(m2)
            # Case 2: the segment is a circular arc (all probe centers agree).
            elif not pref.as_line and bak_merkez[0] == bak_merkez[1] and bak_merkez[1] == bak_merkez[2]:
                if j == 0:
                    nokta_list.append(m1)
                nokta_list.append(nVector.bul_bezier_nokta_4p1t(0.5, m1, hr, hl, m2))
                nokta_list.append(m2)
            # Case 3: user wants straight lines only.
            elif pref.as_line:
                resolution = subcurve.resolution_u
                step = 1 / resolution / 2
                for i in range(resolution * 2 + 1):
                    o = nVector.bul_bezier_nokta_4p1t(step * i, m1, hr, hl, m2)
                    if i != 0 or j == 0:
                        nokta_list.append(o)
            # Case 4: general curve — tessellate with an even point count.
            else:
                resolution = subcurve.resolution_u
                if resolution % 2 == 1:
                    resolution += 1
                step = 1 / resolution
                for i in range(resolution + 1):
                    o = nVector.bul_bezier_nokta_4p1t(step * i, m1, hr, hl, m2)
                    if i == 0 and j != 0:
                        pass
                    else:
                        nokta_list.append(o)
        if reverse:
            nokta_list.reverse()

        # Consume points in strides of two: (p1, p2, p3) defines one arc.
        kac_kesit = len(nokta_list) - 2
        for i in range(0, kac_kesit, 2):
            p1 = nokta_list[i]
            p2 = nokta_list[i + 1]
            p3 = nokta_list[i + 2]
            m = nVector.bul_cember_merkezi_3p(p1, p2, p3, duzlem=pref.plane)
            b = nVector.bul_yonu_1m3p(m, p1, p2, p3)
            # Arc-center offsets; the active plane's normal axis stays 0.
            I = m.x - p1.x if pref.plane != "G19" else 0
            J = m.y - p1.y if pref.plane != "G18" else 0
            K = m.z - p1.z if pref.plane != "G17" else 0
            # Beyond this offset the "arc" is effectively straight.
            limit = 800
            if i == 0:
                # Find Max Z Point
                for nlp in nokta_list:
                    if nlp.z > max_z:
                        max_z = nlp.z + step_z
                if not self.first_point.z:
                    # First Z Position (Safe Z)
                    self.first_point.z = max(max_z + z_safe, z_safe)
                    self.first_point.x = p1.x
                    self.first_point.y = p1.y
                self.kodlar.append(f"G0 Z{round(self.first_point.z, r)}")
                # First XY Position
                self.kodlar.append(f"G0 X{round(p1.x, r)} Y{round(p1.y, r)}")
                # Rapid Z, Nearest point
                self.kodlar.append(f"G0 Z{round(max_z + 1, r)}")
                # First Plunge in Z
                self.kodlar.append(f"G1 Z{round(p1.z, r)} F{pref.plunge}")
            if pref.as_line or abs(I) > limit or abs(J) > limit or abs(K) > limit:
                q = "G1 X{1:.{0}f} Y{2:.{0}f} Z{3:.{0}f}".format(r, p3.x, p3.y, p3.z)
            else:
                q = "{1} X{2:.{0}f} Y{3:.{0}f} Z{4:.{0}f} I{5:.{0}f} J{6:.{0}f} K{7:.{0}f}".format(r, b, p3.x, p3.y,
                                                                                                   p3.z, I, J, K)
            if i == 0: q += " F{}".format(pref.feed)
            self.kodlar.append(q)

    def poly(self, obj, subcurve):
        """Convert one poly spline into G0/G1 moves."""
        pref = obj.ncnc_pr_toolpathconfigs
        r = pref.round_loca
        z_safe = pref.safe_z
        for i, p in enumerate(subcurve.points):
            loc = p.co.to_3d() - self.z_adim
            if i == 0:
                # Retract, position over the first point, then plunge.
                self.kodlar.append("G0 Z{1:.{0}f}".format(r, z_safe))
                self.kodlar.append("G0 X{1:.{0}f} Y{2:.{0}f}".format(r, loc.x, loc.y))
                self.kodlar.append("G1 Z{1:.{0}f} F{2}".format(r, loc.z, pref.plunge))
            else:
                q = "G1 X{1:.{0}f} Y{2:.{0}f} Z{3:.{0}f}".format(r, loc.x, loc.y, loc.z)
                if i == 1: q += " F{}".format(pref.feed)
                self.kodlar.append(q)
        if subcurve.use_cyclic_u:
            # Close the loop back to the first point, then retract.
            loc = subcurve.points[0].co.to_3d() - self.z_adim
            self.kodlar.append("G1 X{1:.{0}f} Y{2:.{0}f} Z{3:.{0}f}".format(r, loc.x, loc.y, loc.z))
            self.kodlar.append("G0 Z{1:.{0}f}".format(r, z_safe))
        else:
            self.kodlar.append("G0 Z{1:.{0}f}".format(r, z_safe))
# #################################
# #################################
# #################################
class NCNC_PR_Connection(PropertyGroup):
    """
    Only CNC Connection Panel Properties

    Wraps the module-global serial handle ``dev``: ``isconnected`` probes
    it in the getter and opens/closes it in the setter.
    """
    def get_isconnected(self):
        # Probe the open handle; any failure means the port has died.
        if dev:
            try:
                dev.inWaiting()
            except Exception:  # narrowed from a bare except
                return False
        return True if dev else False
    def set_isconnected(self, value):
        """Value : True->Connect, False->Disconnect"""
        global dev
        # Always drop the current handle first so connecting is a clean reopen.
        if dev:
            try:
                dev.close()
            except Exception:
                ...
            dev = None
        if value:
            try:
                s = Serial(self.ports, self.bauds)
                # Wake GRBL and flush its startup banner before first use.
                s.write("\r\n\r\n".encode("ascii"))
                time.sleep(.1)
                s.flushInput()
                dev = s
            except Exception:
                # Open failed: dev stays None and the getter reports "disconnected".
                ...
            bpy.ops.ncnc.communication(start=True)
        else:
            bpy.ops.ncnc.communication(start=False)
    def get_ports(self, context):
        # Enumerate currently available serial ports for the EnumProperty items.
        return [(i.device, str(i), i.name) for i in comports()]
    isconnected: BoolProperty(
        name="IsConnected",
        description="Is Connected ?",
        default=False,
        get=get_isconnected,
        set=set_isconnected
    )
    ports: EnumProperty(
        name="Select Machine",
        description="Select the machine you want to connect",
        items=get_ports
    )
    bauds: EnumProperty(
        items=[("2400", "2400", ""),
               ("4800", "4800", ""),
               ("9600", "9600", ""),
               ("19200", "19200", ""),
               ("38400", "38400", ""),
               ("57600", "57600", ""),
               ("115200", "115200", ""),
               ("230400", "230400", "")
               ],
        name="Select Baud",
        description="Select the machine you want to connect",
        default="115200"
    )
    controller: EnumProperty(
        items=[("GRBL", "GRBL v1.1 (Tested)", "")],
        name="Controller",
        description="Under development...",
        default="GRBL"
    )
    @classmethod
    def register(cls):
        Scene.ncnc_pr_connection = PointerProperty(
            name="NCNC_PR_Connection Name",
            description="NCNC_PR_Connection Description",
            type=cls
        )
    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_connection
class NCNC_OT_Connection(Operator):
    """Toggle the serial connection and sync the dashboard/mill overlays."""
    bl_idname = "ncnc.connection"
    bl_label = "Connection"
    bl_description = "Connect / Disconnect"
    bl_options = {'REGISTER'}
    def invoke(self, context, event):
        connection = context.scene.ncnc_pr_connection
        # Flip the state; the property setter does the real open/close work.
        connection.isconnected = not connection.isconnected
        vision = context.scene.ncnc_pr_vision
        vision.dash = connection.isconnected
        vision.mill = connection.isconnected
        # Start communication when connected
        # bpy.ops.ncnc.communication(start=pr_con.isconnected)
        bpy.ops.ncnc.decoder(start=connection.isconnected)
        return {'FINISHED'}
class NCNC_PT_Connection(Panel):
    """Sidebar panel: port/baud/controller pickers plus the connect button."""
    bl_idname = "NCNC_PT_connection"
    bl_label = "Connection"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    @classmethod
    def poll(cls, context):
        # Shown only while the "machine" tool is selected in the header.
        return context.scene.ncnc_pr_head.tool_machine
    def draw(self, context):
        props = context.scene.ncnc_pr_connection
        column = self.layout.column()
        column.prop(props, "ports", text="Port")
        column.prop(props, "bauds", text="Baud")
        column.prop(props, "controller")
        is_connected = props.isconnected
        column.operator("ncnc.connection",
                        text="Connected" if is_connected else "Connect",
                        icon="LINKED" if is_connected else "UNLINKED",
                        depress=is_connected
                        )
# #################################
# #################################
# #################################
class NCNC_PR_MessageItem(PropertyGroup):
    """One line of the communication log (sent or received)."""
    ingoing: BoolProperty(
        name="Ingoing?",
        description="Message is Ingoing / Outgoing"
    )
    message: StringProperty(
        name="Message?",  # fixed UI typo: was "Messsage?"
        description="Message"
    )
    # time = time.time()
    # incoming = StringProperty(name="Incoming", default="")
    @classmethod
    def register(cls):
        Scene.ncnc_pr_messageitem = PointerProperty(
            name="NCNC_PR_MessageItem Name",
            description="NCNC_PR_MessageItem Description",
            type=cls)
    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_messageitem
class NCNC_UL_Messages(UIList):
    """List widget drawing one communication-log entry per row."""
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        # Marker: errors get a red dot, replies are blank, sent lines an arrow.
        if item.message.startswith("error:"):
            marker = "FUND"
        else:
            marker = "BLANK1" if item.ingoing else "RIGHTARROW_THIN"
        layout.row().prop(item, "message",
                          text="",
                          icon=marker,
                          emboss=False)
class NCNC_OP_Messages(Operator):
    """Manage the communication log list: add / remove / clear entries,
    or clear the outgoing queues."""
    bl_idname = "ncnc.messages"
    bl_label = "Messages Operator"
    bl_description = "Clear Messages in the ListBox"
    bl_options = {'REGISTER'}
    action: EnumProperty(
        items=[
            ("add", "Add to message", ""),
            ("remove", "Remove to message", ""),
            ("clear", "Clear all messages", ""),
            # Identifier "clearqueu" kept (callers reference it); label typo fixed.
            ("clearqueu", "Clear Queue", "")]
    )
    def execute(self, context):
        pr_com = context.scene.ncnc_pr_communication
        if self.action == "add":
            print("Developing ...")
        elif self.action == "remove":
            # Guard against the default (-1) or a stale index before removing.
            idx = pr_com.active_item_index
            if 0 <= idx < len(pr_com.items):
                pr_com.items.remove(idx)
                pr_com.active_item_index = min(idx, len(pr_com.items) - 1)
        elif self.action == "clear":
            pr_com.items.clear()
            pr_com.active_item_index = 0
        elif self.action == "clearqueu":
            pr_com.clear_queue()
        return {'FINISHED'}
class NCNC_PR_Communication(PropertyGroup):
    """Central mailbox between the UI, the sender modal and the decoder.

    Holds the visible message log plus three send queues
    (public / hidden / hardly) and the raw answer list read back from
    the controller.
    """
    def get_active(self):
        # Communication is allowed only in these machine states (or before
        # the first status report, while status is still "").
        return bpy.context.scene.ncnc_pr_machine.status in ("IDLE", "RUN", "JOG", "CHECK", "HOME", "")
    def run_mode_update(self, context):
        # Keep the isrun flag in sync with the enum.
        self.isrun = self.run_mode != "stop"
    items: CollectionProperty(
        type=NCNC_PR_MessageItem,
        name="Messages",
        description="All Message Items Collection"
    )
    active_item_index: IntProperty(
        name="Active Item",
        default=-1,
        description="Selected message index in Collection"
    )
    isactive: BoolProperty(
        name='Communication is Active?',
        description='İletişimi durdur veya sürdür',
        default=True,
        get=get_active
    )
    isrun: BoolProperty(default=False)
    run_mode: EnumProperty(
        items=[
            ("stop", "Stop", "Stop and end"),
            ("start", "Run", "Send to GCodes"),
            ("pause", "Pause", "Pause to Send"),
            ("resume", "Resume", "Pause to Sending"),
        ],
        name="Gcode",
        default="stop",
        update=run_mode_update
    )
    ############################################################
    # #################################################### QUEUE
    # Public message queue (lines shown in the UI log).
    # NOTE: these queues are class attributes, i.e. shared state for the
    # whole session -- presumably intentional, since Blender keeps a
    # single instance of this PropertyGroup per scene.
    queue_list = []
    ######################################
    # ############################# Hidden
    # Hidden message queue (internal polling commands, not logged).
    queue_list_hidden = []
    # Hidden answer queue (raw replies read back from the controller).
    answers = []
    def set_hidden(self, message):
        # Queue a command that is sent but never shown in the log.
        self.queue_list_hidden.append(message)
        # if len(self.queue_list_hidden) > 10:
        #     _volatile = self.queue_list_hidden[:10]
        #     self.queue_list_hidden.clear()
        #     self.queue_list_hidden.extend(_volatile)
        # print("queue_list_hidden", self.queue_list_hidden)
    def get_answer(self):
        # When a run is active and the public queue has drained, the job
        # is finished -> flip run_mode back to "stop".
        if self.isrun and not len(self.queue_list):
            self.run_mode = "stop"
        # Pop the oldest reply; "" means "nothing pending".
        return self.answers.pop(0) if len(self.answers) else ""
    ######################################
    # ############################# Hardly
    # Forced message queue: sent first, even while communication is passive.
    queue_list_hardly = []
    def set_hardly(self, message):
        self.queue_list_hardly.append(message)
    def clear_queue(self):
        # Drops pending outgoing work; the hardly queue is left untouched.
        self.queue_list.clear()
        self.queue_list_hidden.clear()
    ############################################################
    # ################################################ MESSAGING
    def update_messaging(self, context):
        # Fires when the user types into the "Outgoing Message" field:
        # queue the text, then clear the field (guard avoids recursion).
        if not self.messaging:
            return
        self.send_in_order(self.messaging)
        self.messaging = ""
    messaging: StringProperty(name="Outgoing Message",
                              update=update_messaging)
    ############################################################
    # ################################################## METHODS
    def send_in_order(self, msg=None):
        # Append msg to the public queue. A "$x=val" settings write (but
        # not a "$J" jog) also queues a hidden "$$" so the cached settings
        # get re-read from the device.
        if not msg:
            return
        if "=" in msg and "$J" not in msg:
            self.set_hidden("$$")
        self.queue_list.append(msg)
    @classmethod
    def register(cls):
        Scene.ncnc_pr_communication = PointerProperty(
            name="NCNC_PR_Communication Name",
            description="NCNC_PR_Communication Description",
            type=cls)
    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_communication
class NCNC_OT_CommunicationRun(Operator):
bl_idname = "ncnc.communicationrun"
bl_label = "Communication Run"
bl_description = "Communication Description"
bl_options = {'REGISTER'}
action: EnumProperty(
items=[
("start", "Start", ""),
("pause", "Pause", ""),
("resume", "Resume", ""),
("stop", "Stop", "")]
)
def execute(self, context):
return self.invoke(context, None)
def invoke(self, context, event):
pr_com = context.scene.ncnc_pr_communication
pr_txt = context.scene.ncnc_pr_texts.active_text
if self.action == "start":
if not pr_txt:
self.report({'INFO'}, "No Selected Text")
return {"CANCELLED"}
for i in pr_txt.as_string().splitlines():
x = i.strip()
if not x: # or (x.startswith("(") and x.endswith(")")):
continue
pr_com.send_in_order(x)
pr_com.run_mode = "start"
elif self.action == "pause":
bpy.ops.ncnc.machine(action="hold")
pr_com.run_mode = "pause"
elif self.action == "resume":
bpy.ops.ncnc.machine(action="resume")
pr_com.run_mode = "start"
elif self.action == "stop":
pr_com.run_mode = "stop"
bpy.ops.ncnc.machine(action="reset")
return {'FINISHED'}
# ##########################################################
# ##########################################################
# One running modal operator instance per bl_idname.
running_modals = {}
def register_modal(self):
    """Register *self* as the single running modal for its ``bl_idname``,
    stopping any previously registered instance first."""
    # if exists previous modal (self), stop it
    unregister_modal(self)
    # Register to self
    running_modals[self.bl_idname] = self
    # self.report({'INFO'}, "NCNC Communication: Started")
def unregister_modal(self):
    """Stop and forget the modal registered under ``self.bl_idname``.

    Safe to call when nothing is registered (idempotent).
    """
    # pop() with a default never raises -- replaces the old try/bare-except
    # whose handler could itself raise KeyError.
    self_prev = running_modals.pop(self.bl_idname, None)
    if self_prev:
        # Ask the modal loop to exit on its next timer tick.
        self_prev.inloop = False
# ##########################################################
# ##########################################################
class NCNC_OT_Communication(Operator):
    """Modal operator: the serial traffic pump.

    Runs on a window timer and alternates between the three queues on
    NCNC_PR_Communication (hardly / public / hidden), writing one command
    at a time and reading the replies back into the log.
    """
    bl_idname = "ncnc.communication"
    bl_label = "Communication"
    bl_description = "Communication Description"
    bl_options = {'REGISTER'}
    # Sent Mode (only_read)
    # 0.0: Hardly -> Read
    # 0.1: Hardly -> Write
    # 1.0: Public -> Read
    # 1.1: Public -> Write
    # 2.0: Hidden -> Read
    # 2.1: Hidden -> Write
    # (3.1 is a transient state: hardly reply just read, resume via hidden.)
    sent = 0
    pr_con = None
    pr_com = None
    pr_dev = None
    inloop = True
    delay = 0.1  # seconds between ticks; contact() adjusts it per step
    _last_time = 0
    start: BoolProperty(default=True)
    def execute(self, context):
        return self.invoke(context, None)
    def invoke(self, context, event):
        # ########################### STANDARD
        # start=False means "stop the running instance", not "start a new one".
        if not self.start:
            unregister_modal(self)
            return {'CANCELLED'}
        register_modal(self)
        # ####################################
        # ####################################
        # bpy.app.driver_namespace[self.bl_idname] = self
        self.pr_dev = context.scene.ncnc_pr_machine
        self.pr_con = context.scene.ncnc_pr_connection
        self.pr_com = context.scene.ncnc_pr_communication
        context.window_manager.modal_handler_add(self)
        return self.timer_add(context)
    def timer_add(self, context):
        # Drive modal() via a window-manager timer.
        wm = context.window_manager
        self._timer = wm.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}
    def timer_remove(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
        return {'CANCELLED'}
    def modal(self, context, event):
        # ########################### STANDARD
        if not self.inloop:
            if context.area:
                context.area.tag_redraw()
            return self.timer_remove(context)
        # Throttle: the timer may fire faster than the requested delay.
        if time.time() - self._last_time < self.delay:
            return {'PASS_THROUGH'}
        self._last_time = time.time()
        if not self.pr_con.isconnected:
            unregister_modal(self)
            return self.timer_remove(context)
        # ####################################
        # ####################################
        # contact() returns the delay (seconds) until the next useful tick.
        self.delay = self.contact()
        return {'PASS_THROUGH'}
    def contact(self):
        """Pump one tick of the send/read state machine.

        Returns the delay in seconds until the next tick (compared
        against time.time() deltas in modal()).
        """
        pr_com = self.pr_com
        pr_dev = self.pr_dev
        # READ HARDLY
        if self.sent == 0.0:
            for i in self.read().strip().split("\n"):
                c = i.strip()
                if not c:
                    continue
                item = pr_com.items.add()
                item.ingoing = True
                item.message = c
                pr_com.active_item_index = len(pr_com.items) - 1
                pr_com.answers.append(c)
            self.sent = 3.1
            # print("READ HARDLY", c)
        # READ PUBLIC
        elif self.sent == 1.0:
            for i in self.read().strip().split("\n"):
                c = i.strip()
                if not c:
                    continue
                item = pr_com.items.add()
                item.ingoing = True
                item.message = c
                pr_com.active_item_index = len(pr_com.items) - 1
                pr_com.answers.append(c)
            # One visible code has been sent and read. The queue is in one hidden code.
            self.sent = 2.1
        # READ HIDDEN
        elif self.sent == 2.0:
            c = [i.strip() for i in self.read().strip().split("\n")]
            pr_com.answers.extend(c)
            self.sent = 1.1
            # print("READ HIDDEN", c)
        #############
        # SEND HARDLY -- always first, even while communication is passive.
        if len(pr_com.queue_list_hardly):
            code = pr_com.queue_list_hardly.pop(0)
            gi = self.send(code)
            item = pr_com.items.add()
            item.ingoing = False
            item.message = gi
            pr_com.active_item_index = len(pr_com.items) - 1
            self.sent = 0.0
            # print("SEND HARDLY", code, "\n"*5)
            return .1
        if self.sent == 3.1:
            self.sent = 2.1
        elif not pr_com.isactive:
            # print("Communication Passive")
            return 0
        # SEND PUBLIC
        if self.sent == 1.1:
            if len(pr_com.queue_list) and pr_dev.buffer > 10:  # and pr_dev.bufwer > 100
                # If the buffer's remainder is greater than 10, new code can be sent.
                code = pr_com.queue_list.pop(0)
                gi = self.send(code)
                item = pr_com.items.add()
                item.ingoing = False
                item.message = gi
                pr_com.active_item_index = len(pr_com.items) - 1
                self.sent = 1.0
                # "G4 P3" -> apply dwell commands ("wait 3 s" etc.) on our side too
                wait = re.findall('(?<!\()[Gg]0*4 *[pP](\d+\.*\d*)', code)
                if wait:
                    return float(wait[0])
                # print("SEND PUBLIC", code)
                return .2
            else:
                self.sent = 2.1
        # SEND HIDDEN
        if self.sent == 2.1:
            if len(pr_com.queue_list_hidden):
                code = pr_com.queue_list_hidden.pop(0)
                self.send(code)
                self.sent = 2.0
                # print("SEND HIDDEN", code)
                return .1  # if (pr_dev.buffer > 0) and (pr_dev.bufwer > 100) else 1
            else:
                self.sent = 1.1
        return 0
    @classmethod
    def send(cls, msg=None):
        # Write one command to the device, normalised to upper-case ASCII.
        # A "0x.." message is sent as raw bytes (realtime commands).
        if not dev:
            return
        if not msg:
            msg = "$$"  # Texinput here
        if msg.startswith("0x") or msg.startswith("0X"):
            code = bytearray.fromhex(msg[2:])  # int(msg[2:], 16)
            dev.write(code)
            return msg
        msg = msg.translate(tr_translate).upper()
        dev.write(f"{msg}\n".encode("ascii"))
        return msg
    @classmethod
    def read(cls):
        # Drain everything currently waiting in the serial input buffer.
        if not dev:
            return
        a = dev.read_all().decode("utf-8")
        return a
class NCNC_PT_Communication(Panel):
    """Sidebar panel: message log, outgoing-message field and queue info."""
    bl_idname = "NCNC_PT_communication"
    bl_label = "Communication"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    # bl_options = {"DEFAULT_CLOSED", "HIDE_HEADER"}
    @classmethod
    def poll(cls, context):
        return context.scene.ncnc_pr_head.tool_machine
    def draw(self, context):
        comm = context.scene.ncnc_pr_communication
        column = self.layout.column(align=True)
        # Scrollable log of sent/received lines.
        column.template_list(
            "NCNC_UL_Messages",    # TYPE
            "ncnc_ul_messages",    # ID
            comm,                  # Data Pointer
            "items",               # Propname
            comm,                  # active_dataptr
            "active_item_index",   # active_propname
            rows=3,
            type='DEFAULT'
        )
        entry_row = column.row(align=True)
        entry_row.prop(comm, "messaging", text="", full_event=False)
        entry_row.operator("ncnc.messages", text="", icon="TRASH").action = "clear"
        info_row = self.layout.row(align=True)
        info_row.label(text=f"Queue -> Public {len(comm.queue_list)}, Hidden {len(comm.queue_list_hidden)}")
        info_row.operator("ncnc.messages", text="", icon="TRASH").action = "clearqueu"
# Regex for a GRBL "$<num>=<value>" settings line, tolerant of spaces and
# trailing text. Raw string avoids invalid-escape warnings on Python 3.12+
# (same pattern value as before).
rex_conf = r'\$ *(\d*?) *\= *(\d+\.*\d*?)(?:$|\D+.*$)'
r"""
>>> re.findall(r'\$ *(\d*?) *\= *(\d+\.*\d*?)(?:$|\D+.*$)', "$12=34.56 a1b2c3")
$12=34                  #-> [('12', '34')]
$ 12 = 34               #-> [('12', '34')]
$12=34.56               #-> [('12', '34.56')]
$12=34.56 a1b2c3        #-> [('12', '34.56')]
"""
def mask(my_int, min_len=3):
    """Decode a GRBL bitmask setting into a per-axis boolean list.

    my_int:
        1  -> bits 001  -> [True, False, False]
        15 -> bits 1111 -> [True, True, True, True]
    min_len: minimum length of the returned list (axis count).

    Bit 0 (X axis) becomes element 0, hence the reversal.
    """
    # zfill replaces the old space-padding rjust: padding characters decode
    # to False either way, but zero-padding states the intent explicitly.
    return [b == '1' for b in bin(my_int)[2:].zfill(min_len)[::-1]]
def mask_s10(my_int):
    """Fold a $10 status-report value onto the "0"/"1"/"2" UI choices
    (value modulo 3, returned as a string)."""
    remainder = my_int % 3
    return str(remainder)
# Converter table for GRBL "$<n>" settings: maps the setting number (string
# key) to the callable that coerces the reported value into the type of the
# matching ncnc_pr_machine "s<n>" property (mask -> per-axis bool list).
dev_list = {
    "0": int,  # $0=10
    "1": int,  # $1=25
    "2": mask,  # $2=0       # BoolVectorProperty
    "3": mask,  # $3=5       # BoolVectorProperty
    "4": bool,  # $4=0
    "5": bool,  # $5=0
    "6": bool,  # $6=0
    "10": int,  # $10=1
    "11": float,  # $11=0.010
    "12": float,  # $12=0.002
    "13": str,  # $13=0
    "20": bool,  # $20=0
    "21": bool,  # $21=0
    "22": bool,  # $22=0
    "23": mask,  # $23=0     # BoolVectorProperty
    "24": float,  # $24=25.000
    "25": float,  # $25=500.000
    "26": int,  # $26=250
    "27": float,  # $27=1.000
    "30": int,  # $30=1000
    "31": int,  # $31=0
    "100": float,  # $100=800.000
    "101": float,  # $101=800.000
    "102": float,  # $102=800.000
    "110": float,  # $110=500.000
    "111": float,  # $111=500.000
    "112": float,  # $112=500.000
    "120": float,  # $120=10.000
    "121": float,  # $121=10.000
    "122": float,  # $122=10.000
    "130": float,  # $130=200.000
    "131": float,  # $131=200.000
    "132": float,  # $132=200.000
}
class NCNC_OT_Decoder(Operator):
    """Modal operator that parses replies from the controller.

    Runs on a window timer while connected: periodically queues "?"
    (status report) and "$G" (parser state) polls, then decodes every
    pending answer into the ncnc_pr_machine properties.
    """
    bl_idname = "ncnc.decoder"
    bl_label = "NCNC Decoder"
    bl_description = "Resolve Receive Codes"
    # bl_options = {'REGISTER'}
    q_count = 0          # ticks since the last "$G" poll
    ct_reg = None        # Context, Regions (cached for UI redraws)
    pr_con = None
    pr_com = None
    pr_dev = None
    inloop = True
    delay = 0.1
    _last_time = 0
    start: BoolProperty(default=True)
    def execute(self, context):
        return self.invoke(context, None)
    def invoke(self, context, event):
        # ########################### STANDARD
        # start=False means "stop the running instance".
        if not self.start:
            unregister_modal(self)
            return {'CANCELLED'}
        register_modal(self)
        context.window_manager.modal_handler_add(self)
        # ####################################
        # ####################################
        self.report({'INFO'}, "NCNC Decoder Started")
        return self.timer_add(context)
    def timer_add(self, context):
        # add to timer
        wm = context.window_manager
        self._timer = wm.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}
    def timer_remove(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
        return {'CANCELLED'}
    def modal(self, context, event):
        # ########################### STANDARD
        if not self.inloop:
            if context.area:
                context.area.tag_redraw()
            return self.timer_remove(context)
        if time.time() - self._last_time < self.delay:
            return {'PASS_THROUGH'}
        self._last_time = time.time()
        # ####################################
        # ####################################
        # !!! Bug: switching the 3D Viewport (top-left) to a Text Editor
        # etc. used to raise here. Fixed in v0.6.4.
        if not context.area:
            self.report({'WARNING'}, "Main Area Closed")
            # Fix: report type must be upper-case -- {'Info'} raises TypeError.
            self.report({'INFO'}, "You need to re-establish the connection.")
            unregister_modal(self)
            context.scene.ncnc_pr_connection.isconnected = False
            return self.timer_remove(context)
        self.ct_reg = context.area.regions
        self.pr_dev = context.scene.ncnc_pr_machine
        self.pr_con = context.scene.ncnc_pr_connection
        self.pr_com = context.scene.ncnc_pr_communication
        if not self.pr_con.isconnected:
            return self.timer_remove(context)
        # Poll "?" most ticks; every 5th idle tick poll "$G" instead.
        if not self.pr_com.isactive or self.pr_com.isrun or self.q_count < 5:
            self.decode("?")
            self.q_count += 1
        else:
            self.decode("$G")
            self.q_count = 0
            # self.decode("?")
        return {'PASS_THROUGH'}
    def decode(self, msg="?"):
        """Queue *msg* (deduplicated) and drain/parse all pending answers."""
        if msg:
            # Avoid stacking the same poll command twice in a row.
            if not (len(self.pr_com.queue_list_hidden) and self.pr_com.queue_list_hidden[-1] == msg):
                self.pr_com.set_hidden(msg)
        while 1:
            c = self.pr_com.get_answer()
            if not c:
                break
            c = c.lower()
            if c == "ok":
                # ok : command line was parsed and executed (or queued).
                continue
            elif c.startswith("error:"):
                # error:x : command line contained error code x and was purged.
                continue
            elif c.startswith("alarm"):
                self.pr_dev.status = c.upper()
                continue
            elif c.startswith("<") and c.endswith(">"):
                # < > : status report, e.g. <Idle|WPos:120.000,50.000,0.000|FS:0,0>
                self.status_report(c.strip("<>"))
                continue
            # "$G" parser state, e.g. [gc:g0 g54 g17 g21 g90 g94 m5 m9 t0 f0 s0]
            # (match computed once; the original ran findall twice)
            gc_state = re.findall(r"\[gc\:(.*)\]", c)
            if gc_state:
                self.modes(gc_state[0])
            # ############################################### RESOLVE
            # ################################################ $x=val
            for num, val in re.findall(rex_conf, c):
                if num not in dev_list:
                    continue
                # Converter for this setting: int/float/str/bool/mask/mask_s10.
                conv = dev_list[num]
                current = getattr(self.pr_dev, f"s{num}")
                # Compare floats at 4 decimals to match the reported precision.
                prop = round(current, 4) if conv is float else current
                var = conv(int(val)) if conv in [bool, mask, mask_s10] else conv(val)
                if conv is mask:
                    # BoolVectorProperty: update only the changed bits so the
                    # per-property update callbacks don't re-fire needlessly.
                    for k in range(len(var)):
                        if var[k] != prop[k]:
                            getattr(self.pr_dev, f"s{num}")[k] = var[k]
                elif var != prop:
                    # getattr/setattr replaces the old exec()-built statements.
                    setattr(self.pr_dev, f"s{num}", var)
        # Redraw the sidebar so the decoded values become visible.
        if self.ct_reg:
            for region in self.ct_reg:
                if region.type == "UI":
                    region.tag_redraw()
    def status_report(self, code):
        """Parse one "?" status line (chevrons already stripped).

        Examples:
            Idle|MPos:0.000,0.000,0.000|FS:0,0|WCO:-80.000,-50.000,0.000
            jog|wpos:90.003,50.000,0.000|bf:15,127|fs:0,0
        Status: Idle, Run, Hold, Jog, Alarm, Door, Check, Home, Sleep
        """
        codes = code.split("|")
        if len(codes):
            self.pr_dev.status = codes.pop(0).upper()
        for i in codes:
            a = i.split(":")[1].split(",")
            # Position fields: copy each coordinate into the matching vector.
            for key, var in (("mpos", self.pr_dev.mpos),
                             ("wpos", self.pr_dev.wpos),
                             ("wco", self.pr_dev.wco)):
                if key in i:
                    for j in range(len(a)):
                        var[j] = float(a[j])
            if "fs" in i:
                self.pr_dev.feed = float(a[0])
                self.pr_dev.spindle = float(a[1])
            elif "bf" in i:
                self.pr_dev.buffer = int(a[0])
                self.pr_dev.bufwer = int(a[1])
    def modes(self, code):
        """Map each word of a "$G" parser-state report onto its mode property."""
        for c in code.upper().split():
            for key, values in (("motion_mode", ("G0", "G1", "G2", "G3", "G38.2", "G38.3", "G38.4", "G38.5", "G80")),
                                ("coordinate_system", ("G54", "G55", "G56", "G57", "G58", "G59")),
                                ("plane", ("G17", "G18", "G19")),
                                ("distance_mode", ("G90", "G91")),
                                ("arc_ijk_distance", ["G91.1"]),
                                ("feed_rate_mode", ("G93", "G94")),
                                ("units_mode", ("G20", "G21")),
                                ("cutter_radius_compensation", ["G40"]),
                                ("tool_length_offset", ("G43.1", "G49")),
                                ("program_mode", ("M0", "M1", "M2", "M30")),
                                ("spindle_state", ("M3", "M4", "M5")),
                                ("coolant_state", ("M7", "M8", "M9")),
                                ):
                # Assign only on change, so update callbacks don't re-fire.
                # (getattr/setattr replaces the old exec()-built statements.)
                if c in values and getattr(self.pr_dev, key) != c:
                    setattr(self.pr_dev, key, c)
            if c.startswith("S"):
                self.pr_dev.saved_spindle = float(c[1:])
            elif c.startswith("F"):
                self.pr_dev.saved_feed = float(c[1:])
"""
>>> $$
$0 = 10 (Step pulse time, microseconds)
$1 = 25 (Step idle delay, milliseconds)
$2 = 0 (Step pulse invert, mask)
$3 = 5 (Step direction invert, mask)
$4 = 0 (Invert step enable pin, boolean)
$5 = 0 (Invert limit pins, boolean)
$6 = 0 (Invert probe pin, boolean)
$10 = 0 (Status report options, mask)
$11 = 0.010 (Junction deviation, millimeters)
$12 = 0.002 (Arc tolerance, millimeters)
$13 = 0 (Report in inches, boolean)
$20 = 0 (Soft limits enable, boolean)
$21 = 0 (Hard limits enable, boolean)
$22 = 0 (Homing cycle enable, boolean)
$23 = 0 (Homing direction invert, mask)
$24 = 25.000 (Homing locate feed rate, mm/min)
$25 = 500.000 (Homing search seek rate, mm/min)
$26 = 250 (Homing switch debounce delay, milliseconds)
$27 = 1.000 (Homing switch pull-off distance, millimeters)
$30 = 1000 (Maximum spindle speed, RPM)
$31 = 0 (Minimum spindle speed, RPM)
$32 = 0 (Laser-mode enable, boolean)
$100 = 800.000 (X-axis travel resolution, step/mm)
$101 = 800.000 (Y-axis travel resolution, step/mm)
$102 = 800.000 (Z-axis travel resolution, step/mm)
$110 = 500.000 (X-axis maximum rate, mm/min)
$111 = 500.000 (Y-axis maximum rate, mm/min)
$112 = 500.000 (Z-axis maximum rate, mm/min)
$120 = 10.000 (X-axis acceleration, mm/sec^2)
$121 = 10.000 (Y-axis acceleration, mm/sec^2)
$122 = 10.000 (Z-axis acceleration, mm/sec^2)
$130 = 200.000 (X-axis maximum travel, millimeters)
$131 = 200.000 (Y-axis maximum travel, millimeters)
$132 = 200.000 (Z-axis maximum travel, millimeters)
>>> $G
[GC:G0 G54 G17 G21 G90 G94 M5 M9 T0 F0 S0]
"""
# #################################
# #################################
# #################################
class NCNC_PR_Machine(PropertyGroup):
# ################################################ ?
status: StringProperty(name="Status")
"""IDLE, JOG, RUN, ALARM:0.., HOLD:0.., DOOR:0..,"""
wco: FloatVectorProperty(
name="WCO",
subtype='XYZ',
default=[0.0, 0.0, 0.0]
)
def wpos_update(self, context):
if self.pos_type == "mpos":
for i in range(3):
self.mpos[i] = self.wpos[i] + self.wco[i]
# Workspace Position
wpos: FloatVectorProperty(
name="WPos",
subtype='XYZ',
update=wpos_update,
default=[0.0, 0.0, 0.0]
)
def mpos_update(self, context):
if self.pos_type == "wpos":
for i in range(3):
self.wpos[i] = self.mpos[i] - self.wco[i]
# Machine Position
mpos: FloatVectorProperty(
name="MPos",
subtype='XYZ',
update=mpos_update,
default=[0.0, 0.0, 0.0],
)
feed: FloatProperty(
name="Feed",
default=0,
precision=1,
description="Feed Rate (Current)"
)
spindle: FloatProperty(
name="Spindle",
default=0,
precision=1,
description="Spindle (Current)"
)
saved_feed: FloatProperty(
name="&Feed",
default=0,
precision=1,
description="Feed Rate (Saved) - Only read"
)
saved_spindle: FloatProperty(
name="Saved Spindle",
default=0,
precision=1,
description="Spindle (Saved) - Only read"
)
buffer: IntProperty(
name="Buffer",
default=15,
description="""Buffer State:
Bf:15,128. The first value is the number of available blocks in the planner buffer and the second is number of available bytes in the serial RX buffer.
The usage of this data is generally for debugging an interface, but is known to be used to control some GUI-specific tasks. While this is disabled by default, GUIs should expect this data field to appear, but they may ignore it, if desired.
NOTE: The buffer state values changed from showing "in-use" blocks or bytes to "available". This change does not require the GUI knowing how many block/bytes Grbl has been compiled with.
This data field appears:
In every status report when enabled. It is disabled in the settings mask by default.
This data field will not appear if:
It is disabled by the $ status report mask setting or disabled in the config.h file.
""")
bufwer: IntProperty(
name="Buffer Answer on Machine",
default=15,
description="""Buffer State:
Bf:15,128. The first value is the number of available blocks in the planner buffer and the second is number of available bytes in the serial RX buffer.
The usage of this data is generally for debugging an interface, but is known to be used to control some GUI-specific tasks. While this is disabled by default, GUIs should expect this data field to appear, but they may ignore it, if desired.
NOTE: The buffer state values changed from showing "in-use" blocks or bytes to "available". This change does not require the GUI knowing how many block/bytes Grbl has been compiled with.
This data field appears:
In every status report when enabled. It is disabled in the settings mask by default.
This data field will not appear if:
It is disabled by the $ status report mask setting or disabled in the config.h file.
""")
# ########################################################################## $0
def s0_update(self, context):
context.scene.ncnc_pr_communication.send_in_order(f"$0={self.s0}")
s0: IntProperty(
name="Step pulse (µs)",
default=10,
min=1,
max=255,
subtype='TIME',
update=s0_update,
description="""$0 – Step pulse, microseconds
Stepper drivers are rated for a certain minimum step pulse length.
Check the data sheet or just try some numbers. You want the shortest
pulses the stepper drivers can reliably recognize. If the pulses are
too long, you might run into trouble when running the system at very
high feed and pulse rates, because the step pulses can begin to
overlap each other. We recommend something around 10 microseconds,
which is the default value.""")
# ########################################################################## $1
def s1_update(self, context):
context.scene.ncnc_pr_communication.send_in_order(f"$1={self.s1}")
s1: IntProperty(
name="Step idle delay (ms)",
default=25,
min=0,
max=255,
update=s1_update,
description="""$1 - Step idle delay, milliseconds
Every time your steppers complete a motion and come to a stop, Grbl will delay disabling the steppers by this value. OR, you can always keep your axes enabled (powered so as to hold position) by setting this value to the maximum 255 milliseconds. Again, just to repeat, you can keep all axes always enabled by setting $1=255.
The stepper idle lock time is the time length Grbl will keep the steppers locked before disabling. Depending on the system, you can set this to zero and disable it. On others, you may need 25-50 milliseconds to make sure your axes come to a complete stop before disabling. This is to help account for machine motors that do not like to be left on for long periods of time without doing something. Also, keep in mind that some stepper drivers don't remember which micro step they stopped on, so when you re-enable, you may witness some 'lost' steps due to this. In this case, just keep your steppers enabled via $1=255.""")
# ########################################################################## $2
def s2_update(self, context):
a = 0
if self.s2[0]:
a += 1
if self.s2[1]:
a += 2
if self.s2[2]:
a += 4
context.scene.ncnc_pr_communication.send_in_order(f"$2={a}")
s2: BoolVectorProperty(
name="Step Port", # Invert
default=[False, False, False],
subtype='XYZ',
update=s2_update,
description="""$2 – Step port invert, mask
This setting inverts the step pulse signal. By default, a step signal starts at normal-low and goes high upon a step pulse event. After a step pulse time set by $0, the pin resets to low, until the next step pulse event. When inverted, the step pulse behavior switches from normal-high, to low during the pulse, and back to high. Most users will not need to use this setting, but this can be useful for certain CNC-stepper drivers that have peculiar requirements. For example, an artificial delay between the direction pin and step pulse can be created by inverting the step pin.
This invert mask setting is a value which stores the axes to invert as bit flags. You really don't need to completely understand how it works. You simply need to enter the settings value for the axes you want to invert. For example, if you want to invert the X and Z axes, you'd send $2=5 to Grbl and the setting should now read $2=5 (step port invert mask:00000101)""")
"""
Setting Value Mask Invert X Invert Y Invert Z
0 00000000 N N N
1 00000001 Y N N
2 00000010 N Y N
3 00000011 Y Y N
4 00000100 N N Y
5 00000101 Y N Y
6 00000110 N Y Y
7 00000111 Y Y Y
"""
# ########################################################################## $3
def s3_update(self, context):
a = 0
if self.s3[0]:
a += 1
if self.s3[1]:
a += 2
if self.s3[2]:
a += 4
context.scene.ncnc_pr_communication.send_in_order(f"$3={a}")
s3: BoolVectorProperty(
name="Direction Port", # Invert
default=[True, False, True],
subtype='XYZ',
update=s3_update,
description="""$3 – Direction port invert, mask
This setting inverts the direction signal for each axis. By default, Grbl assumes that the axes move in a positive direction when the direction pin signal is low, and a negative direction when the pin is high. Often, axes don't move this way with some machines. This setting will invert the direction pin signal for those axes that move the opposite way.
This invert mask setting works exactly like the step port invert mask and stores which axes to invert as bit flags. To configure this setting, you simply need to send the value for the axes you want to invert. Use the table above. For example, if want to invert the Y axis direction only, you'd send $3=2 to Grbl and the setting should now read $3=2 (dir port invert mask:00000010)""")
"""
Setting Value Mask Invert X Invert Y Invert Z
0 00000000 N N N
1 00000001 Y N N
2 00000010 N Y N
3 00000011 Y Y N
4 00000100 N N Y
5 00000101 Y N Y
6 00000110 N Y Y
7 00000111 Y Y Y
"""
# ########################################################################## $4
def s4_update(self, context):
context.scene.ncnc_pr_communication.send_in_order(f"$4={1 if self.s4 else 0}")
s4: BoolProperty(
name="$4 - Step enable invert",
default=False,
update=s4_update,
description="""$4 - Step enable invert, boolean
By default, the stepper enable pin is high to disable and low to enable. If your setup needs the opposite, just invert the stepper enable pin by typing $4=1. Disable with $4=0. (May need a power cycle to load the change.)""")
# ########################################################################## $5
def s5_update(self, context):
    """Relay the $5 (limit pins invert) flag to the controller as 1/0."""
    state = 1 if self.s5 else 0
    context.scene.ncnc_pr_communication.send_in_order("$5=%d" % state)
# $5 flag; s5_update transmits "$5=1" / "$5=0" whenever it is toggled.
s5: BoolProperty(
    name="$5 - Limit pins invert",
    default=False,
    update=s5_update,
    description="""$5 - Limit pins invert, boolean
By default, the limit pins are held normally-high with the Arduino's internal pull-up resistor. When a limit pin is low, Grbl interprets this as triggered. For the opposite behavior, just invert the limit pins by typing $5=1. Disable with $5=0. You may need a power cycle to load the change.
NOTE: For more advanced usage, the internal pull-up resistor on the limit pins may be disabled in config.h.""")
# ########################################################################## $6
def s6_update(self, context):
    """Relay the $6 (probe pin invert) flag to the controller as 1/0."""
    state = 1 if self.s6 else 0
    context.scene.ncnc_pr_communication.send_in_order("$6=%d" % state)
# $6 flag; s6_update transmits "$6=1" / "$6=0" whenever it is toggled.
s6: BoolProperty(
    name="$6 - Probe pin invert",
    default=False,
    update=s6_update,
    description="""$6 - Probe pin invert, boolean
By default, the probe pin is held normally-high with the Arduino's internal pull-up resistor. When the probe pin is low, Grbl interprets this as triggered. For the opposite behavior, just invert the probe pin by typing $6=1. Disable with $6=0. You may need a power cycle to load the change.""")
# ########################################################################## $10
def s10_update(self, context):
    """Force the $10 status-report mask back to 2 whenever it drifts.

    The add-on only supports report mask 2, so any other value entered by
    the user is immediately overridden on the controller.
    """
    if self.s10 == 2:
        return
    context.scene.ncnc_pr_communication.send_in_order("$10=2")
# $10 status-report mask. NOTE: s10_update always forces the value back to 2
# on the device, regardless of what is entered here.
s10: IntProperty(
    name="$10 - Status report, mask",
    default=2,
    min=0,
    max=255,
    description="$10 - Status report, mask\n0:WPos, 1:MPos, 2:Buf",
    update=s10_update
)
# Not CNC Configuration, only select for UI
# UI-only display selector; it shares s10_update, so switching modes just
# re-asserts $10=2 on the controller.
pos_type: EnumProperty(
    name="Select Position Mode for Display",
    description="$10 - Status report",  # 0:WPos, 1:MPos, 2:Buf
    default="wpos",
    update=s10_update,
    items=[("wpos", "WPos", "Working Position"),  # "MATPLANE", "SNAP_GRID"
           ("mpos", "MPos", "Machine Position"),  # "ORIENTATION_LOCAL"
           ])
"""
$10 --> '?' query. Get Position Info
Position Type 0 Enable WPos: Disable MPos:.
Position Type 1 Enable MPos:. Disable WPos:.
Buffer Data 2 Enabled Buf: field appears with planner and serial RX available buffer.
"""
# ########################################################################## $11
def s11_update(self, context):
    """Transmit $11 (junction deviation, mm) rounded to 4 decimals."""
    value = round(self.s11, 4)
    context.scene.ncnc_pr_communication.send_in_order("$11={}".format(value))
# $11 value in mm; s11_update sends it rounded to 4 decimals.
s11: FloatProperty(
    name="Junction deviation (mm)",
    default=0.010,
    precision=3,
    update=s11_update,
    description="""$11 - Junction deviation, mm
Junction deviation is used by the acceleration manager to determine how fast it can move through line segment junctions of a G-code program path. For example, if the G-code path has a sharp 10 degree turn coming up and the machine is moving at full speed, this setting helps determine how much the machine needs to slow down to safely go through the corner without losing steps.
How we calculate it is a bit complicated, but, in general, higher values gives faster motion through corners, while increasing the risk of losing steps and positioning. Lower values makes the acceleration manager more careful and will lead to careful and slower cornering. So if you run into problems where your machine tries to take a corner too fast, decrease this value to make it slow down when entering corners. If you want your machine to move faster through junctions, increase this value to speed it up. For curious people, hit this link to read about Grbl's cornering algorithm, which accounts for both velocity and junction angle with a very simple, efficient, and robust method.""")
# ########################################################################## $12
def s12_update(self, context):
    """Transmit $12 (arc tolerance, mm) rounded to 4 decimals."""
    value = round(self.s12, 4)
    context.scene.ncnc_pr_communication.send_in_order("$12={}".format(value))
# $12 value in mm; s12_update sends it rounded to 4 decimals.
s12: FloatProperty(
    name="Arc tolerance (mm)",
    default=0.002,
    precision=3,
    update=s12_update,
    description="""$12 – Arc tolerance, mm
Grbl renders G2/G3 circles, arcs, and helices by subdividing them into teeny tiny lines, such that the arc tracing accuracy is never below this value. You will probably never need to adjust this setting, since 0.002mm is well below the accuracy of most all CNC machines. But if you find that your circles are too crude or arc tracing is performing slowly, adjust this setting. Lower values give higher precision but may lead to performance issues by overloading Grbl with too many tiny lines. Alternately, higher values traces to a lower precision, but can speed up arc performance since Grbl has fewer lines to deal with.
For the curious, arc tolerance is defined as the maximum perpendicular distance from a line segment with its end points lying on the arc, aka a chord. With some basic geometry, we solve for the length of the line segments to trace the arc that satisfies this setting. Modeling arcs in this way is great, because the arc line segments automatically adjust and scale with length to ensure optimum arc tracing performance, while never losing accuracy.""")
# ########################################################################## $13
def s13_update(self, context):
    """Transmit the $13 report-unit selection (enum stores the raw value)."""
    context.scene.ncnc_pr_communication.send_in_order("$13=" + self.s13)
# $13 unit mode stored as the string "0" (mm) or "1" (inch); s13_update
# forwards the raw enum value.
s13: EnumProperty(
    items=[("0", "0: mm", ""),
           ("1", "1: inch", ""),
           ],
    name="Unit Mode",
    default="0",
    update=s13_update,
    description="""$13 - Report inches, boolean
Grbl has a real-time positioning reporting feature to provide a user feedback on where the machine is exactly at that time, as well as, parameters for coordinate offsets and probing. By default, it is set to report in mm, but by sending a $13=1 command, you send this boolean flag to true and these reporting features will now report in inches. $13=0 to set back to mm.""")
# ########################################################################## $20
def s20_update(self, context):
    """Relay the $20 (soft limits) flag to the controller as 1/0."""
    state = 1 if self.s20 else 0
    context.scene.ncnc_pr_communication.send_in_order("$20=%d" % state)
# $20 flag; s20_update transmits "$20=1" / "$20=0" whenever it is toggled.
s20: BoolProperty(
    name="$20 - Soft limits",
    default=False,
    update=s20_update,
    description="""$20 - Soft limits, boolean
Soft limits is a safety feature to help prevent your machine from traveling too far and beyond the limits of travel, crashing or breaking something expensive. It works by knowing the maximum travel limits for each axis and where Grbl is in machine coordinates. Whenever a new G-code motion is sent to Grbl, it checks whether or not you accidentally have exceeded your machine space. If you do, Grbl will issue an immediate feed hold wherever it is, shutdown the spindle and coolant, and then set the system alarm indicating the problem. Machine position will be retained afterwards, since it's not due to an immediate forced stop like hard limits.
NOTE: Soft limits requires homing to be enabled and accurate axis maximum travel settings, because Grbl needs to know where it is. $20=1 to enable, and $20=0 to disable.""")
# ########################################################################## $21
def s21_update(self, context):
    """Relay the $21 (hard limits) flag to the controller as 1/0."""
    state = 1 if self.s21 else 0
    context.scene.ncnc_pr_communication.send_in_order("$21=%d" % state)
# $21 flag; s21_update transmits "$21=1" / "$21=0" whenever it is toggled.
s21: BoolProperty(
    name="$21 - Hard limits",
    default=False,
    update=s21_update,
    description="""$21 - Hard limits, boolean
Hard limit work basically the same as soft limits, but use physical switches instead. Basically you wire up some switches (mechanical, magnetic, or optical) near the end of travel of each axes, or where ever you feel that there might be trouble if your program moves too far to where it shouldn't. When the switch triggers, it will immediately halt all motion, shutdown the coolant and spindle (if connected), and go into alarm mode, which forces you to check your machine and reset everything.
To use hard limits with Grbl, the limit pins are held high with an internal pull-up resistor, so all you have to do is wire in a normally-open switch with the pin and ground and enable hard limits with $21=1. (Disable with $21=0.) We strongly advise taking electric interference prevention measures. If you want a limit for both ends of travel of one axes, just wire in two switches in parallel with the pin and ground, so if either one of them trips, it triggers the hard limit.
Keep in mind, that a hard limit event is considered to be critical event, where steppers immediately stop and will have likely have lost steps. Grbl doesn't have any feedback on position, so it can't guarantee it has any idea where it is. So, if a hard limit is triggered, Grbl will go into an infinite loop ALARM mode, giving you a chance to check your machine and forcing you to reset Grbl. Remember it's a purely a safety feature.""")
# ########################################################################## $22
def s22_update(self, context):
    """Relay the $22 (homing cycle) flag to the controller as 1/0."""
    state = 1 if self.s22 else 0
    context.scene.ncnc_pr_communication.send_in_order("$22=%d" % state)
# $22 flag; s22_update transmits "$22=1" / "$22=0" whenever it is toggled.
s22: BoolProperty(
    name="$22 - Homing cycle",
    default=False,
    update=s22_update,
    description="""$22 - Homing cycle, boolean
Ahh, homing. For those just initiated into CNC, the homing cycle is used to accurately and precisely locate a known and consistent position on a machine every time you start up your Grbl between sessions. In other words, you know exactly where you are at any given time, every time. Say you start machining something or are about to start the next step in a job and the power goes out, you re-start Grbl and Grbl has no idea where it is due to steppers being open-loop control. You're left with the task of figuring out where you are. If you have homing, you always have the machine zero reference point to locate from, so all you have to do is run the homing cycle and resume where you left off.
To set up the homing cycle for Grbl, you need to have limit switches in a fixed position that won't get bumped or moved, or else your reference point gets messed up. Usually they are setup in the farthest point in +x, +y, +z of each axes. Wire your limit switches in with the limit pins, add a recommended RC-filter to help reduce electrical noise, and enable homing. If you're curious, you can use your limit switches for both hard limits AND homing. They play nice with each other.
Prior to trying the homing cycle for the first time, make sure you have setup everything correctly, otherwise homing may behave strangely. First, ensure your machine axes are moving in the correct directions per Cartesian coordinates (right-hand rule). If not, fix it with the $3 direction invert setting. Second, ensure your limit switch pins are not showing as 'triggered' in Grbl's status reports. If are, check your wiring and settings. Finally, ensure your $13x max travel settings are somewhat accurate (within 20%), because Grbl uses these values to determine how far it should search for the homing switches.
By default, Grbl's homing cycle moves the Z-axis positive first to clear the workspace and then moves both the X and Y-axes at the same time in the positive direction. To set up how your homing cycle behaves, there are more Grbl settings down the page describing what they do (and compile-time options as well.)
Also, one more thing to note, when homing is enabled. Grbl will lock out all G-code commands until you perform a homing cycle. Meaning no axes motions, unless the lock is disabled ($X) but more on that later. Most, if not all CNC controllers, do something similar, as it is mostly a safety feature to prevent users from making a positioning mistake, which is very easy to do and be saddened when a mistake ruins a part. If you find this annoying or find any weird bugs, please let us know and we'll try to work on it so everyone is happy. :)
NOTE: Check out config.h for more homing options for advanced users. You can disable the homing lockout at startup, configure which axes move first during a homing cycle and in what order, and more.""")
# ########################################################################## $23
def s23_update(self, context):
    """Send the $23 (homing direction invert) bitmask when the vector changes."""
    # Pack the X/Y/Z booleans into one Grbl bitmask: bit 0 = X, bit 1 = Y, bit 2 = Z.
    mask = sum(1 << bit for bit, inverted in enumerate(self.s23) if inverted)
    context.scene.ncnc_pr_communication.send_in_order(f"$23={mask}")
# $23 mask edited as three XYZ booleans; s23_update packs them (X=1, Y=2, Z=4)
# and sends "$23=<mask>" to the controller.
s23: BoolVectorProperty(
    name="Homing Dir",  # Invert
    default=[False, False, False],
    subtype='XYZ',
    update=s23_update,
    description="""$23 - Homing dir invert, mask
By default, Grbl assumes your homing limit switches are in the positive direction, first moving the z-axis positive, then the x-y axes positive before trying to precisely locate machine zero by going back and forth slowly around the switch. If your machine has a limit switch in the negative direction, the homing direction mask can invert the axes' direction. It works just like the step port invert and direction port invert masks, where all you have to do is send the value in the table to indicate what axes you want to invert and search for in the opposite direction.""")
# ########################################################################## $24
def s24_update(self, context):
    """Transmit $24 (homing feed, mm/min) rounded to 4 decimals."""
    value = round(self.s24, 4)
    context.scene.ncnc_pr_communication.send_in_order("$24={}".format(value))
# $24 value in mm/min; s24_update sends it rounded to 4 decimals.
s24: FloatProperty(
    name="Homing feed (mm/min)",
    default=25.000,
    precision=3,
    update=s24_update,
    description="""$24 - Homing feed, mm/min
The homing cycle first searches for the limit switches at a higher seek rate, and after it finds them, it moves at a slower feed rate to home into the precise location of machine zero. Homing feed rate is that slower feed rate. Set this to whatever rate value that provides repeatable and precise machine zero locating.""")
# ########################################################################## $25
def s25_update(self, context):
    """Transmit $25 (homing seek, mm/min) rounded to 4 decimals."""
    value = round(self.s25, 4)
    context.scene.ncnc_pr_communication.send_in_order("$25={}".format(value))
# $25 value in mm/min; s25_update sends it rounded to 4 decimals.
s25: FloatProperty(
    name="Homing seek (mm/min)",
    default=500.000,
    precision=3,
    update=s25_update,
    description="""$25 - Homing seek, mm/min
Homing seek rate is the homing cycle search rate, or the rate at which it first tries to find the limit switches. Adjust to whatever rate gets to the limit switches in a short enough time without crashing into your limit switches if they come in too fast.""")
# ########################################################################## $26
def s26_update(self, context):
    """Transmit the $26 homing debounce delay (milliseconds)."""
    context.scene.ncnc_pr_communication.send_in_order("$26=" + str(self.s26))
# $26 value in milliseconds; s26_update sends the raw integer.
s26: IntProperty(
    name="Homing debounce (ms)",
    default=250,
    min=10,
    max=1000,
    subtype='TIME',
    update=s26_update,
    description="""$26 - Homing debounce, milliseconds
Whenever a switch triggers, some of them can have electrical/mechanical noise that actually 'bounce' the signal high and low for a few milliseconds before settling in. To solve this, you need to debounce the signal, either by hardware with some kind of signal conditioner or by software with a short delay to let the signal finish bouncing. Grbl performs a short delay, only homing when locating machine zero. Set this delay value to whatever your switch needs to get repeatable homing. In most cases, 5-25 milliseconds is fine.""")
# ########################################################################## $27
def s27_update(self, context):
    """Transmit $27 (homing pull-off, mm) rounded to 4 decimals."""
    value = round(self.s27, 4)
    context.scene.ncnc_pr_communication.send_in_order("$27={}".format(value))
# $27 value in mm; s27_update sends it rounded to 4 decimals.
s27: FloatProperty(
    name="Homing pull-off (mm)",
    default=1.000,
    precision=3,
    update=s27_update,
    description="""$27 - Homing pull-off, mm
To play nice with the hard limits feature, where homing can share the same limit switches, the homing cycle will move off all of the limit switches by this pull-off travel after it completes. In other words, it helps to prevent accidental triggering of the hard limit after a homing cycle. Make sure this value is large enough to clear the limit switch. If not, Grbl will throw an alarm error for failing to clear it.""")
# ########################################################################## $30
def s30_update(self, context):
    """Transmit the $30 maximum spindle speed (RPM)."""
    context.scene.ncnc_pr_communication.send_in_order("$30=" + str(self.s30))
# $30 value in RPM; s30_update sends the raw integer.
s30: IntProperty(
    name="Max spindle speed (RPM)",
    default=1000,
    min=0,
    max=25000,
    subtype='ANGLE',
    update=s30_update,
    description="""$30 - Max spindle speed, RPM
This sets the spindle speed for the maximum 5V PWM pin output. For example, if you want to set 10000rpm at 5V, program $30=10000. For 255rpm at 5V, program $30=255. If a program tries to set a higher spindle RPM greater than the $30 max spindle speed, Grbl will just output the max 5V, since it can't go any faster. By default, Grbl linearly relates the max-min RPMs to 5V-0.02V PWM pin output in 255 equally spaced increments. When the PWM pin reads 0V, this indicates spindle disabled. Note that there are additional configuration options are available in config.h to tweak how this operates.""")
# ########################################################################## $31
def s31_update(self, context):
    """Transmit the $31 minimum spindle speed (RPM)."""
    context.scene.ncnc_pr_communication.send_in_order("$31=" + str(self.s31))
# $31 value in RPM; s31_update sends the raw integer.
s31: IntProperty(
    name="Min spindle speed (RPM)",
    default=0,
    min=0,
    max=25000,
    subtype='ANGLE',
    update=s31_update,
    description="""$31 - Min spindle speed, RPM
This sets the spindle speed for the minimum 0.02V PWM pin output (0V is disabled). Lower RPM values are accepted by Grbl but the PWM output will not go below 0.02V, except when RPM is zero. If zero, the spindle is disabled and PWM output is 0V.""")
# ########################################################################## $32
def s32_update(self, context):
    """Relay the $32 (laser mode) flag to the controller as 1/0."""
    state = 1 if self.s32 else 0
    context.scene.ncnc_pr_communication.send_in_order("$32=%d" % state)
# $32 flag; s32_update transmits "$32=1" / "$32=0" whenever it is toggled.
s32: BoolProperty(
    name="$32 - Laser mode",
    default=False,
    update=s32_update,
    description="""$32 - Laser mode, boolean
When enabled, Grbl will move continuously through consecutive G1, G2, or G3 motion commands when programmed with a S spindle speed (laser power). The spindle PWM pin will be updated instantaneously through each motion without stopping. Please read the GRBL laser documentation and your laser machine documentation prior to using this mode. Lasers are very dangerous. They can instantly damage your vision permanantly and cause fires. Grbl does not assume any responsibility for any issues the firmware may cause, as defined by its GPL license.
When disabled, Grbl will operate as it always has, stopping motion with every S spindle speed command. This is the default operation of a milling machine to allow a pause to let the spindle change speeds.""")
# ########################################################################## $100
def s100_update(self, context):
    """Transmit $100 (X steps/mm) rounded to 3 decimals."""
    value = round(self.s100, 3)
    context.scene.ncnc_pr_communication.send_in_order("$100={}".format(value))
# $100 X-axis steps/mm; s100_update sends it rounded to 3 decimals.
s100: FloatProperty(
    name="X",
    default=800.000,
    precision=3,
    update=s100_update,
    description="""X-axis travel resolution, step/mm
$100, $101 and $102 – [X,Y,Z] steps/mm
Grbl needs to know how far each step will take the tool in reality. To calculate steps/mm for an axis of your machine you need to know:
The mm traveled per revolution of your stepper motor. This is dependent on your belt drive gears or lead screw pitch.
The full steps per revolution of your steppers (typically 200)
The microsteps per step of your controller (typically 1, 2, 4, 8, or 16). Tip: Using high microstep values (e.g., 16) can reduce your stepper motor torque, so use the lowest that gives you the desired axis resolution and comfortable running properties.
The steps/mm can then be calculated like this: steps_per_mm = (steps_per_revolution*microsteps)/mm_per_rev
Compute this value for every axis and write these settings to Grbl.""")
# ########################################################################## $101
def s101_update(self, context):
    """Transmit $101 (Y steps/mm) rounded to 3 decimals."""
    value = round(self.s101, 3)
    context.scene.ncnc_pr_communication.send_in_order("$101={}".format(value))
# $101 Y-axis steps/mm; s101_update sends it rounded to 3 decimals.
s101: FloatProperty(
    name="Y",
    default=800.000,
    precision=3,
    update=s101_update,
    description="""Y-axis travel resolution, step/mm
$100, $101 and $102 – [X,Y,Z] steps/mm
Grbl needs to know how far each step will take the tool in reality. To calculate steps/mm for an axis of your machine you need to know:
The mm traveled per revolution of your stepper motor. This is dependent on your belt drive gears or lead screw pitch.
The full steps per revolution of your steppers (typically 200)
The microsteps per step of your controller (typically 1, 2, 4, 8, or 16). Tip: Using high microstep values (e.g., 16) can reduce your stepper motor torque, so use the lowest that gives you the desired axis resolution and comfortable running properties.
The steps/mm can then be calculated like this: steps_per_mm = (steps_per_revolution*microsteps)/mm_per_rev
Compute this value for every axis and write these settings to Grbl.""")
# ########################################################################## $102
def s102_update(self, context):
    """Transmit $102 (Z steps/mm) rounded to 3 decimals."""
    value = round(self.s102, 3)
    context.scene.ncnc_pr_communication.send_in_order("$102={}".format(value))
# $102 Z-axis steps/mm; s102_update sends it rounded to 3 decimals.
# FIX: the UI label previously read "Y" (a copy-paste slip from s101); this
# property drives the Z axis, as its own description and the s100="X",
# s101="Y" siblings show.
s102: FloatProperty(
    name="Z",
    default=800.000,
    precision=3,
    update=s102_update,
    description="""Z-axis travel resolution, step/mm
$100, $101 and $102 – [X,Y,Z] steps/mm
Grbl needs to know how far each step will take the tool in reality. To calculate steps/mm for an axis of your machine you need to know:
The mm traveled per revolution of your stepper motor. This is dependent on your belt drive gears or lead screw pitch.
The full steps per revolution of your steppers (typically 200)
The microsteps per step of your controller (typically 1, 2, 4, 8, or 16). Tip: Using high microstep values (e.g., 16) can reduce your stepper motor torque, so use the lowest that gives you the desired axis resolution and comfortable running properties.
The steps/mm can then be calculated like this: steps_per_mm = (steps_per_revolution*microsteps)/mm_per_rev
Compute this value for every axis and write these settings to Grbl.""")
# ########################################################################## $110
def s110_update(self, context):
    """Transmit $110 (X max rate, mm/min) rounded to 3 decimals."""
    value = round(self.s110, 3)
    context.scene.ncnc_pr_communication.send_in_order("$110={}".format(value))
# $110 X-axis max rate in mm/min; s110_update sends it rounded to 3 decimals.
s110: FloatProperty(
    name="X",
    default=500.000,
    precision=3,
    update=s110_update,
    description="""X-axis maximum rate, mm/min
$110, $111 and $112 – [X,Y,Z] Max rate, mm/min
This sets the maximum rate each axis can move. Whenever Grbl plans a move, it checks whether or not the move causes any one of these individual axes to exceed their max rate. If so, it'll slow down the motion to ensure none of the axes exceed their max rate limits. This means that each axis has its own independent speed, which is extremely useful for limiting the typically slower Z-axis.
The simplest way to determine these values is to test each axis one at a time by slowly increasing max rate settings and moving it. For example, to test the X-axis, send Grbl something like G0 X50 with enough travel distance so that the axis accelerates to its max speed. You'll know you've hit the max rate threshold when your steppers stall. It'll make a bit of noise, but shouldn't hurt your motors. Enter a setting a 10-20% below this value, so you can account for wear, friction, and the mass of your workpiece/tool. Then, repeat for your other axes.
NOTE: This max rate setting also sets the G0 seek rates.""")
# ########################################################################## $111
def s111_update(self, context):
    """Transmit $111 (Y max rate, mm/min) rounded to 3 decimals."""
    value = round(self.s111, 3)
    context.scene.ncnc_pr_communication.send_in_order("$111={}".format(value))
# $111 Y-axis max rate in mm/min; s111_update sends it rounded to 3 decimals.
s111: FloatProperty(
    name="Y",
    default=500.000,
    precision=3,
    update=s111_update,
    description="""Y-axis maximum rate, mm/min
$110, $111 and $112 – [X,Y,Z] Max rate, mm/min
This sets the maximum rate each axis can move. Whenever Grbl plans a move, it checks whether or not the move causes any one of these individual axes to exceed their max rate. If so, it'll slow down the motion to ensure none of the axes exceed their max rate limits. This means that each axis has its own independent speed, which is extremely useful for limiting the typically slower Z-axis.
The simplest way to determine these values is to test each axis one at a time by slowly increasing max rate settings and moving it. For example, to test the X-axis, send Grbl something like G0 X50 with enough travel distance so that the axis accelerates to its max speed. You'll know you've hit the max rate threshold when your steppers stall. It'll make a bit of noise, but shouldn't hurt your motors. Enter a setting a 10-20% below this value, so you can account for wear, friction, and the mass of your workpiece/tool. Then, repeat for your other axes.
NOTE: This max rate setting also sets the G0 seek rates.""")
# ########################################################################## $112
def s112_update(self, context):
    """Transmit $112 (Z max rate, mm/min) rounded to 3 decimals."""
    value = round(self.s112, 3)
    context.scene.ncnc_pr_communication.send_in_order("$112={}".format(value))
# $112 Z-axis max rate in mm/min; s112_update sends it rounded to 3 decimals.
s112: FloatProperty(
    name="Z",
    default=500.000,
    precision=3,
    update=s112_update,
    description="""Z-axis maximum rate, mm/min
$110, $111 and $112 – [X,Y,Z] Max rate, mm/min
This sets the maximum rate each axis can move. Whenever Grbl plans a move, it checks whether or not the move causes any one of these individual axes to exceed their max rate. If so, it'll slow down the motion to ensure none of the axes exceed their max rate limits. This means that each axis has its own independent speed, which is extremely useful for limiting the typically slower Z-axis.
The simplest way to determine these values is to test each axis one at a time by slowly increasing max rate settings and moving it. For example, to test the X-axis, send Grbl something like G0 X50 with enough travel distance so that the axis accelerates to its max speed. You'll know you've hit the max rate threshold when your steppers stall. It'll make a bit of noise, but shouldn't hurt your motors. Enter a setting a 10-20% below this value, so you can account for wear, friction, and the mass of your workpiece/tool. Then, repeat for your other axes.
NOTE: This max rate setting also sets the G0 seek rates.""")
# ########################################################################## $120
def s120_update(self, context):
    """Transmit $120 (X acceleration, mm/sec^2) rounded to 3 decimals."""
    value = round(self.s120, 3)
    context.scene.ncnc_pr_communication.send_in_order("$120={}".format(value))
# $120 X-axis acceleration in mm/sec^2; s120_update sends it rounded to 3 decimals.
s120: FloatProperty(
    name="X",
    default=10.000,
    precision=3,
    update=s120_update,
    description="""X-axis acceleration, mm/sec^2
$120, $121, $122 – [X,Y,Z] Acceleration, mm/sec^2
This sets the axes acceleration parameters in mm/second/second. Simplistically, a lower value makes Grbl ease slower into motion, while a higher value yields tighter moves and reaches the desired feed rates much quicker. Much like the max rate setting, each axis has its own acceleration value and are independent of each other. This means that a multi-axis motion will only accelerate as quickly as the lowest contributing axis can.
Again, like the max rate setting, the simplest way to determine the values for this setting is to individually test each axis with slowly increasing values until the motor stalls. Then finalize your acceleration setting with a value 10-20% below this absolute max value. This should account for wear, friction, and mass inertia. We highly recommend that you dry test some G-code programs with your new settings before committing to them. Sometimes the loading on your machine is different when moving in all axes together.
""")
# ########################################################################## $121
def s121_update(self, context):
    """Transmit $121 (Y acceleration, mm/sec^2) rounded to 3 decimals."""
    value = round(self.s121, 3)
    context.scene.ncnc_pr_communication.send_in_order("$121={}".format(value))
# $121 Y-axis acceleration in mm/sec^2; s121_update sends it rounded to 3 decimals.
s121: FloatProperty(
    name="Y",
    default=10.000,
    precision=3,
    update=s121_update,
    description="""Y-axis acceleration, mm/sec^2
$120, $121, $122 – [X,Y,Z] Acceleration, mm/sec^2
This sets the axes acceleration parameters in mm/second/second. Simplistically, a lower value makes Grbl ease slower into motion, while a higher value yields tighter moves and reaches the desired feed rates much quicker. Much like the max rate setting, each axis has its own acceleration value and are independent of each other. This means that a multi-axis motion will only accelerate as quickly as the lowest contributing axis can.
Again, like the max rate setting, the simplest way to determine the values for this setting is to individually test each axis with slowly increasing values until the motor stalls. Then finalize your acceleration setting with a value 10-20% below this absolute max value. This should account for wear, friction, and mass inertia. We highly recommend that you dry test some G-code programs with your new settings before committing to them. Sometimes the loading on your machine is different when moving in all axes together.
""")
# ########################################################################## $122
def s122_update(self, context):
    """Transmit $122 (Z acceleration, mm/sec^2) rounded to 3 decimals."""
    value = round(self.s122, 3)
    context.scene.ncnc_pr_communication.send_in_order("$122={}".format(value))
# $122 Z-axis acceleration in mm/sec^2; s122_update sends it rounded to 3 decimals.
s122: FloatProperty(
    name="Z",
    default=10.000,
    precision=3,
    update=s122_update,
    description="""Z-axis acceleration, mm/sec^2
$120, $121, $122 – [X,Y,Z] Acceleration, mm/sec^2
This sets the axes acceleration parameters in mm/second/second. Simplistically, a lower value makes Grbl ease slower into motion, while a higher value yields tighter moves and reaches the desired feed rates much quicker. Much like the max rate setting, each axis has its own acceleration value and are independent of each other. This means that a multi-axis motion will only accelerate as quickly as the lowest contributing axis can.
Again, like the max rate setting, the simplest way to determine the values for this setting is to individually test each axis with slowly increasing values until the motor stalls. Then finalize your acceleration setting with a value 10-20% below this absolute max value. This should account for wear, friction, and mass inertia. We highly recommend that you dry test some G-code programs with your new settings before committing to them. Sometimes the loading on your machine is different when moving in all axes together.
""")
# ########################################################################## $130
def s130_update(self, context):
    """Transmit $130 (X max travel, mm) rounded to 3 decimals."""
    value = round(self.s130, 3)
    context.scene.ncnc_pr_communication.send_in_order("$130={}".format(value))
# $130 X-axis max travel in mm; s130_update sends it rounded to 3 decimals.
s130: FloatProperty(
    name="X",
    default=200.000,
    precision=3,
    update=s130_update,
    description="""X-axis maximum travel, millimeters
$130, $131, $132 – [X,Y,Z] Max travel, mm
This sets the maximum travel from end to end for each axis in mm. This is only useful if you have soft limits (and homing) enabled, as this is only used by Grbl's soft limit feature to check if you have exceeded your machine limits with a motion command.""")
# ########################################################################## $131
def s131_update(self, context):
    """Transmit $131 (Y max travel, mm) rounded to 3 decimals."""
    value = round(self.s131, 3)
    context.scene.ncnc_pr_communication.send_in_order("$131={}".format(value))
# $131 Y-axis max travel in mm; s131_update sends it rounded to 3 decimals.
s131: FloatProperty(
    name="Y",
    default=200.000,
    precision=3,
    update=s131_update,
    description="""Y-axis maximum travel, millimeters
$130, $131, $132 – [X,Y,Z] Max travel, mm
This sets the maximum travel from end to end for each axis in mm. This is only useful if you have soft limits (and homing) enabled, as this is only used by Grbl's soft limit feature to check if you have exceeded your machine limits with a motion command.""")
# ########################################################################## $132
# Callback: push the new $132 (Z max travel) value to the GRBL controller.
def s132_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"$132={round(self.s132, 3)}")
# GRBL setting $132: Z-axis maximum travel (mm); sent on change via s132_update.
s132: FloatProperty(
    name="Z",
    default=200.000,
    precision=3,
    update=s132_update,
    description="""Z-axis maximum travel, millimeters
$130, $131, $132 – [X,Y,Z] Max travel, mm
This sets the maximum travel from end to end for each axis in mm. This is only useful if you have soft limits (and homing) enabled, as this is only used by Grbl's soft limit feature to check if you have exceeded your machine limits with a motion command.""")
# ##############################################################################
# def motion_mode_update(self, context):
# context.scene.ncnc_pr_communication.send_in_order(f"{self.motion_mode}")
# Current G-code motion mode as mirrored from the controller.
# The update callback is commented out on purpose: this enum is display-only
# (see description="Only Read"); selecting a value is never sent to GRBL.
motion_mode: EnumProperty(
    name="Motion Mode",
    default="G0",
    description="Only Read",
    items=[("G0", "G0 - Rapid Move", "G0 - For rapid motion, program G0 axes, where all the axis words are "
                                     "optional. The G0 is optional if the current motion mode is G0. This will "
                                     "produce coordinated motion to the destination point at the maximum rapid "
                                     "rate (or slower). G0 is typically used as a positioning move."),
           ("G1", "G1 - Linear Move",
            "G1 - For linear (straight line) motion at programed feed "
            "rate (for cutting or not), program G1 'axes', "
            "where all the axis words are optional. The G1 is optional "
            "if the current motion mode is G1. This will produce "
            "coordinated motion to the destination point at the "
            "current feed rate (or slower)."),
           ("G2", "G2 - Clockwise Arc Move", "G2 CW - A circular or helical arc is specified "
                                             "using either G2 (clockwise arc) or G3 ("
                                             "counterclockwise arc) at the current feed rate. "
                                             "The direction (CW, CCW) is as viewed from the "
                                             "positive end of the axis about which the circular "
                                             "motion occurs."),
           ("G3", "G3 - CounterClockwise Arc Move", "G3 CCW - A circular or helical arc is "
                                                    "specified using either G2 (clockwise arc) "
                                                    "or G3 (counterclockwise arc) at the current "
                                                    "feed rate. The direction (CW, CCW) is as "
                                                    "viewed from the positive end of the axis "
                                                    "about which the circular motion occurs."),
           ("G38.2", "G38.2 - Straight Probe", "G38.2 - probe toward workpiece, stop on contact, signal error if "
                                               "failure "),
           ("G38.3", "G38.3 - Straight Probe", "G38.3 - probe toward workpiece, stop on contact "),
           ("G38.4", "G38.4 - Straight Probe", "G38.4 - probe away from workpiece, stop on loss of contact, "
                                               "signal error if failure"),
           ("G38.5", "G38.5 - Straight Probe", "G38.5 - probe away from workpiece, stop on loss of contact"),
           ("G80", "G80 - Cancel Canned Cycle", "G80 - cancel canned cycle modal motion. G80 is part of modal "
                                                "group 1, so programming any other G code from modal group 1 will"
                                                " also cancel the canned cycle. "),
           ],
    # update=motion_mode_update
)
# ##############################################################################
# Callback: send the newly selected work coordinate system (G54..G59) to GRBL.
def coordinate_system_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.coordinate_system}")
# Active work coordinate system; changing it immediately issues the G-code.
coordinate_system: EnumProperty(
    name="Coordinate System",
    default="G54",
    update=coordinate_system_update,
    items=[("G54", "G54 - System 1", "Select coordinate system 1"),
           ("G55", "G55 - System 2", "Select coordinate system 2"),
           ("G56", "G56 - System 3", "Select coordinate system 3"),
           ("G57", "G57 - System 4", "Select coordinate system 4"),
           ("G58", "G58 - System 5", "Select coordinate system 5"),
           ("G59", "G59 - System 6", "Select coordinate system 6"),
           # ("G59.1", "G59.1 - System 7", "Select coordinate system 7"),
           # ("G59.2", "G59.2 - System 8", "Select coordinate system 8"),
           # ("G59.3", "G59.3 - System 9", "Select coordinate system 9"),
           ])
# ##############################################################################
# Callback: send the newly selected distance mode (G90/G91) to GRBL.
def distance_mode_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.distance_mode}")
# Absolute vs incremental distance mode; changing it immediately issues the G-code.
distance_mode: EnumProperty(
    name="Distance Mode",
    default="G90",
    update=distance_mode_update,
    items=[("G90", "G90 - Absolute", "G90 - Absolute Distance Mode"),
           # Fixed tooltip typo: was "91 - Incremental Distance Mode".
           ("G91", "G91 - Incremental", "G91 - Incremental Distance Mode")
           ])
# ##############################################################################
# Callback: send the newly selected working plane (G17/G18/G19) to GRBL.
def plane_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.plane}")
# Plane selection for arcs and canned cycles.
plane: EnumProperty(
    name="Plane Select",
    description="These codes set the current plane",
    default="G17",
    update=plane_update,
    items=[
        ("G17", "G17 - XY", ""),
        ("G18", "G18 - ZX", ""),
        ("G19", "G19 - YZ", "")
    ])
# ##############################################################################
# Callback: send the arc IJK distance mode to GRBL (only one option exists).
def arc_ijk_distance_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.arc_ijk_distance}")
arc_ijk_distance: EnumProperty(
    name="Arc IJK Distance Mode",
    description="Arc Distance Mode",
    default="G91.1",
    update=arc_ijk_distance_update,
    items=[("G91.1", "G91.1", "G91.1 - incremental distance mode for I, J & K offsets. G91.1 Returns I, J & K to "
                              "their default behavior. ")
           ])
# ##############################################################################
# Callback: send the newly selected feed rate mode (G93/G94) to GRBL.
def feed_rate_mode_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.feed_rate_mode}")
feed_rate_mode: EnumProperty(
    name="Feed Rate Mode",
    description="",
    default="G94",
    update=feed_rate_mode_update,
    items=[
        ("G93", "G93 - Inverse Time", "G93 - is Inverse Time Mode. In inverse time feed "
                                      "rate mode, "
                                      "an F word means the move should be completed in [one divided by "
                                      "the F number] minutes. For example, if the F number is 2.0, "
                                      "the move should be completed in half a minute.\nWhen the inverse "
                                      "time feed rate mode is active, an F word must appear on every "
                                      "line which has a G1, G2, or G3 motion, and an F word on a line "
                                      "that does not have G1, G2, or G3 is ignored. Being in inverse "
                                      "time feed rate mode does not affect G0 (rapid move) motions."),
        ("G94", "G94 - Units per Minute", "G94 - is Units per Minute Mode. In units per "
                                          "minute feed mode, "
                                          "an F word is interpreted to mean the controlled point should "
                                          "move at a certain number of inches per minute, millimeters per "
                                          "minute, or degrees per minute, depending upon what length units "
                                          "are being used and which axis or axes are moving. ")
    ])
# ##############################################################################
# Callback: send the newly selected length-units mode (G20/G21) to GRBL.
def units_mode_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.units_mode}")
units_mode: EnumProperty(
    name="Units Mode",
    description="",
    default="G21",
    update=units_mode_update,
    items=[
        # Label fixed: was the truncated "G20 - inc" (description says inches).
        ("G20", "G20 - inch", "G20 - to use inches for length units."),
        ("G21", "G21 - mm", "G21 - to use millimeters for length units.")
    ])
# Cutter compensation state; only G40 (off) is offered, and no update callback
# is wired, so this enum is informational in the UI.
cutter_radius_compensation: EnumProperty(
    name="Cutter Radius Compensation",
    description="",
    default="G40",
    items=[
        ("G40", "G40", "G40 - turn cutter compensation off. If tool "
                       "compensation was on the next move must be a linear "
                       "move and longer than the tool diameter. It is OK to "
                       "turn compensation off when it is already off. ")
    ])
# ##############################################################################
# Callback: send the newly selected tool length offset mode to GRBL.
def tool_length_offset_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.tool_length_offset}")
tool_length_offset: EnumProperty(
    name="Tool Length Offset",
    description="",
    default="G49",
    update=tool_length_offset_update,
    items=[
        ("G43.1", "G43.1 - Dynamic", "G43.1 axes - change subsequent motions by "
                                     "replacing the current offset(s) of axes. G43.1 "
                                     "does not cause any motion. The next time a "
                                     "compensated axis is moved, that axis’s "
                                     "endpoint is the compensated location. "),
        ("G49", "G49 - Cancels", "It is OK to program using the same offset already "
                                "in use. It is also OK to program using no tool "
                                "length offset if none is currently being used.")
    ])
# ##############################################################################
# Callback: send the newly selected program mode (M0/M1/M2/M30) to GRBL.
def program_mode_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.program_mode}")
program_mode: EnumProperty(
    name="Program Mode",
    description="",
    default="M0",
    update=program_mode_update,
    items=[
        ("M0", "M0 - Pause", "M0 - pause a running program temporarily. CNC remains in the "
                             "Auto Mode so MDI and other manual actions are not enabled. "
                             "Pressing the resume button will restart the program at the "
                             "following line. "),
        ("M1", "M1 - Pause", "M1 - pause a running program temporarily if the optional "
                             "stop switch is on. LinuxCNC remains in the Auto Mode so MDI "
                             "and other manual actions are not enabled. Pressing the "
                             "resume button will restart the program at the following "
                             "line. "),
        ("M2", "M2 - End", 'M2 - end the program. Pressing Cycle Start ("R" in the Axis '
                           'GUI) will restart the program at the beginning of the file. '),
        ("M30", "M30 - End", "M30 - exchange pallet shuttles and end the program. Pressing "
                             "Cycle Start will start the program at the beginning of the "
                             "file. ")
    ])
# ##############################################################################
# Callback: send the newly selected spindle state (M3/M4/M5) to GRBL.
def spindle_state_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.spindle_state}")
spindle_state: EnumProperty(
    name="Spindle State",
    description="Spindle Control",
    default="M5",
    update=spindle_state_update,
    items=[
        ("M3", "M3 - Start CW", "M3 - start the spindle clockwise at the S speed."),
        ("M4", "M4 - Start CCW", "M4 - start the spindle counterclockwise at the S speed."),
        ("M5", "M5 - Stop", "M5 - stop the spindle. ")
    ])
# ##############################################################################
# Callback: send the newly selected coolant state (M7/M8/M9) to GRBL.
def coolant_state_update(self, context):
    context.scene.ncnc_pr_communication.send_in_order(f"{self.coolant_state}")
coolant_state: EnumProperty(
    name="Coolant State",
    description="",
    default="M9",
    update=coolant_state_update,
    items=[
        ("M7", "M7 - turn mist coolant on", "M7 - turn mist coolant on. M7 controls "
                                            "iocontrol.0.coolant-mist pin. "),
        ("M8", "M8 - turn flood coolant on", "M8 - turn flood coolant on. M8 controls "
                                             "iocontrol.0.coolant-flood pin."),
        ("M9", "M9 - turn off", "M9 - turn both M7 and M8 off. ")
    ])
@classmethod
def register(cls):
    # Attach this PropertyGroup to every Scene as scene.ncnc_pr_machine.
    Scene.ncnc_pr_machine = PointerProperty(
        name="NCNC_PR_Machine Name",
        description="NCNC_PR_Machine Description",
        type=cls)

@classmethod
def unregister(cls):
    # Remove the Scene attachment created in register().
    del Scene.ncnc_pr_machine
class NCNC_OT_Machine(Operator):
    """Top-level machine actions: run a G-code text, soft reset, and the
    single-shot GRBL real-time commands (resume, hold, door, cancel,
    unlock, sleep)."""
    bl_idname = "ncnc.machine"
    bl_label = "Machine Controls"
    bl_description = "Machine Controllers"
    bl_options = {'REGISTER'}

    action: EnumProperty(items=[
        ("bos", "", ""),
        ("reset", "Soft Reset", "Immediately halts and safely resets Grbl without a power-cycle."
                                "Accepts and executes this command at any time."),
        ("resume", "Cycle Start / Resume", "Resumes a feed hold, a safety door/parking state when the door is closed, "
                                           "and the M0 program pause states."),
        ("hold", "Feed Hold", "Places Grbl into a suspend or HOLD state. If in motion, the machine will decelerate to "
                              "a stop and then be suspended.Command executes when Grbl is in an IDLE, RUN, "
                              "or JOG state. It is otherwise ignored."),
        ("door", "Safety Door", "Although typically connected to an input pin to detect the opening of a safety door, "
                                "this command allows a GUI to enact the safety door behavior with this command."),
        ("cancel", "Jog Cancel", "Immediately cancels the current jog state by a feed hold and automatically flushing "
                                 "any remaining jog commands in the buffer. Command is ignored, if not in a JOG state "
                                 "or if jog cancel is already invoked and in-process. Grbl will return to the IDLE "
                                 "state or the DOOR state, if the safety door was detected as ajar during the "
                                 "cancel."),
        ("unlock", "Kill alarm lock", "Grbl's alarm mode is a state when something has gone critically wrong, "
                                      "such as a hard limit or an abort during a cycle, or if Grbl doesn't know its "
                                      "position. By default, if you have homing enabled and power-up the Arduino, "
                                      "Grbl enters the alarm state, because it does not know its position. The alarm "
                                      "mode will lock all G-code commands until the '$H' homing cycle has been "
                                      "performed. Or if a user needs to override the alarm lock to move their axes "
                                      "off their limit switches, for example, '$X' kill alarm lock will override the "
                                      "locks and allow G-code functions to work again."),
        ("sleep", "Sleep", "This command will place Grbl into a de-powered sleep state, shutting down the spindle, "
                           "coolant, and stepper enable pins and block any commands. It may only be exited by a "
                           "soft-reset or power-cycle. Once re-initialized, Grbl will automatically enter an ALARM "
                           "state, because it's not sure where it is due to the steppers being disabled."),
        ("run", "Run", ""),
    ])

    def execute(self, context):
        # Button clicks arrive here; delegate to the single implementation.
        return self.invoke(context, None)

    def invoke(self, context, event):
        """Dispatch self.action to the communication layer."""
        communication = context.scene.ncnc_pr_communication
        vision = context.scene.ncnc_pr_vision

        if self.action == "run":
            # Stream the selected text block line by line.
            if not vision.texts:
                self.report({'INFO'}, "No Selected Text")
                return {"CANCELLED"}
            source = bpy.data.texts[vision.texts]
            for raw_line in source.as_string().splitlines():
                line = raw_line.strip()
                # Skip blank lines and full-line G-code comments: "( ... )"
                if not line or (line.startswith("(") and line.endswith(")")):
                    continue
                communication.send_in_order(line)
        elif self.action == "reset":
            # Soft reset, clear any alarm lock, then flush the local queue.
            communication.set_hardly("0x18")
            communication.set_hardly("$X")
            communication.clear_queue()
        else:
            # Remaining actions map one-to-one onto a single real-time code.
            realtime = {
                "resume": "~",
                "hold": "!",
                "door": "0x84",
                "cancel": "0x85",
                "unlock": "$X",
                "sleep": "$SLP",
            }
            code = realtime.get(self.action)
            if code is not None:
                communication.set_hardly(code)
        return {'FINISHED'}
class NCNC_PT_Machine(Panel):
    """Sidebar panel with the main machine control buttons
    (reset / unlock / hold / resume / sleep / door)."""
    bl_idname = "NCNC_PT_machine"
    bl_label = "Machine"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"

    @classmethod
    def poll(cls, context):
        # Only show while the "machine" tool is selected in the add-on header.
        return context.scene.ncnc_pr_head.tool_machine

    def draw(self, context):
        layout = self.layout
        # Grey out all controls while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        status = context.scene.ncnc_pr_machine.status
        row = layout.row()
        row.alert = status.startswith("ALARM") or status in ("HOLD:0", "SLEEP", "DOOR:0")
        row.operator("ncnc.machine", text="Reset", icon="FILE_REFRESH", ).action = "reset"
        # BUGFIX: was `status in ("ALARM:3")` -- without a comma that is a
        # substring test against the string "ALARM:3", not tuple membership.
        row.alert = status == "ALARM:3"
        row.operator("ncnc.machine", text="Unlock", icon="UNLOCKED", ).action = "unlock"
        row = layout.row()
        row.operator("ncnc.machine", text="Hold!", icon="PAUSE", ).action = "hold"
        row.alert = status in ("HOLD:0", "HOLD:1", "DOOR:0")
        row.operator("ncnc.machine", text="Resume", icon="PLAY", ).action = "resume"
        row = layout.row()
        row.operator("ncnc.machine", text="Sleep", icon="SORTTIME", ).action = "sleep"
        row.operator("ncnc.machine", text="Door", icon="ARMATURE_DATA", ).action = "door"

    def draw_header(self, context):
        # Quick-access reset button in the header when the machine needs one.
        status = context.scene.ncnc_pr_machine.status
        if status.startswith("ALARM") or status in ("HOLD:0", "SLEEP", "DOOR:0"):
            self.layout.operator("ncnc.machine", text="", icon="FILE_REFRESH", ).action = "reset"
class NCNC_PT_MachineDash(Panel):
    """Read-only dashboard: machine status, XYZ position, feed / spindle /
    buffer values as reported by the controller."""
    bl_idname = "NCNC_PT_machinedash"
    bl_label = "Dashboard"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machine"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        # STATUS
        row = layout.row()
        row.alert = True
        row.operator("ncnc.empty", text=f"{props.status}")
        col = layout.column(align=True)
        # POS MODE
        row = col.row(align=True)
        row.prop(props, "pos_type", expand=True, )
        row.scale_y = 0.8
        # Machine position or work position, depending on the selected mode.
        pos = props.mpos if props.pos_type == "mpos" else props.wpos
        # POS LABEL
        row = col.row(align=True)
        row.alert = True
        row.operator("ncnc.empty", text="X", depress=True)  # emboss=True,
        row.operator("ncnc.empty", text="Y", depress=True)  # emboss=True,
        row.operator("ncnc.empty", text="Z", depress=True)  # emboss=True,
        # POS
        row = layout.row(align=True)
        row.operator("ncnc.empty", text=f"{round(pos[0], 2)}", emboss=False)  # , depress=True
        row.operator("ncnc.empty", text=f"{round(pos[1], 2)}", emboss=False)  # , depress=True
        row.operator("ncnc.empty", text=f"{round(pos[2], 2)}", emboss=False)  # , depress=True
        # SPLIT
        row = layout.split()
        # LABELS
        row = layout.row(align=True)
        row.alert = True
        row.operator("ncnc.empty", text="Feed", depress=True)  # emboss=False,
        row.operator("ncnc.empty", text="Spindle", depress=True)  # emboss=False,
        row.operator("ncnc.empty", text="Buffer", depress=True)  # emboss=False,
        row.enabled = True
        # VALS
        row = layout.row(align=True)
        row.operator("ncnc.empty", text=f"{props.feed}", emboss=False)
        row.operator("ncnc.empty", text=f"{props.spindle}", emboss=False)
        # NOTE(review): "bufwer" looks like a typo of a buffer-related
        # property; it is declared elsewhere in the file -- verify there.
        row.operator("ncnc.empty", text=f"{props.buffer},{props.bufwer}", emboss=False)

    def draw_header(self, context):
        # Eye toggle for the in-viewport dashboard overlay.
        context.scene.ncnc_pr_vision.prop_bool(self.layout, "dash")
class NCNC_PT_MachineModes(Panel):
    """Sub-panel exposing the controller's modal states (motion mode,
    coordinate system, units, spindle/coolant state, ...)."""
    bl_idname = "NCNC_PT_machinemodes"
    bl_label = "Modes"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machine"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        row = layout.row(heading="Motion Mode")
        row.prop(props, "motion_mode", text="")
        row = layout.row(heading="Coordinate System")
        row.prop(props, "coordinate_system", text="")
        row = layout.row(heading="Distance Mode")
        row.prop(props, "distance_mode", text="")
        row = layout.row(heading="Plane")
        row.prop(props, "plane", text="")
        row = layout.row(heading="Feed Rate Mode")
        row.prop(props, "feed_rate_mode", text="")
        row = layout.row(heading="Units Mode")
        row.prop(props, "units_mode", text="")
        row = layout.row(heading="Spindle State")
        row.prop(props, "spindle_state", text="")
        row = layout.row(heading="Coolant State")
        row.prop(props, "coolant_state", text="")
        row = layout.row(heading="Saved Feed")
        row.prop(props, "saved_feed", text="")
        # row.enabled = False
        row = layout.row(heading="Saved Spindle")
        row.prop(props, "saved_spindle", text="")
        # row.enabled = False
        # row = layout.row(heading="Cutter Radius Compensation")
        # row.prop(props, "cutter_radius_compensation", text="")
        # row = layout.row(heading="Arc Distance")
        # row.prop(props, "arc_ijk_distance", text="")
        # row = layout.row(heading="Tool Length Offset")
        # row.prop(props, "tool_length_offset", text="")
        # row = layout.row(heading="Program Mode")
        # row.prop(props, "program_mode", text="")
class NCNC_PT_MachineDetails(Panel):
    """Container panel for the GRBL configuration sub-panels; draws nothing
    itself -- its children provide the widgets."""
    bl_idname = "NCNC_PT_machinedetails"
    bl_label = "Configs"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machine"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        """Intentionally empty -- see the sub-panels."""
class NCNC_PT_MachineDetail(Panel):
    """Numeric GRBL settings ($0, $1, $11, $12, $24..$27, $30, $31) plus the
    current motion mode."""
    bl_label = "Detail"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machinedetails"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        row = layout.row(heading="Motion Mode")
        row.prop(props, "motion_mode", text="")
        # One slider per numeric setting, in ascending $-number order.
        col = layout.column(align=True)
        for setting in ("s0", "s1", "s11", "s12", "s24", "s25", "s26", "s27", "s30", "s31"):
            col.prop(props, setting)
class NCNC_PT_MachineDetailInvert(Panel):
    """Boolean (invert/enable) GRBL settings shown as checkboxes."""
    bl_label = "Detail: Invert"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machinedetails"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        col = layout.column(align=False)
        col.alignment = "RIGHT"
        # One checkbox per toggle setting, in ascending $-number order.
        for setting in ("s4", "s5", "s6", "s20", "s21", "s22", "s32"):
            col.prop(props, setting)
class NCNC_PT_MachineDetailAxis(Panel):
    """Per-axis invert masks: step port ($2), direction port ($3) and
    homing direction ($23)."""
    bl_label = "Detail: Invert Axis"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machinedetails"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        col = layout.column(align=False)
        # Label above each axis-mask row.
        for label, setting in (("Step Port Invert:", "s2"),
                               ("Direction Port Invert:", "s3"),
                               ("Homing Dir Invert:", "s23")):
            col.label(text=label)
            col.row().prop(props, setting, text="")
class NCNC_PT_MachineDetailAxisInvert(Panel):
    """Per-axis numeric GRBL settings grouped by meaning:
    resolution ($100-$102), max rate ($110-$112),
    acceleration ($120-$122) and max travel ($130-$132)."""
    bl_label = "Detail: Axis"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_machinedetails"
    bl_options = {'DEFAULT_CLOSED'}  # DEFAULT_CLOSED

    def draw(self, context):
        props = context.scene.ncnc_pr_machine
        layout = self.layout
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            layout.enabled = False
        # (heading, X/Y/Z setting keys) -- one aligned column per group.
        groups = (
            ("Axis Travel Resolution (step/mm)", ("s100", "s101", "s102")),
            ("Axis Maximum Rate (mm/min)", ("s110", "s111", "s112")),
            ("Axis Acceleration (mm/sec^2)", ("s120", "s121", "s122")),
            ("Axis Maximum Travel (mm)", ("s130", "s131", "s132")),
        )
        for heading, settings in groups:
            col = layout.column(align=True, heading=heading)
            for setting in settings:
                col.prop(props, setting)
# #################################
# #################################
# #################################
class NCNC_PR_JogController(PropertyGroup):
    """Jog settings: XY/Z step sizes, feed rate, and spindle speed/state."""

    def update_spindle_speed(self, context):
        # Forward the new S value to the controller immediately.
        context.scene.ncnc_pr_communication.send_in_order(f"S{self.spindle_speed}")

    def update_spindle_state(self, context):
        # Toggle: if the spindle is already running (M3/M4) stop it,
        # otherwise start it clockwise at the configured speed.
        communication = context.scene.ncnc_pr_communication
        machine = context.scene.ncnc_pr_machine
        if machine.spindle_state in ("M3", "M4"):
            communication.send_in_order("M5")
        else:
            communication.send_in_order(f"M3 S{self.spindle_speed}")
    # Auto Update On/Off BUTTON

    # Jog step length for X/Y moves (mm).
    step_size_xy: FloatProperty(
        name="Step Size XY",
        step=200,
        default=10.000
    )
    # Jog step length for Z moves (mm).
    step_size_z: FloatProperty(
        name="Step Size Z",
        step=100,
        default=1.0
    )
    # Feed rate used for every jog command.
    feed_rate: IntProperty(
        name="Feed",
        step=50,
        default=500,
        description="Feed Rate"
    )
    # Spindle RPM; pushed to the controller on change.
    spindle_speed: IntProperty(
        name="Spindle",
        default=1000,
        step=200,
        min=0,
        max=75000,
        description="Current Speed",
        update=update_spindle_speed
    )
    # Spindle start/stop toggle; pushed to the controller on change.
    spindle_state: BoolProperty(
        name="Spindle On/Off",
        default=False,
        description="Start / Stop",
        update=update_spindle_state
    )

    @classmethod
    def register(cls):
        # Attach to every Scene as scene.ncnc_pr_jogcontroller.
        Scene.ncnc_pr_jogcontroller = PointerProperty(
            name="NCNC_PR_JogController Name",
            description="NCNC_PR_JogController Description",
            type=cls)

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_jogcontroller
class NCNC_OT_JogController(Operator):
    """Jog the machine: relative step moves, work-offset zeroing, homing,
    safe-Z, jog cancel, and a modal click-to-move mode ("mousepos")."""
    bl_idname = "ncnc.jogcontroller"
    bl_label = "Jog Control Operators"
    bl_description = "Jog Control Operators,\nMove X / Y / Z"
    bl_options = {'REGISTER'}

    action: EnumProperty(name="Jog Controller",
                         items=[("x+", "X+", "TRIA_RIGHT"),  # EVENT_X
                                ("x-", "X-", "TRIA_LEFT"),  # EVENT_Y
                                ("y+", "Y+", "TRIA_UP"),
                                ("y-", "Y-", "TRIA_DOWN"),
                                ("z+", "Z+", "TRIA_UP"),
                                ("z-", "Z-", "TRIA_DOWN"),
                                ("x+y+", "X+ Y+", "DOT"),
                                ("x+y-", "X+ Y-", "DOT"),
                                ("x-y+", "X- Y+", "DOT"),
                                ("x-y-", "X- Y-", "DOT"),
                                ("x0y0", "X0 Y0", "DOT"),
                                ("z0", "Z0", "DOT"),
                                ("0xy", "XY:0", "XY:0"),
                                ("0x", "X:0", "X:0"),
                                ("0y", "Y:0", "Y:0"),
                                ("0z", "Z:0", "Z:0"),
                                ("home", "Home", "Home: XYZ"),
                                ("safez", "Safe Z", "Safe Z: 5mm"),
                                ("cancel", "Jog Cancel", "Jog Cancel and Clear"),
                                ("mousepos", "Mouse Pos", "Set mouse pos")
                                ])

    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event=None):
        """Translate self.action into $J= jog / G10 offset commands."""
        pr_dev = context.scene.ncnc_pr_machine
        pr_jog = context.scene.ncnc_pr_jogcontroller
        pr_com = context.scene.ncnc_pr_communication
        # Relative ($J=G21 G91) single-step jogs.
        if self.action == "x+":
            pr_com.send_in_order(f"$J=G21 G91 X{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "x-":
            pr_com.send_in_order(f"$J=G21 G91 X-{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "y+":
            pr_com.send_in_order(f"$J=G21 G91 Y{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "y-":
            pr_com.send_in_order(f"$J=G21 G91 Y-{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "z+":
            pr_com.send_in_order(f"$J=G21 G91 Z{pr_jog.step_size_z} F{pr_jog.feed_rate}")
        elif self.action == "z-":
            pr_com.send_in_order(f"$J=G21 G91 Z-{pr_jog.step_size_z} F{pr_jog.feed_rate}")
        elif self.action == "x+y+":
            pr_com.send_in_order(f"$J=G21 G91 X{pr_jog.step_size_xy} Y{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "x+y-":
            pr_com.send_in_order(f"$J=G21 G91 X{pr_jog.step_size_xy} Y-{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "x-y+":
            pr_com.send_in_order(f"$J=G21 G91 X-{pr_jog.step_size_xy} Y{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "x-y-":
            pr_com.send_in_order(f"$J=G21 G91 X-{pr_jog.step_size_xy} Y-{pr_jog.step_size_xy} F{pr_jog.feed_rate}")
        elif self.action == "x0y0":
            # Lift Z to a safe height first if it is too low, then go to XY origin.
            pos = pr_dev.mpos if pr_dev.pos_type == "mpos" else pr_dev.wpos
            if pos[2] < 3:
                pr_com.send_in_order(f"$J=G21 G90 Z3 F{pr_jog.feed_rate}")
            # pr_com.send_in_order(f"$J=G21 G91 X{round(pos[0], 3) * -1}Y{round(pos[1], 3) * -1}F{pr_jog.feed_rate}")
            pr_com.send_in_order(f"$J=G21 G90 X0 Y0 F{pr_jog.feed_rate}")
        elif self.action == "z0":
            # Jog Z back to zero by moving the negated current Z position.
            pos = pr_dev.mpos if pr_dev.pos_type == "mpos" else pr_dev.wpos
            pr_com.send_in_order(f"$J=G21G91Z{round(pos[2], 3) * -1}F{pr_jog.feed_rate}")
        # #### Reset Zero XYZ (G10 L20 sets the current work offset)
        elif self.action == "0xy":
            pr_com.send_in_order("G10 L20 X0 Y0")
        elif self.action == "0x":
            pr_com.send_in_order("G10 L20 X0")
        elif self.action == "0y":
            pr_com.send_in_order("G10 L20 Y0")
        elif self.action == "0z":
            pr_com.send_in_order("G10 L20 Z0")
        elif self.action == "home":
            # Safe height, XY origin, then Z origin.
            pos = pr_dev.mpos if pr_dev.pos_type == "mpos" else pr_dev.wpos
            if pos[2] < 3:
                pr_com.send_in_order(f"$J=G21 G90 Z3 F{pr_jog.feed_rate}")
            pr_com.send_in_order(f"$J=G21 G90 X0 Y0 F{pr_jog.feed_rate}")
            pr_com.send_in_order(f"$J=G21 G90 Z0 F{pr_jog.feed_rate}")
        elif self.action == "safez":
            # FIX: dropped an unused `pos = ...` read; the command is absolute.
            pr_com.send_in_order(f"$J=G21 G90 Z5 F{pr_jog.feed_rate}")
        elif self.action == "cancel":
            pr_com.set_hardly("0x85")
        elif self.action == "mousepos":
            # Enter modal click-to-move mode and draw on-screen instructions.
            # context.region
            # bpy.ops.view3d.view_axis(type="TOP")
            context.window_manager.modal_handler_add(self)
            self.draw_handle_2d = bpy.types.SpaceView3D.draw_handler_add(self.draw_callback_2d,
                                                                         (self, context),
                                                                         "WINDOW",
                                                                         "POST_PIXEL")
            return {"RUNNING_MODAL"}
        return {"FINISHED"}

    def modal(self, context, event):
        """Click-to-move: on left click, project the mouse ray into the 3D
        view and jog to the clicked coordinates; any key press exits."""
        if event.type == "LEFTMOUSE":
            for area in context.window.screen.areas:
                if area.type != 'VIEW_3D':
                    continue
                if area.x < event.mouse_x < area.x + area.width and area.y < event.mouse_y < area.y + area.height:
                    active_region = None
                    active_region_3d = None
                    ##############
                    # on Quad View
                    if len(area.spaces.active.region_quadviews):
                        # +-----------------+
                        # | quad 1 | quad 3 |
                        # |--------+--------|
                        # | quad 0 | quad 2 |
                        # +-----------------+
                        quad_index = -1
                        for region in area.regions:
                            if region.type == "WINDOW":
                                quad_index += 1
                                if (region.x <= event.mouse_x < region.width + region.x) and \
                                        (region.y <= event.mouse_y < region.height + region.y):
                                    active_region = region
                                    active_region_3d = area.spaces.active.region_quadviews[quad_index]
                                    break
                    #####################
                    # on Normal View (3D)
                    else:
                        for region in area.regions:
                            if region.type == "WINDOW":
                                active_region = region
                                break
                        active_region_3d = area.spaces[0].region_3d
                    if not (active_region and active_region_3d):
                        self.report({'WARNING'}, "View should be [TOP, LEFT, RIGHT ...]")
                        return {'CANCELLED'}
                    # FIX: was built from the loop variable `region`; use the
                    # region that was actually matched to the click.
                    m_pos = (event.mouse_x - active_region.x, event.mouse_y - active_region.y)
                    origin = region_2d_to_origin_3d(active_region, active_region_3d, m_pos)
                    direction = region_2d_to_vector_3d(active_region, active_region_3d, m_pos)
                    pr_jog = context.scene.ncnc_pr_jogcontroller
                    pr_com = context.scene.ncnc_pr_communication
                    at = ""
                    # Only act on axis-aligned (orthographic) views; the view
                    # direction tells us which machine plane the click maps to.
                    # ##################
                    # Move XY - TOP VIEW
                    if direction[2] == -1:
                        at = f"X{round(origin[0], 2)} Y{round(origin[1], 2)}"
                    # #####################
                    # Move XY - BOTTOM VIEW
                    # FIX: was a second `if`, breaking the chain symmetry.
                    elif direction[2] == 1:
                        at = f"X{round(origin[0], 2)} Y{round(origin[1], 2)}"
                    # ####################
                    # Move XZ - FRONT VIEW
                    elif direction[1] == 1:
                        at = f"X{round(origin[0], 2)} Z{round(origin[2], 2)}"
                    # ###################
                    # Move XZ - BACK VIEW
                    elif direction[1] == -1:
                        at = f"X{round(origin[0], 2)} Z{round(origin[2], 2)}"
                    # ####################
                    # Move YZ - RIGHT VIEW
                    elif direction[0] == -1:
                        at = f"Y{round(origin[1], 2)} Z{round(origin[2], 2)}"
                    # ###################
                    # Move YZ - LEFT VIEW
                    elif direction[0] == 1:
                        at = f"Y{round(origin[1], 2)} Z{round(origin[2], 2)}"
                    if at:
                        pr_com.send_in_order(f"$J=G21 G90 {at} F{pr_jog.feed_rate}")
                    else:
                        self.report({'WARNING'}, "View should be [TOP, LEFT, RIGHT ...]")
                    break
        # Any key press (including the click above) ends the modal session.
        if event.value == "PRESS" or event.type == "ESC":
            bpy.types.SpaceView3D.draw_handler_remove(self.draw_handle_2d, "WINDOW")
            if context.area:
                context.area.tag_redraw()
            return {'CANCELLED'}
        return {"PASS_THROUGH"}

    def draw_callback_2d(self, op, context):
        # Draw text to indicate that draw mode is active
        region = context.region
        text = "- Move: Mouse Left Click (inView: TOP, LEFT, RIGHT ...)-"
        subtext = "Close: Press Anything"
        xt = int(region.width / 2.0)
        blf.size(0, 24, 72)
        blf.position(0, xt - blf.dimensions(0, text)[0] / 2, 60, 0)
        blf.draw(0, text)
        blf.size(1, 20, 72)
        # FIX: the subtext is drawn with font id 1, so measure it with
        # font id 1 (was measured with font id 0, mis-centering it).
        blf.position(1, xt - blf.dimensions(1, subtext)[0] / 2, 30, 1)
        blf.draw(1, subtext)
# Draw handler to paint onto the screen
class NCNC_PT_JogController(Panel):
    """Jog pad panel: arrow/diagonal step buttons for X/Y/Z, work-offset
    zeroing, homing, safe-Z, click-to-move and spindle controls."""
    bl_idname = "NCNC_PT_jogcontroller"
    bl_label = "Jog"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"

    @classmethod
    def poll(cls, context):
        # Only show while the "machine" tool is selected in the add-on header.
        return context.scene.ncnc_pr_head.tool_machine

    def draw(self, context):
        # Grey out everything while the serial connection is down.
        if not context.scene.ncnc_pr_connection.isconnected:
            self.layout.enabled = False
        pr_jog = context.scene.ncnc_pr_jogcontroller
        layout = self.layout
        # Jog pad: three X/Y columns + mouse column + Z column.
        row_jog = layout.row(align=True)
        row_jog.scale_y = 1.8
        col = row_jog.column(align=True)
        col.operator("ncnc.jogcontroller", text="", icon="DOT").action = "x-y+"
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_LEFT").action = "x-"
        col.operator("ncnc.jogcontroller", text="", icon="DOT").action = "x-y-"
        zero = col.split()
        zero.operator("ncnc.jogcontroller", text="X:0").action = "0x"
        zero.scale_y = 0.5
        col = row_jog.column(align=True)
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_UP").action = "y+"
        col.operator("ncnc.jogcontroller", text="", icon="RADIOBUT_ON").action = "x0y0"  # SNAP_FACE_CENTER
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_DOWN").action = "y-"
        zero = col.split()
        zero.operator("ncnc.jogcontroller", text="Y:0").action = "0y"
        zero.scale_y = 0.5
        col = row_jog.column(align=True)
        col.operator("ncnc.jogcontroller", text="", icon="DOT").action = "x+y+"
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_RIGHT").action = "x+"
        col.operator("ncnc.jogcontroller", text="", icon="DOT").action = "x+y-"
        zero = col.split()
        zero.operator("ncnc.jogcontroller", text="XY:0").action = "0xy"
        zero.scale_y = 0.5
        col = row_jog.column(align=True)
        col.label(icon="BLANK1")
        col.operator("ncnc.jogcontroller", text="", icon="CON_OBJECTSOLVER").action = "mousepos"
        col = row_jog.column(align=True)
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_UP").action = "z+"
        col.operator("ncnc.jogcontroller", text="", icon="RADIOBUT_ON").action = "z0"
        col.operator("ncnc.jogcontroller", text="", icon="TRIA_DOWN").action = "z-"
        zero = col.split()
        zero.operator("ncnc.jogcontroller", text="Z:0").action = "0z"
        zero.scale_y = 0.5
        # Settings column + home/safe-Z/cancel/spindle column.
        row_conf = layout.row(align=True)
        col = row_conf.column(align=True)
        col.prop(pr_jog, "step_size_xy", icon="AXIS_TOP")
        col.prop(pr_jog, "step_size_z", icon="EMPTY_SINGLE_ARROW", )
        col.prop(pr_jog, "feed_rate", icon="CON_TRACKTO")
        col.prop(pr_jog, "spindle_speed", icon="CON_TRACKTO")
        col = row_conf.column(align=True)
        col.operator("ncnc.jogcontroller", text="", icon="HOME").action = "home"
        col.operator("ncnc.jogcontroller", text="", icon="EMPTY_SINGLE_ARROW").action = "safez"
        # Cancel button only while a jog is actually running.
        if context.scene.ncnc_pr_machine.status == "JOG":
            col.operator("ncnc.jogcontroller", text="", icon="CANCEL").action = "cancel"
        else:
            col.label(icon="BLANK1")
        pr_mac = context.scene.ncnc_pr_machine
        # Highlight the spindle toggle while the spindle is running.
        col.alert = pr_mac.spindle_state != "M5"
        col.prop(pr_jog, "spindle_state", icon="DISC", icon_only=True,
                 invert_checkbox=pr_jog.spindle_state or col.alert)

    def draw_header(self, context):
        # Mill overlay toggle plus a header-level jog cancel while jogging.
        context.scene.ncnc_pr_vision.prop_bool(self.layout, "mill")
        if context.scene.ncnc_pr_machine.status == "JOG":
            self.layout.operator("ncnc.jogcontroller", text="", icon="CANCEL").action = "cancel"
##################################
##################################
##################################
def handles() -> dict:
    """Return the add-on's persistent draw-handler registry.

    The registry lives in ``bpy.app.driver_namespace`` so it survives
    script reloads.

    Bug fix: the original tested the stored dict's truthiness
    (``if ns.get(keycode)``), so an *empty* registry was replaced with a
    brand-new dict on every call, invalidating references held by callers.
    ``setdefault`` keeps one stable dict and does a single lookup.
    """
    keycode = "ncnc_pr_vision.handles"
    return bpy.app.driver_namespace.setdefault(keycode, {})
def handle_remove(keycode) -> dict:
    """Remove and unhook the SpaceView3D draw handler stored under *keycode*.

    Returns the handler registry so callers can immediately store a
    replacement handler under the same key.

    Fix: the original annotated the return type as the ``handles`` function
    object itself (``-> handles``); it actually returns the registry dict.
    """
    handle_list = handles()
    if handle_list.get(keycode):
        bpy.types.SpaceView3D.draw_handler_remove(handle_list.pop(keycode), 'WINDOW')
    return handle_list
class NCNC_PR_Vision(PropertyGroup):
    """Viewport visualisation settings and draw handlers for nCNC.

    Holds show/hide flags, colors and thicknesses for three overlay groups
    (G-code path preview, machine dashboard text, mill marker) and installs
    the SpaceView3D draw callbacks that render them.

    Changes vs. the original: dynamic attribute access uses
    getattr/setattr instead of eval/exec on f-strings (same behavior,
    no string evaluation), and a leftover debug print was removed from
    dash_callback_recovery.
    """

    # ##########################
    # ########### Layout Methods
    def prop_bool(self, layout, prop: str):
        """Draw boolean *prop* as an eye-style show/hide toggle."""
        return layout.prop(self, prop,
                           emboss=False,
                           text="",
                           icon=("RESTRICT_VIEW_OFF" if getattr(self, prop) else "RESTRICT_VIEW_ON"), )

    def prop_theme(self, layout, prop: str, text=""):
        """Draw one theme row: toggle, label, then color_<prop> / thick_<prop>."""
        row = layout.row(align=True)
        self.prop_bool(row, prop)
        row.label(text=text)
        col = row.column(align=True)
        col.prop(self, f"color_{prop}", text="")
        col.prop(self, f"thick_{prop}", text="")
        return row

    # ##########################
    # ################## Presets
    def update_presets(self, context):
        """Apply the chosen preset's colors/thicknesses and persist the choice."""
        # preset key -> ((property suffix, RGBA color, thickness), ...)
        prs = {"def": (("g0", (.5, .5, .5, 0.5), 1),
                       ("g1", (0, .44, .77, 0.5), 2),
                       ("g2", (.77, .2, .3, 0.5), 2),
                       ("g3", (.3, .77, .2, 0.5), 2),
                       ("gp", (.1, .1, .1, 1), 2),
                       ("dash", (1, 1, 1, .9), 14),
                       ("status", (1, .8, .2, .9), 14),
                       ("pos", (1, .8, .2, .9), 14),
                       ("mill", (.9, .4, .6, .9), 3),
                       ),
               "blu": (("gcode", (0, .44, .77, 0.5), 1),
                       ("g0", (.2, .3, .5, .5), 1),
                       ("gp", (0, .1, .2, 1), 2),
                       ("dash", (.5, .7, 1, .9), 14),
                       ("mill", (0, .5, .8, .9), 3),
                       ),
               "bla": (("gcode", (0, 0, 0, 1), 1),
                       ("g0", (0, 0, 0, 1), 1),
                       ("gp", (.3, .3, .3, 1), 2),
                       ("dash", (0, 0, 0, 1), 14),
                       ("mill", (0, 0, .1, 1), 3),
                       ),
               "whi": (("gcode", (1, 1, 1, 1), 2),
                       ("g0", (1, 1, 1, 1), 1),
                       ("gp", (.4, .4, .4, 1), 2),
                       ("dash", (1, 1, 1, .9), 14),
                       ("mill", (.7, .8, 1, 1), 3),
                       ),
               }
        for key, color, thick in prs[self.presets]:
            # Attribute assignment (not self[...] item assignment) so each
            # property's update callback still runs — the "gcode"/"dash"
            # entries fan their value out to the per-item sub-properties.
            setattr(self, f"color_{key}", color)
            setattr(self, f"thick_{key}", thick)

        # Save as last preset in the add-on preferences.
        addon = bpy.context.preferences.addons.get(__name__)
        if addon:
            addon.preferences.last_preset = self.presets
            bpy.context.preferences.use_preferences_save = True

    presets: EnumProperty(
        items=[("def", "Default", ""),
               ("bla", "Black", ""),
               ("whi", "White", ""),
               ("blu", "Blue", "")],
        name="Presets",
        update=update_presets
    )

    # ##########################
    # #################### DASH
    def update_dash(self, context):
        """(Un)install the dashboard text-overlay draw handler."""
        keycode = "DASH"
        handles = handle_remove(keycode)

        if self.dash:
            handles[keycode] = bpy.types.SpaceView3D.draw_handler_add(self.dash_callback,
                                                                      (self, context),
                                                                      "WINDOW",
                                                                      "POST_PIXEL")

    dash: BoolProperty(
        name="Machine Dashboard",
        description="Show/Hide in Viewport",
        default=False,
        update=update_dash
    )
    feed: BoolProperty(
        name="Feed on Dashboard",
        description="Show/Hide in Viewport",
        default=True
    )
    spindle: BoolProperty(
        name="Spindle on Dashboard",
        description="Show/Hide in Viewport",
        default=True
    )
    buffer: BoolProperty(
        name="Buffer on Dashboard",
        description="Show/Hide in Viewport",
        default=True
    )
    status: BoolProperty(
        name="Status on Dashboard",
        description="Show/Hide in Viewport",
        default=True
    )
    pos: BoolProperty(
        name="Position on Dashboard",
        description="Show/Hide in Viewport",
        default=True
    )

    def update_color_dash(self, context):
        # Item assignment on purpose: it skips update callbacks.
        for key in ("feed", "spindle", "buffer", "status", "pos"):
            self[f"color_{key}"] = self.color_dash

    color_dash: FloatVectorProperty(
        name='Dashboard',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, 1, 1, 0.9),
        update=update_color_dash
    )
    color_feed: FloatVectorProperty(
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, 1, 1, 0.9)
    )
    color_spindle: FloatVectorProperty(
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, 1, 1, 0.9)
    )
    color_buffer: FloatVectorProperty(
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, 1, 1, 0.9)
    )
    color_status: FloatVectorProperty(
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, .8, .2, 0.9)
    )
    color_pos: FloatVectorProperty(
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, .8, .2, 0.9)
    )

    def update_thick_dash(self, context):
        # Item assignment on purpose: it skips update callbacks.
        for key in ("feed", "spindle", "buffer", "status", "pos"):
            self[f"thick_{key}"] = self.thick_dash

    thick_dash: IntProperty(default=14, min=8, max=20, description="Font Size", update=update_thick_dash)
    thick_feed: IntProperty(default=14, min=8, max=20, description="Font Size")
    thick_spindle: IntProperty(default=14, min=8, max=20, description="Font Size")
    thick_buffer: IntProperty(default=14, min=8, max=20, description="Font Size")
    thick_status: IntProperty(default=14, min=8, max=20, description="Font Size")
    thick_pos: IntProperty(default=14, min=8, max=20, description="Font Size")

    @classmethod
    def dash_callback(cls, self, context):
        """Draw the dashboard (position/buffer/spindle/feed/status) as 2D text."""
        if not cls.register_check(context):
            return

        pr_mac = context.scene.ncnc_pr_machine
        pos = pr_mac.mpos if pr_mac.pos_type == "mpos" else pr_mac.wpos

        blf_pos_y = 10
        pos_type = 'WPos' if pr_mac.pos_type == 'wpos' else 'MPos'

        for prop, text, val in [
            ("pos", pos_type, f"X {round(pos[0], 2)} Y {round(pos[1], 2)} Z {round(pos[2], 2)}"),
            ("buffer", "Buffer", f"{pr_mac.buffer},{pr_mac.bufwer}"),
            ("spindle", "Spindle", pr_mac.spindle),
            ("feed", "Feed", pr_mac.feed),
            ("status", "Status", pr_mac.status),
        ]:
            if not getattr(self, prop):
                continue
            size = getattr(self, f"thick_{prop}")
            blf.color(0, *getattr(self, f"color_{prop}"))
            blf.size(0, size, 64)
            blf.position(0, 10, blf_pos_y, 0)
            blf.draw(0, text)
            # Value column is offset proportionally to the font size.
            blf.position(0, size * 5, blf_pos_y, 0)
            blf.draw(0, f"{val}")
            blf_pos_y += size * 1.5

    @classmethod
    def dash_callback_recovery(cls, self, context):
        """Older dashboard drawer kept for reference; same output as dash_callback.

        (A leftover debug ``print`` was removed from the loop body.)
        """
        if not cls.register_check(context):
            return

        pr_mac = context.scene.ncnc_pr_machine
        pos = pr_mac.mpos if pr_mac.pos_type == "mpos" else pr_mac.wpos

        blf_pos_y = 10
        pos_type = 'WPos' if pr_mac.pos_type == 'wpos' else 'MPos'
        pos_str = f"X {round(pos[0], 2)} Y {round(pos[1], 2)} Z {round(pos[2], 2)}"
        buf_str = f"{pr_mac.buffer},{pr_mac.bufwer}"

        for text, val, show, color, size in [(pos_type, pos_str, self.pos, self.color_pos, self.thick_pos),
                                             ("Buffer", buf_str, self.buffer, self.color_buffer, self.thick_buffer),
                                             ("Spindle", pr_mac.spindle, self.spindle, self.color_spindle,
                                              self.thick_spindle),
                                             ("Feed", pr_mac.feed, self.feed, self.color_feed, self.thick_feed),
                                             ("Status", pr_mac.status, self.status, self.color_status,
                                              self.thick_status),
                                             ]:
            if not show:
                continue
            blf.color(0, *color)
            blf.size(0, size, 64)
            blf.position(0, 10, blf_pos_y, 0)
            blf.draw(0, text)
            blf.position(0, size * 5, blf_pos_y, 0)
            blf.draw(0, f"{val}")
            blf_pos_y += size * 1.5

    # ##########################
    # #################### GCODE
    def update_gcode(self, context):
        """Rebuild shaders/batches for the active text's toolpath and (re)install the draw handler."""
        keycode = "GCODE"
        handles = handle_remove(keycode)

        pr_act = context.scene.ncnc_pr_texts.active_text
        if not pr_act:
            return
        pr_txt = pr_act.ncnc_pr_text

        if self.gcode:
            # For different shader / color
            # https://docs.blender.org/api/current/gpu.html#mesh-with-random-vertex-colors
            # Dotted Line For G0
            # https://docs.blender.org/api/current/gpu.html#custom-shader-for-dotted-3d-line
            cls = self.__class__
            # One shader/batch per move kind: 0..3 = G0..G3.
            for i in range(4):
                cls.gcode_shaders[i] = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
                cls.gcode_batchs[i] = batch_for_shader(cls.gcode_shaders[i],
                                                       'LINES',
                                                       {"pos": pr_txt.get_lines(i)}
                                                       )
            # "p": path points, "c": currently selected/active code line.
            cls.gcode_shaders["p"] = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
            cls.gcode_batchs["p"] = batch_for_shader(cls.gcode_shaders["p"],
                                                     'POINTS',
                                                     {"pos": pr_txt.get_points()}
                                                     )
            cls.gcode_shaders["c"] = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
            cls.gcode_batchs["c"] = batch_for_shader(cls.gcode_shaders["c"],
                                                     'LINES',
                                                     {"pos": []}
                                                     )
            handles[keycode] = bpy.types.SpaceView3D.draw_handler_add(cls.gcode_callback,
                                                                      (self, context),
                                                                      "WINDOW",
                                                                      "POST_VIEW")

    @classmethod
    def gcode_callback(cls, self, context):
        """Draw the toolpath preview; refresh batches when the text reports changes."""
        if not cls.register_check(context):
            return

        pr_txt = context.scene.ncnc_pr_texts.active_text
        if not pr_txt:
            return
        pr_txt = pr_txt.ncnc_pr_text

        if pr_txt.event:
            cls.gcode_batchs["p"] = batch_for_shader(cls.gcode_shaders["p"],
                                                     'POINTS',
                                                     {"pos": pr_txt.get_points()})
            for i in range(4):
                cls.gcode_batchs[i] = batch_for_shader(cls.gcode_shaders[i],
                                                       'LINES',
                                                       {"pos": pr_txt.get_lines(i)})
            if context.area:
                context.area.tag_redraw()

        if pr_txt.event_selected:
            cls.gcode_batchs["c"] = batch_for_shader(cls.gcode_shaders["c"],
                                                     'LINES',
                                                     {"pos": pr_txt.get_selected()})

        for i, color, thick, show in [(0, self.color_g0, self.thick_g0, self.g0),
                                      (1, self.color_g1, self.thick_g1, self.g1),
                                      (2, self.color_g2, self.thick_g2, self.g2),
                                      (3, self.color_g3, self.thick_g3, self.g3),
                                      ("p", self.color_gp, self.thick_gp, self.gp),
                                      ("c", self.color_gc, self.thick_gc, self.gc)
                                      ]:
            if not show:
                continue
            if i == "p":
                bgl.glPointSize(thick)
            else:
                bgl.glLineWidth(thick)
            cls.gcode_shaders[i].bind()
            cls.gcode_shaders[i].uniform_float("color", color)
            cls.gcode_batchs[i].draw(cls.gcode_shaders[i])

    # Cached GPU objects, keyed 0-3 (G0..G3), "p" (points), "c" (current line).
    gcode_shaders = {}
    gcode_batchs = {}
    gcode_last = ""
    gcode_prev_current_line = None

    gcode: BoolProperty(default=True, update=update_gcode)
    gp: BoolProperty(default=True)
    gc: BoolProperty(default=True)
    g0: BoolProperty(default=True)
    g1: BoolProperty(default=True)
    g2: BoolProperty(default=True)
    g3: BoolProperty(default=True)

    def update_thick_gcode(self, context):
        # Item assignment on purpose: it skips update callbacks.
        for key in (0, 1, 2, 3, "p"):
            self[f"thick_g{key}"] = self.thick_gcode

    thick_gcode: FloatProperty(name="General", default=2.0, min=0, max=10, description="Line Thickness",
                               update=update_thick_gcode)
    thick_gp: FloatProperty(name="Point", default=3.0, min=0, max=10, description="Point Thickness")
    thick_gc: FloatProperty(name="Current", default=3.0, min=0, max=10, description="Line Thickness")
    thick_g0: FloatProperty(name="Rapid", default=1.0, min=0, max=10, description="Line Thickness")
    thick_g1: FloatProperty(name="Linear", default=2.0, min=0, max=10, description="Line Thickness")
    thick_g2: FloatProperty(name="Arc CW", default=2.0, min=0, max=10, description="Line Thickness")
    thick_g3: FloatProperty(name="Arc CCW", default=2.0, min=0, max=10, description="Line Thickness")

    def update_color_gcode(self, context):
        # Item assignment on purpose: it skips update callbacks.
        for key in (0, 1, 2, 3, "p"):
            self[f"color_g{key}"] = self.color_gcode

    color_gcode: FloatVectorProperty(
        name='General',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(.5, .5, .5, .5),
        update=update_color_gcode
    )
    color_gp: FloatVectorProperty(
        name='Point Color',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(.1, .1, .1, .5)
    )
    color_gc: FloatVectorProperty(
        name='Current Code Line Color',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(1, 0, 1, .5)
    )
    color_g0: FloatVectorProperty(
        name='Rapid Color',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(.5, .5, .5, .5)
    )
    color_g1: FloatVectorProperty(
        name='Linear Color',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(0, .44, .77, 0.5)
    )
    color_g2: FloatVectorProperty(
        name='Arc Color CW',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(.77, .2, .3, 0.5)
    )
    color_g3: FloatVectorProperty(
        name='Arc Color CCW',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(.3, .77, .2, 0.5)
    )

    # ##########################
    # #################### MILL
    def update_mill(self, context):
        """(Un)install the mill-marker draw handler, rebuilding its batch."""
        keycode = "MILL"
        handles = handle_remove(keycode)

        if self.mill:
            cls = self.__class__
            pr_mac = context.scene.ncnc_pr_machine
            pos = pr_mac.mpos if pr_mac.pos_type == "mpos" else pr_mac.wpos
            cls.mill_shader = gpu.shader.from_builtin('3D_UNIFORM_COLOR')
            cls.mill_batch = batch_for_shader(cls.mill_shader,
                                              'LINES',
                                              {"pos": cls.mill_lines(*pos)})
            handles[keycode] = bpy.types.SpaceView3D.draw_handler_add(cls.mill_callback,
                                                                      (self, context),
                                                                      "WINDOW",
                                                                      "POST_VIEW")

    mill: BoolProperty(
        name="Machine Mill",
        description="Show/Hide in Viewport",
        default=False,
        update=update_mill
    )
    color_mill: FloatVectorProperty(
        name='Arc Color CCW',
        subtype='COLOR',
        size=4,
        min=0.0,
        max=1.0,
        default=(0.9, 0.3, 0.5, 0.5)
    )
    thick_mill: FloatProperty(name="Arc CCW", default=3.0, min=0, max=10, description="Line Thickness")

    # Refresh throttling and cached GPU objects for the marker.
    mill_delay = .5
    mill_last_time = 0
    mill_shader = None
    mill_batch = None

    @classmethod
    def mill_callback(cls, self, context):
        """Draw the mill marker at the current machine position (throttled rebuild)."""
        if not cls.register_check(context):
            return

        if time.time() - cls.mill_last_time > cls.mill_delay:
            pr_mac = context.scene.ncnc_pr_machine
            pos = pr_mac.mpos if pr_mac.pos_type == "mpos" else pr_mac.wpos
            cls.mill_last_time = time.time()
            # Refresh faster while the machine is moving.
            cls.mill_delay = .1 if pr_mac.status in ("JOG", "RUN") else .5
            cls.mill_batch = batch_for_shader(cls.mill_shader,
                                              'LINES',
                                              {"pos": cls.mill_lines(*pos)})

        bgl.glLineWidth(self.thick_mill)
        cls.mill_shader.bind()
        cls.mill_shader.uniform_float("color", self.color_mill)
        cls.mill_batch.draw(cls.mill_shader)

    @classmethod
    def mill_lines(cls, x, y, z):
        """Return LINES vertex pairs for a simple tool glyph anchored at (x, y, z)."""
        s = 1.5
        s2 = s * 5
        return [
            (x, y, z), (x + s, y + s, z + s2),
            (x, y, z), (x - s, y - s, z + s2),
            (x, y, z), (x + s, y - s, z + s2),
            (x, y, z), (x - s, y + s, z + s2),
            (x - s, y - s, z + s2), (x - s, y + s, z + s2),
            (x - s, y + s, z + s2), (x + s, y + s, z + s2),
            (x + s, y - s, z + s2), (x + s, y + s, z + s2),
            (x - s, y - s, z + s2), (x + s, y - s, z + s2),
            (x, y, z + s2), (x, y, z + s2 * 2)
        ]

    @classmethod
    def register_check(cls, context) -> bool:
        """True while the add-on's scene properties are still registered."""
        return hasattr(context.scene, "ncnc_pr_machine") and hasattr(context.scene, "ncnc_pr_vision")

    @classmethod
    def register(cls):
        Scene.ncnc_pr_vision = PointerProperty(
            name="NCNC_PR_Vision Name",
            description="NCNC_PR_Vision Description",
            type=cls)
        # bpy.context.scene.ncnc_pr_vision.presets = pf.last_preset

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_vision
        # Drop any draw handlers still installed.
        for keycode in ("DASH", "MILL", "GCODE"):
            handle_remove(keycode)
class NCNC_OT_Vision(Operator):
    """Modal timer operator that redraws 3D views while the active G-code text changes."""
    bl_idname = "ncnc.vision"
    bl_label = "Update View"
    bl_description = "Update View"
    bl_options = {'REGISTER'}

    inloop = True
    delay = 0.1
    _last_time = 0
    start: BoolProperty(default=True)

    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event):
        # Standard modal bookkeeping: start=False tears the loop down.
        if not self.start:
            unregister_modal(self)
            return {'CANCELLED'}
        register_modal(self)
        context.window_manager.modal_handler_add(self)
        return self.timer_add(context)

    def timer_add(self, context):
        """Install the repeating window timer that drives modal()."""
        self._timer = context.window_manager.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}

    def timer_remove(self, context):
        """Tear down the window timer and finish the operator."""
        context.window_manager.event_timer_remove(self._timer)
        return {'CANCELLED'}

    def modal(self, context, event):
        # Standard modal bookkeeping: exit when the loop flag is cleared.
        if not self.inloop:
            if context.area:
                context.area.tag_redraw()
            return self.timer_remove(context)

        # Throttle the body to at most one run per `delay` seconds.
        now = time.time()
        if now - self._last_time < self.delay:
            return {'PASS_THROUGH'}
        self._last_time = now

        active = context.scene.ncnc_pr_texts.active_text
        if not active:
            return {'PASS_THROUGH'}

        text_props = active.ncnc_pr_text
        text_props.event_control()

        # Redraw every 3D view when the text reports a change.
        if text_props.event or text_props.event_selected:
            for area in context.screen.areas:
                if area.type == "VIEW_3D":
                    area.tag_redraw()

        return {'PASS_THROUGH'}
class NCNC_PT_Vision(Panel):
    """Vision panel — currently an empty placeholder (filters not implemented)."""
    bl_idname = "NCNC_PT_vision"
    bl_label = "Vision"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"

    @classmethod
    def poll(cls, context):
        # Only visible while the vision tool page is active.
        return context.scene.ncnc_pr_head.tool_vision

    def draw(self, context):
        # TODO: add a filtering feature:
        #   - filter by line range within the G-code
        #   - filter by X / Y / Z coordinate ranges
        # The original body only assigned two unused locals and drew nothing;
        # they were removed.
        pass
class NCNC_PT_VisionThemes(Panel):
    """Theme picker: choose one of the predefined color/thickness presets."""
    bl_idname = "NCNC_PT_visionthemes"
    bl_label = "Themes"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"

    @classmethod
    def poll(cls, context):
        return context.scene.ncnc_pr_head.tool_vision

    def draw(self, context):
        self.layout.prop(context.scene.ncnc_pr_vision, "presets", text="")
class NCNC_PT_VisionThemesGcode(Panel):
    """Theme rows for the G-code path preview (G0..G3, points, current line)."""
    bl_label = "G Codes"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_visionthemes"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        vision = context.scene.ncnc_pr_vision
        rows = (("gcode", "General"),
                ("gp", "G Points"),
                ("g0", "G0 - Rapid"),
                ("g1", "G1 - Linear"),
                ("g2", "G2 - Arc (CW)"),
                ("g3", "G3 - Arc (CCW)"),
                ("gc", "Current Line"))
        for prop_name, label in rows:
            vision.prop_theme(self.layout, prop_name, label)

    def draw_header(self, context):
        context.scene.ncnc_pr_vision.prop_bool(self.layout, "gcode")
class NCNC_PT_VisionThemesDash(Panel):
    """Theme rows for the dashboard text overlay items."""
    bl_label = "Dashboard"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_visionthemes"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        vision = context.scene.ncnc_pr_vision
        for prop_name, label in (("dash", "General"),
                                 ("status", "Status"),
                                 ("feed", "Feed"),
                                 ("spindle", "Spindle"),
                                 ("buffer", "Buffer"),
                                 ("pos", "Position")):
            vision.prop_theme(self.layout, prop_name, label)

    def draw_header(self, context):
        context.scene.ncnc_pr_vision.prop_bool(self.layout, "dash")
class NCNC_PT_VisionThemesMill(Panel):
    """Theme row for the mill (tool) marker."""
    bl_label = "Mill"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_visionthemes"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        # Single themed row; the original looped over a one-element list.
        context.scene.ncnc_pr_vision.prop_theme(self.layout, "mill", "Mill")

    def draw_header(self, context):
        context.scene.ncnc_pr_vision.prop_bool(self.layout, "mill")
# #################################
# #################################
# #################################
class NCNC_PR_ToolpathConfigs(PropertyGroup):
    """Configs of the object. Located on the object itself.

    Every machining parameter triggers :meth:`reload_gcode` so the generated
    G-code stays in sync with the settings.
    """
    obj: PointerProperty(type=Object, name="Object")

    def reload_gcode(self, context):
        """Re-run the G-code converter whenever a toolpath setting changes."""
        bpy.ops.ncnc.convert(auto_call=True)

    def update_included(self, context):
        """Validate the owning object and add/remove it from the machining list."""
        if self.included:
            if self.check_curve(self.id_data):
                context.scene.ncnc_pr_objects.add_item(self.id_data)
                self.reload_gcode(context)
            else:
                # Not a usable curve — revert the checkbox.
                self.included = False
        else:
            context.scene.ncnc_pr_objects.remove_item(self.id_data)

    included: BoolProperty(
        name="Included",
        default=False,
        description="Include in CNC machining?",
        update=update_included
    )
    plane: EnumProperty(
        name="Working Plane Selector",
        description="Select Plane (Under development. Doesn't work yet)",
        update=reload_gcode,
        items=[("G17", "XY", "G17: Work in XY Plane"),
               ("G18", "XZ", "G18: Work in XZ Plane"),
               ("G19", "YZ", "G19: Work in YZ Plane"),
               ("G17", "XYZ", "Under development (Doesn't work with GRBL v1.1)"),
               ]
    )

    ##############################################################################
    # Depth settings
    ##############################################################################
    safe_z: FloatProperty(
        name="Safe Z",
        default=5,
        description="Safe Z position (default:5)",
        update=reload_gcode
    )
    step: FloatProperty(
        name="Step Z",
        min=.01,
        default=0.5,
        description="Z Machining depth in one step",
        update=reload_gcode
    )
    depth: FloatProperty(
        name="Total Depth",
        default=1,
        min=0,
        description="Son işleme derinliği",
        update=reload_gcode
    )

    ##############################################################################
    # Speed settings
    ##############################################################################
    feed: IntProperty(
        name="Feed Rate (mm/min)",
        default=60,
        min=30,
        description="Feed rate is the velocity at which the cutter is fed, that is, advanced against "
                    "the workpiece. It is expressed in units of distance per revolution for turning and "
                    "boring (typically inches per revolution [ipr] or millimeters per "
                    "revolution).\nDefault:200",
        update=reload_gcode
    )
    plunge: IntProperty(
        name="Plunge Rate (mm/min)",
        default=50,
        min=10,
        update=reload_gcode,
        description="Plunge rate is the speed at which the router bit is driven down into the "
                    "material when starting a cut and will vary depending on the bit used and the "
                    "material being processed. It is important not to plunge too fast as it is easy "
                    "to damage the tip of the cutter during this operation\ndefault: 100",
    )
    spindle: IntProperty(
        name="Spindle (rpm/min)",
        default=1000,
        min=600,
        update=reload_gcode,
        description="The spindle speed is the rotational frequency of the spindle of the machine, "
                    "measured in revolutions per minute (RPM). The preferred speed is determined by "
                    "working backward from the desired surface speed (sfm or m/min) and "
                    "incorporating the diameter (of workpiece or cutter).\nDefault:1200",
    )

    # #############################################################################
    # Analysis resolution
    # #############################################################################
    round_loca: IntProperty(
        name="Round (Location)",
        default=3,
        min=0,
        max=6,
        update=reload_gcode,
        description="Floating point resolution of location analysis? (default=3)\n"
                    "[0-6] = Rough analysis - Detailed analysis"
    )
    round_circ: IntProperty(
        name="Round (Circle)",
        default=1,
        min=0,
        max=6,
        update=reload_gcode,
        description="Floating point resolution of circular analysis? (default=1)\n"
                    "[0-6] = Rough analysis - Detailed analysis"
    )
    as_line: BoolProperty(
        name="As a Line or Curve",
        update=reload_gcode,
        description="as Line: Let it consist of lines only. Don't use G2-G3 code.\n"
                    "as Curve: Use curves and lines. Use all, including G2-G3."
    )
    yvrla_g23d: IntProperty(
        name="Yuvarla (G2-G3 Koordinat)",
        default=0,
        min=0,
        max=6,
        update=reload_gcode,
        description="G2-G3 koordinatları kaç basamak yuvarlanacak ? (d=0). \n"
                    "[0-6] = Kaba-Detaylı. GRBL v1.1 için 0 değeri gir"
    )
    cmbr_m_lmt: IntProperty(
        name="Çember Merkez Uzaklığı Limiti",
        default=800,
        min=0,
        max=6,
        update=reload_gcode,
        description="Eğri hesaplanırken, radyal merkez çok uzakta çıkarsa \n"
                    "[0-6] = Kaba Hesap - Detaylı hesap"
    )
    # NOTE(review): unused in this chunk as far as visible; kept for compatibility.
    icindeki_tipler: StringProperty()

    def check_curve(self, obj):
        """Return True if *obj* is a Curve made only of POLY/BEZIER splines.

        An empty spline list counts as valid (matches the original
        ``False not in []`` behavior).
        """
        if obj.type != "CURVE":
            return False
        return all(spline.type in ("POLY", "BEZIER") for spline in obj.data.splines)

    @classmethod
    def register(cls):
        Object.ncnc_pr_toolpathconfigs = PointerProperty(
            name="NCNC_PR_ToolpathConfigs Name",
            description="NCNC_PR_ToolpathConfigs Description",
            type=cls)

    @classmethod
    def unregister(cls):
        del Object.ncnc_pr_toolpathconfigs
class NCNC_OT_ToolpathConfigs(Operator):
    """Convert the active object to a curve and include it in CNC machining."""
    bl_idname = "ncnc.toolpathconfigs"
    bl_label = "Convert to Curve"
    bl_description = "Convert to curve for CNC machining"
    bl_options = {'REGISTER'}

    def execute(self, context):
        return self.invoke(context)

    def invoke(self, context, event=None):
        obj = context.active_object

        # Bug fix: check for a missing active object BEFORE touching it —
        # the original called obj.select_set() and read obj attributes first,
        # raising AttributeError when nothing was active.
        if not obj:
            self.report({'WARNING'}, "No Object Selected")
            return {"FINISHED"}

        obj.select_set(True)
        conf = obj.ncnc_pr_toolpathconfigs

        if obj.type != 'CURVE':                     # Not a curve yet
            bpy.ops.object.convert(target='CURVE')  # Try converting it

            if obj.type != 'CURVE':                 # Conversion failed
                self.report({'WARNING'}, f"Cannot convert to curve : {obj.name}")
                return {"CANCELLED"}

        # Curve, but not Bezier/Poly (to be extended later).
        if not conf.check_curve(obj):
            self.report({'INFO'}, "Curve tipi uygun değil : %s" % (obj.name))
            return {"FINISHED"}

        # After conversion, mark the object for CNC machining.
        conf.included = True

        if "nCurve" not in obj.name:
            obj.name = "nCurve." + obj.name

        self.report({'INFO'}, f"Convert to Curve : {obj.name}")
        return {"FINISHED"}
class NCNC_PT_ToolpathConfigs(Panel):
    """Per-object toolpath settings panel for the active object."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "nCNC"
    bl_label = "Toolpath Configs"
    bl_idname = "NCNC_PT_objectconfigs"

    @classmethod
    def poll(cls, context):
        # Only visible while the G-code tool page is active.
        return context.scene.ncnc_pr_head.tool_gcode

    def draw(self, context):
        obj = context.active_object
        layout = self.layout

        if not obj:
            col = layout.column()
            col.label(text="No object selected", icon="CURVE_DATA")
            # Empty labels keep the panel height stable when nothing is selected.
            for i in range(7):
                col.label()
            return

        props = obj.ncnc_pr_toolpathconfigs

        row = layout.row(align=True)
        row.prop(props, "included", text="", icon="CHECKBOX_HLT" if props.included else "CHECKBOX_DEHLT")
        row.enabled = props.check_curve(obj)
        row.prop(obj, "name", text="")
        row.prop(props, "as_line",
                 icon="IPO_CONSTANT" if props.as_line else "IPO_EASE_IN_OUT",
                 icon_only=True)

        # if not props.check_curve(obj):
        #     row.operator("ncnc.toolpathconfigs", text="", icon="CURVE_DATA")

        isok = props.check_curve(obj)

        row = layout.row(align=True)
        if not isok:
            row.operator("ncnc.toolpathconfigs", text="Convert to Curve", icon="CURVE_DATA")
        else:
            row.enabled = props.included  # Greyed out when the type isn't suitable
            row.prop(props, "plane", expand=True)
            # NOTE(review): plane selection is declared "under development";
            # this appears to leave the row permanently disabled — confirm intent.
            row.enabled = False

        col = layout.column(align=True)
        col.enabled = props.included  # Greyed out when the type isn't suitable
        col.prop(props, "safe_z")
        col.prop(props, "step")
        col.prop(props, "depth")

        col = layout.column(align=True)
        col.enabled = props.included  # Greyed out when the type isn't suitable
        col.prop(props, "feed")
        col.prop(props, "plunge")
        col.prop(props, "spindle")
class NCNC_PT_ToolpathConfigsDetails(Panel):
    """Sub-panel with conversion-resolution details for the active curve."""
    bl_idname = "NCNC_PT_tconfigsdetails"
    bl_label = "Detail: Converting"
    bl_region_type = "UI"
    bl_space_type = "VIEW_3D"
    bl_category = "nCNC"
    bl_parent_id = "NCNC_PT_objectconfigs"
    bl_options = {'DEFAULT_CLOSED'}

    def draw(self, context):
        obj = context.active_object
        if not obj:
            return
        conf = obj.ncnc_pr_toolpathconfigs
        if not conf.check_curve(obj):
            return

        layout = self.layout

        rounding = layout.column(align=True)
        rounding.enabled = conf.included  # Greyed out unless the object is included
        rounding.prop(conf, "round_circ", slider=True)
        rounding.prop(conf, "round_loca", slider=True)

        resolution = layout.column(align=True)
        resolution.enabled = conf.included  # Greyed out unless the object is included
        if obj.type == "CURVE":
            resolution.prop(obj.data, "resolution_u", slider=True, text="Resolution Obj General")
            if obj.data.splines.active:
                resolution.prop(obj.data.splines.active, "resolution_u", slider=True, text="Resolution Spline in Obj")
##################################
##################################
##################################
class NCNC_UL_Objects(UIList):
    """Row renderer for the included-objects list."""

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
        obj = item.obj
        # Objects deleted from the scene are shown with a trash icon.
        missing = obj.name not in context.scene.objects.keys()
        row = layout.row()
        row.prop(obj, "name",
                 text="",
                 emboss=False,
                 icon_only=missing,
                 icon="TRASH" if missing else f"OUTLINER_OB_{obj.type}",
                 )
class NCNC_PR_Objects(PropertyGroup):
    """Scene-level collection of objects included in CNC machining."""

    def add_item(self, obj):
        """Add *obj* to the list (curves only); no-op if it is already present."""
        if obj.type != 'CURVE':
            # Non-curves cannot be machined; make sure the flag is cleared.
            obj.ncnc_pr_toolpathconfigs.included = False
            return
        for j, i in enumerate(self.items):
            if obj == i.obj:
                # if obj.select_get():
                #     self.active_item_index = j
                return
        item = self.items.add()
        item.obj = obj
        # Highlight the newly added entry.
        self.active_item_index = len(self.items) - 1

    def remove_item(self, obj):
        """Remove the first list entry that points at *obj*."""
        for j, i in enumerate(self.items):
            if obj == i.obj:
                self.items.remove(j)
                # self.active_item_index = len(self.items) - 1
                break

    def update_active_item_index(self, context):
        """Select and activate the scene object behind the highlighted row."""
        bpy.ops.object.select_all(action='DESELECT')
        obj = self.items[self.active_item_index].obj
        if obj.name not in context.scene.objects.keys():
            # Stale entry: the object was deleted from the scene.
            self.items.remove(self.active_item_index)
            return
        obj.select_set(True)
        context.view_layer.objects.active = obj

    items: CollectionProperty(
        type=NCNC_PR_ToolpathConfigs,
        name="Objects",
        description="All Object Items Collection",
    )
    active_item_index: IntProperty(
        name="Active Item",
        default=-1,
        description="Selected object index in Collection",
        update=update_active_item_index,
    )

    @classmethod
    def register(cls):
        Scene.ncnc_pr_objects = PointerProperty(
            name="NCNC_PR_Objects Name",
            description="NCNC_PR_Objects Description",
            type=cls)

    @classmethod
    def unregister(cls):
        del Scene.ncnc_pr_objects
class NCNC_OT_Objects(Operator):
    """List-management operator + background sync loop for the toolpath list."""
    bl_idname = "ncnc.objects"
    bl_label = "Objects Operator"
    # Bug fix: "\n" was missing after the "( + )" line, gluing two tooltip
    # lines together ("...work( - )...").
    bl_description = "for Selected Object ;\n" \
                     "( + ) : Add the object to the CNC work\n" \
                     "( - ) : Removing the object from CNC work\n" \
                     "(bin) : Delete object"
    bl_options = {'REGISTER', 'UNDO'}

    action: EnumProperty(name="Select Object",
                         items=[("bos", "Select", ""),
                                ("add", "Add", ""),  # fixed label typo "Addt"
                                ("remove", "Remove", ""),
                                ("delete", "Delete", ""),
                                ("up", "Up", ""),
                                ("down", "Down", "")
                                ])

    inloop = True
    delay = 0.2  # 0.5
    _last_time = 0
    start: BoolProperty(default=True)

    def execute(self, context):
        return self.invoke(context, None)

    def invoke(self, context, event):
        props = context.scene.ncnc_pr_objects
        items = props.items
        index = props.active_item_index

        if self.action == "add":
            bpy.context.active_object.ncnc_pr_toolpathconfigs.included = True
            self.report({'INFO'}, "Object Added")
        elif self.action == "remove":
            bpy.context.active_object.ncnc_pr_toolpathconfigs.included = False
            self.report({'INFO'}, "Object Removed")
        elif self.action == "delete":
            self.report({'INFO'}, "Object Deleted")
            bpy.ops.object.delete(use_global=False, confirm=False)
        elif self.action == 'down' and index < len(items) - 1:
            items.move(index, index + 1)
            props.active_item_index += 1
        elif self.action == 'up' and index >= 1:
            items.move(index, index - 1)
            props.active_item_index -= 1
        # ########################### STANDARD
        else:
            if not self.start:
                unregister_modal(self)
                return {'CANCELLED'}
            register_modal(self)
            context.window_manager.modal_handler_add(self)
        # ####################################
        # NOTE(review): the timer is (re)armed for every action path, as in
        # the original — confirm this is intended for non-modal actions.
        return self.timer_add(context)

    def timer_add(self, context):
        """Install the repeating window timer that drives modal()."""
        wm = context.window_manager
        self._timer = wm.event_timer_add(self.delay, window=context.window)
        return {"RUNNING_MODAL"}

    def timer_remove(self, context):
        """Tear down the window timer and finish the operator."""
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
        return {'CANCELLED'}

    def modal(self, context, event):
        # ########################### STANDARD
        if not self.inloop:
            if context.area:
                context.area.tag_redraw()
            return self.timer_remove(context)
        if time.time() - self._last_time < self.delay:
            return {'PASS_THROUGH'}
        self._last_time = time.time()
        # ####################################

        props = context.scene.ncnc_pr_objects

        # Add newly included objects.
        for obj in context.scene.objects:
            if obj.ncnc_pr_toolpathconfigs.included:
                props.add_item(obj)

        # Remove stale / excluded entries.
        # Bug fix: collect first, then remove — the original removed from
        # props.items while iterating it, which skips elements.
        stale = [i.obj for i in props.items
                 if not i.obj
                 or (i.obj.name not in context.scene.objects.keys())
                 or (not i.obj.ncnc_pr_toolpathconfigs.included)]
        for obj in stale:
            props.remove_item(obj)

        if context.area:
            context.area.tag_redraw()

        return {'PASS_THROUGH'}
class NCNC_PT_Objects(Panel):
    """Toolpaths panel: the included-object list plus add/remove/reorder buttons."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "nCNC"
    bl_label = "Toolpaths"  # Included Objects
    bl_idname = "NCNC_PT_objects"
    # bl_parent_id = "NCNC_PT_output"

    @classmethod
    def poll(cls, context):
        return context.scene.ncnc_pr_head.tool_gcode

    def draw(self, context):
        layout = self.layout
        props = context.scene.ncnc_pr_objects

        row = layout.row()

        buttons = row.column(align=True)
        for icon_name, action in (("ADD", "add"), ("REMOVE", "remove"), ("TRASH", "delete")):
            buttons.operator("ncnc.objects", icon=icon_name, text="").action = action
        buttons.separator()
        for icon_name, action in (("TRIA_UP", "up"), ("TRIA_DOWN", "down")):
            buttons.operator("ncnc.objects", icon=icon_name, text="").action = action

        listing = row.column()  # .box()
        listing.template_list(
            "NCNC_UL_Objects",     # type
            "ncnc_ul_objects",     # id
            props,                 # data pointer
            "items",               # propname
            props,                 # active_dataptr
            "active_item_index",   # active_propname
            rows=5,
            type='DEFAULT'
        )

        context.scene.ncnc_pr_convert.template_convert(layout, context=context)
##################################
##################################
##################################
class NCNC_OT_Empty(Operator):
    bl_idname = "ncnc.empty"
    bl_label = ""
    bl_description = ""
    bl_options = {'REGISTER'}

    def invoke(self, context, event=None):
        # No-op placeholder operator; intentionally does nothing.
        return {"CANCELLED"}
"""
Header -> _HT_
Menu -> _MT_
Operator -> _OT_
Panel -> _PT_
UIList -> _UL_
"""
# All add-on classes, registered with Blender in this exact order
# (presumably PropertyGroups before the Operators/Panels that reference
# them -- do not reorder without verifying). unregister() walks this
# list in reverse.
classes = [
    NCNC_OT_Decoder,
    NCNC_OT_Empty,
    NCNC_Prefs,
    NCNC_PR_Head,
    NCNC_PT_Head,
    NCNC_PT_HeadTextDetails,
    NCNC_PR_Texts,
    NCNC_OT_TextsRemove,
    NCNC_OT_TextsOpen,
    NCNC_OT_TextsSave,
    NCNC_PR_Lines,
    NCNC_PR_TextLine,
    NCNC_PR_Text,
    NCNC_OT_Text,
    NCNC_PR_Scene,
    NCNC_OT_Scene,
    NCNC_PT_Scene,
    NCNC_PR_Convert,
    NCNC_OT_Convert,
    NCNC_PR_Connection,
    NCNC_OT_Connection,
    NCNC_PT_Connection,
    NCNC_PR_MessageItem,
    NCNC_PR_Communication,
    NCNC_OT_CommunicationRun,
    NCNC_OT_Communication,
    NCNC_UL_Messages,
    NCNC_OP_Messages,
    NCNC_PT_Communication,
    NCNC_PR_Machine,
    NCNC_OT_Machine,
    NCNC_PT_Machine,
    NCNC_PR_JogController,
    NCNC_OT_JogController,
    NCNC_PT_JogController,
    NCNC_PT_MachineDash,
    NCNC_PT_MachineModes,
    NCNC_PT_MachineDetails,
    NCNC_PT_MachineDetail,
    NCNC_PT_MachineDetailInvert,
    NCNC_PT_MachineDetailAxis,
    NCNC_PT_MachineDetailAxisInvert,
    NCNC_PR_Vision,
    NCNC_OT_Vision,
    NCNC_PT_Vision,
    NCNC_PT_VisionThemes,
    NCNC_PT_VisionThemesGcode,
    NCNC_PT_VisionThemesDash,
    NCNC_PT_VisionThemesMill,
    NCNC_PR_ToolpathConfigs,
    NCNC_OT_ToolpathConfigs,
    NCNC_UL_Objects,
    NCNC_PR_Objects,
    NCNC_OT_Objects,
    NCNC_PT_ToolpathConfigs,
    NCNC_PT_ToolpathConfigsDetails,
    NCNC_PT_Objects,
]
def register():
    """Register every nCNC class with Blender, in list order."""
    for blender_class in classes:
        bpy.utils.register_class(blender_class)
def unregister():
    """Unregister every nCNC class, in reverse registration order."""
    # reversed() iterates backwards without allocating the reversed copy
    # that classes[::-1] would build.
    for blender_class in reversed(classes):
        bpy.utils.unregister_class(blender_class)
if __name__ == "__main__":
    # Allow running the add-on directly from Blender's text editor.
    register()
|
def iteritems(p):
    """Iterate over the (key, value) pairs of *p* on both Python 2 and 3."""
    items_method = getattr(p, 'iteritems', None)  # Python 2 mappings
    if items_method is None:
        items_method = p.items  # Python 3 (items() returns a view)
    return items_method()
def itervalues(p):
    """Iterate over the values of *p* on both Python 2 and 3."""
    values_method = getattr(p, 'itervalues', None)  # Python 2 mappings
    if values_method is None:
        values_method = p.values  # Python 3 (values() returns a view)
    return values_method()
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional
import bunch
import click
from openr.cli.commands import prefix_mgr
from openr.OpenrCtrl import ttypes as ctrl_types
class PrefixMgrCli:
    """Top-level CLI entry point that wires all prefix-manager subcommands."""

    def __init__(self):
        # `prefixmgr` is a class attribute: the @click.group() decorator below
        # replaces the function with a click.Group object at class-body
        # evaluation time, so subcommands can be attached to it here.
        self.prefixmgr.add_command(WithdrawCli().withdraw)
        self.prefixmgr.add_command(AdvertiseCli().advertise)
        self.prefixmgr.add_command(SyncCli().sync)
        self.prefixmgr.add_command(AdvertisedRoutesCli().show)
        self.prefixmgr.add_command(OriginatedRoutesCli().show)

    @click.group()
    @click.pass_context
    def prefixmgr(ctx):  # noqa: B902
        """CLI tool to peek into Prefix Manager module."""
        pass
class WithdrawCli(object):
    """`withdraw` subcommand: stop advertising the given prefixes."""

    @click.command()
    @click.argument("prefixes", nargs=-1)
    @click.option(
        "--prefix-type",
        "-t",
        default="BREEZE",
        help="Type or client-ID associated with prefix.",
    )
    @click.pass_obj
    def withdraw(
        cli_opts: bunch.Bunch, prefixes: List[str], prefix_type: str  # noqa: B902
    ):
        """Withdraw the prefixes being advertised from this node"""
        prefix_mgr.WithdrawCmd(cli_opts).run(prefixes, prefix_type)
class AdvertiseCli(object):
    """`advertise` subcommand: advertise the given prefixes from this node."""

    @click.command()
    @click.argument("prefixes", nargs=-1)
    @click.option(
        "--prefix-type",
        "-t",
        default="BREEZE",
        help="Type or client-ID associated with prefix.",
    )
    @click.option(
        "--forwarding-type",
        default="IP",
        help="Use label forwarding instead of IP forwarding in data path",
    )
    @click.pass_obj
    def advertise(cli_opts, prefixes, prefix_type, forwarding_type):  # noqa: B902
        """Advertise the prefixes from this node with specific type"""
        prefix_mgr.AdvertiseCmd(cli_opts).run(prefixes, prefix_type, forwarding_type)
class SyncCli(object):
    """`sync` subcommand: thin wrapper around prefix_mgr.SyncCmd."""

    @click.command()
    @click.argument("prefixes", nargs=-1)
    @click.option(
        "--prefix-type",
        "-t",
        default="BREEZE",
        help="Type or client-ID associated with prefix.",
    )
    @click.option(
        "--forwarding-type",
        default="IP",
        help="Use label forwarding instead of IP forwarding in data path",
    )
    @click.pass_obj
    def sync(cli_opts, prefixes, prefix_type, forwarding_type):  # noqa: B902
        """Sync the prefixes from this node with specific type"""
        prefix_mgr.SyncCmd(cli_opts).run(prefixes, prefix_type, forwarding_type)
class AdvertisedRoutesCli(object):
    """`advertised-routes` command group.

    The group callback stores the shared display options on the click
    context object; every subcommand reads them back through
    `cli_opts.advertised_routes_options`.
    """

    @click.group("advertised-routes")
    @click.option(
        "--prefix-type",
        "-t",
        help="Filter on source of origination. e.g. RIB, BGP, LINK_MONITOR",
    )
    @click.option(
        "--detail/--no-detail",
        default=False,
        help="Show all details including tags and area-stack",
    )
    @click.option(
        "--tag2name/--no-tag2name",
        default=False,
        help="Translate tag string to human readable name",
    )
    @click.option("--json/--no-json", default=False, help="Output in JSON format")
    @click.pass_context
    def show(
        ctx: bunch.Bunch,  # noqa: B902
        prefix_type: Optional[str],
        detail: bool,
        tag2name: bool,
        json: bool,
    ) -> None:
        """
        Show advertised routes in various stages of policy
        """

        # Set options & arguments in cli_opts
        if ctx.obj is None:
            ctx.obj = bunch.Bunch()
        ctx.obj["advertised_routes_options"] = bunch.Bunch(
            prefix_type=prefix_type,
            detail=detail,
            json=json,
            tag2name=tag2name,
        )

    @show.command("all")
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def all(cli_opts: bunch.Bunch, prefix: List[str]) -> None:  # noqa: B902
        """
        Show routes that this node should be advertising across all areas. This
        is pre-area-policy routes. Note this does not show routes denied by origination policy
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AdvertisedRoutesCmd(cli_opts).run(
            prefix, opts.prefix_type, opts.json, opts.detail
        )

    @show.command("pre-area-policy")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def pre_area_policy(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show pre-policy routes for advertisment of specified area
        but after applying origination, if applicable
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AreaAdvertisedRoutesCmd(cli_opts).run(
            area,
            ctrl_types.RouteFilterType.PREFILTER_ADVERTISED,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )

    @show.command("post-area-policy")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def post_area_policy(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show post-policy routes that are advertisment to specified area
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AreaAdvertisedRoutesCmd(cli_opts).run(
            area,
            ctrl_types.RouteFilterType.POSTFILTER_ADVERTISED,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )

    @show.command("rejected-on-area")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def rejected_on_area(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show routes rejected by area policy on advertisement
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AreaAdvertisedRoutesCmd(cli_opts).run(
            area,
            ctrl_types.RouteFilterType.REJECTED_ON_ADVERTISE,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )

    # --- Deprecated aliases kept so old invocations print a pointer to the
    # --- renamed commands instead of failing with "no such command".

    @show.command("pre-policy")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def pre_policy(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        DEPRECATED. use pre-area-policy
        """
        click.secho("pre-policy is deprecated, use pre-area-policy", fg="red")

    @show.command("post-policy")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def post_policy(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        DEPRECATED. use post-area-policy
        """
        click.secho("post-policy is deprecated, use post-area-policy", fg="red")

    @show.command("rejected")
    @click.argument("area", type=str)
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def rejected(
        cli_opts: bunch.Bunch, area: str, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        DEPRECATED. use rejected_on_area
        """
        click.secho("rejected is deprecated, use rejected-on-area", fg="red")

    @show.command("pre-origination-policy")
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def pre_origination_policy(
        cli_opts: bunch.Bunch, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show pre-origination-policy routes.
        Note: Only displays routes that came with an origination policy.
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AdvertisedRoutesWithOriginationPolicyCmd(cli_opts).run(
            ctrl_types.RouteFilterType.PREFILTER_ADVERTISED,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )

    @show.command("post-origination-policy")
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def post_origination_policy(
        cli_opts: bunch.Bunch, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show post-policy routes that are accepted by origination policy. Only
        displays routes that came with an origination policy
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AdvertisedRoutesWithOriginationPolicyCmd(cli_opts).run(
            ctrl_types.RouteFilterType.POSTFILTER_ADVERTISED,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )

    @show.command("rejected-on-origination")
    @click.argument("prefix", nargs=-1, type=str, required=False)
    @click.pass_obj
    def rejected_on_origination(
        cli_opts: bunch.Bunch, prefix: List[str]  # noqa: B902
    ) -> None:
        """
        Show routes rejected by origination policy
        """

        opts = cli_opts.advertised_routes_options
        prefix_mgr.AdvertisedRoutesWithOriginationPolicyCmd(cli_opts).run(
            ctrl_types.RouteFilterType.REJECTED_ON_ADVERTISE,
            prefix,
            opts.prefix_type,
            opts.json,
            opts.detail,
        )
class OriginatedRoutesCli(object):
    """`originated-routes` command: dump routes originated by this node."""

    @click.command("originated-routes")
    @click.option(
        "--detail/--no-detail",
        default=False,
        help="Show all details including tags and area-stack",
    )
    @click.option(
        "--tag2name/--no-tag2name",
        default=False,
        help="Translate tag string to human readable name",
    )
    @click.pass_obj
    def show(
        cli_opts: bunch.Bunch,  # noqa: B902
        detail: bool,
        tag2name: bool,
    ) -> None:
        """
        Show originated routes configured on this node. Will show all by default
        """

        prefix_mgr.OriginatedRoutesCmd(cli_opts).run(detail, tag2name)
|
#!/bin/python3
# -*- coding: utf-8 -*-
import os
import sys
import time
import getopt
import matplotlib
from Enums import *
from matplotlib import pyplot as plt
from Crawler import Crawler
from NeuralNetwork import NeuralNetwork
from Hyperparameters import Hyperparameters
from Utils import Utils
def getPredefHyperparams():
    """Build the predefined, irace-tuned Hyperparameters for all experiments.

    Returns:
        list: six Hyperparameters objects - three price-forecast
        (regression on the Close feature) configurations followed by three
        binary (Up/Down classification) configurations, labelled
        'Subject 1'..'Subject 3' within each group, in that order.
    """
    # (name, binary_classifier, irace-tuned values) per experiment.
    # The commented optimizer next to each choice is the runner-up kept
    # from the original tuning notes.
    subjects = [
        # --- Forecast (regression) subjects ---
        ('Subject 1', False, dict(
            backwards_samples=16,
            forward_samples=11,
            lstm_layers=1,
            max_epochs=4889,
            patience_epochs_stop=1525,
            patience_epochs_reduce=539,
            reduce_factor=0.01259,
            batch_size=122,
            stateful=False,
            optimizer=Optimizers.RMSPROP,  # Optimizers.ADAM
            use_dense_on_output=False,
            activation_functions=[NodeType.SIGMOID],
            recurrent_activation_functions=[NodeType.TANH],
            layer_sizes=[89],
            dropout_values=[0.03741],
            recurrent_dropout_values=[0.24559],
            bias=[False],
            unit_forget_bias=[True],
            go_backwards=[True],
        )),
        ('Subject 2', False, dict(
            backwards_samples=31,
            forward_samples=7,
            lstm_layers=1,
            max_epochs=3183,
            patience_epochs_stop=1827,
            patience_epochs_reduce=228,
            reduce_factor=0.05265,
            batch_size=46,
            stateful=False,
            optimizer=Optimizers.ADAM,  # Optimizers.RMSPROP
            use_dense_on_output=False,
            activation_functions=[NodeType.SIGMOID],
            recurrent_activation_functions=[NodeType.TANH],
            layer_sizes=[67],
            dropout_values=[0.24864],
            recurrent_dropout_values=[0.15223],
            bias=[True],
            unit_forget_bias=[True],
            go_backwards=[False],
        )),
        ('Subject 3', False, dict(
            backwards_samples=38,
            forward_samples=10,
            lstm_layers=1,
            max_epochs=3002,
            patience_epochs_stop=853,
            patience_epochs_reduce=73,
            reduce_factor=0.06082,
            batch_size=112,
            stateful=False,
            optimizer=Optimizers.ADAM,  # Optimizers.RMSPROP
            use_dense_on_output=False,
            activation_functions=[NodeType.TANH],
            recurrent_activation_functions=[NodeType.RELU],
            layer_sizes=[164],
            dropout_values=[0.23404],
            recurrent_dropout_values=[0.00155],
            bias=[True],
            unit_forget_bias=[True],
            go_backwards=[False],
        )),
        # --- Binary (Up/Down classification) subjects ---
        ('Subject 1', True, dict(
            backwards_samples=37,
            forward_samples=7,
            lstm_layers=1,
            max_epochs=4346,
            patience_epochs_stop=3574,
            patience_epochs_reduce=606,
            reduce_factor=0.09476,
            batch_size=10,
            stateful=True,
            optimizer=Optimizers.ADAM,  # Optimizers.RMSPROP
            use_dense_on_output=True,
            activation_functions=[NodeType.SIGMOID],
            recurrent_activation_functions=[NodeType.TANH],
            layer_sizes=[125],
            dropout_values=[0.19655],
            recurrent_dropout_values=[0.29698],
            bias=[False],
            unit_forget_bias=[False],
            go_backwards=[False],
        )),
        ('Subject 2', True, dict(
            backwards_samples=5,
            forward_samples=7,
            lstm_layers=2,
            max_epochs=2800,
            patience_epochs_stop=692,
            patience_epochs_reduce=634,
            reduce_factor=0.12503,
            batch_size=102,
            stateful=True,
            optimizer=Optimizers.ADAM,  # Optimizers.RMSPROP
            use_dense_on_output=True,
            activation_functions=[NodeType.LINEAR, NodeType.LINEAR],
            recurrent_activation_functions=[NodeType.LINEAR, NodeType.HARD_SIGMOID],
            layer_sizes=[95, 77],
            dropout_values=[0.16891, 0.01028],
            recurrent_dropout_values=[0.24156, 0.03735],
            bias=[True, False],
            unit_forget_bias=[False, True],
            go_backwards=[True, False],
        )),
        ('Subject 3', True, dict(
            backwards_samples=38,
            forward_samples=7,
            lstm_layers=4,
            max_epochs=4116,
            patience_epochs_stop=2418,
            patience_epochs_reduce=869,
            reduce_factor=0.09956,
            batch_size=58,
            stateful=True,
            optimizer=Optimizers.RMSPROP,  # Optimizers.ADAM
            use_dense_on_output=True,
            activation_functions=[NodeType.RELU, NodeType.EXPONENTIAL, NodeType.SIGMOID, NodeType.RELU],
            recurrent_activation_functions=[NodeType.SIGMOID, NodeType.TANH, NodeType.LINEAR, NodeType.SIGMOID],
            layer_sizes=[48, 30, 194, 72],
            dropout_values=[0.06482, 0.18694, 0.19006, 0.27753],
            recurrent_dropout_values=[0.13098, 0.25548, 0.24709, 0.14463],
            bias=[False, True, False, False],
            unit_forget_bias=[False, False, False, True],
            go_backwards=[False, False, False, False],
        )),
    ]
    return [
        _buildPredefHyperparameters(name, binary_classifier, tuned)
        for name, binary_classifier, tuned in subjects
    ]


def _buildPredefHyperparameters(name, binary_classifier, tuned):
    """Assemble one Hyperparameters object from shared fixed settings.

    Args:
        name: human readable label of the experiment.
        binary_classifier: True for Up/Down classification, False for
            Close-price regression; selects features, metrics and loss.
        tuned: dict with the irace-tuned keyword arguments for
            the Hyperparameters constructor.
    """
    feature_group = 0
    if binary_classifier:
        input_features = [Features.UP] + Hyperparameters.getFeatureGroups()[feature_group]
        output_feature = Features.UP
        model_metrics = ['accuracy', 'mean_squared_error']
        loss = 'categorical_crossentropy'
    else:
        input_features = [Features.CLOSE] + Hyperparameters.getFeatureGroups()[feature_group]
        output_feature = Features.CLOSE
        model_metrics = ['R2', 'mean_squared_error', 'mean_absolute_error', 'accuracy', 'cosine_similarity']
        loss = 'mean_squared_error'
    return Hyperparameters(
        name=name,
        binary_classifier=binary_classifier,
        input_features=input_features,
        output_feature=output_feature,
        index_feature='Date',
        normalize=True,
        model_metrics=model_metrics,
        loss=loss,
        train_percent=.8,
        val_percent=.3,
        amount_companies=1,
        shuffle=False,  # fixed for every experiment
        **tuned,
    )
def run(train_model,force_train,eval_model,plot,plot_eval,plot_dataset,blocking_plots,save_plots,restore_checkpoints,download_if_needed,stocks,start_date,end_date,enrich_dataset,analyze_metrics,move_models,all_hyper_for_all_stocks,only_first_hyperparam,add_more_fields_to_hyper,test_date):
    """Run the experiment pipeline for the given stocks.

    Stages, each gated by its flag: dataset download, dataset enrichment,
    training, checkpoint restore, evaluation, metrics analysis and model
    backup. Dates are dd/MM/yyyy strings, or None to use the defaults below.
    """
    # NEVER_CRAWL environment variable can veto any download attempt;
    # several truthy spellings are accepted (including Portuguese).
    never_crawl=os.getenv('NEVER_CRAWL',default='False')
    never_crawl=never_crawl.lower() in ('true', '1', 't', 'y', 'yes', 'sim', 'verdade')
    crawler=Crawler()
    if save_plots:
        matplotlib.use('Agg')  # non-interactive backend: figures are only written to files
    print('Using plot id: ',NeuralNetwork.SAVED_PLOTS_ID)
    print('Running for stocks: {}'.format(','.join(stocks)))
    # Default/validate the date window and the evaluation split date.
    if start_date is None:
        start_date=Utils.FIRST_DATE
    else:
        Utils.assertDateFormat(start_date)
    if end_date is None:
        end_date='07/05/2021'
    else:
        Utils.assertDateFormat(end_date)
    if test_date is None:
        test_date='10/03/2021'
    else:
        Utils.assertDateFormat(test_date)
    # Dates reversed into digit-only strings used inside the csv filenames.
    start_date_formated_for_file=''.join(Utils.extractNumbersFromDate(start_date,reverse=True))
    end_date_formated_for_file=''.join(Utils.extractNumbersFromDate(end_date,reverse=True))
    filepaths={}
    for stock in stocks:
        filename='{}_daily_{}-{}.csv'.format(stock,start_date_formated_for_file,end_date_formated_for_file)
        filepath=crawler.getDatasetPath(filename)
        filepaths[stock]=filepath
        # Download only when the dataset is missing, downloading is allowed,
        # and NEVER_CRAWL did not veto it.
        if not Utils.checkIfPathExists(filepath) and download_if_needed and not never_crawl:
            crawler.downloadStockDailyData(stock,filename,start_date=start_date,end_date=end_date)
            # crawler.downloadStockDataCustomInterval(stock,filename,data_range='max') # just example
    hyperparameters_tmp=getPredefHyperparams()
    if only_first_hyperparam:
        hyperparameters_tmp=[hyperparameters_tmp[0]]
    if not all_hyper_for_all_stocks: # then create a circular"ish" list, only works when running all stocks together, otherwise it will always use the first
        if len(stocks) > len(hyperparameters_tmp):
            for i in range(len(stocks)-len(hyperparameters_tmp)):
                hyperparameters_tmp.append(hyperparameters_tmp[i%len(hyperparameters_tmp)].copy())
    hyperparameters={}
    for i,stock in enumerate(stocks):
        # Candidate extra input features, appended one at a time when
        # add_more_fields_to_hyper is set (one model variant per field).
        new_input_fields=('fast_moving_avg','slow_moving_avg','Volume','Open','High','Low','Adj Close')
        if all_hyper_for_all_stocks:
            hyperparameters[stock]=[]
            for hyper in hyperparameters_tmp:
                # NOTE(review): setName mutates the shared hyper object inside the
                # per-stock loop, so the "from/to" suffix appears to be appended
                # once per stock - confirm this is intended.
                if hyper.name != '':
                    hyper.setName('{} - from: {} to: {}'.format(hyper.name,start_date,end_date))
                else:
                    hyper.setName('manual tunning - from: {} to: {}'.format(start_date,end_date))
                hyperparameters[stock].append(hyper.copy())
                if add_more_fields_to_hyper:
                    for new_input_field in new_input_fields:
                        new_hyperparameters=hyper.copy()
                        new_hyperparameters.input_features.append(new_input_field)
                        new_hyperparameters.genAndSetUuid()
                        hyperparameters[stock].append(new_hyperparameters)
        else:
            # One hyperparameter set per stock (circular list built above).
            if hyperparameters_tmp[i].name != '':
                hyperparameters_tmp[i].setName('{} - from: {} to: {}'.format(hyperparameters_tmp[i].name,start_date,end_date))
            else:
                hyperparameters_tmp[i].setName('manual tunning - from: {} to: {}'.format(start_date,end_date))
            hyperparameters[stock]=[hyperparameters_tmp[i]]
            if add_more_fields_to_hyper:
                for new_input_field in new_input_fields:
                    new_hyperparameters=hyperparameters[stock][-1].copy()
                    new_hyperparameters.input_features.append(new_input_field)
                    new_hyperparameters.genAndSetUuid()
                    hyperparameters[stock].append(new_hyperparameters)
    hyperparameters_tmp=[]  # drop the temporary list
    if enrich_dataset:
        for stock in stocks:
            NeuralNetwork.enrichDataset(filepaths[stock])
    if train_model or force_train:
        for stock in stocks:
            # build and train
            for hyperparameter in hyperparameters[stock]:
                neuralNetwork=NeuralNetwork(hyperparameter,stock_name=stock,verbose=True)
                # Skip training when a trained model already exists, unless forced.
                if not neuralNetwork.checkTrainedModelExists() or force_train:
                    neuralNetwork.loadDataset(filepaths[stock],plot=plot_dataset,blocking_plots=blocking_plots,save_plots=save_plots)
                    neuralNetwork.buildModel(plot_model_to_file=plot)
                    neuralNetwork.train()
                    neuralNetwork.eval(plot=plot,plot_training=plot,blocking_plots=blocking_plots,save_plots=save_plots)
                    neuralNetwork.save()
                    neuralNetwork.destroy()
    if restore_checkpoints:
        NeuralNetwork.restoreAllBestModelsCPs() # restore the best models
    if eval_model:
        for stock in stocks:
            # load
            for hyperparameter in hyperparameters[stock]:
                neuralNetwork=NeuralNetwork(hyperparameter,stock_name=stock,verbose=True)
                neuralNetwork.load()
                neuralNetwork.loadTestDataset(filepaths[stock],from_date=test_date,blocking_plots=blocking_plots,save_plots=save_plots)
                neuralNetwork.eval(plot=(plot or plot_eval),print_prediction=True,blocking_plots=blocking_plots,save_plots=save_plots)
                neuralNetwork.destroy()
    if analyze_metrics:
        NeuralNetwork.runPareto(use_ok_instead_of_f1=True,plot=plot,blocking_plots=blocking_plots,save_plots=save_plots,label='{}-{}'.format(start_date_formated_for_file,end_date_formated_for_file))
    if move_models:
        print('Backing up models...',end='')
        paths_to_backup=Utils.getFolderPathsThatMatchesPattern(NeuralNetwork.MODELS_PATH,r'[a-zA-Z0-9]*_.*\.(h5|json|bin)')
        for path_to_backup in paths_to_backup:
            Utils.moveFile(path_to_backup,Utils.joinPath(NeuralNetwork.BACKUP_MODELS_PATH, Utils.filenameFromPath(path_to_backup,get_extension=True)))
        print('OK!')
    # Final figure housekeeping: close the dangling figure, or show everything
    # at once when running interactively without saving.
    if not blocking_plots or save_plots:
        plt.clf()
        plt.cla()
        plt.close() # delete the last and empty figure
    if not blocking_plots and not save_plots:
        plt.show()
def main(argv):
    """Parse command-line options and launch training/evaluation runs.

    Builds the default stock universe, parses getopt-style flags into
    configuration variables, applies a default configuration when no
    functional argument (train/eval/analyze) is given, and finally calls
    run() once per stock (or once for all stocks together).
    """
    # Universe of tickers used when no --stock argument is provided.
    all_known_stocks=[
        'GOOG','AMD','CSCO','TSLA','AAPL', # international companies
        'T','IBM', # dividend aristocrats
        'BTC-USD', # crypto currencies
        'BRL=X', # currency exchange rate
        r'%5EDJI',r'%5EBVSP', # stock market indexes
        'CESP3.SA','CPLE6.SA','CSMG3.SA','ENBR3.SA','TRPL4.SA' # brazilian stable companies
    ]
    python_exec_name=Utils.getPythonExecName()
    # Usage text printed for -h/--help and on argument errors.
    help_str='main.py\n\t[-h | --help]\n\t[-t | --train]\n\t[--force-train]\n\t[-e | --eval]\n\t[-p | --plot]\n\t[--plot-eval]\n\t[--plot-dataset]\n\t[--blocking-plots]\n\t[--save-plots]\n\t[--force-no-plots]\n\t[--do-not-restore-checkpoints]\n\t[--do-not-download]\n\t[--stock <stock-name>]\n\t\t*default: all\n\t[--start-date <dd/MM/yyyy>]\n\t[--end-date <dd/MM/yyyy>]\n\t[--test-date <dd/MM/yyyy>]\n\t[--enrich-dataset]\n\t[--clear-plots-models-and-datasets]\n\t[--analyze-metrics]\n\t[--move-models-to-backup]\n\t[--restore-backups]\n\t[--dummy]\n\t[--run-all-stocks-together]\n\t[--use-all-hyper-on-all-stocks] *warning: heavy\n\t[--only-first-hyperparam]\n\t[--do-not-test-hyperparams-with-more-fields]'
    help_str+='\n\n\t\t Example for testing datasets: '
    help_str+=r"""
    {python} main.py --dummy --clear-plots-models-and-datasets \
    echo -e "2018\n\n" >> log.txt; \
    {python} main.py --train --eval --plot --plot-eval --save-plots --enrich-dataset --start-date 01/01/2018 --use-all-hyper-on-all-stocks --analyze-metrics --move-models-to-backup >> log.txt; \
    echo -e "\n\n\n\n2015\n\n" >> log.txt; \
    {python} main.py --train --eval --plot --plot-eval --save-plots --enrich-dataset --start-date 01/01/2015 --use-all-hyper-on-all-stocks --analyze-metrics --move-models-to-backup >> log.txt; \
    echo -e "\n\n\n\nALL\n\n" >> log.txt; \
    {python} main.py --train --eval --plot --plot-eval --save-plots --enrich-dataset --use-all-hyper-on-all-stocks --analyze-metrics --move-models-to-backup >> log.txt \
    {python} main.py --dummy --restore-backups >> log.txt; \
    echo -e "\n\n\nDONE\n" >> log.txt
    """.format(python=python_exec_name) # FAST RUN: --force-train -e -p --plot-eval --enrich-dataset --start-date 01/01/2018 --stock GOOG --clear-plots-models-and-datasets --analyze-metrics --only-first-hyperparam --do-not-test-hyperparams-with-more-fields
    used_args=[]
    # args vars
    train_model=False
    force_train=False
    eval_model=False
    plot=False
    plot_eval=False
    plot_dataset=False
    blocking_plots=False
    save_plots=False
    force_no_plots=False
    restore_checkpoints=True
    download_if_needed=True
    start_date=None
    end_date=None
    enrich_dataset=False
    analyze_metrics=False
    move_models=False
    dummy=False
    run_stocks_together=False
    all_hyper_for_all_stocks=False
    only_first_hyperparam=False
    add_more_fields_to_hyper=True
    test_date=None
    stocks=[]
    try:
        opts, _ = getopt.getopt(argv,'htep',['help','train','force-train','eval','plot','plot-eval','plot-dataset','blocking-plots','save-plots','force-no-plots','do-not-restore-checkpoints','do-not-download','stock=','start-date=','end-date=','test-date=','enrich-dataset','clear-plots-models-and-datasets','analyze-metrics','move-models-to-backup','restore-backups','dummy','run-all-stocks-together','use-all-hyper-on-all-stocks','only-first-hyperparam','do-not-test-hyperparams-with-more-fields'])
    except getopt.GetoptError:
        print ('ERROR PARSING ARGUMENTS, try to use the following:\n\n')
        print (help_str)
        sys.exit(2)
    # Translate each flag into its configuration variable. The leading
    # '--'/'-' is stripped so short and long forms share the same branch.
    for opt, arg in opts:
        opt=Utils.removeStrPrefix(Utils.removeStrPrefix(opt,'--'),'-')
        used_args.append(opt)
        if opt in ('h','help'):
            print (help_str)
            sys.exit()
        elif opt in ('t','train'):
            train_model=True
        elif opt == 'force-train':
            force_train=True
        elif opt in ('e','eval'):
            eval_model=True
        elif opt in ('p','plot'):
            plot=True
        elif opt == 'use-all-hyper-on-all-stocks':
            all_hyper_for_all_stocks=True
        elif opt == 'run-all-stocks-together':
            run_stocks_together=True
        elif opt == 'plot-eval':
            plot_eval=True
        elif opt == 'plot-dataset':
            plot_dataset=True
        elif opt == 'blocking-plots':
            blocking_plots=True
        elif opt == 'save-plots':
            save_plots=True
        elif opt == 'force-no-plots':
            force_no_plots=True
        elif opt == 'do-not-test-hyperparams-with-more-fields':
            add_more_fields_to_hyper=False
        elif opt == 'do-not-restore-checkpoints':
            restore_checkpoints=False
        elif opt == 'do-not-download':
            download_if_needed=False
        elif opt == 'stock':
            stocks.append(arg.strip())
        elif opt == 'start-date':
            start_date=arg.strip()
        elif opt == 'end-date':
            end_date=arg.strip()
        elif opt == 'test-date':
            test_date=arg.strip()
        elif opt == 'enrich-dataset':
            enrich_dataset=True
        elif opt == 'only-first-hyperparam':
            only_first_hyperparam=True
        elif opt == 'clear-plots-models-and-datasets':
            # Destructive maintenance action: wipes logs, saved models,
            # saved plots and datasets (keeps the example dataset).
            Utils.deleteFile('log.txt')
            print('Clearing contents of: {}'.format(NeuralNetwork.MODELS_PATH))
            Utils.deleteFolderContents(NeuralNetwork.MODELS_PATH)
            print('Clearing contents of: {}'.format(NeuralNetwork.SAVED_PLOTS_PATH))
            Utils.deleteFolderContents(NeuralNetwork.SAVED_PLOTS_PATH)
            print('Clearing contents of: {}'.format(Crawler.DATASET_PATH))
            Utils.deleteFolderContents(Crawler.DATASET_PATH,['shampoo_example_dataset.csv'])
        elif opt == 'analyze-metrics':
            analyze_metrics=True
        elif opt == 'move-models-to-backup':
            move_models=True
        elif opt == 'restore-backups':
            # Move previously backed up model artifacts back into the live
            # models folder.
            print('Restoring backups...',end='')
            paths_to_restore=Utils.getFolderPathsThatMatchesPattern(NeuralNetwork.BACKUP_MODELS_PATH,r'[a-zA-Z0-9]*_.*\.(h5|json|bin)')
            for path_to_restore in paths_to_restore:
                Utils.moveFile(path_to_restore,Utils.joinPath(NeuralNetwork.MODELS_PATH, Utils.filenameFromPath(path_to_restore,get_extension=True)))
            print('OK!')
        elif opt == 'dummy':
            dummy=True
    # --dummy performs only the maintenance side effects above, then exits.
    if dummy:
        sys.exit(0)
    if len(stocks)==0:
        for stock in all_known_stocks:
            stocks.append(stock)
    # "Functional" arguments actually cause work; without any of them a
    # default train+eval+plot configuration is applied below.
    functional_args=('analyze-metrics','train','force-train','eval')
    if 'analyze-metrics' in used_args and not any(i in used_args for i in functional_args[1:]):
        print('Running only analyze metrics')
        run_stocks_together=True
    if len(opts) == 0 or not any(i in used_args for i in functional_args):
        train_model=True
        force_train=False
        eval_model=True
        analyze_metrics=False
        move_models=False
        # Only override plotting/download settings the user did not set
        # explicitly.
        if 'plot' not in used_args:
            plot=True
        if 'plot-eval' not in used_args:
            plot_eval=False
        if 'plot-dataset' not in used_args:
            plot_dataset=False
        if 'blocking-plots' not in used_args:
            blocking_plots=False
        if 'do-not-restore-checkpoints' not in used_args:
            restore_checkpoints=True
        if 'do-not-download' not in used_args:
            download_if_needed=True
        if 'enrich-dataset' not in used_args:
            enrich_dataset=True
        if 'save-plots' not in used_args:
            save_plots=False
        print('No functional arguments were found, using defaults:')
        print('\tcmd: python3 main.py --train --eval --plot')
        print('\ttrain_model:',train_model)
        print('\tforce_train:',force_train)
        print('\teval_model:',eval_model)
        print('\tplot:',plot)
        print('\tplot_eval:',plot_eval)
        print('\tplot_dataset:',plot_dataset)
        print('\tblocking_plots:',blocking_plots)
        print('\tforce_no_plots:',force_no_plots)
        print('\trestore_checkpoints:',restore_checkpoints)
        print('\tdownload_if_needed:',download_if_needed)
        print('\tenrich_dataset:',enrich_dataset)
        print('\tsave_plots:',save_plots)
        print('\tstocks:',stocks)
        print('\tstart_date:',start_date)
        print('\tend_date:',end_date)
        print('\ttest_date:',test_date)
        print('\tanalyze_metrics:',analyze_metrics)
        print('\tmove_models:',move_models)
        print('\trun_stocks_together:',run_stocks_together)
        print('\tall_hyper_for_all_stocks:',all_hyper_for_all_stocks)
        print('\tonly_first_hyperparam:',only_first_hyperparam)
        print('\tadd_more_fields_to_hyper:',add_more_fields_to_hyper)
    # When running all stocks together, wrap the list so the loop below
    # makes a single run() call with every stock.
    if run_stocks_together:
        stocks=[stocks]
    for stock in stocks:
        if type(stock) is not list:
            stocks_to_run=[stock]
        else:
            stocks_to_run=stock
        run(train_model,force_train,eval_model,plot and not force_no_plots,plot_eval and not force_no_plots,plot_dataset and not force_no_plots,blocking_plots,save_plots,restore_checkpoints,download_if_needed,stocks_to_run,start_date,end_date,enrich_dataset,analyze_metrics,move_models,all_hyper_for_all_stocks,only_first_hyperparam,add_more_fields_to_hyper,test_date)
if __name__ == '__main__':
    # Time the full run and report it in a human-readable form.
    _started_at = time.time()
    main(sys.argv[1:])
    _elapsed = time.time() - _started_at
    print('\n\nTotal run time is {}'.format(Utils.timestampByExtensive(_elapsed)))
|
from app import db
from flask_dance.consumer.backend.sqla import OAuthConsumerMixin, SQLAlchemyBackend
from flask_login import current_user
from app import blueprint
class User(db.Model):
    """Application user row, also serving as the Flask-Login user object."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(30), unique=True)
    email = db.Column(db.String, unique=True)
    score = db.Column(db.Integer, default=0)
    solved = db.Column(db.String(400))
    lastSubmit = db.Column(db.DateTime)

    def __init__(self, username, email, solved):
        self.username = username
        self.email = email
        self.solved = solved

    def __repr__(self):
        return '<User %r>' % self.username

    # Flask-Login protocol: every persisted user is a real, active account.
    @property
    def is_authenticated(self):
        return True

    @property
    def is_active(self):
        return True

    @property
    def is_anonymous(self):
        return False

    def get_id(self):
        """Return the user's primary key as a string (Flask-Login contract)."""
        return str(self.id)

    def get_chal(self):
        """Return solved challenge ids; `solved` is CSV with a trailing comma."""
        return [int(chal_id) for chal_id in self.solved.split(',')[:-1]]
class OAuth(OAuthConsumerMixin, db.Model):
    # OAuth token storage row, linked back to the owning local User.
    user_id = db.Column(db.Integer, db.ForeignKey(User.id))
    user = db.relationship(User)
# Persist OAuth tokens via SQLAlchemy; user_required=False presumably lets
# tokens be stored before a local User row exists — verify against the
# login flow. NOTE(review): flask_dance.consumer.backend is deprecated in
# newer Flask-Dance releases (moved to flask_dance.consumer.storage), so a
# pinned dependency version seems required — confirm.
blueprint.backend = SQLAlchemyBackend(OAuth, db.session, user=current_user, user_required=False)
class Challenges(db.Model):
    """A single CTF challenge: statement, flag and point value."""
    __tablename__ = 'challenges'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), unique=True)
    category = db.Column(db.String(80))
    content = db.Column(db.Text)  # challenge statement / body text
    flag = db.Column(db.String(40))  # expected flag submission
    value = db.Column(db.String(20))  # points awarded (stored as a string)
    def __init__(self, name, category, content, flag, value):
        self.name = name
        self.category = category
        self.content = content
        self.flag = flag
        self.value = value
    def __repr__(self):
        return '<Challenges %r>' % self.name
|
from typing import List
class Solution:
    def merge(self, intervals: List[List[int]]) -> List[List[int]]:
        """Merge all overlapping intervals and return the merged list.

        Intervals are sorted by start point; each one either extends the
        last merged interval or opens a new one.
        """
        merged: List[List[int]] = []
        for lo, hi in sorted(intervals):
            if not merged or lo > merged[-1][1]:
                # Disjoint from everything collected so far: open a new interval.
                merged.append([lo, hi])
            else:
                # Overlaps (or touches) the previous interval: extend it.
                merged[-1][1] = max(merged[-1][1], hi)
        return merged
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import numpy as np
import pytest
from gluonts.model.renewal._predictor import (
DeepRenewalProcessSampleOutputTransform,
)
@pytest.mark.parametrize(
    "input, expected",
    [
        # Each case feeds raw (inter-arrival, size) sample pairs and lists
        # the dense per-timestep demand sequence the transform should emit.
        (
            [[[[3, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]]]],
            [[[0, 0, 3, 5, 0, 4, 0]]],
        ),
        (
            # First inter-arrival time points to the last timestep.
            [[[[7, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]]]],
            [[[0, 0, 0, 0, 0, 0, 3]]],
        ),
        (
            # Demand lands on the very first timestep.
            [[[[1, 9, 2, 3, 1, 1, 1], [14, 5, 4, 1, 1, 1, 1]]]],
            [[[14, 0, 0, 0, 0, 0, 0]]],
        ),
        (
            # Inter-arrival time beyond the horizon: all-zero output.
            [[[[8, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]]]],
            [[[0, 0, 0, 0, 0, 0, 0]]],
        ),
        (
            # Two samples in one batch element.
            [
                [
                    [[3, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]],
                    [[3, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]],
                ]
            ],
            [[[0, 0, 3, 5, 0, 4, 0], [0, 0, 3, 5, 0, 4, 0]]],
        ),
        (
            # Two batch elements, one sample each.
            [
                [[[3, 1, 2, 3, 1, 1, 1], [3, 5, 4, 1, 1, 1, 1]]],
                [[[3, 2, 1, 1, 1, 1, 1], [6, 7, 8, 9, 1, 1, 1]]],
            ],
            [[[0, 0, 3, 5, 0, 4, 0]], [[0, 0, 6, 0, 7, 8, 9]]],
        ),
    ],
)
def test_output_transform(input, expected):
    """Check the sample output transform expands (inter-arrival, size) pairs
    into dense demand sequences with matching values and shape."""
    expected = np.array(expected)
    tf = DeepRenewalProcessSampleOutputTransform()
    # The transform ignores the (empty) data dict argument here.
    out = tf({}, np.array(input))
    assert np.allclose(out, expected)
    assert out.shape == expected.shape
|
from aiogram.dispatcher.filters import BoundFilter
from aiogram import types
class IsPrivate(BoundFilter):
    """aiogram filter that passes only for messages from private chats."""

    async def check(self, message: types.Message):
        return message.chat.type == types.ChatType.PRIVATE
|
from .connect import ConnectAPI
from .connect import AsyncConsumer
from .metric import Metric
from .resource import Resource
from .webhooks import Webhook
|
# Scan period — presumably in seconds; TODO confirm units against the consumer.
scan_time = 0.6
# Record/PV name prefix (looks EPICS-style) — verify against the IOC/device config.
prefix = 'NIH:TEMP'
|
"""Base class for August entity."""
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_time_interval
class AugustSubscriberMixin:
    """Multiplex one shared polling interval across many device callbacks."""

    def __init__(self, hass, update_interval):
        """Initialize bookkeeping; polling starts with the first subscriber."""
        super().__init__()
        self._hass = hass
        self._update_interval = update_interval
        self._subscriptions = {}
        self._unsub_interval = None

    @callback
    def async_subscribe_device_id(self, device_id, update_callback):
        """Register update_callback for device_id, starting the timer if needed."""
        if not self._subscriptions:
            # First subscriber: lazily start the shared refresh timer.
            self._unsub_interval = async_track_time_interval(
                self._hass, self._async_refresh, self._update_interval
            )
        self._subscriptions.setdefault(device_id, []).append(update_callback)

    @callback
    def async_unsubscribe_device_id(self, device_id, update_callback):
        """Drop update_callback for device_id, cancelling the timer when unused."""
        device_callbacks = self._subscriptions[device_id]
        device_callbacks.remove(update_callback)
        if not device_callbacks:
            del self._subscriptions[device_id]
        if not self._subscriptions:
            # Last subscriber gone: stop polling entirely.
            self._unsub_interval()
            self._unsub_interval = None

    @callback
    def async_signal_device_id_update(self, device_id):
        """Invoke every callback registered for device_id (no-op if none)."""
        registered = self._subscriptions.get(device_id)
        if not registered:
            return
        for registered_callback in registered:
            registered_callback()
|
#%%
import numpy as np
from kdg.utils import generate_gaussian_parity, generate_ellipse, generate_spirals, generate_sinewave, generate_polynomial
from kdg.utils import plot_2dsim
from kdg import kdf
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from scipy.io import savemat, loadmat
# %%
# Build a 6x5 grid: row 0 = simulated datasets, row 1 = true posteriors,
# rows 2-3 = RF/KDF posteriors, rows 4-5 = NN/KDN posteriors; one column
# per simulation (Gaussian XOR, spiral, circle, sinewave, polynomial).
# NOTE(review): 1e4 is a float — the kdg generators appear to accept it,
# but an int literal would be safer; confirm.
n_samples = 1e4
X, y = {}, {}
#%%
# Generate one sample set per simulation type.
X['gxor'], y['gxor'] = generate_gaussian_parity(n_samples)
X['spiral'], y['spiral'] = generate_spirals(n_samples)
X['circle'], y['circle'] = generate_ellipse(n_samples)
X['sine'], y['sine'] = generate_sinewave(n_samples)
X['poly'], y['poly'] = generate_polynomial(n_samples, a=[1,3])
#%%
sns.set_context('talk')
fig, ax = plt.subplots(6,5, figsize=(40,48), sharex=True)
title_size = 45
ticksize = 30
# ---- row 0: scatter plots of each simulated dataset ----
plot_2dsim(X['gxor'], y['gxor'], ax=ax[0][0])
ax[0][0].set_ylabel('Simulation Data', fontsize=title_size-5)
ax[0][0].set_xlim([-2,2])
ax[0][0].set_ylim([-2,2])
ax[0][0].set_xticks([])
ax[0][0].set_yticks([-2,-1,0,1,2])
ax[0][0].tick_params(labelsize=ticksize)
ax[0][0].set_title('Gaussian XOR', fontsize=title_size)
plot_2dsim(X['spiral'], y['spiral'], ax=ax[0][1])
ax[0][1].set_xlim([-2,2])
ax[0][1].set_ylim([-2,2])
ax[0][1].set_xticks([])
ax[0][1].set_yticks([])
ax[0][1].tick_params(labelsize=ticksize)
ax[0][1].set_title('Spiral', fontsize=title_size)
plot_2dsim(X['circle'], y['circle'], ax=ax[0][2])
ax[0][2].set_xlim([-2,2])
ax[0][2].set_ylim([-2,2])
ax[0][2].set_xticks([])
ax[0][2].set_yticks([])
ax[0][2].tick_params(labelsize=ticksize)
ax[0][2].set_title('Circle', fontsize=title_size)
plot_2dsim(X['sine'], y['sine'], ax=ax[0][3])
ax[0][3].set_xlim([-2,2])
ax[0][3].set_ylim([-2,2])
ax[0][3].set_xticks([])
ax[0][3].set_yticks([])
ax[0][3].tick_params(labelsize=ticksize)
ax[0][3].set_title('Sinewave', fontsize=title_size)
plot_2dsim(X['poly'], y['poly'], ax=ax[0][4])
ax[0][4].set_xlim([-2,2])
ax[0][4].set_ylim([-2,2])
ax[0][4].set_xticks([])
ax[0][4].set_yticks([])
ax[0][4].tick_params(labelsize=ticksize)
ax[0][4].set_title('Polynomial', fontsize=title_size)
# ---- row 1: true posteriors, read from precomputed CSVs ----
#define grids
p = np.arange(-2, 2, step=0.01)
q = np.arange(-2, 2, step=0.01)
xx, yy = np.meshgrid(p, q)
# get true posterior
# Each CSV holds a 200x200 posterior over the central region; it is
# embedded into a 400x400 grid whose outer band defaults to 0.5
# (maximum uncertainty). Flips/complements below align each dataset's
# CSV orientation with the imshow axes — verify against the CSV writers.
tp_df = pd.read_csv("true_posterior/Gaussian_xor_pdf.csv")
proba_true = 0.5*np.ones((400, 400))
tmp = np.array([tp_df["posterior"][x] for x in range(40000)])
tmp = tmp.reshape(200, 200)
proba_true[100:300, 100:300] = tmp
ax0 = ax[1][0].imshow(
    proba_true,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#ax[1][0].set_title("True Class Posteriors", fontsize=24)
ax[1][0].set_aspect("equal")
ax[1][0].tick_params(labelsize=ticksize)
ax[1][0].set_yticks([-2,-1,0,1,2])
ax[1][0].set_xticks([])
ax[1][0].set_ylabel('True Posteriors',fontsize=title_size-5)
tp_df = pd.read_csv("true_posterior/spiral_pdf.csv")
proba_true = 0.5*np.ones((400, 400))
tmp = np.array([tp_df["posterior"][x] for x in range(40000)])
tmp = tmp.reshape(200, 200)
proba_true[100:300, 100:300] = 1 - tmp
ax0 = ax[1][1].imshow(
    np.flip(proba_true, axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#ax[1][1].set_title("True Class Posteriors", fontsize=24)
ax[1][1].set_aspect("equal")
ax[1][1].tick_params(labelsize=ticksize)
ax[1][1].set_yticks([])
ax[1][1].set_xticks([])
tp_df = pd.read_csv("true_posterior/ellipse_pdf.csv")
proba_true = 0.5*np.ones((400, 400))
tmp = np.array([tp_df["posterior"][x] for x in range(40000)])
tmp = tmp.reshape(200, 200)
proba_true[100:300, 100:300] = tmp
ax0 = ax[1][2].imshow(
    proba_true,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#ax[1][2].set_title("True Class Posteriors", fontsize=24)
ax[1][2].set_aspect("equal")
ax[1][2].tick_params(labelsize=ticksize)
ax[1][2].set_yticks([])
ax[1][2].set_xticks([])
tp_df = pd.read_csv("true_posterior/sinewave_pdf.csv")
proba_true = 0.5*np.ones((400, 400))
tmp = np.array([tp_df["posterior"][x] for x in range(40000)])
tmp = np.flip(tmp.reshape(200, 200),axis=0)
proba_true[100:300, 100:300] = tmp
ax0 = ax[1][3].imshow(
    proba_true,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#ax[1][3].set_title("True Class Posteriors", fontsize=24)
ax[1][3].set_aspect("equal")
ax[1][3].tick_params(labelsize=ticksize)
ax[1][3].set_yticks([])
ax[1][3].set_xticks([])
tp_df = pd.read_csv("true_posterior/polynomial_pdf.csv")
proba_true = 0.5*np.ones((400, 400))
tmp = np.array([tp_df["posterior"][x] for x in range(40000)])
tmp = np.flip(tmp.reshape(200, 200),axis=0)
proba_true[100:300, 100:300] = tmp
ax0 = ax[1][4].imshow(
    proba_true,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#ax[1][4].set_title("True Class Posteriors", fontsize=24)
ax[1][4].set_aspect("equal")
ax[1][4].tick_params(labelsize=ticksize)
ax[1][4].set_yticks([])
ax[1][4].set_xticks([])
# ---- rows 2-3: RF vs KDF posteriors (loaded from .mat result files) ----
# Gaussian XOR column
df = loadmat('kdf_experiments/results/gxor_plot_data.mat')
ax1 = ax[2][0].imshow(
    df['posterior_rf'],
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[2][0].set_ylabel("RF Posteriors", fontsize=title_size-5)
ax[2][0].set_aspect("equal")
ax[2][0].tick_params(labelsize=ticksize)
ax[2][0].set_yticks([-2,-1,0,1,2])
ax[2][0].set_xticks([])
ax1 = ax[3][0].imshow(
    df['posterior_kdf'],
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[3][0].set_ylabel('KDF Posteriors', fontsize=title_size-5)
ax[3][0].set_aspect("equal")
ax[3][0].tick_params(labelsize=ticksize)
ax[3][0].set_yticks([-2,-1,0,1,2])
ax[3][0].set_xticks([])
# Spiral column (complemented and flipped to match the true-posterior view)
df = loadmat('kdf_experiments/results/spiral_plot_data.mat')
ax1 = ax[2][1].imshow(
    1-np.flip(df['posterior_rf'],axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[2][1].set_aspect("equal")
ax[2][1].tick_params(labelsize=ticksize)
ax[2][1].set_yticks([])
ax[2][1].set_xticks([])
ax1 = ax[3][1].imshow(
    1-np.flip(df['posterior_kdf'],axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[3][1].set_aspect("equal")
ax[3][1].tick_params(labelsize=ticksize)
ax[3][1].set_yticks([])
ax[3][1].set_xticks([])
# Circle column
df = loadmat('kdf_experiments/results/circle_plot_data.mat')
ax1 = ax[2][2].imshow(
    df['posterior_rf'],
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[2][2].set_aspect("equal")
ax[2][2].tick_params(labelsize=ticksize)
ax[2][2].set_yticks([])
ax[2][2].set_xticks([])
ax1 = ax[3][2].imshow(
    df['posterior_kdf'],
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[3][2].set_aspect("equal")
ax[3][2].tick_params(labelsize=ticksize)
ax[3][2].set_yticks([])
ax[3][2].set_xticks([])
# Sinewave column
df = loadmat('kdf_experiments/results/sinewave_plot_data.mat')
ax1 = ax[2][3].imshow(
    np.flip(df['posterior_rf'],axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[2][3].set_aspect("equal")
ax[2][3].tick_params(labelsize=ticksize)
ax[2][3].set_yticks([])
ax[2][3].set_xticks([])
ax1 = ax[3][3].imshow(
    np.flip(df['posterior_kdf'], axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[3][3].set_aspect("equal")
ax[3][3].tick_params(labelsize=ticksize)
ax[3][3].set_yticks([])
ax[3][3].set_xticks([])
# Polynomial column
df = loadmat('kdf_experiments/results/polynomial_plot_data.mat')
ax1 = ax[2][4].imshow(
    np.flip(df['posterior_rf'],axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[2][4].set_aspect("equal")
ax[2][4].tick_params(labelsize=ticksize)
ax[2][4].set_yticks([])
ax[2][4].set_xticks([])
ax1 = ax[3][4].imshow(
    np.flip(df['posterior_kdf'],axis=0),
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[3][4].set_aspect("equal")
ax[3][4].tick_params(labelsize=ticksize)
ax[3][4].set_yticks([])
ax[3][4].set_xticks([])
# ---- rows 4-5: NN vs KDN posteriors; class-0 probability maps stored
# flat and reshaped to 400x400 ----
# Gaussian XOR column
df = loadmat('kdn_experiments/results/gxor_plot_data.mat')
proba_nn = 1-np.flip(df["nn_proba"][:, 0].reshape(400, 400), axis=1)
proba_kdn = 1-np.flip(df["kdn_proba"][:, 0].reshape(400, 400), axis=1)
ax1 = ax[4][0].imshow(
    proba_nn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[4][0].set_aspect("equal")
ax[4][0].tick_params(labelsize=ticksize)
ax[4][0].set_ylabel('NN Posteriors',fontsize=title_size-5)
ax[4][0].set_yticks([-2,-1,0,1,2])
ax[4][0].set_xticks([])
ax1 = ax[5][0].imshow(
    proba_kdn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[5][0].set_aspect("equal")
ax[5][0].set_ylabel('KDN Posteriors',fontsize=title_size-5)
ax[5][0].tick_params(labelsize=ticksize)
ax[5][0].set_yticks([-2,-1,0,1,2])
ax[5][0].set_xticks([-2,-1,0,1,2])
# Spiral column
df = loadmat('kdn_experiments/results/spiral_plot_data.mat')
proba_nn = np.flip(df["nn_proba"][:, 0].reshape(400, 400), axis=1)
proba_kdn = np.flip(df["kdn_proba"][:, 0].reshape(400, 400), axis=1)
ax1 = ax[4][1].imshow(
    proba_nn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[4][1].set_aspect("equal")
ax[4][1].tick_params(labelsize=ticksize)
ax[4][1].set_yticks([])
ax[4][1].set_xticks([])
ax1 = ax[5][1].imshow(
    proba_kdn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[5][1].set_aspect("equal")
ax[5][1].tick_params(labelsize=ticksize)
ax[5][1].set_yticks([])
ax[5][1].set_xticks([-2,-1,0,1,2])
# Circle column
df = loadmat('kdn_experiments/results/circle_plot_data.mat')
proba_nn = np.flip(df["nn_proba"][:, 0].reshape(400, 400), axis=1)
proba_kdn = np.flip(df["kdn_proba"][:, 0].reshape(400, 400), axis=1)
ax1 = ax[4][2].imshow(
    proba_nn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[4][2].set_aspect("equal")
ax[4][2].tick_params(labelsize=ticksize)
ax[4][2].set_yticks([])
ax[4][2].set_xticks([])
ax1 = ax[5][2].imshow(
    proba_kdn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[5][2].set_aspect("equal")
ax[5][2].tick_params(labelsize=ticksize)
ax[5][2].set_yticks([])
ax[5][2].set_xticks([-2,-1,0,1,2])
# Sinewave column (flipped on the other axis than the previous columns)
df = loadmat('kdn_experiments/results/sinewave_plot_data.mat')
proba_nn = np.flip(df["nn_proba"][:, 0].reshape(400, 400), axis=0)
proba_kdn = np.flip(df["kdn_proba"][:, 0].reshape(400, 400), axis=0)
ax1 = ax[4][3].imshow(
    proba_nn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[4][3].set_aspect("equal")
ax[4][3].tick_params(labelsize=ticksize)
ax[4][3].set_yticks([])
ax[4][3].set_xticks([])
ax1 = ax[5][3].imshow(
    proba_kdn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
ax[5][3].set_aspect("equal")
ax[5][3].tick_params(labelsize=ticksize)
ax[5][3].set_yticks([])
ax[5][3].set_xticks([-2,-1,0,1,2])
# Polynomial column
df = loadmat('kdn_experiments/results/polynomial_plot_data.mat')
# NOTE(review): only the NN map is complemented (1-...) here while the KDN
# map is not — unlike the gxor column where both are. Verify this asymmetry
# is intended and not a copy-paste slip.
proba_nn = 1-np.flip(df["nn_proba"][:, 0].reshape(400, 400), axis=1)
proba_kdn = np.flip(df["kdn_proba"][:, 0].reshape(400, 400), axis=1)
ax1 = ax[4][4].imshow(
    proba_nn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#fig.colorbar(ax1, ax=ax[4][4], anchor=(0, 0.3), shrink=0.85)
ax[4][4].set_aspect("equal")
ax[4][4].tick_params(labelsize=ticksize)
ax[4][4].set_yticks([])
ax[4][4].set_xticks([])
ax1 = ax[5][4].imshow(
    proba_kdn,
    extent=[xx.min(), xx.max(), yy.min(), yy.max()],
    cmap="bwr",
    vmin=0,
    vmax=1,
    interpolation="nearest",
    aspect="auto",
)
#fig.colorbar(ax1, anchor=(0, 0.3), shrink=0.85)
ax[5][4].set_aspect("equal")
ax[5][4].tick_params(labelsize=ticksize)
ax[5][4].set_yticks([])
ax[5][4].set_xticks([-2,-1,0,1,2])
#plt.savefig('plots/simulations.pdf')
# %%
def calc_stat(a, reps=45):
    """Summarize repeated-trial metric values per sample size.

    Reshapes ``a`` into ``(-1, reps)`` — one row per sample size, ``reps``
    repetitions per row — and reduces across repetitions.

    Parameters
    ----------
    a : array-like
        Flat array of metric values whose length is a multiple of ``reps``.
    reps : int, optional
        Number of repetitions per sample size (default 45).

    Returns
    -------
    tuple of np.ndarray
        Per-row median, 25th percentile and 75th percentile.
    """
    # np.asarray generalizes the input to any array-like; the original
    # required an ndarray. The unused accumulator lists were removed.
    a = np.asarray(a).reshape(-1, reps)
    # axis=1 reduces across the repetitions of each sample size.
    return (
        np.median(a, axis=1),
        np.quantile(a, 0.25, axis=1),
        np.quantile(a, 0.75, axis=1),
    )
# %%
# Learning curves for the five simulation tasks: one row per task, one
# column per metric.  Each panel overlays KDN vs. its parent NN (read from
# kdn_experiments results) and KDF vs. its parent RF (read from
# kdf_experiments results).  This replaces ~390 lines of copy-pasted
# per-panel code with one data-driven loop producing the same figure.
sns.set_context('talk')
sample_size = [50, 100, 500, 1000, 5000, 10000]
fig, ax = plt.subplots(5, 4, figsize=(45, 40))
title_size = 45
ticksize = 30
for ax_ in ax:
    for ax__ in ax_:
        ax__.tick_params(labelsize=ticksize)

# (file stem, panel title) for each row of the grid.
datasets = [
    ('gxor', 'Gaussian XOR'),
    ('spiral', 'Spiral'),
    ('circle', 'Circle'),
    ('sinewave', 'Sinewave'),
    ('polynomial', 'Polynomial'),
]
# (kdn/nn key stem, kdf/rf key prefix, y label, invert) for each column.
# 'acc' is stored as accuracy, so it is inverted into generalization error.
metrics = [
    ('acc', 'error', 'Generalization Error', True),
    ('hd', 'hellinger', 'Hellinger Distance', False),
    ('mmcIn', 'mmcIn', 'Mean Max Confidence\n (In Distribution)', False),
    ('mmcOut', 'mmcOut', 'Mean Max Confidence\n (Out Distribution)', False),
]

for row, (stem, title) in enumerate(datasets):
    df = loadmat('kdn_experiments/results/graphs/%s.mat' % stem)
    df_ = loadmat('kdf_experiments/results/%s_plot_data.mat' % stem)
    last_row = row == len(datasets) - 1
    for col, (kdn_key, kdf_key, ylabel, invert) in enumerate(metrics):
        axis = ax[row][col]
        kdn_raw = df['kdn_%s' % kdn_key]
        nn_raw = df['nn_%s' % kdn_key]
        if invert:
            kdn_raw = 1 - kdn_raw
            nn_raw = 1 - nn_raw
        med, q25, q75 = calc_stat(kdn_raw)
        med_nn, nn_25, nn_75 = calc_stat(nn_raw)
        # The first sample size in the KDN .mat files is skipped ([1:]) to
        # line up with the six sizes plotted on the x axis.
        axis.plot(sample_size, med[1:], c="b", label='KDN')
        axis.plot(sample_size, med_nn[1:], c="c", label='NN')
        axis.fill_between(sample_size, q25[1:], q75[1:], facecolor='b', alpha=.3)
        axis.fill_between(sample_size, nn_25[1:], nn_75[1:], facecolor='c', alpha=.3)
        axis.set_xscale('log')
        if last_row:
            axis.set_xlabel('Sample size', fontsize=ticksize)
        else:
            axis.set_xticks([])
        axis.set_ylabel(ylabel, fontsize=ticksize)
        axis.spines["right"].set_visible(False)
        axis.spines["top"].set_visible(False)
        axis.plot(sample_size, df_['%s_kdf_med' % kdf_key].ravel(), c="r", label='KDF')
        axis.plot(sample_size, df_['%s_rf_med' % kdf_key].ravel(), c="k", label='RF')
        axis.fill_between(sample_size, df_['%s_kdf_25' % kdf_key].ravel(),
                          df_['%s_kdf_75' % kdf_key].ravel(), facecolor='r', alpha=.3)
        axis.fill_between(sample_size, df_['%s_rf_25' % kdf_key].ravel(),
                          df_['%s_rf_75' % kdf_key].ravel(), facecolor='k', alpha=.3)
        # Legend only once, on the top-left panel, after all four curves exist.
        if row == 0 and col == 0:
            axis.legend(fontsize=ticksize, frameon=False)
        # Dataset title sits over the second column, as in the original layout.
        if col == 1:
            axis.set_title(title, fontsize=title_size)
plt.savefig('plots/simulation_res.pdf')
# %%
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: Morgane T.
mainScript
"""
# Libraries
from argparse import ArgumentParser
from distutils import util
import CTD_functions as CTD
import WP_functions as WP
import methods_functions as methods
import os
# Script version
VERSION = '1.0'  # reported by the -v/--version CLI flag
# Functions
def argumentParserFunction():
    """
    Argument parser function.

    Builds the CLI for the CTD/WikiPathways workflow: input chemical list,
    association mode and output folder.

    :return:
        - **parser** (*argparse.ArgumentParser*) – List of arguments
    """
    parser = ArgumentParser(description='mainScript')
    parser.add_argument('-v', '--version', action='version', version=VERSION)
    # Fix: help text said 'MeDH ID' -- the input is a MeSH ID list (see the
    # 'CTDFile_byMeSH_inputFile.txt' test fixture referenced in __main__).
    parser.add_argument('-c', '--CTDFile', required=True, help='File path of the chemical name (or MeSH ID) list')
    # strtobool maps 'true'/'false'/'1'/'0' (etc.) onto 1/0.
    parser.add_argument('--directAssociations', required=True, type=util.strtobool,
                        help='Direct associations (only chem) or hierarchical associations (chem + all related chem) - False / True')
    parser.add_argument('-o', '--outputPath', default='OutputResults', help='Folder path for writing results')
    return parser
# Main
if __name__ == "__main__":
    # Command-line interface
    parser = argumentParserFunction()
    args = parser.parse_args()
    argsDict = vars(args)
    # Input parameters (commented paths below are developer test fixtures)
    # CTDFile = "test/InputData/CTDFile_byMeSH_inputFile.txt"
    # CTDFile = "test/InputData/CTDFile_byNames_inputFile.txt"
    # CTDFile = "/home/morgane/Documents/05_EJPR_RD/WF_Environment/EnvironmentProject/test/InputData/InputFile_CTD_sevMeSH.txt"
    CTDFile = argsDict['CTDFile']
    # strtobool yields 1/0; map it onto the association-mode string CTD expects
    if argsDict['directAssociations']:
        association = 'directAssociations'
    else:
        association = 'hierarchicalAssociations'
    outputPath = argsDict['outputPath']
    # Create the output folder if it does not exist yet
    if not os.path.exists(outputPath):
        os.mkdir(outputPath)
    # Read CTD file and request CTD database
    chemNameList = CTD.readCTDFile(CTDFile)
    chemTargetsDict = CTD.CTDrequestFromList(chemList=chemNameList, association=association, outputPath=outputPath)
    # Search Rare Diseases pathways and extract all genes from WP
    WPGeneRDDict, WPDict = WP.rareDiseasesWPrequest(outputPath=outputPath)
    WPBackgroundGenes = WP.allGenesFromWP()
    # Overlap between our target list from CTD and WP of interest
    methods.overlapAnalysis(chemTargetsDict=chemTargetsDict,
                            WPGeneRDDict=WPGeneRDDict,
                            WPBackgroundGenes=WPBackgroundGenes,
                            WPDict=WPDict,
                            outputPath=outputPath)
|
"""
RealEstateAppRealitica
-------------
Script that runs a web scraper in a background and gets
all available real estates in Balkan area, and filters them
by given parameters(country, city, municipality).
You can get it by downloading it directly or by typing:
$ pip install RealEstateAppRealitica
After it is installed you can start it by simply typing in your terminal:
$ realitica_real_estate
Results will be printed in terminal window, and saved into CSV
file for easier browsing.
"""
from setuptools import setup
setup(name='RealEstateAppRealitica',
version='0.2',
description='Script that runs a web scraper in a background and gets all available real estates in Balkan area, '
'and filters them by given parameters(country, city, municipality).',
long_description=__doc__,
long_description_content_type='text/markdown',
url="https://github.com/urosjevremovic/Real-estate-app",
license='MIT',
author='Uros Jevremovic',
author_email='jevremovic.uros91@gmail.com',
packages=['RealEstateApp'],
install_requires=['bs4', 'requests'],
entry_points={
"console_scripts": ["realitica_real_estate=RealEstateApp.real_estate_app_2:main"],
},
)
__author__ = 'Uros Jevremovic'
|
#*----------------------------------------------------------------------------*
#* Copyright (C) 2021 Politecnico di Torino, Italy *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Matteo Risso *
#*----------------------------------------------------------------------------*
#
# BEFORE RUNNING TAKE CARE THAT ARE THE CORRECT ONES
# CHECK DATASET AND SAVING PATHs
#
class Config:
    """Hyper-parameter container for the PPG_Dalia experiments.

    Groups dataset, preprocessing, training, regularization and MorphNet
    settings in one place; paths are derived from *root* and *search_type*.
    """

    def __init__(self, search_type, root='./'):
        self.dataset = 'PPG_Dalia'
        self.root = root
        self.search_type = search_type
        # Data preprocessing parameters. Needs to be left unchanged
        self.time_window = 8
        # Input length in samples: 32 * window seconds
        # (assumes 32 Hz signals -- TODO confirm against the dataset loader).
        self.input_shape = 32 * self.time_window
        # Training Parameters
        self.batch_size = 128
        self.lr = 0.001
        self.epochs = 500
        # NOTE(review): purpose of 'a' unclear from here -- confirm against
        # the training code before changing it.
        self.a = 35
        self.path_PPG_Dalia = self.root
        # warmup_epochs determines the number of training epochs without regularization
        # it could be an integer number or the string 'max' to indicate that we fully train the
        # network
        self.warmup = 20
        # reg_strength determines how aggressive lasso-reg is
        self.reg_strength = 1e-6
        # Amount of l2 regularization to be applied. Usually 0.
        self.l2 = 0.
        # threshold value is the value at which a weight is treated as 0.
        self.threshold = 0.5
        # presumably hysteresis around the threshold; 0 disables it -- TODO confirm
        self.hyst = 0
        # Where data are saved
        self.saving_path = self.root+'saved_models_'+self.search_type+'/'
        # parameters MorphNet training
        self.epochs_MN = 350
        self.batch_size_MN = 128
|
def dict_intersection(dict1, dict2):
    """Return the key/value pairs that appear identically in both dicts.

    A pair is kept only if the key is actually present in *dict2* with an
    equal value.  Fix: the previous ``dict2.get(k) == v`` falsely matched
    when a value in *dict1* was None and the key was missing from *dict2*
    (``dict.get`` returns None for absent keys).
    """
    return {k: v for k, v in dict1.items() if k in dict2 and dict2[k] == v}
|
import xlrd
import json
filename1 = r"C:\Users\Terry\Desktop\SKUs.xls"


def reader(filename=None):
    """Read every column of the first sheet of an .xls workbook.

    Parameters
    ----------
    filename : str, optional
        Path of the workbook to open.  Defaults to the module-level
        ``filename1`` path, preserving the original behaviour.

    Returns
    -------
    list of list
        One list of cell values per column of the first sheet.
    """
    if filename is None:
        filename = filename1
    data = xlrd.open_workbook(filename)
    table = data.sheets()[0]
    company = [table.col_values(i) for i in range(table.ncols)]
    # Print the second column, as the original script did.
    # NOTE(review): raises IndexError when the sheet has fewer than 2 columns.
    print(company[1])
    return company


reader()
|
# Monsters Castle Socket Server
# Python 2.7.14
import socket, select, os, time, json, struct, hashlib
import game, msg, scene
CONNECTION_LIST = [] # Read sockets (server listener + client sockets) watched by select
CONNECTION_USERS = {} # Socket-Username (if not logined, username is None)
CONNECTION_MSGQUEUE = {} # Socket-MsgQueue [[len, str], tail] -- parsed messages + partial bytes
USERS_CONNECTION = {} # Username-Socket (logined user)
PLAYER_GAME = {} # Username-Game
RECV_BUFFER = 8192  # bytes read per recv() call
ADDRESS = '127.0.0.1'
PORT = 9121
def loadUserDatabase(jsonFile): # load user information from json
    """Load the username -> password map from *jsonFile*.

    Returns an empty dict when the file does not exist.
    """
    if not os.path.isfile(jsonFile):
        return {}
    with open(jsonFile, "r") as f:
        raw = json.load(f)
    # Copy into a fresh dict of Username-Password pairs.
    return {name: pwd for name, pwd in raw.items()}
def dumpUserDatabase(jsonFile): # dump user information to json
    """Persist USER_DATABASE to *jsonFile*.

    NOTE(review): USER_DATABASE is a module global assigned only inside the
    __main__ guard; calling this from an importing module would raise
    NameError -- confirm intended usage.
    """
    with open(jsonFile, "w") as f:
        json.dump(USER_DATABASE, f)
def handleGameMonitorData(sock, data):
    """Relay a frame from a game-monitor thread to the owning player's client.

    Frame layout after the 4-byte "^^^@" tag: two native-order ints
    (game recog token, username length), then a 16-byte username field
    truncated to that length; the remainder is forwarded verbatim.
    """
    recog, usernameLen = struct.unpack("ii", data[4:12])
    username = struct.unpack("16s", data[12:28])[0][:usernameLen]
    # Forward only if the user is still logged in and the token matches the
    # current game instance -- frames from stale games are silently dropped.
    if username in USERS_CONNECTION and username in PLAYER_GAME and PLAYER_GAME[username].recog == recog:
        sendMsgToSock(USERS_CONNECTION[username], data[28:])
def createNewGame(username):
    """Create and start a fresh game instance (monitor thread) for *username*."""
    PLAYER_GAME[username] = game.game(username, ADDRESS, PORT, SCENE)
    PLAYER_GAME[username].start() # start a client thread as game monitor
    print ("'%s' create new game" % username)
def fetchUsernamePassword(data):
    """Split a "$xx username password" message; return (username, md5 hex of password).

    NOTE(review): assumes *data* contains at least three space-separated
    words -- malformed input raises IndexError.  Python 2 code: md5.update()
    is fed a str.  Unsalted MD5 is weak for password storage.
    """
    wordArr = data.split(' ')
    md5 = hashlib.md5()
    md5.update(wordArr[2])
    return wordArr[1], md5.hexdigest()
def login(sock, username, password):
    """Attempt to log *username* in on *sock*.

    Returns 0 on success, 1 for an unknown user, 2 for a wrong password,
    3 when the user is already logged in on another socket.
    """
    if username not in USER_DATABASE:
        return 1
    if password != USER_DATABASE[username]:
        return 2
    if username in USERS_CONNECTION:
        return 3
    # Success: record the socket<->username association in both maps.
    CONNECTION_USERS[sock] = username
    USERS_CONNECTION[username] = sock
    print ("'%s' login" % username)
    return 0
def handleSignIn(sock, data):
    """Handle a "$si username password" request; reply "$si<code>" to the client.

    Code 0 is success (a game is created on first login for this session);
    codes 1/2/3 are the failure codes produced by login().
    """
    username, password = fetchUsernamePassword(data)
    ret = login(sock, username, password)
    if ret == 0:
        # A returning player keeps the existing game; otherwise start one.
        if not username in PLAYER_GAME:
            createNewGame(username)
        sendMsgToSock(sock, "$si0")
    else:
        sendMsgToSock(sock, "$si"+str(ret))
def register(username, password):
    """Add a new account; return False when the username is already taken.

    On success the whole user database is persisted to user.json.
    *password* is the already-hashed value from fetchUsernamePassword().
    """
    if username in USER_DATABASE:
        return False
    else:
        USER_DATABASE[username] = password
        print ("'%s' register" % username)
        dumpUserDatabase("user.json")
        return True
def handleSignUp(sock, data):
    """Handle a "$su username password" request; reply "$su0" ok / "$su1" taken."""
    username, password = fetchUsernamePassword(data)
    if register(username, password):
        sendMsgToSock(sock, "$su0")
    else:
        sendMsgToSock(sock, "$su1")
def sendMsgToSock(sock, message): # send msg to client socket
    """Best-effort send of *message* (length-prefixed by msg.encode) to a client.

    Send failures on closed/reset sockets are deliberately ignored; the main
    select loop reaps dead connections on its next recv().
    """
    try:
        sock.send(msg.encode(message))
    except Exception:
        # Fix: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; Exception keeps the best-effort behaviour while
        # letting process-control exceptions propagate.
        pass
def handleClientData(sock, data): # handle data received from client (including game monitor thread)
    """Dispatch one complete message arriving on *sock*.

    Messages tagged "^^^@" come from a game-monitor thread and are relayed
    to the owning player.  Before login only "$su" (sign up) and "$si"
    (sign in) are accepted; after login the message goes to the player's
    game, whose return value selects play-again (1) or logout (2).
    """
    connectionUsername = CONNECTION_USERS[sock]
    if data.startswith("^^^@"): # game monitor
        handleGameMonitorData(sock, data)
    elif connectionUsername == None:
        if data.startswith("$su "): # Sign up : $su username password
            handleSignUp(sock, data)
        elif data.startswith("$si "): # Sign in : $si username password
            handleSignIn(sock, data)
    else:
        if connectionUsername in PLAYER_GAME:
            handleResult = PLAYER_GAME[connectionUsername].handle(data)
            if handleResult == 1: # Play Again
                # Tear down the finished game and immediately start a new one;
                # "$si0" tells the client it is back in a game session.
                PLAYER_GAME[connectionUsername].stop()
                createNewGame(connectionUsername)
                sendMsgToSock(sock, "$si0")
            elif handleResult == 2: # Logout
                PLAYER_GAME[connectionUsername].stop()
                PLAYER_GAME.pop(connectionUsername)
                CONNECTION_USERS[sock] = None
                logout(connectionUsername)
                sendMsgToSock(sock, "$lot")
def logout(username):
    """Remove *username* from the connected-users table.

    Safe to call with None or an unknown/already-disconnected name
    (both are silent no-ops).
    """
    # PEP 8: identity comparison with None uses ``is not``, not ``!=``.
    if username is not None and username in USERS_CONNECTION:
        print("User '%s' quit" % username)
        USERS_CONNECTION.pop(username)
if __name__ == "__main__":
    # Bootstrap the world geometry and the persisted account database.
    SCENE = scene.scene("height.bin", "triangle.bin", "octree.dll")
    USER_DATABASE = loadUserDatabase("user.json")
    # Non-blocking TCP listener, multiplexed with select() below.
    gameSocketServer = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    gameSocketServer.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    gameSocketServer.bind((ADDRESS, PORT))
    gameSocketServer.listen(1000)
    gameSocketServer.setblocking(False)
    CONNECTION_LIST.append(gameSocketServer)
    print ("Welcome to Monsters Castle (Server) on %s:%d" % (ADDRESS, PORT))
    while True:
        readSockets, writeSockets, errSockets = select.select(CONNECTION_LIST, [], [])
        for sock in readSockets:
            # new connection
            if sock == gameSocketServer:
                newSock, addr = gameSocketServer.accept()
                newSock.setblocking(False)
                CONNECTION_LIST.append(newSock)
                CONNECTION_USERS[newSock] = None
                # Per-connection reassembly state: [list of pending frames, partial tail].
                CONNECTION_MSGQUEUE[newSock] = [[], ""]
                print ("Client (%s, %s) connected" % addr)
            # message from existing client
            else:
                try:
                    data = sock.recv(RECV_BUFFER)
                    if data:
                        # msg.enqueue appends decoded frames to the queue and
                        # returns the remaining partial-frame tail.
                        CONNECTION_MSGQUEUE[sock][1] = msg.enqueue(CONNECTION_MSGQUEUE[sock][0], data, CONNECTION_MSGQUEUE[sock][1])
                        for m in CONNECTION_MSGQUEUE[sock][0]:
                            # m appears to be (declared length, payload); only
                            # complete frames are dispatched — TODO confirm in msg module.
                            if m[0] == len(m[1]):
                                handleClientData(sock, m[1])
                            else:
                                break
                        if len(CONNECTION_MSGQUEUE[sock][0]):
                            # Drop handled frames; keep a trailing incomplete one, if any.
                            if CONNECTION_MSGQUEUE[sock][0][-1][0] == len(CONNECTION_MSGQUEUE[sock][0][-1][1]):
                                CONNECTION_MSGQUEUE[sock][0] = []
                            else:
                                CONNECTION_MSGQUEUE[sock][0] = [CONNECTION_MSGQUEUE[sock][0][-1]]
                except:
                    # Any socket/handler error tears the client down completely.
                    logout(CONNECTION_USERS[sock])
                    try:
                        sock.close()
                    except:
                        pass
                    CONNECTION_LIST.remove(sock)
                    CONNECTION_USERS.pop(sock)
                    CONNECTION_MSGQUEUE.pop(sock)
                    continue
    # NOTE(review): unreachable — the while True loop above never exits.
    gameSocketServer.close()
|
# Copyright (C) 2020 Adek Maulana.
# All rights reserved.
"""
Heroku manager for your Fire-X
"""
import asyncio
import math
import os
import heroku3
import requests
from telegraph import Telegraph
from firebot import CMD_HELP
from firebot.utils import edit_or_reply, fire_on_cmd, sudo_cmd
# Telegraph account used to paste long log output (see the .logs command).
telegraph = Telegraph()
tgnoob = telegraph.create_account(short_name="Fire 🇮🇳")
# NOTE(review): ``Var`` (and the ``fire`` client used by the decorators below)
# are not in this file's visible imports — presumably injected by the bot
# framework; confirm where they come from.
Heroku = heroku3.from_key(Var.HEROKU_API_KEY)
heroku_api = "https://api.heroku.com"
@fire.on(
    fire_on_cmd(pattern="(set|get|del) var(?: |$)(.*)(?: |$)([\s\S]*)", outgoing=True)
)
@fire.on(
    sudo_cmd(pattern="(set|get|del) var(?: |$)(.*)(?: |$)([\s\S]*)", allow_sudo=True)
)
async def variable(var):
    """
    Manage most of ConfigVars setting, set new var, get current var,
    or delete var...

    group(1) is the sub-command (set/get/del), group(2) the var name
    (possibly with the value appended), group(3) the value.
    """
    # A Heroku app handle is needed for every sub-command.
    if Var.HEROKU_APP_NAME is not None:
        app = Heroku.app(Var.HEROKU_APP_NAME)
    else:
        return await edit_or_reply(
            var, "`[HEROKU]:" "\nPlease setup your` **HEROKU_APP_NAME**"
        )
    exe = var.pattern_match.group(1)
    # Live ConfigVars mapping — item assignment/deletion patches Heroku directly.
    heroku_var = app.config()
    if exe == "get":
        await edit_or_reply(var, "`Getting information...`")
        await asyncio.sleep(1.5)
        try:
            variable = var.pattern_match.group(2).split()[0]
            if variable in heroku_var:
                return await edit_or_reply(
                    var,
                    "**ConfigVars**:" f"\n\n`{variable} = {heroku_var[variable]}`\n",
                )
            else:
                return await edit_or_reply(
                    var, "**ConfigVars**:" f"\n\n`Error:\n-> {variable} don't exists`"
                )
        except IndexError:
            # No var name supplied -> dump the whole config, as a file when
            # it exceeds Telegram's message length limit.
            configs = prettyjson(heroku_var.to_dict(), indent=2)
            with open("configs.json", "w") as fp:
                fp.write(configs)
            with open("configs.json", "r") as fp:
                result = fp.read()
            if len(result) >= 4096:
                await var.client.send_file(
                    var.chat_id,
                    "configs.json",
                    reply_to=var.id,
                    caption="`Output too large, sending it as a file`",
                )
            else:
                await edit_or_reply(
                    var,
                    "`[HEROKU]` ConfigVars:\n\n"
                    "================================"
                    f"\n```{result}```\n"
                    "================================",
                )
            os.remove("configs.json")
            return
    elif exe == "set":
        await edit_or_reply(var, "`Setting information...`")
        variable = var.pattern_match.group(2)
        if not variable:
            return await edit_or_reply(var, ">`.set var <ConfigVars-name> <value>`")
        value = var.pattern_match.group(3)
        if not value:
            # The value may have been captured inside group(2); split it out.
            variable = variable.split()[0]
            try:
                value = var.pattern_match.group(2).split()[1]
            except IndexError:
                return await edit_or_reply(var, ">`.set var <ConfigVars-name> <value>`")
        await asyncio.sleep(1.5)
        if variable in heroku_var:
            await edit_or_reply(
                var, f"**{variable}** `successfully changed to` -> **{value}**"
            )
        else:
            await edit_or_reply(
                var, f"**{variable}** `successfully added with value` -> **{value}**"
            )
        # NOTE(review): this write patches the app's config on Heroku, which
        # typically restarts the dyno — presumably intentional; confirm.
        heroku_var[variable] = value
    elif exe == "del":
        await edit_or_reply(var, "`Getting information to deleting variable...`")
        try:
            variable = var.pattern_match.group(2).split()[0]
        except IndexError:
            return await edit_or_reply(
                var, "`Please specify ConfigVars you want to delete`"
            )
        await asyncio.sleep(1.5)
        if variable in heroku_var:
            await edit_or_reply(var, f"**{variable}** `successfully deleted`")
            del heroku_var[variable]
        else:
            return await edit_or_reply(var, f"**{variable}** `is not exists`")
@fire.on(fire_on_cmd(pattern="usage$", outgoing=True))
@fire.on(sudo_cmd(pattern="usage$", allow_sudo=True))
async def dyno_usage(dyno):
    """Report account-wide and per-app Heroku dyno quota usage.

    Queries the (versioned) account-quotas variant of the Heroku v3 API
    and edits the triggering message with a formatted report.
    """
    await edit_or_reply(dyno, "`Trying To Fetch Dyno Usage....`")
    useragent = (
        "Mozilla/5.0 (Linux; Android 10; SM-G975F) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/80.0.3987.149 Mobile Safari/537.36"
    )
    user_id = Heroku.account().id
    headers = {
        "User-Agent": useragent,
        "Authorization": f"Bearer {Var.HEROKU_API_KEY}",
        # The account-quotas media type exposes the free-dyno quota endpoint.
        "Accept": "application/vnd.heroku+json; version=3.account-quotas",
    }
    path = "/accounts/" + user_id + "/actions/get-quota"
    r = requests.get(heroku_api + path, headers=headers)
    if r.status_code != 200:
        return await edit_or_reply(
            dyno, "`Error: something bad happened`\n\n" f">.`{r.reason}`\n"
        )
    result = r.json()
    quota = result["account_quota"]
    quota_used = result["quota_used"]
    # --- account-wide remaining quota (seconds -> h/m and percent) ---
    remaining_quota = quota - quota_used
    percentage = math.floor(remaining_quota / quota * 100)
    minutes_remaining = remaining_quota / 60
    hours = math.floor(minutes_remaining / 60)
    minutes = math.floor(minutes_remaining % 60)
    # --- usage of this app's dyno (apps list may be empty) ---
    App = result["apps"]
    try:
        App[0]["quota_used"]
    except IndexError:
        AppQuotaUsed = 0
        AppPercentage = 0
    else:
        AppQuotaUsed = App[0]["quota_used"] / 60
        AppPercentage = math.floor(App[0]["quota_used"] * 100 / quota)
    AppHours = math.floor(AppQuotaUsed / 60)
    AppMinutes = math.floor(AppQuotaUsed % 60)
    await asyncio.sleep(1.5)
    return await edit_or_reply(
        dyno,
        "**Dyno Usage Data**:\n\n"
        f"✗ **APP NAME =>** `{Var.HEROKU_APP_NAME}` \n"
        # Fix: this line previously lacked a trailing newline, so the
        # "Usage Percentage" item ran onto the same report line.
        f"✗ **Usage in Hours And Minutes =>** `{AppHours}h` `{AppMinutes}m`\n"
        f"✗ **Usage Percentage =>** [`{AppPercentage} %`]\n"
        "\n\n"
        "✗ **Dyno Remaining This Months 📆:**\n"
        f"✗ `{hours}`**h** `{minutes}`**m** \n"
        f"✗ **Percentage :-** [`{percentage}`**%**]",
    )
# NOTE(review): ``command`` and ``borg`` are not in this file's visible
# imports — presumably provided by another plugin framework; confirm.
@command(pattern="^.info heroku")
async def info(event):
    """Send an overview of this module's commands, then delete the trigger."""
    await borg.send_message(
        event.chat_id,
        "**Info for Module to Manage Heroku:**\n\n`.usage`\nUsage:__Check your heroku dyno hours status.__\n\n`.set var <NEW VAR> <VALUE>`\nUsage: __add new variable or update existing value variable__\n**!!! WARNING !!!, after setting a variable the bot will restart.**\n\n`.get var or .get var <VAR>`\nUsage: __get your existing varibles, use it only on your private group!__\n**This returns all of your private information, please be cautious...**\n\n`.del var <VAR>`\nUsage: __delete existing variable__\n**!!! WARNING !!!, after deleting variable the bot will restarted**",
    )
    await event.delete()
def prettyjson(obj, indent=2, maxlinelength=80):
    """Renders JSON content with indentation and line splits/concatenations to fit maxlinelength.
    Only dicts, lists and basic types are supported

    NOTE(review): delegates to ``getsubitems``/``indentitems``, which are not
    defined in this file's visible section — confirm they exist elsewhere.
    """
    items, _ = getsubitems(
        obj,
        itemkey="",
        islast=True,
        maxlinelength=maxlinelength - indent,
        indent=indent,
    )
    return indentitems(items, indent, level=0)
@fire.on(fire_on_cmd(pattern="logs$", outgoing=True))
@fire.on(sudo_cmd(pattern="logs$", allow_sudo=True))
async def _(givelogs):
    """Fetch the Heroku dyno log and deliver it as a file + Telegraph page."""
    try:
        Heroku = heroku3.from_key(Var.HEROKU_API_KEY)
        app = Heroku.app(Var.HEROKU_APP_NAME)
    except Exception:
        # Narrowed from a bare ``except``; any auth/config problem lands here.
        return await givelogs.reply(
            " Please make sure your Heroku API Key, Your App name are configured correctly in the heroku var !"
        )
    await edit_or_reply(givelogs, "`Trying To Fetch Logs...`")
    # Fetch the log once and reuse it — the original called app.get_log()
    # twice, hitting the API a second time and risking divergent contents.
    hmm = app.get_log()
    with open("logs.txt", "w") as log:
        log.write(hmm)
    starky = f"<code> {hmm} </code>"
    title_of_page = "Fire-X UserBot Logs"
    response = telegraph.create_page(title_of_page, html_content=starky)
    km = response["path"]
    suger = f"`Logs Can Be Found` [Here](https://telegra.ph/{km})"
    await givelogs.client.send_file(
        givelogs.chat_id,
        "logs.txt",
        reply_to=givelogs.id,
        caption=suger,
    )
# Register this module's command reference in the bot's global help index.
CMD_HELP.update(
    {
        "heroku": "**Heroku**\
\n\n**Syntax : **`.set var <var key> <var value>`\
\n**Usage :** Add new variable or update existing value variable.\
\n\n**Syntax : **`.get var <var>`\
\n**Usage :** Get your existing variables, use it only on your private group!\
\n\n**Syntax : **`.del var <var>`\
\n**Usage :** Deletes existing variable.\
\n\n**Syntax : **`.usage`\
\n**Usage :** Gives you information about Dyno usage.\
\n\n**Syntax : **`.info heroku`\
\n**Usage :** Gives you information to use other commands of heroku.\
\n\n**Syntax : **`.logs`\
\n**Usage :** Gets logs from heroku."
    }
)
|
#!/usr/bin/env python
# Brocapi HTTP API
__copyright__ = """
Copyright 2017 FireEye, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__license__ = "Apache 2.0"
import ConfigParser
import json
import logging
import os
import sys
import uuid
import flask
import redis
import rq
import brocapi_worker
# Set up our logger format
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s: %(message)s [%(filename)s:%(funcName)s]')
logger = logging.getLogger(__name__)
# Grab config
logging.info("Loading config file")
try:
    config = ConfigParser.ConfigParser()
    config.read('/etc/brocapi.conf')
except Exception as e:
    logging.error("Could not read config file. Exiting.")
    logging.error(e)
    sys.exit(1)
logging.info("Config file loaded successfully.")
# Set to debug logging if enabled in the config
if config.getboolean("main", "debug"):
    logger.info("Debug logger enabled")
    logger.setLevel(logging.DEBUG)
# Find our processing dir, create it if it doesn't exist
BROCAPI_PROCESSING_DIR = config.get("bro", "processing_dir")
logger.debug("Using processing dir: %s", BROCAPI_PROCESSING_DIR)
if not os.path.isdir(BROCAPI_PROCESSING_DIR):
    logger.warning("Processing directory %s doesn't exist, attempting to create it", BROCAPI_PROCESSING_DIR)
    try:
        os.makedirs(BROCAPI_PROCESSING_DIR)
    # NOTE(review): bare except hides the actual OS error from the log.
    except:
        logger.error("Could not create Brocapi tmp dirs.")
        sys.exit(1)
    logger.info("Successfully created the processing directory %s", BROCAPI_PROCESSING_DIR)
# Create a connection to our rq worker queue
logger.info("Connecting to worker queue..")
try:
    rs = redis.Redis()
    # Test if the redis server is up
    rs.get(None)
    brocapi_queue = rq.Queue(connection=rs)
except Exception as e:
    logger.error("Error attempting to connect to worker queue!")
    logger.error(e)
    sys.exit(1)
logger.info("Successfully connected to worker queue")
# Set up our Flask app
app = flask.Flask(__name__)
@app.route('/submit/pcap', methods=['POST'])
def api_submit_pcap():
    """API Endpoint for Bro pcap processing.

    Accepts one or more pcap files in the ``file[]`` form field plus an
    optional ``tag``, stages them under a per-job directory, and enqueues
    the job on the rq worker queue.  Returns (json_body, http_status).
    """
    # Create a unique job uuid and folders
    job_uuid = str(uuid.uuid4())
    # Grab the job tag if it was supplied
    if 'tag' in flask.request.form:
        job_tag = str(flask.request.form['tag'])
    else:
        job_tag = None
    # Make sure we can get the pcaps from the POST data
    try:
        submitted_pcaps = flask.request.files.getlist("file[]")
    except Exception as e:
        # Narrowed from a bare ``except`` and now records the actual error.
        logger.error("Error retrieving pcaps from job %s", job_uuid)
        logger.error(e)
        response = json.dumps({"job_id": job_uuid, "success": False, "status": "error retrieving supplied pcaps", "tag": job_tag})
        return response, 500
    # If we didn't get any pcaps in the request, don't waste our time
    if not submitted_pcaps:
        logger.warning("Job %s contained no pcaps", job_uuid)
        response = json.dumps({"job_id": job_uuid, "success": False, "status": "no pcaps supplied", "tag": job_tag})
        return response, 500
    logger.info("Received the following PCAP request: job: %s, tag: %s, files: %s", job_uuid, job_tag, str(submitted_pcaps))
    # Create all the jobs dirs inside the processing dir
    job_path = os.path.join(BROCAPI_PROCESSING_DIR, job_uuid)
    job_logs_dir = os.path.join(job_path, "logs")
    job_logs_bro = os.path.join(job_logs_dir, "bro")
    job_logs_syslog = os.path.join(job_logs_dir, "syslog")
    job_pcaps_dir = os.path.join(job_path, "pcaps")
    try:
        logger.debug("Creating job directories for job %s", job_uuid)
        os.makedirs(job_logs_dir)
        os.makedirs(job_logs_bro)
        os.makedirs(job_logs_syslog)
        os.makedirs(job_pcaps_dir)
    except Exception as e:
        logger.error("Failed to create storage for job %s", job_uuid)
        logger.error(e)
        response = json.dumps({"job_id": job_uuid, "success": False, "status": "error creating job dirs", "tag": job_tag})
        return response, 500
    # Save all the files that were uploaded.  os.path.split strips any
    # client-supplied directory components from the filename.
    uploaded_filenames = []
    for _file in submitted_pcaps:
        filename = os.path.split(_file.filename)[-1]
        file_path = os.path.join(job_pcaps_dir, filename)
        _file.save(os.path.abspath(file_path))
        uploaded_filenames.append(filename)
    # Once we created the jobs dirs and saved the pcaps, queue the job in the worker queue
    brocapi_queue.enqueue(brocapi_worker.process_job, job_uuid, job_tag,
                          uploaded_filenames, config.get("bro", "bro_bin"), BROCAPI_PROCESSING_DIR,
                          config.get("syslog", "syslog_host"), config.getint("syslog", "syslog_port"),
                          config.get("syslog", "syslog_proto"), config.get("syslog", "syslog_prefix"))
    logger.info("Brocapi job added to worker queue: %s", job_uuid)
    response = json.dumps({"job_id": job_uuid, "success": True, "status": "job queued", "tag": job_tag, "files": uploaded_filenames})
    return response, 200
|
# Status messages logged by the dog CRUD operations.
READ_DOGS = 'Reading dogs information ...'
# Fix: "Writen" was a typo; now consistent with the Reading/Updating/Deleting
# progressive-tense phrasing of the sibling messages.
WRITE_DOGS = 'Writing new dog to database ...'
UPDATE_DOGS = 'Updating dog information in database ...'
DELETE_DOGS = 'Deleting dog information from database ...'
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import msgprint, _
from client.hr_services.doctype.end_of_service_award.end_of_service_award import get_award
from frappe.utils import cint, cstr, date_diff, flt, formatdate, getdate, get_link_to_form, \
comma_or, get_fullname, add_years, add_months, add_days, nowdate, get_first_day, get_last_day
class EmployeeResignation(Document):
    """Employee resignation document.

    On submit it marks the Employee as "Left" and creates a matching
    End of Service Award record; validation blocks resignation while the
    employee has a sanctioned loan.
    """

    def on_submit(self):
        # Mark the employee as left as of the last working date.
        emp = frappe.get_doc("Employee",self.employee)
        emp.status ="Left"
        emp.relieving_date =self.last_working_date
        emp.save(ignore_permissions=True)
        # Compute the end-of-service award from salary + service period.
        # The Arabic reason string means "employee resignation".
        salary = self.get_salary()
        award_info = get_award(self.date_of_joining, self.last_working_date, salary,self.employment_type, "استقالة العامل")
        eos_award = frappe.new_doc("End of Service Award")
        eos_award.employee = self.employee
        eos_award.end_date = self.last_working_date
        eos_award.salary = salary
        eos_award.reason="استقالة العامل"
        eos_award.workflow_state="Pending"
        eos_award.days = award_info['days']
        eos_award.months = award_info['months']
        eos_award.years = award_info['years']
        eos_award.award = award_info['award']
        eos_award.insert()

    def get_salary(self):
        """Return the employee's gross pay for the current month.

        NOTE(review): this *inserts* a draft Salary Slip as a side effect
        of reading the salary — confirm that is intended.
        """
        # award_info = get_award(self)
        # frappe.throw(str(award_info))
        start_date = get_first_day(getdate(nowdate()))
        end_date = get_last_day(getdate(nowdate()))
        doc = frappe.new_doc("Salary Slip")
        doc.salary_slip_based_on_timesheet="0"
        doc.payroll_frequency= "Monthly"
        doc.start_date= start_date
        doc.end_date= end_date
        doc.employee= self.employee
        doc.employee_name= self.employee_name
        doc.company= "Tawari"
        doc.posting_date= start_date
        doc.insert()
        grosspay =doc.gross_pay
        result=grosspay
        if result:
            return result
        else:
            # Arabic: "There is no salary slip for this employee".
            frappe.throw("لا يوجد قسيمة راتب لهذا الموظف")

    def validate(self):
        if not self.last_working_date:
            frappe.throw("Please enter your last working date")
        # Block resignation while the employee has a sanctioned loan.
        if frappe.get_value('Employee Loan', filters={'employee' : self.employee,'status':'Sanctioned'}):
            name=frappe.get_value('Employee Loan', filters={'employee' : self.employee,'status':'Sanctioned'})
            loan_emp =frappe.get_doc("Employee Loan",name)
            mm=loan_emp.status
            frappe.throw(self.employee+"/ "+self.employee_name+" have an active loan")
        self.validate_emp()
        if self.workflow_state:
            if "Rejected" in self.workflow_state:
                # NOTE(review): docstatus is set to 1 and then immediately to 2
                # (cancelled); the first assignment looks redundant — confirm.
                self.docstatus = 1
                self.docstatus = 2

    def validate_emp(self):
        # On first save, derive the initial workflow state from the
        # creating user's highest role.
        if self.get('__islocal'):
            if u'CEO' in frappe.get_roles(frappe.session.user):
                self.workflow_state = "Created By CEO"
            elif u'Director' in frappe.get_roles(frappe.session.user):
                self.workflow_state = "Created By Director"
            elif u'Manager' in frappe.get_roles(frappe.session.user):
                self.workflow_state = "Created By Manager"
            elif u'Line Manager' in frappe.get_roles(frappe.session.user):
                self.workflow_state = "Created By Line Manager"
            elif u'Employee' in frappe.get_roles(frappe.session.user):
                self.workflow_state = "Pending"
        #if frappe.get_value('Financial Custody', filters={'employee' : self.employee}):
            #name=frappe.get_value('Financial Custody', filters={'employee' : self.employee})
            #custody =frappe.get_doc("Financial Custody",name)
            #approver=custody.reported_by
            #if approver:
                #frappe.throw(self.employee+"/ "+self.employee_name+" have an active Financial Custody approved by "+approver)
def get_permission_query_conditions(user):
    """Permission hook for this doctype; currently imposes no restriction.

    The commented-out draft below would limit Employees to their own records.
    """
    pass
    # if not user: user = frappe.session.user
    # employees = frappe.get_list("Employee", fields=["name"], filters={'user_id': user}, ignore_permissions=True)
    # if employees:
    #     query = ""
    #     employee = frappe.get_doc('Employee', {'name': employees[0].name})
    #     if u'Employee' in frappe.get_roles(user):
    #         if query != "":
    #             query+=" or "
    #         query+=""" employee = '{0}'""".format(employee.name)
    #     return query
|
import heterocl as hcl
import numpy as np
import time
import plotly.graph_objects as go
from gridProcessing import Grid
from shape_functions import *
from custom_graph_functions import *
from DubinsCar import *
import math
""" USER INTERFACES
- Define grid
- Generate initial values for grid using shape functions
- Time length for computations
- Run
"""
# Create a grid
# Fix: gridProcessing exports the class ``Grid`` (capitalised); calling
# lowercase ``grid(...)`` raised NameError at import time.
g = Grid(np.array([-5.0, -5.0, -math.pi]), np.array([5.0, 5.0, math.pi]), 3, np.array([100, 100, 100]), 2)
# Use the grid to initialize initial value function
shape = CyclinderShape(g, 3, np.zeros(3), 1)
# Define my car
myCar = DubinsCar(x=np.array([0, 0, 0]), wMax=1, speed=1, dMax=np.array([0, 0, 0]), uMode="min", dMode="max")
# Look-back length and time step
lookback_length = 1.00
t_step = 0.05
def HJ_PDE_solver(V_new, V_init, thetas ,t):
    """Build one explicit integration step of the HJ PDE in HeteroCL.

    V_new  : output tensor — receives the integrated value function
    V_init : previous-step value function (overwritten with V_new at the end)
    thetas : 1-D tensor of theta grid coordinates (third grid dimension)
    t      : one-element tensor receiving the CFL-bounded time step
    """
    # These variables are used to dissipation calculation
    max_alpha1 = hcl.scalar(-1e9, "max_alpha1")
    max_alpha2 = hcl.scalar(-1e9, "max_alpha2")
    max_alpha3 = hcl.scalar(-1e9, "max_alpha3")

    # Calculate spatial derivative
    def spa_derivX(i, j, k):
        # Left/right one-sided differences along x; boundary cells are
        # extrapolated from the adjacent interior cell.
        left_deriv = hcl.scalar(0, "left_deriv")
        right_deriv = hcl.scalar(0, "right_deriv")
        with hcl.if_(i == 0):
            left_boundary = hcl.scalar(0, "left_boundary")
            left_boundary[0] = V_init[i, j, k] + my_abs(V_init[i + 1, j, k] - V_init[i, j, k]) * my_sign(
                V_init[i, j, k])
            left_deriv[0] = (V_init[i, j, k] - left_boundary[0]) / g.dx[0]
            right_deriv[0] = (V_init[i + 1, j, k] - V_init[i, j, k]) / g.dx[0]
        with hcl.elif_(i == V_init.shape[0] - 1):
            right_boundary = hcl.scalar(0, "right_boundary")
            right_boundary[0] = V_init[i, j, k] + my_abs(V_init[i, j, k] - V_init[i - 1, j, k]) * my_sign(
                V_init[i, j, k])
            left_deriv[0] = (V_init[i, j, k] - V_init[i - 1, j, k]) / g.dx[0]
            right_deriv[0] = (right_boundary[0] - V_init[i, j, k]) / g.dx[0]
        with hcl.elif_(i != 0 and i != V_init.shape[0] - 1):
            left_deriv[0] = (V_init[i, j, k] - V_init[i - 1, j, k]) / g.dx[0]
            right_deriv[0] = (V_init[i + 1, j, k] - V_init[i, j, k]) / g.dx[0]
        return left_deriv[0], right_deriv[0]

    def spa_derivY(i, j, k):
        # Same one-sided differences along y.
        left_deriv = hcl.scalar(0, "left_deriv")
        right_deriv = hcl.scalar(0, "right_deriv")
        with hcl.if_(j == 0):
            left_boundary = hcl.scalar(0, "left_boundary")
            left_boundary[0] = V_init[i, j, k] + my_abs(V_init[i, j + 1, k] - V_init[i, j, k]) * my_sign(
                V_init[i, j, k])
            left_deriv[0] = (V_init[i, j, k] - left_boundary[0]) / g.dx[1]
            right_deriv[0] = (V_init[i, j + 1, k] - V_init[i, j, k]) / g.dx[1]
        with hcl.elif_(j == V_init.shape[1] - 1):
            right_boundary = hcl.scalar(0, "right_boundary")
            right_boundary[0] = V_init[i, j, k] + my_abs(V_init[i, j, k] - V_init[i, j - 1, k]) * my_sign(
                V_init[i, j, k])
            left_deriv[0] = (V_init[i, j, k] - V_init[i, j - 1, k]) / g.dx[1]
            right_deriv[0] = (right_boundary[0] - V_init[i, j, k]) / g.dx[1]
        with hcl.elif_(j != 0 and j != V_init.shape[1] - 1):
            left_deriv[0] = (V_init[i, j, k] - V_init[i, j - 1, k]) / g.dx[1]
            right_deriv[0] = (V_init[i, j + 1, k] - V_init[i, j, k]) / g.dx[1]
        return left_deriv[0], right_deriv[0]

    def spa_derivT(i, j, k):
        # Differences along theta; this axis wraps around (periodic boundary).
        left_deriv = hcl.scalar(0, "left_deriv")
        right_deriv = hcl.scalar(0, "right_deriv")
        with hcl.if_(k == 0):
            left_boundary = hcl.scalar(0, "left_boundary")
            # left_boundary[0] = V_init[i,j,50]
            left_boundary[0] = V_init[i, j, V_init.shape[2] - 1]
            left_deriv[0] = (V_init[i, j, k] - left_boundary[0]) / g.dx[2]
            right_deriv[0] = (V_init[i, j, k + 1] - V_init[i, j, k]) / g.dx[2]
        with hcl.elif_(k == V_init.shape[2] - 1):
            right_boundary = hcl.scalar(0, "right_boundary")
            right_boundary[0] = V_init[i, j, 0]
            left_deriv[0] = (V_init[i, j, k] - V_init[i, j, k - 1]) / g.dx[2]
            right_deriv[0] = (right_boundary[0] - V_init[i, j, k]) / g.dx[2]
        with hcl.elif_(k != 0 and k != V_init.shape[2] - 1):
            left_deriv[0] = (V_init[i, j, k] - V_init[i, j, k - 1]) / g.dx[2]
            right_deriv[0] = (V_init[i, j, k + 1] - V_init[i, j, k]) / g.dx[2]
        return left_deriv[0], right_deriv[0]

    def step_bound():  # Function to calculate time step
        # CFL-style bound built from the max dynamics rates, capped at t_step.
        stepBoundInv = hcl.scalar(0, "stepBoundInv")
        stepBound = hcl.scalar(0, "stepBound")
        stepBoundInv[0] = max_alpha1[0]/g.dx[0] + max_alpha2[0]/g.dx[1] + max_alpha3[0]/g.dx[2]
        stepBound[0] = 0.8/stepBoundInv[0]
        with hcl.if_(stepBound > t_step):
            stepBound[0] = t_step
        time = stepBound[0]
        return time

    # Calculate Hamiltonian for every grid point in V_init
    with hcl.Stage("Hamiltonian"):
        # NOTE(review): the outer loop variable over shape[0] is named "k"
        # and the innermost over shape[2] is named "i" — names are swapped
        # relative to the index order used in the body; confirm intent.
        with hcl.for_(0, V_init.shape[0], name="k") as k:  # Plus 1 as for loop count stops at V_init.shape[0]
            with hcl.for_(0, V_init.shape[1], name="j") as j:
                with hcl.for_(0, V_init.shape[2], name="i") as i:
                    # Variables to calculate dV_dx
                    dV_dx_L = hcl.scalar(0, "dV_dx_L")
                    dV_dx_R = hcl.scalar(0, "dV_dx_R")
                    dV_dx = hcl.scalar(0, "dV_dx")
                    # Variables to calculate dV_dy
                    dV_dy_L = hcl.scalar(0, "dV_dy_L")
                    dV_dy_R = hcl.scalar(0, "dV_dy_R")
                    dV_dy = hcl.scalar(0, "dV_dy")
                    # Variables to calculate dV_dtheta
                    dV_dT_L = hcl.scalar(0, "dV_dT_L")
                    dV_dT_R = hcl.scalar(0, "dV_dT_R")
                    dV_dT = hcl.scalar(0, "dV_dT")
                    # No tensor slice operation
                    dV_dx_L[0], dV_dx_R[0] = spa_derivX(i, j, k)
                    dV_dy_L[0], dV_dy_R[0] = spa_derivY(i, j, k)
                    dV_dT_L[0], dV_dT_R[0] = spa_derivT(i, j, k)
                    # Calculate average gradient
                    dV_dx[0] = (dV_dx_L + dV_dx_R) / 2
                    dV_dy[0] = (dV_dy_L + dV_dy_R) / 2
                    dV_dT[0] = (dV_dT_L + dV_dT_R) / 2
                    # Use method of DubinsCar to solve optimal control instead
                    uOpt = myCar.opt_ctrl((dV_dx[0], dV_dy[0], dV_dT[0]))
                    # Calculate dynamical rates of changes
                    dx_dt, dy_dt, dtheta_dt = myCar.dynamics(thetas[k], uOpt)
                    # Calculate Hamiltonian terms:
                    V_new[i, j, k] = -(dx_dt * dV_dx[0] + dy_dt * dV_dy[0] + dtheta_dt * dV_dT[0])
                    # Calculate dissipation step (Lax-Friedrichs style term)
                    dx_dt = my_abs(dx_dt)
                    dy_dt = my_abs(dy_dt)
                    dtheta_dt = my_abs(dtheta_dt)
                    diss = hcl.scalar(0, "diss")
                    diss[0] = 0.5*((dV_dx_R[0] - dV_dx_L[0])*dx_dt + (dV_dy_R[0] - dV_dy_L[0])*dy_dt + (dV_dT_R[0] - dV_dT_L[0])* dtheta_dt)
                    V_new[i, j, k] = -(V_new[i, j, k] - diss[0])
                    # Calculate alphas (running maxima feeding step_bound)
                    with hcl.if_(dx_dt > max_alpha1):
                        max_alpha1[0] = dx_dt
                    with hcl.if_(dy_dt > max_alpha2):
                        max_alpha2[0] = dy_dt
                    with hcl.if_(dtheta_dt > max_alpha3):
                        max_alpha3[0] = dtheta_dt

    # Determine time step
    hcl.update(t, lambda x: step_bound())
    # Integrate
    result = hcl.update(V_new, lambda i,j,k: V_init[i,j,k] + V_new[i,j,k] * t[0])
    # Copy V_new to V_init
    hcl.update(V_init, lambda i,j,k: V_new[i,j,k] )
    return result
def main():
    """Solve the Dubins-car HJ PDE on the global grid and plot the zero level set.

    Builds and schedules the HeteroCL kernel, integrates until the look-back
    horizon is reached, then renders the zero isosurface with plotly.
    """
    hcl.init()
    hcl.config.init_dtype = hcl.Float()
    # Placeholders for the computation graph inputs/outputs.
    V_f = hcl.placeholder(tuple(g.pts_each_dim), name="V_f", dtype=hcl.Float())
    V_init = hcl.placeholder(tuple(g.pts_each_dim), name="V_init", dtype=hcl.Float())
    thetas = hcl.placeholder((g.pts_each_dim[2],), name="thetas", dtype=hcl.Float())
    t = hcl.placeholder((1,), name="t", dtype=hcl.Float())
    # Create schedule
    s = hcl.create_schedule([V_f, V_init, thetas, t], HJ_PDE_solver)
    # Loop optimizations on the Hamiltonian stage.
    s_H = HJ_PDE_solver.Hamiltonian
    # Split the loops (factors are experimental, changeable).
    k_out, k_in = s[s_H].split(s_H.k, 10)
    j_out, j_in = s[s_H].split(s_H.j, 10)
    i_out, i_in = s[s_H].split(s_H.i, 10)
    # Reorder the loops
    s[s_H].reorder(j_out, k_in)
    s[s_H].reorder(i_out, k_in)
    s[s_H].reorder(k_in, j_in)
    # FPGA back end - parallel specs
    s[s_H].pipeline(k_in)
    s[s_H].unroll(i_out, 5)
    # If CPU option
    s[s_H].parallel(k_out)
    # Build the executable.
    solve_pde = hcl.build(s)
    # Prepare device arrays for graph computation.
    V_0 = hcl.asarray(shape)
    V_1 = hcl.asarray(np.zeros(tuple(g.pts_each_dim)))
    t_minh = hcl.asarray(np.zeros(1))
    # Theta coordinates as a flat array.
    list_theta = np.reshape(g.vs[2], g.pts_each_dim[2])
    list_theta = hcl.asarray(list_theta)
    # Variables used for timing
    execution_time = 0
    lookback_time = 0
    print("I'm here\n")
    # Integrate until the look-back horizon is covered.
    while lookback_time <= lookback_length:
        start = time.time()
        # Run one integration step on the device arrays.
        solve_pde(V_1, V_0, list_theta, t_minh)
        if lookback_time != 0:  # Exclude first time of the computation
            execution_time += time.time() - start
        # Fix: np.asscalar() was deprecated and removed in NumPy >= 1.23;
        # ndarray.item() is the supported equivalent.
        lookback_time += t_minh.asnumpy().item()
        print("Computational time to integrate (s): {:.5f}".format(time.time() - start))
    # Time info printing
    print("Total kernel time (s): {:.5f}".format(execution_time))
    print("Finished solving\n")
    # Plot the zero level set of the final value function.
    print("Plotting beautiful plots. Please wait\n")
    fig = go.Figure(data=go.Isosurface(
        x=g.mg_X.flatten(),
        y=g.mg_Y.flatten(),
        z=g.mg_T.flatten(),
        value=V_1.asnumpy().flatten(),
        colorscale='jet',
        isomin=0,
        surface_count=1,
        isomax=0,
        caps=dict(x_show=True, y_show=True)
    ))
    fig.show()
    print("Please check the plot on your browser.")


if __name__ == '__main__':
    main()
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import tabs
from openstack_dashboard.api import sahara as saharaclient
LOG = logging.getLogger(__name__)
class GeneralTab(tabs.Tab):
    """Horizon tab showing the general information of a single job."""

    name = _("General Info")
    slug = "job_details_tab"
    template_name = "project/data_processing.jobs/_details.html"

    def get_context_data(self, request):
        """Look up the job referenced by the URL kwargs for the template."""
        job_id = self.tab_group.kwargs['job_id']
        return {"job": saharaclient.job_get(request, job_id)}
class JobDetailsTabs(tabs.TabGroup):
    """Tab group for the job-details page; sticky keeps the active tab in the URL."""
    slug = "job_details"
    tabs = (GeneralTab,)
    sticky = True
|
# coding: utf-8
import os
import time
import libmc
import slow_memcached_server
import subprocess
def memcached_server_ctl(cmd, port):
    """Run the repo's misc/memcached_server control script.

    cmd is the script sub-command (e.g. 'start'/'stop'), port the server port.
    """
    tests_dir = os.path.dirname(os.path.abspath(__file__))
    repo_root = os.path.dirname(os.path.dirname(tests_dir))
    ctl_path = os.path.join(repo_root, 'misc', 'memcached_server')
    print(ctl_path)
    subprocess.check_call([ctl_path, cmd, str(port)])
def test_soft_server_error():
    """A SERVER_ERROR reply must not blacklist the server (soft failure)."""
    mc = libmc.Client(["127.0.0.1:%d" % slow_memcached_server.PORT])
    # Poll timeout must exceed the test server's artificial blocking time.
    mc.config(libmc._client.MC_POLL_TIMEOUT,
              slow_memcached_server.BLOCKING_SECONDS * 1000 * 2)  # ms
    RETRY_TIMEOUT = 2
    mc.config(libmc.MC_RETRY_TIMEOUT, RETRY_TIMEOUT)
    assert mc.set('foo', 1)
    # This magic key makes the test server answer with SERVER_ERROR.
    assert not mc.set(slow_memcached_server.KEY_SET_SERVER_ERROR.decode('utf8'), 1)
    # The server stays usable immediately, during, and after the retry window.
    assert mc.set('foo', 1)  # back to live
    time.sleep(RETRY_TIMEOUT / 2)
    assert mc.set('foo', 1)  # alive
    time.sleep(RETRY_TIMEOUT + 1)
    assert mc.set('foo', 1)  # alive
def test_hard_server_error():
    """A dead server is blacklisted until MC_RETRY_TIMEOUT elapses."""
    normal_port = 21211
    mc = libmc.Client(["127.0.0.1:%d" % normal_port])
    RETRY_TIMEOUT = 20
    mc.config(libmc.MC_RETRY_TIMEOUT, RETRY_TIMEOUT)
    assert mc.set('foo', 1)
    memcached_server_ctl('stop', normal_port)
    assert not mc.set('foo', 1)  # fail
    memcached_server_ctl('start', normal_port)
    # Restarting the server is not enough: the client keeps it blacklisted
    # until the retry timeout has passed.
    assert not mc.set('foo', 1)  # still fail
    time.sleep(RETRY_TIMEOUT + 1)
    assert mc.set('foo', 1)  # back to live
def main():
    """Run both server-error scenarios in sequence."""
    for scenario in (test_soft_server_error, test_hard_server_error):
        scenario()


if __name__ == '__main__':
    main()
|
import numpy as np
import paddle
import pytest
import tensorflow as tf
import torch
from finetuner.tuner.callback import EarlyStopping
from finetuner.tuner.keras import KerasTuner
from finetuner.tuner.paddle import PaddleTuner
from finetuner.tuner.pytorch import PytorchTuner
from finetuner.tuner.state import TunerState
@pytest.fixture(scope='module')
def pytorch_model():
    """Module-scoped 10->10 linear PyTorch model shared by the tests."""
    return torch.nn.Linear(in_features=10, out_features=10)
@pytest.fixture(scope='module')
def keras_model():
    """Module-scoped single-Dense Keras model shared by the tests."""
    return tf.keras.Sequential([tf.keras.layers.Dense(10)])
@pytest.fixture(scope='module')
def paddle_model():
    """Module-scoped 10->10 linear Paddle model shared by the tests."""
    return paddle.nn.Linear(in_features=10, out_features=10)
@pytest.mark.parametrize(
    'mode, monitor, operation, best',
    (
        ('min', 'train_loss', np.less, np.Inf),
        ('max', 'train_loss', np.greater, -np.Inf),
        ('auto', 'train_loss', np.less, np.Inf),
        ('max', 'precision', np.greater, -np.Inf),
        # Unrecognized mode: expected values imply it behaves like 'auto'
        # for this monitor — inferred from the parametrization, confirm.
        ('somethingelse', 'precision', np.greater, -np.Inf),
    ),
)
def test_mode(mode: str, monitor: str, operation, best):
    """EarlyStopping derives its comparison op and initial best from mode/monitor."""
    checkpoint = EarlyStopping(mode=mode, monitor=monitor)
    assert checkpoint._monitor_op == operation
    assert checkpoint._best == best
def test_early_stopping_pytorch(pytorch_model):
    """Rising loss over `patience` epochs must stop a PyTorch tuner."""
    tuner = PytorchTuner(embed_model=pytorch_model)
    stopper = EarlyStopping()

    def run_epoch(epoch, loss):
        tuner.state = TunerState(epoch=epoch, current_loss=loss)
        stopper.on_val_batch_end(tuner)
        stopper.on_epoch_end(tuner)

    run_epoch(0, 0.5)
    assert stopper._epoch_counter == 0
    run_epoch(1, 0.6)
    assert stopper._epoch_counter == 1
    run_epoch(2, 0.7)
    assert stopper._epoch_counter == stopper._patience
    assert tuner.stop_training
def test_early_stopping_paddle(paddle_model):
    """Rising loss over `patience` epochs must stop a Paddle tuner."""
    tuner = PaddleTuner(embed_model=paddle_model)
    stopper = EarlyStopping()

    def run_epoch(epoch, loss):
        tuner.state = TunerState(epoch=epoch, current_loss=loss)
        stopper.on_val_batch_end(tuner)
        stopper.on_epoch_end(tuner)

    run_epoch(0, 0.5)
    assert stopper._epoch_counter == 0
    run_epoch(1, 0.6)
    assert stopper._epoch_counter == 1
    run_epoch(2, 0.7)
    assert stopper._epoch_counter == stopper._patience
    assert tuner.stop_training
def test_early_stopping_keras(keras_model):
    """Keras variant: worsening losses increment the counter each epoch
    until patience is exhausted and training stops."""
    tuner = KerasTuner(embed_model=keras_model)
    checkpoint = EarlyStopping()
    worsening_losses = (0.5, 0.6, 0.7)
    for epoch, loss in enumerate(worsening_losses):
        tuner.state = TunerState(epoch=epoch, current_loss=loss)
        checkpoint.on_val_batch_end(tuner)
        checkpoint.on_epoch_end(tuner)
        assert checkpoint._epoch_counter == epoch
    assert checkpoint._epoch_counter == checkpoint._patience
    assert tuner.stop_training
def test_baseline(keras_model):
    """When a baseline is configured, every epoch whose loss stays above it
    counts against patience, even if the loss is improving."""
    tuner = KerasTuner(embed_model=keras_model)
    checkpoint = EarlyStopping(baseline=0.01)
    # Both losses are above the 0.01 baseline, so the counter keeps growing.
    for loss, expected_counter in ((0.5, 1), (0.3, checkpoint._patience)):
        tuner.state = TunerState(epoch=0, current_loss=loss)
        checkpoint.on_train_batch_end(tuner)
        checkpoint.on_epoch_end(tuner)
        assert checkpoint._epoch_counter == expected_counter
    assert tuner.stop_training
def test_counter_reset(pytorch_model):
    """An improving loss resets the patience counter back to zero, so
    training is not stopped."""
    tuner = PytorchTuner(embed_model=pytorch_model)
    checkpoint = EarlyStopping()
    # (epoch, loss, expected counter): worse loss bumps the counter,
    # the final improvement resets it.
    schedule = ((0, 0.5, 0), (1, 0.6, 1), (2, 0.4, 0))
    for epoch, loss, expected_counter in schedule:
        tuner.state = TunerState(epoch=epoch, current_loss=loss)
        checkpoint.on_val_batch_end(tuner)
        checkpoint.on_epoch_end(tuner)
        assert checkpoint._epoch_counter == expected_counter
    assert not tuner.stop_training
|
from bs4 import BeautifulSoup
from bs4.element import Tag
from lxml import objectify
import xmltodict
from functools import wraps
from app.modules.firewall import Firewall
import ConfigParser, re, json, logging
from threading import Thread
from requests import get
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3 import disable_warnings
#Disable requests insecure log
disable_warnings(InsecureRequestWarning)
#Get logger
logger = logging.getLogger(__name__)
class PAN(Firewall):
    """PAN-OS XML API client.

    On construction it queries the HA state and rewrites
    firewall_config['primary'] to point at the currently active unit
    (or None when the state cannot be determined).
    """

    def __init__(self, firewall_config):
        """firewall_config must provide 'primary', 'secondary' and 'key'."""
        self.firewall_config = firewall_config
        ha = self.getMaster()
        # All later API calls go to whichever member is active right now.
        self.firewall_config['primary'] = ha['active'] if ha['ok'] else None
        self.primary = self.firewall_config['primary']

    def apicall(self, verify=False, **kwargs):
        """GET the device API; kwargs become query-string parameters.

        TLS verification defaults to off because firewalls commonly run
        self-signed certificates (warnings are disabled at module import).
        """
        self.__url_base = "https://{0}/api?key={1}".format(self.firewall_config['primary'], self.firewall_config['key'])
        response = get(self.__url_base, params=kwargs, verify=verify)
        logger.debug("{0}: {1} {2}".format(self.firewall_config['primary'], self.__url_base, str(kwargs)))
        return response

    def _parse_ha_state(self, soup):
        """Turn a <show high-availability state> reply into
        {'ok': bool, 'active': ip, 'passive': ip} or an error dict."""
        if soup.response['status'] != 'success':
            return {'ok' : False, 'info' : 'Could not get active firewall\'s ip.', 'panos-response' : soup.response['status']}
        if soup.response.result.enabled.text == 'no':
            logger.info("No HA enabled on Firewall, using primary as active IP.")
            return {'ok' : True,
                    'active' : self.firewall_config['primary'],
                    'passive' : self.firewall_config['secondary']}
        local_state = soup.response.result.group.find('local-info').state.text
        peer_ip = soup.response.result.group.find('peer-info').find('mgmt-ip').text.split('/')[0]
        return {'ok' : True,
                'active' : self.firewall_config['primary'] if local_state == 'active' else peer_ip,
                'passive' : self.firewall_config['primary'] if local_state == 'passive' else peer_ip}

    def getMaster(self):
        """Resolve the active/passive HA members, retrying once against the
        secondary when the primary does not answer.

        BUG FIX: the failover branch used to return {'status': True, ...}
        instead of {'ok': True, ...}, which made __init__ raise KeyError
        whenever the primary was unreachable.
        """
        cmd = "<show><high-availability><state></state></high-availability></show>"
        response = self.apicall(type='op', cmd=cmd)
        if response.ok:
            return self._parse_ha_state(BeautifulSoup(response.text, 'xml'))
        # Primary did not answer: swap roles and query the other unit once.
        self.firewall_config['primary'], self.firewall_config['secondary'] = \
            self.firewall_config['secondary'], self.firewall_config['primary']
        response = self.apicall(type='op', cmd=cmd)
        return self._parse_ha_state(BeautifulSoup(response.text, 'xml'))

    def filter(self, args, _entries):
        """Keep only entries matching every key/value in *args*; mutates
        and returns _entries.

        Matching rules: case-insensitive substring for strings and list
        members, 'true'/'false' literals for booleans, JSON equality for
        dicts; entries missing a filtered key are dropped.
        """
        for opt in args:
            to_remove = list()
            for entry in _entries:
                if opt not in entry:
                    to_remove.append(entry)
                    continue
                value = entry[opt]
                if type(value) == list:
                    # Keep the entry if any list member matches (for-else).
                    for e in value:
                        if args[opt].lower() in e.lower():
                            break
                    else:
                        to_remove.append(entry)
                elif type(value) == bool:
                    wanted = True if args[opt].lower() == 'true' else False if args[opt].lower() == 'false' else None
                    if wanted is None or wanted != value:
                        to_remove.append(entry)
                elif type(value) == dict:
                    if json.loads(args[opt]) != value:
                        to_remove.append(entry)
                else:
                    if args[opt].lower() not in value.lower():
                        to_remove.append(entry)
            for doomed in to_remove:
                _entries.remove(doomed)
        return _entries
class configuration(PAN):
    """Read-only access to the firewall's running configuration."""

    def get(self):
        """Fetch the running config.

        Returns ({'config': <xml text>}, 200) on success, or
        ({'error': <body>}, 502) when the API call or the device rejects it.
        """
        response = self.apicall(type='op', cmd='<show><config><running></running></config></show>')
        if response.status_code != 200:
            # BUG FIX: this used self.firewall, an attribute that never
            # existed (the class only sets firewall_config/primary), so the
            # error path itself raised AttributeError.
            logger.error("{0}: ".format(self.firewall_config['primary']) + str(response.text))
            return {'error' : str(response.text)}, 502
        else:
            soup = BeautifulSoup(response.text, 'xml')
            if soup.response['status'] == 'error':
                return {'error' : str(soup.msg.text)}, 502
            else:
                return {'config' : response.text}, 200
class rules(PAN):
    """CRUD access to the vsys1 security rulebase over the PAN-OS XML API.

    Each method maps a REST verb onto type=config API calls and converts
    the XML <entry> elements to/from plain dicts. Python 2 code: uses
    dict.iteritems().
    """

    def get(self,args):
        """List all security rules, post-filtered through PAN.filter(args, _entries).

        Returns {'len': n, 'rules': [...]} on success or
        ({'error': <body>}, 502) when the firewall call fails.
        """
        response = self.apicall(type='config',\
            action='get',\
            xpath='/config/devices/entry[@name="localhost.localdomain"]/vsys/entry[@name="vsys1"]/rulebase/security/rules')
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        _entries = list()
        for entry in BeautifulSoup(response.text,'xml').rules.children:
            #Some tags are a newline, skip them
            if type(entry) != Tag:
                continue
            # Flatten the XML <entry> into a dict; absent scalar tags
            # default to None, absent yes/no flags to False.
            aux = {
                'name' : entry['name'],
                'from' : list(),
                'to' : list(),
                'source' : list(),
                'destination' : list(),
                'action' : entry.find('action').text,
                'application' : list(),
                'category' : list(),
                'description' : entry.find('description').text if entry.find('description') else None,
                'disabled' : False if not entry.find('disabled') else True if entry.find('disabled').text == 'yes' else False,
                'hip-profiles' : list(),
                'icmp-unreachable' : False if not entry.find('icmp-unreachable') else True if entry.find('icmp-unreachable').text == 'yes' else False,
                'log-end' : False if not entry.find('log-end') else True if entry.find('log-end').text == 'yes' else False,
                'log-setting' : entry.find('log-setting').text if entry.find('log-setting') else None,
                'log-start' : False if not entry.find('log-start') else True if entry.find('log-start').text == 'yes' else False,
                'negate-destination' : False if not entry.find('negate-destination') else True if entry.find('negate-destination').text == 'yes' else False,
                'negate-source' : False if not entry.find('negate-source') else True if entry.find('negate-source').text == 'yes' else False,
                'disable-server-response-inspection' : False if not entry.find('disable-server-response-inspection') else True if entry.find('disable-server-response-inspection').text == 'yes' else False,
                'profile-setting' : dict(),
                'qos' : {'marking' : entry.marking.next_element.next_element.name if entry.find('marking') else None, 'type' : entry.marking.next_element.next_element.text if entry.find('marking') else None},
                'rule-type' : entry.find('rule-type').text if entry.find('rule-type') else 'universal',
                'schedule' : entry.schedule.text if entry.find('schedule') else None,
                'service' : list(),
                'source-user' : list(),
                'tag' : list()
            }
            #Iterate all lists
            #NOTE(review): 'hip.profiles' does not match the aux key
            #'hip-profiles', so that list is never populated -- confirm intent.
            for s in ['from','to','source','destination','application','category','hip.profiles','service','source-user','tag']:
                #Check if attribute exists
                if not entry.find(s):
                    continue
                for member in entry.find(s).children:
                    #Some tags are a newline, skip them
                    if type(member) != Tag:
                        continue
                    aux[s].append(member.text)
            #Special iteration for profile setting
            if not entry.find('profile-setting'):
                aux['profile-setting'] = None
            elif entry.find('profile-setting').group:
                aux['profile-setting'] = {'type' : 'group', 'name' : entry.find('profile-setting').group.member.text if entry.find('profile-setting').group.find('member') else None}
            else:
                aux['profile-setting'] = {
                    'type' : 'profile',
                    'profiles' : {
                        'url-filtering' : entry.find('url-filtering').member.text if entry.find('url-filtering') else None,
                        'data-filtering' : entry.find('data-filtering').member.text if entry.find('data-filtering') else None,
                        'file-blocking' : entry.find('file-blocking').member.text if entry.find('file-blocking') else None,
                        'virus' : entry.find('virus').member.text if entry.find('virus') else None,
                        'spyware' : entry.find('spyware').member.text if entry.find('spyware') else None,
                        'vulnerability' : entry.find('vulnerability').member.text if entry.find('vulnerability') else None,
                        'wildfire-analysis' : entry.find('wildfire-analysis').member.text if entry.find('wildfire-analysis') else None
                    }
                }
            _entries.append(aux)
        _entries = self.filter(args,_entries)
        return {'len' : len(_entries), 'rules' : _entries}

    def post(self,data):
        """Create the rule described by *data* (must include 'name').

        Returns (data, 201) on success, 409 if the rule already exists,
        400 on malformed input, 502 on firewall/API failures.
        """
        response = self.apicall(type='config',\
            action='get',\
            xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(data['name']))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the rule was not found.
        if not soup.result.isSelfClosing:
            logger.warning("Rule already exists.")
            return {'error' : 'Rule already exists.'}, 409
        #Rule does not exists, add it.
        # Build the <entry> payload one field at a time from the input dict.
        element = BeautifulSoup('','xml')
        for k,v in data.iteritems():
            if k == 'name':
                continue
            if k in ['negate-destination','negate-source','icmp-unreachable','log-start','log-end','disabled']:
                if v:
                    element.append(element.new_tag(k))
                    element.find(k).append('yes' if v else 'no')
            elif k in ['action','log-setting','rule-type','description','schedule']:
                if v:
                    element.append(element.new_tag(k))
                    element.find(k).append(v)
            elif k in ['from','to','source','destination','source-user','tag','category','application','service','hip-profiles']:
                element.append(element.new_tag(k))
                if type(v) != list:
                    logger.warning('{0} must be a list.'.format(k))
                    return {'error' : '{0} must be a list.'.format(k)}, 400
                for d in v:
                    element.find(k).append(element.new_tag('member'))
                    element.find(k).find_all('member')[-1].append(d)
            elif k == 'disable-server-response-inspection':
                if type(v) != bool:
                    logger.warning('{0} must be a boolean.'.format(k))
                    return {'error' : '{0} must be a boolean.'.format(k)}, 400
                element.append(element.new_tag('option'))
                element.option.append(element.new_tag('disable-server-response-inspection'))
                element.find('disable-server-response-inspection').append('yes' if v else 'no')
            elif k == 'qos':
                element.append(element.new_tag('qos'))
                if v['marking'] in ['ip-precedence','ip-dscp','folow-c2s-flow']:
                    element.qos.append(element.new_tag('marking'))
                    element.qos.marking.append(element.new_tag(v['marking']))
                    if v['type']:
                        #NOTE(review): find(v) is passed the whole qos dict;
                        #this looks like it should be find(v['marking']) -- confirm.
                        element.find(v).append(v['type'])
            elif k == 'profile-setting':
                element.append(element.new_tag('profile-setting'))
                if v['type'] == 'profile':
                    element.find('profile-setting').append(element.new_tag('profiles'))
                    for _k,_v in v['profiles'].iteritems():
                        if _v:
                            element.find('profile-setting').append(element.new_tag(_k))
                            element.find(_k).append(element.new_tag('member'))
                            element.find(_k).member.append(_v)
                elif v['type'] == 'group':
                    element.find('profile-setting').append(element.new_tag('group'))
                    if v['name']:
                        element.find('profile-setting').group.append(element.new_tag('member'))
                        element.find('profile-setting').group.member.append(v['name'])
            else:
                logger.warning('{0} not a valid rule parameter.'.format(k))
                return {'error' : '{0} not a valid rule parameter.'.format(k)}, 400
        # Strip the XML declaration BeautifulSoup prepends before sending.
        element = str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n','')
        response = self.apicall(type='config',\
            action='set',\
            xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(data['name']),\
            element=element)
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        if soup.response['status'] != 'success':
            logger.warning("Rule badly formatted: " + str(response.status_code))
            return {'error' : 'Rule badly formatted.'}, 400
        else:
            return data, 201

    def patch(self,name,data):
        """Merge *data* into the existing rule *name* using action=set.

        Returns (merged rule dict, 200), 400 if the rule does not exist or
        input is malformed, 502 on firewall/API failures.
        """
        response = self.apicall(type='config',\
            action='get',\
            xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(name))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the rule was not found.
        if soup.result.isSelfClosing:
            logger.warning("Rule does not exists.")
            return {'error' : 'Rule does not exists.'}, 400
        else:
            entry = soup.find('entry')
            #Rule exists, patch it
            # Build only the changed fields; action=set merges them in.
            element = BeautifulSoup('','xml')
            for k,v in data.iteritems():
                if k == 'name':
                    continue
                if k in ['negate-destination','negate-source','icmp-unreachable','log-start','log-end','disabled']:
                    if v:
                        element.append(element.new_tag(k))
                        element.find(k).append('yes' if v else 'no')
                elif k in ['action','log-setting','rule-type','description','schedule']:
                    if v:
                        element.append(element.new_tag(k))
                        element.find(k).append(v)
                elif k in ['from','to','source','destination','source-user','tag','category','application','service','hip-profiles']:
                    element.append(element.new_tag(k))
                    if type(v) != list:
                        logger.warning('{0} must be a list.'.format(k))
                        return {'error' : '{0} must be a list.'.format(k)}, 400
                    for d in v:
                        element.find(k).append(element.new_tag('member'))
                        element.find(k).find_all('member')[-1].append(d)
                elif k == 'disable-server-response-inspection':
                    if type(v) != bool:
                        logger.warning('{0} must be a boolean.'.format(k))
                        return {'error' : '{0} must be a boolean.'.format(k)}, 400
                    element.append(element.new_tag('option'))
                    element.option.append(element.new_tag('disable-server-response-inspection'))
                    element.find('disable-server-response-inspection').append('yes' if v else 'no')
                elif k == 'qos':
                    element.append(element.new_tag('qos'))
                    if v['marking'] in ['ip-precedence','ip-dscp','folow-c2s-flow']:
                        element.qos.append(element.new_tag('marking'))
                        element.qos.marking.append(element.new_tag(v['marking']))
                        if v['type']:
                            #NOTE(review): find(v) is passed the whole qos dict;
                            #looks like it should be find(v['marking']) -- confirm.
                            element.find(v).append(v['type'])
                elif k == 'profile-setting':
                    element.append(element.new_tag('profile-setting'))
                    if v['type'] == 'profile':
                        element.find('profile-setting').append(element.new_tag('profiles'))
                        for _k,_v in v['profiles'].iteritems():
                            if _v:
                                element.find('profile-setting').append(element.new_tag(_k))
                                element.find(_k).append(element.new_tag('member'))
                                element.find(_k).member.append(_v)
                    elif v['type'] == 'group':
                        element.find('profile-setting').append(element.new_tag('group'))
                        if v['name']:
                            element.find('profile-setting').group.append(element.new_tag('member'))
                            element.find('profile-setting').group.member.append(v['name'])
                else:
                    logger.warning('{0} not a valid rule parameter.'.format(k))
                    return {'error' : '{0} not a valid rule parameter.'.format(k)}, 400
            # Strip the XML declaration BeautifulSoup prepends before sending.
            element = str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n','')
            response = self.apicall(type='config',\
                action='set',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(name),\
                element=element)
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            soup = BeautifulSoup(response.text,'xml')
            if soup.response['status'] != 'success':
                logger.warning("Rule badly formatted: " + str(response.status_code))
                return {'error' : 'Rule badly formatted.'}, 400
            else:
                # Rebuild the rule dict from the pre-patch entry, then overlay
                # the patched values for the response body.
                aux = {
                    'name' : entry['name'],
                    'from' : list(),
                    'to' : list(),
                    'source' : list(),
                    'destination' : list(),
                    'action' : entry.find('action').text,
                    'application' : list(),
                    'category' : list(),
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'disabled' : False if not entry.find('disabled') else True if entry.find('disabled').text == 'yes' else False,
                    'hip-profiles' : list(),
                    'icmp-unreachable' : False if not entry.find('icmp-unreachable') else True if entry.find('icmp-unreachable').text == 'yes' else False,
                    'log-end' : False if not entry.find('log-end') else True if entry.find('log-end').text == 'yes' else False,
                    'log-setting' : entry.find('log-setting').text if entry.find('log-setting') else None,
                    'log-start' : False if not entry.find('log-start') else True if entry.find('log-start').text == 'yes' else False,
                    'negate-destination' : False if not entry.find('negate-destination') else True if entry.find('negate-destination').text == 'yes' else False,
                    'negate-source' : False if not entry.find('negate-source') else True if entry.find('negate-source').text == 'yes' else False,
                    'disable-server-response-inspection' : False if not entry.find('disable-server-response-inspection') else True if entry.find('disable-server-response-inspection').text == 'yes' else False,
                    'profile-setting' : dict(),
                    'qos' : {'marking' : entry.marking.next_element.next_element.name if entry.find('marking') else None, 'type' : entry.marking.next_element.next_element.text if entry.find('marking') else None},
                    'rule-type' : entry.find('rule-type').text if entry.find('rule-type') else 'universal',
                    'schedule' : entry.schedule.text if entry.find('schedule') else None,
                    'service' : list(),
                    'source-user' : list(),
                    'tag' : list()
                }
                #Iterate all lists
                #NOTE(review): 'hip.profiles' does not match the aux key
                #'hip-profiles' -- same mismatch as in get(); confirm intent.
                for s in ['from','to','source','destination','application','category','hip.profiles','service','source-user','tag']:
                    #Check if attribute exists
                    if not entry.find(s):
                        continue
                    for member in entry.find(s).children:
                        #Some tags are a newline, skip them
                        if type(member) != Tag:
                            continue
                        aux[s].append(member.text)
                #Special iteration for profile setting
                if not entry.find('profile-setting'):
                    aux['profile-setting'] = None
                elif entry.find('profile-setting').group:
                    aux['profile-setting'] = {'type' : 'group', 'name' : entry.find('profile-setting').group.member.text if entry.find('profile-setting').group.find('member') else None}
                else:
                    aux['profile-setting'] = {
                        'type' : 'profile',
                        'profiles' : {
                            'url-filtering' : entry.find('url-filtering').member.text if entry.find('url-filtering') else None,
                            'data-filtering' : entry.find('data-filtering').member.text if entry.find('data-filtering') else None,
                            'file-blocking' : entry.find('file-blocking').member.text if entry.find('file-blocking') else None,
                            'virus' : entry.find('virus').member.text if entry.find('virus') else None,
                            'spyware' : entry.find('spyware').member.text if entry.find('spyware') else None,
                            'vulnerability' : entry.find('vulnerability').member.text if entry.find('vulnerability') else None,
                            'wildfire-analysis' : entry.find('wildfire-analysis').member.text if entry.find('wildfire-analysis') else None
                        }
                    }
                # Overlay the patched values: list fields are appended to,
                # scalars replaced.
                for k,v in data.iteritems():
                    if type(aux[k]) == list:
                        aux[k].append(v)
                    else:
                        aux[k] = v
                return aux, 200

    def put(self,name,data):
        """Replace fields of rule *name* in place and push it with action=edit.

        Returns (updated rule dict, 200), 400 if the rule does not exist or
        input is malformed, 502 on firewall/API failures.
        """
        response = self.apicall(type='config',\
            action='get',\
            xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(name))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the rule was not found.
        if soup.result.isSelfClosing:
            logger.warning("Rule does not exists.")
            return {'error' : 'Rule does not exists.'}, 400
        else:
            # Mutate the fetched <entry> in place, then send it back whole.
            element = soup.find('entry')
            #Rule exists, patch it
            for k,v in data.iteritems():
                if k == 'name':
                    continue
                if k in ['negate-destination','negate-source','icmp-unreachable','log-start','log-end','disabled']:
                    if v:
                        if not element.find(k):
                            element.append(soup.new_tag(k))
                        else:
                            element.find(k).clear()
                        element.find(k).append('yes' if v else 'no')
                elif k in ['action','log-setting','rule-type','description','schedule']:
                    if v:
                        if not element.find(k):
                            element.append(soup.new_tag(k))
                        else:
                            element.find(k).clear()
                        element.find(k).append(v)
                elif k in ['from','to','source','destination','source-user','tag','category','application','service','hip-profiles']:
                    if not element.find(k):
                        element.append(soup.new_tag(k))
                    else:
                        element.find(k).clear()
                    if type(v) != list:
                        logger.warning('{0} must be a list.'.format(k))
                        return {'error' : '{0} must be a list.'.format(k)}, 400
                    for d in v:
                        element.find(k).append(soup.new_tag('member'))
                        element.find(k).find_all('member')[-1].append(d)
                elif k == 'disable-server-response-inspection':
                    if type(v) != bool:
                        logger.warning('{0} must be a boolean.'.format(k))
                        return {'error' : '{0} must be a boolean.'.format(k)}, 400
                    if not element.find('option'):
                        element.append(soup.new_tag('option'))
                    else:
                        element.find('option').clear()
                    #NOTE(review): element is a Tag, and new_tag is a
                    #BeautifulSoup method -- should likely be soup.new_tag; confirm.
                    element.option.append(element.new_tag('disable-server-response-inspection'))
                    element.find('disable-server-response-inspection').append('yes' if v else 'no')
                elif k == 'qos':
                    if not element.find(k):
                        element.append(soup.new_tag(k))
                    else:
                        element.find(k).clear()
                    if v['marking'] in ['ip-precedence','ip-dscp','folow-c2s-flow']:
                        element.qos.append(soup.new_tag('marking'))
                        element.qos.marking.append(soup.new_tag(v['marking']))
                        if v['type']:
                            #NOTE(review): find(v) is passed the whole qos dict;
                            #looks like it should be find(v['marking']) -- confirm.
                            element.find(v).append(v['type'])
                elif k == 'profile-setting':
                    if not element.find(k):
                        element.append(soup.new_tag(k))
                    else:
                        element.find(k).clear()
                    if v['type'] == 'profile':
                        element.find('profile-setting').append(soup.new_tag('profiles'))
                        for _k,_v in v['profiles'].iteritems():
                            if _v:
                                element.find('profile-setting').append(soup.new_tag(_k))
                                element.find(_k).append(soup.new_tag('member'))
                                element.find(_k).member.append(_v)
                    elif v['type'] == 'group':
                        element.find('profile-setting').append(soup.new_tag('group'))
                        if v['name']:
                            element.find('profile-setting').group.append(soup.new_tag('member'))
                            element.find('profile-setting').group.member.append(v['name'])
                else:
                    logger.warning('{0} not a valid rule parameter.'.format(k))
                    return {'error' : '{0} not a valid rule parameter.'.format(k)}, 400
            logger.debug("Element: {0}".format(str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n','')))
            response = self.apicall(type='config',\
                action='edit',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(name),\
                element=str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            soup = BeautifulSoup(response.text,'xml')
            if soup.response['status'] != 'success':
                logger.warning("Rule badly formatted: " + str(response.status_code))
                return {'error' : 'Rule badly formatted.'}, 400
            else:
                # Serialize the edited <entry> back into the response dict.
                aux = {
                    'name' : element['name'],
                    'from' : list(),
                    'to' : list(),
                    'source' : list(),
                    'destination' : list(),
                    'action' : element.find('action').text,
                    'application' : list(),
                    'category' : list(),
                    'description' : element.find('description').text if element.find('description') else None,
                    'disabled' : False if not element.find('disabled') else True if element.find('disabled').text == 'yes' else False,
                    'hip-profiles' : list(),
                    'icmp-unreachable' : False if not element.find('icmp-unreachable') else True if element.find('icmp-unreachable').text == 'yes' else False,
                    'log-end' : False if not element.find('log-end') else True if element.find('log-end').text == 'yes' else False,
                    'log-setting' : element.find('log-setting').text if element.find('log-setting') else None,
                    'log-start' : False if not element.find('log-start') else True if element.find('log-start').text == 'yes' else False,
                    'negate-destination' : False if not element.find('negate-destination') else True if element.find('negate-destination').text == 'yes' else False,
                    'negate-source' : False if not element.find('negate-source') else True if element.find('negate-source').text == 'yes' else False,
                    'disable-server-response-inspection' : False if not element.find('disable-server-response-inspection') else True if element.find('disable-server-response-inspection').text == 'yes' else False,
                    'profile-setting' : dict(),
                    'qos' : {'marking' : element.marking.next_element.next_element.name if element.find('marking') else None, 'type' : element.marking.next_element.next_element.text if element.find('marking') else None},
                    'rule-type' : element.find('rule-type').text if element.find('rule-type') else 'universal',
                    'schedule' : element.schedule.text if element.find('schedule') else None,
                    'service' : list(),
                    'source-user' : list(),
                    'tag' : list()
                }
                #Iterate all lists
                #NOTE(review): 'hip.profiles' does not match the aux key
                #'hip-profiles' -- same mismatch as in get(); confirm intent.
                for s in ['from','to','source','destination','application','category','hip.profiles','service','source-user','tag']:
                    #Check if attribute exists
                    if not element.find(s):
                        continue
                    for member in element.find(s).children:
                        #Some tags are a newline, skip them
                        if type(member) != Tag:
                            continue
                        aux[s].append(member.text)
                #Special iteration for profile setting
                if not element.find('profile-setting'):
                    aux['profile-setting'] = None
                elif element.find('profile-setting').group:
                    aux['profile-setting'] = {'type' : 'group', 'name' : element.find('profile-setting').group.member.text if element.find('profile-setting').group.find('member') else None}
                else:
                    aux['profile-setting'] = {
                        'type' : 'profile',
                        'profiles' : {
                            'url-filtering' : element.find('url-filtering').member.text if element.find('url-filtering') else None,
                            'data-filtering' : element.find('data-filtering').member.text if element.find('data-filtering') else None,
                            'file-blocking' : element.find('file-blocking').member.text if element.find('file-blocking') else None,
                            'virus' : element.find('virus').member.text if element.find('virus') else None,
                            'spyware' : element.find('spyware').member.text if element.find('spyware') else None,
                            'vulnerability' : element.find('vulnerability').member.text if element.find('vulnerability') else None,
                            'wildfire-analysis' : element.find('wildfire-analysis').member.text if element.find('wildfire-analysis') else None
                        }
                    }
                return aux, 200

    def delete(self,name):
        """Delete rule *name* and return its last known contents.

        Returns (rule dict, 200) on success, 404 if the rule does not
        exist, 502 on firewall/API failures.
        """
        response = self.apicall(type='config',\
            action='get',\
            xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(name))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        entry = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the rule was not found.
        if entry.result.isSelfClosing:
            logger.warning("Rule does not exists.")
            return {'error' : 'Rule does not exists.'}, 404
        else:
            # Keep the fetched entry so we can echo the deleted rule back.
            entry = entry.find('entry')
            #Rule exists, delete it
            response = self.apicall(type='config',\
                action='delete',\
                xpath='/config/devices/entry[@name="localhost.localdomain"]/vsys/entry[@name="vsys1"]/rulebase/security/rules/entry[@name="{0}"]'.format(name))
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            else:
                logger.info("Rule {0} deleted.".format(name))
                aux = {
                    'name' : entry['name'],
                    'from' : list(),
                    'to' : list(),
                    'source' : list(),
                    'destination' : list(),
                    'action' : entry.find('action').text,
                    'application' : list(),
                    'category' : list(),
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'disabled' : False if not entry.find('disabled') else True if entry.find('disabled').text == 'yes' else False,
                    'hip-profiles' : list(),
                    'icmp-unreachable' : False if not entry.find('icmp-unreachable') else True if entry.find('icmp-unreachable').text == 'yes' else False,
                    'log-end' : False if not entry.find('log-end') else True if entry.find('log-end').text == 'yes' else False,
                    'log-setting' : entry.find('log-setting').text if entry.find('log-setting') else None,
                    'log-start' : False if not entry.find('log-start') else True if entry.find('log-start').text == 'yes' else False,
                    'negate-destination' : False if not entry.find('negate-destination') else True if entry.find('negate-destination').text == 'yes' else False,
                    'negate-source' : False if not entry.find('negate-source') else True if entry.find('negate-source').text == 'yes' else False,
                    'disable-server-response-inspection' : False if not entry.find('disable-server-response-inspection') else True if entry.find('disable-server-response-inspection').text == 'yes' else False,
                    'profile-setting' : dict(),
                    'qos' : {'marking' : entry.marking.next_element.next_element.name if entry.find('marking') else None, 'type' : entry.marking.next_element.next_element.text if entry.find('marking') else None},
                    'rule-type' : entry.find('rule-type').text if entry.find('rule-type') else 'universal',
                    'schedule' : entry.schedule.text if entry.find('schedule') else None,
                    'service' : list(),
                    'source-user' : list(),
                    'tag' : list()
                }
                #Iterate all lists
                #NOTE(review): 'hip.profiles' does not match the aux key
                #'hip-profiles' -- same mismatch as in get(); confirm intent.
                for s in ['from','to','source','destination','application','category','hip.profiles','service','source-user','tag']:
                    #Check if attribute exists
                    if not entry.find(s):
                        continue
                    for member in entry.find(s).children:
                        #Some tags are a newline, skip them
                        if type(member) != Tag:
                            continue
                        aux[s].append(member.text)
                #Special iteration for profile setting
                if not entry.find('profile-setting'):
                    aux['profile-setting'] = None
                elif entry.find('profile-setting').group:
                    aux['profile-setting'] = {'type' : 'group', 'name' : entry.find('profile-setting').group.member.text if entry.find('profile-setting').group.find('member') else None}
                else:
                    aux['profile-setting'] = {
                        'type' : 'profile',
                        'profiles' : {
                            'url-filtering' : entry.find('url-filtering').member.text if entry.find('url-filtering') else None,
                            'data-filtering' : entry.find('data-filtering').member.text if entry.find('data-filtering') else None,
                            'file-blocking' : entry.find('file-blocking').member.text if entry.find('file-blocking') else None,
                            'virus' : entry.find('virus').member.text if entry.find('virus') else None,
                            'spyware' : entry.find('spyware').member.text if entry.find('spyware') else None,
                            'vulnerability' : entry.find('vulnerability').member.text if entry.find('vulnerability') else None,
                            'wildfire-analysis' : entry.find('wildfire-analysis').member.text if entry.find('wildfire-analysis') else None
                        }
                    }
                return aux, 200
class rules_move(PAN):
    """Reorder a security rule inside the vsys1 rulebase."""

    def post(self,where,rule1,rule2=None):
        """Move *rule1* to 'top'/'bottom', or 'before'/'after' *rule2*.

        Returns a dict echoing the move on success, 400 on bad arguments,
        502 when the firewall call fails.
        """
        relative = where in ['before', 'after'] and rule2
        if where not in ['top','bottom'] and not relative:
            logger.warning("'where' not in 'after', 'before', 'top', 'bottom' or 'rule2' not present.")
            return {'error' : "'where' not in 'after', 'before', 'top', 'bottom' or 'rule2' not present."}, 400
        params = {
            'type' : 'config',
            'action' : 'move',
            'xpath' : "/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(rule1),
            'where' : where,
        }
        if relative:
            # Relative moves need a destination rule.
            params['dst'] = rule2
        response = self.apicall(**params)
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        result = {'where' : where, 'rule1' : rule1}
        if relative:
            result['rule2'] = rule2
        return result
class rules_rename(PAN):
    """Rename a security rule in the vsys1 rulebase.

    On success the rule is re-read under its new name and returned as a
    normalized dict; PAN-OS failures yield ({'error': ...}, 502).
    """
    def post(self,oldname,newname):
        response = self.apicall(type='config',\
                                action='rename',\
                                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/rulebase/security/rules/entry[@name='{0}']".format(oldname),\
                                newname=newname)
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        else:
            logger.info("Rule {0} renamed to {1}.".format(oldname,newname))
            # Re-read the rule under its new name so the caller gets the
            # configuration exactly as stored on the firewall.
            response = self.apicall(type='config',\
                                    action='get',\
                                    xpath='/config/devices/entry[@name="localhost.localdomain"]/vsys/entry[@name="vsys1"]/rulebase/security/rules/entry[@name="{0}"]'.format(newname))
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            else:
                entry = BeautifulSoup(response.text,'xml').entry
                # Flatten the XML <entry> into a dict; absent optional tags
                # become None / False / sensible defaults.
                aux = {
                    'name' : entry['name'],
                    'from' : list(),
                    'to' : list(),
                    'source' : list(),
                    'destination' : list(),
                    'action' : entry.find('action').text,
                    'application' : list(),
                    'category' : list(),
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'disabled' : False if not entry.find('disabled') else True if entry.find('disabled').text == 'yes' else False,
                    'hip-profiles' : list(),
                    'icmp-unreachable' : False if not entry.find('icmp-unreachable') else True if entry.find('icmp-unreachable').text == 'yes' else False,
                    'log-end' : False if not entry.find('log-end') else True if entry.find('log-end').text == 'yes' else False,
                    'log-setting' : entry.find('log-setting').text if entry.find('log-setting') else None,
                    'log-start' : False if not entry.find('log-start') else True if entry.find('log-start').text == 'yes' else False,
                    'negate-destination' : False if not entry.find('negate-destination') else True if entry.find('negate-destination').text == 'yes' else False,
                    'negate-source' : False if not entry.find('negate-source') else True if entry.find('negate-source').text == 'yes' else False,
                    'disable-server-response-inspection' : False if not entry.find('disable-server-response-inspection') else True if entry.find('disable-server-response-inspection').text == 'yes' else False,
                    'profile-setting' : dict(),
                    'qos' : {'marking' : entry.marking.next_element.next_element.name if entry.find('marking') else None, 'type' : entry.marking.next_element.next_element.text if entry.find('marking') else None},
                    'rule-type' : entry.find('rule-type').text if entry.find('rule-type') else 'universal',
                    'schedule' : entry.schedule.text if entry.find('schedule') else None,
                    'service' : list(),
                    'source-user' : list(),
                    'tag' : list()
                }
                #Iterate all list-valued attributes and collect <member> texts.
                #BUG FIX: the key was 'hip.profiles', which never matches the
                #XML tag 'hip-profiles', so that list was never populated.
                for s in ['from','to','source','destination','application','category','hip-profiles','service','source-user','tag']:
                    #Check if attribute exists
                    if not entry.find(s):
                        continue
                    for member in entry.find(s).children:
                        #Some tags are a newline, skip them
                        if type(member) != Tag:
                            continue
                        aux[s].append(member.text)
                #Special iteration for profile setting: either a named group
                #or an explicit per-category profile list.
                if not entry.find('profile-setting'):
                    aux['profile-setting'] = None
                elif entry.find('profile-setting').group:
                    aux['profile-setting'] = {'type' : 'group', 'name' : entry.find('profile-setting').group.member.text if entry.find('profile-setting').group.find('member') else None}
                else:
                    aux['profile-setting'] = {
                        'type' : 'profile',
                        'profiles' : {
                            'url-filtering' : entry.find('url-filtering').member.text if entry.find('url-filtering') else None,
                            'data-filtering' : entry.find('data-filtering').member.text if entry.find('data-filtering') else None,
                            'file-blocking' : entry.find('file-blocking').member.text if entry.find('file-blocking') else None,
                            'virus' : entry.find('virus').member.text if entry.find('virus') else None,
                            'spyware' : entry.find('spyware').member.text if entry.find('spyware') else None,
                            'vulnerability' : entry.find('vulnerability').member.text if entry.find('vulnerability') else None,
                            'wildfire-analysis' : entry.find('wildfire-analysis').member.text if entry.find('wildfire-analysis') else None
                        }
                    }
                return aux
class rules_match(PAN):
def get(self,args):
if 'from' not in args or 'to' not in args or 'source' not in args or 'destination' not in args or 'protocol' not in args or 'port' not in args:
logger.warning('Migging parameters.')
return {'error' : 'Missing parameters.'}, 400
soup = BeautifulSoup('<test><security-policy-match></security-policy-match></test>','xml')
#from
soup.find('security-policy-match').append(soup.new_tag('from'))
soup.find('from').append(args['from'])
#to
soup.find('security-policy-match').append(soup.new_tag('to'))
soup.find('to').append(args['to'])
#source
soup.find('security-policy-match').append(soup.new_tag('source'))
soup.find('source').append(args['source'])
#destination
soup.find('security-policy-match').append(soup.new_tag('destination'))
soup.find('destination').append(args['destination'])
#protocol
soup.find('security-policy-match').append(soup.new_tag('protocol'))
soup.find('protocol').append('6' if args['protocol'].lower() == 'tcp' else '17')
#port
soup.find('security-policy-match').append(soup.new_tag('destination-port'))
soup.find('destination-port').append(args['port'])
if 'application' in args:
#application
soup.find('security-policy-match').append(soup.new_tag('application'))
soup.find('application').append(args['application'])
if 'source-user' in args:
#source-user
soup.find('security-policy-match').append(soup.new_tag('source-user'))
soup.find('source-user').append(args['source-user'])
if 'category' in args:
#category
soup.find('security-policy-match').append(soup.new_tag('category'))
soup.find('category').append(args['category'])
response = self.apicall(type='op',\
cmd=str(soup).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
soup = BeautifulSoup(response.text,'xml').entry
aux = None
if soup:
response = self.apicall(type='config',\
action='get',\
xpath='/config/devices/entry[@name="localhost.localdomain"]/vsys/entry[@name="vsys1"]/rulebase/security/rules/entry[@name="{0}"]'.format(soup.text))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
else:
entry = BeautifulSoup(response.text,'xml').entry
aux = {
'name' : entry['name'],
'from' : list(),
'to' : list(),
'source' : list(),
'destination' : list(),
'action' : entry.find('action').text,
'application' : list(),
'category' : list(),
'description' : entry.find('description').text if entry.find('description') else None,
'disabled' : False if not entry.find('disabled') else True if entry.find('disabled').text == 'yes' else False,
'hip-profiles' : list(),
'icmp-unreachable' : False if not entry.find('icmp-unreachable') else True if entry.find('icmp-unreachable').text == 'yes' else False,
'log-end' : False if not entry.find('log-end') else True if entry.find('log-end').text == 'yes' else False,
'log-setting' : entry.find('log-setting').text if entry.find('log-setting') else None,
'log-start' : False if not entry.find('log-start') else True if entry.find('log-start').text == 'yes' else False,
'negate-destination' : False if not entry.find('negate-destination') else True if entry.find('negate-destination').text == 'yes' else False,
'negate-source' : False if not entry.find('negate-source') else True if entry.find('negate-source').text == 'yes' else False,
'disable-server-response-inspection' : False if not entry.find('disable-server-response-inspection') else True if entry.find('disable-server-response-inspection').text == 'yes' else False,
'profile-setting' : dict(),
'qos' : {'marking' : entry.marking.next_element.next_element.name if entry.find('marking') else None, 'type' : entry.marking.next_element.next_element.text if entry.find('marking') else None},
'rule-type' : entry.find('rule-type').text if entry.find('rule-type') else 'universal',
'schedule' : entry.schedule.text if entry.find('schedule') else None,
'service' : list(),
'source-user' : list(),
'tag' : list()
}
#Iterate all lists
for s in ['from','to','source','destination','application','category','hip.profiles','service','source-user','tag']:
#Check if attribute exists
if not entry.find(s):
continue
for member in entry.find(s).children:
#Some tags are a newline, skip them
if type(member) != Tag:
continue
aux[s].append(member.text)
#Special iteration for profile setting
if not entry.find('profile-setting'):
aux['profile-setting'] = None
elif entry.find('profile-setting').group:
aux['profile-setting'] = {'type' : 'group', 'name' : entry.find('profile-setting').group.member.text if entry.find('profile-setting').group.find('member') else None}
else:
aux['profile-setting'] = {
'type' : 'profile',
'profiles' : {
'url-filtering' : entry.find('url-filtering').member.text if entry.find('url-filtering') else None,
'data-filtering' : entry.find('data-filtering').member.text if entry.find('data-filtering') else None,
'file-blocking' : entry.find('file-blocking').member.text if entry.find('file-blocking') else None,
'virus' : entry.find('virus').member.text if entry.find('virus') else None,
'spyware' : entry.find('spyware').member.text if entry.find('spyware') else None,
'vulnerability' : entry.find('vulnerability').member.text if entry.find('vulnerability') else None,
'wildfire-analysis' : entry.find('wildfire-analysis').member.text if entry.find('wildfire-analysis') else None
}
}
return {'allowed' : False if not soup else False if aux['action'] != 'allow' else False, 'policy' : aux}
class objects(PAN):
    def get(self,args,object):
        """List all objects of the given container type from vsys1.

        object -- PAN-OS container name: 'address', 'service',
                  'service-group' or 'address-group'.
        args   -- filter arguments forwarded to self.filter().
        Returns {'len': N, 'objects': [...]} with each object flattened to
        a dict, or ({'error': ...}, 502) on a PAN-OS failure.
        """
        response = self.apicall(type='config',\
                                action='get',\
                                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}".format(object))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        _entries = list()
        soup = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the container holds no objects.
        if soup.response.result.isSelfClosing:
            return {'len' : 0, 'objects' : list()}
        for entry in BeautifulSoup(response.text,'xml').find(object).children:
            # Whitespace between XML tags parses as NavigableString; skip it.
            if type(entry) != Tag:
                continue
            if object == 'address':
                # An address is exactly one of ip-netmask / fqdn / ip-range.
                aux = {
                    'name' : entry['name'],
                    'type' : 'ip-netmask' if entry.find('ip-netmask') else 'fqdn' if entry.find('fqdn') else 'ip-range' if entry.find('ip-range') else None,
                    'value' : entry.find('ip-netmask').text if entry.find('ip-netmask') else entry.find('fqdn').text if entry.find('fqdn') else entry.find('ip-range').text if entry.find('ip-range') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                # 'tag' is a list only when the XML had a <tag> element.
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
                _entries.append(aux)
            elif object == 'service':
                aux = {
                    'name' : entry['name'],
                    'destination-port' : entry.find('port').text if entry.find('port') else None,
                    'source-port' : entry.find('source-port').text if entry.find('source-port') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'protocol' : 'tcp' if entry.find('tcp') else 'udp' if entry.find('udp') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
                _entries.append(aux)
            elif object == 'service-group':
                aux = {
                    'name' : entry['name'],
                    'tag' : list() if entry.find('tag') else None,
                    'value' : list()
                }
                for member in entry.find('members').children:
                    if type(member) != Tag:
                        continue
                    aux['value'].append(member.text)
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
                _entries.append(aux)
            elif object == 'address-group':
                aux = {
                    'name' : entry['name'],
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'type' : 'static' if entry.find('static') else 'dynamic' if entry.find('dynamic') else None,
                    'tag' : list() if entry.find('tag') else None,
                }
                # Static groups carry a member list; dynamic ones a filter.
                if aux['type'] == 'static':
                    aux['static'] = list()
                    for member in entry.find('static').children:
                        if type(member) != Tag:
                            continue
                        aux['static'].append(member.text)
                elif aux['type'] == 'dynamic':
                    aux['filter'] = entry.find('filter').text if entry.find('filter') else None
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
                _entries.append(aux)
        # Apply the caller-supplied filters before returning.
        _entries = self.filter(args,_entries)
        return {'len' : len(_entries), 'objects' : _entries}
def post(self,data,object):
response = self.apicall(type='config',\
action='get',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
soup = BeautifulSoup(response.text,'xml')
if not soup.result.isSelfClosing:
logger.warning("{0} already exists.".format(object))
return {'error' : "{0} already exists.".format(object)}, 409
#Object does not exists, create it
element = BeautifulSoup('','xml')
if object == 'address':
element.append(element.new_tag(data['type']))
element.find(data['type']).append(data['value'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag(data['tag']))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
elif object == 'service':
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
element.append(element.new_tag('protocol'))
element.protocol.append(element.new_tag(data['protocol']))
element.find(data['protocol']).append(element.new_tag('port'))
if 'destination-port' in data:
if data['destination-port']:
element.port.append(data['destination-port'])
if 'source-port' in data:
if data['source-port']:
element.find(data['protocol']).append(element.new_tag('source-port'))
element.find(data['source-port']).append(data['source-port'])
elif object == 'address-group':
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if data['type'] == 'static':
element.append(element.new_tag('static'))
for d in data['static']:
element.static.append(element.new_tag('member'))
element.static.find_all('member')[-1].append(d)
elif data['type'] == 'dynamic':
element.append(element.new_tag(data['dynamic']))
element.dynamic.append(element.new_tag(data['filter']))
element.dynamic.filter.append(data['filter'])
elif object == 'service-group':
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'value' in data:
element.append(element.new_tag('members'))
for d in data['value']:
element.members.append(element.new_tag('member'))
element.members.find_all('member')[-1].append(d)
else:
logger.warning("Object not found.")
return {'error' : 'Object not found.'}, 404
logger.debug(str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
response = self.apicall(type='config',\
action='set',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']),\
element=str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
else:
return data, 201
def patch(self,data,object):
response = self.apicall(type='config',\
action='get',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
soup = BeautifulSoup(response.text,'xml')
if soup.result.isSelfClosing:
logger.warning("Object does not exists.")
return {'error' : 'Object does not exists.'}, 400
element = BeautifulSoup('','xml')
if object == 'address':
element.append(element.new_tag(data['type']))
element.find(data['type']).append(data['value'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag(data['tag']))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
elif object == 'service':
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
element.append(element.new_tag('protocol'))
element.protocol.append(element.new_tag(data['protocol']))
element.find(data['protocol']).append(element.new_tag('port'))
if 'destination-port' in data:
if data['destination-port']:
element.port.append(data['destination-port'])
if 'source-port' in data:
if data['source-port']:
element.find(data['protocol']).append(element.new_tag('source-port'))
element.find(data['source-port']).append(data['source-port'])
elif object == 'address-group':
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'static' in data:
element.append(element.new_tag('static'))
for d in data['static']:
element.static.append(element.new_tag('member'))
element.static.find_all('member')[-1].append(d)
elif 'filter' in data:
element.append(element.new_tag('dynamic'))
element.dynamic.append(element.new_tag('filter'))
element.dynamic.filter.append(data['filter'])
elif object == 'service-group':
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'value' in data:
element.append(element.new_tag('members'))
for d in data['value']:
element.members.append(element.new_tag('member'))
element.members.find_all('member')[-1].append(d)
else:
logger.warning("Object not found.")
return {'error' : 'Object not found.'}, 404
logger.debug(str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
response = self.apicall(type='config',\
action='set',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']),\
element=str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
else:
aux = dict()
entry = soup.entry
if object == 'address':
aux = {
'name' : entry['name'],
'type' : 'ip-netmask' if entry.find('ip-netmask') else 'fqdn' if entry.find('fqdn') else 'ip-range' if entry.find('ip-range') else None,
'value' : entry.find('ip-netmask').text if entry.find('ip-netmask') else entry.find('fqdn').text if entry.find('fqdn') else entry.find('ip-range').text if entry.find('ip-range') else None,
'description' : entry.find('description').text if entry.find('description') else None,
'tag' : list() if entry.find('tag') else None
}
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'service':
aux = {
'name' : entry['name'],
'destination-port' : entry.find('port').text if entry.find('port') else None,
'source-port' : entry.find('source-port').text if entry.find('source-port') else None,
'description' : entry.find('description').text if entry.find('description') else None,
'protocol' : 'tcp' if entry.find('tcp') else 'udp' if entry.find('udp') else None,
'tag' : list() if entry.find('tag') else None
}
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'service-group':
aux = {
'name' : entry['name'],
'tag' : list() if entry.find('tag') else None,
'value' : list()
}
for member in entry.find('members').children:
if type(member) != Tag:
continue
aux['value'].append(member.text)
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'address-group':
aux = {
'name' : entry['name'],
'description' : entry.find('description').text if entry.find('description') else None,
'type' : 'static' if entry.find('static') else 'dynamic' if entry.find('dynamic') else None,
'tag' : list() if entry.find('tag') else None,
}
if aux['type'] == 'static':
aux['static'] = list()
for member in entry.find('static').children:
if type(member) != Tag:
continue
aux['static'].append(member.text)
elif aux['type'] == 'dynamic':
aux['filter'] = entry.find('filter').text if entry.find('filter') else None
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
for k,v in data.iteritems():
if type(aux[k]) == list:
if type(v) == list:
for _v in v:
if _v not in aux[k]:
aux[k].append(_v)
else:
aux[k].append(v)
else:
aux[k] = v
return aux, 200
def put(self,data,object):
response = self.apicall(type='config',\
action='get',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
soup = BeautifulSoup(response.text,'xml')
if soup.result.isSelfClosing:
logger.warning("Object does not exists.")
return {'error' : 'Object does not exists.'}, 400
element = BeautifulSoup('','xml')
if object == 'address':
if 'value' in data:
element.append(element.new_tag(data['type'] if 'type' in data else soup.entry.next_element.next_element.name))
element.find(data['type'] if 'type' in data else soup.entry.next_element.name).append(data['value'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag(data['tag']))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
elif object == 'service':
if 'description' in data:
if data['description']:
element.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'destination-port' in data:
if data['destination-port']:
element.append(element.new_tag('protocol'))
element.protocol.append(element.new_tag(data['protocol'] if 'protocol' in data else soup.entry.protocol.next_element.next_element.name))
element.find(data['protocol'] if 'protocol' in data else soup.entry.protocol.next_element.next_element.name).append(element.new_tag('port'))
element.port.append(data['destination-port'])
if 'source-port' in data:
if data['source-port']:
element.append(element.new_tag('protocol'))
element.protocol.append(element.new_tag(data['protocol'] if 'protocol' in data else soup.entry.protocol.next_element.next_element.name))
element.find(data['protocol'] if 'protocol' in data else soup.entry.protocol.next_element.next_element.name).append(element.new_tag('source-port'))
element.find(data['source-port']).append(data['source-port'])
elif object == 'address-group':
element.append(element.new_tag('entry'))
element.entry['name'] = data['name']
if 'description' in data:
if data['description']:
element.entry.append(element.new_tag('description'))
element.description.append(data['description'])
if 'tag' in data:
if data['tag']:
element.entry.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'static' in data:
element.entry.append(element.new_tag('static'))
for d in data['static']:
element.static.append(element.new_tag('member'))
element.static.find_all('member')[-1].append(d)
elif 'filter' in data:
element.entry.append(element.new_tag('dynamic'))
element.dynamic.append(element.new_tag('filter'))
element.dynamic.filter.append(data['filter'])
elif object == 'service-group':
element.append(element.new_tag('entry'))
element.entry['name'] = data['name']
if 'tag' in data:
if data['tag']:
element.entry.append(element.new_tag('tag'))
for t in data['tag']:
element.tag.append(element.new_tag('member'))
element.tag.find_all('member')[-1].append(t)
if 'value' in data:
element.entry.append(element.new_tag('members'))
for d in data['value']:
element.members.append(element.new_tag('member'))
element.members.find_all('member')[-1].append(d)
else:
logger.warning("Object not found.")
return {'error' : 'Object not found.'}, 404
logger.debug(str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
response = self.apicall(type='config',\
action='edit' if object in ['address-group','service-group'] else 'set',\
xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,data['name']),\
element=str(element).replace('<?xml version="1.0" encoding="utf-8"?>\n',''))
if response.status_code != 200:
logger.error("Palo Alto response: " + str(response.status_code))
return {'error' : str(response.text)}, 502
elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
logger.error("Palo Alto response: " + str(response.text))
return {'error' : str(response.text)}, 502
else:
aux = dict()
entry = soup.entry
if object == 'address':
aux = {
'name' : entry['name'],
'type' : 'ip-netmask' if entry.find('ip-netmask') else 'fqdn' if entry.find('fqdn') else 'ip-range' if entry.find('ip-range') else None,
'value' : entry.find('ip-netmask').text if entry.find('ip-netmask') else entry.find('fqdn').text if entry.find('fqdn') else entry.find('ip-range').text if entry.find('ip-range') else None,
'description' : entry.find('description').text if entry.find('description') else None,
'tag' : list() if entry.find('tag') else None
}
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'service':
aux = {
'name' : entry['name'],
'destination-port' : entry.find('port').text if entry.find('port') else None,
'source-port' : entry.find('source-port').text if entry.find('source-port') else None,
'description' : entry.find('description').text if entry.find('description') else None,
'protocol' : 'tcp' if entry.find('tcp') else 'udp' if entry.find('udp') else None,
'tag' : list() if entry.find('tag') else None
}
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'service-group':
aux = {
'name' : entry['name'],
'tag' : list() if entry.find('tag') else None,
'value' : list()
}
for member in entry.find('members').children:
if type(member) != Tag:
continue
aux['value'].append(member.text)
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
elif object == 'address-group':
aux = {
'name' : entry['name'],
'description' : entry.find('description').text if entry.find('description') else None,
'type' : 'static' if entry.find('static') else 'dynamic' if entry.find('dynamic') else None,
'tag' : list() if entry.find('tag') else None,
}
if aux['type'] == 'static':
aux['static'] = list()
for member in entry.find('static').children:
if type(member) != Tag:
continue
aux['static'].append(member.text)
elif aux['type'] == 'dynamic':
aux['filter'] = entry.find('filter').text if entry.find('filter') else None
if type(aux['tag']) == list:
for tag in entry.find('tag').children:
if type(tag) != Tag:
continue
aux['tag'].append(tag.text)
for k,v in data.iteritems():
if type(aux[k]) == list:
if type(v) == list:
aux[k] = list()
for _v in v:
aux[k].append(_v)
else:
aux[k] = v
return aux, 200
    def delete(self, name, object):
        """Delete an object from vsys1 and echo back its last-known definition.

        name   -- name of the object entry to delete.
        object -- object class path segment: 'address', 'service',
                  'address-group' or 'service-group'.

        Returns (dict, 200) with the deleted object's attributes on success,
        (dict, 404) if the object does not exist, or (dict, 5xx) on API errors.
        """
        # Fetch the object first so its definition can be returned after it
        # has been deleted from the firewall.
        response = self.apicall(type='config',\
                action='get',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,name))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        entry = BeautifulSoup(response.text,'xml')
        # A self-closing <result/> means the firewall returned no entry.
        # NOTE(review): isSelfClosing is the BeautifulSoup 3 spelling; bs4
        # calls this is_empty_element -- confirm which bs version is in use.
        if entry.result.isSelfClosing:
            logger.warning("Rule does not exists.")
            return {'error' : 'Rule does not exists.'}, 404
        else:
            entry = entry.find('entry')
        # Object exists, delete it
        response = self.apicall(type='config',\
                action='delete',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,name))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        else:
            # Rebuild a JSON-friendly view of the object that was just
            # removed, using the XML snapshot taken before deletion.
            if object == 'address':
                aux = {
                    'name' : entry['name'],
                    'type' : 'ip-netmask' if entry.find('ip-netmask') else 'fqdn' if entry.find('fqdn') else 'ip-range' if entry.find('ip-range') else None,
                    'value' : entry.find('ip-netmask').text if entry.find('ip-netmask') else entry.find('fqdn').text if entry.find('fqdn') else entry.find('ip-range').text if entry.find('ip-range') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                if type(aux['tag']) == list:
                    # <tag> children include whitespace nodes; keep only
                    # real Tag elements.
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'service':
                aux = {
                    'name' : entry['name'],
                    'destination-port' : entry.find('port').text if entry.find('port') else None,
                    'source-port' : entry.find('source-port').text if entry.find('source-port') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'protocol' : 'tcp' if entry.find('tcp') else 'udp' if entry.find('udp') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'address-group':
                aux = {
                    'name' : entry['name'],
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'type' : 'static' if entry.find('static') else 'dynamic' if entry.find('dynamic') else None,
                    'tag' : list() if entry.find('tag') else None,
                }
                if aux['type'] == 'static':
                    # Static groups carry an explicit member list.
                    aux['static'] = list()
                    for member in entry.find('static').children:
                        if type(member) != Tag:
                            continue
                        aux['static'].append(member.text)
                elif aux['type'] == 'dynamic':
                    # Dynamic groups are defined by a match filter instead.
                    aux['filter'] = entry.find('filter').text if entry.find('filter') else None
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'service-group':
                aux = {
                    'name' : entry['name'],
                    'tag' : list() if entry.find('tag') else None,
                    'value' : list()
                }
                for member in entry.find('members').children:
                    if type(member) != Tag:
                        continue
                    aux['value'].append(member.text)
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            else:
                # Unsupported object class: nothing sensible to report.
                logger.error("Unknown error.")
                return {'error' : 'Unknown error.'}, 500
        return aux, 200
class objects_rename(PAN):
    """Rename a firewall object and return its re-read definition."""
    def post(self, object, oldname, newname):
        """Rename *oldname* to *newname* in the given *object* class.

        object -- 'address', 'service', 'address-group' or 'service-group'.

        Returns (dict, 200) describing the renamed object, or (dict, 5xx)
        on API failure.
        """
        response = self.apicall(type='config',\
                action='rename',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,oldname),\
                newname=newname)
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        else:
            logger.info("Object {0} {1} renamed to {2}.".format(object,oldname,newname))
        # Re-read the object under its new name so the caller gets its
        # current state.
        response = self.apicall(type='config',\
                action='get',\
                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/{0}/entry[@name='{1}']".format(object,newname))
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        else:
            # BUG FIX: descend to the <entry> element before reading its
            # attributes. The previous code indexed the whole soup document
            # (entry['name']), which raises KeyError; the sibling delete()
            # method already uses find('entry') for the same parsing.
            entry = BeautifulSoup(response.text,'xml').find('entry')
            if object == 'address':
                aux = {
                    'name' : entry['name'],
                    'type' : 'ip-netmask' if entry.find('ip-netmask') else 'fqdn' if entry.find('fqdn') else 'ip-range' if entry.find('ip-range') else None,
                    'value' : entry.find('ip-netmask').text if entry.find('ip-netmask') else entry.find('fqdn').text if entry.find('fqdn') else entry.find('ip-range').text if entry.find('ip-range') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                if type(aux['tag']) == list:
                    # <tag> children include whitespace nodes; keep only Tags.
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'service':
                aux = {
                    'name' : entry['name'],
                    'destination-port' : entry.find('port').text if entry.find('port') else None,
                    'source-port' : entry.find('source-port').text if entry.find('source-port') else None,
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'protocol' : 'tcp' if entry.find('tcp') else 'udp' if entry.find('udp') else None,
                    'tag' : list() if entry.find('tag') else None
                }
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'address-group':
                aux = {
                    'name' : entry['name'],
                    'description' : entry.find('description').text if entry.find('description') else None,
                    'type' : 'static' if entry.find('static') else 'dynamic' if entry.find('dynamic') else None,
                    'tag' : list() if entry.find('tag') else None,
                }
                if aux['type'] == 'static':
                    # Static groups carry an explicit member list.
                    aux['static'] = list()
                    for member in entry.find('static').children:
                        if type(member) != Tag:
                            continue
                        aux['static'].append(member.text)
                elif aux['type'] == 'dynamic':
                    # Dynamic groups are defined by a match filter instead.
                    aux['filter'] = entry.find('filter').text if entry.find('filter') else None
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            elif object == 'service-group':
                aux = {
                    'name' : entry['name'],
                    'tag' : list() if entry.find('tag') else None,
                    'value' : list()
                }
                for member in entry.find('members').children:
                    if type(member) != Tag:
                        continue
                    aux['value'].append(member.text)
                if type(aux['tag']) == list:
                    for tag in entry.find('tag').children:
                        if type(tag) != Tag:
                            continue
                        aux['tag'].append(tag.text)
            else:
                logger.error("Unknown error.")
                return {'error' : 'Unknown error.'}, 500
        return aux, 200
class interfaces(PAN):
    """List the firewall's network interfaces."""
    def get(self, args):
        """Return all interfaces, optionally filtered via self.filter(args)."""
        response = self.apicall(type='op',\
                cmd="<show><interface>all</interface></show>")
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        logger.debug(str(response.text))
        entries = list()
        for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
            # Derive the virtual-router name from the forwarding field,
            # reported by PAN-OS as "vr:<name>" (or "N/A" when unset).
            vr = None
            if entry.fwd and entry.fwd.text != 'N/A':
                fwd = entry.fwd.text
                # BUG FIX: str.strip('vr:') removes any of the characters
                # 'v', 'r', ':' from BOTH ends, mangling router names such
                # as "vr:vr1" -> "1". Remove the literal "vr:" prefix only.
                vr = fwd[len('vr:'):] if fwd.startswith('vr:') else fwd
            entries.append({
                'name' : entry.find('name').text,
                'zone' : entry.zone.text if entry.zone else None,
                'virtual-router' : vr,
                'tag' : entry.tag.text if entry.tag else None,
                'ip' : None if not entry.ip else entry.ip.text if entry.ip.text != 'N/A' else None,
                'id' : entry.id.text
            })
        return {'interfaces' : self.filter(args,entries)}
class route(PAN):
    """Read the firewall routing table."""
    def get(self, args):
        """Return every route with its flag letters decoded into booleans."""
        response = self.apicall(type='op',\
                cmd='<show><routing><route></route></routing></show>')
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        entries = list()
        for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
            # Hoist the flag string; it is tested many times below.
            flags = entry.flags.text
            entries.append({
                'virtual-router' : entry.find('virtual-router').text,
                'destination' : entry.destination.text,
                'nexthop' : entry.nexthop.text,
                'metric' : int(entry.metric.text) if entry.metric.text else None,
                'interface' : entry.interface.text,
                'age' : entry.age.text if entry.age.text else None,
                # Decode PAN-OS route flag letters.
                'flags' : {
                    'active' : 'A' in flags,
                    'loose' : '?' in flags,
                    'connect' : 'C' in flags,
                    'host' : 'H' in flags,
                    'static' : 'S' in flags,
                    'internal' : '~' in flags,
                    'rip' : 'R' in flags,
                    'ospf' : 'O' in flags,
                    'bgp' : 'B' in flags,
                    'ospf-intra-area' : 'Oi' in flags,
                    'ospf-inter-area' : 'Oo' in flags,
                    'ospf-external-1' : 'O1' in flags,
                    # BUG FIX: this previously tested 'O1' again, so OSPF
                    # type-2 external routes were never reported as such.
                    'ospf-external-2' : 'O2' in flags,
                    'ecmp' : 'E' in flags
                }
            })
        return {'routes' : self.filter(args,entries)}
class lock(PAN):
    """Inspect, take and release commit/config locks on the firewall."""
    def get(self, option=None, admin=None):
        """List locks.

        option -- 'commit-locks', 'config-locks', or None for both.
        admin  -- optional administrator name; when given, only that
                  admin's lock is reported (single-type queries only).
        """
        if option in ['commit-locks', 'config-locks']:
            if option == 'commit-locks':
                response = self.apicall(type='op',\
                        cmd='<show><commit-locks></commit-locks></show>')
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 400
                entries = list()
                for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
                    if admin:
                        # Filter to the requested administrator only.
                        if admin == entry['name']:
                            entries.append({
                                'name' : entry['name'],
                                'created' : entry.created.text,
                                'last-activity' : entry.find('last-activity').text,
                                'loggedin' : True if entry.loggedin.text == 'yes' else False,
                                # '(null)' is the firewall's "no comment" marker.
                                'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                            })
                            break
                    else:
                        entries.append({
                            'name' : entry['name'],
                            'created' : entry.created.text,
                            'last-activity' : entry.find('last-activity').text,
                            'loggedin' : True if entry.loggedin.text == 'yes' else False,
                            'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                        })
                return {'commit-locks' : entries}
            else:
                # option == 'config-locks': same flow for the other lock type.
                response = self.apicall(type='op',\
                        cmd='<show><config-locks></config-locks></show>')
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 400
                entries = list()
                for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
                    if admin:
                        if admin == entry['name']:
                            entries.append({
                                'name' : entry['name'],
                                'created' : entry.created.text,
                                'last-activity' : entry.find('last-activity').text,
                                'loggedin' : True if entry.loggedin.text == 'yes' else False,
                                'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                            })
                            break
                    else:
                        entries.append({
                            'name' : entry['name'],
                            'created' : entry.created.text,
                            'last-activity' : entry.find('last-activity').text,
                            'loggedin' : True if entry.loggedin.text == 'yes' else False,
                            'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                        })
                return {'config-locks' : entries}
        else:
            # No option given: gather both lock types plus an aggregate
            # 'locked' flag. The admin filter is not applied here.
            response = self.apicall(type='op',\
                    cmd='<show><commit-locks></commit-locks></show>')
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 400
            entries = list()
            for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
                entries.append({
                    'name' : entry['name'],
                    'created' : entry.created.text,
                    'last-activity' : entry.find('last-activity').text,
                    'loggedin' : True if entry.loggedin.text == 'yes' else False,
                    'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                })
            response = self.apicall(type='op',\
                    cmd='<show><config-locks></config-locks></show>')
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 400
            _entries = list()
            for entry in BeautifulSoup(response.text,'xml').find_all('entry'):
                _entries.append({
                    'name' : entry['name'],
                    'created' : entry.created.text,
                    'last-activity' : entry.find('last-activity').text,
                    'loggedin' : True if entry.loggedin.text == 'yes' else False,
                    'comment' : entry.comment.text if entry.comment.text != '(null)' else None
                })
            return {'commit-locks' : entries, 'config-locks' : _entries, 'locked' : True if entries or _entries else False}
    def post(self, comment=None, option=None, admin=None):
        """Take a commit lock, a config lock, or (default) both in sequence.

        comment -- optional comment attached to the new lock(s).
        Returns the current lock state via self.get(option).
        """
        if option in ['commit-locks', 'config-locks']:
            if option == 'commit-locks':
                response = self.apicall(type='op',\
                        cmd='<request><commit-lock><add>{0}</add></commit-lock></request>'.format("<comment>{0}</comment>".format(comment) if comment else ''))
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 400
            else:
                response = self.apicall(type='op',\
                        cmd='<request><config-lock><add>{0}</add></config-lock></request>'.format("<comment>{0}</comment>".format(comment) if comment else ''))
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 400
        else:
            # Default: take the commit lock first, then the config lock
            # (the second request is only issued if the first succeeded).
            response = self.apicall(type='op',\
                    cmd='<request><commit-lock><add>{0}</add></commit-lock></request>'.format("<comment>{0}</comment>".format(comment) if comment else ''))
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 400
            else:
                logger.debug(str(response.text))
                response = self.apicall(type='op',\
                        cmd='<request><config-lock><add>{0}</add></config-lock></request>'.format("<comment>{0}</comment>".format(comment) if comment else ''))
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 400
                else:
                    logger.debug(str(response.text))
        # Echo back the resulting lock state.
        return self.get(option)
    def delete(self, option=None, admin=None):
        """Release a commit lock, a config lock, or (default) both.

        admin -- for commit-locks only: release that admin's lock instead
                 of the caller's own.
        "Not currently held/locked" responses are tolerated as success.
        """
        if option == 'commit-locks':
            if admin:
                response = self.apicall(type='op',\
                        cmd='<request><commit-lock><remove><admin>{0}</admin></remove></commit-lock></request>'.format(admin))
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success' and 'Commit lock is not currently held by' not in BeautifulSoup(response.text,'xml').line.text:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
            else:
                response = self.apicall(type='op',\
                        cmd='<request><commit-lock><remove></remove></commit-lock></request>')
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
        elif option == 'config-locks':
            response = self.apicall(type='op',\
                    cmd='<request><config-lock><remove /></config-lock></request>')
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success' and 'Config lock is not currently locked' not in BeautifulSoup(response.text,'xml').line.text:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
        else:
            # Default: release the commit lock, then the config lock
            # (the second request only after the first succeeded).
            response = self.apicall(type='op',\
                    cmd='<request><commit-lock><remove></remove></commit-lock></request>')
            if response.status_code != 200:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            elif BeautifulSoup(response.text,'xml').response['status'] != 'success' and 'Commit lock is not currently held by' not in BeautifulSoup(response.text,'xml').line.text:
                logger.error("Palo Alto response: " + str(response.status_code))
                return {'error' : str(response.text)}, 502
            else:
                logger.debug(str(response.text))
                response = self.apicall(type='op',\
                        cmd='<request><config-lock><remove></remove></config-lock></request>')
                if response.status_code != 200:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                elif BeautifulSoup(response.text,'xml').response['status'] != 'success' and 'Config lock is not currently locked' not in BeautifulSoup(response.text,'xml').line.text:
                    logger.error("Palo Alto response: " + str(response.status_code))
                    return {'error' : str(response.text)}, 502
                else:
                    logger.debug(str(response.text))
        return self.get(option)
class commit(PAN):
    """Inspect and trigger configuration commits."""
    def get(self):
        """Return the processed commit jobs reported by the firewall."""
        response = self.apicall(type='op',
                                cmd="<show><jobs><processed></processed></jobs></show>")
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        jobs = []
        for job in soup.response.result.find_all('job'):
            # Only jobs of type Commit are of interest here.
            if job.type.text != 'Commit':
                continue
            jobs.append({prop.name: prop.text for prop in job.find_all()})
        return {'commit-jobs' : jobs}, 200
    def post(self):
        """Start a commit and return its job id with HTTP 201."""
        response = self.apicall(type='commit',
                                cmd="<commit><description /></commit>")
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        job_id = BeautifulSoup(response.text,'xml').job.text
        return {'commit' : True, 'id' : job_id}, 201
class logging(PAN):
    """Expose the vsys1 log-forwarding profiles.

    NOTE: the class name shadows the stdlib ``logging`` module within
    this file's namespace; kept for API (routing) compatibility.
    """
    def get(self):
        """Return the names of all configured log-settings profiles."""
        response = self.apicall(type='config',
                                action='get',
                                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/log-settings/profiles")
        if response.status_code != 200:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        profile_names = [e['name'] for e in soup.response.result.profiles.find_all('entry')]
        return {'log-settings' : profile_names}
class gp_gateways(PAN):
    """List the configured GlobalProtect gateways."""
    def get(self):
        """Return each gateway's name and tunnel-mode flag plus a count."""
        response = self.apicall(type='config',
                                action='get',
                                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/global-protect/global-protect-gateway")
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        gateways = []
        for gw in soup.find('global-protect-gateway').childGenerator():
            # Skip whitespace/NavigableString children; only Tags are entries.
            if type(gw) != Tag:
                continue
            gateways.append({
                "name" : gw['name'],
                "tunnel-mode" : gw.find('tunnel-mode').string == 'yes'
            })
        return {"gateways" : gateways, "len" : len(gateways)}
class gp_gateway(PAN):
    """Read a single GlobalProtect gateway configuration entry."""
    def get(self, gateway):
        """Return the name and tunnel-mode flag of *gateway*."""
        response = self.apicall(type='config',
                                action='get',
                                xpath="/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']/global-protect/global-protect-gateway/entry[@name='{}']".format(gateway))
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        return {
            "name" : soup.result.entry['name'],
            "tunnel-mode" : soup.result.find('tunnel-mode').string == 'yes'
        }
class gp_gateways_stats(PAN):
    """Report per-gateway GlobalProtect usage statistics."""
    def get(self):
        """Return the current user count of every gateway plus a total count."""
        response = self.apicall(type='op',
                                cmd="<show><global-protect-gateway><statistics></statistics></global-protect-gateway></show>")
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        gateways = [
            {"name" : gw.find('name').string,
             "current-users" : int(gw.CurrentUsers.string)}
            for gw in soup.result.find_all("Gateway")
        ]
        return {'gateways' : gateways, 'len' : len(gateways)}
class gp_gateway_stats(PAN):
    """Report usage statistics for one GlobalProtect gateway."""
    def get(self, gateway):
        """Return the name and current user count of *gateway*."""
        response = self.apicall(type='op',
                                cmd="<show><global-protect-gateway><statistics><gateway>{}</gateway></statistics></global-protect-gateway></show>".format(gateway))
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        gw = BeautifulSoup(response.text,'xml').Gateway
        return {"name" : gw.find('name').string,
                "current-users" : int(gw.CurrentUsers.string)}
class gp_gateway_users(PAN):
    """List the users currently connected to one GlobalProtect gateway."""
    def get(self, gateway):
        """Return the current-user records of *gateway* plus a count."""
        # BUG FIX: the gateway name was hard-coded as the literal string
        # "Gateway" instead of interpolating the requested gateway, so the
        # method always queried a (likely nonexistent) gateway named
        # "Gateway" regardless of the argument.
        response = self.apicall(type='op',\
                cmd="<show><global-protect-gateway><current-user><gateway>{}</gateway></current-user></global-protect-gateway></show>".format(gateway))
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        elif BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        else:
            soup = BeautifulSoup(response.text,'xml')
            ret = {"users" : list()}
            for user in soup.result.childGenerator():
                # Skip whitespace nodes; only Tag children are user entries.
                if type(user) != Tag:
                    continue
                ret['users'].append({"domain" : user.domain.string,
                        "islocal" : True if user.islocal.string == 'yes' else False,
                        "username" : user.username.string,
                        "computer" : user.computer.string,
                        "client" : user.client.string,
                        "vpn-type" : user.find("vpn-type").string,
                        "virtual-ip" : user.find("virtual-ip").string,
                        "public-ip" : user.find("public-ip").string,
                        "tunnel-type" : user.find("tunnel-type").string,
                        "login-time" : user.find("login-time").string,
                        "login-time-utc" : user.find("login-time-utc").string,
                        "lifetime" : user.lifetime.string})
            ret['len'] = len(ret['users'])
            return ret
class gp_users(PAN):
    """List the GlobalProtect users currently connected for a domain."""
    def get(self, domain):
        """Return the current-user records for *domain* plus a count."""
        response = self.apicall(type='op',
                                cmd="<show><global-protect-gateway><current-user><domain>{}</domain></current-user></global-protect-gateway></show>".format(domain))
        # Bail out early on transport or API-level failure.
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        if BeautifulSoup(response.text,'xml').response['status'] != 'success':
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'commit' : False, 'error' : str(response.text)}, 502
        soup = BeautifulSoup(response.text,'xml')
        users = []
        for node in soup.result.childGenerator():
            # Only Tag children are user records; skip whitespace nodes.
            if type(node) != Tag:
                continue
            users.append({
                "domain" : node.domain.string,
                "islocal" : node.islocal.string == 'yes',
                "username" : node.username.string,
                "computer" : node.computer.string,
                "client" : node.client.string,
                "vpn-type" : node.find("vpn-type").string,
                "virtual-ip" : node.find("virtual-ip").string,
                "public-ip" : node.find("public-ip").string,
                "tunnel-type" : node.find("tunnel-type").string,
                "login-time" : node.find("login-time").string,
                "login-time-utc" : node.find("login-time-utc").string,
                "lifetime" : node.lifetime.string
            })
        return {"users" : users, "len" : len(users)}
class pa_botnet_report(PAN):
    """Fetch the predefined 'botnet' report from the firewall."""
    def get(self):
        """Request the botnet report asynchronously and return it as JSON."""
        # COMPAT FIX: 'async' is a reserved keyword from Python 3.7 on, so
        # apicall(async='yes', ...) is a SyntaxError there. Passing it via
        # **kwargs sends the identical parameter while staying importable
        # on both Python 2 and modern Python 3.
        response = self.apicall(type='report',
                                reporttype='predefined',
                                reportname='botnet',
                                **{'async': 'yes'})
        # Check Response Status
        if not response.ok:
            logger.error("Palo Alto response: " + str(response.status_code))
            return {'error' : str(response.text)}, 502
        # Round-trip through json to turn xmltodict's OrderedDicts into
        # plain JSON-compatible structures.
        data = xmltodict.parse(response.text)
        return json.loads(json.dumps(data))
|
##### Uncompyle #####
# Author: AnonPrixor
# This is a Public and Simple Script
import os
import sys
import time
def logo():
    """Print the tool's ASCII banner."""
    banner = """
[+]==========================[+]
# Pyc > Py Decompiler #
# Author: AnonPrixor #
# Team: PureXploit #
[+]==========================[+]
"""
    print(banner)
def uncompyle():
    """Prompt for a .pyc file and decompile it into the chosen output file.

    SECURITY FIX: the previous implementation built a shell command by
    string concatenation (os.system("uncompyle6 " + uncom + "> " + out)),
    so filenames containing shell metacharacters were interpreted by the
    shell (command injection). Run uncompyle6 with an argument list and
    redirect its stdout to the output file directly instead.
    """
    import subprocess
    uncom = input("Enter file to Decompile: ")
    out = input("Outfile: ")
    with open(out, "w") as outfile:
        subprocess.run(["uncompyle6", uncom], stdout=outfile)
def report():
    """Open the author's page in the default browser to report a bug.

    NOTE(review): relies on xdg-open, so this is Linux-desktop only, and
    the URL has no scheme -- verify it resolves as intended.
    """
    os.system("xdg-open mbasic.facebook.com/botnetmaster1337")
def menu():
    """Show the banner, print the menu, and dispatch on the user's choice."""
    logo()
    print("""
1. Uncompyle pyc > py
2. Report Bug
""")
    choice1 = input("Choose [1/2]: > ")
    # Dispatch table instead of an if-chain; unknown choices do nothing,
    # exactly as before.
    actions = {"1": uncompyle, "2": report}
    selected = actions.get(choice1)
    if selected is not None:
        selected()
# Only run the interactive menu when executed as a script, not on import.
if __name__ == "__main__":
    menu()
|
import numpy as np
from bayesnet.tensor.constant import Constant
from bayesnet.tensor.tensor import Tensor
from bayesnet.function import Function
class Swapaxes(Function):
    """Differentiable interchange of two axes of a tensor."""

    def __init__(self, axis1, axis2):
        # Axes exchanged in forward(); backward() swaps them back.
        self.axis1 = axis1
        self.axis2 = axis2

    def forward(self, x):
        """Return *x* with axis1 and axis2 interchanged."""
        x = self._convert2tensor(x)
        # Keep a reference so backward() can propagate the gradient to it.
        self.x = x
        if isinstance(self.x, Constant):
            # Constants carry no gradient, so no function link is needed.
            return Constant(np.swapaxes(x.value, self.axis1, self.axis2))
        return Tensor(np.swapaxes(x.value, self.axis1, self.axis2), function=self)

    def backward(self, delta):
        # Swapping the same two axes again undoes the forward transpose,
        # mapping the incoming gradient back to the input's layout.
        dx = np.swapaxes(delta, self.axis2, self.axis1)
        self.x.backward(dx)
def swapaxes(x, axis1, axis2):
    """
    interchange two axes of an array

    Parameters
    ----------
    x : tensor_like
        input array (np.ndarray or Tensor; converted internally)
    axis1 : int
        first axis
    axis2 : int
        second axis

    Returns
    -------
    output : Tensor
        interchanged array (Constant if the input carries no gradient)
    """
    return Swapaxes(axis1, axis2).forward(x)
|
#!/usr/bin/env python3
from tkinter import *
# class Layout:
# Main:
# Main application window.
calc = Tk()
calc.title("Calculette de merde")
calc.configure(background="orange")
# Make buttons fit the window: give all 4 columns and 7 rows equal weight
# so the grid stretches with the window.
for col in range(4):
    calc.grid_columnconfigure(col, weight=1)
for row in range(7):
    calc.grid_rowconfigure(row, weight=1)
def btnClick(number):
    """Append the pressed key's text to the pending expression and display it."""
    global operator
    operator += str(number)
    text_Input.set(operator)
def btnClear():
    """Reset the pending expression and blank the display (C button)."""
    global operator
    operator = ""
    text_Input.set("")
def btnEqual():
    """Evaluate the accumulated expression and show the result (= button).

    NOTE(review): eval() on user-entered text executes arbitrary Python;
    tolerable in a local toy calculator, never for untrusted input.
    """
    global operator
    try:
        result = str(eval(operator))
    except Exception:
        # ROBUSTNESS FIX: malformed expressions (e.g. "1+/2", or the
        # "sin"/"²" tokens the trig buttons insert) used to raise and
        # break the handler; show an error instead.
        result = "Error"
    text_Input.set(result)
    operator = ""
# Pending expression accumulated by the buttons, and the display variable.
operator = ""
text_Input = StringVar()
# create display
# NOTE(review): no parent widget is passed, so the Entry attaches to the
# default root window (calc) -- confirm this is intentional.
textDisplay = Entry(textvariable=text_Input, font=('arial', 18, 'bold'), justify='right')
textDisplay.grid(row=0, column=0, columnspan = 5, sticky=N+S+W+E)
# create buttons
# top line (sin cos tan square)
# NOTE(review): these insert the literal tokens 'sin'/'cos'/'tan'/'²' into
# the expression, which eval() cannot parse -- pressing them then '='
# produces no valid result.
Button(calc, text="sin", command = lambda:btnClick('sin')) .grid(row=1, column=0, sticky=N+S+W+E)
Button(calc, text="cos", command = lambda:btnClick('cos')) .grid(row=1, column=1, sticky=N+S+W+E)
Button(calc, text="tan", command = lambda:btnClick('tan')) .grid(row=1, column=2, sticky=N+S+W+E)
Button(calc, text="²", command = lambda:btnClick('²')) .grid(row=1, column=3, sticky=N+S+W+E)
# top middle line (C / * <-)
# NOTE(review): "<-" appends the literal string '<-' instead of deleting
# the last character -- backspace is not actually implemented.
Button(calc, text="C", command = lambda:btnClear()) .grid(row=2, column=0, sticky=N+S+W+E)
Button(calc, text="/", command = lambda:btnClick('/')) .grid(row=2, column=1, sticky=N+S+W+E)
Button(calc, text="*", command = lambda:btnClick('*')) .grid(row=2, column=2, sticky=N+S+W+E)
Button(calc, text="<-", command = lambda:btnClick('<-')) .grid(row=2, column=3, sticky=N+S+W+E)
# digit row (7 8 9 -)
Button(calc, text="7", command = lambda:btnClick('7')) .grid(row=3, column=0, sticky=N+S+W+E)
Button(calc, text="8", command = lambda:btnClick('8')) .grid(row=3, column=1, sticky=N+S+W+E)
Button(calc, text="9", command = lambda:btnClick('9')) .grid(row=3, column=2, sticky=N+S+W+E)
Button(calc, text="-", command = lambda:btnClick('-')) .grid(row=3, column=3, sticky=N+S+W+E)
# digit row (4 5 6 +)
Button(calc, text="4", command = lambda:btnClick('4')) .grid(row=4, column=0, sticky=N+S+W+E)
Button(calc, text="5", command = lambda:btnClick('5')) .grid(row=4, column=1, sticky=N+S+W+E)
Button(calc, text="6", command = lambda:btnClick('6')) .grid(row=4, column=2, sticky=N+S+W+E)
Button(calc, text="+", command = lambda:btnClick('+')) .grid(row=4, column=3, sticky=N+S+W+E)
# digit row (1 2 3) and the double-height = button
Button(calc, text="1", command = lambda:btnClick('1')) .grid(row=5, column=0, sticky=N+S+W+E)
Button(calc, text="2", command = lambda:btnClick('2')) .grid(row=5, column=1, sticky=N+S+W+E)
Button(calc, text="3", command = lambda:btnClick('3')) .grid(row=5, column=2, sticky=N+S+W+E)
Button(calc, text="=", command = lambda:btnEqual()) .grid(row=5, rowspan=2, column=3, sticky=N+S+W+E)
# bottom line (easter egg, 0, decimal point)
Button(calc, text=":)", command = lambda:btnClick('I <3 Gecko')) .grid(row=6, column=0, sticky=N+S+W+E)
Button(calc, text="0", command = lambda:btnClick('0')) .grid(row=6, column=1, sticky=N+S+W+E)
Button(calc, text=",", command = lambda:btnClick('.')) .grid(row=6, column=2, sticky=N+S+W+E)
# run the Tk main loop
calc.mainloop()
|
"""Constants for the Eloverblik integration."""
# Integration domain identifier used to register/look up this component.
DOMAIN = "eloverblik"
|
import random
import numpy as np
import tensorflow as tf
def set_global_seeds(i):
    """Seed TensorFlow, NumPy and the stdlib RNG with *i* for reproducibility.

    Uses the TF1 tf.set_random_seed API, which sets the graph-level seed;
    op-level seeds may still vary unless set individually.
    """
    tf.set_random_seed(i)
    np.random.seed(i)
    random.seed(i)
def tf_print(tensor, name):
    """Wrap *tensor* in a tf.Print identity op that logs its value.

    The printed line is prefixed with *name* and shows up to 10000
    elements. tf.Print is the (deprecated) TF1 debugging op; the tensor's
    value is unchanged.
    """
    return tf.Print(tensor, [tensor],
                    summarize=10000,
                    message='{} tensor:\n'.format(name))
def batch_to_seq(tensor, is_train, n_envs=16, n_steps=16):
    """Reshape a flat batch into a [n_envs, seq, ...] layout.

    During training the batch is reshaped to [n_envs, n_steps, ...];
    otherwise to [n_envs, 1, ...] (single-step inference). The leading
    dimensions were previously hard-coded to 16; they are now parameters
    with the same defaults, so existing two-argument calls behave
    identically.

    Args:
      tensor: input tensor whose first dimension is the flat batch.
      is_train: boolean scalar tensor selecting the training layout.
      n_envs: number of parallel sequences (default 16).
      n_steps: sequence length used during training (default 16).
    """
    shape = tensor.get_shape()[1:].as_list()
    return tf.cond(
        pred=is_train,
        true_fn=lambda: tf.reshape(tensor, [n_envs, n_steps] + shape),
        false_fn=lambda: tf.reshape(tensor, [n_envs, 1] + shape),
        name='batch_to_seq'
    )
def safe_div(numerator, denominator, name="value"):
    """Computes a safe divide which returns 0 if the denominator is zero.
    Note that the function contains an additional conditional check that is
    necessary for avoiding situations where the loss is zero causing NaNs to
    creep into the gradient computation.
    Args:
      numerator: An arbitrary `Tensor`.
      denominator: `Tensor` whose shape matches `numerator` and whose values are
        assumed to be non-negative.
      name: An optional name for the returned op.
    Returns:
      The element-wise value of the numerator divided by the denominator.
    """
    # The inner tf.where substitutes 1 wherever denominator == 0 BEFORE the
    # division, so the untaken branch never produces NaN/inf (which would
    # otherwise poison the gradient); the outer tf.where then selects 0 for
    # every element whose denominator is not > 0.
    return tf.where(
        tf.greater(denominator, 0),
        tf.div(numerator, tf.where(
            tf.equal(denominator, 0),
            tf.ones_like(denominator), denominator)),
        tf.zeros_like(numerator),
        name=name)
def safe_log(x):
    """Element-wise log that maps exact zeros to 0 instead of -inf.

    Non-zero entries are clamped below at 1e-12 before the log so tiny
    positive values cannot underflow to -inf either.
    """
    zeros = tf.zeros_like(x)
    clamped = tf.maximum(1e-12, x)
    return tf.where(tf.equal(x, 0), zeros, tf.log(clamped))
def has_nan_or_inf(datum, tensor):
    """Return True if `tensor` has any NaN or +/-inf entries.

    `datum` is unused; it is kept so the signature matches the
    two-argument debug-callback shape this predicate is passed to —
    TODO confirm against the caller.
    """
    # ~isfinite is exactly (isnan | isinf), including -inf.
    return np.any(~np.isfinite(tensor))
def send_notification(slack, message, channel):
    """Send notification to Slack channel (i.e. sc2).

    Args:
        slack: Slack client exposing ``chat.post_message`` — presumably a
            Slacker-style client object; confirm against the caller.
        message: Text body of the notification.
        channel: Channel name to post into (e.g. "sc2").
    """
    slack.chat.post_message(channel=channel, text=message, username="sc2 bot")
|
# Advent of Code day 1 part 2: find three entries summing to 2020 and print
# their product; also print the number of triples examined.
from itertools import combinations

# Fixes in this revision:
#  * the input file is now closed (with-statement);
#  * blank lines (e.g. a trailing newline, which is typical) are skipped —
#    the original crashed on int('');
#  * the manual i<j<k index triple-loop is replaced by the equivalent
#    itertools.combinations, which yields the same triples in the same order.
with open('day1.txt') as f:
    a = [int(line) for line in f.read().split('\n') if line.strip()]
c = 0
for x, y, z in combinations(a, 3):
    # c counts every triple examined, matching the original's counter.
    c = c + 1
    if x + y + z == 2020:
        print(x * y * z)
print(c)
|
#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: March 2015
Name: msfrpc_smb.py
Purpose: To scan a network for a smb ports and validate if credentials work on the target host
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import os, argparse, sys, time
# Narrowed the bare `except:` clauses to ImportError: a bare except also
# swallows KeyboardInterrupt/SystemExit and would mask unrelated errors
# raised while importing these modules.
try:
    import msfrpc
except ImportError:
    sys.exit("[!] Install the msfrpc library that can be found here: https://github.com/SpiderLabs/msfrpc.git")
try:
    import nmap
except ImportError:
    sys.exit("[!] Install the nmap library: pip install python-nmap")
try:
    import netifaces
except ImportError:
    sys.exit("[!] Install the netifaces library: pip install netifaces")
def get_interfaces():
    """Return the list of local network interface names (via netifaces)."""
    interfaces = netifaces.interfaces()
    return interfaces
def get_gateways():
    """Return a dict mapping each gateway key to [gateway_ip, interface].

    Only entries that expose an IPv4 (AF_INET) gateway are kept; the
    rest are skipped.

    Returns:
        dict: {gateway_key: [gateway_ip, interface_name]}
    """
    gateway_dict = {}
    gws = netifaces.gateways()
    for gw in gws:
        # netifaces.gateways() mixes shapes: the 'default' entry maps
        # address families to (ip, iface) pairs, while family keys map to
        # lists of tuples — so the AF_INET lookup can fail several ways.
        # Narrowed from a bare `except: pass`, which hid real errors.
        try:
            gateway_iface = gws[gw][netifaces.AF_INET]
            gateway_ip, iface = gateway_iface[0], gateway_iface[1]
        except (KeyError, IndexError, TypeError):
            continue
        gateway_dict[gw] = [gateway_ip, iface]
    return gateway_dict
def get_addresses(interface):
    """Return (hwaddr, ip, broadcast, netmask) for one interface.

    Args:
        interface: Interface name as returned by netifaces.interfaces().

    Returns:
        Tuple of (MAC address, IPv4 address, broadcast address, netmask);
        any field missing from the address dict comes back as None.

    Raises:
        KeyError: if the interface has no AF_LINK or AF_INET entry
            (dict indexing below is unguarded).
    """
    addrs = netifaces.ifaddresses(interface)
    link_addr = addrs[netifaces.AF_LINK]
    iface_addrs = addrs[netifaces.AF_INET]
    # Each family maps to a list of address dicts; only the first is used.
    iface_dict = iface_addrs[0]
    link_dict = link_addr[0]
    hwaddr = link_dict.get('addr')
    iface_addr = iface_dict.get('addr')
    iface_broadcast = iface_dict.get('broadcast')
    iface_netmask = iface_dict.get('netmask')
    return hwaddr, iface_addr, iface_broadcast, iface_netmask
def get_networks(gateways_dict):
    """Build per-interface network details from the gateway map.

    Args:
        gateways_dict: Output of get_gateways(): {key: [gateway_ip, iface]}.

    Returns:
        dict: {iface: {'gateway', 'hwaddr', 'addr', 'broadcast', 'netmask'}}
    """
    networks_dict = {}
    # .items() replaces the Python-2-only .iteritems(); it works on both
    # Python 2 and 3.
    for key, value in gateways_dict.items():
        gateway_ip, iface = value[0], value[1]
        hwaddress, addr, broadcast, netmask = get_addresses(iface)
        network = {'gateway': gateway_ip, 'hwaddr' : hwaddress, 'addr' : addr, 'broadcast' : broadcast, 'netmask' : netmask}
        networks_dict[iface] = network
    return networks_dict
def target_identifier(verbose, dir, user, passwd, ips, port_num, ifaces, ipfile):
    """Scan for hosts with an open SMB service and record the viable ones.

    Fixes in this revision:
      * ``if service_name or service_name2 in ...`` was always true because
        ``service_name`` is a non-empty string — replaced with real
        membership tests;
      * ``print("...")%(x)`` applied %-formatting to print's return value,
        a TypeError on Python 3 — formatting moved inside the call;
      * the output file was re-opened once per host (leaking descriptors)
        and "closed" via the no-op attribute access ``e.closed`` — it is
        now opened once and properly closed.

    Args:
        verbose: Verbosity level; > 0 enables progress output.
        dir: Directory where the host file is written.
        user, passwd: Unused here; kept for the existing call signature.
        ips: Target spec (single IP, range or CIDR) when no file is given.
        port_num: Port to scan, as a string (e.g. "445").
        ifaces: get_networks() output, used to exclude our own addresses.
        ipfile: Optional path to a file of targets (newline-delimited).

    Returns:
        (hosts_output, hostlist): path of the written host file and the
        list of host IPs whose SMB service was open.
    """
    hostlist = []
    pre_pend = "smb"
    service_name = "microsoft-ds"
    service_name2 = "netbios-ssn"
    protocol = "tcp"
    port_state = "open"
    hosts_output = "%s/%s_hosts" % (dir, pre_pend)
    scanner = nmap.PortScanner()
    if ipfile is not None:
        if verbose > 0:
            print("[*] Scanning for hosts from file %s" % (ipfile))
        with open(ipfile) as f:
            hostlist = f.read().replace('\n',' ')
        scanner.scan(hosts=hostlist, ports=port_num)
    else:
        if verbose > 0:
            print("[*] Scanning for host\(s\) %s" % (ips))
        scanner.scan(ips, port_num)
    # Truncate any previous results before appending fresh ones.
    open(hosts_output, 'w').close()
    hostlist = []
    if not scanner.all_hosts():
        sys.exit("[!] No viable targets were found!")
    e = open(hosts_output, 'a')
    for host in scanner.all_hosts():
        for k, v in ifaces.items():
            if v['addr'] == host:
                print("[-] Removing %s from target list since it belongs to your interface!" % (host))
                host = None
        if host is not None:
            name_field = scanner[host][protocol][int(port_num)]['name']
            if service_name in name_field or service_name2 in name_field:
                if port_state in scanner[host][protocol][int(port_num)]['state']:
                    if verbose > 0:
                        print("[+] Adding host %s to %s since the service is active on %s" % (host, hosts_output, port_num))
                    hostdata = host + "\n"
                    e.write(hostdata)
                    hostlist.append(host)
            else:
                if verbose > 0:
                    print("[-] Host %s is not being added to %s since the service is not active on %s" % (host, hosts_output, port_num))
    e.close()
    return hosts_output, hostlist
def build_command(verbose, user, passwd, dom, port, ip):
    """Assemble the Metasploit console script for the SMB enum module.

    Args:
        verbose: Unused; kept for the existing call signature.
        user, passwd, dom: SMB credentials and domain for the module.
        port: Unused; kept for the existing call signature.
        ip: Target host placed in RHOSTS.

    Returns:
        (command, module): the console command text and the module path.
    """
    module = "auxiliary/scanner/smb/smb_enumusers_domain"
    template = ("use {0}\n"
                "set RHOSTS {1}\n"
                "set SMBUser {2}\n"
                "set SMBPass {3}\n"
                "set SMBDomain {4}\n"
                "run\n")
    command = template.format(module, ip, user, passwd, dom)
    return command, module
def run_commands(verbose, iplist, user, passwd, dom, port, file):
    """Run the SMB enum module against each IP through the msfrpc console.

    Fixes in this revision:
      * ``done`` was initialized once outside the per-IP loop, so after the
        first host it stayed True and the console output of every later
        host was silently skipped — it is now reset for each host;
      * ``e.closed`` (a no-op attribute read) replaced with ``e.close()``;
      * ``print("...")%(x)`` moved the formatting inside the call so it
        also works on Python 3;
      * the console id passed to console.write/console.destroy and
        console.read is now the same value.

    Args:
        verbose: Verbosity level; > 0 echoes progress and module output.
        iplist: Hosts to run the module against.
        user, passwd, dom, port: Forwarded to build_command().
        file: Path of the file console output is appended to.
    """
    e = open(file, 'a')
    client = msfrpc.Msfrpc({})
    client.login('msf','msfrpcpassword')
    try:
        result = client.call('console.create')
    except Exception:
        sys.exit("[!] Creation of console failed!")
    console_id = int(result['id'])
    for ip in iplist:
        if verbose > 0:
            print("[*] Building custom command for: %s" % (str(ip)))
        command, module = build_command(verbose, user, passwd, dom, port, ip)
        if verbose > 0:
            print("[*] Executing Metasploit module %s on host: %s" % (module, str(ip)))
        client.call('console.write',[console_id, command])
        time.sleep(1)
        done = False  # reset per host (BUG FIX — see docstring)
        while done != True:
            result = client.call('console.read',[console_id])
            if len(result['data']) > 1:
                if result['busy'] == True:
                    time.sleep(1)
                    continue
                else:
                    console_output = result['data']
                    e.write(console_output)
                    if verbose > 0:
                        print(console_output)
                    done = True
    e.close()
    client.call('console.destroy',[console_id])
def main():
    """Parse CLI arguments and drive the scan plus Metasploit enumeration.

    Fixes in this revision:
      * ``if "\\\\" or "/" in filename:`` was always true (non-empty string
        is truthy) — each separator is now tested for membership;
      * ``print("...")%(x)`` formatting moved inside the call (Python 3
        compatible);
      * ``== None`` comparisons replaced with ``is None``.
    """
    # If script is executed at the CLI
    usage = '''usage: %(prog)s [-u username] [-p password] [-d domain] [-t IP] [-l IP_file] [-r ports] [-o output_dir] [-f filename] -q -v -vv -vvv'''
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument("-u", action="store", dest="username", default="Administrator", help="Accepts the username to be used, defaults to 'Administrator'")
    parser.add_argument("-p", action="store", dest="password", default="admin", help="Accepts the password to be used, defalts to 'admin'")
    parser.add_argument("-d", action="store", dest="domain", default="WORKGROUP", help="Accepts the domain to be used, defalts to 'WORKGROUP'")
    parser.add_argument("-t", action="store", dest="targets", default=None, help="Accepts the IP to be used, can provide a range, single IP or CIDR")
    parser.add_argument("-l", action="store", dest="targets_file", default=None, help="Accepts a file with IP addresses, ranges, and CIDR notations delinated by new lines")
    parser.add_argument("-r", action="store", dest="ports", default="445", help="Accepts the port to be used, defalts to '445'")
    parser.add_argument("-o", action="store", dest="home_dir", default="/root", help="Accepts the dir to store any results in, defaults to /root")
    parser.add_argument("-f", action="store", dest="filename", default="results", help="Accepts the filename to output relevant results")
    parser.add_argument("-v", action="count", dest="verbose", default=1, help="Verbosity level, defaults to one, this outputs each command and result")
    parser.add_argument("-q", action="store_const", dest="verbose", const=0, help="Sets the results to be quiet")
    parser.add_argument('--version', action='version', version='%(prog)s 0.42b')
    args = parser.parse_args()
    # Argument Validator
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    if (args.targets is None) and (args.targets_file is None):
        parser.print_help()
        sys.exit(1)
    # Set Constructors
    verbose = args.verbose           # Verbosity level
    password = args.password         # Password or hash to test against default is admin
    username = args.username         # Username to test against default is Administrator
    domain = args.domain             # Domain default is WORKGROUP
    ports = args.ports               # Port to test against Default is 445
    targets = args.targets           # Hosts to test against
    targets_file = args.targets_file # Hosts to test against loaded by a file
    home_dir = args.home_dir         # Location to store results
    filename = args.filename         # A file that will contain the final results
    gateways = {}
    network_ifaces = {}
    if not filename:
        if os.name != "nt":
            filename = home_dir + "/msfrpc_smb_output"
        else:
            filename = home_dir + "\\msfrpc_smb_output"
    else:
        # BUG FIX: test each path separator explicitly; a filename that
        # already contains one is used as-is, otherwise it is rooted in
        # home_dir with the platform's separator.
        if "\\" in filename or "/" in filename:
            if verbose > 1:
                print("[*] Using filename: %s" % (filename))
        else:
            if os.name != "nt":
                filename = home_dir + "/" + filename
            else:
                filename = home_dir + "\\" + filename
            if verbose > 1:
                print("[*] Using filename: %s" % (filename))
    gateways = get_gateways()
    network_ifaces = get_networks(gateways)
    hosts_file, hostlist = target_identifier(verbose, home_dir, username, password, targets, ports, network_ifaces, targets_file)
    run_commands(verbose, hostlist, username, password, domain, ports, filename)

if __name__ == '__main__':
    main()
|
# coding: utf-8
import pprint
import re
import six
class VerificationCodeDTO:
    """Request model for verification-code operations (Huawei Cloud SDK).

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    # Attributes masked as "****" by to_dict(); none for this model.
    sensitive_list = []
    openapi_types = {
        'contact': 'str',
        'verification_code': 'str',
        'country': 'str'
    }
    attribute_map = {
        'contact': 'contact',
        'verification_code': 'verificationCode',
        'country': 'country'
    }
    def __init__(self, contact=None, verification_code=None, country=None):
        """VerificationCodeDTO - a model defined in huaweicloud sdk.

        :param contact: Phone number or email address (see the `contact`
            property); assigned unconditionally, even when None.
        :param verification_code: Code to validate; only set when given.
        :param country: Country key for phone contacts; only set when given.
        """
        self._contact = None
        self._verification_code = None
        self._country = None
        self.discriminator = None
        self.contact = contact
        if verification_code is not None:
            self.verification_code = verification_code
        if country is not None:
            self.country = country
    @property
    def contact(self):
        """Gets the contact of this VerificationCodeDTO.

        Phone number or email address; the backend auto-detects which.
        Phone numbers must include the dialing prefix (e.g. mainland China:
        "+86xxxxxxxxxxx"), and then the ``country`` field is required.
        maxLength:255 minLength:1

        :return: The contact of this VerificationCodeDTO.
        :rtype: str
        """
        return self._contact
    @contact.setter
    def contact(self, contact):
        """Sets the contact of this VerificationCodeDTO.

        Phone number or email address; see the getter for the format rules.

        :param contact: The contact of this VerificationCodeDTO.
        :type: str
        """
        self._contact = contact
    @property
    def verification_code(self):
        """Gets the verification_code of this VerificationCodeDTO.

        The verification code; required when validating a code.

        :return: The verification_code of this VerificationCodeDTO.
        :rtype: str
        """
        return self._verification_code
    @verification_code.setter
    def verification_code(self, verification_code):
        """Sets the verification_code of this VerificationCodeDTO.

        The verification code; required when validating a code.

        :param verification_code: The verification_code of this VerificationCodeDTO.
        :type: str
        """
        self._verification_code = verification_code
    @property
    def country(self):
        """Gets the country of this VerificationCodeDTO.

        Country/region key for the phone number in ``contact`` (required
        when contact is a phone number), e.g. chinaPR (+86, mainland China),
        usa (+1), UK (+44), japan (+81). See the service API reference for
        the complete key-to-dialing-code table.

        :return: The country of this VerificationCodeDTO.
        :rtype: str
        """
        return self._country
    @country.setter
    def country(self, country):
        """Sets the country of this VerificationCodeDTO.

        Country/region key for the phone number in ``contact``; see the
        getter for examples and the API reference for the full table.

        :param country: The country of this VerificationCodeDTO.
        :type: str
        """
        self._country = country
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask values of attributes listed in sensitive_list.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, VerificationCodeDTO):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
from tkinter import Frame,Canvas,NW,N,S,SUNKEN,ALL,Message,CENTER
from tkinter import messagebox
from PIL import Image,ImageTk
import math
import time
import random
import os.path
from os import listdir
from os.path import isfile, join
from matrixTile import matrixTile
from configShisen import configShisen
from tileboard import tileboard
from boardEvent import boardEvent
from matchTiles import matchTiles
from log import log
class board:
    """
    Manipulates tiles on the Shisen-Sho game board: builds the tile grid,
    renders it onto a Tk canvas over a background image, and drives the
    win/blocked/pause game-state transitions.
    """
    def __init__(self,window,tile_source,dimension,size):
        self.window = window
        # size of canvas width,height in pixels
        self.size = size
        # size of base canvas, can't be modified
        self.basesize = size
        # board dimension as (columns, rows) — see the unpacking in
        # getTileSize ((ntw, nth)) and getTileFromPos ((board_cols, board_rows))
        self.dimension = dimension
        #self.tiles_image = None
        self.tile_source = tile_source
        self.canvas = None
        self.offset_size = None
        self.limits = None
        self.tiles = None
        self.msg_frame = None
        # two tiles that match
        self.clue = None
        self.background_file = configShisen.load('background-file')
        #self.background_file = 'wallpaper1.jpg'
        self.status = 'run'
    def changeSize(self,dim):
        """Change the board dimension (cols, rows) and rebuild the board."""
        log.save('in changesize, new dim:' + str(dim))
        self.dimension = dim
        self.load()
    def load(self):
        """
        load board on window:
        compute tile size, build the tile matrix, render every tile onto the
        canvas and verify the resulting layout is solvable.
        """
        # initialize timer
        self.window.onStartGame()
        log.save("loading board")
        # create matrixtile object
        matrix_tile = matrixTile(self.dimension)
        self.matrix_tile = matrix_tile
        log.save('canvas size' + str(self.size))
        new_tile_size= board.getTileSize(self.basesize,self.tile_source.tilesize, self.dimension)
        log.save('new tile size' + str(new_tile_size))
        self.tile_size = new_tile_size
        #define offset size: one tile of margin on each side
        self.offset_size = new_tile_size
        # create matrix of gameboard, indexed as [row][col]
        (dimx,dimy) = self.dimension
        matrix_board = [[ None for x in range(dimx)] for x in range(dimy)]
        # adjust size to tile_width * (cols + 2)
        self.size = (self.tile_size[0] *(self.dimension[0] + 2), self.size[1] )
        log.save('new board size' + str(self.size))
        # write image to canvas
        self.loadBoardToImage()
        self.canvas.delete(self.msg_frame)
        # load tiles in new image
        row_count=0
        for tile_row in matrix_tile.matrix_board:
            col_count=0
            for tilename in tile_row:
                position = (col_count,row_count)
                tile_board = self.drawTile(matrix_tile, new_tile_size,position,tilename)
                matrix_board[row_count][col_count] = tile_board
                col_count +=1
            row_count +=1
        self.tiles = matrix_board
        # save last box position (tile_board is the last tile drawn)
        self.last_box = tile_board.box
        #define board limits of clickable area
        self.limits = ((self.offset_size),(tile_board.box[2],tile_board.box[3]))
        # validate if board is solvable
        self.checkSolvable()
        log.save("loading board done")
    #def getTileSize(matrix_tile):
    def getTileSize(board_size,tile_size, dimension):
        """
        NOTE: defined without `self`; always invoked as
        board.getTileSize(...), so it behaves as a static helper.

        calculates the new size based on formula:
        given:
        bw = board width
        bh = board height
        tw = tile width
        th = tile height
        Tr = tile relative size = tw / th
        ntw = number of tiles horizontally
        nth = number of tiles vertically
        twb = tile width depending of board = bw /( ntw + 2)
        thb = tile height depending of board = bh /( nth + 2)
        thrb = tile height relative to tile width = twb / Tr
        fth = final tile height = minor(thrb , thb)
        ftw = final tile width = Tr * fth
        """
        #tiles=matrix_tile.matrix_board
        #rows = len(tiles)
        #cols = len(tiles[0])
        (bw,bh) = board_size
        (tw,th) = tile_size
        (ntw, nth) = dimension
        tr = tw /th
        twb = bw / (ntw + 2)
        thb = bh / (nth + 2)
        thrb = twb / tr
        fth = math.ceil(board.minor(thrb, thb))
        ftw = math.ceil(tr * fth)
        return (ftw,fth)
    #def drawTile(self,tile_board,matrix_tile,new_size,offset_size):
    def drawTile(self, matrix_tile, new_size,position, tilename):
        """
        Draw a Tile of certain type in certain position
        get the token piece from some graphic tile group
        and put on certain position in graphic board

        Returns a tileboard object carrying the canvas references and the
        bounding box of the drawn tile.
        """
        (wt,ht) = self.tile_source.tilesize
        (row_tile_base,col_tile_base) = matrix_tile.getPosTile(tilename)
        x1 = col_tile_base * wt
        y1 = row_tile_base * ht
        # cut piece of tiles image
        box_tiles = (x1 , y1 , x1 + wt , y1 + ht)
        region = self.tile_source.image.crop(box_tiles)
        # resize to the computed per-board tile size
        new_region = region.resize(size=new_size, resample=Image.BICUBIC)
        # paste tile on image board
        box_board = self.getBoxPosition(position,new_size,self.offset_size)
        # keep a reference to the PhotoImage so Tk does not garbage-collect it
        new_region_tk = ImageTk.PhotoImage(new_region)
        canvasref = self.canvas.create_image(box_board[0],box_board[1],image=new_region_tk,anchor=NW)
        tile_board = tileboard(position=position,name=tilename)
        tile_board.box = box_board
        tile_board.canvasref = canvasref #imagebox
        tile_board.imageboxtk = new_region_tk
        tile_board.imagebox = new_region
        return tile_board
    def getBoxPosition(self,position,size,offset):
        """Return the (x1, y1, x2, y2) pixel box of a (col, row) tile slot."""
        (new_width,new_height) = size
        (col_count,row_count) = position
        (width_offset,height_offset) = offset
        box_board = (col_count * new_width + width_offset, row_count * new_height + height_offset,
            col_count * new_width + new_width + width_offset, row_count * new_height + new_height + height_offset)
        return box_board
    def minor(a,b):
        """Return the smaller of a and b.

        NOTE: defined without `self`; always invoked as board.minor(...),
        so it behaves as a static helper.
        """
        if(a < b):
            return a
        else :
            return b
    def loadBoardToImage(self):
        """
        create an Image Tk object, save Image into it and
        save ImageTk object into label as image, this is the final frame
        """
        # create a blank image object
        #self.board_image = Image.new("RGB",size=self.size,color=(255,255,255))
        #self.board_image = Image.open(self.background_file)
        self.board_image = Image.open(self.getBackgroundFile())
        imagetk=ImageTk.PhotoImage(image=self.board_image)
        self.imagetk=imagetk
        if self.canvas is None:
            # first load: create the canvas once
            log.save('label image is none')
            self.canvas = Canvas(self.window.root,width=self.size[0],height=self.size[1])
            #self.canvas = Canvas(self.window.root,width=canvas_width,height=canvas_height)
            self.canvas.create_image((0,0), image=self.imagetk,anchor=NW)
        else:
            # reload: wipe everything previously drawn
            self.canvas.delete(ALL)
        # bind click events to board Event
        board_event = boardEvent(self)
        self.canvas.bind("<Button-1>",board_event.clickOnCanvas)
        # this let save image reference
        self.image_on_canvas = self.canvas.create_image((0,0), image=self.imagetk,anchor=NW)
        self.canvas.itemconfig(self.image_on_canvas, image = self.imagetk)
        # grid once
        self.canvas.grid(sticky=N+S)
    def getBackgroundFile(self):
        """
        obtain a background image: a random file from the "bg" directory
        """
        # image dir
        mypath= configShisen.getAbsPath("bg")
        onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
        file =random.choice(onlyfiles)
        return configShisen.getAbsPath("bg/" + file)
    def setStatusTile(self,tile,status):
        """Set the status of the board tile at `tile`'s (col, row) position."""
        position = tile.position
        self.tiles[position[1]][position[0]].status = status
    def getTileFromPos(self,position):
        """
        get current tile from position in game board;
        positions one step outside the grid are delegated to
        getTileFromPosWithExceptions (used for border path checks)
        """
        (col,row) = position
        (board_cols,board_rows) = self.dimension
        if ( col == -1 ) or ( col >= board_cols ) \
            or ( row == -1 ) or ( row >= board_rows ) :
            tile= self.getTileFromPosWithExceptions(position)
        else :
            tile = self.tiles[row][col]
        return tile
    def getTileFromPosWithExceptions(self,position):
        """
        get current tile from position in game board
        and manage exceptions
        a position outside board tiles exists if
        colpos < 0 or rowpos < 0 or colpos >= board_cols or rowpos >= board_rows
        given: (x1,y1) upper left position and (x2,y2) right bottom position of box
        given: (xl,yl) lower right corner of lower right tile in board
        if colpos = -1 => x1 = 0 and x2 = x1 + tile_width
        if rowpos = -1 => y1 = 0 and y2 = y1 + tile_height
        if colpos >= board_cols => x1 = xl , x2 = x1 + tile_width
        if rowpos >= board_rows => y1 = yl , y2 = y1 + tile_height
        """
        # manage exceptions
        (col,row) = position
        (board_cols,board_rows) = self.dimension
        (tile_width,tile_height) = self.tile_size
        # get last tile (lower-right corner of the grid)
        last_tile = self.tiles[board_rows - 1][board_cols - 1]
        (xnp,ynp,xl,yl) = last_tile.box
        x1=y1=None
        if col == -1 :
            x1 = 0
        elif col >= board_cols:
            x1 = xl
        if row == -1 :
            y1 = 0
        elif row >= board_rows:
            y1 = yl
        # in-range coordinate: derive the pixel position from the grid index
        if x1 is None:
            x1 = (col + 1) * tile_width
        if y1 is None:
            y1 = (row + 1) * tile_height
        x2 = x1 + tile_width
        y2 = y1 + tile_height
        box = (x1,y1,x2,y2)
        # synthetic, nameless tile: only its box/position are meaningful
        tile = tileboard(name='',position=position)
        tile.box = box
        return tile
    def checkWin(self):
        """
        check if all tiles are in status "free"; if so, end the game as a win
        """
        for rows in self.tiles:
            for tile in rows:
                if tile.status != "free":
                    return False
        # no tiles free, you won!
        self.endGame('You Won!!!')
        return True
    def checkSolvable(self):
        """
        if game is not solvable, then block
        sweep board of tiles and order in array indexing by tilename
        then sweep that array and look for two similar tiles that match;
        the first matching pair found is stored in self.clue
        """
        tilelist={}
        for rows in self.tiles:
            for tile in rows:
                if tile.status == 'busy':
                    if not tile.name in tilelist:
                        tilelist[tile.name] = []
                    tilelist[tile.name].append(tile)
        match_tiles = matchTiles()
        for tilename, tilelistsame in tilelist.items():
            for tile1 in tilelistsame:
                for tile2 in tilelistsame:
                    if tile1 != tile2:
                        if match_tiles.positionMatch(tile1,tile2,self):
                            #self.valid_path = match_tiles.valid_path
                            self.clue = tile1,tile2
                            return True
        # there is not pair of tiles matching a valid position, then block game
        self.endGame('Game Blocked')
    def endGame(self,message):
        """Show an end-of-game message; clicking it reloads the board."""
        self.__showMessage(message,self.reload)
        # disable clue
        self.window.onEndGame()
    def __showMessage(self,message,action=None):
        """Overlay a message frame on the canvas; `action` handles clicks."""
        frame=Frame(height=200, width=200, bd=1, relief=SUNKEN)
        frame.grid()
        w = Message(frame, text=message)
        w.config(bg='lightgreen', font=('times',24,'bold'))
        w.bind('<Button-1>',action)
        w.grid()
        # center the message on the board
        centerxc = math.ceil(self.size[0] / 2)
        centeryc = math.ceil(self.size[1] / 2)
        self.msg_frame = self.canvas.create_window(centerxc,centeryc,window=frame,anchor=CENTER)
    def showMessageTest(self):
        """Manual test helper: trigger the end-game overlay."""
        self.endGame('hola a todos esto es un test!!!')
    def reload(self,event):
        """Click handler that rebuilds the board."""
        #self.canvas.unbind('<Button-1>')
        log.save('loading board again')
        self.load()
    def showClue(self):
        """
        show the last valid path found with checksolvable
        use the path and show it
        """
        self.checkSolvable()
        # get last path
        tile1,tile2 = self.clue
        # highlight tiles
        tile1.twinkle(5,False)
        tile2.twinkle(5,False)
    def pause(self):
        """Toggle pause: show the pause overlay, or resume if already paused."""
        if self.status == 'run' :
            self.status = 'paused'
            #self.window.pauseClock(True)
            self.window.onPauseGame()
            self.__showMessage('paused',self.onEndPause)
        else:
            self.onEndPause()
    def onEndPause(self,event=None):
        """Resume from pause: remove the overlay and restart the clock."""
        #self.window.pauseClock(False)
        self.window.onEndPause()
        self.canvas.delete(self.msg_frame)
        self.status = 'run'
|
# Trip price calculator: R$0.50 per km up to 200 km, R$0.45 per km beyond.
distancia = int(input('Digite a distancia da sua viagem: '))
tarifa = 0.45 if distancia > 200 else 0.5
preco = distancia * tarifa
print('O preço dessa viagem vai custar R${}'.format(preco))
print('--fim--')
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from os import path
import sys
import datetime
sys.path.append(path.dirname(path.dirname(path.dirname(__file__))))
from DatabaseHandler.initiation import InfoDB
class FetchhandlerPipeline(object):
    """Scrapy pipeline that persists scraped lecture items into InfoDB."""

    def process_item(self, item, spider):
        """Insert one lecture item into the database and pass it through.

        The Chinese date markers 年/月/日 in `lecture_time` are normalized
        to '-' before insertion. Returns the item unchanged so later
        pipelines still receive it.
        """
        # issued_fmt_time = str(item.get('issued_time', '')).replace('-', '')
        # if issued_fmt_time \
        #     < datetime.datetime.strftime(datetime.datetime.now() - datetime.timedelta(days=90), '%Y%m%d'):
        #     return item
        # join() replaces the original quadratic += concatenation loop.
        lecturers = ''.join(item['lecturer'])
        self.infodb.insert_Lecture(
            item.get('title', ''),
            lecturers,
            item.get('issued_time', ''),
            str(item.get('lecture_time', '')).replace('年', '-').replace('月', '-').replace('日', '-'),
            item.get('location', ''),
            item.get('uni', ''),
            item.get('url', ''),
            item.get('description', '')
        )
        return item

    def open_spider(self, spider):
        """Open the database connection when the spider starts."""
        self.infodb = InfoDB()
        self.infodb.openDB()

    def close_spider(self, spider):
        """Close the database connection when the spider finishes."""
        self.infodb.closeDB()
|
import sys
def fibonacci(n):
    """Print n+1 consecutive Fibonacci pairs, one "a,b" line per step.

    Note: the `yield` was commented out upstream, so this is a plain
    function (always returns None), not a generator.
    """
    prev, curr, steps = 0, 1, 0
    while steps <= n:
        prev, curr = curr, prev + curr
        print('%d,%d' % (prev, curr))
        steps += 1


f = fibonacci(10)
# ----------------------------------------------------------------------------------------------
import sys


def fibonacci(n):  # generator function
    """Yield the Fibonacci numbers F(0)..F(n), printing state after each yield."""
    a, b, counter = 0, 1, 0
    while True:
        if (counter > n):
            return
        yield a
        a, b = b, a + b
        print('a = %d, b = %d' % (a,b))
        counter += 1


f = fibonacci(10)  # f is an iterator returned by the generator fibonacci()

# Drain the generator; a generator's `return` raises StopIteration.
while True:
    try:
        print("next(f) =", next(f), end=" / ")
    except StopIteration:
        # Was a bare "except:", which also swallowed KeyboardInterrupt and
        # any unrelated error; only generator exhaustion should end the loop.
        sys.exit()
|
# This is not 100% vanilla, because still we need to replace the xml of new body.
#
import numpy as np
import pybullet
from pybullet_envs.gym_locomotion_envs import WalkerBaseBulletEnv, Walker2DBulletEnv
from pybullet_envs.robot_locomotors import WalkerBase, Walker2D
from pybullet_envs.scene_stadium import MultiplayerStadiumScene
import pybullet_data
from pathlib import Path
class _Walker2D(Walker2D):
    """Walker2D robot whose morphology comes from a caller-supplied XML file.

    `param` carries morphology-derived values (torso_center_height is the
    only key read here); `render` is accepted for signature parity but is
    unused in this class.
    """
    def __init__(self, xml, param, render=False):
        self.param = param
        WalkerBase.__init__(self, xml, "torso", action_dim=6, obs_dim=22, power=0.40)
    def robot_specific_reset(self, bullet_client):
        """Reset joint power coefficients and lift the robot above the ground."""
        super().robot_specific_reset(bullet_client)
        # power coefficient should be proportional to the min possible volume of that part. (Avoid pybullet fly-away bug.)
        self.jdict["thigh_joint"].power_coef = 65
        self.jdict["leg_joint"].power_coef = 31
        self.jdict["foot_joint"].power_coef = 18
        self.jdict["thigh_left_joint"].power_coef = 65
        self.jdict["leg_left_joint"].power_coef = 31
        self.jdict["foot_left_joint"].power_coef = 18
        # I deleted ignore_joints in mujoco xml files, so i need to place the robot at an appropriate initial place manually.
        robot_id = self.objects[0] # is the robot pybullet_id
        bullet_client.resetBasePositionAndOrientation(
            bodyUniqueId=robot_id, posObj=[0, 0, self.param["torso_center_height"] + 0.1],
            ornObj=[0, 0, 0, 1]) # Lift the robot higher above ground
class Walker2DEnv(Walker2DBulletEnv):
    """Walker2D gym environment using a custom XML robot plus a hard step limit."""
    def __init__(self, xml, param, render=False, max_episode_steps=1000):
        self.robot = _Walker2D(xml=xml, param=param)
        self.max_episode_steps = max_episode_steps
        WalkerBaseBulletEnv.__init__(self, self.robot, render)
    def reset(self):
        """Reset the per-episode step counter, then delegate to the base env."""
        self.step_num = 0
        return super().reset()
    def step(self, a):
        """Step the base env; force done once the step budget is exhausted."""
        self.step_num += 1
        obs, r, done, info = super().step(a)
        # Time-limit truncation: end the episode after max_episode_steps steps.
        if self.step_num > self.max_episode_steps:
            done = True
        return obs, r, done, info
|
from math import hypot

# Read the two legs (catetos) of a right triangle and print the hypotenuse.
c = float(input('Digite o cateto: '))
# Fixed typo in the user-facing prompt: "catedo" -> "cateto".
ca = float(input('Digite o cateto adjacente: '))
h = hypot(c, ca)
print('A hipotenusa tem o valor de {:.2f}'.format(h))
|
from sys import path
from os.path import dirname as dir
from shutil import rmtree
# Make the project root (parent of this script's directory) importable.
path.append(dir(path[0]))
from analizer import grammar
from analizer.reports import BnfGrammar
from analizer.interpreter import symbolReport
# When truthy, wipe the persisted "data" directory before parsing.
dropAll = 0
if dropAll:
    print("Eliminando registros")
    rmtree("data")
s = """
--SELECT * FROM tab where 5 IN (select * from tab2);
--SELECT factorial(17) AS factorial,
--EXP(2.0) as Exponencial,
--LN(5.0) "Logaritmo Natural",
--PI(),
--POWER(5,2);
--create table tblibrosalario
--( idempleado integer not null,
--aniocalculo integer not null CONSTRAINT aniosalario CHECK (aniocalculo > 0),
--mescalculo integer not null CONSTRAINT mescalculo CHECK (mescalculo > 0 ),
--salariobase money not null,
--comision decimal(1,1),
--primary key(idempleado)
--);
--SELECT * from tab1 where exists (select * from tab2 where 1 < 2);
--ALTER TABLE tab1 ADD foreign key(a,b) references tab2(b,c);
--ALTER TABLE tab1 ADD COLUMN col1 NUMERIC(1,2);
--UPDATE tbempleadopuesto SET idpuesto = 2 where idempleado = 2;
--select primernombre,segundonombre,primerapellido,fechaventa, sum(s)
--from tbventa V,tbempleado E
--where V.idempleado = E.idempleado
--group by primernombre,segundonombre,primerapellido,fechaventa
--having x = 2 limit all offset 2;
--USE test;
--select E.*,
-- estado,
-- I.identificacion,
-- tipoidentificacion
--from tbempleado E,
-- tbestado ES,
-- tbempleadoidentificacion I,
-- tbidentificaciontipo IT
--where ES.idestado = E.idestado
--and I.idempleado = E.idempleado
--and IT.ididentificaciontipo = I.ididentificaciontipo;
--SELECT distinct caca.primernombre FROM tbempleado de1, (SELECT de2.primernombre FROM tbempleado de2 WHERE de1.idempleado = de2.idempleado) AS caca;
"""
# Parse the sample SQL text above with the project grammar and print the result.
result = grammar.parse(s)
print(result)
# print(symbolReport())
# grammar.InitTree()
# BnfGrammar.grammarReport()
|
# fastfield main python script.
## change enviroment variable
## activate 'walker'
import json
import pickle as pck
import os
import shutil
#use after database analysis
class move_FILE():
    """Moves a daily-report file from an input directory into a per-job
    sub-folder of the output directory, creating the folder when needed.

    The job number is the file-name prefix up to the first "_".
    """

    def __init__(self,input_dir,output_file_name,output_dir):
        self.output_file_name = output_file_name
        self.input_path = os.path.join(input_dir,output_file_name)
        # Job number: everything before the first underscore in the file name.
        self.job_num = output_file_name[0:output_file_name.find("_")]
        self.dir = output_dir
        # Snapshot of job folders already present in the output directory.
        self.existing_jobs = self.get_job_nums(self.dir)

    @staticmethod
    def get_job_nums(job_dir):
        """Return the names of all sub-directories of job_dir (one per job)."""
        job_nums = [f for f in os.listdir(job_dir) if not os.path.isfile(os.path.join(job_dir,f))]
        return job_nums

    def __create_folder(self,folder_name):
        """Create <output_dir>/<folder_name>.

        Fixed: this was declared @staticmethod while still taking `self`,
        which made it uncallable as an instance method.
        """
        os.mkdir(os.path.join(self.dir,folder_name))

    @staticmethod
    def __remove_special_characters(file_name,special_char=":][}{/\)(",new_char="_"):
        """takes input string, and replaces all special_char with new_char"""
        for char in special_char:
            file_name = file_name.replace(char,new_char)
        return file_name

    def move_file(self):
        """Move this file into <output_dir>/<job_num>/, sanitizing its name."""
        new_file = self.__remove_special_characters(self.output_file_name) #remove illegal characters
        job_folder_path = os.path.join(self.dir,str(self.job_num))
        complete_save_path = os.path.join(job_folder_path,new_file)
        if str(self.job_num) not in self.existing_jobs:
            self.__create_folder(str(self.job_num))
        os.replace(self.input_path,complete_save_path)
def organize_files(input_dir,job_dir,file_ext=".pdf"):
    """uses open_JSON & move_FILE classes over a directory to organize all files into given job folder

    Returns True when at least one file was moved, None when no new files
    with the given extension were found.
    """
    input_files = [f for f in os.listdir(input_dir) if f.endswith(file_ext)]
    if not input_files:  # idiomatic emptiness test (was: == [])
        print("No New Files Found")
        return None
    for f in input_files:
        move_class = move_FILE(input_dir,f,job_dir)
        move_class.move_file()
    return True
def main(input_dir = r"S:\Personal Folders\FTP\Dailys",
         daily_pdfs_dir = r"S:\Personal Folders\Job Dailys"):
    """Organize newly dropped daily PDFs into per-job folders.

    Returns True when new files were moved, None otherwise (consumed by a
    higher-level driver as a "new file" flag).
    """
    return organize_files(input_dir, daily_pdfs_dir)


if __name__ == '__main__':
    main()
|
from django.urls import path
from .views import (
PostListView,
PostListViewByTag,
PostDetailView,
PostCreateView,
PostUpdateView,
PostDeleteView,
UserPostListView,
SearchPostView,
CommentCreateView,
post_like,
PostLikeViewRedirect,
PostLikeAPI,
)
from . import views
from .feeds import LatestPostsFeed
# URL routes for the blog app: post listing/detail/CRUD, per-user and
# per-tag listings, search, RSS feed, likes (redirect + API) and a PDF view.
urlpatterns = [
    # path('', views.home, name='blog-home'),
    path('', PostListView.as_view(), name='blog-home'),
    path('tag/<tag_slug>/', PostListViewByTag.as_view(), name='posts-list-by-tag'),
    path('post/new/', PostCreateView.as_view(), name='post-create'),
    path('user-posts/<str:username>/', UserPostListView.as_view(), name='user-posts'),
    # path('posts/<int:pk>/', PostDetailView.as_view(), name='post-detail'),
    # path('posts/<int:year>/<int:month>/<int:day>/<slug:slug>/', CommentCreateView.as_view(), name='post-comment'),
    # Two detail routes share the name 'post-detail': slug-only and dated-slug.
    path('posts/<slug:slug>/', PostDetailView.as_view(), name='post-detail'),
    # path('posts/<int:pk>/<slug:slug>/', PostDetailView.as_view(), name='post-detail'),
    path('posts/<int:year>/<int:month>/<int:day>/<slug:slug>/', PostDetailView.as_view(), name='post-detail'),
    path('post/<int:pk>/update/', PostUpdateView.as_view(), name='post-update'),
    path('post/<int:pk>/delete/', PostDeleteView.as_view(), name='post-delete'),
    path('about/', views.about, name='blog-about'),
    path('posts/search', SearchPostView.as_view(), name='post-search'),
    path('feed/', LatestPostsFeed(),name='posts-feed'),
    path('posts/<int:year>/<int:month>/<int:day>/<slug:slug>/like/', PostLikeViewRedirect.as_view(), name='post-clap' ),
    path('posts/<slug:slug>/api-like/', PostLikeAPI.as_view(), name='post-api-clap' ),
    # path('posts/<slug:slug>/api-like/', PostLikeAPI.as_view(), name='post-api-clap' ),
    # path('posts/<int:pk>/generate-pdf/', views.post_pdf,name='post-pdf' ),
    path('hello/', views.view_pdf,name='hello-pdf' )
]
|
"""
Maze environment for reinforcement learning, with the python package tkinter.
Red rectangle: explorer.
Black rectangle: hells [reward = -1]
Yellow bin circle: paradise [reward = +1]
All other state: ground [reward = 0]
Alse referenced the tutorial of morvanzhou: https://morvanzhou.github.io/tutorials/,
but make some change, initial state has became random state
"""
import numpy as np
import random
import time
import sys
if sys.version_info.major == 2:
import Tkinter as tk
else:
import tkinter as tk
UNIT = 40 # pixels
def is_different(*numbers):
    """Return True when every value in *numbers* is distinct."""
    return len(set(numbers)) == len(numbers)
class Maze(tk.Tk, object):
    """Grid-world maze rendered with tkinter.

    Red rectangle: the agent (explorer); black rectangles: holes
    (reward -1, episode ends); yellow oval: the goal (reward +1,
    episode ends); any other cell: ground (reward 0).
    The agent's start cell is re-randomized on every reset().
    """

    def __init__(self, opt):
        super(Maze, self).__init__()
        self.action_space = opt.action_space
        self.maze_w = opt.maze_w
        self.maze_h = opt.maze_h
        self.hole_num = opt.hole_num
        self.fresh_time = opt.fresh_time
        self.n_actions = len(self.action_space)
        self.title("{}X{} MAZE".format(self.maze_h, self.maze_w))
        # NOTE(review): tk geometry strings are "<width>x<height>"; for a
        # non-square maze the two factors below look swapped -- confirm intent.
        self.geometry("{}x{}".format(self.maze_h * UNIT, self.maze_w * UNIT))
        # Cell centers sit at UNIT/2 + k*UNIT for k in [0, size).
        self.origin_x = UNIT/2 + random.randint(0, self.maze_w - 1) * UNIT
        self.origin_y = UNIT/2 + random.randint(0, self.maze_h - 1) * UNIT
        self.origin = [self.origin_x, self.origin_y]
        seed = 1  # 736
        if opt.play:
            seed = int(input("Please enter the seed: "))
        # Search for a seed that places all holes, the goal and the start on
        # distinct cells. Two cells coincide iff both the sum and the
        # difference of their coordinates match, so uniqueness of all sums
        # OR of all differences is accepted.
        while True:
            random.seed(seed)
            for i in range(self.hole_num):
                self.__setattr__("hole{}_center".format(i),
                                 [UNIT/2 + random.randint(0, self.maze_w - 1) * UNIT,
                                  UNIT/2 + random.randint(0, self.maze_h - 1) * UNIT])
            self.oval_center = [UNIT/2 + random.randint(0, self.maze_w - 1) * UNIT,
                                UNIT/2 + random.randint(0, self.maze_h - 1) * UNIT]
            self.sum_centers = [sum(self.__getattribute__("hole{}_center".format(i)))
                                for i in range(self.hole_num)]
            if opt.play:
                break
            # NOTE(review): this check is vacuous -- the comprehension above
            # always yields exactly hole_num entries.
            if len(self.sum_centers) != self.hole_num:
                continue
            self.sub_centers = [(self.__getattribute__("hole{}_center".format(i))[0] -
                                 self.__getattribute__("hole{}_center".format(i))[1])
                                for i in range(self.hole_num)]
            self.sum_centers.extend([sum(self.origin), sum(self.oval_center)])
            if is_different(*self.sum_centers):
                break
            else:
                self.sub_centers.extend([self.origin[0] - self.origin[1], self.oval_center[0] - self.oval_center[1]])
                if is_different(*self.sub_centers):
                    break
            seed += 1
        print("Remember the seed: {}".format(seed))
        self._build_maze()

    def _build_maze(self):
        """Create the canvas: grid lines, hole rectangles, goal oval, agent rect."""
        self.canvas = tk.Canvas(self, bg="white",
                                height=self.maze_h * UNIT,
                                width=self.maze_w * UNIT)
        # create grids
        for c in range(0, self.maze_w * UNIT, UNIT):
            x0, y0, x1, y1 = c, 0, c, self.maze_h * UNIT
            self.canvas.create_line(x0, y0, x1, y1)
        for r in range(0, self.maze_h * UNIT, UNIT):
            x0, y0, x1, y1 = 0, r, self.maze_w * UNIT, r
            self.canvas.create_line(x0, y0, x1, y1)
        # create holes
        for i in range(self.hole_num):
            hole_center = self.__getattribute__("hole{}_center".format(i))
            self.__setattr__("hole{}".format(i),
                             self.canvas.create_rectangle(
                                 hole_center[0] - 15, hole_center[1] - 15,
                                 hole_center[0] + 15, hole_center[1] + 15,
                                 fill="black"))
        # create oval
        self.oval = self.canvas.create_oval(
            self.oval_center[0] - 15, self.oval_center[1] - 15,
            self.oval_center[0] + 15, self.oval_center[1] + 15,
            fill="yellow"
        )
        # create red rect
        self.rect = self.canvas.create_rectangle(
            self.origin[0] - 15, self.origin[1] - 15,
            self.origin[0] + 15, self.origin[1] + 15,
            fill="red"
        )
        # pack all
        self.canvas.pack()

    def reset(self):
        """Re-randomize the agent's start cell and return the initial observation.

        The start is re-rolled until it does not coincide with any hole or
        with the goal (same coordinate sum AND same coordinate difference
        would mean the same cell).
        """
        while True:
            self.origin_x = UNIT/2 + random.randint(0, self.maze_w - 1) * UNIT
            self.origin_y = UNIT/2 + random.randint(0, self.maze_h - 1) * UNIT
            self.origin = [self.origin_x, self.origin_y]
            self.sum_centers = [sum(self.__getattribute__("hole{}_center".format(i)))
                                for i in range(self.hole_num)]
            self.sub_centers = [(self.__getattribute__("hole{}_center".format(i))[0] -
                                 self.__getattribute__("hole{}_center".format(i))[1])
                                for i in range(self.hole_num)]
            # BUG FIX: the original passed list.extend()'s return value
            # (always None) into is_different(), so the uniqueness test was
            # vacuously true and a start on top of a hole/goal was accepted.
            # Extend first, then test, mirroring the logic in __init__.
            self.sum_centers.extend([sum(self.origin), sum(self.oval_center)])
            if is_different(*self.sum_centers):
                break
            self.sub_centers.extend([self.origin[0] - self.origin[1],
                                     self.oval_center[0] - self.oval_center[1]])
            if is_different(*self.sub_centers):
                break
        self.update()
        time.sleep(self.fresh_time*2)
        self.canvas.delete(self.rect)
        self.rect = self.canvas.create_rectangle(
            self.origin[0] - 15, self.origin[1] - 15,
            self.origin[0] + 15, self.origin[1] + 15,
            fill="red"
        )
        rect_coords = self.canvas.coords(self.rect)
        # Observation: agent-to-goal offset, normalized by the board height.
        observation = (np.array(rect_coords[:2]) - np.array(self.canvas.coords(self.oval)[:2]))/(self.maze_h * UNIT)
        return observation

    def step(self, action):
        """Apply one action (0=up, 1=down, 2=left, 3=right).

        Returns (observation, reward, done); attempting to walk off the
        board ends the episode.
        """
        state = self.canvas.coords(self.rect)
        base_action = [0, 0]
        done = False
        if action == 0:  # up
            if state[1] > UNIT:
                base_action[1] -= UNIT
            else:
                reward = -1
                done = True
        elif action == 1:  # down
            if state[1] < (self.maze_h * UNIT - UNIT):
                base_action[1] += UNIT
            else:
                reward = -1
                done = True
        elif action == 2:  # left
            if state[0] > UNIT:
                base_action[0] -= UNIT
            else:
                reward = -1
                done = True
        elif action == 3:  # right
            if state[0] < self.maze_w * UNIT - UNIT:
                base_action[0] += UNIT
            else:
                reward = -1
                done = True
        self.canvas.move(self.rect, base_action[0], base_action[1])  # move agent
        state = self.canvas.coords(self.rect)
        # reward function
        # NOTE(review): a wall hit sets reward=-1 above, but this branch then
        # overwrites it with 0 (the agent stayed off goal/holes) -- confirm
        # whether the -1 wall penalty was meant to survive. Behavior kept.
        if state == self.canvas.coords(self.oval):
            reward = 1
            done = True
        elif state in [self.canvas.coords(self.__getattribute__("hole{}".format(i))) for i in range(self.hole_num)]:
            reward = -1
            done = True
        else:
            reward = 0
        observation = (np.array(state[:2]) - np.array(self.canvas.coords(self.oval)[:2]))/(self.maze_h * UNIT)
        return observation, reward, done

    def render(self):
        """Redraw the window after a short delay (fresh_time seconds)."""
        time.sleep(self.fresh_time)
        self.update()
def update():
    """Demo loop: run 10 episodes taking a fixed action, then close the window.

    Uses the module-level `env` created in the __main__ guard.
    """
    for _ in range(10):
        env.reset()
        done = False
        while not done:
            env.render()
            _, _, done = env.step(0)
    env.destroy()
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    # CLI knobs for the demo: render speed, board size, hole count, actions.
    parser.add_argument("--fresh_time", dest="fresh_time", type=float, default=0.1, help="fresh time for one move")
    parser.add_argument("--maze_h", dest="maze_h", type=int, default=8, help="the height of the maze(unit:unit)")
    parser.add_argument("--maze_w", dest="maze_w", type=int, default=8, help="the width of the maze(unit:unit)")
    parser.add_argument("--hole_num", dest="hole_num", type=int, choices=range(5), default=2, help="the number of holes")
    parser.add_argument("--action_space", nargs="+", default=["u", "d", "l", "r"], help="all available actions")
    # NOTE(review): Maze.__init__ also reads opt.play, which is never
    # registered here -- running this guard as-is would raise AttributeError.
    opt = parser.parse_args()
    env = Maze(opt)
    # Schedule the demo loop to start 10 ms after the tk mainloop begins.
    env.after(10, update)
    env.mainloop()
|
#! venv/bin/python
import unittest
import benicio
from app import db, views, models
dbs = db.session
class BenicioTestCase(unittest.TestCase):
    """Integration tests for the benicio Flask app: CRUD for rules, groups
    and devices, firewall-rule generation/apply, login and password change.

    NOTE(review): the `'...' in rv.data` membership checks assume Python 2,
    where response data is str; under Python 3 rv.data is bytes and these
    asserts would need b'...' literals -- confirm the target interpreter.
    """
    def setUp(self):
        """Enable test mode, pre-authenticate the session, seed an Admin user."""
        benicio.app.config['TESTING'] = True
        benicio.app.config['WTF_CSRF_ENABLED'] = False
        self.client = benicio.app.test_client()
        # Plant a user id in the session so every request is authenticated.
        with self.client.session_transaction() as session:
            session['user_id'] = '1'
        # Seed the Admin account (hash corresponds to the password 'bendmin').
        admin = models.User(name='Admin', password='pbkdf2:sha1:1000$0jrJWk7S$15c39bbdff6514dfe3377ea5a7a15b7fd1f31a9b')
        dbs.add(admin)
        dbs.commit()
    def tearDown(self):
        """Empty every table so each test starts from a clean database."""
        [dbs.execute(table.delete()) for table in db.metadata.sorted_tables]
        dbs.commit()
    # --- listing and creation ---
    def test_empty_database(self):
        rv = self.client.get('/devices')
        assert 'No entries yet' in rv.data
    def test_add_rule(self):
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out'))
        rv = self.client.get('/rules')
        assert 'Johns Rule' in rv.data
        assert 'deny out' in rv.data
    def test_add_group(self):
        self.client.post('/groups', data=dict(name='Johns Group'))
        rv = self.client.get('/groups')
        assert 'Johns Group' in rv.data
    def test_add_device(self):
        # A device requires an existing group (group=1 is the one just created).
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='AA:BB:CC:DD:EE', group=1))
        rv = self.client.get('/devices')
        assert 'Johns PC' in rv.data
        assert 'PC Group' in rv.data
        assert 'AA:BB:CC:DD:EE' in rv.data
    # --- deletion via the REST API ---
    def test_delete_rule(self):
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out'))
        self.client.delete('/api/rules/1', data=dict(id='1'))
        rv = self.client.get('/rules')
        assert 'Johns Rule' not in rv.data
    def test_delete_group(self):
        self.client.post('/groups', data=dict(name='Johns Group'))
        self.client.delete('/api/groups/1')
        rv = self.client.get('/groups')
        assert 'Johns Group' not in rv.data
    def test_delete_device(self):
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='AA:BB:CC:DD:EE', group=1))
        self.client.delete('/api/devices/1')
        rv = self.client.get('/devices')
        assert 'Johns PC' not in rv.data
    # --- edit forms render the existing entity ---
    def test_edit_device(self):
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='AA:BB:CC:DD:EE', group=1))
        rv = self.client.get('/devices/1')
        assert 'Johns PC' in rv.data
    def test_edit_group(self):
        self.client.post('/groups', data=dict(name='Johns Group'))
        rv = self.client.get('/groups/1')
        assert 'Johns Group' in rv.data
    def test_edit_rule(self):
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out'))
        rv = self.client.get('/rules/1')
        assert 'Johns Rule' in rv.data
    # --- posting with an id updates instead of creating ---
    def test_alter_rule(self):
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out'))
        self.client.post('/rules', data=dict(id='1', name='Other Rule', rule='deny out'))
        rv = self.client.get('/rules')
        assert 'Other Rule' in rv.data
        assert 'Johns Rule' not in rv.data
    def test_alter_device(self):
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='AA:BB:CC:DD:EE', group=1))
        self.client.post('/devices', data=dict(id='1', name='Other PC', mac_address='AA:BB:CC:DD:EE', group=1))
        rv = self.client.get('/devices')
        assert 'Other PC' in rv.data
        assert 'Johns PC' not in rv.data
    # --- firewall rule generation and application ---
    def test_load_apply_rules(self):
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='AA:BB:CC:DD:EE', group=1))
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out'))
        rv = self.client.get('/apply')
        assert 'PC Group' in rv.data
        assert 'Johns PC' in rv.data
        assert 'Johns Rule' in rv.data
    def test_generate_rules(self):
        # generate_rules expands the [MAC] placeholder once per selected device.
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='37:3B:09:FC:5C:1D', group=1))
        self.client.post('/devices', data=dict(name='Johns Phone', mac_address='8E:32:62:F0:5D:38', group=1))
        self.client.post('/devices', data=dict(name='Johns Tablet', mac_address='DE:81:00:E5:32:94', group=1))
        self.client.post('/devices', data=dict(name='Johns Notebook', mac_address='DE:4C:47:55:7B:41', group=1))
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out [MAC]'))
        self.client.post('/rules', data=dict(name='Johns Rule Exception', rule='allow in [MAC]'))
        rv = views.generate_rules([1, 2], [1, 2, 3, 4])
        assert 'deny out mac 37:3B:09:FC:5C:1D' in rv
        assert 'deny out mac 8E:32:62:F0:5D:38' in rv
        assert 'deny out mac DE:81:00:E5:32:94' in rv
        assert 'deny out mac DE:4C:47:55:7B:41' in rv
        assert 'allow in mac 37:3B:09:FC:5C:1D' in rv
        assert 'allow in mac 8E:32:62:F0:5D:38' in rv
        assert 'allow in mac DE:81:00:E5:32:94' in rv
        assert 'allow in mac DE:4C:47:55:7B:41' in rv
    def test_apply_rules(self):
        # The response echoes the generated rules URL-encoded (":" -> %3A).
        self.client.post('/groups', data=dict(name='PC Group'))
        self.client.post('/devices', data=dict(name='Johns PC', mac_address='37:3B:09:FC:5C:1D', group=1))
        self.client.post('/devices', data=dict(name='Johns Phone', mac_address='8E:32:62:F0:5D:38', group=1))
        self.client.post('/devices', data=dict(name='Johns Tablet', mac_address='DE:81:00:E5:32:94', group=1))
        self.client.post('/rules', data=dict(name='Johns Rule', rule='deny out [MAC]'))
        self.client.post('/rules', data=dict(name='Johns Rule Exception', rule='allow in [MAC]'))
        rv = self.client.post('/apply', data=dict(rule1='a', rule2='b', device1='c', device2='d', device3='e'))
        self.assertTrue('deny+out+mac+37%3A3B%3A09%3AFC%3A5C%3A1D' in rv.data)
        self.assertTrue('deny+out+mac+8E%3A32%3A62%3AF0%3A5D%3A38' in rv.data)
        self.assertTrue('deny+out+mac+DE%3A81%3A00%3AE5%3A32%3A94' in rv.data)
        self.assertTrue('allow+in+mac+37%3A3B%3A09%3AFC%3A5C%3A1D' in rv.data)
        self.assertTrue('allow+in+mac+8E%3A32%3A62%3AF0%3A5D%3A38' in rv.data)
        self.assertTrue('allow+in+mac+DE%3A81%3A00%3AE5%3A32%3A94' in rv.data)
    # --- authentication ---
    def test_change_password(self):
        self.client.post('/index', data=dict(current_password='bendmin', new_password='12345', retype_password='12345'))
        user = models.User.query.filter_by(name="Admin").first()
        self.assertTrue(user.check_password('12345'))
    def test_login(self):
        self.client.post('/login', data=dict(user='Admin', password='bendmin'))
        rv = self.client.get('/index')
        assert 'Logged in successfully' in rv.data
    @staticmethod
    def test_clear_group():
        # Model-level behavior: clearing a group resets group_id to 0.
        group = models.Group(id=1, name='Group One')
        dbs.add(group)
        dbs.commit()
        device = models.Device(name='Johns PC', mac_address='37:3B:09:FC:5C:1D', group=1)
        device.clear_group()
        assert device.group_id == 0
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.