from django.db import models
class LongLivedAccessToken(models.Model):
access_token = models.CharField(max_length=500, primary_key=True)
token_type = models.CharField(max_length=500)
expires_in = models.DateField(null=True, blank=True)
date_created = models.DateField(auto_now_add=True)
def __str__(self):
return self.access_token
|
__all__ = ["get_config"]
|
from io import BytesIO
from typing import List, Optional
from typing_extensions import Self
import requests
from helper import (
encode_varint,
hash256,
int_to_little_endian,
little_endian_to_int,
read_varint,
SIGHASH_ALL
)
from script import Script
from ecc import PrivateKey
class Tx:
"""
Defines a bitcoin transaction
"""
def __init__(self, version, tx_ins, tx_outs, locktime, testnet=False) -> None:
"""
Initializes a transaction
"""
self.version = version
self.tx_ins: List[TxIn] = tx_ins
self.tx_outs: List[TxOut] = tx_outs
self.locktime = locktime
self.testnet = testnet
def __repr__(self) -> str:
"""
String representation of a transaction
"""
tx_ins = ''
for tx_in in self.tx_ins:
tx_ins += tx_in.__repr__() + '\n'
tx_outs = ''
for tx_out in self.tx_outs:
tx_outs += tx_out.__repr__() + '\n'
return 'tx: {}\nversion: {}\ntx_ins:\n{}tx_outs:\n{}locktime: {}'.format(
self.id(),
self.version,
tx_ins,
tx_outs,
self.locktime,
)
def id(self):
"""
Human-readable hexadecimal of the transaction hash
"""
return self.hash().hex()
def hash(self):
"""
Binary hash of the legacy serialization
"""
return hash256(self.serialize())[::-1]
@classmethod
def parse(cls, stream, testnet=False) -> "Tx":
"""
Parses a transaction from a byte stream
"""
version = little_endian_to_int(stream.read(4))
num_inputs = read_varint(stream)
inputs = []
for _ in range(num_inputs):
inputs.append(TxIn.parse(stream))
num_outputs = read_varint(stream)
outputs = []
for _ in range(num_outputs):
outputs.append(TxOut.parse(stream))
locktime = little_endian_to_int(stream.read(4))
return cls(version, inputs, outputs, locktime, testnet=testnet)
def serialize(self):
"""
Returns the byte serialization of the transaction
"""
result = int_to_little_endian(self.version, 4)
result += encode_varint(len(self.tx_ins))
for tx_in in self.tx_ins:
result += tx_in.serialize()
result += encode_varint(len(self.tx_outs))
for tx_out in self.tx_outs:
result += tx_out.serialize()
result += int_to_little_endian(self.locktime, 4)
return result
def fee(self):
"""
Calculates the transaction fee in satoshi
"""
input_sum, output_sum = 0, 0
for tx_in in self.tx_ins:
input_sum += tx_in.value(self.testnet)
for tx_out in self.tx_outs:
output_sum += tx_out.amount
return input_sum - output_sum
def sig_hash(self, input_index: int, redeem_script=None):
"""
Compute the integer representation of the signature hash (of this transaction) that needs
to be signed for the input at index input_index.
"""
s = int_to_little_endian(self.version, 4)
s += encode_varint(len(self.tx_ins))
for i, tx_in in enumerate(self.tx_ins):
if i == input_index:
if redeem_script:
script_sig = redeem_script
else:
script_sig = tx_in.script_pubkey(self.testnet)
s += TxIn(
prev_tx=tx_in.prev_tx,
prev_index=tx_in.prev_index,
script_sig=script_sig,  # the redeem script for P2SH inputs, otherwise the previous ScriptPubKey
sequence=tx_in.sequence
).serialize()
else:
s += TxIn(
prev_tx=tx_in.prev_tx,
prev_index=tx_in.prev_index,
sequence=tx_in.sequence
).serialize()
s += encode_varint(len(self.tx_outs))
for tx_out in self.tx_outs:
s += tx_out.serialize()
s += int_to_little_endian(self.locktime, 4)
s += int_to_little_endian(SIGHASH_ALL, 4)
h256 = hash256(s)
return int.from_bytes(h256, 'big')
def verify_input(self, input_index: int):
"""
Verify a transaction input
"""
tx_in: TxIn = self.tx_ins[input_index]
script_pubkey = tx_in.script_pubkey(testnet=self.testnet)
if script_pubkey.is_p2sh_script_pubkey():
cmd = tx_in.script_sig.cmds[-1]
raw_redeem = encode_varint(len(cmd)) + cmd
redeem_script = Script.parse(BytesIO(raw_redeem))
else:
redeem_script = None
z = self.sig_hash(input_index, redeem_script)
combined: Script = tx_in.script_sig + script_pubkey
return combined.evaluate(z)
def verify(self):
"""
Verify this transaction
"""
if self.fee() < 0:
return False
for i in range(len(self.tx_ins)):
if not self.verify_input(i):
return False
return True
def sign_input(self, input_index: int, private_key: PrivateKey):
"""
Signs a transaction input
Args:
input_index (int): the index of the transaction input
private_key (PrivateKey): private key used to sign the transaction input
"""
z = self.sig_hash(input_index=input_index)
der = private_key.sign(z).der()
sig = der + SIGHASH_ALL.to_bytes(1, 'big')
sec = private_key.point.sec()
self.tx_ins[input_index].script_sig = Script([sig, sec])
return self.verify_input(input_index)
def is_coinbase(self) -> bool:
"""
Returns True if the Tx is a coinbase transaction, else returns False
"""
if len(self.tx_ins) != 1:
return False
first_input: TxIn = self.tx_ins[0]
if first_input.prev_tx != b'\x00' * 32:
return False
if first_input.prev_index != 0xffffffff:
return False
return True
def coinbase_height(self) -> Optional[int]:
"""
Returns the block height of the coinbase transaction
"""
if not self.is_coinbase():
return None
element = self.tx_ins[0].script_sig.cmds[0]
return little_endian_to_int(element)
class TxIn:
"""
Transaction Input
"""
def __init__(self, prev_tx, prev_index, script_sig=None, sequence=0xffffffff) -> None:
"""
Instantiates a new transaction input
"""
self.prev_tx = prev_tx
self.prev_index = prev_index
if script_sig is None:
self.script_sig = Script()
else:
self.script_sig = script_sig
self.sequence = sequence
def __repr__(self) -> str:
"""
String representation of a transaction input
"""
return f"{self.prev_tx.hex()}:{self.prev_index}"
@classmethod
def parse(cls, stream) -> "TxIn":
"""
Takes a byte stream and parses the tx_input at the start
Returns a TxIn
"""
prev_tx = stream.read(32)[::-1]
prev_index = little_endian_to_int(stream.read(4))
script_sig = Script.parse(stream)
sequence = little_endian_to_int(stream.read(4))
return cls(prev_tx, prev_index, script_sig, sequence)
def serialize(self):
"""
Returns the byte serialization of the transaction input
"""
result = self.prev_tx[::-1]
result += int_to_little_endian(self.prev_index, 4)
result += self.script_sig.serialize()
result += int_to_little_endian(self.sequence, 4)
return result
def fetch_tx(self, testnet=False):
"""
Fetches a transaction
"""
return TxFetcher.fetch(self.prev_tx.hex(), testnet=testnet)
def value(self, testnet=False):
"""
Get the output value by looking up the transaction hash.
Returns the amount in satoshi
"""
tx = self.fetch_tx(testnet=testnet)
return tx.tx_outs[self.prev_index].amount
def script_pubkey(self, testnet=False):
"""
Get the ScriptPubKey by looking up the tx hash
Returns a Script object
"""
tx: Tx = self.fetch_tx(testnet=testnet)
return tx.tx_outs[self.prev_index].script_pubkey
class TxOut:
"""
Transaction Output
"""
def __init__(self, amount, script_pubkey) -> None:
"""
Instantiates a new transaction output
"""
self.amount = amount
self.script_pubkey = script_pubkey
def __repr__(self) -> str:
"""
String representation of transaction output
"""
return f'{self.amount}:{self.script_pubkey}'
@classmethod
def parse(cls, stream) -> Self:
"""
Takes a byte stream and parses the tx_output at the start.
Returns a TxOut object
"""
amount = little_endian_to_int(stream.read(8))
script_pubkey = Script.parse(stream)
return cls(amount, script_pubkey)
def serialize(self):
"""
Returns the byte serialization of the transaction output
"""
result = int_to_little_endian(self.amount, 8)
result += self.script_pubkey.serialize()
return result
class TxFetcher:
"""
Fetch and cache raw transactions from a block explorer service
"""
cache = {}
@classmethod
def get_url(cls, testnet=False):
"""
Get mainnet or testnet url
"""
if testnet:
return 'http://testnet.programmingbitcoin.com'
else:
return 'http://mainnet.programmingbitcoin.com'
@classmethod
def fetch(cls, tx_id, testnet=False, fresh=False) -> "Tx":
"""
Fetch a transaction by its id, using the cache unless fresh=True
"""
if fresh or (tx_id not in cls.cache):
url = f'{cls.get_url(testnet)}/tx/{tx_id}.hex'
response = requests.get(url)
try:
raw = bytes.fromhex(response.text.strip())
except ValueError:
raise ValueError(f'unexpected response: {response.text}')
if raw[4] == 0:
raw = raw[:4] + raw[6:]
tx = Tx.parse(BytesIO(raw), testnet=testnet)
tx.locktime = little_endian_to_int(raw[-4:])
else:
tx = Tx.parse(BytesIO(raw), testnet=testnet)
if tx.id() != tx_id:
raise ValueError(f'not the same id: {tx.id()} vs {tx_id}')
cls.cache[tx_id] = tx
cls.cache[tx_id].testnet = testnet
return cls.cache[tx_id]
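# --- Usage sketch (not part of the original module) ---
# A minimal round-trip example, assuming the helper/script/ecc modules imported
# above are available. It builds a one-input, one-output transaction, serializes
# it and parses it back; no network access is needed because fee()/verify()
# are never called.
if __name__ == '__main__':
    placeholder_prev_tx = bytes(32)  # all-zero previous txid, for illustration only
    tx_in = TxIn(prev_tx=placeholder_prev_tx, prev_index=0)
    tx_out = TxOut(amount=50_000, script_pubkey=Script())
    tx = Tx(version=1, tx_ins=[tx_in], tx_outs=[tx_out], locktime=0)
    raw = tx.serialize()
    parsed = Tx.parse(BytesIO(raw))
    assert parsed.id() == tx.id()  # same serialization, same hash
    print(parsed)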
|
# -*- coding: utf-8 -*-
"""Boilerplate:
A one line summary of the module or program, terminated by a period.
Rest of the description. Multiliner
<div id = "exclude_from_mkds">
Excluded doc
</div>
<div id = "content_index">
<div id = "contributors">
Created on Fri Aug 27 17:25:06 2021
@author: Timothe
</div>
"""
from PyQt5.QtWidgets import QWidget, QPushButton,QHBoxLayout, QGridLayout, QApplication, QFrame, QVBoxLayout
class QDynamicGenericLayout():
def __init__(self):
super().__init__()
self._mapping = {}
self._map_index = 0
@property
def _lower_index(self):
try :
max_index = max(list(self._mapping.values()))
except ValueError:  # the mapping is empty because the layout deleted all of its widgets
return
if self._map_index - 1 > max_index :
self._map_index = max_index + 1
def __getitem__(self, key):
try :
return self.itemAt(self._mapping[key]).widget()
except Exception as e:
print(e,self._mapping)
return None
def delete(self,key):
if self[key] is not None :
self[key].deleteLater()
self._mapping.pop(key)
self._lower_index
def selfdelete(self):
for key in list(self._mapping.keys()):
self.delete(key)
class QDynamicGridLayout(QGridLayout, QDynamicGenericLayout):
def __init__(self,parent = None):
super().__init__()
def addWidget(self, widgetname, widget , coordx, coordy):
if widgetname in self._mapping.keys():
raise ValueError("Two widgets with the same name, not allowed")
self._mapping.update({widgetname:self._map_index})
self._map_index += 1
super().addWidget(widget, coordx, coordy)
class QDynamicHLayout(QHBoxLayout, QDynamicGenericLayout):
def __init__(self,parent = None):
super().__init__()
def addWidget(self, widgetname, widget ):
if widgetname in self._mapping.keys():
raise ValueError("Two widgets with the same name, not allowed")
self._mapping.update({widgetname:self._map_index})
self._map_index += 1
super().addWidget(widget)
class QDynamicVLayout(QVBoxLayout, QDynamicGenericLayout):
def __init__(self,parent = None):
super().__init__()
def addWidget(self, widgetname, widget ):
if widgetname in self._mapping.keys():
raise ValueError("Two widgets with the same name, not allowed")
self._mapping.update({widgetname:self._map_index})
self._map_index += 1
super().addWidget(widget)
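# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of the name-to-widget mapping these layouts add on
# top of the stock Qt layouts: widgets are registered under a unique name and
# can be retrieved or deleted by that name later.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    frame = QFrame()
    layout = QDynamicGridLayout()
    frame.setLayout(layout)
    layout.addWidget('ok_button', QPushButton('OK'), 0, 0)
    layout.addWidget('cancel_button', QPushButton('Cancel'), 0, 1)
    print(layout['ok_button'].text())  # -> 'OK'
    layout.delete('cancel_button')     # removes the widget and its mapping entry
    frame.show()
    sys.exit(app.exec_())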
|
import ibmsecurity.utilities.tools
module_uri="/isam/felb/configuration/services/"
requires_modules=None
requires_version=None
def get(isamAppliance, service_name, check_mode=False, force=False):
"""
Retrieves layer configuration
"""
return isamAppliance.invoke_get("Retrieving Layer Configuration", "{0}{1}/layer".format(module_uri, service_name))
def update(isamAppliance, service_name, type, layer7_secure, layer7_ssl_label, layer7_cookie=None, check_mode=False, force=False):
"""
Updates specified service name layer
"""
change_required = _check(isamAppliance, service_name, type, layer7_secure, layer7_ssl_label, layer7_cookie)
if force is True or change_required is True:
return isamAppliance.invoke_put("Updating Service Layer", "{0}{1}/layer".format(module_uri, service_name),
{
"type": type,
"layer7_secure": layer7_secure,
"layer7_ssl_label": layer7_ssl_label,
"layer7_cookie": layer7_cookie
}, requires_version=requires_version, requires_modules=requires_modules)
else:
return isamAppliance.create_return_object(changed=False)
def _check(isamAppliance, service_name, type, layer7_secure, layer7_ssl_label, layer7_cookie):
"""
Checks update for idempotency
"""
change_required=False
ret_obj = get(isamAppliance, service_name)
if ret_obj['data']['type'] != type:
change_required=True
elif ret_obj['data']['layer7_secure'] != layer7_secure:
change_required=True
elif ret_obj['data']['layer7_ssl_label'] != layer7_ssl_label:
change_required=True
elif ret_obj['data']['layer7_cookie'] != layer7_cookie:
change_required=True
return change_required
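# --- Usage sketch (not part of the original module) ---
# Hedged example of the idempotent update flow: _check() compares the current
# layer configuration with the desired values, and invoke_put is only issued
# when something differs (or force=True). `appliance` is assumed to be an
# already-authenticated ISAMAppliance instance.
#
#   result = update(appliance, 'my_service', type='layer7',
#                   layer7_secure=True, layer7_ssl_label='my_ssl_label',
#                   layer7_cookie='PD-ID')
#   # result is expected to report changed=False when the appliance already
#   # matches the requested configuration.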
|
import typing
from typing import Iterable, Optional
from .utils.meta import roundrepr
from .xref import Xref
__all__ = ["Definition"]
class Definition(str):
"""A human-readable text definition of an entity.
Definitions are human-readable descriptions of an entity in the ontology
graph, with some optional cross-references to support the definition.
Example:
Simply create a `Definition` instance by giving it a string::
>>> def1 = pronto.Definition('a structural anomaly')
Additional cross-references can be passed as arguments, or added later
to the ``xrefs`` attribute of the `Definition`:
>>> def2 = pronto.Definition('...', xrefs={pronto.Xref('MGI:Anna')})
>>> def2.xrefs.add(pronto.Xref('ORCID:0000-0002-3947-4444'))
The text content of the definition can be accessed by casting the
definition object to a plain string:
>>> str(def1)
'a structural anomaly'
Caution:
A `Definition` compares only based on its textual value, independently
of the `Xref` objects it may contain:
>>> def2 == pronto.Definition('...')
True
Note:
Some ontologies use the xrefs of a description to attribute the
authorship of that definition:
>>> cio = pronto.Ontology.from_obo_library("cio.obo")
>>> sorted(cio['CIO:0000011'].definition.xrefs)
[Xref('Bgee:fbb')]
The common use case, however, is to refer to the source of a definition
using persistent identifiers like ISBN book numbers or PubMed IDs.
>>> pl = pronto.Ontology.from_obo_library("plana.obo")
>>> sorted(pl['PLANA:0007518'].definition.xrefs)
[Xref('ISBN:0-71677033-4'), Xref('PMID:4853064')]
"""
xrefs: typing.Set[Xref]
__slots__ = ("__weakref__", "xrefs")
def __new__(cls, text: str, xrefs=None) -> "Definition":
return super().__new__(cls, text) # type: ignore
def __init__(self, text: str, xrefs: Optional[Iterable[Xref]] = None) -> None:
self.xrefs = set(xrefs) if xrefs is not None else set()
def __repr__(self) -> str:
return roundrepr.make("Definition", str(self), xrefs=(self.xrefs, set()))
|
from django.shortcuts import render, redirect
from apps.session.models import UserProfile, Group, GroupMessage
from django.contrib.auth.decorators import login_required
@login_required(login_url='/session/login/')
def make_group(request):
if request.method != "POST":
return render(request, 'session/make_group.html')
group_name = request.POST['groupname']
try:
Group.objects.get(name=group_name)
return render(request, 'session/make_group.html',
{'error':
'The name is already being used by another group'})
except Group.DoesNotExist:
new_group = Group.objects.create(name=group_name)
if request.POST['groupname'] != "":
new_group.add_member(request.user.userprofile)
return redirect('/session/group/message/'+new_group.name+'/manage')
# main view of session/group
@login_required(login_url='/session/login/')
def view_group_list(request):
mygroups = request.user.userprofile.groups.all()
return render(request, 'session/grouplist.html', {'groups': mygroups})
@login_required(login_url='/session/login/')
def group_message(request, groupname):
try:
group = Group.objects.get(name=groupname)
except Group.DoesNotExist:
return render(request, 'session/group_message.html',
{'group': None, 'error': 'The group does not exist'})
if not (request.user.userprofile in group.members.all()):
return render(request, 'session/group_message.html',
{'group': None,
'error': 'You are not a member of this group'})
if request.method == "POST":
if request.POST['content'] != "":
GroupMessage.objects.create(content=request.POST['content'],
sender=request.user.userprofile,
receivers=group)
messages = GroupMessage.objects.filter(receivers=group)
messages = messages.order_by('created_time')
return render(request, 'session/group_message.html',
{'group': group,
'me': request.user.userprofile,
'messages': messages})
@login_required(login_url='/session/login/')
def manage(request, groupname):
try:
group = Group.objects.get(name=groupname)
except Group.DoesNotExist:
return render(request, 'session/group_manage.html',
{'group': None, 'error': 'The group does not exist'})
if not (request.user.userprofile in group.members.all()):
return render(request, 'session/group_manage.html',
{'group': None,
'error': 'You are not a member of this group'})
members = group.members.all().order_by('nickname')
if request.method == "GET":
return render(request, 'session/group_manage.html',
{'group': group, 'members': members})
# DELETE request(remove self)
if request.method == "DELETE":
group.remove_member(request.user.userprofile)
return redirect('/session/group/')
# POST request(add member)
try:
invitee = UserProfile.objects.get(nickname=request.POST['nickname'])
except UserProfile.DoesNotExist:
return render(request, 'session/group_manage.html',
{'group': group,
'members': members,
'error': 'The user does not exist'})
if invitee in members:
return render(request, 'session/group_manage.html',
{'group': group,
'members': members,
'error': 'The user is already in this group'})
group.add_member(invitee)
return redirect('/session/group/message/'+group.name+'/manage/')
|
from django.http.response import HttpResponse
class PandasJsonResponse(HttpResponse):
"""
An easier way to return a JSON-encoded response built from a pandas
DataFrame via ``DataFrame.to_json()``
"""
def __init__(self, data, **kwargs):
data = data.reset_index().to_json(orient='records', date_format='iso')
kwargs.setdefault('content_type', 'application/json')
super().__init__(content=data, **kwargs)
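# --- Usage sketch (not part of the original module) ---
# A minimal, hypothetical Django view showing the intended use: hand the
# response class a pandas DataFrame and its records are returned as JSON.
#
#   import pandas as pd
#
#   def sales_report(request):
#       df = pd.DataFrame({'region': ['north', 'south'], 'total': [120, 95]})
#       return PandasJsonResponse(df)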
|
import requests
from . import FeedSource, _request_headers
class Coincap(FeedSource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.nb_coins_included_in_altcap_x = getattr(self, 'nb_coins_included_in_altcap_x', 10)
def _fetch(self):
feed = {}
base = self.bases[0]
if base == 'BTC':
coincap_front = requests.get('http://www.coincap.io/front').json()
coincap_global = requests.get('http://www.coincap.io/global').json()
alt_cap = float(coincap_global["altCap"])
alt_caps_x = [float(coin['mktcap'])
for coin in coincap_front[0:self.nb_coins_included_in_altcap_x+1]
if coin['short'] != "BTC"][0:self.nb_coins_included_in_altcap_x]
alt_cap_x = sum(alt_caps_x)
btc_cap = float(coincap_global["btcCap"])
btc_altcap_price = alt_cap / btc_cap
btc_altcapx_price = alt_cap_x / btc_cap
if 'ALTCAP' in self.quotes:
self.add_rate(feed, base, 'ALTCAP', btc_altcap_price, 1.0)
if 'ALTCAP.X' in self.quotes:
self.add_rate(feed, base, 'ALTCAP.X', btc_altcapx_price, 1.0)
return feed
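# --- Worked example (not part of the original module) ---
# With hypothetical numbers: if the aggregate altcoin market cap reported by
# /global is 300e9 USD and the BTC market cap is 600e9 USD, then
# ALTCAP = 300e9 / 600e9 = 0.5, i.e. the whole altcoin market is worth half a
# BTC market cap. ALTCAP.X uses the same ratio but only sums the market caps of
# the top `nb_coins_included_in_altcap_x` non-BTC coins from /front.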
|
# -*- encoding: utf-8 -*-
"""Parser Module.
This module is an interface to the functionalities of pycparser,
a Python implementation of a parser for C99 source code. The
pycparser implementation used here is a forked version from
the original one.
.. _pycparser fork:
https://github.com/DonAurelio/pycparser
"""
import os
from . import ast_visitor
from . import pycparser
FAKE_DEFINES = '#include <_fake_defines.h>'
"""str: Definitions headers.
In a given C source code there are words that have a special meaning
for the user and the compiler, for example *NULL*, *false*, *true*.
Therefore the compiler needs to know that these words have a special
meaning and are not simply text. The *#include <_fake_defines.h>* header
defines those important definitions.
Example:
#define NULL 0
#define false 0
#define true 1
"""
FAKE_TYPEDEFS = '#include <_fake_typedefs.h>'
"""str: Type definitions header.
To parse a given C source code, the compiler needs to know
what is the type of each declared variable.
"""
FAKE_INCLUDES = [FAKE_DEFINES,FAKE_TYPEDEFS]
"""List[str]: .
The docstring may span multiple lines. The type may optionally be specified
on the first line, separated by a colon.
"""
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
"""int: Module level variable documented inline.
The docstring may span multiple lines. The type may optionally be specified
on the first line, separated by a colon.
"""
FAKE_INCLUDES_DIR = os.path.join(BASE_DIR,'utils','fake_libc_include')
"""int: Module level variable documented inline.
The docstring may span multiple lines. The type may optionally be specified
on the first line, separated by a colon.
"""
def fake_cfile(file_path):
"""Include fake include header with the basic C99 type definitions.
To parce a C99 source code it is not neccesary to get the whole
Libaries included. Then, to parse the code, we remove those headers
an include fake headers with the neccesarry dafinitions to allow the
parser to parse de code.
Args:
file_path (str): Path to the C99 source code file which
include heraders needs to be faked.
Returns:
A path the facked file, it is to say a file with the suffix
fake_<filename>.c.
"""
dir_path = os.path.dirname(file_path)
file_name = os.path.basename(file_path)
new_file_name = 'fake_' + file_name
faked_file_path = os.path.join(dir_path,new_file_name)
file_lines = []
new_lines = []
with open(file_path,'r') as file:
file_lines = file.readlines()
fake_includes = FAKE_INCLUDES[:]
for line in file_lines:
if fake_includes and '#include' in line:
new_lines.append(fake_includes.pop(0) + '\n')
elif '#include' in line:
new_lines.append('//include removed' + '\n')
else:
new_lines.append(line)
with open(faked_file_path,'w') as fakefile:
fakefile.write(''.join(new_lines))
return faked_file_path
def parse_cfile(file_path, preprocessor='cpp'):
"""Parse C99 source code into an abstract syntax tree (AST).
Args:
file_path (str): Path to the file to be parsed.
preprocessor (str): C preprocessor executable to use.
Returns:
The abstract syntax tree of the parsed file.
"""
faked_file_path = fake_cfile(file_path=file_path)
ast = pycparser.parse_file(filename=faked_file_path,use_cpp=True,
cpp_path=preprocessor, cpp_args=['-E', r'-I%s' % FAKE_INCLUDES_DIR])
return ast
def get_data_from_cfile(file_path,compiler='gcc'):
Split C99 source code into sections.
Use pycparser to parse C99 source code and divide it into three sections:
**include**, **declaration**, **functions**.
Example:
{
'file_path': '/home/somebody/project/code.c',
'include': '#include <stdlib.h>\n#include <sdtio.h>',
'declaration': '#define PI 3.141516',
'functions': [
{
name: 'initialize',
begin: 12,
end: 15,
raw:'void initialize(int i){\n //somebody\n}'
},
...
]
}
file_path is the path to the parsed file. include holds the raw include
section of the file and declaration the raw declarations. functions is a
list of dicts with information about each function: the lines on which it
begins and ends, and its raw code.
Note:
This function only supports files that can be parsed by pycparser,
that is, C99 source code.
Args:
file_path (str): The path to the C99 source file to be parsed.
compiler (str): The compiler to preprocess the c99 source code file.
Returns:
dict: a dict containing the sections of the C99 source code.
"""
code_ast = parse_cfile(file_path)
visitor = ast_visitor.FuncDefVisitor()
funcdefs = visitor.funcdefs(code_ast)
fundefs_data = visitor.funcdefs_data(funcdefs)
code_data = {}
with open(file_path,'r') as file:
code_lines = file.readlines()
includes_end_line = [ line + 1 for line, raw in enumerate(code_lines) \
if '#include' in raw ][-1]
fundef_init_line = fundefs_data[0]['begin'] - 1
code_data['file_path'] = file_path
code_data['include'] = ''.join(code_lines[:includes_end_line])
code_data['declaration'] = ''.join(code_lines[includes_end_line:fundef_init_line])
code_data['functions'] = fundefs_data
for funcdef_data in code_data['functions']:
begin = funcdef_data['begin'] - 1
end = funcdef_data['end']
funcdef_data['raw'] = ''.join(code_lines[begin:end])
return code_data
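# --- Usage sketch (not part of the original module) ---
# Hedged example: parse a small C99 file and print the recovered sections.
# It assumes a working `cpp` preprocessor on the PATH and that `example.c`
# exists next to the caller.
#
#   data = get_data_from_cfile('example.c')
#   print(data['include'])          # the raw #include lines
#   print(data['declaration'])      # everything between the includes and the first function
#   for func in data['functions']:
#       print(func['name'], func['begin'], func['end'])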
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# This is a partial copy of Spack Gromacs package
# - modified URL and versions
# - removed Plumed patches
# - calling original patch and cmake-related procedures to not duplicate them
# - simplified variants/dependencies because this fork starts at Gromacs 2021
import os
from spack.pkg.builtin.gromacs import Gromacs as BuiltinGromacs
class GromacsChainCoordinate(CMakePackage):
"""
A modification of GROMACS that implements the "chain coordinate", a reaction
coordinate for pore formation in membranes and stalk formation between membranes.
"""
homepage = 'https://gitlab.com/cbjh/gromacs-chain-coordinate/-/blob/main/README.md'
url = 'https://gitlab.com/cbjh/gromacs-chain-coordinate/-/archive/release-2021.chaincoord-0.1/gromacs-chain-coordinate-release-2021.chaincoord-0.1.tar.bz2'
git = 'https://gitlab.com/cbjh/gromacs-chain-coordinate.git'
maintainers = ['w8jcik']
version('main', branch='main')
version('2021.2-0.1', sha256="879fdd04662370a76408b72c9fbc4aff60a6387b459322ac2700d27359d0dd87",
url="https://gitlab.com/cbjh/gromacs-chain-coordinate/-/archive/release-2021.chaincoord-0.1/gromacs-chain-coordinate-release-2021.chaincoord-0.1.tar.bz2",
preferred=True)
variant('mpi', default=True,
description='Activate MPI support (disable for Thread-MPI support)')
variant('shared', default=True,
description='Enables the build of shared libraries')
variant(
'double', default=False,
description='Produces a double precision version of the executables')
variant('cuda', default=False, description='Enable CUDA support')
variant('opencl', default=False, description='Enable OpenCL support')
variant('sycl', default=False, description='Enable SYCL support')
variant('nosuffix', default=False, description='Disable default suffixes')
variant('build_type', default='RelWithDebInfo',
description='The build type to build',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel',
'Reference', 'RelWithAssert', 'Profile'))
variant('openmp', default=True,
description='Enables OpenMP at configure time')
variant('hwloc', default=True,
description='Use the hwloc portable hardware locality library')
variant('lapack', default=False,
description='Enables an external LAPACK library')
variant('blas', default=False,
description='Enables an external BLAS library')
variant('cycle_subcounters', default=False,
description='Enables cycle subcounters')
depends_on('mpi', when='+mpi')
depends_on('fftw-api@3')
depends_on('cmake@3.16.0:3.99.99', type='build')
depends_on('cuda', when='+cuda')
depends_on('sycl', when='+sycl')
depends_on('lapack', when='+lapack')
depends_on('blas', when='+blas')
depends_on('hwloc', when='+hwloc')
filter_compiler_wrappers(
'*.cmake',
relative_root=os.path.join('share', 'cmake', 'gromacs_mpi'))
filter_compiler_wrappers(
'*.cmake',
relative_root=os.path.join('share', 'cmake', 'gromacs'))
def patch(self):
BuiltinGromacs.patch(self)
def cmake_args(self):
return super(GromacsChainCoordinate, self).cmake_args()
def check(self):
"""The default 'test' targets does not compile the test programs"""
with working_dir(self.build_directory):
if self.generator == 'Unix Makefiles':
self._if_make_target_execute('check')
elif self.generator == 'Ninja':
self._if_ninja_target_execute('check')
|
#!/usr/bin/env python3
#Author: Stefan Toman
def is_leap(year):
#custom code starts here
if year % 4 != 0:
return False
if year % 400 == 0:
return True
if year % 100 == 0:
return False
return True
#custom code ends here
if __name__ == '__main__':
year = int(input())
print(is_leap(year))
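# Worked examples (not part of the original submission):
#   is_leap(2000) -> True   (divisible by 400)
#   is_leap(1900) -> False  (divisible by 100 but not by 400)
#   is_leap(2024) -> True   (divisible by 4 but not by 100)
#   is_leap(2023) -> False  (not divisible by 4)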
|
# This file is part of sner4 project governed by MIT license, see the LICENSE.txt file.
"""
scheduler.views.queue tests
"""
import json
from http import HTTPStatus
from pathlib import Path
from flask import url_for
from sner.server.scheduler.models import Job, Queue
def test_queue_list_route(cl_operator):
"""queue list route test"""
response = cl_operator.get(url_for('scheduler.queue_list_route'))
assert response.status_code == HTTPStatus.OK
def test_queue_list_json_route(cl_operator, queue):
"""queue list_json route test"""
response = cl_operator.post(
url_for('scheduler.queue_list_json_route'),
{'draw': 1, 'start': 0, 'length': 1, 'search[value]': queue.name}
)
assert response.status_code == HTTPStatus.OK
response_data = json.loads(response.body.decode('utf-8'))
assert response_data['data'][0]['name'] == queue.name
response = cl_operator.post(
url_for('scheduler.queue_list_json_route', filter=f'Queue.name=="{queue.name}"'),
{'draw': 1, 'start': 0, 'length': 1}
)
assert response.status_code == HTTPStatus.OK
response_data = json.loads(response.body.decode('utf-8'))
assert response_data['data'][0]['name'] == queue.name
def test_queue_add_route(cl_operator, queue_factory):
"""queue add route test"""
aqueue = queue_factory.build()
form = cl_operator.get(url_for('scheduler.queue_add_route')).form
form['name'] = aqueue.name
form['config'] = aqueue.config
form['group_size'] = aqueue.group_size
form['priority'] = aqueue.priority
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
tqueue = Queue.query.filter(Queue.name == aqueue.name).one()
assert tqueue.name == aqueue.name
def test_queue_add_route_config_validation(cl_operator, queue_factory):
"""queue add route test"""
aqueue = queue_factory.build()
form = cl_operator.get(url_for('scheduler.queue_add_route')).form
form['name'] = aqueue.name
form['config'] = ''
form['group_size'] = aqueue.group_size
form['priority'] = aqueue.priority
response = form.submit()
assert response.status_code == HTTPStatus.OK
assert response.lxml.xpath('//div[@class="invalid-feedback" and contains(text(), "Invalid YAML")]')
form = response.form
form['config'] = "module: 'notexist'"
response = form.submit()
assert response.status_code == HTTPStatus.OK
assert response.lxml.xpath('//div[@class="invalid-feedback" and text()="Invalid module specified"]')
form = response.form
form['config'] = "module: 'dummy'\nadditionalKey: 'value'\n"
response = form.submit()
assert response.status_code == HTTPStatus.OK
assert response.lxml.xpath('//div[@class="invalid-feedback" and contains(text(), "Invalid config")]')
def test_queue_edit_route(cl_operator, queue):
"""queue edit route test"""
form = cl_operator.get(url_for('scheduler.queue_edit_route', queue_id=queue.id)).form
form['name'] = f'{form["name"].value} edited'
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
assert Queue.query.get(queue.id).name == form['name'].value
def test_queue_enqueue_route(cl_operator, queue, target_factory):
"""queue enqueue route test"""
atarget = target_factory.build(queue=queue)
form = cl_operator.get(url_for('scheduler.queue_enqueue_route', queue_id=queue.id)).form
form['targets'] = f'{atarget.target}\n \n '
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
tqueue = Queue.query.get(queue.id)
assert len(tqueue.targets) == 1
assert tqueue.targets[0].target == atarget.target
def test_queue_flush_route(cl_operator, target):
"""queue flush route test"""
queue_id = target.queue_id
form = cl_operator.get(url_for('scheduler.queue_flush_route', queue_id=target.queue_id)).form
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
assert not Queue.query.get(queue_id).targets
def test_queue_prune_route(cl_operator, job_completed):
"""queue flush route test"""
form = cl_operator.get(url_for('scheduler.queue_prune_route', queue_id=job_completed.queue_id)).form
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
assert not Job.query.filter(Job.queue_id == job_completed.queue_id).all()
assert not Path(job_completed.output_abspath).exists()
def test_queue_delete_route(cl_operator, job_completed):
"""queue delete route test"""
tqueue = Queue.query.get(job_completed.queue_id)
assert Path(tqueue.data_abspath)
form = cl_operator.get(url_for('scheduler.queue_delete_route', queue_id=tqueue.id)).form
response = form.submit()
assert response.status_code == HTTPStatus.FOUND
assert not Queue.query.get(tqueue.id)
assert not Path(tqueue.data_abspath).exists()
|
import tensorflow as tf
from voxelgan.loss import generator_loss, discriminator_loss
from voxelgan.optimizer import Optimizer
import numpy as np
class Latent(tf.keras.Model):
'''
The latent
'''
def __init__(self, d) -> None:
super(Latent, self).__init__(name='')
self.input_layer = tf.keras.layers.Input(shape=(d,))
self.normalize = tf.keras.layers.LayerNormalization()
def call(self, x):
# the Input layer above only documents the expected shape; normalize the
# incoming latent vector directly
x = self.normalize(x)
return x
class Mapping(tf.keras.Model):
'''
The Mapping block
'''
def __init__(self, w_dim, m_layers) -> None:
super(Mapping, self).__init__(name='')
self.m_layers = m_layers
self.dense = tf.keras.layers.Dense(w_dim)
self.leaky_relu = tf.keras.layers.LeakyReLU(alpha=0.2)
self.normalize = tf.keras.layers.LayerNormalization()
def call(self, x):
for i in range(self.m_layers):
x = self.dense(x)
x = self.leaky_relu(x)
return x
class NyaAdd(tf.keras.layers.Layer):
#connect the mapping network with the convolutional layers
def __init__(self) -> None:
super(NyaAdd, self).__init__(name='NyaAdd')
def call(self, x):
#TODO: this was an idea (just adding the two together entirely is too many params)
a = tf.keras.layers.Reshape(x[0].shape[-1])(x[1])
x = tf.keras.layers.concatenate([x[0], a])
return x
class Generator_Block(tf.keras.Model):
'''
The Generator block
'''
def __init__(self, sequence, filters) -> None:
super(Generator_Block, self).__init__(name='')
self.conv = tf.keras.layers.Conv3DTranspose(filters=filters, kernel_size=(3,3,3), strides=(1,1,1), padding='same')
self.leaky_relu = tf.keras.layers.LeakyReLU(alpha=0.3)
self.upsample = tf.keras.layers.UpSampling3D(size=(2, 2, 2))
self.batch_norm = tf.keras.layers.BatchNormalization()
def call(self, x):
x = self.upsample(x)
x = self.batch_norm(x)
x = self.conv(x)
x = self.leaky_relu(x)
return x
class RGB_Block(tf.keras.Model):
'''
The RGB block
'''
def __init__(self, sequence, filters) -> None:
super(RGB_Block, self).__init__(name='')
self.conv = tf.keras.layers.Conv3D(filters=filters, kernel_size=(3,3,3), strides=(1,1,1), padding='same')
self.leaky_relu = tf.keras.layers.LeakyReLU(alpha=0.3)
self.batch_norm = tf.keras.layers.BatchNormalization()
def call(self, x):
x = self.conv(x)
x = self.leaky_relu(x)
x = self.batch_norm(x)
return x
class Generator(tf.keras.Model):
def __init__(self, resolution, sequence, filters, z_dim, w_dim, mapping_layers) -> None:
super(Generator, self).__init__()
self.latent = Latent(w_dim)
self.mapping = Mapping(w_dim, mapping_layers)
self.generator_block = Generator_Block(sequence, filters)
self.rgbs = RGB_Block(sequence, filters)
self.nya = NyaAdd()
init = np.zeros((1,4,4,4,512))
self.const = tf.Variable(init, shape=(1,4,4,4,512), trainable=True)
self.noise = tf.keras.layers.GaussianNoise(0.1)
def call(self, x, training=False):
x = self.latent(x)
x = self.mapping(x)
z = self.const
for i in range(4): #TODO: Fix this
z = self.generator_block(z)
z = self.noise(z)
z = self.nya([z,x])
z = self.rgbs(z)
return z
class Discriminator_Block(tf.keras.Model):
'''
The Discriminator block
'''
def __init__(self, filters) -> None:
super(Discriminator_Block, self).__init__(name='')
self.conv = tf.keras.layers.Conv3D(filters=filters, kernel_size=(3,3,3), strides=(1,1,1), padding='same')
self.leaky_relu = tf.keras.layers.LeakyReLU(alpha=0.3)
self.downsample = tf.keras.layers.AveragePooling3D(pool_size=(2, 2, 2))
self.batch_norm = tf.keras.layers.BatchNormalization()
def call(self, x):
x = self.conv(x)
x = self.leaky_relu(x)
x = self.downsample(x)
x = self.batch_norm(x)
return x
class Discriminator(tf.keras.Model):
def __init__(self, resolution, sequence, filters) -> None:
super(Discriminator, self).__init__(name='')
self.input_layer = tf.keras.layers.Input(shape=(None, sequence, resolution, resolution, 3))
self.conv1 = tf.keras.layers.Conv3D(filters=filters, kernel_size=(3,3,3), strides=(1,1,1), padding='same')
self.discriminator_block = Discriminator_Block(filters)
self.downsample = tf.keras.layers.MaxPooling3D(pool_size=(2, 2, 2))
self.pool = tf.keras.layers.GlobalAveragePooling3D()
self.output_layer = tf.keras.layers.Dense(1, activation='sigmoid')
def call(self, x, training=False):
# the Input layer above only documents the expected shape; x is used directly
for i in range(4): #TODO: Fix this
x = self.discriminator_block(x)
x = self.pool(x)
x = self.output_layer(x)
return x
class GAN(tf.keras.Model):
def __init__(self,
generator,
discriminator,
generator_metrics=None,
discriminator_metrics=None,
generator_lr=0.0001,
discriminator_lr=0.0001,
**kwargs):
super().__init__(**kwargs)
generator.build(input_shape=(None, 512))
discriminator.build(input_shape=(None, 32, 512, 512, 3))
self.generator = generator          # Model.build() returns None, so keep the model itself
self.discriminator = discriminator
self.generator_optimizer = Optimizer(generator_lr, 0.0, 0.999)
self.discriminator_optimizer = Optimizer(discriminator_lr, 0.0, 0.999)
self.generator_metrics = generator_metrics
self.discriminator_metrics = discriminator_metrics
# the models were already built above with explicit input shapes
#sanity checks
# assert self.generator.output_shape == self.discriminator.input_shape
#print the summary of the model
self.generator.summary()
tf.keras.utils.plot_model(self.generator, to_file='generator.png', show_shapes=True)
self.discriminator.summary()
tf.keras.utils.plot_model(self.discriminator, to_file='discriminator.png', show_shapes=True)
def compile(self,
optimizer,
loss,
metrics=None,
**kwargs):
super().compile(optimizer, loss, metrics, **kwargs)
@tf.function
def train_step(self, images):
noise = tf.random.normal([tf.shape(images)[0], 512])  # one 512-dim latent vector per image in the batch
with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
generated_images = self.generator(noise, training=True)
real_output = self.discriminator(images, training=True)
fake_output = self.discriminator(generated_images, training=True)
gen_loss = generator_loss(fake_output)
disc_loss = discriminator_loss(real_output, fake_output)
gradients_of_generator = gen_tape.gradient(gen_loss, self.generator.trainable_variables)
gradients_of_discriminator = disc_tape.gradient(disc_loss, self.discriminator.trainable_variables)
self.generator_optimizer.apply_gradients(zip(gradients_of_generator, self.generator.trainable_variables))
self.discriminator_optimizer.apply_gradients(zip(gradients_of_discriminator, self.discriminator.trainable_variables))
@tf.function
def validation_step(self, images):
pass
|
from pycoin.coins.bcash.Tx import Tx as BcashTx
from pycoin.networks.bitcoinish import create_bitcoinish_network
network = create_bitcoinish_network(
symbol="BCH",
network_name="Bitcoin",
subnet_name="mainnet",
tx=BcashTx,
wif_prefix_hex="80",
sec_prefix="BCHSEC:",
address_prefix_hex="00",
pay_to_script_prefix_hex="05",
bip32_prv_prefix_hex="0488ADE4",
bip32_pub_prefix_hex="0488B21E",
magic_header_hex="F9BEB4D9",
)
|
#!/usr/bin/python3
class Config:
"""A class to define how to scan the JSON file"""
@classmethod
def get_configuration(cls):
configuration = [
{
"name":"experiments",
"path":["experiments"],
"type":"list",
"sheetName":"Metadata",
"transformHeader":"toLowerCase",
"headerRow":"1",
#"excludeSubset":["crid","cul_name"]
},
{
"name":"initial_conditions",
"type":"map",
"path":["experiments","initial_conditions"],
"sheetName":"Init_conditions",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"residue",
"type":"map",
"path":["experiments","initial_conditions"],
"sheetName":"Residue",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"tillage",
"type":"map",
"path":["experiments","initial_conditions"],
"sheetName":"Tillage",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"initial_conditions_soil",
"type":"list",
"path":["experiments","initial_conditions","soilLayer"],
"sheetName":"Init_conditions_Soil_layers",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"events",
"type":"list",
"path":["experiments","management","events"],
"eventsType":["Plantings", "Irrigations" , "Fertilizers", "Other", "Metadata"],
"eventsName":["planting", "irrigation" , "fertilizer", "other", "planting"],
"sheetConfig":{'Metadata':{'takeSubset':["id","crid","cul_name"]}},
"transformHeader":"toLowerCase",
"headerRow":"1"
},
# {
# "name":"plantingEventMetadata",
# "type":"list",
# "path":["experiments","management","events"],
# "eventsType":["Metadata"],
# "eventsName":["planting"],
# "transformHeader":"toLowerCase",
# "headerRow":"1",
# "takeSubset":["id","crid","cul_name"]
# },
{
"name":"weathers",
"path":["weathers"],
"type":"list",
"sheetName":"Weather_meta",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"dailyWeather",
"type":"list",
"path":["weathers","dailyWeather"],
"sheetName":"Weather_daily",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"soils",
"path":["soils"],
"type":"list",
"sheetName":"Soils_meta",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"soilLayer",
"type":"list",
"path":["soils","soilLayer"],
"sheetName":"Soil_layers",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{
"name":"summary",
"type":"map",
"path":["experiments","observed"],
"sheetPattern":"Summary",
"transformHeader":"toLowerCase",
"headerRow":"1"
},
{#always after summary, because it creates the map object
"name":"observations",
"type":"list",
"path":["experiments","observed","timeSeries"],
"sheetPattern":"Obs",
"transformHeader":"toLowerCase",
"headerRow":"1"
}
]
return configuration
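# --- Usage sketch (not part of the original module) ---
# Minimal example of iterating over the scan configuration:
if __name__ == '__main__':
    for section in Config.get_configuration():
        print(section['name'], section['type'], '->', '/'.join(section['path']))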
|
"""
a)
State Representation: (c1(liters), c2(liters))
Initial State: (0,0)
(Preconds behind the state change)
Operators: (x != 4) fillc1 -> (x, c2_l) => (4, c2_l)
(x != 3) fillc2 -> (c1_l, x) => (c1_l, 3)
(x != 0) emptyc1 -> (x, c2_l) => (0, c2_l)
(x != 0) emptyc2 -> (c1_l, x) => (c1_l, 0)
(c1_l != 0, c1_l + c2_l < 3) emptyc1pourc2 -> (c1_l, c2_l) => (0, c1_l + c2_l)
(c2_l != 3, c1_l + c2_l > 3) pourc1c2 -> (c1_l, c2_l) => (c1_l + c2_l - 3, 3)
(c2_l != 0, c1_l + c2_l < 4) emptyc2pourc1 -> (c1_l, c2_l) => (c1_l + c2_l, 0)
(c1_l != 4, c1_l + c2_l > 4) pourc2c1 -> (c1_l, c2_l) => (4, c1_l + c2_l - 4)
All operators have a cost of 1.
Objective Test: Check if the state is of the form (n, _) -> c1_l === n
"""
from algorithms import *;
C1_L_MAX = 7;
C2_L_MAX = 3;
class BucketNode:
def __init__(self, c1_l, c2_l, previousNode = None, depth = 0):
self.c1_l = c1_l;
self.c2_l = c2_l;
self.previousNode = previousNode;
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.c1_l == other.c1_l and self.c2_l == other.c2_l;
return False;
def __repr__(self):
return f'({self.c1_l}, {self.c2_l})';
def __str__(self):
return f'({self.c1_l}, {self.c2_l})';
def edgeNodes(self):
edgeNodesList = []
if( self.c1_l != C1_L_MAX ):
edgeNodesList.append(BucketNode(C1_L_MAX, self.c2_l, self));
if( self.c2_l != C2_L_MAX ):
edgeNodesList.append(BucketNode(self.c1_l, C2_L_MAX, self));
if( self.c1_l != 0 ):
edgeNodesList.append(BucketNode(0, self.c2_l, self));
if( self.c2_l != 0 ):
edgeNodesList.append(BucketNode(self.c1_l, 0, self));
if( self.c1_l != 0 and self.c1_l + self.c2_l < C2_L_MAX):
edgeNodesList.append(BucketNode(0, self.c1_l + self.c2_l, self));
if( self.c2_l != C2_L_MAX and self.c1_l + self.c2_l > C2_L_MAX):
edgeNodesList.append(BucketNode(self.c1_l + self.c2_l - C2_L_MAX, C2_L_MAX, self));
if( self.c2_l != 0 and self.c1_l + self.c2_l < C1_L_MAX):
edgeNodesList.append(BucketNode(self.c1_l + self.c2_l, 0, self));
if( self.c1_l != C1_L_MAX and self.c1_l + self.c2_l > C1_L_MAX):
edgeNodesList.append(BucketNode(C1_L_MAX, self.c1_l + self.c2_l - C1_L_MAX, self));
return edgeNodesList;
initial = BucketNode(0,0)
n = 5
def condition(node):
return node.c1_l == n;
print(bfs(initial, condition))
print(dfs(initial, condition))
print(it_deep(initial, condition))
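# Worked example (not part of the original solution): one sequence of states
# reaching c1 == 5 with the 7L / 3L jugs defined above is
#   (0,0) -> (7,0) -> (4,3) -> (4,0) -> (1,3) -> (1,0) -> (0,1) -> (7,1) -> (5,3)
# i.e. fill c1, pour c1 into c2, empty c2, pour, empty c2, pour, fill c1, pour.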
|
from django.conf import settings
from storages.backends.s3boto import S3BotoStorage
class S3MediaStorage(S3BotoStorage):
def __init__(self, *args, **kwargs):
self.bucket_name = settings.AWS_MEDIA_BUCKET_NAME
super(S3MediaStorage, self).__init__(*args, **kwargs)
|
from setuptools import setup
setup(
name='slackython',
version='1.0.0',
packages=[''],
url='https://github.com/Michotastico/slackython',
license='MIT',
author='Michel Llorens',
author_email='mllorens@dcc.uchile.cl',
description='Python library to send Slack messages using webhooks',
install_requires=['requests'],
)
|
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from enum import Enum
from typing import Optional
class Priority(Enum):
P0 = ('P0', 'Blocker')
P1 = ('P1', 'Critical')
P2 = ('P2', 'Major')
P3 = ('P3', 'Minor')
def __init__(self, level: str, jira_severity: str):
self.level = level
self.jira_severity = jira_severity
# JIRA SDK does not return priority beyond the name
@staticmethod
def from_jira_severity(jira_severity: str) -> 'Optional[Priority]':
jira_severity_to_priority = {
p.jira_severity: p for p in Priority
}
return jira_severity_to_priority.get(jira_severity)
@staticmethod
def from_level(level: str) -> 'Optional[Priority]':
level_to_priority = {
p.level: p for p in Priority
}
return level_to_priority.get(level)
class DataIssue:
def __init__(self,
issue_key: str,
title: str,
url: str,
status: str,
priority: Optional[Priority]) -> None:
self.issue_key = issue_key
self.title = title
self.url = url
self.status = status
self.priority = priority
def serialize(self) -> dict:
return {'issue_key': self.issue_key,
'title': self.title,
'url': self.url,
'status': self.status,
'priority_name': self.priority.jira_severity.lower() if self.priority else None,
'priority_display_name': self.priority.level if self.priority else None}
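# --- Usage sketch (not part of the original module) ---
# Minimal example of mapping a Jira severity back to a Priority and
# serializing a DataIssue; the issue key and URL are hypothetical.
if __name__ == '__main__':
    priority = Priority.from_jira_severity('Critical')   # -> Priority.P1
    issue = DataIssue(issue_key='DATA-123',
                      title='Stale partition',
                      url='https://jira.example.com/browse/DATA-123',
                      status='Open',
                      priority=priority)
    print(issue.serialize())
    # expected: priority_name == 'critical', priority_display_name == 'P1'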
|
# Connect Four, by Al Sweigart al@inventwithpython.com
import sys
assert sys.version_info.major == 3, 'Run this program on Python 3.'
EMPTY_SPACE = '.'
X_PLAYER = 'X'
O_PLAYER = 'O'
def getNewBoard():
# Note: The board is 7x6, represented by a dictionary with keys
# of (x, y) tuples from (0, 0) to (6, 5), and values of '.' (empty),
# 'X' (X player), or 'O' (O player)
board = {}
for y in range(6):
for x in range(7):
board[(x, y)] = EMPTY_SPACE
return board
def drawBoard(board):
tileChars = []
for y in range(6):
for x in range(7):
tileChars.append(board[(x, y)])
boardToDraw = """ 1234567
v v v v
+-------+
|{}{}{}{}{}{}{}|
|{}{}{}{}{}{}{}|
|{}{}{}{}{}{}{}|
|{}{}{}{}{}{}{}|
|{}{}{}{}{}{}{}|
|{}{}{}{}{}{}{}|
+-------+""".format(*tileChars)
print(boardToDraw)
def getPlayerMove(playerTile, board):
while True:
print('Player %s, enter your move (1-7) or "quit":' % playerTile)
move = input()
if move == 'quit':
sys.exit()
if move not in '1234567':
continue # Ask again for their move.
try:
move = int(move) - 1 # - 1 adjust for 0-based index.
except ValueError:
continue
for i in range(5, -1, -1):
if board[(move, i)] == EMPTY_SPACE:
return (move, i)
def isFull(board):
for y in range(6):
for x in range(7):
if board[(x, y)] == EMPTY_SPACE:
return False  # an empty space means the board is not full yet
return True
def isWinner(playerTile, board):
b = board # Using a shorter name instead of `board`.
# Go through the entire board, checking for four-in-a-row:
for y in range(6):
for x in range(4):
# Check for four-in-a-row going across:
if b[(x, y)] == b[(x + 1, y)] == b[(x + 2, y)] == b[(x + 3, y)] == playerTile:
return True
for y in range(3):
for x in range(7):
# Check for four-in-a-row going down:
if b[(x, y)] == b[(x, y + 1)] == b[(x, y + 2)] == b[(x, y + 3)] == playerTile:
return True
for y in range(3):
for x in range(4):
# Check for four-in-a-row going right-down diagonal:
if b[(x, y)] == b[(x + 1, y + 1)] == b[(x + 2, y + 2)] == b[(x + 3, y + 3)] == playerTile:
return True
# Check for four-in-a-row going left-down diagonal:
if b[(x + 3, y)] == b[(x + 2, y + 1)] == b[(x + 1, y + 2)] == b[(x, y + 3)] == playerTile:
return True
return False
def main():
# Set up a new game:
gameBoard = getNewBoard()
playerTurn = X_PLAYER
while True:
# Draw board and get player's move:
drawBoard(gameBoard)
playerMove = getPlayerMove(playerTurn, gameBoard)
gameBoard[playerMove] = playerTurn
# Check for a win or tie:
if isWinner(playerTurn, gameBoard):
drawBoard(gameBoard)
print('Player %s has won!' % (playerTurn))
break
elif isFull(gameBoard):
drawBoard(gameBoard)
print('There is a tie!')
break
# Switch turn to other player:
if playerTurn == X_PLAYER:
playerTurn = O_PLAYER
elif playerTurn == O_PLAYER:
playerTurn = X_PLAYER
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import defaultdict
import luigi
from luigi.contrib.ssh import RemoteContext, RemoteTarget
from luigi.mock import MockTarget
SSH_HOST = "some.accessible.host"
class CreateRemoteData(luigi.Task):
"""
Dump info on running processes on remote host.
Data is still stored on the remote host
"""
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on a remote server using SSH.
:return: the target output for this task.
:rtype: object (:py:class:`~luigi.target.Target`)
"""
return RemoteTarget(
"/tmp/stuff",
SSH_HOST
)
def run(self):
remote = RemoteContext(SSH_HOST)
print(remote.check_output([
"ps aux > {0}".format(self.output().path)
]))
class ProcessRemoteData(luigi.Task):
"""
Create a toplist of users based on how many running processes they have on a remote machine.
In this example the processed data is stored in a MockTarget.
"""
def requires(self):
"""
This task's dependencies:
* :py:class:`~.CreateRemoteData`
:return: object (:py:class:`luigi.task.Task`)
"""
return CreateRemoteData()
def run(self):
processes_per_user = defaultdict(int)
with self.input().open('r') as infile:
for line in infile:
username = line.split()[0]
processes_per_user[username] += 1
toplist = sorted(
processes_per_user.items(),
key=lambda x: x[1],
reverse=True
)
with self.output().open('w') as outfile:
for user, n_processes in toplist:
print(n_processes, user, file=outfile)
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will simulate the creation of a file in a filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`~luigi.target.Target`)
"""
return MockTarget("output", mirror_on_stderr=True)
|
import torch
from torch import nn, optim
import numpy as np
from util import np_to_cuda
class RandomWD(nn.Module):
def __init__(self, feat_dim, obs_space, act_space, sigma=1., gamma=1., lr=1e-3):
super(RandomWD, self).__init__()
self.feat_dim = feat_dim
self.rf_W = torch.randn((obs_space.shape[0] + act_space.shape[0], feat_dim), requires_grad=False, device="cuda")/sigma
self.rf_b = torch.rand((1, feat_dim), requires_grad=False, device="cuda") * 2 * np.pi
self.rf_scale = np.sqrt(2./ self.feat_dim)
self.beta_1 = nn.Linear(feat_dim, 1, bias=False)
self.beta_2 = nn.Linear(feat_dim, 1, bias=False)
self.gamma = gamma
self.optimizer = optim.Adam(self.parameters(), lr=lr)
def get_random_feature(self, obs, actions):  # the concatenated input has shape (batch_size, obs_dim + act_dim)
x = torch.cat([obs, actions], dim=1)
return torch.cos(torch.matmul(x, self.rf_W) + self.rf_b) * self.rf_scale, x
def wd(self, obs_x, actions_x, obs_y, actions_y):
x_feat, x_cat = self.get_random_feature(obs_x, actions_x)
y_feat, y_cat = self.get_random_feature(obs_y, actions_y)
x_score = self.beta_1(x_feat)
y_score = self.beta_2(y_feat)
l2_dist = torch.square(x_cat - y_cat).sum(dim=1, keepdim=True)
dist = x_score - y_score + self.gamma * torch.exp((x_score - y_score - l2_dist)/ self.gamma)
return torch.mean(dist)
def update(self, obs_x, actions_x, obs_y, actions_y):
dist = - self.wd(obs_x, actions_x, obs_y, actions_y) # maximize the dist when updating beta parameters
print(dist)
self.optimizer.zero_grad()
dist.backward()
self.optimizer.step()
class WDMetric(object):
def __init__(self, train_data, env, wd_model):
self.obs, self.actions = train_data
self.wd_model = wd_model
self.env = env
self.size = self.obs.shape[0]
def sample(self, batch_size):
idx = np.random.choice(np.arange(self.size), batch_size)
obs = self.obs[idx]
actions = self.actions[idx]
if self.env:
obs = self.env.normalize_obs(obs)
return np_to_cuda(obs), np_to_cuda(actions)
def __call__(self, obs_x, actions_x):
obs_y, actions_y = self.sample(obs_x.shape[0])
return self.wd_model.wd(obs_x, actions_x, obs_y, actions_y)
def update_wd(self, obs_x, actions_x):
obs_y, actions_y = self.sample(obs_x.shape[0])
self.wd_model.update(obs_x, actions_x, obs_y, actions_y)
|
import numpy as np
from regression.base import predict_output
def feature_derivative_ridge(
errors: np.ndarray, feature: np.ndarray, weight: np.ndarray, l2_penalty: float, feature_is_constant: bool = False
) -> np.ndarray:
"""
Compute the derivative of the regression cost function w.r.t L2 norm.
If feature_is_constant is True,
derivative is twice the dot product of errors and feature
Otherwise, derivative is twice the dot product plus 2*l2_penalty*weight
"""
l2_term_derivative = 0.0 if feature_is_constant else 2 * l2_penalty * weight
derivative = 2 * np.dot(errors, feature) + l2_term_derivative
return derivative
def ridge_regression_gradient_descent(
feature_matrix,
output,
initial_weights,
step_size: float = 1e-2,
l2_penalty: float = 1e-2,
max_iterations=1e3,
tolerance: float = 1e-3,
):
weights = np.array(initial_weights) # make sure it's a numpy array
iteration = 0
converged = False
while not converged and iteration < max_iterations:
iteration += 1
predictions = predict_output(feature_matrix, weights)
errors = predictions - output
# fixme: vectorize this loop
for feature_idx in range(len(weights)):
# Recall that feature_matrix[:,i] is the feature column associated with weights[i]
# (Remember: when i=0, you are computing the derivative of the constant!)
derivative = feature_derivative_ridge(
errors=errors,
feature=feature_matrix[:, feature_idx],
weight=weights[feature_idx],
l2_penalty=l2_penalty,
feature_is_constant=(feature_idx == 0),
)
weights[feature_idx] -= step_size * derivative
gradient_magnitude = np.sqrt(np.power(-2 * errors, 2).sum())
converged = gradient_magnitude < tolerance
return weights
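# --- Usage sketch (not part of the original module) ---
# Hedged example on a tiny synthetic problem. It assumes `predict_output` from
# regression.base is the usual dot product feature_matrix @ weights.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    x = rng.normal(size=(100, 1))
    feature_matrix = np.hstack([np.ones((100, 1)), x])  # column 0 is the constant feature
    output = 3.0 + 2.0 * x[:, 0] + rng.normal(scale=0.1, size=100)
    weights = ridge_regression_gradient_descent(
        feature_matrix,
        output,
        initial_weights=np.zeros(2),
        step_size=1e-3,
        l2_penalty=0.0,
        max_iterations=10_000,
    )
    print(weights)  # should be close to [3.0, 2.0] since no regularization is applied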
|
from django.db import models, migrations
import lti.utils
class Migration(migrations.Migration):
dependencies = [
('lti', '0007_auto_20170410_0552'),
]
operations = [
migrations.AlterField(
model_name='lticonsumer',
name='consumer_key',
field=models.CharField(default=lti.utils.key_secret_generator, unique=True, max_length=64, db_index=True),
),
migrations.AlterField(
model_name='lticonsumer',
name='consumer_secret',
field=models.CharField(default=lti.utils.key_secret_generator, unique=True, max_length=64),
),
]
|
from datetime import datetime
from typing import (
Any,
Dict,
Iterator,
Optional,
Text,
)
from pynamodb.attributes import (
ListAttribute,
NumberAttribute,
MapAttribute,
UnicodeAttribute,
UTCDateTimeAttribute,
)
from pynamodb.models import Model
from pynamodb.indexes import GlobalSecondaryIndex, AllProjection
from eventbot import settings
class AttendeeMap(MapAttribute):
attendee = UnicodeAttribute()
class EventStatusIndex(GlobalSecondaryIndex):
class Meta:
index_name = settings.DYNAMODB_GSI_EVENT_STATUS
read_capacity_units = 10
write_capacity_units = 10
if settings.DYNAMODB_URL:
host = settings.DYNAMODB_URL
projection = AllProjection()
event_id = UnicodeAttribute(hash_key=True)
status = UnicodeAttribute(range_key=True)  # matches the string status attribute on Event
class Event(Model):
STATUS_OPEN = 'open'
class Meta:
table_name = settings.DYNAMODB_TABLE_EVENT
if settings.DYNAMODB_URL:
host = settings.DYNAMODB_URL
event_id = UnicodeAttribute(hash_key=True)
name = UnicodeAttribute()
description = UnicodeAttribute()
created_date = UTCDateTimeAttribute()
modified_date = UTCDateTimeAttribute()
start_date = UTCDateTimeAttribute(null=True)
end_date = UTCDateTimeAttribute(null=True)
status = UnicodeAttribute(default=STATUS_OPEN)
creator = UnicodeAttribute()
attendees = ListAttribute(of=AttendeeMap, null=True)
extra_attendees = NumberAttribute(default=0)
cost = NumberAttribute(default=0)
event_status_index = EventStatusIndex()
@classmethod
def get_all_paged(
cls,
next_page: Optional[str] = None,
limit: Optional[int] = None
) -> Iterator['Event']:
last_evaluated_key = cls.format_last_evaluated_key(next_page)
return cls.scan(limit=limit, last_evaluated_key=last_evaluated_key)
@classmethod
def format_last_evaluated_key(
cls,
event_id: Optional[str]
) -> Optional[dict]:
if event_id is None:
return None
return {
'event_id': {'S': event_id}
}
@property
def total_attendees(self) -> int:
if self.attendees is None:
attendees = 0
else:
attendees = len(self.attendees)
return attendees + self.extra_attendees
@property
def cost_per_attendee(self) -> float:
return self.cost / max(self.total_attendees, 1)
def user_is_attendee(self, user_id: Text) -> bool:
if self.attendees is None:
return False
for attendee in self.attendees:
if attendee.attendee == user_id:
return True
return False
def add_attendee(self, user_id: Text) -> None:
attendee = AttendeeMap()
attendee.attendee = user_id
if self.attendees:
self.attendees.append(attendee)
else:
self.attendees = [attendee]
def remove_attendee(self, user_id: Text) -> None:
if self.attendees is None:
return
# TODO: there must be a more efficient way to do this, right?
attendees = []
for attendee in self.attendees:
if attendee.attendee != user_id:
attendees.append(attendee)
self.attendees = attendees
def save(self, *args, **kwargs) -> Dict[str, Any]:
if not self.created_date:
self.created_date = datetime.utcnow()
self.modified_date = datetime.utcnow()
return super(Event, self).save(*args, **kwargs)
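# --- Hedged usage sketch (not part of the original module) ---
# Shows the attendee helpers on the Event model above. The attribute values are
# hypothetical; a reachable DynamoDB endpoint (settings.DYNAMODB_URL) and an existing
# table would be required before calling save(), which is why it is not called here.
if __name__ == "__main__":
    event = Event(
        event_id="evt-1",
        name="Team lunch",
        description="Monthly team lunch",
        creator="alice",
        cost=120,
    )
    event.add_attendee("alice")
    event.add_attendee("bob")
    event.extra_attendees = 1
    print(event.total_attendees)          # 3
    print(event.cost_per_attendee)        # 40.0
    event.remove_attendee("bob")
    print(event.user_is_attendee("bob"))  # False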
|
# Copyright 2021 Peng Cheng Laboratory (http://www.szpclab.com/) and FedLab Authors (smilelab.group)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import torch
import torch.distributed as dist
from .package import Package
from . import HEADER_DATA_TYPE_IDX, HEADER_SIZE, HEADER_RECEIVER_RANK_IDX, HEADER_SLICE_SIZE_IDX, dtype_flab2torch, dtype_torch2flab
class PackageProcessor(object):
"""Provide more flexible distributed tensor communication functions based on
:func:`torch.distributed.send` and :func:`torch.distributed.recv`.
:class:`PackageProcessor` defines the details of point-to-point package communication.
EVERYTHING is :class:`torch.Tensor` in FedLab.
"""
@staticmethod
def send_package(package, dst):
"""Three-segment tensor communication pattern based on ``torch.distributed``
Pattern is shown as follows:
1.1 sender: send a header tensor containing ``slice_size`` to receiver
1.2 receiver: receive the header, and get the value of ``slice_size`` and create a buffer for incoming slices of content
        2.1 sender: send a list of slices indicating the size of each content tensor.
        2.2 receiver: receive the slices list.
        3.1 sender: send a content tensor composed of a list of flattened tensors.
        3.2 receiver: receive the content tensor and parse it back into individual tensors using the slices list
"""
def send_header(header, dst):
header[HEADER_RECEIVER_RANK_IDX] = dst
dist.send(header, dst=dst)
def send_slices(slices, dst):
np_slices = np.array(slices, dtype=np.int32)
tensor_slices = torch.from_numpy(np_slices)
dist.send(tensor_slices, dst=dst)
def send_content(content, dst):
dist.send(content, dst=dst)
# body
if package.dtype is not None:
package.header[HEADER_DATA_TYPE_IDX] = dtype_torch2flab(package.dtype)
        # send the header first
        send_header(header=package.header, dst=dst)
        # if the package has content, send the remaining parts
if package.header[HEADER_SLICE_SIZE_IDX] > 0:
send_slices(slices=package.slices, dst=dst)
send_content(content=package.content, dst=dst)
@staticmethod
def recv_package(src=None):
"""Three-segment tensor communication pattern based on ``torch.distributed``
Pattern is shown as follows:
1.1 sender: send a header tensor containing ``slice_size`` to receiver
1.2 receiver: receive the header, and get the value of ``slice_size`` and create a buffer for incoming slices of content
        2.1 sender: send a list of slices indicating the size of each content tensor.
        2.2 receiver: receive the slices list.
        3.1 sender: send a content tensor composed of a list of flattened tensors.
        3.2 receiver: receive the content tensor and parse it back into individual tensors using the slices list
"""
def recv_header(src=src, parse=True):
buffer = torch.zeros(size=(HEADER_SIZE, ), dtype=torch.int32)
dist.recv(buffer, src=src)
if parse is True:
return Package.parse_header(buffer)
else:
return buffer
def recv_slices(slices_size, src):
buffer_slices = torch.zeros(size=(slices_size, ),
dtype=torch.int32)
dist.recv(buffer_slices, src=src)
slices = [slc.item() for slc in buffer_slices]
return slices
def recv_content(slices, data_type, src):
content_size = sum(slices)
dtype = dtype_flab2torch(data_type)
buffer = torch.zeros(size=(content_size, ), dtype=dtype)
dist.recv(buffer, src=src)
return Package.parse_content(slices, buffer)
# body
sender_rank, _, slices_size, message_code, data_type = recv_header(
src=src)
if slices_size > 0:
slices = recv_slices(slices_size=slices_size, src=sender_rank)
content = recv_content(slices, data_type, src=sender_rank)
else:
content = None
return sender_rank, message_code, content
|
# Unpack a generator expression into a tuple literal; equivalent to tuple(x ** 2 for x in range(5))
result = (*(x**2 for x in range(5)),)
print(result)  # (0, 1, 4, 9, 16)
|
"""
ServerService tests.
To run a single test, modify the main code to::
singletest = unittest.TestSuite()
singletest.addTest(TESTCASE("<TEST METHOD NAME>"))
unittest.TextTestRunner().run(singletest)
| Copyright 2017-2022, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
import asyncio
from collections import defaultdict
import json
import os
import posixpath
import time
import unittest
import urllib.parse
import urllib.request
from bson import ObjectId
import numpy as np
from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
import eta.core.serial as etas
import eta.core.utils as etau
import fiftyone as fo
import fiftyone.core.state as fos
from fiftyone.server.json_util import FiftyOneJSONEncoder
import fiftyone.server.main as fosm
class TestCase(AsyncHTTPTestCase):
def get_app(self):
return fosm.Application()
def fetch_and_parse(self, path):
response = self.fetch(path)
return etas.load_json(response.body)
class RouteTests(TestCase):
def test_fiftyone(self):
response = self.fetch_and_parse("/fiftyone")
self.assertEqual(response, fosm.FiftyOneHandler.get_response())
def test_filepath(self):
data = {"hello": "world"}
with etau.TempDir() as tmp:
path = os.path.join(tmp, "data%20.json")
etas.write_json(data, path)
response = self.fetch_and_parse(
"/filepath/%s" % urllib.parse.quote(path, safe="")
)
self.assertEqual(response, data)
def test_reactivate(self):
handle_id = "handle_id"
response = self.fetch_and_parse("/reactivate?handleId=%s" % handle_id)
self.assertEqual(
response, fosm.ReactivateHandler.get_response(handle_id)
)
def test_stages(self):
response = self.fetch_and_parse("/stages")
self.assertEqual(response, fosm.StagesHandler.get_response())
class StateTests(TestCase):
image_url = "https://user-images.githubusercontent.com/3719547/74191434-8fe4f500-4c21-11ea-8d73-555edfce0854.png"
test_one = os.path.abspath("./test_one.png")
test_two = os.path.abspath("./test_two.png")
dataset = fo.Dataset()
sample1 = fo.Sample(filepath=test_one)
sample2 = fo.Sample(filepath=test_two)
@classmethod
def setUpClass(cls):
urllib.request.urlretrieve(cls.image_url, cls.test_one)
etau.copy_file(cls.test_one, cls.test_two)
cls.dataset.add_sample(cls.sample1)
cls.dataset.add_sample(cls.sample2)
cls.sample1["scalar"] = 1
cls.sample1["label"] = fo.Classification(label="test")
cls.sample1.tags.append("tag")
cls.sample1["floats"] = [
0.5,
float("nan"),
float("inf"),
float("-inf"),
]
cls.sample1.save()
def setUp(self):
super().setUp()
self.__app_client = self.get_ws()
self.gather_events({self.app: 1})
self.send(self.app, "as_app", {})
self.__session_client = self.get_ws()
self.gather_events({self.session: 1})
def get_ws(self):
websocket_connect(self.get_socket_path(), callback=self.stop)
return self.wait().result()
@property
def app(self):
return self.__app_client
@property
def session(self):
return self.__session_client
@property
def enc(self):
return FiftyOneJSONEncoder
def assertNormalizedEqual(self, one, two):
one = self.enc.loads(self.enc.dumps(one))
two = self.enc.loads(self.enc.dumps(two))
self.assertEqual(one, two)
def get_socket_path(self):
return "ws://localhost:%d/state" % self.get_http_port()
    def send(self, client, event, message=None):
        # avoid a mutable default argument
        payload = {"type": event}
        payload.update(message or {})
        client.write_message(FiftyOneJSONEncoder.dumps(payload))
    def gather_events(self, num_events):
        results = defaultdict(list)
        for client, count in num_events.items():
            for _ in range(count):
                client.read_message(self.stop)
                message = self.wait().result()
                message = FiftyOneJSONEncoder.loads(message)
                results[client].append(message)
        return results
if __name__ == "__main__":
fo.config.show_progress_bars = False
unittest.main(verbosity=2)
|
def add_argument(parser):
# Client settings
parser.add_argument('--host', default='127.0.0.1')
parser.add_argument('--port', type=int, default=2000)
parser.add_argument('--tm_port', type=int, default=8000)
parser.add_argument('--timeout', type=float, default=5.0)
def from_args(args):
import carla
client = carla.Client(args.host, args.port)
client.set_timeout(args.timeout)
traffic_manager = client.get_trafficmanager(args.tm_port)
return client, traffic_manager
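# --- Hedged usage sketch (not part of the original module) ---
# Shows how the two helpers above are meant to be combined: register the client
# options on an argparse parser, then build the CARLA client and traffic manager.
# Requires the `carla` package and a CARLA simulator listening on the given host/port.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    add_argument(parser)
    args = parser.parse_args()
    client, traffic_manager = from_args(args)
    print(client.get_server_version())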
|
from django.http import HttpRequest, HttpResponse
from main.util import render_template
TEMPLATE = "tasks/lesson03/task301.html"
def handler(request: HttpRequest) -> HttpResponse:
name = request.GET.get("name")
context = {
"input_name": name,
"greeting_name": name or "anonymous",
}
document = render_template(TEMPLATE, context)
response = HttpResponse(content=document)
return response
if __name__ == '__main__':
x = render_template(TEMPLATE, {'input_name': 1, 'greeting_name': 2})
print(x)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
from django.utils.html import escape
from sentry.models import Activity, User, Event
ICON = 'https://sentry-hipchat-ac-assets.s3.amazonaws.com/sentry-icon.png'
ICON2X = 'https://sentry-hipchat-ac-assets.s3.amazonaws.com/sentry-icon.png'
ICON_SM = 'https://sentry-hipchat-ac-assets.s3.amazonaws.com/favicon.ico'
COLORS = {
'ALERT': 'red',
'ERROR': 'red',
'WARNING': 'yellow',
'INFO': 'green',
'DEBUG': 'purple',
}
def _format_user(user):
if user is None:
name = 'system'
elif user.name:
name = user.name
else:
parts = user.username.split('@')
if len(parts) == 1:
name = user.username
else:
name = parts[0].lower()
return '<em>%s</em>' % escape(name)
def _make_event_card(
group,
event,
title=None,
subtitle=None,
event_target=False,
new=False,
description=None,
compact=False
):
project = event.project
link = group.get_absolute_url()
if event_target:
link = '%s/events/%s/' % (link.rstrip('/'), event.id)
event_title = '%sSentry %s Issue' % (new and 'New ' or '', group.get_level_display().title(), )
if title is None:
title = escape(event_title)
attributes = []
for key, value in event.tags:
if key.startswith('sentry:'):
key = key.split(':', 1)[1]
attr = {'label': key, 'value': {'label': value}}
if key == 'level':
attr_color = {
'critical': 'lozenge-error',
'fatal': 'lozenge-error',
'error': 'lozenge-error',
'warning': 'lozenge-current',
'debug': 'lozenge-moved',
}.get(value.lower())
if attr_color is not None:
attr['value']['style'] = attr_color
elif key == 'release':
attr['value']['style'] = 'lozenge-success'
attributes.append(attr)
fold_description = '%s. Issue has been seen %s time%s. First seen %s%s.' % (
group.get_level_display().title() + ' in Sentry', group.times_seen,
group.times_seen != 1 and 's' or '', group.first_seen.strftime('%Y-%m-%d'),
(group.first_release and ' (%s)' % group.first_release.short_version or ''),
)
if compact and description is None:
description = ''
if description is None:
description = '<a href="%(link)s"><em>%(err)s</em></a>' % {
'link': escape(link),
'err': escape(event.error()),
}
if description:
description = '<p>%s</p>' % description
extra = ''
if not compact:
extra = '''
<p>
<strong>Project:</strong>
<a href="%(project_link)s">%(project)s</a>
<strong>Culprit:</strong>
%(culprit)s
''' % {
'project': escape(project.name),
'project_link': escape(project.get_absolute_url()),
'culprit': escape(event.culprit),
}
else:
attributes = [
{
'label': 'culprit',
'value': {
'label': event.culprit
},
}, {
'label': 'title',
'value': {
'label': event.error()
},
}
] + attributes
return {
'style': 'application',
'url': link,
'id': 'sentry/%s' % event.id,
'title': event_title,
'description': fold_description,
'images': {},
'icon': {
'url': ICON,
'url@2x': ICON2X,
},
'metadata': {
'event': six.text_type(event.id),
'sentry_message_type': 'event',
},
'attributes': attributes,
'activity': {
'html': '''
<p>
<a href="%(link)s">
<img src="%(icon_sm)s" style="width: 16px; height: 16px">
<strong>%(title)s</strong></a>
%(subtitle)s
%(description)s
%(extra)s
''' % {
'title': title,
'subtitle': subtitle or '',
'link': escape(link),
'icon': ICON,
'icon_sm': ICON_SM,
'description': description,
'extra': extra,
}
},
}
def make_event_notification(group, event, tenant, new=True, event_target=False):
project = event.project
level = group.get_level_display().upper()
link = group.get_absolute_url()
if event_target:
link = '%s/events/%s/' % (link.rstrip('/'), event.id)
color = COLORS.get(level, 'purple')
# Legacy message
message = ('[%(level)s]%(project_name)s %(message)s '
'[<a href="%(link)s">view</a>]') % {
'level': escape(level),
'project_name': '<strong>%s</strong>' % escape(project.name),
'message': escape(event.error()),
'link': escape(link),
}
return {
'color': color,
'message': message,
'format': 'html',
'card': _make_event_card(group, event, new=new, event_target=event_target),
'notify': True,
}
def make_activity_notification(activity, tenant):
if activity.type in (Activity.UNASSIGNED, Activity.ASSIGNED):
if activity.type == Activity.ASSIGNED:
assignee_id = activity.data.get('assignee')
else:
assignee_id = None
if assignee_id is None:
target_user = None
else:
target_user = User.objects.get(pk=assignee_id)
if target_user is None:
message = '%s unassigned a user from the event' % (_format_user(activity.user), )
elif activity.user is not None and target_user.id == activity.user.id:
message = '%s assigned themselves to the event' % (_format_user(activity.user), )
else:
message = '%s assigned %s to the event' % (
_format_user(activity.user), _format_user(target_user)
)
elif activity.type == Activity.NOTE:
message = '%s left a note on the event' % (_format_user(activity.user), )
else:
return
event = activity.group.get_latest_event()
Event.objects.bind_nodes([event], 'data')
project = activity.project
link = activity.group.get_absolute_url()
legacy_message = (
'%(project_name)s %(message)s (%(event)s, %(culprit)s) '
'[<a href="%(link)s">view</a>]'
) % {
'project_name': '<strong>%s</strong>' % escape(project.name),
'event': escape(event.error()),
'message': message,
'culprit': escape(event.culprit),
'link': escape(link),
}
    return {
        'color': 'yellow',
        'message': legacy_message,
        'card': _make_event_card(
            activity.group,
            event,
            title=message,
            subtitle='%s, %s' % (event.error(), event.culprit),
            compact=True
        ),
        'format': 'html',
        'notify': False,
    }
def make_subscription_update_notification(new=None, removed=None):
bits = ['The project subscriptions for this room were updated. ']
def _proj(project):
return '<strong>%s</strong>' % escape(project.name)
if new:
if len(new) == 1:
bits.append('New project: %s. ' % _proj(new[0]))
else:
bits.append('New projects: %s. ' % ', '.join(_proj(x) for x in new))
if removed:
if len(removed) == 1:
bits.append('Removed project: %s' % _proj(removed[0]))
else:
bits.append('Removed projects: %s' % ', '.join(_proj(x) for x in removed))
return {
'message': ' '.join(bits).strip(),
'color': 'green',
'notify': False,
}
def make_generic_notification(text, color=None, notify=False):
return {
'message': escape(text),
'color': color,
'notify': notify,
}
|
#!/usr/bin/env python3
# Copyright 2021 Yoshi Kadokawa
# See LICENSE file for licensing details.
#
# Learn more at: https://juju.is/docs/sdk
"""Charm the service.
Refer to the following post for a quick-start guide that will help you
develop a new k8s charm using the Operator Framework:
https://discourse.charmhub.io/t/4208
"""
import logging
from charms.grafana_k8s.v0.grafana_source import GrafanaSourceConsumer
from ops.charm import CharmBase
from ops.framework import StoredState
from ops.main import main
from ops.model import ActiveStatus
logger = logging.getLogger(__name__)
CONFIG_PATH = "/etc/loki/local-config.yaml"
class LokiCharm(CharmBase):
"""Charm to run Loki on Kubernetes."""
_stored = StoredState()
def __init__(self, *args):
super().__init__(*args)
self._name = "loki"
self._port = 3100
self._stored.set_default(things=[])
# Allows Grafana to add Loki data-source
self.grafana_source_consumer = GrafanaSourceConsumer(
charm=self,
name="grafana-source",
refresh_event=self.on.loki_pebble_ready,
source_type=self._name,
source_port=str(self._port),
)
# Event handlers
self.framework.observe(self.on.loki_pebble_ready, self._on_loki_pebble_ready)
def _on_loki_pebble_ready(self, event):
"""Define and start a workload using the Pebble API."""
        # Get a reference to the container attribute on the PebbleReadyEvent
container = event.workload
# Define an initial Pebble layer configuration
pebble_layer = {
"summary": "Loki layer",
"description": "pebble config layer for Loki",
"services": {
"loki": {
"override": "replace",
"summary": "loki",
"command": "/usr/bin/loki -target=all -config.file={}".format(CONFIG_PATH),
"startup": "enabled",
}
},
}
        # Add the initial Pebble config layer using the Pebble API
container.add_layer("loki", pebble_layer, combine=True)
# Autostart any services that were defined with startup: enabled
container.autostart()
# Learn more about statuses in the SDK docs:
# https://juju.is/docs/sdk/constructs#heading--statuses
self.unit.status = ActiveStatus("loki started")
if __name__ == "__main__":
main(LokiCharm)
|
from tkinter import *
from tkinter.filedialog import askopenfilename
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tkinter.messagebox import *
rows = 0  # total number of records
page_num = 0  # total number of pages
page_idx = 0  # current page index
page_show = 25  # records per page
arr = np.ones(6)
stock_df = pd.DataFrame(arr)  # DataFrame data object
# column labels (kept in Chinese because they are also used as DataFrame keys):
# date, open, high, close, low, volume, price change, pct change,
# 5/10/20-day mean price, 5/10/20-day mean volume
names = ['日期','开盘价','最高价','收盘价','最低价','成交量','价格变动','涨跌幅','5日均价','10日均价','20日均价','5日均量','10日均量','20日均量']
# set a Chinese font, otherwise Chinese characters are rendered as boxes
plt.rcParams["font.sans-serif"] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
def showList():  # display data in the list box
    list_items = []  # contents of the list box
    rows = stock_df.shape[0]
    start_idx = page_idx * page_show  # index of the first record on this page
    end_idx = (page_idx + 1) * page_show  # index past the last record on this page
    show = " " + " ".join([f"{i:8s}" for i in names])  # header row string for the list box
    list_items.append(show)
    for idx in range(start_idx, end_idx + 1):
        if idx < rows:
            rowdata = stock_df.iloc[idx, ::]
            lst = list(rowdata)
            # lst_date = str(stock_df.index[idx])
            lst = [f"{x:11.2f} " for x in lst]
            # lst.insert(0, f"{lst_date[:10]:12s}")
            show = " ".join(lst)  # one data row string for the list box
            list_items.append(show)
    list_box_var.set(list_items)
def dataprocess():  # clean the DataFrame data
    pass
def btn_open():
    global stock_df, page_num, rows
    filename = askopenfilename()
    stock_df = pd.read_excel(filename, sheet_name='2018-2020', names=names, index_col=0, skiprows=[0])
    # print(stock_df.head())
    # dataprocess()
    rows = stock_df.shape[0]
    page_num = (rows + page_show - 1) // page_show  # total pages, rounded up
    showList()
def btn_next():  # next page
    global page_idx
    if page_idx >= page_num - 1:
        showinfo(title='Last page', message='This is already the last page.')
        return
    page_idx += 1
    showList()
def btn_previous():  # previous page
    pass
def btn_fistpage():  # first page
    pass
def btn_lastpage():  # last page
    pass
def btn_page():  # jump to a specific page
    pass
def btn_price():
    start_idx = page_idx * page_show
    end_idx = (page_idx + 1) * page_show
    x = np.arange(start_idx, end_idx + 1)
    y = stock_df['收盘价'].values[start_idx:end_idx + 1]
    plt.plot(x, y,
             color='#3589FF',        # line color
             linestyle=':',          # line style
             linewidth=3,            # line width
             marker='o',             # marker style
             markerfacecolor='r',    # marker color
             markersize=10,          # marker size
             alpha=0.7,              # transparency
             label='Closing price'   # legend label
             )
    # set the title
    plt.title('Guotai Junan closing price line chart')
    index_name = stock_df.index[start_idx:end_idx + 1]
    index_name = [x.strftime('%Y-%m-%d') for x in index_name]
    plt.xticks(x, index_name)
    plt.gcf().autofmt_xdate()  # auto-rotate the date tick labels
    plt.show()
def btn_change():
    start_idx = page_idx * page_show
    end_idx = (page_idx + 1) * page_show
    x = np.arange(0, end_idx + 1 - start_idx)
    y = stock_df['成交量'].values[start_idx:end_idx + 1]
    # bar width
    bar_width = 0.5
    plt.bar(x, y, width=bar_width, alpha=0.7, label='Volume', color='b')
    # show a value label above each bar
    for a, b in zip(x, y):
        plt.text(a, b, '%.0f' % b, ha='left', va='center', fontsize=7)
    # set the title
    plt.title('Guotai Junan volume bar chart')
    index_name = stock_df.index[start_idx:end_idx + 1]
    index_name = [x.strftime('%Y-%m-%d') for x in index_name]
    plt.xticks(x + bar_width / 2, index_name)
    plt.gcf().autofmt_xdate()  # auto-rotate the date tick labels
    plt.show()
root = Tk()
root.title("Guotai Junan Stock Price Analysis")
width = 1000
height = 600
# center the window on the screen
screenwidth = root.winfo_screenwidth()
screenheight = root.winfo_screenheight()
alignstr = '%dx%d+%d+%d' % (width, height, (screenwidth - width) / 2, (screenheight - height) / 2)
root.geometry(alignstr)
top = Frame(root)
bottom = Frame(root)
top.pack(side=TOP)
bottom.pack(side=TOP,expand=YES,fill=BOTH)
top.config(bg = 'green')
bottom.config(bg = 'yellow')
btn1 = Button(top, text='Open', command=btn_open)
btn2 = Button(top, text='Price line chart', command=btn_price)
btn3 = Button(top, text='Volume chart', command=btn_change)
btn4 = Button(top, text='Next page', command=btn_next)
# define a text variable for the list box contents
list_box_var = StringVar()
# set the initial value of the text variable (header row only)
show = " " + " ".join([f"{i:8s}" for i in names])
list_box_var.set([show])
list_box = Listbox(bottom, listvar=list_box_var)
btn1.pack(side=LEFT)
btn2.pack(side=LEFT)
btn3.pack(side=LEFT)
btn4.pack(side=LEFT)
list_box.pack(side=TOP, expand=YES, fill=BOTH)
# list_items = ["a", "b", "c", "d"]
# for item in list_items:
#     list_box.insert("end", item)  # insert at the end
root.mainloop()
|
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^$', views.view_content, {'template': 'multicol'}),
url(r'^watch/$', views.view_content, {'template': 'multicol'}),
url(r'^listen/$', views.view_content, {'template': 'multicol'}),
url(r'^read/$', views.view_content, {'template': 'multicol'}),
url(r'^rider/$', views.view_content, {'template': 'multicol'}),
url(r'^contact/$', views.view_content, {'template': 'multicol'}),
url(r'^reviews/$', views.view_content, {'template': 'multicol'}),
url(r'^press/$', views.view_content, {'template': 'multicol'}),
url(r'^404/$', views.view_static, {'template': '404'}),
)
|
import youtokentome
import codecs
import os
import torch
from random import shuffle
from itertools import groupby
from torch.nn.utils.rnn import pad_sequence
class SequenceLoader(object):
"""
An iterator for loading batches of data into the transformer model.
For training:
Each batch contains tokens_in_batch target language tokens (approximately),
target language sequences of the same length to minimize padding and therefore memory usage,
source language sequences of very similar (if not the same) lengths to minimize padding and therefore memory usage.
Batches are also shuffled.
For validation and testing:
Each batch contains just a single source-target pair, in the same order as in the files from which they were read.
"""
def __init__(self, data_folder, source_suffix, target_suffix, split, tokens_in_batch):
"""
:param data_folder: folder containing the source and target language data files
:param source_suffix: the filename suffix for the source language files
:param target_suffix: the filename suffix for the target language files
:param split: train, or val, or test?
:param tokens_in_batch: the number of target language tokens in each batch
"""
self.tokens_in_batch = tokens_in_batch
self.source_suffix = source_suffix
self.target_suffix = target_suffix
assert split.lower() in {"train", "val",
"test"}, "'split' must be one of 'train', 'val', 'test'! (case-insensitive)"
self.split = split.lower()
# Is this for training?
self.for_training = self.split == "train"
# Load BPE model
self.bpe_model = youtokentome.BPE(model=os.path.join(data_folder, "bpe.model"))
# Load data
with codecs.open(os.path.join(data_folder, ".".join([split, source_suffix])), "r", encoding="utf-8") as f:
source_data = f.read().split("\n")[:-1]
with codecs.open(os.path.join(data_folder, ".".join([split, target_suffix])), "r", encoding="utf-8") as f:
target_data = f.read().split("\n")[:-1]
        assert len(source_data) == len(target_data), "There are different numbers of source and target sequences!"
source_lengths = [len(s) for s in self.bpe_model.encode(source_data, bos=False, eos=False)]
target_lengths = [len(t) for t in self.bpe_model.encode(target_data, bos=True,
eos=True)] # target language sequences have <BOS> and <EOS> tokens
self.data = list(zip(source_data, target_data, source_lengths, target_lengths))
# If for training, pre-sort by target lengths - required for itertools.groupby() later
if self.for_training:
self.data.sort(key=lambda x: x[3])
# Create batches
self.create_batches()
def create_batches(self):
"""
Prepares batches for one epoch.
"""
# If training
if self.for_training:
# Group or chunk based on target sequence lengths
chunks = [list(g) for _, g in groupby(self.data, key=lambda x: x[3])]
# Create batches, each with the same target sequence length
self.all_batches = list()
for chunk in chunks:
# Sort inside chunk by source sequence lengths, so that a batch would also have similar source sequence lengths
chunk.sort(key=lambda x: x[2])
# How many sequences in each batch? Divide expected batch size (i.e. tokens) by target sequence length in this chunk
seqs_per_batch = self.tokens_in_batch // chunk[0][3]
# Split chunk into batches
self.all_batches.extend([chunk[i: i + seqs_per_batch] for i in range(0, len(chunk), seqs_per_batch)])
# Shuffle batches
shuffle(self.all_batches)
self.n_batches = len(self.all_batches)
self.current_batch = -1
else:
            # Simply return one pair at a time
self.all_batches = [[d] for d in self.data]
self.n_batches = len(self.all_batches)
self.current_batch = -1
def __iter__(self):
"""
Iterators require this method defined.
"""
return self
def __next__(self):
"""
Iterators require this method defined.
:returns: the next batch, containing:
source language sequences, a tensor of size (N, encoder_sequence_pad_length)
target language sequences, a tensor of size (N, decoder_sequence_pad_length)
true source language lengths, a tensor of size (N)
true target language lengths, typically the same as decoder_sequence_pad_length as these sequences are bucketed by length, a tensor of size (N)
"""
# Update current batch index
self.current_batch += 1
try:
source_data, target_data, source_lengths, target_lengths = zip(*self.all_batches[self.current_batch])
# Stop iteration once all batches are iterated through
except IndexError:
raise StopIteration
# Tokenize using BPE model to word IDs
source_data = self.bpe_model.encode(source_data, output_type=youtokentome.OutputType.ID, bos=False,
eos=False)
target_data = self.bpe_model.encode(target_data, output_type=youtokentome.OutputType.ID, bos=True,
eos=True)
        # Convert source and target sequences to padded tensors
source_data = pad_sequence(sequences=[torch.LongTensor(s) for s in source_data],
batch_first=True,
padding_value=self.bpe_model.subword_to_id('<PAD>'))
target_data = pad_sequence(sequences=[torch.LongTensor(t) for t in target_data],
batch_first=True,
padding_value=self.bpe_model.subword_to_id('<PAD>'))
# Convert lengths to tensors
source_lengths = torch.LongTensor(source_lengths)
target_lengths = torch.LongTensor(target_lengths)
return source_data, target_data, source_lengths, target_lengths
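# --- Hedged usage sketch (not part of the original module) ---
# Demonstrates iterating over training batches with the SequenceLoader above. The data
# folder, file suffixes, and batch size below are hypothetical; the folder is expected
# to contain a "bpe.model" file plus "train.en" and "train.de" text files.
if __name__ == "__main__":
    loader = SequenceLoader(
        data_folder="data",
        source_suffix="en",
        target_suffix="de",
        split="train",
        tokens_in_batch=2000,
    )
    for source_seqs, target_seqs, source_lens, target_lens in loader:
        print(source_seqs.shape, target_seqs.shape)
        break
    loader.create_batches()  # re-chunk and re-shuffle before the next epoch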
|
# http://multivax.com/last_question.html
import logging
import sys
from os.path import basename, exists, join, splitext
from adles.args import parse_cli_args
from adles.interfaces import PlatformInterface
from adles.parser import check_syntax, parse_yaml
from adles.utils import handle_keyboard_interrupt, setup_logging
def run_cli():
"""Parse command line interface arguments and run ADLES."""
args = parse_cli_args()
exit_status = main(args=args)
sys.exit(exit_status)
@handle_keyboard_interrupt
def main(args) -> int:
"""
:param args:
:return: The exit status of the program
"""
# Configure logging, including console colors and syslog server
colors = (False if args.no_color else True)
syslog = (args.syslog, 514) if args.syslog else None
setup_logging(filename='adles.log', colors=colors,
console_verbose=args.verbose, server=syslog)
# Set the sub-command to execute
command = args.command
# Just validate syntax, no building of environment
if command == 'validate':
if check_syntax(args.spec, args.validate_type) is None:
return 1
# Build an environment using a specification
elif command in ['deploy', 'masters', 'cleanup', 'package']:
override = None
if command == 'package': # Package specification
package_spec = check_syntax(args.spec, spec_type='package')
if package_spec is None: # Ensure it passed the check
return 1
# Extract exercise spec filename
spec_filename = package_spec["contents"]["environment"]
if "infrastructure" in package_spec["contents"]:
# Extract infra spec filename
override = package_spec["contents"]["infrastructure"]
else:
spec_filename = args.spec
# Validate specification syntax before proceeding
spec = check_syntax(spec_filename)
if spec is None: # Ensure it passed the check
return 1
if "name" not in spec["metadata"]:
# Default name is the filename of the specification
spec["metadata"]["name"] = splitext(basename(args.spec))[0]
# Override the infra file defined in exercise/package specification
if args.infra:
infra_file = args.infra
if not exists(infra_file):
logging.error("Could not find infra file '%s' "
"to override with", infra_file)
else:
override = infra_file
if override is not None: # Override infra file in exercise config
logging.info("Overriding infrastructure config "
"file with '%s'", override)
spec["metadata"]["infra-file"] = override
# Instantiate the Interface and call functions for the specified phase
interface = PlatformInterface(infra=parse_yaml(
spec["metadata"]["infra-file"]), spec=spec)
if command == 'masters':
interface.create_masters()
logging.info("Finished Master creation for %s",
spec["metadata"]["name"])
elif command == 'deploy':
interface.deploy_environment()
logging.info("Finished deployment of %s",
spec["metadata"]["name"])
elif command == 'cleanup':
if args.cleanup_type == 'masters':
interface.cleanup_masters(args.cleanup_nets)
elif args.cleanup_type == 'environment':
interface.cleanup_environment(args.cleanup_nets)
logging.info("Finished %s cleanup of %s", args.cleanup_type,
spec["metadata"]["name"])
else:
logging.error("INTERNAL ERROR -- Invalid command: %s", command)
return 1
# Show examples on commandline
elif args.list_examples or args.print_example:
from pkg_resources import Requirement, resource_filename
from os import listdir
example_dir = resource_filename(Requirement.parse("ADLES"), "examples")
# Filter non-YAML files from the listdir output
examples = [x[:-5] for x in listdir(example_dir) if ".yaml" in x]
if args.list_examples: # List all examples and their metadata
print("Example scenarios that can be printed " # noqa: T001
"using --print-example <name>")
# Print header for the output
print("Name".ljust(25) + "Version".ljust(10) + "Description") # noqa: T001
for example in examples:
if "infra" in example:
continue
metadata = parse_yaml(
join(example_dir, example + ".yaml"))["metadata"]
name = str(example).ljust(25)
ver = str(metadata["version"]).ljust(10)
desc = str(metadata["description"])
print(name + ver + desc) # noqa: T001
else:
example = args.print_example
if example in examples:
# Print out the complete content of a named example
with open(join(example_dir, example + ".yaml")) as file:
print(file.read()) # noqa: T001
else:
logging.error("Invalid example: %s", example)
return 1
# Show specifications on commandline
elif args.print_spec:
from pkg_resources import Requirement, resource_filename
spec = args.print_spec
specs = ["exercise", "package", "infrastructure"]
# Find spec in package installation directory and print it
if spec in specs:
# Extract specifications from their package installation location
filename = resource_filename(Requirement.parse("ADLES"),
join("specifications",
spec + "-specification.yaml"))
with open(filename) as file:
print(file.read()) # noqa: T001
else:
logging.error("Invalid specification: %s", spec)
return 1
# Handle invalid arguments
else:
logging.error("Invalid arguments. Argument dump:\n%s", str(vars(args)))
return 1
# Finished successfully
return 0
|
# Sum the odd multiples of 3 between 1 and 500
somaI = 0
for c in range(1, 501, 2):
    if c % 3 == 0:
        somaI += c
print(somaI)
|
# This file is referred and derived from project NetworkX
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
import os
import numpy as np
import pytest
from networkx.classes.tests.test_graph import TestEdgeSubgraph as _TestEdgeSubgraph
from networkx.classes.tests.test_graph import TestGraph as _TestGraph
from networkx.testing.utils import almost_equal
from networkx.testing.utils import assert_graphs_equal
from graphscope import nx
@pytest.mark.usefixtures("graphscope_session")
class TestGraph(_TestGraph):
def setup_method(self):
self.Graph = nx.Graph
self.k3nodes = [0, 1, 2]
self.k3edges = [(0, 1), (0, 2), (1, 2)]
self.K3 = self.Graph()
self.K3.update(self.k3edges, self.k3nodes)
def graphs_equal(self, H, G):
assert G.adj == H.adj
assert G.nodes == H.nodes
assert G.graph == H.graph
assert G.name == H.name
assert G.adj == H.adj
if G.is_directed() and H.is_directed():
assert G.pred == H.pred
assert G.succ == H.succ
def shallow_copy_graph_attr(self, H, G):
assert G.graph["foo"] == H.graph["foo"]
G.graph["foo"] = "new_foo"
assert G.graph["foo"] == H.graph["foo"]
def shallow_copy_node_attr(self, H, G):
assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
G.nodes[0]["foo"] = "new_foo"
assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
def shallow_copy_edge_attr(self, H, G):
assert G[1][2]["foo"] == H[1][2]["foo"]
G[1][2]["foo"] = "new_foo"
assert G[1][2]["foo"] == H[1][2]["foo"]
def deepcopy_node_attr(self, H, G):
assert G.nodes[0]["foo"] == H.nodes[0]["foo"]
attr = G.nodes[0]["foo"]
G.nodes[0]["foo"] = attr.append(1)
assert G.nodes[0]["foo"] != H.nodes[0]["foo"]
def deepcopy_edge_attr(self, H, G):
assert G[1][2]["foo"] == H[1][2]["foo"]
attr = G[1][2]["foo"]
attr.append(1)
G[1][2]["foo"] = attr
assert G[1][2]["foo"] != H[1][2]["foo"]
def test_memory_leak(self):
pass
def test_pickle(self):
pass
def test_to_undirected(self):
G = self.K3
self.add_attributes(G)
H = G.to_undirected()
self.is_deepcopy(H, G)
def test_to_directed(self):
G = self.K3
self.add_attributes(G)
H = G.to_directed()
self.is_deepcopy(H, G)
def test_graph_chain(self):
# subgraph now is fallback with networkx, not view
G = self.Graph([(0, 1), (1, 2)])
DG = G.to_directed(as_view=True)
RDG = DG.reverse(copy=False)
assert G is DG._graph
assert DG is RDG._graph
def test_copy(self):
G = self.Graph()
G.add_node(0)
G.add_edge(1, 2)
self.add_attributes(G)
# deep copy
H = G.copy()
self.graphs_equal(H, G)
def test_class_copy(self):
G = self.Graph()
G.add_node(0)
G.add_edge(1, 2)
self.add_attributes(G)
# deep copy
H = G.__class__(G)
self.graphs_equal(H, G)
def test_subgraph(self):
# subgraph now is true subgraph, not view
G = self.K3
self.add_attributes(G)
H = G.subgraph([0, 1, 2, 5])
self.graphs_equal(H, G)
H = G.subgraph([0])
assert H.adj == {0: {}}
H = G.subgraph([])
assert H.adj == {}
assert G.adj != {}
def test_node_type(self):
G = self.Graph()
nodes = [3, "n", 3.14, True, False]
edges = [(3, "n", 1), ("n", 3.14, 3.14), (True, False, True)]
G.add_nodes_from(nodes)
G.add_weighted_edges_from(edges)
nlist = list(G.nodes)
assert len(nlist) == 5
for n in nlist:
assert n in [False, 3, "n", 3.14, True]
assert G[3]["n"]["weight"] == 1
assert G["n"][3.14]["weight"] == 3.14
assert G[True][False]["weight"] == True
def test_selfloops(self):
G = self.Graph()
G.add_edge(0, 0)
assert G.number_of_edges() == 1
G.add_edge(0, 1)
assert G.number_of_selfloops() == 1
G.add_edge(2, 2)
assert G.number_of_edges() == 3
assert G.number_of_selfloops() == 2
SG = G.subgraph([0, 1])
assert SG.number_of_edges() == 2
assert SG.number_of_selfloops() == 1
ESG = G.edge_subgraph([(0, 0), (2, 2)])
assert ESG.number_of_edges() == 2
assert ESG.number_of_selfloops() == 2
H = G.copy()
assert H.number_of_selfloops() == 2
Gv = G.copy(as_view=True)
assert Gv.number_of_selfloops() == 2
G.remove_node(0)
assert G.number_of_selfloops() == 1
G.remove_edge(2, 2)
assert G.number_of_selfloops() == 0
def test_update(self):
        # specify both edges and nodes
G = self.K3.copy()
G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})])
nlist = [
(0, {}),
(1, {}),
(2, {}),
(3, {}),
(4, {"size": 2}),
(5, {}),
(6, {}),
(7, {}),
]
assert sorted(G.nodes.data()) == nlist
if G.is_directed():
elist = [
(0, 1, {}),
(0, 2, {}),
(1, 0, {}),
(1, 2, {}),
(2, 0, {}),
(2, 1, {}),
(4, 5, {}),
(6, 7, {"weight": 2}),
]
else:
if os.environ.get("DEPLOYMENT", None) == "standalone":
elist = [
(0, 1, {}),
(0, 2, {}),
(1, 2, {}),
(4, 5, {}),
(6, 7, {"weight": 2}),
]
else: # num_workers=2
elist = [
(0, 1, {}),
(0, 2, {}),
(2, 1, {}),
(4, 5, {}),
(6, 7, {"weight": 2}),
]
assert sorted(G.edges.data()) == elist
assert G.graph == {}
# no keywords -- order is edges, nodes
G = self.K3.copy()
G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})])
assert sorted(G.nodes.data()) == nlist
assert sorted(G.edges.data()) == elist
assert G.graph == {}
# update using only a graph
G = self.Graph()
G.graph["foo"] = "bar"
G.add_node(2, data=4)
G.add_edge(0, 1, weight=0.5)
GG = G.copy()
H = self.Graph()
GG.update(H)
assert_graphs_equal(G, GG)
H.update(G)
assert_graphs_equal(H, G)
# update nodes only
H = self.Graph()
H.update(nodes=[3, 4])
assert H.nodes ^ {3, 4} == set()
assert H.size() == 0
# update edges only
H = self.Graph()
H.update(edges=[(3, 4)])
if H.is_directed():
assert sorted(H.edges.data()) == [(3, 4, {})]
else:
assert sorted(H.edges.data()) in ([(3, 4, {})], [(4, 3, {})])
assert H.size() == 1
# No inputs -> exception
with pytest.raises(nx.NetworkXError):
nx.Graph().update()
def test_duplicated_modification(self):
G = nx.complete_graph(5, create_using=self.Graph)
ret = nx.builtin.closeness_centrality(G)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
# test add node
G.add_node(5)
ret = nx.builtin.closeness_centrality(G)
assert ret == {0: 0.8, 1: 0.8, 2: 0.8, 3: 0.8, 4: 0.8, 5: 0.0}
# test add edge
G.add_edge(4, 5)
ret = nx.builtin.closeness_centrality(G)
expect1 = {
0: 0.8,
1: 0.8,
2: 0.8,
3: 0.8,
4: 0.8,
5: 0.555556,
}
expect2 = {
0: 0.833333,
1: 0.833333,
2: 0.833333,
3: 0.833333,
4: 1.0,
5: 0.555556,
}
if G.is_directed():
for n in ret:
assert almost_equal(ret[n], expect1[n], places=4)
else:
for n in ret:
assert almost_equal(ret[n], expect2[n], places=4)
# test remove edge
G.remove_edge(4, 5)
ret = nx.builtin.closeness_centrality(G)
assert ret == {0: 0.8, 1: 0.8, 2: 0.8, 3: 0.8, 4: 0.8, 5: 0.0}
# test remove node
G.remove_node(5)
ret = nx.builtin.closeness_centrality(G)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
# test update
for e in G.edges:
G.edges[e]["weight"] = 2
ret = nx.builtin.closeness_centrality(G, weight="weight")
assert ret == {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0.5}
# test copy
G2 = G.copy()
ret = nx.builtin.closeness_centrality(G2)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
# test reverse
if G.is_directed():
rG = G.reverse()
ret = nx.builtin.closeness_centrality(rG)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
# to_directed/to_undirected
if G.is_directed():
udG = G.to_undirected()
ret = nx.builtin.closeness_centrality(udG)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
else:
dG = G.to_directed()
ret = nx.builtin.closeness_centrality(dG)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
# sub_graph
sG = G.subgraph([0, 1, 2])
ret = nx.builtin.closeness_centrality(sG)
assert ret == {0: 1.0, 1: 1.0, 2: 1.0}
esG = G.edge_subgraph([(0, 1), (1, 2), (2, 3)])
ret = nx.builtin.closeness_centrality(esG)
expect1 = {
0: 0.000,
1: 0.333333,
2: 0.444444,
3: 0.500,
}
expect2 = {
0: 0.5,
1: 0.75,
2: 0.75,
3: 0.5,
}
if G.is_directed():
for n in ret:
assert almost_equal(ret[n], expect1[n], places=4)
else:
for n in ret:
assert almost_equal(ret[n], expect2[n], places=4)
@pytest.mark.usefixtures("graphscope_session")
class TestEdgeSubgraph(_TestEdgeSubgraph):
def setup_method(self):
# Create a path graph on five nodes.
G = nx.path_graph(5)
# Add some node, edge, and graph attributes.
for i in range(5):
G.nodes[i]["name"] = f"node{i}"
G.edges[0, 1]["name"] = "edge01"
G.edges[3, 4]["name"] = "edge34"
G.graph["name"] = "graph"
# Get the subgraph induced by the first and last edges.
self.G = G
self.H = G.edge_subgraph([(0, 1), (3, 4)])
def test_correct_edges(self):
"""Tests that the subgraph has the correct edges."""
assert sorted(self.H.edges(data="name")) in (
[(1, 0, "edge01"), (4, 3, "edge34")],
[(0, 1, "edge01"), (4, 3, "edge34")],
)
def test_remove_node(self):
"""Tests that removing a node in the original graph does not
affect the nodes of the subgraph, is a true subgraph.
"""
self.G.remove_node(0)
assert [0, 1, 3, 4] == sorted(self.H.nodes())
def test_node_attr_dict(self):
for v in self.H:
assert self.G.nodes[v] == self.H.nodes[v]
self.G.nodes[0]["name"] = "foo"
assert self.G.nodes[0] != self.H.nodes[0]
self.H.nodes[1]["name"] = "bar"
assert self.G.nodes[1] != self.H.nodes[1]
def test_edge_attr_dict(self):
for u, v in self.H.edges():
assert self.G.edges[u, v] == self.H.edges[u, v]
self.G.edges[0, 1]["name"] = "foo"
assert self.G.edges[0, 1]["name"] != self.H.edges[0, 1]["name"]
self.H.edges[3, 4]["name"] = "bar"
assert self.G.edges[3, 4]["name"] != self.H.edges[3, 4]["name"]
def test_graph_attr_dict(self):
assert self.G.graph == self.H.graph
|
# Copyright 2019 The Cloud Robotics Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Send and receive opaque messages on ROS topics."""
import rospy
class Publisher(object):
"""Publishes messages without needing to know the type.
The publisher is lazily created when the user publishes the first message.
The user must specify the type and md5sum of the message when publishing.
"""
def __init__(self, topic, queue_size, latch=False):
self.topic = topic
self.queue_size = queue_size
self.latch = latch
self._publisher = None
self._data_class = None
def _create_data_class(self, type, md5sum):
class DataClass(rospy.AnyMsg):
_type = type
_md5sum = md5sum
def __init__(self, buff):
self._buff = buff
return DataClass
def publish(self, type, md5sum, buff):
if (self._data_class is None or self._data_class._type != type or
self._data_class._md5sum != md5sum):
self._data_class = self._create_data_class(type, md5sum)
self._publisher = rospy.Publisher(
self.topic, self._data_class, queue_size=self.queue_size,
latch=self.latch)
self._publisher.publish(self._data_class(buff))
# TODO(rodrigoq): provide a way to discover type/md5sum before the first
# message, so that the remote subscriber has a chance to connect before the
# first message is published. Otherwise, the remote subscriber doesn't receive
# the first message. (or maybe it should latch the first message?)
class Subscriber(rospy.Subscriber):
"""Subscribes to messages without needing to know the type.
The callback receives an instance of rospy.AnyMsg. The type and md5sum can be
retrieved from the _connection_header dict in the message.
"""
def __init__(self, topic, callback):
rospy.Subscriber.__init__(self, topic, rospy.AnyMsg, callback)
if __name__ == '__main__':
# Test code: forward messages between two topics.
rospy.init_node('forward', anonymous=True)
pub = Publisher('chatter2', 10)
def callback(message):
pub.publish(
message._connection_header['type'],
message._connection_header['md5sum'],
message._buff)
sub = Subscriber('chatter', callback)
rospy.spin()
|
from flask_restplus import Namespace,Resource,fields,reqparse
import Crypto
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto import Random
from core import Configs
import pickle
import json
from flask import send_file
import ast
api = Namespace('public_key_exchange_api',description = "api for exchanging public key")
@api.route('/pke')
class Pub_Key(Resource):
def get(self):
return send_file(Configs.PUBLIC_KEY_FILE)
@api.route('/message')
@api.doc(params={'msg': 'A message'})
class Msg(Resource):
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('msg')
args = parser.parse_args()
decryptor = PKCS1_OAEP.new(Configs.PUBLIC_KEY_OBJECT)
decrypted = decryptor.decrypt(ast.literal_eval(str(args.msg)))
return decrypted
@api.route('/connect')
@api.doc(params={'packet':'Encrypted Connection Package with nonce and SessionKey'})
class Connect(Resource):
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('packet')
args = parser.parse_args()
decryptor = PKCS1_OAEP.new(Configs.PUBLIC_KEY_OBJECT)
decrypted = decryptor.decrypt(ast.literal_eval(str(args.packet))).decode('utf-8')
Session_dict =ast.literal_eval(decrypted)
nonce = int(Session_dict['nonce'])
Configs.CLIENT_SESSION_BOOK[Session_dict['SessionKey']] = 0
return nonce + 1
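# --- Hedged client-side sketch (not part of the original module) ---
# Illustrates how a client might drive the /pke and /connect endpoints above: fetch the
# server's key via /pke, encrypt a {'nonce', 'SessionKey'} packet with PKCS1_OAEP, and
# expect nonce + 1 back. The base URL and session values are hypothetical, and the key
# file is assumed to be in a format RSA.import_key understands.
if __name__ == "__main__":
    import requests
    from Crypto.PublicKey import RSA as ClientRSA
    from Crypto.Cipher import PKCS1_OAEP as ClientPKCS1_OAEP

    base_url = "http://localhost:5000/public_key_exchange_api"  # assumed mount point
    server_key = ClientRSA.import_key(requests.get(f"{base_url}/pke").content)
    packet = str({'nonce': 41, 'SessionKey': 'abc123'}).encode('utf-8')
    encrypted = ClientPKCS1_OAEP.new(server_key).encrypt(packet)
    # mirror the server's ast.literal_eval(str(...)) handling by sending the bytes repr
    reply = requests.get(f"{base_url}/connect", params={'packet': str(encrypted)})
    print(reply.json())  # expected: 42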
|
from arm.logicnode.arm_nodes import *
class SeparateVectorNode(ArmLogicTreeNode):
"""Splits the given vector into XYZ values."""
bl_idname = 'LNSeparateVectorNode'
bl_label = 'Separate XYZ'
arm_section = 'vector'
arm_version = 1
def init(self, context):
super(SeparateVectorNode, self).init(context)
self.add_input('NodeSocketVector', 'Vector')
self.add_output('NodeSocketFloat', 'X')
self.add_output('NodeSocketFloat', 'Y')
self.add_output('NodeSocketFloat', 'Z')
|
import string
import sys
import uuid
from dataclasses import dataclass
from random import choices
from typing import (
Any,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
Union,
overload,
)
import sqlalchemy
from pydantic import BaseModel, create_model
from pydantic.typing import ForwardRef, evaluate_forwardref
import ormar # noqa I101
from ormar.exceptions import ModelDefinitionError, RelationshipInstanceError
from ormar.fields.base import BaseField
if TYPE_CHECKING: # pragma no cover
from ormar.models import Model, NewBaseModel, T
from ormar.fields import ManyToManyField
if sys.version_info < (3, 7):
ToType = Type["T"]
else:
ToType = Union[Type["T"], "ForwardRef"]
def create_dummy_instance(fk: Type["T"], pk: Any = None) -> "T":
"""
    Ormar never returns raw data.
    So if you have a related field with a populated value,
    it will construct a Model instance out of it for you.
    Creates a "fake" instance of the passed Model from a pk value.
    The instantiated Model has only the pk value filled.
    To achieve this the __pk_only__ flag has to be passed, as it skips validation.
    If nested related Models are required they are set with -1 as the pk value.
:param fk: class of the related Model to which instance should be constructed
:type fk: Model class
:param pk: value of the primary_key column
:type pk: Any
:return: Model instance populated with only pk
:rtype: Model
"""
init_dict = {
**{fk.Meta.pkname: pk or -1, "__pk_only__": True},
**{
k: create_dummy_instance(v.to)
for k, v in fk.Meta.model_fields.items()
if v.is_relation and not v.nullable and not v.virtual
},
}
return fk(**init_dict)
def create_dummy_model(
base_model: Type["T"],
pk_field: Union[BaseField, "ForeignKeyField", "ManyToManyField"],
) -> Type["BaseModel"]:
"""
Used to construct a dummy pydantic model for type hints and pydantic validation.
    Populates only the pk field and sets it to the desired type.
:param base_model: class of target dummy model
:type base_model: Model class
:param pk_field: ormar Field to be set on pydantic Model
:type pk_field: Union[BaseField, "ForeignKeyField", "ManyToManyField"]
:return: constructed dummy model
:rtype: pydantic.BaseModel
"""
alias = (
"".join(choices(string.ascii_uppercase, k=6)) # + uuid.uuid4().hex[:4]
).lower()
fields = {f"{pk_field.name}": (pk_field.__type__, None)}
dummy_model = create_model( # type: ignore
f"PkOnly{base_model.get_name(lower=False)}{alias}",
__module__=base_model.__module__,
**fields, # type: ignore
)
return dummy_model
def populate_fk_params_based_on_to_model(
to: Type["T"], nullable: bool, onupdate: str = None, ondelete: str = None
) -> Tuple[Any, List, Any]:
"""
Based on target to model to which relation leads to populates the type of the
pydantic field to use, ForeignKey constraint and type of the target column field.
:param to: target related ormar Model
:type to: Model class
:param nullable: marks field as optional/ required
:type nullable: bool
:param onupdate: parameter passed to sqlalchemy.ForeignKey.
How to treat child rows on update of parent (the one where FK is defined) model.
:type onupdate: str
:param ondelete: parameter passed to sqlalchemy.ForeignKey.
How to treat child rows on delete of parent (the one where FK is defined) model.
:type ondelete: str
:return: tuple with target pydantic type, list of fk constraints and target col type
:rtype: Tuple[Any, List, Any]
"""
fk_string = to.Meta.tablename + "." + to.get_column_alias(to.Meta.pkname)
to_field = to.Meta.model_fields[to.Meta.pkname]
pk_only_model = create_dummy_model(to, to_field)
__type__ = (
Union[to_field.__type__, to, pk_only_model]
if not nullable
else Optional[Union[to_field.__type__, to, pk_only_model]]
)
constraints = [
ForeignKeyConstraint(
reference=fk_string, ondelete=ondelete, onupdate=onupdate, name=None
)
]
column_type = to_field.column_type
return __type__, constraints, column_type
def validate_not_allowed_fields(kwargs: Dict) -> None:
"""
    Verifies that disallowed parameters are not set on relation fields.
    Usually they would be ignored later anyway, but this explicitly
    notifies the user that they are not allowed/supported.
:raises ModelDefinitionError: if any forbidden field is set
:param kwargs: dict of kwargs to verify passed to relation field
:type kwargs: Dict
"""
default = kwargs.pop("default", None)
encrypt_secret = kwargs.pop("encrypt_secret", None)
encrypt_backend = kwargs.pop("encrypt_backend", None)
encrypt_custom_backend = kwargs.pop("encrypt_custom_backend", None)
overwrite_pydantic_type = kwargs.pop("overwrite_pydantic_type", None)
not_supported = [
default,
encrypt_secret,
encrypt_backend,
encrypt_custom_backend,
overwrite_pydantic_type,
]
if any(x is not None for x in not_supported):
raise ModelDefinitionError(
f"Argument {next((x for x in not_supported if x is not None))} "
f"is not supported "
"on relation fields!"
)
@dataclass
class ForeignKeyConstraint:
"""
Internal container to store ForeignKey definitions used later
to produce sqlalchemy.ForeignKeys
"""
reference: Union[str, sqlalchemy.Column]
name: Optional[str]
ondelete: Optional[str]
onupdate: Optional[str]
@overload
def ForeignKey(to: Type["T"], **kwargs: Any) -> "T": # pragma: no cover
...
@overload
def ForeignKey(to: ForwardRef, **kwargs: Any) -> "Model": # pragma: no cover
...
def ForeignKey( # type: ignore # noqa CFQ002
to: "ToType",
*,
name: str = None,
unique: bool = False,
nullable: bool = True,
related_name: str = None,
virtual: bool = False,
onupdate: str = None,
ondelete: str = None,
**kwargs: Any,
) -> "T":
"""
    Despite the name, it's a function that returns a constructed ForeignKeyField.
    This function is actually used in model declarations (as ormar.ForeignKey(ToModel)).
    Accepts a number of relation settings as well as all BaseField parameters.
:param to: target related ormar Model
:type to: Model class
:param name: name of the database field - later called alias
:type name: str
:param unique: parameter passed to sqlalchemy.ForeignKey, unique flag
:type unique: bool
:param nullable: marks field as optional/ required
:type nullable: bool
:param related_name: name of reversed FK relation populated for you on to model
:type related_name: str
:param virtual: marks if relation is virtual.
It is for reversed FK and auto generated FK on through model in Many2Many relations.
:type virtual: bool
:param onupdate: parameter passed to sqlalchemy.ForeignKey.
How to treat child rows on update of parent (the one where FK is defined) model.
:type onupdate: str
:param ondelete: parameter passed to sqlalchemy.ForeignKey.
How to treat child rows on delete of parent (the one where FK is defined) model.
:type ondelete: str
:param kwargs: all other args to be populated by BaseField
:type kwargs: Any
:return: ormar ForeignKeyField with relation to selected model
:rtype: ForeignKeyField
"""
owner = kwargs.pop("owner", None)
self_reference = kwargs.pop("self_reference", False)
orders_by = kwargs.pop("orders_by", None)
related_orders_by = kwargs.pop("related_orders_by", None)
skip_reverse = kwargs.pop("skip_reverse", False)
skip_field = kwargs.pop("skip_field", False)
sql_nullable = kwargs.pop("sql_nullable", None)
sql_nullable = nullable if sql_nullable is None else sql_nullable
validate_not_allowed_fields(kwargs)
if to.__class__ == ForwardRef:
__type__ = to if not nullable else Optional[to]
constraints: List = []
column_type = None
else:
__type__, constraints, column_type = populate_fk_params_based_on_to_model(
to=to, # type: ignore
nullable=nullable,
ondelete=ondelete,
onupdate=onupdate,
)
namespace = dict(
__type__=__type__,
to=to,
through=None,
alias=name,
name=kwargs.pop("real_name", None),
nullable=nullable,
sql_nullable=sql_nullable,
constraints=constraints,
unique=unique,
column_type=column_type,
related_name=related_name,
virtual=virtual,
primary_key=False,
index=False,
pydantic_only=False,
default=None,
server_default=None,
onupdate=onupdate,
ondelete=ondelete,
owner=owner,
self_reference=self_reference,
is_relation=True,
orders_by=orders_by,
related_orders_by=related_orders_by,
skip_reverse=skip_reverse,
skip_field=skip_field,
)
Field = type("ForeignKey", (ForeignKeyField, BaseField), {})
return Field(**namespace)
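# --- Hedged usage sketch (not part of the ormar source; for illustration only) ---
# Shows a typical model declaration using the ForeignKey function above. The database
# URL, model names, and fields are hypothetical; the `databases` package is required in
# addition to sqlalchemy.
if __name__ == "__main__":  # pragma: no cover
    import databases

    database = databases.Database("sqlite:///example.db")  # hypothetical URL
    metadata = sqlalchemy.MetaData()

    class Author(ormar.Model):
        class Meta:
            database = database
            metadata = metadata

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)

    class Book(ormar.Model):
        class Meta:
            database = database
            metadata = metadata

        id: int = ormar.Integer(primary_key=True)
        title: str = ormar.String(max_length=200)
        # the reverse relation `author.books` is created automatically via related_name
        author: Optional[Author] = ormar.ForeignKey(
            Author, related_name="books", ondelete="CASCADE", nullable=True
        )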
class ForeignKeyField(BaseField):
"""
Actual class returned from ForeignKey function call and stored in model_fields.
"""
def __init__(self, **kwargs: Any) -> None:
if TYPE_CHECKING: # pragma: no cover
self.__type__: type
self.to: Type["Model"]
self.ondelete: str = kwargs.pop("ondelete", None)
self.onupdate: str = kwargs.pop("onupdate", None)
super().__init__(**kwargs)
def get_source_related_name(self) -> str:
"""
        Returns the name to use for the source relation.
        For FK it's the same as the reverse relation name; it differs for m2m fields.
        It's either set as `related_name` or, by default, the owner model's get_name() + 's'.
:return: name of the related_name or default related name.
:rtype: str
"""
return self.get_related_name()
def get_related_name(self) -> str:
"""
        Returns the name to use for the reverse relation.
        It's either set as `related_name` or, by default, the owner model's get_name() + 's'.
:return: name of the related_name or default related name.
:rtype: str
"""
return self.related_name or self.owner.get_name() + "s"
def default_target_field_name(self) -> str:
"""
Returns default target model name on through model.
:return: name of the field
:rtype: str
"""
prefix = "from_" if self.self_reference else ""
return self.through_reverse_relation_name or f"{prefix}{self.to.get_name()}"
def default_source_field_name(self) -> str:
"""
        Returns the default source model name on the through model.
:return: name of the field
:rtype: str
"""
prefix = "to_" if self.self_reference else ""
return self.through_relation_name or f"{prefix}{self.owner.get_name()}"
def evaluate_forward_ref(self, globalns: Any, localns: Any) -> None:
"""
Evaluates the ForwardRef to actual Field based on global and local namespaces
:param globalns: global namespace
:type globalns: Any
:param localns: local namespace
:type localns: Any
:return: None
:rtype: None
"""
if self.to.__class__ == ForwardRef:
self.to = evaluate_forwardref(
self.to, globalns, localns or None # type: ignore
)
(
self.__type__,
self.constraints,
self.column_type,
) = populate_fk_params_based_on_to_model(
to=self.to,
nullable=self.nullable,
ondelete=self.ondelete,
onupdate=self.onupdate,
)
def _extract_model_from_sequence(
self, value: List, child: "Model", to_register: bool
) -> List["Model"]:
"""
Takes a list of Models and registers them on parent.
Registration is mutual, so children have also reference to parent.
Used in reverse FK relations.
:param value: list of Model
:type value: List
:param child: child/ related Model
:type child: Model
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: list (if needed) registered Models
:rtype: List["Model"]
"""
return [
self.expand_relationship( # type: ignore
value=val, child=child, to_register=to_register
)
for val in value
]
def _register_existing_model(
self, value: "Model", child: "Model", to_register: bool
) -> "Model":
"""
Takes already created instance and registers it for parent.
Registration is mutual, so children have also reference to parent.
Used in reverse FK relations and normal FK for single models.
:param value: already instantiated Model
:type value: Model
:param child: child/ related Model
:type child: Model
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: (if needed) registered Model
:rtype: Model
"""
if to_register:
self.register_relation(model=value, child=child)
return value
def _construct_model_from_dict(
self, value: dict, child: "Model", to_register: bool
) -> "Model":
"""
Takes a dictionary, creates an instance and registers it for parent.
If the dictionary contains only one field and it's the pk, it is treated as a __pk_only__ model.
Registration is mutual, so children have also reference to parent.
Used in normal FK for dictionaries.
:param value: dictionary of a Model
:type value: dict
:param child: child/ related Model
:type child: Model
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: (if needed) registered Model
:rtype: Model
"""
if len(value.keys()) == 1 and list(value.keys())[0] == self.to.Meta.pkname:
value["__pk_only__"] = True
model = self.to(**value)
if to_register:
self.register_relation(model=model, child=child)
return model
def _construct_model_from_pk(
self, value: Any, child: "Model", to_register: bool
) -> "Model":
"""
Takes a pk value, creates a dummy instance and registers it for parent.
Registration is mutual, so children have also reference to parent.
Used in normal FK for dictionaries.
:param value: value of a related pk / fk column
:type value: Any
:param child: child/ related Model
:type child: Model
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: (if needed) registered Model
:rtype: Model
"""
if self.to.pk_type() == uuid.UUID and isinstance(value, str): # pragma: nocover
value = uuid.UUID(value)
if not isinstance(value, self.to.pk_type()):
raise RelationshipInstanceError(
f"Relationship error - ForeignKey {self.to.__name__} "
f"is of type {self.to.pk_type()} "
f"while {type(value)} passed as a parameter."
)
model = create_dummy_instance(fk=self.to, pk=value)
if to_register:
self.register_relation(model=model, child=child)
return model
def register_relation(self, model: "Model", child: "Model") -> None:
"""
Registers relation between parent and child in relation manager.
Relation manager is kept on each model (different instance).
Used in Metaclass and sometimes some relations are missing
(i.e. cloned Models in fastapi might miss one).
:param model: parent model (with relation definition)
:type model: Model class
:param child: child model
:type child: Model class
"""
model._orm.add(parent=model, child=child, field=self)
def has_unresolved_forward_refs(self) -> bool:
"""
Verifies if the field has any ForwardRefs that require updating before the
model can be used.
:return: result of the check
:rtype: bool
"""
return self.to.__class__ == ForwardRef
def expand_relationship(
self,
value: Any,
child: Union["Model", "NewBaseModel"],
to_register: bool = True,
) -> Optional[Union["Model", List["Model"]]]:
"""
For relations the child model is first constructed (if needed),
registered in relation and returned.
For relation fields the value can be a pk value (Any type of field),
dict (from Model) or actual instance/list of a "Model".
Selects the appropriate constructor based on a passed value.
:param value: a Model field value, returned untouched for non relation fields.
:type value: Any
:param child: a child Model to register
:type child: Union["Model", "NewBaseModel"]
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: returns a Model or a list of Models
:rtype: Optional[Union["Model", List["Model"]]]
"""
if value is None:
return None if not self.virtual else []
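# Dispatch on the runtime type of `value`: an already constructed target model,
# a dict of field values, or a list of models each map to a dedicated
# constructor; any other type is treated as a bare pk value.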
constructors = {
f"{self.to.__name__}": self._register_existing_model,
"dict": self._construct_model_from_dict,
"list": self._extract_model_from_sequence,
}
model = constructors.get( # type: ignore
value.__class__.__name__, self._construct_model_from_pk
)(value, child, to_register)
return model
def get_relation_name(self) -> str: # pragma: no cover
"""
Returns name of the relation, which can be its own name or the through model
name for m2m models
:return: name of the relation
:rtype: str
"""
return self.name
def get_source_model(self) -> Type["Model"]: # pragma: no cover
"""
Returns model from which the relation comes -> either owner or through model
:return: source model
:rtype: Type["Model"]
"""
return self.owner
|
from sanic import Sanic
from sanic.response import json, file, text
app = Sanic()
app.static('/', '../dist')
@app.route("/")
async def index(request):
return await file('dist/index.html')
@app.route("/example_post", methods=["POST",])
def create_user(request):
return text("POST data: %s" % request.body)
@app.route("/example_json")
def post_json(request):
return json({ "received": True, "data": request.json })
@app.route("/query_string")
def query_string(request):
return json({ "parsed": True, "args": request.args, "url": request.url,
"query_string": request.query_string })
@app.websocket('/ws_data')
async def feed(request, ws):
while True:
data = 'hello!'
print('Sending: ' + data)
await ws.send(data)
data = await ws.recv()
print('Received: ' + data)
|
"""
获取youtube视频下的评论
思路:
基于youtube官方的API来获取, 这里是关于如何初始化配置的文档 https://developers.google.com/youtube/v3/getting-started
评论接口文档:https://developers.google.com/youtube/v3/docs/channelSections/list
任意视频地址:https://www.youtube.com/watch?v=FWMIPukvdsQ
"""
import requests
# API key generated in the API Console configuration
key = "AIzaSyCtJuC7oMed0xxZYPcid913vPxOnl72sHg"
# Video ID
videoId = "FWMIPukvdsQ"
url = f"https://www.googleapis.com/youtube/v3/commentThreads?" \
f"key={key}&" \
f"textFormat=plainText&" \
f"part=snippet&" \
f"videoId={videoId}&" \
f"maxResults=100" # 分页参数
proxies = {
'http': 'socks5://127.0.0.1:1080',
'https': 'socks5://127.0.0.1:1080',
}
def spider(next_page_token):
if next_page_token:
params = {"pageToken": next_page_token}
else:
params = None
res = requests.get(url, proxies=proxies, params=params)
data = res.json()
import pprint
next_page_token = data.get("nextPageToken")
items = data.get("items")
for item in items:
comment = item.get("snippet").get("topLevelComment").get("snippet").get("textDisplay")
print(comment)
return next_page_token
def run():
next_page_token = spider(None)
while next_page_token:
try:
print(next_page_token)
next_page_token = spider(next_page_token)
import time
time.sleep(1)
except Exception as e:
# retry on request timeout
import traceback
print(next_page_token)
print(traceback.format_exc())
if __name__ == '__main__':
run()
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from datadog_checks.base.utils.platform import Platform
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
DRIVER_CONFIG_DIR = os.path.join(CURRENT_DIR, 'data', 'driver_config')
def set_default_driver_conf():
if Platform.is_containerized():
# Use default `./driver_config/odbcinst.ini` when Agent is running in docker.
# `freetds` is shipped with the Docker Agent.
os.environ.setdefault('ODBCSYSINI', DRIVER_CONFIG_DIR)
|
import pytest
from api.models.utils import rankings
@pytest.fixture
def test_data():
return [1, 11, 101]
def test_rankings(test_data):
"""Tests if ranking works
e.g. 1 returns 1st
11 returns 11th
101 returns 101st
"""
assert rankings(test_data[0]) == "1st"
assert rankings(test_data[1]) == "11th"
assert rankings(test_data[2]) == "101st"
|
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""User friendly container for Google Cloud Bigtable Table."""
from google.cloud._helpers import _to_bytes
from google.cloud.bigtable._generated import (
bigtable_pb2 as data_messages_v2_pb2)
from google.cloud.bigtable._generated import (
bigtable_table_admin_pb2 as table_admin_messages_v2_pb2)
from google.cloud.bigtable._generated import (
table_pb2 as table_v2_pb2)
from google.cloud.bigtable.column_family import _gc_rule_from_pb
from google.cloud.bigtable.column_family import ColumnFamily
from google.cloud.bigtable.row import AppendRow
from google.cloud.bigtable.row import ConditionalRow
from google.cloud.bigtable.row import DirectRow
from google.cloud.bigtable.row_data import PartialRowsData
class Table(object):
"""Representation of a Google Cloud Bigtable Table.
.. note::
We don't define any properties on a table other than the name.
The only other fields are ``column_families`` and ``granularity``.
The ``column_families`` are not stored locally and
``granularity`` is an enum with only one value.
We can use a :class:`Table` to:
* :meth:`create` the table
* :meth:`rename` the table
* :meth:`delete` the table
* :meth:`list_column_families` in the table
:type table_id: str
:param table_id: The ID of the table.
:type instance: :class:`Instance <.instance.Instance>`
:param instance: The instance that owns the table.
"""
def __init__(self, table_id, instance):
self.table_id = table_id
self._instance = instance
@property
def name(self):
"""Table name used in requests.
.. note::
This property will not change if ``table_id`` does not, but the
return value is not cached.
The table name is of the form
``"projects/../instances/../tables/{table_id}"``
:rtype: str
:returns: The table name.
"""
return self._instance.name + '/tables/' + self.table_id
def column_family(self, column_family_id, gc_rule=None):
"""Factory to create a column family associated with this table.
:type column_family_id: str
:param column_family_id: The ID of the column family. Must be of the
form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
:type gc_rule: :class:`.GarbageCollectionRule`
:param gc_rule: (Optional) The garbage collection settings for this
column family.
:rtype: :class:`.ColumnFamily`
:returns: A column family owned by this table.
"""
return ColumnFamily(column_family_id, self, gc_rule=gc_rule)
def row(self, row_key, filter_=None, append=False):
"""Factory to create a row associated with this table.
.. warning::
At most one of ``filter_`` and ``append`` can be used in a
:class:`Row`.
:type row_key: bytes
:param row_key: The key for the row being created.
:type filter_: :class:`.RowFilter`
:param filter_: (Optional) Filter to be used for conditional mutations.
See :class:`.DirectRow` for more details.
:type append: bool
:param append: (Optional) Flag to determine if the row should be used
for append mutations.
:rtype: :class:`.DirectRow`
:returns: A row owned by this table.
:raises: :class:`ValueError <exceptions.ValueError>` if both
``filter_`` and ``append`` are used.
"""
if append and filter_ is not None:
raise ValueError('At most one of filter_ and append can be set')
if append:
return AppendRow(row_key, self)
elif filter_ is not None:
return ConditionalRow(row_key, self, filter_=filter_)
else:
return DirectRow(row_key, self)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return (other.table_id == self.table_id and
other._instance == self._instance)
def __ne__(self, other):
return not self.__eq__(other)
def create(self, initial_split_keys=None, column_families=()):
"""Creates this table.
.. note::
A create request returns a
:class:`._generated.table_pb2.Table` but we don't use
this response.
:type initial_split_keys: list
:param initial_split_keys: (Optional) List of row keys that will be
used to initially split the table into
several tablets (Tablets are similar to
HBase regions). Given two split keys,
``"s1"`` and ``"s2"``, three tablets will be
created, spanning the key ranges:
``[, s1)``, ``[s1, s2)``, ``[s2, )``.
:type column_families: list
:param column_families: (Optional) List or other iterable of
:class:`.ColumnFamily` instances.
"""
if initial_split_keys is not None:
split_pb = table_admin_messages_v2_pb2.CreateTableRequest.Split
initial_split_keys = [
split_pb(key=key) for key in initial_split_keys]
table_pb = None
if column_families:
table_pb = table_v2_pb2.Table()
for col_fam in column_families:
curr_id = col_fam.column_family_id
table_pb.column_families[curr_id].MergeFrom(col_fam.to_pb())
request_pb = table_admin_messages_v2_pb2.CreateTableRequest(
initial_splits=initial_split_keys or [],
parent=self._instance.name,
table_id=self.table_id,
table=table_pb,
)
client = self._instance._client
# We expect a `._generated.table_pb2.Table`
client._table_stub.CreateTable(request_pb)
def delete(self):
"""Delete this table."""
request_pb = table_admin_messages_v2_pb2.DeleteTableRequest(
name=self.name)
client = self._instance._client
# We expect a `google.protobuf.empty_pb2.Empty`
client._table_stub.DeleteTable(request_pb)
def list_column_families(self):
"""List the column families owned by this table.
:rtype: dict
:returns: Dictionary of column families attached to this table. Keys
are strings (column family names) and values are
:class:`.ColumnFamily` instances.
:raises: :class:`ValueError <exceptions.ValueError>` if the column
family name from the response does not agree with the computed
name from the column family ID.
"""
request_pb = table_admin_messages_v2_pb2.GetTableRequest(
name=self.name)
client = self._instance._client
# We expect a `._generated.table_pb2.Table`
table_pb = client._table_stub.GetTable(request_pb)
result = {}
for column_family_id, value_pb in table_pb.column_families.items():
gc_rule = _gc_rule_from_pb(value_pb.gc_rule)
column_family = self.column_family(column_family_id,
gc_rule=gc_rule)
result[column_family_id] = column_family
return result
def read_row(self, row_key, filter_=None):
"""Read a single row from this table.
:type row_key: bytes
:param row_key: The key of the row to read from.
:type filter_: :class:`.RowFilter`
:param filter_: (Optional) The filter to apply to the contents of the
row. If unset, returns the entire row.
:rtype: :class:`.PartialRowData`, :data:`NoneType <types.NoneType>`
:returns: The contents of the row if any chunks were returned in
the response, otherwise :data:`None`.
:raises: :class:`ValueError <exceptions.ValueError>` if a commit row
chunk is never encountered.
"""
request_pb = _create_row_request(self.name, row_key=row_key,
filter_=filter_)
client = self._instance._client
response_iterator = client._data_stub.ReadRows(request_pb)
rows_data = PartialRowsData(response_iterator)
rows_data.consume_all()
if rows_data.state not in (rows_data.NEW_ROW, rows_data.START):
raise ValueError('The row remains partial / is not committed.')
if len(rows_data.rows) == 0:
return None
return rows_data.rows[row_key]
def read_rows(self, start_key=None, end_key=None, limit=None,
filter_=None):
"""Read rows from this table.
:type start_key: bytes
:param start_key: (Optional) The beginning of a range of row keys to
read from. The range will include ``start_key``. If
left empty, will be interpreted as the empty string.
:type end_key: bytes
:param end_key: (Optional) The end of a range of row keys to read from.
The range will not include ``end_key``. If left empty,
will be interpreted as an infinite string.
:type limit: int
:param limit: (Optional) The read will terminate after committing to N
rows' worth of results. The default (zero) is to return
all results.
:type filter_: :class:`.RowFilter`
:param filter_: (Optional) The filter to apply to the contents of the
specified row(s). If unset, reads every column in
each row.
:rtype: :class:`.PartialRowsData`
:returns: A :class:`.PartialRowsData` convenience wrapper for consuming
the streamed results.
"""
request_pb = _create_row_request(
self.name, start_key=start_key, end_key=end_key, filter_=filter_,
limit=limit)
client = self._instance._client
response_iterator = client._data_stub.ReadRows(request_pb)
# We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse`
return PartialRowsData(response_iterator)
def sample_row_keys(self):
"""Read a sample of row keys in the table.
The returned row keys will delimit contiguous sections of the table of
approximately equal size, which can be used to break up the data for
distributed tasks like mapreduces.
The elements in the iterator are a SampleRowKeys response and they have
the properties ``offset_bytes`` and ``row_key``. They occur in sorted
order. The table might have contents before the first row key in the
list and after the last one, but a key containing the empty string
indicates "end of table" and will be the last response given, if
present.
.. note::
Row keys in this list may not have ever been written to or read
from, and users should therefore not make any assumptions about the
row key structure that are specific to their use case.
The ``offset_bytes`` field on a response indicates the approximate
total storage space used by all rows in the table which precede
``row_key``. Buffering the contents of all rows between two subsequent
samples would require space roughly equal to the difference in their
``offset_bytes`` fields.
:rtype: :class:`~google.cloud.exceptions.GrpcRendezvous`
:returns: A cancel-able iterator. Can be consumed by calling ``next()``
or by casting to a :class:`list` and can be cancelled by
calling ``cancel()``.
"""
request_pb = data_messages_v2_pb2.SampleRowKeysRequest(
table_name=self.name)
client = self._instance._client
response_iterator = client._data_stub.SampleRowKeys(request_pb)
return response_iterator
def _create_row_request(table_name, row_key=None, start_key=None, end_key=None,
filter_=None, limit=None):
"""Creates a request to read rows in a table.
:type table_name: str
:param table_name: The name of the table to read from.
:type row_key: bytes
:param row_key: (Optional) The key of a specific row to read from.
:type start_key: bytes
:param start_key: (Optional) The beginning of a range of row keys to
read from. The range will include ``start_key``. If
left empty, will be interpreted as the empty string.
:type end_key: bytes
:param end_key: (Optional) The end of a range of row keys to read from.
The range will not include ``end_key``. If left empty,
will be interpreted as an infinite string.
:type filter_: :class:`.RowFilter`
:param filter_: (Optional) The filter to apply to the contents of the
specified row(s). If unset, reads the entire table.
:type limit: int
:param limit: (Optional) The read will terminate after committing to N
rows' worth of results. The default (zero) is to return
all results.
:rtype: :class:`data_messages_v2_pb2.ReadRowsRequest`
:returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs.
:raises: :class:`ValueError <exceptions.ValueError>` if both
``row_key`` and one of ``start_key`` and ``end_key`` are set
"""
request_kwargs = {'table_name': table_name}
if (row_key is not None and
(start_key is not None or end_key is not None)):
raise ValueError('Row key and row range cannot be '
'set simultaneously')
range_kwargs = {}
if start_key is not None or end_key is not None:
if start_key is not None:
range_kwargs['start_key_closed'] = _to_bytes(start_key)
if end_key is not None:
range_kwargs['end_key_open'] = _to_bytes(end_key)
if filter_ is not None:
request_kwargs['filter'] = filter_.to_pb()
if limit is not None:
request_kwargs['rows_limit'] = limit
message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs)
if row_key is not None:
message.rows.row_keys.append(_to_bytes(row_key))
if range_kwargs:
message.rows.row_ranges.add(**range_kwargs)
return message
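# Minimal usage sketch (illustrative only; `instance` is assumed to be an
# already configured Instance object from this package's client layer):
#
#   table = Table('my-table', instance)
#   table.create()
#   row = table.row(b'row-key-1')                         # a DirectRow for mutations
#   rows = table.read_rows(start_key=b'a', end_key=b'z')
#   rows.consume_all()                                    # PartialRowsData buffers the stream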
|
import logging
import sys
from optparse import make_option
from django.core.management.base import BaseCommand
from celery.task.sets import TaskSet
import amo
from lib.crypto.packaged import sign
from mkt.webapps.models import Webapp
HELP = """\
Start tasks to re-sign web apps.
To specify which webapps to sign:
`--webapps=1234,5678,...9012`
If omitted, all signed apps will be re-signed.
"""
log = logging.getLogger('z.addons')
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--webapps',
help='Webapp ids to process. Use commas to separate '
'multiple ids.'),
)
help = HELP
def handle(self, *args, **kw):
qs = Webapp.objects.filter(is_packaged=True,
status__in=amo.LISTED_STATUSES)
if kw['webapps']:
pks = [int(a.strip()) for a in kw['webapps'].split(',')]
qs = qs.filter(pk__in=pks)
tasks = []
for app in qs:
if not app.current_version:
sys.stdout.write('Public app [id:%s] with no current version\n'
% app.pk)
continue
tasks.append(sign.subtask(args=[app.current_version.pk],
kwargs={'resign': True}))
TaskSet(tasks).apply_async()
|
"""
EBAS IO Setup Instructions: https://git.nilu.no/ebas/ebas-io/-/wikis/home
For my own purposes, I've cloned the ebas-io repository here, and installed with:
pip install ebas-io/dist/ebas_io-3.5.1-py3-none-any.whl
"""
|
#!/usr/bin/python
#
# -*- coding: utf-8 -*-
# vim: set ts=4 sw=4 et sts=4 ai:
"""Connect to the streamti.me server and report our stats."""
__author__ = "mithro@mithis.com (Tim 'mithro' Ansell)"
import datetime
import json
import time
import urllib
import urllib2
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--server", help="server to register on", action="store",
default="http://localhost:8000/tracker/endpoint/register")
parser.add_argument(
"--secret", help="secret to use to register", action="store",
default="move me to config.private.json")
parser.add_argument(
"--group", help="group to register on the server", action="store",
default="example")
parser.add_argument(
"--ip", help="IP to pretend to be", action="store",
default="")
if __name__ == "__main__":
args = parser.parse_args()
while True:
data = {
"overall_clients": 0,
"overall_bitrate": 0,
"overall_cbitrate": 0,
}
totals = [('ogg_high', 4, 1e6, 2e6)]
for name, clients, bitrate, streambitrate in totals:
fixed_name = name.replace('http-', '').replace('-', '_')
data[fixed_name+"_clients"] = int(clients)
data[fixed_name+"_bitrate"] = float(bitrate)
data[fixed_name+"_cbitrate"] = float(bitrate)
data["overall_clients"] += clients
data["overall_bitrate"] += bitrate
data["overall_cbitrate"] += clients*streambitrate
for group in args.group.split(','):
try:
req = urllib2.Request(
args.server,
urllib.urlencode((
('secret', args.secret),
('group', group),
('data', json.dumps(data)),
('REMOTE_ADDR', args.ip),
)))
r = urllib2.urlopen(req)
except urllib2.HTTPError, e:
print e
print e.read()
raise
print "Registered", group, "at", datetime.datetime.now(), "result", r.read().strip()
time.sleep(1)
|
# -*- coding: utf-8 -*-
from python_lessons.fixture.group import *
def test_add_group(app, db, json_groups):
group = json_groups
old_groups = db.get_group_list()
app.group.create(group)
new_groups = db.get_group_list()
old_groups.append(group)
sorted_old = sorted(old_groups, key=Group.id_or_max)
sorted_new = sorted(new_groups, key=Group.id_or_max)
print("\nsorted_old " + str(sorted_old))
print("\nsorted_new " + str(sorted_new))
assert sorted_old == sorted_new
|
import os
from pprint import pprint
import numpy as np
import mxnet as mx
import logging
print "testing global optimizer"
class GlobalOptimizer(mx.optimizer.Optimizer):
"""A GlobalOptimizer for the master parameter server.
Parameters
----------
learning_rate : float, optional
learning_rate of GlobalOptimizer
wd : float, optional
L2 regularization coefficient add to all the weights
rescale_grad : float, optional
rescaling factor of gradient.
clip_gradient : float, optional
clip gradient in range [-clip_gradient, clip_gradient]
param_idx2name : dict of string/int to float, optional
special treat weight decay in parameter ends with bias, gamma, and beta
"""
def __init__(self, **kwargs):
super(GlobalOptimizer, self).__init__(**kwargs)
self.momentum = 0
def create_state(self, index, weight):
"""Create additional optimizer state such as momentum.
Parameters
----------
weight : NDArray
The weight data
"""
if self.momentum == 0.0:
return None
else:
return mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)
def update(self, index, curr_weights, incoming_weights, state):
"""Update the parameters.
Parameters
----------
index : int
An unique integer key used to index the parameters
curr_weights : NDArray
curr_weights ndarray
incoming_weights : NDArray
incoming_weights ndarray
state : NDArray or other objects returned by init_state
The auxiliary state used in optimization.
"""
assert(isinstance(curr_weights, mx.ndarray.NDArray))
assert(isinstance(incoming_weights, mx.ndarray.NDArray))
# lr: learning rate, wd: L2 regularization coeff add to all weights
# lr = self._get_lr(index)
# wd = self._get_wd(index)
self._update_count(index)
# No gradient is applied here, so there is nothing to rescale or clip
# grad = grad * self.rescale_grad
# if self.clip_gradient is not None:
# grad = clip(grad, -self.clip_gradient, self.clip_gradient)
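# Parameter averaging: the master simply averages the pushed weights with the
# weights it currently holds instead of applying a gradient step.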
curr_weights[:] = (curr_weights[:] + incoming_weights[:]) / 2
# if state:
# mom = state
# mom[:] *= self.momentum
# mom[:] += -lr * (grad + wd * weight)
# weight[:] += mom
# else:
# assert self.momentum == 0.0
# weight[:] += -lr * (grad + wd * weight)
mx.optimizer.Optimizer.register(GlobalOptimizer)
kv = mx.kv.create('local')
kv.set_optimizer(GlobalOptimizer())
num_weights = 10
w = np.arange(num_weights)
shape = w.shape
print "Initial w: {0}".format(w)
kv.init(0, mx.nd.array(w))
kv.init(1, mx.nd.array(w * 2))
a0 = mx.nd.zeros(shape)
a1 = mx.nd.zeros(shape)
kv.pull(0, out = a0)
kv.pull(1, out = a1)
print "Pulled w0: {0}".format(a0.asnumpy())
print "Pulled w1: {0}".format(a1.asnumpy())
kv.push(0, a0 * 10)
kv.push(1, a1 * 10)
kv.pull(0, out = a0) # pull out the value
kv.pull(1, out = a1) # pull out the value
print "Pulled w0*10: {0}".format(a0.asnumpy())
print "Pulled w1*10: {0}".format(a1.asnumpy())
|
from mongoengine import connect
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = []
connect('picket')
|
from sklearn.linear_model import LinearRegression
import numpy as np
import torch
import pickle
import joblib
import time
import os.path
from os import path
from training.baseline import baseline_inpute
from utils.utils import construct_missing_X_from_mask
def linear_regression(data, args, log_path, load_path):
t0 = time.time()
n_row, n_col = data.df_X.shape
x = data.x.clone().detach()
edge_index = data.edge_index.clone().detach()
train_edge_mask = data.train_edge_mask.numpy()
train_edge_index = data.train_edge_index.clone().detach()
train_edge_attr = data.train_edge_attr.clone().detach()
test_edge_index = data.test_edge_index.clone().detach()
test_edge_attr = data.test_edge_attr.clone().detach()
y = data.y.detach().numpy()
train_y_mask = data.train_y_mask.clone().detach()
# print(torch.sum(train_y_mask))
test_y_mask = data.test_y_mask.clone().detach()
y_train = y[train_y_mask]
y_test = y[test_y_mask]
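# Three impute-then-regress paths follow: 'gnn' predicts y directly from the
# GNN-imputed feature matrix, 'gnn_mdi' fills only the missing entries with the
# trained GNN before fitting a LinearRegression, and any other method falls
# back to a classical baseline imputer.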
if args.method == 'gnn':
model = torch.load(load_path+'model.pt',map_location=torch.device('cpu'))
model.eval()
impute_model = torch.load(load_path+'impute_model.pt',map_location=torch.device('cpu'))
impute_model.eval()
predict_model = torch.load(load_path+'predict_model.pt',map_location=torch.device('cpu'))
predict_model.eval()
t_load = time.time()
with torch.no_grad():
x_embd = model(x, train_edge_attr, train_edge_index)
X = impute_model([x_embd[edge_index[0, :int(n_row * n_col)]], x_embd[edge_index[1, :int(n_row * n_col)]]])
t_impute = time.time()
X = torch.reshape(X, [n_row, n_col])
y_pred = predict_model(X)[:, 0]
y_pred_test = y_pred[test_y_mask].detach().numpy()
t_reg = time.time()
else:
if args.method == 'gnn_mdi':
model = torch.load(load_path+'model.pt',map_location=torch.device('cpu'))
model.eval()
impute_model = torch.load(load_path+'impute_model.pt',map_location=torch.device('cpu'))
impute_model.eval()
t_load = time.time()
with torch.no_grad():
x_embd = model(x, train_edge_attr, train_edge_index)
x_pred = impute_model([x_embd[test_edge_index[0], :], x_embd[test_edge_index[1], :]])
t_impute = time.time()
x_pred = x_pred[:int(test_edge_attr.shape[0] / 2)]
X_true, X_incomplete = construct_missing_X_from_mask(train_edge_mask, data.df_X)
X = X_incomplete
for i in range(int(test_edge_attr.shape[0] / 2)):
assert X_true[test_edge_index[0, i], test_edge_index[1, i] - y.shape[0]] == test_edge_attr[i]
X[test_edge_index[0, i], test_edge_index[1, i] - y.shape[0]] = x_pred[i]
else:
X_true, X_incomplete = construct_missing_X_from_mask(train_edge_mask, data.df_X)
t_load = time.time()
X = baseline_inpute(X_incomplete, args.method, args.level)
t_impute = time.time()
reg = LinearRegression().fit(X[train_y_mask, :], y_train)
y_pred_test = reg.predict(X[test_y_mask, :])
t_reg = time.time()
rmse = np.sqrt(np.mean((y_pred_test - y_test) ** 2))
mae = np.mean(np.abs(y_pred_test - y_test))
t_test = time.time()
if path.exists(log_path + 'result.pkl'):
obj = joblib.load(log_path + 'result.pkl')
obj['args_linear_regression'] = args
else:
obj = dict()
obj['args'] = args
obj['load_path'] = load_path
obj['rmse'] = rmse
obj['mae'] = mae
obj['load_time'] = t_load - t0
obj['impute_time'] = t_impute - t_load
obj['reg_time'] = t_reg - t_impute
obj['test_time'] = t_test - t_reg
print('{}: rmse: {:.3g}, mae: {:.3g}'.format(args.method,rmse,mae))
pickle.dump(obj, open(log_path + 'result.pkl', "wb"))
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from flask import Flask
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from sklearn.neighbors import KNeighborsRegressor
from sklearn.linear_model import LinearRegression
from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor
from sklearn.tree import DecisionTreeRegressor
from scipy.stats import pearsonr
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.linear_model import SGDRegressor
import statsmodels.api as sm
from sklearn.model_selection import GridSearchCV
import seaborn as sns
from flask import Flask, request, make_response, jsonify
import joblib
app = Flask(__name__)
model = joblib.load('reg_model.pkl')
@app.route("/", methods=['POST'])
def home():
content = request.get_json()
df = pd.DataFrame([content])
test = df.drop(columns=['LOS', 'ID'])
prediction = model.predict(test)
return jsonify(los=str(np.round(prediction[0], 2)))
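# Example request (illustrative; the real feature names depend on the data used
# to train reg_model.pkl; 'LOS' and 'ID' are dropped before predicting):
#   curl -X POST http://127.0.0.1:8000/ -H 'Content-Type: application/json' \
#        -d '{"ID": 1, "LOS": 0, "age": 42, "severity": 3}'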
if __name__ == '__main__':
app.run(host='127.0.0.1', port=8000, debug=True)
|
# -*- coding: utf-8 -*-
from twitter.stream import TwitterStream
from utils.singleton import Singleton
class StreamsHandler(metaclass=Singleton):
""" Represents a streams handler object """
def __init__(self):
""" Creates a streams handler singleton """
self.streams = {}
@staticmethod
def build_key(account: str) -> str:
"""
Builds a hash key given a Twitter account
:param account: Twitter account
"""
return account
def get(self, account: str) -> TwitterStream:
"""
Retrieves a Twitter stream given an account
:param account: Twitter account
"""
key = self.build_key(account)
return self.streams.get(key)
def set(self, account: str, stream: object) -> None:
"""
Saves a Twitter stream given an account
:param account: Twitter account
:param stream: Twitter stream object
"""
key = self.build_key(account)
self.streams[key] = stream
def start_stream(self, account: str, stream: TwitterStream, stream_props: dict) -> None:
"""
Starts a Twitter stream given an account
:param account: Twitter account
:param stream: Twitter stream object
:param stream_props: Twitter stream start parameters:
1. Filter term (i.e: 'Golden gate')
2. Filter coords (i.e: [-74,40,-73,41])
"""
# Stopping previous stream in case it existed
self.stop_stream(account)
self.set(account, stream)
stream.start(
filter_term=stream_props['filter_term'],
filter_coords=stream_props['filter_coords'],
filter_langs=('en',),
)
def stop_stream(self, account: str) -> None:
"""
Stops a Twitter stream given an account
:param account: Twitter account
"""
stream = self.get(account)
if stream is not None:
stream.stop()
self.set(account, None)
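# Usage sketch (illustrative): the Singleton metaclass means every
# StreamsHandler() call returns the same instance, so a stream started in one
# place can be stopped from anywhere:
#
#   handler = StreamsHandler()
#   handler.start_stream('some_account', stream,
#                        {'filter_term': 'golden gate', 'filter_coords': None})
#   handler.stop_stream('some_account')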
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable-msg=no-member
# pylint: disable-msg=arguments-differ
# pylint: disable-msg=import-error
"""
RCNN_model.py is a module for RCNN model
"""
import torch
from torch import nn
import torch.nn.functional as F
__author__ = "Ehsan Tavan"
__organization__ = "Persian Emoji Prediction"
__credits__ = ["Ehsan Tavan"]
__license__ = "Public Domain"
__version__ = "1.0.0"
__maintainer__ = "Ehsan Tavan"
__email__ = "tavan.ehsan@gmail.com"
__status__ = "Production"
__date__ = "01/04/2021"
class RCNN(nn.Module):
"""
In this class we implement RCNN model
"""
def __init__(self, **kwargs):
super().__init__()
self.use_emotion = kwargs["use_emotion"]
self.embeddings = nn.Embedding(num_embeddings=kwargs["vocab_size"],
embedding_dim=kwargs["embedding_dim"],
padding_idx=kwargs["pad_idx"])
if self.use_emotion:
self.emotion_embeddings = nn.Embedding(num_embeddings=kwargs["vocab_size"],
embedding_dim=kwargs["emotion_embedding_dim"],
padding_idx=kwargs["pad_idx"])
self.lstm = nn.LSTM(input_size=kwargs["embedding_dim"] +
kwargs["emotion_embedding_dim"] if self.use_emotion
else kwargs["embedding_dim"],
hidden_size=kwargs["lstm_hid_dim"],
num_layers=kwargs["lstm_layers"],
dropout=kwargs["dropout"] if kwargs["lstm_layers"] > 1
else 0,
bidirectional=True)
self.dropout = nn.Dropout(kwargs["dropout"])
self.linear = nn.Linear(
in_features=kwargs["embedding_dim"] +
kwargs["emotion_embedding_dim"] +
2 * kwargs["lstm_hid_dim"] if self.use_emotion
else kwargs["embedding_dim"] + 2 * kwargs["lstm_hid_dim"],
out_features=kwargs["linear_units"]
)
self.tanh = nn.Tanh()
self.output = nn.Linear(in_features=kwargs["linear_units"],
out_features=kwargs["output_size"])
def forward(self, input_batch):
# input_batch.size() = [batch_size, sent_len]
embedded = self.embeddings(input_batch)
# embedded.size() = [batch_size, sent_len, embedding_dim]
if self.use_emotion:
emotion_embedded = self.emotion_embeddings(input_batch)
embedded = torch.cat((embedded, emotion_embedded), dim=2)
embedded = embedded.permute(1, 0, 2)
# embedded.size() = [sent_len, batch_size, embedding_dim]
lstm_output, (hidden, cell) = self.lstm(embedded)
# output_1.size() = [sent_len, batch_size, hid_dim * num_directions]
# hidden.size() = [num_layers * num_directions, batch_size, hid_dim]
# cell.size() = [num_layers * num_directions, batch_size, hid_dim]
input_features = torch.cat([lstm_output, embedded], 2).permute(1, 0, 2)
# final_features.size() = [batch_size, sent_len, embedding_dim+2*hid_dim]
linear_output = self.tanh(self.linear(input_features))
# linear_output.size() = [batch_size, sent_len, linear_units]
linear_output = linear_output.permute(0, 2, 1) # Reshaping for max_pool
# linear_output.size() = [batch_size, linear_units, sent_len]
max_out_features = F.max_pool1d(linear_output, linear_output.shape[2]).squeeze(2)
max_out_features = self.dropout(max_out_features)
# max_out_features.size() = [batch_size, linear_units]
return self.output(max_out_features)
|
from collections import namedtuple
from contextlib import contextmanager, ExitStack
from functools import partial
from typing import ContextManager, Callable, Any, TypeVar, overload
from ._async_value import AsyncValue
def _IDENTITY(x):
return x
T_OUT = TypeVar('T_OUT')
@overload
def compose_values(**value_map: AsyncValue) -> ContextManager[AsyncValue]: ...
@overload
def compose_values(*, _transform_: Callable[[Any], T_OUT],
**value_map: AsyncValue) -> ContextManager[AsyncValue[T_OUT]]: ...
def compose_values(*, _transform_=None, **value_map):
"""Context manager providing a composite of multiple AsyncValues
The composite object itself is an AsyncValue, with the `value` of each
underlying object accessible as attributes on the composite `value`.
`compose_values()` expects named AsyncValue instances to be provided as
keyword arguments. The attributes of the composite value will correspond
to the given names.
It's mostly an implementation detail, but the composite value type is a
namedtuple. Users should not write to the composite `value` attribute
since it is exclusively managed by the context.
Synopsis:
>>> async_x, async_y = AsyncValue(-1), AsyncValue(10)
>>>
>>> with compose_values(x=async_x, y=async_y) as async_xy:
>>>     result = await async_xy.wait_value(lambda val: val.x < 0 < val.y)
>>>
>>> result
CompositeValue(x=-1, y=10)
The `_transform_` parameter specifies an optional function to transform the
final value. This is equivalent but more efficient than chaining a single
open_transform() to the default compose_values() output. For example:
>>> with compose_values(x=async_x, y=async_y,
>>> _transform_=lambda val: val.x * val.y) as x_mul_y:
>>> ...
is equivalent to:
>>> with compose_values(x=async_x, y=async_y) as async_xy, \\
>>> async_xy.open_transform(lambda val: val.x * val.y) as x_mul_y:
>>> ...
Performance note: predicates on the output AsyncValue will be evaluated
on every assignment to the `value` properties of the input AsyncValues.
So if two inputs are being composed, each updated 10 times per second,
the output predicates will be evaluated 20 times per second.
"""
# type hint workaround for https://youtrack.jetbrains.com/issue/PY-36444
return _compose_values(_transform_, value_map)
@contextmanager
def _compose_values(_transform_, value_map):
transform = _transform_ or _IDENTITY
async_vals = value_map.values()
if not (async_vals and all(isinstance(av, AsyncValue) for av in async_vals)):
raise TypeError('expected instances of AsyncValue')
value_type = namedtuple('CompositeValue', value_map.keys())
composite_value = value_type._make(av.value for av in async_vals)
composite = AsyncValue(transform(composite_value))
# This dummy wait_value() predicate hooks into each value and updates
# the composite as a side effect.
def _update_composite(name, val):
nonlocal composite_value
composite_value = composite_value._replace(**{name: val})
composite.value = transform(composite_value)
return False
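# Hook the updater into every input AsyncValue for the lifetime of this
# context; ExitStack unwinds all the registrations when the caller leaves the
# `with compose_values(...)` block.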
with ExitStack() as stack:
for name_, async_val in value_map.items():
# NOTE: by using AsyncValue internals we avoid running wait_value()
# as a child task for each input.
stack.enter_context(
async_val._level_results.open_ref(partial(_update_composite, name_)))
yield composite
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import APITestCase
class OrganizationMemberListTest(APITestCase):
def setUp(self):
self.user_1 = self.create_user('foo@localhost', username='foo')
self.user_2 = self.create_user('bar@localhost', username='bar')
self.create_user('baz@localhost', username='baz')
self.org = self.create_organization(owner=self.user_1)
self.org.member_set.create(user=self.user_2)
self.login_as(user=self.user_1)
self.url = reverse(
'sentry-api-0-organization-member-index', kwargs={
'organization_slug': self.org.slug,
}
)
def test_simple(self):
response = self.client.get(self.url)
assert response.status_code == 200
assert len(response.data) == 2
assert response.data[0]['email'] == self.user_2.email
assert response.data[1]['email'] == self.user_1.email
def test_email_query(self):
response = self.client.get(self.url + "?query=email:foo@localhost")
assert response.status_code == 200
assert len(response.data) == 1
assert response.data[0]['email'] == self.user_1.email
def test_user_email_email_query(self):
self.create_useremail(self.user_1, 'baz@localhost')
response = self.client.get(self.url + "?query=email:baz@localhost")
assert response.status_code == 200
assert len(response.data) == 1
assert response.data[0]['email'] == self.user_1.email
|
import sys, os.path
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
import os.path
from collections import defaultdict, namedtuple
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
import py1090
from example_helpers import map_bounds, blacklist_hexidents, example_data_file
def basemap_plot_paths(filename):
m = Basemap(projection='merc', resolution='i', **map_bounds['europe'])
fig = plt.figure()
ax = Axes3D(fig)
ax.add_collection3d(m.drawcoastlines(linewidth=0.25))
ax.add_collection3d(m.drawcountries(linewidth=0.35))
collection = py1090.FlightCollection()
#with py1090.Connection() as connection:
with open(filename, 'r') as connection:
collection.add_list(connection)
for flight in collection:
if flight.hexident in blacklist_hexidents:
continue
path = list(flight.path)
if len(path) > 1:
lats, lons, alts = np.array(path).T
x, y = m(lons, lats)
m.plot(x,y, alts,".-")
plt.title("Flights in file '{:s}'".format(filename))
plt.show()
if __name__ == "__main__":
filename = example_data_file()
basemap_plot_paths(filename)
|
import pytest
from sqload import load
@pytest.fixture
def q():
return load('tests/queries.sql')
def test_load_all_queries(q):
assert len(q.keys()) == 3
def test_get_query_by_key(q):
assert q['find-all-by-name'] == 'SELECT * FROM users WHERE name = :name'
def test_clean_multiple_lines(q):
sql = 'SELECT * FROM users u INNER JOIN products p ON u.id = p.user_id WHERE u.id = 1 LIMIT 1'
assert q['find-users-join-products'] == sql
|
import sys
from pyteal import *
def contract_account(app_id):
asset_close_to_check = Txn.asset_close_to() == Global.zero_address()
rekey_check = Txn.rekey_to() == Global.zero_address()
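# The two checks above are standard escrow safety guards: the asset must never
# be closed out to another account and the escrow account must never be rekeyed.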
linked_with_app_call = And(
Gtxn[0].type_enum() == TxnType.ApplicationCall,
Gtxn[0].application_id() == Int(app_id)
)
fee_check = Txn.fee() <= Int(1000)
# create asa from escrow
on_create_asa = Txn.type_enum() == TxnType.AssetConfig
# fund 1 asa that has been created by escrow
on_fund_asa = Seq([
Assert(Txn.type_enum() == TxnType.AssetTransfer),
Assert(Txn.asset_sender() == Global.zero_address()),
Assert(asset_close_to_check),
Assert(Txn.asset_amount() == Int(1)),
Int(1)
])
return Seq([
Assert(Txn.group_index() == Int(1)),
Assert(linked_with_app_call),
Assert(rekey_check),
Assert(fee_check),
Cond(
[Gtxn[0].application_args[0] == Bytes("create_asa"), on_create_asa],
[Gtxn[0].application_args[0] == Bytes("fund_asa"), on_fund_asa]
)
])
if __name__ == "__main__":
arg = int(sys.argv[1])
print(compileTeal(contract_account(arg), Mode.Signature, version=3))
|
import csv
import s2sphere # https://s2sphere.readthedocs.io/en/latest/index.html
import math
import datetime
import numpy
import matplotlib.pyplot as plot
import time
import pyproj as proj
print('NodeExtractor started...')
# general parameters
# minimum frequency-peak to number-of-transmissions rate used in Timo's (sine) method
MINIMUM_PEAK_TO_TRANSMISSIONS_RATIO = 0.5
# following value has to be at least 2 for fft to work properly
MINIMUM_NO_OF_PACKETS_SENT = 15
# criterion for filtering out short-living nodes
# This value must be greater than the upper bound periodicity used for Mostafa's (direct) method
MINIMUM_LIFESPAN = 86400 # 24h in seconds
# periodicity boundaries used for the frequency cutoff in Mostafa's method
UPPER_BOUND_PERIODICITY = 7200 # 2h in s
LOWER_BOUND_PERIODICITY = 1209600 # 2 weeks in s
# minimum percentage of intervals which have to be successfully checked back in Mostafa's method
MINIMUM_INTERVAL_PERCENTAGE = 0.99
EARTH_RADIUS = 6371000 # in m
# setting up projections (according to Coordinate Reference Systems WGS84 (lat.-lon.) and CH1903 (Switzerland))
proj_WGS84 = proj.Proj(init='epsg:4326')
proj_CH1903 = proj.Proj(init='epsg:21781') # http://epsg.io/21781
# setting the filename
filename = "input.csv"
# defining the center of the city of Zurich and the radius of the cap to be drawn around it
ZurichLon, ZurichLat = 8.54226, 47.37174
centerOfZurich = s2sphere.LatLng.from_degrees(ZurichLat, ZurichLon)
radius = 10000
# converting the radius into the Angle format
angleRadius = s2sphere.Angle.from_degrees(360 * radius / (2 * math.pi * EARTH_RADIUS))
# defining the cap around Zurich
region = s2sphere.Cap.from_axis_angle(centerOfZurich.to_point(), angleRadius)
# converting Zurich's WGS84-coordinates (EPSG 4326) to CH1903 (EPSG 21781)
ZurichX, ZurichY = proj.transform(proj_WGS84, proj_CH1903, ZurichLon, ZurichLat)
# calculating the offsets used for normalization of the cartesian coordinate system
offsetX, offsetY = ZurichX - radius, ZurichY - radius
class PacketTransmission:
def __init__(self, trans_id, time_stamp, lat, lon):
self.trans_id = trans_id
self.time_stamp = time_stamp
self.lat = lat
self.lon = lon
# initializing the dictionary, which will hold all Transmission-objects per key (= nodeaddr)
nodeDict = {}
# starting the timer
time_start = time.perf_counter()  # time.clock() was removed in Python 3.8
# parsing the .csv-file
with open(filename, 'r', encoding='unicode_escape') as csv_file:
csv_reader = csv.reader(csv_file)
# skipping the first line (fieldnames)
next(csv_file)
for line in csv_reader:
# building a temporary point at the lat./lon.-position of the looked-at packet transmission
tempPoint = s2sphere.LatLng.from_degrees(float(line[10]), float(line[11])).to_point()
# checking, if the point is contained in the defined shape
if region.contains(tempPoint):
# if for a given nodeaddr no key in nodeDict exists yet, initialize an empty list at this key (line[2])
if not (line[2] in nodeDict):
nodeDict[line[2]] = []
timeStamp = datetime.datetime.strptime(line[1], '%Y-%m-%d %H:%M:%S').timestamp()
nodeDict.get(line[2]).append(PacketTransmission(line[0], timeStamp, line[10], line[11]))
keptNodesLifespanCheck = {}
shortLivingNodes = {}
keptNodesMethodTimo = {}
remainderMethodTimo = {}
keptNodesMethodMostafa = {}
remainderMethodMostafa = {}
keptNodesMethodMachineLearning = {}
remainderMethodMachineLearning = {}
# filtering out short-living nodes, resp. nodes with too little transmissions
for node in nodeDict:
# calculating the time-difference between first and last transmission in seconds
timeSpan = nodeDict[node].__getitem__(len(nodeDict[node]) - 1).time_stamp - nodeDict[node].__getitem__(0).time_stamp
packetLength = len(nodeDict[node])
if packetLength < MINIMUM_NO_OF_PACKETS_SENT:
shortLivingNodes[node] = nodeDict[node]
print('Failing lifespan check: ' + node + ' (reason: too few transmissions: '
+ str(packetLength) + ' packets transmitted)')
elif timeSpan < MINIMUM_LIFESPAN:
shortLivingNodes[node] = nodeDict[node]
print('Failing lifespan check: ' + node + ' (reason: lifespan between 1st and last transmission too short: '
+ str(timeSpan) + ' s)')
else:
keptNodesLifespanCheck[node] = nodeDict[node]
# building the statistical periodicity table & function
periodicityDistribution = []
for i in range(0, 334):
periodicityDistribution.append(0)
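# 334 hourly buckets cover periodicities from 2 h (7200 s) up to 2 weeks
# (1209600 s); register_periodicity() below maps a periodicity p to bucket
# (p - 7200) // 3600.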
def register_periodicity(p):
if UPPER_BOUND_PERIODICITY <= p < LOWER_BOUND_PERIODICITY:
index = (p-7200) // 3600
periodicityDistribution[int(index)] += 1
# filtering after Timo's method (Sine method): sine period between transmissions, determine strong single frequencies)
for node in keptNodesLifespanCheck:
# building the sine list
sinePeriodicity = []
for i in range(len(keptNodesLifespanCheck[node]) - 1):
# determining the next interval (time in seconds between two transmissions)
j = i + 1
interval = int(
keptNodesLifespanCheck[node].__getitem__(j).__getattribute__('time_stamp')
- keptNodesLifespanCheck[node].__getitem__(i).__getattribute__("time_stamp"))
# adding 0 for sin(0) per default
sinePeriodicity.append(0)
# appending the y-values for one cycle of a sine wave which spans the current interval
for q in range(interval - 1):
sinePeriodicity.append(numpy.sin(2 * numpy.pi * (q + 1) / (interval + 1)))
# computing fft for sinePeriodicity
fftSinTable = numpy.abs(numpy.fft.rfft(sinePeriodicity))
# adding passing nodes to the remainderMethodTimo list, for which FFT does not show a clear peak
# (i.e., their peak/transmissions-ratio is too low)
ratio = max(fftSinTable) / (len(fftSinTable))
if ratio < MINIMUM_PEAK_TO_TRANSMISSIONS_RATIO:
remainderMethodTimo[node] = keptNodesLifespanCheck[node]
print('Failing sine method: ' + node + ' (reason: peak/transmissions-ratio too low: ' + str(ratio) + ')')
else:
keptNodesMethodTimo[node] = keptNodesLifespanCheck[node]
# printing the peak periodicity (by converting the found peak frequency)
singularPeriodicityPeak = len(sinePeriodicity) / numpy.argmax(fftSinTable)
print('Node ' + node + ' is most regularly transmitting all '
+ str(singularPeriodicityPeak) + ' seconds.')
register_periodicity(singularPeriodicityPeak)
# filtering after Mostafa's method (Direct method)
for node in remainderMethodTimo:
timeSpan = remainderMethodTimo[node].__getitem__(len(remainderMethodTimo[node]) - 1).time_stamp \
- remainderMethodTimo[node].__getitem__(0).time_stamp
# initializing the counters
secondCount = 0
packetCount = 0
startSecond = remainderMethodTimo[node].__getitem__(0).time_stamp
# building the periodicityTable
# initializing the periodicity table (list)
periodicityTable = []
while packetCount < len(remainderMethodTimo[node]) and secondCount <= timeSpan:
if startSecond + secondCount == remainderMethodTimo[node].__getitem__(packetCount).time_stamp:
# appending 1 to the periodicityTable to signalize a transmission at the current second
periodicityTable.append(1)
# skipping packetTransmissions in the same second
while packetCount < len(remainderMethodTimo[node]) and \
startSecond + secondCount == remainderMethodTimo[node].__getitem__(packetCount).time_stamp:
packetCount = packetCount + 1
else:
# appending 0 to the periodicityTable if no transmission happened at current second
periodicityTable.append(0)
secondCount = secondCount + 1
# computing FFT for periodicityTable
fftPeriodicityTable = numpy.abs(numpy.fft.rfft(periodicityTable))
# converting the provided periodicity-cutoffs to the looked-at node's time domain
if timeSpan < UPPER_BOUND_PERIODICITY:
raise ValueError('Node\'s lifespan must strictly be greater than the upper bound periodicity!')
else:
upperBoundFrequency = int(round(timeSpan / UPPER_BOUND_PERIODICITY))
if upperBoundFrequency > len(fftPeriodicityTable):
upperBoundFrequency = len(fftPeriodicityTable) - 1
if timeSpan < LOWER_BOUND_PERIODICITY:
lowerBoundFrequency = 1
else:
lowerBoundFrequency = int(round(timeSpan / LOWER_BOUND_PERIODICITY))
# determining the peak frequency using the frequency-cutoff
peakFrequencyY = 0
peakFrequencyX = 0
for i in range(lowerBoundFrequency, upperBoundFrequency):
if fftPeriodicityTable[i] > peakFrequencyY:
peakFrequencyY = fftPeriodicityTable[i]
peakFrequencyX = i
# converting the found peakFrequency to periodicity
peakPeriodicity = int(round(timeSpan / peakFrequencyX))
# checking back, if found peakPeriodicity appears frequently in periodicityTable
intervalSecond = 0
transmissionCounter = 0
intervalCountList = []
for j in range(len(periodicityTable)):
if periodicityTable[j] == 1:
transmissionCounter = transmissionCounter + 1
# determining, if already a whole interval (according to peakPeriodicity) has been checked
if intervalSecond == peakPeriodicity - 1:
# only consider the interval, if at least one transmission appeared within it
if transmissionCounter > 0:
intervalCountList.append(transmissionCounter)
# resetting both the (inter-) intervalSecond as well as the transmissionCounter
intervalSecond = 0
transmissionCounter = 0
intervalSecond = intervalSecond + 1
# keep the node, if at least the specified percentage of intervals were checked back positively for transmissions
if len(intervalCountList) > MINIMUM_INTERVAL_PERCENTAGE * peakFrequencyX:
print('Node ' + node + ' has been verified to be transmitting regularly all '
+ str(peakPeriodicity) + ' seconds.')
keptNodesMethodMostafa[node] = remainderMethodTimo[node]
register_periodicity(peakPeriodicity)
else:
print('Failing direct method: ' + node + ' (reason: intervals/peakFrequency-ratio too low.)')
remainderMethodMostafa[node] = remainderMethodTimo[node]
# printing the number of found end devices in the area
print("\n# of found suitable end devices in the defined area: " + str(len(keptNodesMethodTimo)
+ len(keptNodesMethodMostafa) + len(keptNodesMethodMachineLearning)))
print("Coordinates of determined nodes (in terms of CH1903 Coordinate Reference System):")
# iterating over keptNodesMethodTimo, converting coordinates to epsg:21781-projection
print('\nConsiderable nodes sending most frequently at one peak periodicity (Sine method):')
for node in keptNodesMethodTimo:
lon = keptNodesMethodTimo[node].__getitem__(0).__getattribute__('lon')
lat = keptNodesMethodTimo[node].__getitem__(0).__getattribute__('lat')
x, y = proj.transform(proj_WGS84, proj_CH1903, lon, lat)
x, y = x - offsetX, y - offsetY
print('node \"' + node + ' X: ' + str(x) + ', Y: ' + str(y) + ". No. of packets: " +
str(len(keptNodesMethodTimo[node])))
print('\nConsiderable nodes sending frequently at several periodicities: (Direct method): ')
for node in keptNodesMethodMostafa:
lon = keptNodesMethodMostafa[node].__getitem__(0).__getattribute__('lon')
lat = keptNodesMethodMostafa[node].__getitem__(0).__getattribute__('lat')
x, y = proj.transform(proj_WGS84, proj_CH1903, lon, lat)
x, y = x - offsetX, y - offsetY
print('node \"' + node + '\": X: ' + str(x) + ', Y: ' + str(y) + ". No. of packets: " +
str(len(keptNodesMethodMostafa[node])))
# plotting the periodicity distribution
plot.plot(periodicityDistribution)
plot.title("periodicityDistribution")
plot.xlabel("periodicities (2 h to 2 weeks), one hour in between two succeeding indices")
plot.ylabel("number of end devices per periodicity-hour")
plot.show()
# stopping the timer:
time_stop = time.perf_counter()
# printing the execution time
print("\n\nexecution time: " + str(time_stop - time_start))
|
# -*- coding: utf-8 -*-
import os
from django.test import TestCase
from accounts.factories import CustomUserFactory
from projects.factories import ProjectFactory, ProjectVolumeFactory, ProjectReleaseFactory, ProjectBuildFactory
from projects.models import ProjectVolume
class MarathonAppMixinTest(TestCase):
def test_get_volumes(self):
user = CustomUserFactory()
project = ProjectFactory(user=user)
build = ProjectBuildFactory(project=project)
release = ProjectReleaseFactory(build=build)
self.assertEqual(release.get_volumes(), [])
self.assertEqual(release.container_paths, [])
host_path = "/app/host_path"
container_path = "/app/container_path"
volume = ProjectVolumeFactory(project=project, host_path="")
self.assertEqual(ProjectVolume.objects.count(), 1)
self.assertIn(volume, release.container_paths)
hostpath = os.path.join("/mnt/container-volumes/",
release.get_marathon_app_id(),
volume.container_path.strip('/'))
self.assertEqual(hostpath, release.get_volumes()[0].host_path)
self.assertEqual(ProjectVolume.MODE.RW, release.get_volumes()[0].mode)
volume.delete()
volume = ProjectVolumeFactory(project=project, host_path=host_path, container_path=container_path,
mode=ProjectVolume.MODE.RO)
self.assertEqual(ProjectVolume.objects.count(), 1)
self.assertIn(volume, release.container_paths)
self.assertIn(volume.host_path, release.get_volumes()[0].host_path)
self.assertEqual(ProjectVolume.MODE.RO, release.get_volumes()[0].mode)
|
from django.contrib import admin
# Register your models here.
from .models import *
admin.site.register(Question)
admin.site.register(Choice)
admin.site.register(Address)
admin.site.register(PersonalInfo)
admin.site.register(WorkInfo)
admin.site.register(Insignia)
admin.site.register(Education)
admin.site.register(NewsInHome2)
|
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .models import RunResultsModel, ManifestModel, SourcesModel, CatalogModel
class ArtifactReader:
@property
def run_results_artifact(self) -> "RunResultsModel":
"""A reference to the :class:`RunResults` artifact."""
return self.get_artifact("run_results")
@property
def manifest_artifact(self) -> "ManifestModel":
"""A reference to the :class:`Manifest` artifact."""
return self.get_artifact("manifest")
@property
def catalog_artifact(self) -> "CatalogModel":
"""A reference to the :class:`Catalog` artifact."""
return self.get_artifact("catalog")
@property
def sources_artifact(self) -> "SourcesModel":
"""A reference to the :class:`Sources` artifact."""
return self.get_artifact("sources")
def get_artifact(self, artifact_name):
import artefacts.deserializers
if artefacts.state.exists(artifact_name):
return artefacts.state.get(artifact_name)
else:
Artifact = {
"manifest": artefacts.deserializers.Manifest,
"run_results": artefacts.deserializers.RunResults,
"sources": artefacts.deserializers.Sources,
"catalog": artefacts.deserializers.Catalog,
}.get(artifact_name)
if Artifact is None:
raise AttributeError(f"Invalid artifact name: {artifact_name}")
return Artifact()
class ArtifactNodeReader(ArtifactReader):
@property
def manifest(self):
"""A reference to details about the node contained in the manifest."""
return self.manifest_artifact.resources.get(self.unique_id)
@property
def catalog(self):
"""A reference to details about the node contained in the catalog."""
return self.catalog_artifact.nodes.get(self.unique_id)
@property
def run_results(self):
"""A reference to results from running the node, if it exists."""
return [
r
for r in self.run_results_artifact.results
if r.unique_id == self.unique_id
]
@property
def freshness_check_results(self):
"""A reference to any freshness check result of the node, if it exists."""
return [
r for r in self.sources_artifact.results if r.unique_id == self.unique_id
]
@property
def parents(self):
"""A list of the node's parents"""
return self.manifest_artifact.parent_map[self.unique_id]
@property
def children(self):
"""A list of the node's children"""
return self.manifest_artifact.child_map[self.unique_id]
@property
def tests(self):
"""A list of any tests that reference the node"""
return [t for t in self.children if t.resource_type == "test"]
@property
def snapshots(self):
"""A list of any snapshots that reference the node"""
return [s for s in self.children if s.resource_type == "snapshot"]
@property
def disabled(self):
"""Whether the resource has been disabled"""
return self.unique_id in self.manifest_artifact.disabled
|
#!/usr/bin/python3
#-*- coding: utf-8 -*-
import serial
try:
from geopy.geocoders import Nominatim
geo=True
except:
geo=False
class GpsNeo6():
"""
    Management class for the NEO-6M GPS SoC
"""
def __init__(self,port,debit=9600,diff=1):
"""
        Initialize the attributes from:
        port: serial (COM) port
        debit: baud rate
        diff: offset in hours between local time and UTC
"""
self.port=serial.Serial(port,debit)
self.diff=diff
self.tabCode=["GPVTG","GPGGA","GPGSA","GPGLL","GPRMC","GPGSV"]
self.vitesse=""
self.latitude=""
self.longitude=""
self.latitudeDeg=""
self.longitudeDeg=""
self.time=""
self.altitude=""
self.precision=""
self.satellite=""
self.geoloc=Nominatim()
def __del__(self):
"""
        Close the serial port when the object is destroyed
"""
self.port.close()
def __repr__(self):
"""
        Build the human-readable display of the GPS information
"""
rep="heure: "+str(self.time)+"\rlatitude: "+str(self.latitude) \
+"\rlongitude: "+str(self.longitude)+"\rvitesse: "+str(self.vitesse)+" km/h" \
+"\raltitude: "+str(self.altitude)+" metre(s)"+"\rprecision: "+str(self.precision)+" metre(s)" \
+"\rNombre de satelites vue: "+str(self.satellite)
if geo:
rep+="\rlieu : "+self.geolocation()
return rep
def recupData(self):
"""
        Read the raw NMEA data from the serial port
"""
l='->'
ligne=""
tab={}
gp=[]
while len(tab)<6:
l=self.port.read(2)
if b'\r' in l or b'\n' in l:
l=''
for i in self.tabCode:
if i in ligne:
if i=="GPGSV":
gp.append(ligne)
tab["GPGSV"]=gp
else:
tab[i]=ligne
gp=[]
ligne=""
else:
try:
ligne+=str(l.decode().strip())
except: pass
return tab
def degToDec(self,deg):
"""
fonction de tronsformation des coordonees en degre vers les degre decimals
"""
dec=int(deg[0:deg.find(".")-2])
min=int(deg[deg.find(".")-2:deg.find(".")])/60
sec=float("0."+deg[deg.find(".")+1:])/36
return round(dec+min+sec,10)
def traite(self):
"""
on traite les donnes pour les mettres en formes
"""
donnees=self.recupData()
data=donnees["GPGGA"]
data=data.split(',')
temps=str(int(data[1][0:2])+self.diff)+"h"+data[1][2:4]+"m"+data[1][4:6]+"s" #mets en forme la date avec le decalage de l'heure
self.time=temps
self.latitude=self.degToDec(data[2]) #mets au format decimale xx.yyyyyy
self.latitudeDeg=float(data[2])/100#+data[3]
self.longitude=self.degToDec(data[4]) #mets au format decimale xx.yyyyyy
self.longitudeDeg=float(data[4])/100#+data[5]
self.altitude=data[9]
self.precision=data[6]
self.vitesse=self.traiteGPVTG(donnees["GPVTG"]) #recupere que la vitesse de deplacement
self.satellite=int(donnees["GPGSV"][0].split(',')[3]) #recupere le nombre de satellite vue
def traiteGPVTG(self,data):
"""
        Extract the ground speed from the GPVTG sentence
"""
data=data.split(',')
return data[7]
def geolocation(self):
"""
        Reverse-geocode the coordinates when geopy is available
"""
if geo:
try:
location = self.geoloc.reverse(str(self.latitude)+", "+str(self.longitude))
return str(location)
except: return "Le Néant"
else: return "le Néant"
if __name__=="__main__":
    # set the port, the baud rate and the offset between UTC and local time
gps=GpsNeo6(port="/dev/ttyUSB0",debit=9600,diff=6)
while True:
        # parse one round of GPS data
gps.traite()
        # print the information
print(gps)
## print(gps.latitude, gps.longitude)
|
'''Write a program that reads two values and shows a menu:
[1] - add, [2] - multiply, [3] - largest, [4] - new numbers, [5] - quit'''
print('{:-^40}'.format('CALCULADORA'))
flag = 5
cont = maior = menor = 0
while flag == 5:
num_1 = int(input('Digite o primeiro número: '))
num_2 = int(input('Digite o segundo número: '))
op = int(input(''' -- Menu --
[1] - Somar
[2] - Multiplicar
[3] - Maior
[4] - Novos Números
[5] - Sair do Programa
Digite sua opção: '''))
print('---------------')
if op == 1:
print(f'SOMAR')
print(f'{num_1} + {num_2} = {num_1 + num_2}')
elif op == 2:
print(f'MULTIPLICAR')
print(f'{num_1} x {num_2} = {num_1 * num_2}')
elif op == 3:
print(f'MAIOR')
if cont == 0:
maior = num_1
menor = num_1
if num_2 > maior:
maior = num_2
if num_2 < menor:
menor = num_2
print(f'O maior valor é {maior} e o menor valor é {menor} ')
elif op == 4:
print(f'NOVOS NÚMEROS')
flag = 5
elif op == 5:
print(f'SAINDO')
        flag = 0
else:
print(f'OPERAÇÃO INVALIDA, TENTE NOVAMENTE! ')
print('{:-^40}'.format('FIM'))
|
import logging
import copy
from threading import Thread
from Queue import Queue
class Migrator(object):
def __init__(self, source_registry, artifactory_access, work_queue, workers, overwrite, dir_path):
self.log = logging.getLogger(__name__)
self.source = source_registry
self.target = artifactory_access
self.work_queue = work_queue
self.failure_queue = Queue()
self.skipped_queue = Queue()
self.overwrite = overwrite
self.workers = workers
self.dir_path = dir_path
    def migrate(self):
        '''
        Iterates over the Queue until all images have been uploaded (or have failed to upload)
        '''
for i in range(self.workers):
t = Thread(target=self.__worker, args=(i,))
t.daemon = True
t.start()
self.work_queue.join()
    def __worker(self, idx):
        '''
        Consumes image/tags that need to be uploaded from Queue until Queue is empty
        Builds shared list of failed entries
        @param idx - The index (or ID) of this worker. Should be unique across all concurrent workers.
        '''
# The endpoint resources are not thread safe, make deep copies
source = copy.deepcopy(self.source)
target = copy.deepcopy(self.target)
while True:
image, tag = self.work_queue.get()
failure = True
try:
if self.overwrite or not target.image_exists(image, tag):
failure = not self.__upload_image(source, target, image, tag, idx)
else: # Image already exists and we should not overwrite it
failure = False
self.skipped_queue.put((image, tag))
except Exception as ex:
self.log.error("Upload of %s/%s failed." % (image, tag))
if failure:
self.failure_queue.put((image, tag))
self.work_queue.task_done()
    def __upload_image(self, source, target, image, tag, idx):
        '''
        Attempts to upload the specified image from the source to the target
        @source - The source registry
        @target - The target Artifactory instance
        @image - The image name
        @tag - The tag name
        '''
self.log.info("Uploading image %s/%s..." % (image, tag))
layer_file = "%s/layer%d.out" % (self.dir_path, idx)
manifest_file = "%s/manifest%d.json" % (self.dir_path, idx)
# Get the manifest
if source.download_manifest(image, tag, manifest_file):
# Read in all the layers and try to deploy them
type, layers = source.interpret_manifest(manifest_file)
for layer in layers:
sha2 = layer.replace('sha256:', '')
# Try to perform a sha2 checksum deploy to avoid downloading the layer from source
if not target.checksum_deploy_sha2(image, tag, sha2):
# Sha2 checksum failed, download the file
sha1 = source.download_layer(image, layer, layer_file)
if sha1:
# Try a sha1 checksum deploy to avoid upload to target
if not target.checksum_deploy_sha1(image, tag, sha2, sha1):
# All checksum deploys failed, perform an actual upload
if not target.upload_layer(image, tag, sha2, layer_file):
self.log.error("Unable to upload layer %s for %s/%s" % (layer, image, tag))
return False
else:
self.log.error("Unable to get layer %s for %s/%s..." % (layer, image, tag))
return False
# Finished uploading all layers, upload the manifest
if not target.upload_manifest(image, tag, type, manifest_file):
self.log.error("Unable to deploy manifest for %s/%s..." % (image, tag))
return False
return True
else:
self.log.error("Unable to get manifest for %s/%s..." % (image, tag))
return False
def get_failure_queue(self):
return self.failure_queue
def get_skipped_queue(self):
return self.skipped_queue
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from dl_classification_model import config,predict
from dl_classification_model import __version__ as _version
def test_single_make_prediction():
""" Test make_prediction function for a single prediction """
# Given
dataset_file_path = config.DATASET_DIR/config.TESTING_DATA_FILE
test_data = pd.read_csv(dataset_file_path)
single_row = test_data.iloc[:1,:]
# the make_prediction function is expecting a dict
single_row_dict = dict(single_row)
# When
subject = predict.make_prediction(single_row_dict)
assert subject.get("predictions")[0] in ["functional","functional","non functional or functional needs repair"]
assert type(subject.get("predictions")) == np.ndarray
assert subject.get("predictions").shape == (1,1)
assert subject.get("version") == _version
def test_multiple_make_prediction():
""" Test make_prediction function for multiple prediction """
# Given
dataset_file_path = config.DATASET_DIR/config.TESTING_DATA_FILE
test_data = pd.read_csv(dataset_file_path)
multiple_row = test_data
# the make_prediction function is expecting a dict
multiple_row_dict = dict(multiple_row)
# When
subject = predict.make_prediction(multiple_row_dict)
assert subject.get("predictions")[0] in ["functional","functional","non functional or functional needs repair"]
assert type(subject.get("predictions")) == np.ndarray
assert subject.get("predictions").shape == (test_data.shape[0],1)
assert subject.get("version") == _version
|
# coding=utf-8
from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut
from abc import abstractmethod
from OTLMOW.OTLModel.Classes.Detectie import Detectie
from OTLMOW.OTLModel.Datatypes.DtcAfmetingBxlInM import DtcAfmetingBxlInM
from OTLMOW.OTLModel.Datatypes.DtcTijdsduur import DtcTijdsduur
# Generated with OTLClassCreator. To modify: extend, do not edit
class Detectielus(Detectie):
"""Abstracte voor een detectielus. Een detectielus is een kabel onder het wegdek die in staat is om voertuigen te detecteren teneinde de verkeersregelaar aan te sturen. Selectieve lussen zijn in staat om gecodeerde informatie door te geven van prioritaire voertuigen, niet-selectieve lussen geven informatie door van alle voertuigen die het detectie gebied passeren."""
typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Detectielus'
"""De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
@abstractmethod
def __init__(self):
super().__init__()
self._afmetingenBL = OTLAttribuut(field=DtcAfmetingBxlInM,
naam='afmetingenBL',
label='afmetingen b l',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Detectielus.afmetingenBL',
definition='Afmetingen breedte x lengte van de lus.',
owner=self)
self._bewakingstijd = OTLAttribuut(field=DtcTijdsduur,
naam='bewakingstijd',
label='bewakingstijd',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Detectielus.bewakingstijd',
definition='Wachttijd (in uren) waarna een alarm pas mag optreden.',
owner=self)
@property
def afmetingenBL(self):
"""Afmetingen breedte x lengte van de lus."""
return self._afmetingenBL.get_waarde()
@afmetingenBL.setter
def afmetingenBL(self, value):
self._afmetingenBL.set_waarde(value, owner=self)
@property
def bewakingstijd(self):
"""Wachttijd (in uren) waarna een alarm pas mag optreden."""
return self._bewakingstijd.get_waarde()
@bewakingstijd.setter
def bewakingstijd(self, value):
self._bewakingstijd.set_waarde(value, owner=self)
|
"""
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from extensions.middle.InsertLayoutPropagationTransposes import mark_as_correct_data_layout, \
mark_input_as_in_correct_layout, mark_output_as_in_correct_layout
from extensions.ops.transpose import Transpose
from mo.front.common.partial_infer.utils import int64_array
from mo.graph.graph import Graph
from mo.middle.replacement import MiddleReplacementPattern
from mo.ops.const import Const
from mo.ops.reshape import Reshape
class SpaceToDepth(MiddleReplacementPattern):
"""
Replaces SpaceToDepth with 6D_Reshape->Transpose->4D_Reshape sequence
"""
enabled = True
graph_condition = [lambda graph: not graph.graph['cmd_params'].generate_experimental_IR_V10]
def run_after(self):
from extensions.middle.pass_separator import MiddleStart
return [MiddleStart]
def run_before(self):
from extensions.middle.pass_separator import MiddleFinish
return [MiddleFinish]
def pattern(self):
return dict(
nodes=[
('in_data', dict(kind='data')),
('op', dict(op='SpaceToDepth', data_format='NHWC')),
('out_data', dict(kind='data'))
],
edges=[
('in_data', 'op'),
('op', 'out_data')
])
def replace_pattern(self, graph: Graph, match: dict):
node = match['op']
N, H, W, C = match['in_data'].shape
block_size = node['block_size']
graph.remove_edge(match['in_data'].id, node.id)
graph.remove_edge(node.id, match['out_data'].id)
dim_6D = int64_array([N, C, int(H / block_size), block_size, int(W / block_size), block_size])
order_6D = int64_array([0, 3, 5, 1, 2, 4])
dim_4D = int64_array([N, int(H / block_size), int(W / block_size), int(C * (block_size ** 2))])
reshape_6_op = Reshape(graph, dict(name=node.id + '/Reshape_to_6D'))
reshape_6_const_data = Const(graph, dict(value=dim_6D)).create_node_with_data()
reshape_6_data_node = reshape_6_op.create_node_with_data([match['in_data'], reshape_6_const_data])
mark_as_correct_data_layout(reshape_6_data_node.in_node(0))
order_const_data = Const(graph, dict(value=order_6D)).create_node_with_data()
transpose_op = Transpose(graph, dict(name=node.id + '/Transpose'))
transpose_data_node = transpose_op.create_node_with_data([reshape_6_data_node, order_const_data])
mark_as_correct_data_layout(transpose_data_node.in_node(0))
reshape_4_op = Reshape(graph, dict(name=node.id + '/Reshape_to_4D'))
reshape_4_const_data = Const(graph, dict(value=dim_4D)).create_node_with_data()
reshape_4_data_node = reshape_4_op.create_node_with_data([transpose_data_node, reshape_4_const_data],
data_nodes=[match['out_data']])
mark_input_as_in_correct_layout(reshape_4_data_node.in_node(0), 0)
mark_output_as_in_correct_layout(reshape_4_data_node.in_node(0), 0)
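# A NumPy-only sketch of the space-to-depth rearrangement that this pass expresses as
# Reshape -> Transpose -> Reshape. It works directly on an NHWC array and only
# illustrates the equivalence; it does not reproduce the layout bookkeeping performed
# by the Model Optimizer pass above.
import numpy as np

def _space_to_depth_nhwc(x, block_size):
    n, h, w, c = x.shape
    x = x.reshape(n, h // block_size, block_size, w // block_size, block_size, c)
    x = x.transpose(0, 1, 3, 2, 4, 5)
    return x.reshape(n, h // block_size, w // block_size, c * block_size ** 2)

assert _space_to_depth_nhwc(np.zeros((1, 4, 6, 3)), 2).shape == (1, 2, 3, 12)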
|
# Write a program for a paint shop. The program must ask for the size, in square metres, of the area
# to be painted. Assume that one litre of paint covers 3 square metres and that the paint is sold in
# 18-litre cans costing R$ 80.00. Tell the user how many cans of paint must be bought and the total price.
class CalculadorDeTinta:
def __init__(self, area_c):
        self.area_c = area_c # square metres
        self.lata_volume = 18 # litres
        self.preco = 80.00 # reais (R$)
def cobertura_por_litro(self):
lata = 0
litros_de_tinta = self.area_c / 3
unidade_lata = litros_de_tinta / 18
while unidade_lata > 0:
lata += 1
unidade_lata -= 1
custo = lata * self.preco
return f'Em {self.area_c} metros quadrados são necessarias {lata} latas de tinta, com o preço de {custo} Reais'
if __name__ == '__main__':
comprador = CalculadorDeTinta(120)
print(comprador.cobertura_por_litro())
comprador = CalculadorDeTinta(26)
print(comprador.cobertura_por_litro())
comprador = CalculadorDeTinta(9)
print(comprador.cobertura_por_litro())
comprador = CalculadorDeTinta(580)
print(comprador.cobertura_por_litro())
comprador = CalculadorDeTinta(70)
print(comprador.cobertura_por_litro())
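    # Worked example of the arithmetic used above (the 120 m2 case): 120 / 3 = 40 litres
    # of paint; 40 / 18 = 2.22... cans, rounded up to 3 whole cans; 3 * R$ 80.00 = R$ 240.00.
    # Equivalently, one 18-litre can covers 18 * 3 = 54 m2, which is why the corrected
    # solution below divides the area by 54.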
    area = float(input('Qual a area a ser pintada? (em metros quadrados): ')) # correct answer, corrected from the internet
    quantidade_de_latas = area // 54 # one can covers 54 square metres
if area % 54 != 0:
quantidade_de_latas += 1
preco = quantidade_de_latas * 80
print('A quantidade de latas a serem compradas é', quantidade_de_latas)
print('e o valor a ser pago será', preco)
|
# Generated by Django 2.1.9 on 2019-08-27 08:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('resources', '0079_reservation_extra_questions'),
]
operations = [
migrations.AlterModelOptions(
name='resourcegroup',
options={'ordering': ('name',), 'permissions': [('group:can_approve_reservation', 'Can approve reservation'), ('group:can_make_reservations', 'Can make reservations'), ('group:can_modify_reservations', 'Can modify reservations'), ('group:can_ignore_opening_hours', 'Can make reservations outside opening hours'), ('group:can_view_reservation_access_code', 'Can view reservation access code'), ('group:can_view_reservation_extra_fields', 'Can view reservation extra fields'), ('group:can_access_reservation_comments', 'Can access reservation comments'), ('group:can_view_reservation_catering_orders', 'Can view reservation catering orders'), ('group:can_modify_reservation_catering_orders', 'Can modify reservation catering orders'), ('group:can_view_reservation_product_orders', 'Can view reservation product orders'), ('group:can_modify_paid_reservations', 'Can modify paid reservations')], 'verbose_name': 'Resource group', 'verbose_name_plural': 'Resource groups'},
),
migrations.AlterModelOptions(
name='unit',
options={'ordering': ('name',), 'permissions': [('unit:can_approve_reservation', 'Can approve reservation'), ('unit:can_make_reservations', 'Can make reservations'), ('unit:can_modify_reservations', 'Can modify reservations'), ('unit:can_ignore_opening_hours', 'Can make reservations outside opening hours'), ('unit:can_view_reservation_access_code', 'Can view reservation access code'), ('unit:can_view_reservation_extra_fields', 'Can view reservation extra fields'), ('unit:can_access_reservation_comments', 'Can access reservation comments'), ('unit:can_view_reservation_catering_orders', 'Can view reservation catering orders'), ('unit:can_modify_reservation_catering_orders', 'Can modify reservation catering orders'), ('unit:can_view_reservation_product_orders', 'Can view reservation product orders'), ('unit:can_modify_paid_reservations', 'Can modify paid reservations')], 'verbose_name': 'unit', 'verbose_name_plural': 'units'},
),
migrations.AddField(
model_name='reservation',
name='billing_email_address',
field=models.EmailField(blank=True, max_length=254, verbose_name='Billing email address'),
),
migrations.AddField(
model_name='reservation',
name='billing_first_name',
field=models.CharField(blank=True, max_length=100, verbose_name='Billing first name'),
),
migrations.AddField(
model_name='reservation',
name='billing_last_name',
field=models.CharField(blank=True, max_length=100, verbose_name='Billing last name'),
),
migrations.AddField(
model_name='reservation',
name='billing_phone_number',
field=models.CharField(blank=True, max_length=30, verbose_name='Billing phone number'),
),
migrations.AlterField(
model_name='reservation',
name='state',
field=models.CharField(choices=[('created', 'created'), ('cancelled', 'cancelled'), ('confirmed', 'confirmed'), ('denied', 'denied'), ('requested', 'requested'), ('waiting_for_payment', 'waiting for payment')], default='created', max_length=32, verbose_name='State'),
),
]
|
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.client import device_lib
import pre_process_data as pre_data
from keras.callbacks import ModelCheckpoint
import tensorflow_datasets as tfds
import keras
from keras import backend as K
from keras.models import Sequential
from keras.layers import Activation
from keras.layers.core import Dense, Flatten
from keras.optimizers import Adam
from keras.metrics import categorical_crossentropy
from keras.preprocessing.image import ImageDataGenerator
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import *
import time
import os
import logging
tf.get_logger().setLevel(logging.ERROR)
# os.environ["CUDA_VISIBLE_DEVICES"]="0,1,2,3"
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # 3 no TF debug info
tf.debugging.set_log_device_placement(True)
#tf.config.experimental.list_physical_devices('GPU')
# Here I set it default to relu so that our variable became of that type
act_func_global = tf.nn.relu
print(act_func_global)
is_Swish = False
print('is a Swish function: ',is_Swish)
is_SwishBeta = False
print('is a Swish Beta function: ',is_SwishBeta)
is_logging = False
print('is logging enabled: ', is_logging)
b_size=256
print('batch size: ', b_size)
def check_available_GPUS():
local_devices = device_lib.list_local_devices()
gpu_names = [x.name for x in local_devices if x.device_type == 'GPU']
gpu_num = len(gpu_names)
print(f'{gpu_num} GPUs are detected : {gpu_names}')
return gpu_num
def execution_time(model_start_time,model_end_time):
print('Model execution start Time:',round(model_start_time,0))
print('Model execution end Time:',round(model_end_time,0))
excn_time= model_end_time - model_start_time
print('Model execution Time:',round(excn_time/60,2),'minutes')
def act_func(x):
dict_act = {'sigmoid': tf.nn.sigmoid(x), 'tanh': tf.nn.tanh(x), 'relu': tf.nn.relu(x), 'leaky_relu': tf.nn.leaky_relu(x),
'swish': tf.nn.sigmoid(x), 'swish_beta': tf.nn.sigmoid(x)}
if act_func_global == 'swish':
# act_func_var = x*tf.nn.sigmoid(x)
return x * keras.backend.sigmoid(x)
if act_func_global == 'swish_beta':
beta = 1.5 # 1, 1.5 or 2
return x * keras.backend.sigmoid(beta * x)
else:
return dict_act[act_func_global]
def set_GPU_Strategy(num_GPU):
gpu_list = ['/gpu:'+str(i) for i in range(0, num_GPU) ]
print("Available GPUs: ", tf.config.experimental.list_physical_devices('GPU'))
print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
try:
# Currently, memory growth needs to be the same across GPUs
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, False)
logical_gpus = tf.config.experimental.list_logical_devices('GPU')
print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
except RuntimeError as e:
# Memory growth must be set before GPUs have been initialized
print(e)
# strategy = tf.contrib.distribute.MirroredStrategy(num_gpus=num_GPU)
# TF 2.0
print('GPUs will be used in training:', gpu_list)
strategy = tf.distribute.MirroredStrategy(gpu_list)
# run_config = tf.estimator.RunConfig(train_distribute=strategy)
return strategy
def model_pipeline(act_function, Islog: bool, num_GPU):
global act_func_global
act_func_global = act_function
print('----------------- Model Run ---------------')
# ----------------- Model Run ---------------
model_start_time = time.time()
#check_available_GPUS()
run_config_strategy = set_GPU_Strategy(num_GPU)
print('GPU Run Strategy:',run_config_strategy)
print('----------------- GET DATA -----------------')
# ----------------- Dataset -----------------
train_data, train_labels, eval_data, eval_labels = pre_data.pre_process()
train_dataset = tf.data.Dataset.from_tensor_slices((train_data, train_labels)).cache().batch(b_size)
    eval_dataset = tf.data.Dataset.from_tensor_slices((eval_data, eval_labels)).cache().batch(b_size)
# Open a strategy scope. Everything that creates variables should be under the strategy scope.
# In general this is only model construction & `compile()`
print('----------------- Model Create ---------------')
# ----------------- Model Run ---------------
with run_config_strategy.scope():
model = tf.keras.Sequential([
tf.keras.layers.Conv2D(32, 3, activation=act_func, input_shape=(128, 128, 3)),
tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=2),
tf.keras.layers.BatchNormalization(epsilon=0.001),
tf.keras.layers.Conv2D(64, 3, activation=act_func),
tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=1),
tf.keras.layers.LayerNormalization(epsilon=0.001),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(128, activation=act_func),
tf.keras.layers.Dense(6)
])
# model = tf.keras.Sequential()
# # 1st convolution layer
# model.add(tf.keras.layers.Conv2D(32, (3, 3),
# activation=act_func,
# input_shape=(64, 64, 3)))
# model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=2))
# model.add(tf.keras.layers.LayerNormalization(epsilon=0.001))
# # 2nd convolution layer
# model.add(tf.keras.layers.Conv2D(64, (3, 3),
# activation=act_func))
# model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=1))
# model.add(tf.keras.layers.BatchNormalization(epsilon=0.001))
#
# model.add(tf.keras.layers.Flatten())
# # Fully connected layer. 1 hidden layer consisting of 512 nodes
# model.add(tf.keras.layers.Dense(128, activation=act_func))
# model.add(tf.keras.layers.Dense(6, activation='softmax'))
model.compile(loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
optimizer=tf.keras.optimizers.Adam(),
metrics=['accuracy'])
# Define the checkpoint directory to store the checkpoints
checkpoint_dir = './training_checkpoints'
# Name of the checkpoint files
checkpoint_prefix = os.path.join(checkpoint_dir, "ckpt_{epoch}")
print("checkpoint_prefix: ", checkpoint_prefix)
# Function for decaying the learning rate.
# You can define any decay function you need.
def decay(epoch):
if epoch < 3:
return 1e-3
elif 3 <= epoch < 7:
return 1e-4
else:
return 1e-5
# Callback for printing the LR at the end of each epoch.
class PrintLR(tf.keras.callbacks.Callback):
def on_epoch_end(self, epoch, logs=None):
print('\nLearning rate for epoch {} is {}'.format(epoch + 1, model.optimizer.lr.numpy()))
callbacks = [
# tf.keras.callbacks.TensorBoard(log_dir='./logs'),
# tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_prefix,
# save_weights_only=True),
tf.keras.callbacks.LearningRateScheduler(decay),
PrintLR()
]
print('------------------- train ------------------- ')
model.fit(train_dataset, epochs=12, callbacks=callbacks)
print('------------------- Evaluate ------------------- ')
# model.load_weights(tf.train.latest_checkpoint(checkpoint_dir))
eval_loss, eval_acc = model.evaluate(eval_dataset)
print('\n\nEval loss: {}, Eval Accuracy: {}'.format(eval_loss, eval_acc))
print('Number of GPUs:', num_GPU)
model_end_time = time.time()
execution_time(model_start_time, model_end_time)
""" Parameters: activation function, logging op, Number of GPU(Discovery 4 p100 available) """
model_pipeline('relu', True, 4)
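# Minimal NumPy sketch of the activation formulas selected in act_func() above:
# swish(x) = x * sigmoid(x) and swish_beta(x) = x * sigmoid(beta * x) with beta = 1.5.
# This only illustrates the math; it is not wired into the model.
import numpy as np

def _np_sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

_x = np.array([-2.0, 0.0, 2.0])
print('swish example:', _x * _np_sigmoid(_x))
print('swish_beta example:', _x * _np_sigmoid(1.5 * _x))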
|
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public API surface is re-exported here.
Users should not load files under "/internal"
"""
load("//internal/common:check_bazel_version.bzl", _check_bazel_version = "check_bazel_version")
load("//internal/node:node.bzl",
_nodejs_binary = "nodejs_binary_macro",
_nodejs_test = "nodejs_test_macro")
load("//internal/node:node_repositories.bzl", _node_repositories = "node_repositories")
load("//internal/jasmine_node_test:jasmine_node_test.bzl", _jasmine_node_test = "jasmine_node_test")
load("//internal/npm_install:npm_install.bzl", _npm_install = "npm_install")
load("//internal/yarn_install:yarn_install.bzl", _yarn_install = "yarn_install")
load("//internal/rollup:rollup_bundle.bzl", _rollup_bundle = "rollup_bundle")
load("//internal/npm_package:npm_package.bzl", _npm_package = "npm_package")
check_bazel_version = _check_bazel_version
nodejs_binary = _nodejs_binary
nodejs_test = _nodejs_test
node_repositories = _node_repositories
jasmine_node_test = _jasmine_node_test
npm_install = _npm_install
yarn_install = _yarn_install
rollup_bundle = _rollup_bundle
npm_package = _npm_package
def node_modules_filegroup(packages, **kwargs):
native.filegroup(
srcs = native.glob(["/".join([
"node_modules",
pkg,
"**",
ext,
]) for pkg in packages for ext in [
"*.js",
"*.json",
"*.d.ts",
]]),
**kwargs
)
|
import hypothesis.extra.numpy as hnp
import hypothesis.strategies as st
import numpy as np
from hypothesis import given
from numpy.testing import assert_allclose
from pytest import raises
from mygrad._utils import reduce_broadcast
from tests.custom_strategies import broadcastable_shape
def test_bad_gradient_dimensionality():
""" test that grad.dim < len(var_shape) raises ValueError"""
var_shape = (1, 2, 3)
grad = np.empty((1, 2))
with raises(ValueError):
reduce_broadcast(grad=grad, var_shape=var_shape)
@given(
grad=hnp.arrays(
dtype=float, shape=hnp.array_shapes(), elements=st.floats(-100, 100)
)
)
def test_broadcast_scalar(grad):
""" test when grad was broadcasted from a scalar"""
assert_allclose(reduce_broadcast(grad, tuple()), grad.sum())
@given(
grad=hnp.arrays(
dtype=float, shape=hnp.array_shapes(), elements=st.floats(-100, 100)
)
)
def test_reduce_broadcast_same_shape(grad):
""" test when no broadcasting occurred"""
var_shape = grad.shape
reduced_grad = reduce_broadcast(grad=grad, var_shape=var_shape)
assert_allclose(actual=reduced_grad, desired=grad)
@given(var_shape=hnp.array_shapes(min_side=2), data=st.data())
def test_reduce_broadcast_nokeepdim(var_shape, data):
""" example broadcasting: (2, 3) -> (5, 2, 3)"""
grad_shape = data.draw(
broadcastable_shape(
shape=var_shape,
min_dim=len(var_shape) + 1,
max_dim=len(var_shape) + 3,
allow_singleton=False,
),
label="grad_shape",
)
grad = np.ones(grad_shape, dtype=float)
reduced_grad = reduce_broadcast(grad=grad, var_shape=var_shape)
reduced_grad *= (
np.prod(var_shape) / grad.size
) # scale reduced-grad so all elements are 1
assert_allclose(actual=reduced_grad, desired=np.ones(var_shape))
@given(var_shape=hnp.array_shapes(), data=st.data())
def test_reduce_broadcast_keepdim(var_shape, data):
""" example broadcasting: (2, 1, 4) -> (2, 5, 4)"""
grad = data.draw(
hnp.arrays(
dtype=float,
shape=broadcastable_shape(
shape=var_shape, min_dim=len(var_shape), max_dim=len(var_shape)
),
elements=st.just(1.0),
),
label="grad",
)
reduced_grad = reduce_broadcast(grad=grad, var_shape=var_shape)
assert reduced_grad.shape == tuple(
i if i < j else j for i, j in zip(var_shape, grad.shape)
)
    assert all(i == 1 for i, j in zip(var_shape, grad.shape) if i < j)
sum_axes = tuple(n for n, (i, j) in enumerate(zip(var_shape, grad.shape)) if i != j)
assert_allclose(actual=reduced_grad, desired=grad.sum(axis=sum_axes, keepdims=True))
@given(
grad=hnp.arrays(dtype=float, shape=(5, 3, 4, 2), elements=st.floats(-0.01, 0.01))
)
def test_hybrid_broadcasting(grad):
""" tests new-dim and keep-dim broadcasting
(3, 1, 2) -> (5, 3, 4, 2)"""
var_shape = (3, 1, 2)
reduced = reduce_broadcast(grad=grad, var_shape=var_shape)
answer = grad.sum(axis=0).sum(axis=-2, keepdims=True)
assert_allclose(actual=reduced, desired=answer)
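# A NumPy-only sketch (assumed semantics) of what "reducing a broadcasted gradient"
# means in the tests above; it mirrors the expected behaviour without calling
# reduce_broadcast itself.
def test_numpy_reduction_sketch():
    grad = np.ones((5, 3, 4, 2))  # gradient broadcast up from var_shape == (3, 1, 2)
    reduced = grad.sum(axis=0).sum(axis=1, keepdims=True)  # drop the new axis, collapse the size-1 axis
    assert reduced.shape == (3, 1, 2)
    assert_allclose(reduced, np.full((3, 1, 2), 20.0))  # 5 * 4 broadcast copies were summed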
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Command line utility to parse/report on apache log files.
See: https://github.com/bedge/sqalp/blob/master/README.rst
"""
from __future__ import division, print_function, absolute_import
import argparse
import logging
import re
import sys
from collections import OrderedDict
import json
from typing import Dict
from user_agents import parse
import apache_log_parser
import sqlalchemy
from sqlalchemy import create_engine, Column, String, Integer, DateTime, func
from sqlalchemy.orm import sessionmaker
from tabulate import tabulate, tabulate_formats
__version__ = 'unknown'
from sqalp import __version__
__author__ = "Bruce Edge"
__copyright__ = "Bruce Edge"
__license__ = "mit"
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
_logger = logging.getLogger('sqalp')
# https://regex101.com/r/7xVnXr/11
# Custom regex in case we can't use apache_log_parser
# Not Used - for reference only
custom_format = re.compile(
r'(?P<ip>\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\b\s*'
r'(?P<logname>\S+)\s*'
r'(?P<user>\S+)\s*'
r'\[(?P<date>\d{2}/\w{3}/\d{4}):(?P<time>\d{2}:\d{2}:\d{2})\s'
r'(?P<tz>-\d{4})\]\s\"'
r'(?P<verb>(\w+))\s'
r'(?P<request>\S*)\s'
r'(?P<proto>HTTP/1.0)\"\s'
r'(?P<status>\d+)\b\s*'
r'(?P<size>\d+)\b\s'
r'\"(?P<referer>[^\"]+)\"\s'
r'\"(?P<agent>[^\"]+)\"',
re.I | re.X)
class LogMsg(Base):
"""
SqlAlchemy object based on response dict from apache_log_parser.
Ignored elements are commented below, but left for reference.
They can be added by uncommenting and providing a data type.
"""
__tablename__ = 'logs'
id: int = Column(Integer, primary_key=True)
# Elements mapped directly from apache_log_parser output
remote_host: str = Column(String())
remote_logname: str = Column(String())
remote_user: str = Column(String())
# request_first_line
request_header_referer: str = Column(String())
request_header_user_agent: str = Column(String())
request_http_ver: str = Column(String())
request_method: str = Column(String())
request_url: str = Column(String())
# request_url_fragment
request_url_hostname: str = Column(String(), nullable=True)
request_url_netloc: str = Column(String(), nullable=True)
request_url_password: str = Column(String(), nullable=True)
request_url_path: str = Column(String())
request_url_port: str = Column(String(), nullable=True)
request_url_query: str = Column(String(), nullable=True)
# request_url_query_dict
# request_url_query_list
# request_url_query_simple_dict
# request_url_scheme
request_url_username: str = Column(String(), nullable=True)
response_bytes_clf: int = Column(Integer)
status: int = Column(Integer)
# time_received
time_received_datetimeobj: DateTime = Column(DateTime)
# time_received_isoformat
# time_received_tz_datetimeobj
# time_received_tz_isoformat
# time_received_utc_datetimeobj
# time_received_utc_isoformat
# Additional elements added for convenience.
# Use string here to avoid SQL 'must use datetime' restrictions
time_received_date: str = Column(String)
# These are parsed out separately to take advantage of UA package
user_agent: str = Column(String)
operating_system: str = Column(String)
def __init__(self, os_approx, **kwargs):
keep_kwargs = {k: v for k, v in kwargs.items() if k in logmsg_columns}
date_str = kwargs['time_received_datetimeobj'].strftime('%Y-%m-%d')
ua = parse(kwargs['request_header_user_agent'])
user_agent = ua.browser.family
operating_system = ua.os.family
if os_approx:
operating_system = operating_system.split()[0]
super(LogMsg, self).__init__(
**keep_kwargs, **{'time_received_date': date_str,
'user_agent': user_agent,
'operating_system': operating_system})
def __repr__(self):
return f'<LogMsg(remote_host={self.remote_host}, ' \
f'ua={self.request_header_user_agent}, ' \
f'status={self.status}, ' \
f'len={self.response_bytes_clf}>'
# Use this to filter out unwanted dict elements
logmsg_columns = [_ for _ in LogMsg.__dict__.keys() if not _.startswith('_')]
# Well known log formats for apache logs
known_formats = {
"common": "%h %l %u %t \"%r\" %>s %b",
"combined": "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\""
}
def file_import(session, input, parser, os_approx):
# type: (Session, TextIO, Callable, bool) -> Tuple[int, int]
"""
Read in log data
"""
bad_msg_count = 0
msg_count = 0
for log_msg in input.readlines():
try:
msg_dict: Dict = parser(log_msg)
lm: LogMsg = LogMsg(os_approx, **msg_dict)
session.add(lm)
msg_count += 1
except (KeyError, ValueError) as ex:
bad_msg_count += 1
_logger.info(f'Parse failed: {ex} for log message: {log_msg}.')
try:
session.commit()
except sqlalchemy.exc.OperationalError as ex:
_logger.error(f'Commit failed: {ex} for {msg_count} messages.')
_logger.info(f'Unparseable message count: {bad_msg_count}.')
_logger.debug(f'Messages read: {msg_count}.')
return msg_count, bad_msg_count
def get_session(loglevel):
# type: (int) -> Session
try:
echo = loglevel >= logging.WARNING
except TypeError:
echo = False
engine = create_engine('sqlite:///:memory:', echo=echo)
Base.metadata.create_all(engine)
session_factory = sessionmaker(bind=engine)
return session_factory()
def output(data, output_format):
# type: (OrderedDict, str) -> None
if output_format == 'json':
print(json.dumps(data))
else:
print(tabulate(data, headers="keys", tablefmt=output_format))
def get_by_date(session):
# type: (Session) -> OrderedDict
results = OrderedDict()
for day in session.query(func.distinct(LogMsg.time_received_date)).all():
day = day[0]
day_count = session.query(func.count()).filter(
LogMsg.time_received_date == day).one()[0]
results[day] = [day_count]
_logger.debug(f'results: {results}')
return results
def get_by_date_by_ua(session):
# type: (Session) -> OrderedDict
results = OrderedDict()
for day in session.query(func.distinct(LogMsg.time_received_date)).all():
day = day[0]
day_counts = [[_[0], _[1]] for _ in
session.query(LogMsg.user_agent, func.count('*')).filter(
LogMsg.time_received_date == day).group_by(
LogMsg.user_agent).all()]
results[day] = sorted(day_counts, key=lambda x: x[1], reverse=True)[:3]
_logger.debug(f'results: {results}')
return results
def get_by_date_verb_ratio(session):
# type: (Session) -> OrderedDict
results = OrderedDict()
for day in session.query(func.distinct(LogMsg.time_received_date)).all():
day: str = day[0]
day_counter = OrderedDict()
for method, os, count in session.query(
LogMsg.request_method, LogMsg.operating_system,
func.count('*')) \
.filter(LogMsg.time_received_date == day) \
.group_by(LogMsg.operating_system,
LogMsg.request_method).all():
try:
day_counter[os][method]: int = count
except KeyError:
day_counter[os] = OrderedDict()
day_counter[os][method] = count
results[day] = []
for os in day_counter:
if 'GET' not in day_counter[os].keys():
os_ratio = 0
elif 'POST' not in day_counter[os].keys():
os_ratio = 'NAN'
else:
ratio = float(day_counter[os]["GET"]) / day_counter[os]["POST"]
os_ratio = '{:.4}'.format(ratio)
results[day].append([os, os_ratio])
_logger.debug(f'results: {results}')
return results
def get_parser(parser_string):
# type: (str) -> Callable
return apache_log_parser.make_parser(parser_string)
def parse_args(args: list) -> argparse.Namespace:
"""Parse command line parameters
Args:
args ([str]): command line parameters as list of strings
Returns:
:obj:`argparse.Namespace`: command line parameters namespace
"""
parser = argparse.ArgumentParser(
description='Log file parser')
parser.add_argument(
'--version',
action='version',
version='sqalp {ver}'.format(ver=__version__))
parser.add_argument(
'-i',
'--input',
nargs='?',
help='Files to input, default to stdin',
type=argparse.FileType('r', encoding='UTF-8'),
default=sys.stdin)
input_formats = [_ for _ in known_formats.keys()]
parser.add_argument(
'-f',
'--format',
        help='Input format, '
             'see: https://httpd.apache.org/docs/1.3/logs.html#accesslog',
type=str,
choices=input_formats,
required=True)
parser.add_argument(
'-c',
'--count',
help='Requests per day',
default=False,
action='store_true')
parser.add_argument(
'-u',
'--ua_frequency',
help='User-agent stats by day',
default=False,
action='store_true')
parser.add_argument(
'-r',
'--ratio',
help='Ratio of GET to POST by day by OS',
default=False,
action='store_true')
parser.add_argument(
'-O',
'--os_approximate',
help="Approximate OS to family grouping (Win XP == Win, etc)",
default=False,
action='store_true')
output_formats = tabulate_formats
output_formats.append("json")
parser.add_argument(
'-o',
'--output_format',
help=f'table/output formats, one of {output_formats}',
default='grid',
choices=output_formats,
action='store',
metavar='OUTPUT_FORMAT')
parser.add_argument(
'-v',
'--verbose',
dest='loglevel',
help='set loglevel to INFO',
action='store_const',
const=logging.INFO)
parser.add_argument(
'-vv',
'--very-verbose',
dest='loglevel',
help='set loglevel to DEBUG',
action='store_const',
const=logging.DEBUG)
return parser.parse_args(args)
def setup_logging(loglevel: int) -> None:
"""Setup basic logging
Args:
loglevel (int): minimum loglevel for emitting messages
"""
logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
logging.basicConfig(level=loglevel, stream=sys.stderr,
format=logformat, datefmt="%Y-%m-%d %H:%M:%S")
def main(args):
"""Main entry point allowing external calls
Args:
args ([str]): command line parameter list
"""
args = parse_args(args)
setup_logging(args.loglevel)
_logger.debug("apl start")
# Decide on back-end, default to RAM for now...
session = get_session(args.loglevel)
parser = get_parser(known_formats[args.format])
file_import(session, args.input, parser, args.os_approximate)
if args.count:
data = get_by_date(session)
if args.ua_frequency:
data = get_by_date_by_ua(session)
if args.ratio:
data = get_by_date_verb_ratio(session)
output(data, args.output_format)
_logger.debug("apl stop")
def run():
"""Entry point for console_scripts
"""
main(sys.argv[1:])
if __name__ == "__main__":
run()
|
import sys, getopt, os, json, fnmatch, pprint
#from urllib.request import urlopen, Request
from metacat.util import to_bytes, to_str, TokenLib, epoch
from metacat.webapi import MetaCatClient
import datetime
Usage = """
Usage:
metacat dataset <command> [<options>] ...
Commands and options:
list [<options>] [[<namespace pattern>:]<name pattern>]
-l|--long - detailed output
-c|--file-counts - include file counts if detailed output
create [<options>] <namespace>:<name> [<description>]
-M|--monotonic
-F|--frozen
-m|--metadata '<JSON expression>'
-m|--metadata @<JSON file>
add <parent dataset namespace>:<parent name> <child dataset namespace>:<child name> [<child dataset namespace>:<child name> ...]
remove <parent namespace>:<parent name> <child namespace>:<child name>
show [<options>] <namespace>:<name>
-j|--json - print as JSON
-p|--pprint - Python pprint
update <options> <namespace>:<name> [<description>]
-M|--monotonic (yes|no) - set/reset monotonic flag
        -F|--frozen (yes|no)                - set/reset frozen flag
-r|--replace - replace metadata, otherwise update
-m|--metadata @<JSON file with metadata>
-m|--metadata '<JSON expression>'
"""
def do_list(client, args):
opts, args = getopt.getopt(args, "lc", ["--long", "--file-counts"])
if args:
patterns = args
else:
patterns = ["*"]
opts = dict(opts)
verbose = "-l" in opts or "--long" in opts
include_counts = verbose and ("-c" in opts or "--file-counts" in opts)
output = client.list_datasets(with_file_counts=include_counts)
verbose_format = "%-16s %-19s %4d/%-4d %10s %s"
header_format = "%-16s %-19s %9s %-10s %s"
if verbose:
print(header_format % (
"creator", "created", "prnt/chld", "files", "namespace/name"
))
print("-"*16, "-"*19, "-"*9, "-"*10, "-"*40)
for item in output:
match = False
namespace, name = item["namespace"], item["name"]
for p in patterns:
pns = None
if ":" in p:
pns, pn = p.split(":", 1)
else:
pn = p
if fnmatch.fnmatch(name, pn) and (pns is None or fnmatch.fnmatch(namespace, pns)):
match = True
break
if match:
if verbose:
nparents = len(item.get("parents", []))
nchildren = len(item.get("children", []))
ct = item.get("created_timestamp")
if not ct:
ct = ""
else:
ct = datetime.datetime.fromtimestamp(ct).strftime("%Y-%m-%d %H:%M:%S")
file_count = item.get("file_count")
if file_count is None:
file_count = ""
else:
file_count = str(file_count)
print(verbose_format % (
item.get("creator") or "",
ct,
nparents, nchildren,
file_count,
namespace + ":" + name
))
else:
print("%s:%s" % (namespace, name))
def do_show(client, args):
opts, args = getopt.getopt(args, "pj", ["--pprint=","--json"])
opts = dict(opts)
info = client.get_dataset(args[0])
if "-p" in opts or "--pprint" in opts:
pprint.pprint(info)
elif "-j" in opts or "--json" in opts:
print(json.dumps(info, indent=4, sort_keys=True))
else:
for k, v in sorted(info.items()):
if k == "created_timestamp":
v = "" if not v else datetime.datetime.fromtimestamp(v).strftime("%Y-%m-%d %H:%M:%S")
elif k == "children" or k == "parents":
n = len(v)
if n <= 5:
v = " ".join(v)
else:
v = "(%d) " % (n,) + " ".join(v[:5]) + " ..."
elif k == "metadata":
v = json.dumps(v or {})
print("%-25s: %s" % (k, v))
def do_add(client, args):
parent, children = args[0], args[1:]
for child in children:
client.add_child_dataset(parent, child)
def load_metadata(opts):
    # return None if no -m/--metadata in options
meta = None
if "-m" in opts or "--metadata" in opts:
arg = opts.get("-m") or opts.get("--metadata")
if arg.startswith('@'):
meta = json.load(open(arg[1:], "r"))
else:
meta = json.loads(arg)
return meta
def do_create(client, args):
opts, args = getopt.getopt(args, "FMm:", ["--metadata=","--frozen","--monotonic"])
opts = dict(opts)
dataset_spec, desc = args[0], args[1:]
if desc:
desc = " ".join(desc)
else:
desc = ""
monotonic = "-M" in opts or "--monotonic" in opts
frozen = "-F" in opts or "--frozen" in opts
metadata = load_metadata(opts) or {}
out = client.create_dataset(dataset_spec, monotonic = monotonic, frozen = frozen, description=desc, metadata = metadata)
print(out)
def do_update(client, args):
opts, args = getopt.getopt(args, "rM:F:m:", ["replace"])
opts = dict(opts)
mode = "replace" if ("-r" in opts or "--replace" in opts) else "update"
if not args or args[0] == "help":
print(Usage)
sys.exit(2)
metadata = load_metadata(opts)
dataset = args[0]
monotonic = frozen = None
if "-M" in opts or "--monotonic" in opts:
monotonic = opts.get("-M") or opts.get("--monotonic")
if not monotonic in ("yes", "no"):
print("Invalid value for -M or --monotonic option:", monotonic, ". Valid values are 'yes' and 'no'")
sys.exit(2)
monotonic = monotonic == "yes"
if "-F" in opts or "--frozen" in opts:
frozen = opts.get("-F") or opts.get("--frozen")
if not frozen in ("yes", "no"):
print("Invalid value for -F or --frozen option:", frozen, ". Valid values are 'yes' and 'no'")
sys.exit(2)
frozen = frozen == "yes"
desc = None
if args[1:]:
desc = " ".join(args[1:])
response = client.update_dataset(dataset, metadata=metadata, frozen=frozen, monotonic=monotonic, mode=mode, description=desc)
print(response)
def do_dataset(server_url, args):
if not args:
print(Usage)
sys.exit(2)
client = MetaCatClient(server_url)
command = args[0]
try:
method = {
"add": do_add,
"list": do_list,
"update": do_update,
"create": do_create,
"show": do_show
}[command]
except KeyError:
print("Unknown subcommand:", command)
sys.exit(2)
return method(client, args[1:])
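# Example invocations of the subcommands dispatched above, following the Usage text
# (the namespace and dataset names are made up):
#   metacat dataset list -l 'myscope:*'
#   metacat dataset create -M -m '{"campaign": 1}' myscope:cosmics "raw cosmics data"
#   metacat dataset show -j myscope:cosmics
#   metacat dataset update -F yes myscope:cosmics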
|
A_33_01_10 = {0: {'A': 0.212, 'C': -0.068, 'E': -0.226, 'D': -0.379, 'G': 0.336, 'F': -0.4, 'I': -0.063, 'H': 0.04, 'K': 0.471, 'M': -0.473, 'L': 0.068, 'N': -0.161, 'Q': 0.078, 'P': 0.401, 'S': -0.051, 'R': 0.324, 'T': -0.023, 'W': -0.034, 'V': 0.151, 'Y': -0.201}, 1: {'A': 0.055, 'C': 0.331, 'E': 0.232, 'D': 0.573, 'G': -0.112, 'F': -0.653, 'I': -0.153, 'H': 0.281, 'K': 0.55, 'M': -0.374, 'L': 0.037, 'N': -0.267, 'Q': 0.247, 'P': 0.455, 'S': 0.012, 'R': 0.393, 'T': -0.127, 'W': -0.18, 'V': -0.13, 'Y': -1.172}, 2: {'A': 0.116, 'C': -0.142, 'E': 0.295, 'D': 0.215, 'G': 0.139, 'F': -0.174, 'I': -0.06, 'H': -0.017, 'K': 0.097, 'M': -0.302, 'L': -0.074, 'N': 0.027, 'Q': 0.088, 'P': 0.183, 'S': 0.054, 'R': 0.006, 'T': 0.067, 'W': -0.333, 'V': 0.092, 'Y': -0.276}, 3: {'A': -0.011, 'C': -0.05, 'E': 0.091, 'D': -0.007, 'G': -0.056, 'F': -0.074, 'I': -0.062, 'H': 0.0, 'K': 0.122, 'M': -0.082, 'L': -0.033, 'N': 0.03, 'Q': 0.079, 'P': 0.088, 'S': -0.027, 'R': -0.011, 'T': 0.044, 'W': -0.085, 'V': 0.054, 'Y': -0.009}, 4: {'A': 0.117, 'C': -0.157, 'E': 0.288, 'D': 0.128, 'G': 0.122, 'F': -0.262, 'I': -0.077, 'H': 0.066, 'K': 0.05, 'M': -0.102, 'L': -0.11, 'N': 0.038, 'Q': -0.003, 'P': 0.079, 'S': 0.085, 'R': -0.153, 'T': 0.041, 'W': -0.02, 'V': -0.117, 'Y': -0.014}, 5: {'A': -0.032, 'C': -0.066, 'E': 0.098, 'D': 0.233, 'G': 0.102, 'F': -0.191, 'I': -0.108, 'H': -0.009, 'K': 0.114, 'M': -0.136, 'L': 0.002, 'N': 0.167, 'Q': 0.053, 'P': 0.122, 'S': -0.072, 'R': 0.002, 'T': 0.023, 'W': -0.138, 'V': -0.082, 'Y': -0.083}, 6: {'A': 0.063, 'C': -0.109, 'E': 0.2, 'D': 0.142, 'G': 0.055, 'F': -0.217, 'I': 0.014, 'H': -0.139, 'K': -0.037, 'M': -0.079, 'L': -0.133, 'N': 0.125, 'Q': 0.145, 'P': 0.223, 'S': 0.039, 'R': -0.123, 'T': -0.04, 'W': -0.047, 'V': 0.128, 'Y': -0.209}, 7: {'A': -0.147, 'C': -0.156, 'E': 0.036, 'D': 0.388, 'G': 0.297, 'F': -0.403, 'I': -0.118, 'H': -0.001, 'K': 0.253, 'M': -0.212, 'L': -0.099, 'N': 0.104, 'Q': 0.255, 'P': 0.31, 'S': -0.12, 'R': 0.093, 'T': 0.031, 'W': -0.357, 'V': -0.055, 'Y': -0.098}, 8: {'A': -0.125, 'C': -0.218, 'E': 0.034, 'D': 0.317, 'G': 0.134, 'F': -0.496, 'I': 0.144, 'H': 0.167, 'K': 0.11, 'M': 0.065, 'L': -0.112, 'N': 0.034, 'Q': -0.01, 'P': -0.133, 'S': 0.074, 'R': 0.038, 'T': 0.032, 'W': -0.043, 'V': 0.255, 'Y': -0.267}, 9: {'A': 0.0, 'C': 0.0, 'E': 0.105, 'D': 0.0, 'G': 0.0, 'F': 0.268, 'I': 0.179, 'H': 0.104, 'K': -0.243, 'M': -0.281, 'L': 0.191, 'N': 0.0, 'Q': 0.391, 'P': 0.0, 'S': 0.0, 'R': -1.516, 'T': 0.198, 'W': 0.349, 'V': 0.092, 'Y': 0.163}, -1: {'con': 4.2855}}
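# A minimal sketch of how a per-position matrix like the one above could be applied:
# sum the entry for each residue of a 10-mer peptide, position by position. This is an
# assumption about the intended use; how the trailing {-1: {'con': ...}} constant enters
# the final score is not defined in this file.
def _score_10mer(peptide, matrix=A_33_01_10):
    assert len(peptide) == 10
    return sum(matrix[i][aa] for i, aa in enumerate(peptide))
# e.g. _score_10mer("ALAKAAAAAV")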
|
from tests.data.book_data import BOOKS
from tests.data.library_data import LIBRARIES
from tests.data.library_book_data import LIBRARY_BOOKS, LIBRARY_BOOKS_API
|
import getpass
import os
import configparser
import tableauserverclient as TSC
import argparse
serverObject=""
def exit():
return "exit"
def help():
return ""
def downloadResource(): # TODO HERE: make the function check if the file exists or not
path = r'.\download'
resourceType=""
resourceName=""
if os.path.isfile('./myconfig.ini'): # [RESOURCES] must be on the file, create something that enables this check
answer=input("Configuration file available. Do you want to use it (Yes/No)?: ")
if answer=='Yes':
config = configparser.ConfigParser()
config.read('myconfig.ini')
resourceName = config['RESOURCES']['resource_name']
resourceType = config['RESOURCES']['resource_type']
if answer=='No':
print("-> Input resource parameters\n")
resourceName = input("Workbook name: ")
resourceType = input("Resource type (workbook/datasource): ")
pathAnswer = input("Do you want to specify a path for the downloaded resource (Yes/No)?: ") # put tkinter here!
if pathAnswer=="Yes":
path = input("Specify your path: ")
if resourceType=="workbook":
workbooks = serverObject.workbooks.get()
targetWorkbook = [w for w in workbooks[0] if w.name==resourceName]
serverObject.workbooks.download(targetWorkbook[0].id, filepath=path, no_extract=False)
print("Workbook downloaded!")
serverObject.workbooks.populate_connections(targetWorkbook[0])
connections=[connection.datasource_name for connection in targetWorkbook[0].connections]
print("\n\t-{} \n\t\tOwner: {} \n\t\tConnections: {}\n".format(targetWorkbook[0].name,targetWorkbook[0].owner_id,connections))
if resourceType=="datasource":
datasources = serverObject.datasources.get()
targetDatasource = [d for d in datasources[0] if d.name==resourceName]
serverObject.datasources.download(targetDatasource[0].id, filepath=path, no_extract=False)
return ""
def downloadDataSource():
return ""
def listDataSources():
return ""
def listProjects():
projects = serverObject.projects.get()
for i,p in enumerate(projects[0]):
print("{}: {}\n\tDescription: {}\n\tId: {}\n\tParent id: {}\n\tPermissions: {}\n".format(i,p.name,p.description,p.id,p.parent_id,p.content_permissions))
return ""
def listWorkbooks():
workbooks = serverObject.workbooks.get()
workbooks = [(w,w.project_name) for w in workbooks[0]]
workbooks.sort(key=lambda tup: tup[1])
listOfProjects = list(set([w[1] for w in workbooks]))
for p in listOfProjects:
counter=0
print("\nProject name: {}".format(p))
print("")
for w in workbooks:
if w[1]==p:
counter+=1
serverObject.workbooks.populate_connections(w[0])
connections=[connection.datasource_name for connection in w[0].connections]
print("\t-{}) {} \n\t\tOwner: {} \n\t\tConnections: {}".format(counter,w[0].name,w[0].owner_id,connections))
return ""
def listSites():
sites=serverObject.sites.get()
for i,s in enumerate(sites[0]):
print("{}: {}\n\tURL: {}\n\tUser quota: {}\n\tStorage quota: {}\n\tState: {}\n".format(i,s.name,s.content_url,s.user_quota,s.storage_quota,s.state))
return ""
def publishWorkbook():
return ""
def publishDataSource():
project_id = "8676e446-180c-4a17-bcdf-7842a8fd49e5"
new_datasource = TSC.DatasourceItem(project_id)
return ""
def actionToTake(argument):
switcher = {
1:listDataSources,
2:listWorkbooks,
3:listProjects,
4:listSites,
5:downloadResource,
6:exit,
}
# Get the function from switcher dictionary
func=switcher.get(int(argument),lambda:'Invalid input')
# Execute the function
return func()
def connectToServer():
servername, username, password, site=parseInput()
try:
tableau_auth = TSC.TableauAuth(username, password, site)
server = TSC.Server(servername, use_server_version=True)
server.auth.sign_in(tableau_auth)
print("\n----------------------------------------------------------\n")
print("Connected to the Tableau Server!")
s_info = server.server_info.get()
print("\nServer info:")
print("\tProduct version: {0}".format(s_info.product_version))
print("\tREST API version: {0}".format(s_info.rest_api_version))
print("\tBuild number: {0}".format(s_info.build_number))
print("\tAddress: {}".format(server.server_address))
print("\tUsername: {}".format(username))
print("\tSite name: {}".format(site))
print("\n----------------------------------------------------------\n")
return server
except:
print("\nInvalid login information!!!")
return ""
def parseInput():
servername=""
username=""
password=""
site=""
if os.path.isfile('./myconfig.ini'):
answer=input("\nConfigutation file available. Do you want to use it (Yes/No)?: ")
if answer=='Yes':
config = configparser.ConfigParser()
config.read('myconfig.ini')
servername = config['PARAMETERS']['server']
username = config['PARAMETERS']['username']
password = config['PARAMETERS']['password']
site = config['PARAMETERS']['site']
if answer=='No':
print("-> Input user parameters\n")
servername = input("Server link: ")
username = input("Username: ")
password = getpass.getpass("Password: ")
site = input("Site: ")
return servername, username, password, site
if __name__ == "__main__":
print("\n------------------WELCOME TO TABLEAU APP------------------")
while serverObject=="":
serverObject=connectToServer()
while True:
print("1: List data sources\n2: List workbooks\n3: List projects\n4: List sites\n5: Download resource\n6: Exit\n")
action = input("Action to take: ")
print("")
result=actionToTake(action)
if result=="exit":
break
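# Example layout of the myconfig.ini file read by parseInput() and downloadResource()
# above (all values are placeholders, not real credentials):
#   [PARAMETERS]
#   server = https://tableau.example.com
#   username = admin
#   password = secret
#   site = Default
#   [RESOURCES]
#   resource_name = Sales Overview
#   resource_type = workbook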
|
#!/usr/bin/env python
###########################################
# Command line tool for initializing pyape projects
###########################################
import shutil
from pathlib import Path
from pkg_resources import resource_filename
import click
from pyape.tpl import create_from_jinja
basedir = Path(resource_filename('pyape', '__init__.py')).parent
# locate the tpl directory
tpldir = basedir.joinpath('tpl')
fabconfig = Path('fabconfig')
files = {
'dotenv': '_env.jinja2',
'uwsgi': 'uwsgi_ini.jinja2',
'gunicorn': 'gunicorn_conf_py.jinja2',
'gunicorn_nginx': 'gunicorn_nginx.conf.jinja2',
'fabfile': 'fabfile.py',
'fabconfig/init': '__init__.py',
'fabconfig/local': 'env_local.py',
'fabconfig/prod': 'env_prod.py',
'fabconfig/test': 'env_test.py',
'wsgi': 'wsgi.py',
'readme': 'README.md',
}
@click.group(help='初始化 pyape 项目')
def main():
pass
def copytplfile(srcdir, dstdir, keyname, filename, force=False, rename=False):
""" 复制文件到目标文件夹
:param srcdir: 源文件夹
:param dstdir: 目标文件夹
:param keyname: 文件 key 名称,files 的 key
:param filename: 文件名称,files 的 value
:param force: 是否强制覆盖已存在文件
:param rename: 若文件已存在是否重命名
"""
split_path = keyname.split('/')
dstfiledir = dstdir
srcfiledir = srcdir
while len(split_path) > 1:
        # create intermediate directories along the key path if they do not exist yet
dstfiledir = dstfiledir.joinpath(split_path[0])
srcfiledir = srcfiledir.joinpath(split_path[0])
if not dstfiledir.exists():
dstfiledir.mkdir()
split_path = split_path[1:]
srcfile = srcfiledir / filename
dstfile = dstfiledir / filename
if dstfile.exists():
if force:
shutil.copyfile(srcfile, dstfile)
            click.echo('Copied {0} to {1}'.format(srcfile, dstfile))
elif rename:
dstbak = dstfile.parent.joinpath(dstfile.name + '.bak')
if dstbak.exists():
                st = click.style('Backup file {0} already exists! Please delete it first.'.format(dstbak), fg='red')
click.echo(st, err=True)
else:
shutil.move(dstfile, dstbak)
                st = click.style('Backed up {0} to {1}'.format(dstfile, dstbak), fg='yellow')
click.echo(st)
shutil.copyfile(srcfile, dstfile)
                click.echo('Copied {0} to {1}'.format(srcfile, dstfile))
else:
            st = click.style('File {0} already exists!'.format(dstfile), fg='red')
click.echo(st, err=True)
else:
shutil.copyfile(srcfile, dstfile)
        click.echo('Copied {0} to {1}'.format(srcfile, dstfile))
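def _example_copy():
    """A minimal sketch (not wired into the CLI): copy a single template into a
    hypothetical project directory, backing up any file that already exists."""
    copytplfile(tpldir, Path('/tmp/myproject'), 'fabconfig/local',
                files['fabconfig/local'], force=False, rename=True)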
@click.command(help='Copy pyape configuration files into the current project')
@click.option('--all', '-A', default=False, is_flag=True, help='Copy all templates')
@click.option('--dst', '-D', help='Destination directory to copy into')
@click.option('--force', '-F', default=False, is_flag=True, help='Overwrite existing files')
@click.option('--rename', '-R', default=False, is_flag=True, help='Rename (back up) the target file if it already exists')
@click.argument('name', nargs=-1)
def copy(all, name, dst, force, rename):
if dst is None:
dst = Path.cwd()
else:
dst = Path(dst)
if all:
for key, tplfile in files.items():
copytplfile(tpldir, dst, key, tplfile, force, rename)
else:
for key in name:
            if key not in files:
                st = click.style('Only the following names are supported: {0}'.format(' '.join(files.keys())), fg='red')
click.echo(st, err=True)
continue
copytplfile(tpldir, dst, key, files[key], force, rename)
@click.command(help='Initialize a pyape project')
@click.option('--force', '-F', default=False, is_flag=True, help='Overwrite existing files')
def init(force):
dst = Path.cwd()
for keyname, filename in files.items():
copytplfile(tpldir, dst, keyname, filename, force, False)
@click.command(help='Show the runtime status of uwsgi.')
@click.option('--frequency', '-F', default=1, type=int, help='Refresh frequency in seconds')
@click.argument('address', nargs=1)
def top(address, frequency):
import pyape.uwsgitop
pyape.uwsgitop.call(address, frequency)
GEN_SUPE_HELP = 'Generate a supervisord.conf configuration file in the current directory'
@click.command(help=GEN_SUPE_HELP)
@click.option('-p', '--path', required=False, type=click.Path(), help='Base path; all path-related settings in the generated configuration are placed under it')
@click.option('--unix-http-server-file', required=False, type=str)
@click.option('--supervisord-logfile', required=False, type=str)
@click.option('--supervisord-pidfile', required=False, type=str)
@click.option('--supervisord-user', required=False, type=str)
@click.option('--supervisord-directory', required=False, type=str)
@click.option('--supervisorctl-serverurl', required=False, type=str)
@click.option('--include-files', required=False, type=str)
def gensupe(**kwargs):
try:
replaceobj = {}
path = kwargs.get('path')
if path is not None:
path = Path(path)
replaceobj['unix_http_server_file'] = str(path.joinpath('run', 'supervisord.sock').resolve())
replaceobj['supervisorctl_serverurl'] = 'unix://%s' % str(path.joinpath('run', 'supervisord.sock').resolve())
replaceobj['include_files'] = str(path.joinpath('conf.d').resolve()) + '/*.conf'
replaceobj['supervisord_logfile'] = str(path.joinpath('log', 'supervisord.log').resolve())
replaceobj['supervisord_pidfile'] = str(path.joinpath('run', 'supervisord.pid').resolve())
replaceobj['supervisord_directory'] = str(path.resolve())
for k, v in kwargs.items():
if v is not None:
replaceobj[k] = v
name = 'supervisord'
cwdpath = Path().cwd()
create_from_jinja(name, cwdpath, replaceobj)
except Exception as e:
        click.echo(click.style('Generation error: %s' % e, fg='red'), err=True)
raise click.Abort()
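def _example_gensupe():
    """A minimal sketch (not part of the CLI): invoke gensupe in-process with click's
    test runner; '/srv/myapp' is a hypothetical base path."""
    from click.testing import CliRunner
    return CliRunner().invoke(gensupe, ['--path', '/srv/myapp'])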
GEN_SYS_HELP = 'Generate the supervisord.service unit file required by systemd in the current directory'
@click.command(help=GEN_SYS_HELP)
@click.option('--supervisord-exec', required=False, type=str)
@click.option('--supervisorctl-exec', required=False, type=str)
@click.option('--supervisord-conf', required=False, type=str)
def gensys(**kwargs):
try:
replaceobj = {}
for k, v in kwargs.items():
if v is not None:
replaceobj[k] = v
name = 'systemd'
cwdpath = Path().cwd()
create_from_jinja(name, cwdpath, replaceobj)
except Exception as e:
        click.echo(click.style('Generation error: %s' % e, fg='red'), err=True)
raise click.Abort()
GEN_PROGRAM_CONF_HELP = 'Generate a supervisord program configuration file'
@click.command(help=GEN_PROGRAM_CONF_HELP)
@click.option('-n', '--name', required=True, type=str, help='Supervisor program name')
@click.option('-u', '--user', required=False, type=str, help='User that the Supervisor program runs as')
@click.option('-c', '--app-module', default='uwsgi:pyape_app', type=str, help='app_module of the flask process started by Supervisor')
def genprog(name, app_module, user):
try:
cwdpath = Path().cwd()
replaceobj = {
'cwd': cwdpath.resolve(),
'name': name,
'app_module': app_module,
}
if user is not None:
replaceobj['user'] = user
create_from_jinja('program', cwdpath.joinpath(name + '.conf'), replaceobj)
except Exception as e:
        click.echo(click.style('Generation error: %s' % e, fg='red'), err=True)
raise click.Abort()
main.add_command(copy)
main.add_command(init)
main.add_command(top)
main.add_command(gensupe)
main.add_command(gensys)
main.add_command(genprog)
if __name__ == '__main__':
main()
|
# coding=utf-8
# Copyright 2019 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base layer class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import inspect
import traceback
import jax
import numpy as onp
import six
from trax import backend
from trax.backend import nested_map
from trax.backend import numpy as np
from trax.shapes import ShapeDtype
from trax.shapes import signature
EMPTY_WEIGHTS = ()
EMPTY_STATE = ()
class Layer(object):
"""Base class for composable layers in a deep learning network.
Layers are the basic building blocks for deep learning models. A Trax layer
computes a function from zero or more inputs to zero or more outputs,
optionally using trainable weights (common) and non-parameter state (not
common). Authors of new layer subclasses typically override at most two
methods of the base `Layer` class:
forward(inputs, weights):
Computes this layer's output as part of a forward pass through the model.
new_weights(self, input_signature):
Returns new weights suitable for inputs with the given signature.
A small subset of layer types are combinators -- they organize the computation
of their sublayers, e.g., applying their sublayers in series or in parallel.
All layers have the following properties, with default values implemented
in the base `Layer` class:
- n_in: int (default 1)
- n_out: int (default 1)
- weights: tuple (default empty -- the layer has no weights)
- state: tuple (default empty -- the layer has no non-parameter state)
- sublayers: tuple (default empty -- the layer has no sublayers)
The inputs to a layer are tensors, packaged according to how many there are:
- n_in = 0: an empty tuple ()
- n_in = 1: one tensor (NOT wrapped in a tuple)
- n_in > 1: a tuple of tensors
(The special treatment of the single-input case is meant to simplify the
work of layer writers; this design choice may be revisited in the future.)
The outputs from a layer are also tensors, packaged the same as layer inputs:
- n_out = 0: an empty tuple ()
- n_out = 1: the tensor (NOT wrapped in a tuple)
- n_out > 1: a tuple of tensors
The Trax runtime maintains a data stack with which layer calls are composed.
For more complex data network architectures, possibly involving multiple data
flows, one can view each layer as a function from stack state to stack state,
where the function's inputs are a slice from the stack, and the function's
outputs are spliced back into the stack.
"""
def __init__(self, n_in=1, n_out=1):
"""Creates a partially initialized, unconnected layer instance.
Args:
n_in: Number of inputs expected by this layer.
n_out: Number of outputs promised by this layer.
"""
self._n_in = n_in
self._n_out = n_out
self._sublayers = () # Default is no sublayers.
self._input_signature = None
self._rng = None
self._weights = EMPTY_WEIGHTS # cached weights
self._state = EMPTY_STATE
# record root call site for custom error messages:
frame = _find_frame(inspect.currentframe())
# Turns out that frame can mutate in time, so we just copy what we need.
self._caller = {'filename': copy.copy(frame.f_code.co_filename),
'lineno': int(frame.f_lineno)}
del frame # Just in case.
self._init_finished = False
def __repr__(self):
class_str = self.__class__.__name__
fields_str = 'in={},out={}'.format(self.n_in, self.n_out)
objs = self.sublayers
if objs:
objs_str = ', '.join(str(x) for x in objs)
return '{}{{{},sublayers=[{}]}}'.format(class_str, fields_str, objs_str)
else:
return '{}{{{}}}'.format(class_str, fields_str)
def __call__(self, x, **kwargs):
"""Makes Layer instances callable; for use in tests or interactive settings.
This convenience method helps library users play with, test, or otherwise
probe the behavior of layers outside of a full training environment. It
presents the layer as callable function from inputs to outputs, with the
option of manually specifying weights and non-parameter state per individual
call. For convenience, weights and non-parameter state are cached per layer
instance, starting from default values of `EMPTY_WEIGHTS` and `EMPTY_STATE`,
and acquiring non-empty values either by initialization or from values
explicitly provided via the weights and state keyword arguments.
Args:
x: 0 or more input tensors, formatted the same as the inputs to
Layer.forward.
**kwargs: Additional keyword arguments if needed/desired for this layer.
Three possible keyword arguments are especially relevant:
- weights=... will override any cached weights values
- state=... will override any cached state values
- rng=... will supply a PRNG key for use by the layer
Returns:
0 or more output tensors, formatted the same as the outputs from
Layer.forward.
"""
weights = kwargs.pop('weights', self.weights)
state = kwargs.pop('state', self.state)
rng = kwargs.pop('rng', None)
outputs, _ = self._forward_internal(x, weights, state, rng)
return outputs
def forward(self, inputs, weights):
"""Computes this layer's output as part of a forward pass through the model.
Authors of new Layer subclasses should override this method to define the
forward computation that their layer performs, unless they need to use
local non-trainable state or randomness, in which case they should
override `forward_with_state` instead.
Args:
inputs: Input tensors, matching the number (n_in) expected by this
layer. Specifically:
- n_in = 0: an empty tuple ()
- n_in = 1: a tensor (NOT wrapped in a tuple)
- n_in > 1: a tuple of tensors, with n_in items
weights: A tuple of trainable weights, with one element for this layer
if this layer has no sublayers, or one for each sublayer if this
layer has sublayers. If a layer (or sublayer) has no trainable
weights, the corresponding weights element is an empty tuple.
Returns:
Tensors, matching the number (n_out) promised by this layer.
Specifically:
- n_out = 0: an empty tuple
- n_out = 1: one tensor (NOT wrapped in a tuple)
- n_out > 1: a tuple of tensors, with n_out items
"""
raise NotImplementedError
def forward_with_state(self, inputs, weights=EMPTY_WEIGHTS, state=EMPTY_STATE,
**kwargs):
"""Computes this layer's output as part of a forward pass through the model.
Authors of new Layer subclasses should override this method to define the
forward computation that their layer performs only if their layer uses
local state or randomness. Otherwise override `forward` instead.
Args:
inputs: Input tensors, matching the number (n_in) expected by this
layer. Specifically:
- n_in = 0: an empty tuple ()
- n_in = 1: a tensor (NOT wrapped in a tuple)
- n_in > 1: a tuple of tensors, with n_in items
weights: A tuple of trainable weights, with one element for this layer
if this layer has no sublayers, or one for each sublayer if this
layer has sublayers. If a layer (or sublayer) has no trainable
weights, the corresponding weights element is an empty tuple.
state: Layer-specific non-parameter state that can update between batches.
**kwargs: Often empty; main current use is to carry a PRNG key for random
number generation, using the keyword 'rng'.
Returns:
A tuple of (tensors, state). The tensors match the number (n_out) promised
by this layer, and are formatted according to that number, specifically:
- n_out = 0: an empty tuple
- n_out = 1: one tensor (NOT wrapped in a tuple)
- n_out > 1: a tuple of tensors, with n_out items
"""
del kwargs
return self.forward(inputs, weights), state
def new_weights(self, input_signature):
"""Returns new weights suitable for inputs with the given signature.
Authors of new Layer subclasses should override this method if their layer
uses trainable weights. The default implementation works for layers that
have no weights. Layers that have trainable state should override the
`new_weights_and_state` method instead.
Args:
input_signature: A ShapeDtype instance (if this layer takes one input)
or a list/tuple of ShapeDtype instances; signatures of inputs.
"""
del input_signature
return EMPTY_WEIGHTS
def new_weights_and_state(self, input_signature):
"""Returns a (weights, state) pair suitable for initializing this layer.
Authors of new Layer subclasses should override this method if their layer
uses trainable weights or has non-parameter state that gets updated
between batches. The default implementation works for layers that have
no weights or state.
Args:
input_signature: A ShapeDtype instance (if this layer takes one input)
or a list/tuple of ShapeDtype instances.
"""
return self.new_weights(input_signature), EMPTY_STATE
@property
def has_backward(self):
"""Returns True if this layer provides its own (custom) backward pass code.
A layer subclass that provides custom backward pass code (for custom
gradients) must override this method to return True.
"""
return False
def backward(self, inputs, output, grad, weights, state, new_state, **kwargs):
"""Custom backward pass to propagate gradients in a custom way.
Args:
inputs: Input tensors; can be a (possibly nested) tuple.
output: The result of running this layer on inputs.
grad: gradient signal (called cotangent in jax) computed based on
subsequent layers. The structure and shape must match output.
weights: layer weights
state: start state.
new_state: end state computed by running the layer
**kwargs: kwargs for the layer
Returns:
The custom gradient signal for the input. Note that we need to return
a gradient for each argument of forward, so it will usually be a tuple
of signals: the gradient for inputs and weights.
"""
raise NotImplementedError
# End of public subclassing interface.
# Begin public callable interface.
def init(self, input_signature, rng=None):
"""Initializes this layer and its sublayers recursively.
This method is designed to initialize each layer instance once, even if the
same layer instance occurs in multiple places in the network. This enables
weight sharing to be implemented as layer sharing.
Args:
input_signature: A `ShapeDtype` instance (if this layer takes one input)
or a list/tuple of `ShapeDtype` instances.
rng: A single-use random number generator (JAX PRNG key). If none is
provided, a default rng based on the integer seed 0 will be used.
Returns:
A (weights, state) tuple, in which weights contains newly created weights
on the first call and `EMPTY_WEIGHTS` on all subsequent calls.
"""
try:
if self._rng is None:
rng = backend.random.get_prng(0) if rng is None else rng
self._set_rng_recursive(rng)
# Initialize weights once; store them for use when this layer is called.
# Needs to call new_weights_and_state regardless of _init_finished because
# state also needs to be initialized. After jitting, graph pruning should
# be able to remove unnecessary computation.
# TODO(lukaszkaiser): Revisit this decision and see whether layers sharing
# weights should also share states.
weights, state = self.new_weights_and_state(input_signature)
if not self._init_finished:
self._init_finished = True
self._weights = weights
self._state = state
return (weights, state)
else:
return (EMPTY_WEIGHTS, state)
except Exception:
name, trace = self.__class__.__name__, _short_traceback(skip=3)
raise LayerError(name, 'init', self._caller,
input_signature, trace)
def new_rng(self):
"""Returns a new single-use random number generator (JAX PRNG key)."""
self._rng, rng = backend.random.split(self._rng)
return rng
def new_rngs(self, n):
"""Returns `n` single-use random number generators (JAX PRNG keys).
Args:
n: The number of rngs to return; must be an integer > 0.
Returns:
A tuple of `n` rngs. Successive calls will yield continually new values.
"""
if n < 1:
raise ValueError('n must be > 0; received value: {}'.format(n))
rngs = backend.random.split(self._rng, n + 1)
self._rng = rngs[0]
return tuple(rngs[1:])
# End of public callable methods.
# Methods and properties below are reserved for internal use.
@property
def n_in(self):
"""Returns how many tensors this layer expects as input."""
return self._n_in
@property
def n_out(self):
"""Returns how many tensors this layer promises as output."""
return self._n_out
@property
def sublayers(self):
"""Returns a tuple containing this layer's sublayers; may be empty."""
return self._sublayers
@property
def input_signature(self):
"""Returns this layer's input signature.
An input signature is a ShapeDtype instance (if the layer takes one input)
or a tuple of ShapeDtype instances.
"""
return self._input_signature
@property
def weights(self):
"""Returns this layer's weights.
Depending on the layer, the weights can be in the form of:
- an empty tuple
- a tensor (ndarray)
- a nested structure of tuples and tensors
TODO(jonni): Simplify this picture (and underlying implementation).
"""
return self._weights
@weights.setter
def weights(self, weights):
self._weights = weights
@property
def state(self):
"""Returns a tuple containing this layer's state; may be empty."""
return self._state
@state.setter
def state(self, state):
self._state = state
def _forward_internal(self, x, weights, state, rng):
"""Applies this layer as part of a forward pass; an internal system method.
This method is reserved for handling plumbing and other internal affairs
as needed by the overall library. Trax library users should use or override
the `forward` method instead.
Args:
x: See Layer.forward_with_state inputs.
weights: See Layer.forward_with_state.
state: See Layer.forward_with_state.
rng: See Layer.forward_with_state.
Returns:
See Layer.forward_with_state.
"""
try:
# If weights are nothing, we may be reusing this layer.
# Use the cached weights to calculate the value.
# Note: to make sure jit tracers can decide this branch in python we use
# `weights is EMPTY_WEIGHTS` instead of, e.g., `not weights` or
# `weights == EMPTY_WEIGHTS`.
if weights is EMPTY_WEIGHTS: # pylint: disable=literal-comparison
weights = self._weights
else:
# In this case, we're called for the first time: cache weights.
self._weights = weights
if not self.has_backward:
outputs, s = self.forward_with_state(
x, weights=weights, state=state, rng=rng)
else:
outputs, s = self._do_custom_gradients(x, weights, state, rng=rng)
self._state = s
return outputs, s
except Exception:
name, trace = self.__class__.__name__, _short_traceback()
raise LayerError(name, '_forward_internal',
self._caller, signature(x), trace)
def _forward_abstract(self, input_signature):
"""Computes shapes and dtypes this layer would produce in a forward pass.
Args:
input_signature: A ShapeDtype instance (if this layer takes one input)
or a list/tuple of ShapeDtype instances; signatures of inputs.
Returns:
A tuple of (output, state).
The output part of the tuple is a ShapeDtype instance representing the
shape and type of the output (if this layer has one output) or a tuple
of ShapeDtype instances (if this layer has more than one output).
"""
try:
# Beware: using an actual RNG (as opposed to this ShapeDtype stub) would
# cause a large number of dropout masks to be computed and permanently
# stored in global memory.
rng = ShapeDtype((2,), onp.uint32)
def call_on_input(x, weights, state, rng):
return self.forward_with_state(x, weights=weights, state=state, rng=rng)
weight_signature = nested_map(signature, self.weights)
s = backend.abstract_eval(call_on_input)(
input_signature, weight_signature, self.state, rng)
return s
except Exception:
name, trace = self.__class__.__name__, _short_traceback(skip=3)
raise LayerError(name, '_forward_abstract', self._caller, input_signature,
trace)
# pylint: disable=protected-access
def _set_rng_recursive(self, rng):
"""Sets the rng (JAX PRNG key) for this layer and sublayers, recursively."""
self._rng = rng
sublayers = self.sublayers
if sublayers:
rngs = backend.random.split(rng, len(sublayers))
for sublayer, rng in zip(sublayers, rngs):
sublayer._rng = rng
def _set_input_signature_recursive(self, input_signature):
"""Sets input_signatures for this layer and sublayers, recursively.
General combinators (those that can take multiple sublayers) must override
this method to calculate and set input signatures for the sublayers. (See
the `Serial` class in combinators.py for an example.)
Args:
input_signature: A `ShapeDtype` instance (if this layer takes one input)
or a list/tuple of `ShapeDtype` instances
"""
self._input_signature = input_signature
# Handle the special case of a single immediate sublayer (which may in turn
# have its own sublayers).
sublayers = self.sublayers
if sublayers and len(sublayers) == 1:
sublayers[0]._set_input_signature_recursive(input_signature)
if sublayers and len(sublayers) > 1:
raise ValueError('A layer class whose instances can have more than one '
'sublayer must override the input_signature property '
'setter.')
# pylint: enable=protected-access
def _do_custom_gradients(self, x, weights, state, **kwargs):
"""Calls this layer for a forward pass, but with custom gradients."""
assert backend.get_name() == 'jax', (
'Custom gradients are only supported in JAX for now.')
# See this link for how custom transformations are defined in JAX:
# https://jax.readthedocs.io/en/latest/jax.html#jax.custom_transforms
# Note that we capture the kwargs and don't calculate gradients wrt. them.
@jax.custom_transforms
def _do_forward(y, weights):
res = self.forward_with_state(
y, weights=weights, state=state, **kwargs)
return res
# This is the custom gradient (vector-jacobian product in JAX) function.
# For the exact specification of this custom transformation see this link:
# https://jax.readthedocs.io/en/latest/jax.html#jax.defjvp_all
def do_forward_vjp(y, weights):
"""Custom gradient (vjp) function."""
output, new_state = self.forward_with_state(
y, weights=weights, state=state, **kwargs)
def vjpfun(grad):
grad = grad[0] # Ignore dummy gradient wrt state.
res = self.backward(
y, output, grad, weights, state, new_state, **kwargs)
return res
return (output, state), vjpfun
jax.defvjp_all(_do_forward, do_forward_vjp)
output, state = _do_forward(x, weights)
state = jax.lax.stop_gradient(state)
return output, state
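# A minimal sketch (hypothetical layer and shapes, not part of the library) of the
# public call sequence described above: derive weights/state from a signature, then
# call the layer on concrete data.
#
#   sig = ShapeDtype((2, 3), onp.float32)
#   weights, state = some_layer.init(sig)
#   y = some_layer(np.zeros((2, 3)), weights=weights, state=state)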
def layer(n_in=1, n_out=1, new_weights_fn=None):
"""Returns a decorator that converts a function into a Layer class builder."""
def _build_layer_class(raw_fn):
"""Returns a Layer class whose callable instances execute the function."""
def _init(self, **kwargs):
self._kwargs = kwargs # pylint: disable=protected-access
Layer.__init__(self, n_in=n_in, n_out=n_out)
def _forward(self, x, weights):
"""Uses this layer as part of a forward pass through the model."""
_validate_forward_input(x, n_in)
raw_output = raw_fn(x, weights=weights, **self._kwargs) # pylint: disable=protected-access
output = () if _is_empty(raw_output) else raw_output
return output
def _new_weights(self, input_signature):
if new_weights_fn is None:
return EMPTY_WEIGHTS
kwargs = self._kwargs # pylint: disable=protected-access
return new_weights_fn(input_signature, **kwargs)
def _is_empty(raw_output):
return raw_output is None or (isinstance(raw_output, (list, tuple))
and len(raw_output) == 0) # pylint: disable=g-explicit-length-test
# Set docstrings and create the class.
_forward.__doc__ = raw_fn.__doc__
_new_weights.__doc__ = new_weights_fn.__doc__
# Note: None.__doc__ is None
cls = type(raw_fn.__name__, (Layer,),
{'__init__': _init,
'forward': _forward,
'new_weights': _new_weights})
return cls
return _build_layer_class
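# A minimal sketch of the decorator above (hypothetical layer, not part of the
# library): `layer` turns this plain function into a Layer subclass whose
# instances are callable and have no trainable weights.
@layer(n_in=1, n_out=1)
def _DemoRelu(x, **unused_kwargs):  # pylint: disable=invalid-name
  """Example layer built via the `layer` decorator: elementwise max(x, 0)."""
  return np.maximum(x, 0)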
def Fn(f, n_in=None, n_out=None): # pylint: disable=invalid-name
"""Returns a layer with no weights that applies the function f.
The function f can take and return any number of arguments, but it cannot
  have default arguments or keyword arguments. It can use numpy though, e.g.:
A layer that takes 2 arguments and returns sum and concatenation on stack:
Fn(lambda x, y: (x + y, np.concatenate([x, y], axis=0)))
Sometimes determining the number of outputs automatically fails,
in such cases specify n_in and n_out.
Args:
f: the function to execute
n_in: optional, number of inputs
n_out: optional, number of outputs
Returns:
A layer executing the function f.
"""
# Inspect the function f to restrict to no-defaults and no-kwargs functions.
if six.PY2:
argspec = inspect.getargspec(f)
varkwargs = argspec.keywords
else:
argspec = inspect.getfullargspec(f)
varkwargs = argspec.varkw
# This layer cannot handle functions with kwargs or defaults.
if argspec.defaults is not None:
raise ValueError('function cannot have default arguments')
if varkwargs:
raise ValueError('function cannot have keyword arguments')
# Determine n_in from function signature if not set.
if n_in is None:
if argspec.varargs is not None:
raise ValueError('n_in is not set and f has variable args')
n_in = len(argspec.args)
# Try to determine n_out from function signature.
if n_out is None:
try:
dummy_args = [np.array([[0.0]]) for _ in range(n_in)]
res = f(*dummy_args)
n_out = len(res) if isinstance(res, (list, tuple)) else 1
except:
raise ValueError('n_out is not set and could not be determined')
# Create the layer.
@layer(n_in=n_in, n_out=n_out)
def F(xs, **unused_kwargs): # pylint: disable=invalid-name
if not isinstance(xs, (tuple, list)):
xs = (xs,)
return f(*xs)
return F() # pylint: disable=no-value-for-parameter
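# A minimal sketch (not used by the library) of `Fn` plus the direct-call convenience
# described in Layer.__call__; the tensors below are arbitrary example values.
def _example_fn_layer():
  add_layer = Fn(lambda x, y: x + y)  # n_in inferred as 2, n_out inferred as 1
  return add_layer((np.array([1.0]), np.array([2.0])))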
class LayerError(Exception):
"""Exception raised in the layer stack.
Attributes:
message: the message corresponding to this exception.
"""
def __init__(self, layer_name, function_name, caller,
input_signature, traceback_string):
self._layer_name = layer_name
self._function_name = function_name
self._caller = caller # Python inspect object with init caller info.
self._traceback = traceback_string
self._input_signature = input_signature
super(LayerError, self).__init__(self.message)
@property
def message(self):
"""Create error message."""
prefix = 'Exception passing through layer '
prefix += '%s (in %s):\n' % (self._layer_name, self._function_name)
short_path = '[...]/' + '/'.join(
self._caller['filename'].split('/')[-3:])
caller = ' layer created in file %s, line %d\n' % (short_path,
self._caller['lineno'])
shapes_str = ' layer input shapes: %s\n\n' % str(self._input_signature)
return prefix + caller + shapes_str + self._traceback
def check_shape_agreement(layer_obj, input_signature):
"""Compares the layer's __call__ output to its _foward_abstract shape output.
This function helps test layer mechanics and inter-layer connections that
aren't dependent on specific data values.
Args:
layer_obj: A layer object.
input_signature: A `ShapeDtype` instance (if `layer_obj` takes one input)
or a list/tuple of ShapeDtype instances.
Returns:
A tuple representing either a single shape (if the layer has one output) or
a tuple of shape tuples (if the layer has more than one output).
"""
weights, state = layer_obj.init(input_signature)
output_signature, _ = layer_obj._forward_abstract(input_signature) # pylint: disable=protected-access
if isinstance(output_signature, tuple):
shape_output = tuple(x.shape for x in output_signature)
else:
shape_output = output_signature.shape
rng1, rng2 = layer_obj.new_rngs(2)
random_input = _random_values(input_signature, rng1)
call_output = layer_obj(random_input, weights=weights, state=state, rng=rng2)
call_output_shape = _shapes(call_output)
  msg = '_forward_abstract shape output %s != __call__ output shape %s' % (
shape_output, call_output_shape)
assert shape_output == call_output_shape, msg
# TODO(jonni): Remove this assert? It makes test logs harder to read.
return shape_output
def _validate_forward_input(x, n_in):
if n_in != 1:
if not isinstance(x, tuple):
raise TypeError(
'expected input to be a tuple; instead received {}'.format(type(x)))
if len(x) != n_in:
raise ValueError(
'input tuple length ({}) does not equal required number of inputs'
' ({})'.format(len(x), n_in))
def _find_frame(frame):
"""Find the frame with the caller on the stack."""
# TODO(lukaszkaiser): rewrite this function in a systematic way.
# We want to find the first place where the layer was called
# that is *not* an __init__ function of an inheriting layer.
# We also need to exclude a few decorator functions.
while frame.f_code.co_name in ['__init__', 'gin_wrapper', '_validate',
'_validate_forward_inputs', '_init']:
# We only skip __init__ in internal layers, return otherwise.
dirname = frame.f_code.co_filename.split('/')[-2]
if dirname != 'layers' and frame.f_code.co_name == '__init__':
return frame
# If we are in an init, move up.
frame = frame.f_back
return frame
def _shorten_file_path(line):
"""Shorten file path in error lines for more readable tracebacks."""
start = line.lower().find('file')
if start < 0:
return line
first_quote = line.find('"', start)
if first_quote < 0:
return line
second_quote = line.find('"', first_quote + 1)
if second_quote < 0:
return line
path = line[first_quote + 1:second_quote]
new_path = '/'.join(path.split('/')[-3:])
return line[:first_quote] + '[...]/' + new_path + line[second_quote + 1:]
def _short_traceback(skip=3):
"""Cleaned-up form of traceback."""
counter, res = 0, []
# Skipping 3 lines by default: the top (useless) and self-call.
# In python 3, we need to set chain to False (it doesn't exist in python 2).
if six.PY2:
lines = traceback.format_exc().splitlines()[skip:]
else:
lines = traceback.format_exc(chain=False).splitlines()[skip:] # pylint: disable=unexpected-keyword-arg
for l in lines:
if l.startswith('trax.layers.base.LayerError'):
l = l[len('trax.layers.base.'):] # Remove the trax.layers.base prefix.
res.append(_shorten_file_path(l))
if counter % 2 == 1:
res.append('')
counter += 1
# If we see a LayerError, the traceback has already been processed.
if l.startswith('LayerError'):
# Skip 4 back except last as these are internal base-layer calls.
res = res[:-4] + [res[-1]]
res += lines[counter:]
break
return '\n'.join(res)
def _random_values(input_signature, rng):
"""Creates random floats or ints of the given shape.
Args:
input_signature: A `ShapeDtype` instance (if `layer_obj` takes one input)
or a list/tuple of ShapeDtype instances.
rng: A random number generator.
Returns:
Random values with the shape and type specified.
"""
if isinstance(input_signature, ShapeDtype):
shape, dtype = input_signature.shape, input_signature.dtype
if onp.issubdtype(dtype, onp.integer):
return backend.random.bernoulli(rng, 0.5, shape).astype(onp.int32)
else:
return backend.random.uniform(rng, shape, minval=-1.0, maxval=1.0)
elif isinstance(input_signature, (list, tuple)):
return tuple(_random_values(x, rng) for x in input_signature)
else:
raise TypeError(type(input_signature))
def _shapes(x):
"""Get a structure of shapes for a structure of nested arrays."""
def shape(x):
try:
return tuple([int(i) for i in x.shape])
except Exception: # pylint: disable=broad-except
return ()
return tuple(nested_map(shape, x))
|
# Blurred Image Detection
#
# Author: Jasonsey
# Email: 2627866800@qq.com
#
# =============================================================================
"""read training data set from training path"""
import sys
import asyncio
from pathlib import Path
import pickle
import numpy as np
from PIL import Image
from easydict import EasyDict
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
from keras.preprocessing.image import ImageDataGenerator
from utils.tools import get_imginfo, get_imginfo2, get_imgarray, focuse_image, resize2, init_path
import config
from api.decision_tree import detection as tree_detection
from api.total_image import detection as cnn_detection
def load_dataset(paths: list, random=True):
"""the decision tree reading image from disk api
Arguments:
        paths: a list of str or pathlib.Path
random: whether to shuffle the data or not
Returns:
data: 4D np.ndarray of images
labels: 2D np.ndarray of image's labels
"""
data, labels = [], []
for i in range(len(paths)):
path = Path(paths[i])
results = []
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
for pa in path.glob('**/*.jpg'):
print(pa)
results.append(asyncio.ensure_future(get_imginfo2(pa)))
labels.append(i)
loop.run_until_complete(asyncio.wait(results))
for result in results:
data.append(list(result.result()))
data = np.array(data, dtype='float32')
labels = np.array(labels, dtype='float32')
print('Blur: %s, Clear: %s' % ((labels==1).sum(), (labels==0).sum()))
if random:
data, labels = shuffle(data, labels, random_state=2)
return data, labels
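def _example_async_read(paths):
    """A minimal sketch (not used above) of the same asyncio pattern: schedule one
    coroutine per image path and block until every future has a result."""
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    futures = [asyncio.ensure_future(get_imginfo2(p)) for p in paths]
    if futures:  # asyncio.wait() rejects an empty future set
        loop.run_until_complete(asyncio.wait(futures))
    return [f.result() for f in futures]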
def load_dataset2(paths: list, random=True):
"""the CNN model's reading image from disk api
Arguments:
        paths: a list of str or pathlib.Path
random: whether to shuffle the data or not
Returns:
data: 4D np.ndarray of images
labels: 2D np.ndarray of image's labels
"""
data, labels = [], []
for i in range(len(paths)):
path = Path(paths[i])
results = []
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
for pa in path.glob('*.jpg'):
print(pa)
results.append(asyncio.ensure_future(get_imgarray(pa)))
labels.append(i)
loop.run_until_complete(asyncio.wait(results))
for result in results:
data.append(result.result())
data = np.asarray(data, dtype='float32')
data /= 255
labels = np.asarray(labels, dtype='float32')
print('Blur: %s, Clear: %s' % ((labels==1).sum(), (labels==0).sum()))
if random:
data, labels = shuffle(data, labels, random_state=2)
return data, labels
def load_dataset3(paths: list, random=True):
"""the stacking model's reading image from disk api
Arguments:
        paths: a list of str or pathlib.Path
random: whether to shuffle the data or not
Returns:
data: 4D np.ndarray of images
labels: 2D np.ndarray of image's labels
"""
data_info, data_array, labels = [], [], []
for i in range(len(paths)):
path = Path(paths[i])
results = []
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
for pa in path.glob('*.jpg'):
print(pa)
results.append(asyncio.ensure_future(imginfo_and_array(pa)))
labels.append(i)
loop.run_until_complete(asyncio.wait(results))
for result in results:
imginfo, array = result.result()
data_info.append(imginfo)
data_array.append(array)
labels = np.asarray(labels, dtype='float32')
data_info = np.asarray(data_info, dtype='float32')
data_info = tree_detection.predict(data_info)
data_info = data_info[:, 1].reshape((-1, 1))
data_array = np.asarray(data_array, dtype='float32')
data_array = cnn_detection.predict(data_array)
data = np.concatenate((data_info, data_array), axis=1)
if random:
data, labels = shuffle(data, labels, random_state=2)
return data, labels
async def imginfo_and_array(path):
"""return information and array of an image"""
img = Image.open(path).convert('RGB')
imginfo = list(get_imginfo(path))
img = focuse_image(img)
img = resize2(img)
array = np.asarray(img, dtype='float32')
return imginfo, array
def split_dataset(*array):
"""split the data set into train set and test set"""
return train_test_split(*array, test_size=0.2, random_state=2)
def datagen(x_train, y_train, batch_size=128):
"""data augment of CNN model
Arguments:
x_train: 4D np.ndarray of images
y_train: 2D np.ndarray of labels
batch_size: batch size
"""
epoch_size = len(y_train)
    remainder = epoch_size % batch_size
    # When using multiple GPUs, a small final batch can leave one GPU with a batch size
    # of 0, so drop the trailing remainder when it is smaller than batch_size / GPUS.
    if 0 < remainder < batch_size / config.GPUS:
        x_train = x_train[:-remainder]
        y_train = y_train[:-remainder]
epoch_size = len(y_train)
if epoch_size % batch_size:
train_steps = int(epoch_size / batch_size) + 1
else:
train_steps = int(epoch_size / batch_size)
train_datagen = ImageDataGenerator(
rescale=None,
rotation_range=90,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
train_generator = train_datagen.flow(
x_train,
y_train,
batch_size=batch_size)
return train_generator, train_steps, epoch_size
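def _example_train(model, x_train, y_train):
    """A minimal sketch (hypothetical Keras model, not defined here): feed the
    generator and step count returned by datagen() into fit_generator."""
    train_generator, train_steps, _ = datagen(x_train, y_train, batch_size=128)
    model.fit_generator(train_generator, steps_per_epoch=train_steps, epochs=10)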
def read_dataset(paths: list, use_cache=True, cache_home='../data/output/cache'):
"""reading data set api for decision tree model
Arguments:
paths: a list of string or pathlib.Path
        use_cache: if True and a cache exists, this API reads the cache instead
            of reading the data from disk
cache_home: where the cache will be saved
Returns:
dataset: a dict of data set
"""
cache_home = Path(cache_home)
init_path([cache_home])
cache_path = Path(cache_home) / 'dataset_decision_tree.pkl'
if use_cache and cache_path.exists():
with cache_path.open('rb') as f:
dataset = pickle.load(f)
else:
        assert len(paths) == config.NUM_CLASS, 'length of paths should be %s, but got %s' % (config.NUM_CLASS, len(paths))
data, labels = load_dataset(paths)
x_train, x_test, y_train, y_test = split_dataset(data, labels)
dataset = EasyDict({
'train': {
'data': x_train,
'labels': y_train
},
'test': {
'data': x_test,
'labels': y_test
}
})
with cache_path.open('wb') as f:
pickle.dump(dataset, f)
print('All Dataset Read!')
return dataset
def read_dataset2(paths: list, batch_size=128, use_cache=True, cache_home='../data/output/cache'):
"""reading data set api for CNN model
Arguments:
paths: a list of string or pathlib.Path
batch_size: batch size
        use_cache: if True and a cache exists, this API reads the cache instead
            of reading the data from disk
cache_home: where the cache will be saved
Returns:
dataset: a dict of data set
"""
cache_home = Path(cache_home)
init_path([cache_home])
cache_path = Path(cache_home) / 'dataset_total_image.pkl'
if use_cache and cache_path.exists():
with cache_path.open('rb') as f:
cache_dataset = pickle.load(f)
x_train = cache_dataset.train.data
y_train = cache_dataset.train.labels
x_test = cache_dataset.test.data
y_test = cache_dataset.test.labels
else:
        assert len(paths) == config.NUM_CLASS, 'length of paths should be %s, but got %s' % (config.NUM_CLASS, len(paths))
data, labels = load_dataset2(paths)
x_train, x_test, y_train, y_test = split_dataset(data, labels)
cache_dataset = EasyDict({
'train': {
'data': x_train,
'labels': y_train
},
'test': {
'data': x_test,
'labels': y_test
}
})
with cache_path.open('wb') as f:
pickle.dump(cache_dataset, f)
train_generator, train_steps, epoch_size = datagen(x_train, y_train, batch_size)
dataset = EasyDict({
'train': train_generator,
'test': {
'data': x_test,
'labels': y_test
},
'train_steps': train_steps,
'epoch_size': epoch_size,
'input_shape': x_train[0].shape,
'batch_size': batch_size
})
print('All Dataset Read!')
return dataset
def read_dataset3(paths: list, use_cache=True, cache_home='../data/output/cache'):
"""reading data set api for stacking model
Arguments:
paths: a list of string or pathlib.Path
        use_cache: if True and a cache exists, this API reads the cache instead
            of reading the data from disk
cache_home: where the cache will be saved
Returns:
dataset: a dict of data set
"""
cache_home = Path(cache_home)
init_path([cache_home])
cache_path = Path(cache_home) / 'dataset_stacking.pkl'
if use_cache and cache_path.exists():
with cache_path.open('rb') as f:
dataset = pickle.load(f)
else:
        assert len(paths) == config.NUM_CLASS, 'length of paths should be %s, but got %s' % (config.NUM_CLASS, len(paths))
data, labels = load_dataset3(paths)
x_train, x_test, y_train, y_test = split_dataset(data, labels)
dataset = EasyDict({
'train': {
'data': x_train,
'labels': y_train
},
'test': {
'data': x_test,
'labels': y_test
}
})
with cache_path.open('wb') as f:
pickle.dump(dataset, f)
print('All Dataset Read!')
return dataset
if __name__ == '__main__':
pass
|
# Essential imports
import os, sys, re
import pathlib
from .utils import *
# AWS Related imports
import boto
import boto.s3.connection
from boto.s3.key import Key
from boto.exception import NoAuthHandlerFound
bucket = None
bucket_name_final = None
# Command line related imports
import click
import emoji
from pyfiglet import Figlet
f = Figlet(font='slant')
# for the purposes of securely storing tinypng API if necessary
import keyring
LOTTIE_KEYRING_SERVICE_ID = 'lottie_animation_manager'
configured_tiny_png_key = None
compression_enabled = True
'''
The program actually initializes here.
If the user does not already have AWS CLI configured with the appropriate details (AWS Access keys)
they will not be able to continue in the program, not much they can do anyways!
'''
try:
conn = boto.connect_s3()
except NoAuthHandlerFound:
click.echo(click.style(emoji.emojize(' Lottie Animation Manager - AWS Config Error '), bg='red', fg="bright_white"))
click.echo('\n')
click.echo(click.style('In order to continue, please reconfigure and test your local AWS profile/configuration. You\'ll need to download the AWS CLI and configure it first before you can proceed.', fg="green"))
sys.exit()
except Exception:
click.echo(click.style(emoji.emojize(' Lottie Animation Manager - AWS Config Error '), bg='red', fg="bright_white"))
click.echo('\n')
click.echo(click.style('In order to continue, please reconfigure and test your local AWS profile/configuration. You\'ll need to download the AWS CLI and configure it first before you can proceed.', fg="green"))
sys.exit()
def clear_keyring():
'''
This function is used for testing purposes. Basically clears existing keyring values so you can reconfigure
the command line utility from scratch and see if configs are working.
'''
global bucket
global configured_tiny_png_key
global compression_enabled
keyring.delete_password(LOTTIE_KEYRING_SERVICE_ID, 'lottie_animation_manager_config_complete')
keyring.delete_password(LOTTIE_KEYRING_SERVICE_ID, 's3_bucket_name')
keyring.delete_password(LOTTIE_KEYRING_SERVICE_ID, 'tiny_png_credentials')
click.echo("cleared keyring")
def compress_image(file_name):
'''
Compress a local PNG or JPEG image using the TinyPNG API
'''
global bucket
global configured_tiny_png_key
global compression_enabled
import tinify
tinify.key = configured_tiny_png_key
click.echo(click.style('Compressing image files:',
fg="green"),
nl=True)
try:
original_file_size = sizeof_fmt(os.path.getsize(file_name))
source = tinify.from_file(file_name)
source.to_file(file_name)
compressed_file_size = sizeof_fmt(os.path.getsize(file_name))
click.echo(click.style('{} - Compression successful'.format(file_name),
fg="bright_cyan"),
nl=False)
click.echo(click.style(' {} ➟ {} '.format(original_file_size, compressed_file_size),
fg="blue"))
except Exception as e:
click.echo(click.style('{} - Error compressing this file'.format(file_name),
fg="red"))
def uploadDirectory(directory_name):
'''
This is the bread and butter of the application. Does a few things:
    1. Lets the user know which files it has discovered
    2. Confirms they want to proceed
    3. Optionally compresses the images
    4. Uploads the images to S3 under the specified animation name, inside the requested S3 bucket
'''
global bucket
global configured_tiny_png_key
global compression_enabled
# Screen Title
click.echo(click.style(emoji.emojize(' Upload Current Working Directory :up_arrow: '), bg='green', fg="bright_white"))
# Screen Instructions
click.echo(click.style("Make sure you are 'inside' of the animation directory with all of your lottie assets.\n", fg="green"))
click.echo(click.style(emoji.emojize('Lottie Animation Manager has discovered the following files in the current directory: '),
fg='green'),
nl=True)
# list out current files which will be sent to the user, ignoring .ds_store files generated by MacOS
file_count = 0
for root, dirs, files in os.walk("."):
for f in files:
current_file = os.path.relpath(os.path.join(root, f), ".")
if not ".DS_Store" in current_file:
if os.sep in current_file:
click.echo(click.style(" {}".format(current_file), fg="blue"))
file_count += 1
else:
click.echo(click.style(current_file, fg="blue"))
file_count += 1
click.echo('\n')
click.echo(click.style('The above',
fg='white'),
nl=False)
click.echo(click.style(' {} files '.format(file_count),
bold=True,
fg='white'),
nl=False)
# confirm they want to proceed with the upload process
if click.confirm(click.style('will be uploaded to AWS S3 Storage. Please confirm'.format(file_count),
fg='white'),
abort=True):
# Clear screen
if compression_enabled == True:
click.clear()
# Title
click.echo(click.style(emoji.emojize(' Compressing & Uploading Animation Assets '), bg='green', fg="bright_white"))
click.echo('\n')
# compress any images first
for root, dirs, files in os.walk("."):
for f in files:
current_file = os.path.relpath(os.path.join(root, f), ".")
temp_current_file = current_file.lower()
if temp_current_file.endswith(".png") or temp_current_file.endswith(".jpg") \
or temp_current_file.endswith(".jpeg"):
compress_image(current_file)
click.echo('\n')
click.echo(click.style('Uploading animation asset files to AWS S3 Storage Bucket:',
fg="green"),
nl=True)
# the user has confirmed, images are compressed, now send it up to s3
for root, dirs, files in os.walk("."):
for f in files:
current_file = os.path.relpath(os.path.join(root, f), ".")
if not ".DS_Store" in current_file:
try:
k = bucket.new_key("{}/{}".format(directory_name, current_file))
k.set_contents_from_filename(current_file)
k.set_acl('public-read')
click.echo(click.style('{} - Upload successful'.format(current_file),
fg="bright_cyan"))
except:
click.echo('Error uploading file to s3!')
# Send them to the view animations listing view, now that we're done here
if(click.confirm('\nAll done! Do you want to continue to the animations listings section?')):
list_hosted_animations()
else:
click.echo(click.style(emoji.emojize("Thanks for using Lottie Animation Manager, have a nice day :sun: \n"), fg='bright_green'))
else:
# terminate the program
click.echo(click.style(emoji.emojize("Thanks for using Lottie Animation Manager, have a nice day :sun: \n"), fg='bright_green'))
@click.command()
def upload_current_lottie_directory():
'''
Main purpose of this function is to get the user to enter a valid name for the animation following s3 rules.
'''
global bucket
global configured_tiny_png_key
global compression_enabled
# Clear screen
click.clear()
# Screen Title
click.echo(click.style(emoji.emojize(' Create a New Animation :rocket: '), bg='green', fg="bright_white"))
# Screen Instructions
click.echo(click.style("Create your new animation by giving it a name.\n", fg="green"))
click.echo(click.style("The animation name is the name you will use in your vue project to reference/call the animation. It's also the name given to the 'folder' inside your S3 bucket where your assets will be stored. Please name it carefully and follow typical file naming conventions.\n", fg="green"))
# Ask for a name for their new animation
valid = False
while (valid == False):
animation_name = click.prompt('Please enter a CaSE SENSitive name for your new animation (eg. animation-1)')
if (animation_name is None or len(animation_name) < 3 or len(animation_name) > 59):
click.echo(click.style("Please enter a valid animation name. Longer than 3 characters, shorter than 60 characters.", fg="red"))
elif (" " in animation_name):
click.echo(click.style("Please avoid using spaces in your animation name (best practice)", fg="red"))
elif not re.match(r'^[a-zA-Z0-9][ A-Za-z0-9_-]*$', animation_name):
click.echo(click.style("Please avoid using special characters. Only alpha/numeric, dashes, and underscores are allowed.", fg="red"))
else:
click.echo(click.style("Now checking if '{}' already exists in your S3 Bucket...".format(animation_name), fg="blue"))
# creating a working list of folders in the bucket
existing_folders = []
folders = bucket.list("","/")
for folder in folders:
folder_name = folder.name
# strip the folder name to just the root level, with no trailing slashes
folder_name = pathlib.Path(folder_name).parts[0]
folder_name = folder_name.lower()
existing_folders.append(folder_name)
# check if animation name actually exists in the bucket already
if(animation_name.lower() not in existing_folders):
valid = True
else:
click.echo(click.style("Sorry, please enter another animation name. '{}' already exists in your bucket.".format(animation_name), fg="red"))
# Clear screen
click.clear()
# Go to the upload current directory wizard, pass on the name they have requested
uploadDirectory(animation_name)
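def _is_valid_animation_name(name):
    '''
    A minimal sketch (not called by the CLI) of the naming rules enforced above:
    3-59 characters, no spaces, alphanumeric plus dashes and underscores only.
    '''
    if name is None or not (3 <= len(name) <= 59) or " " in name:
        return False
    return bool(re.match(r'^[a-zA-Z0-9][A-Za-z0-9_-]*$', name))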
# Main Menu Screen
@click.command()
def list_hosted_animations():
''' List hosted animations / folders currently in the s3 bucket '''
global bucket
global configured_tiny_png_key
global compression_enabled
global bucket_name_final
# Clear screen
click.clear()
# Window Title
click.echo(click.style(emoji.emojize(' List Animations '), bg='green', fg="bright_white"))
click.echo('\n')
click.echo(click.style('Found the following folders in your s3 bucket:', fg="green"))
# creating a working list of folders in the bucket
existing_folders = []
folders = bucket.list("","/")
for folder in folders:
folder_name = folder.name
# strip the folder name to just the root level, with no trailing slashes
folder_name = pathlib.Path(folder_name).parts[0]
existing_folders.append(folder_name)
if len(existing_folders) > 0:
count = 1
for folder in existing_folders:
click.echo(click.style("{}) ".format(count), fg="bright_white"), nl=False)
click.echo(click.style("{}".format(folder), bg='bright_white', fg="black"), nl=True)
click.echo(click.style("https://s3.console.aws.amazon.com/s3/buckets/{}/{}/".format(bucket_name_final, folder), fg="bright_cyan"), nl=True)
count += 1
else:
click.echo(click.style('Sorry - no existing animation folders were found!'))
@click.command()
def initialize_configuration():
"""
This is for "first time" usage of the Lottie Animation Manager Command line Utility.
If the user is already configured, they will just be redirected to the main menu.
Otherwise, they will be asked for two things:
1) Which S3 Bucket they want to upload lottie animation assets to
2) What their TinyPNG API key is (if they actually want to use it)
Lottie Animation Manager uses Python's keyring library in order to store this information sort of securely.
"""
global bucket
global configured_tiny_png_key
global compression_enabled
global bucket_name_final
# Detect if the configuration was successfully completed in the past
if(keyring.get_password(LOTTIE_KEYRING_SERVICE_ID, 'lottie_animation_manager_config_complete') == 'true'):
# grab the stored configuration values
# s3 bucket
bucket_name = keyring.get_password(LOTTIE_KEYRING_SERVICE_ID, 's3_bucket_name')
bucket = conn.get_bucket(bucket_name)
bucket_name_final = bucket_name
# configured tinypng API info (if any)
configured_tiny_png_key = keyring.get_password(LOTTIE_KEYRING_SERVICE_ID, 'tiny_png_credentials')
        # keyring stores strings; "false" is written when the user skipped TinyPNG setup
        if configured_tiny_png_key in (None, "false"):
            compression_enabled = False
main_menu()
else:
# Clear screen
click.clear()
# Window Title
click.echo(click.style(emoji.emojize(' Lottie Animation Manager Setup Wizard '), bg='green', fg="bright_white"))
click.echo('\n')
# Initial instructions
click.echo(click.style('Thanks for downloading Lottie Animation Manager!', fg="green"))
click.echo(click.style('LAM depends on a few services:', fg="green"))
click.echo(click.style('- Amazon Web Services S3 + Cloudfront', fg="bright_cyan"))
click.echo(click.style('Follow the instructions in our docs on setting up these services.', fg="bright_cyan"))
click.echo(click.style('LAM utilizes your local default AWS configuration/profile keys to upload media. You\'ll need to download the AWS CLI locally and configure it correctly.', fg="bright_cyan"))
click.echo(click.style('- TinyPNG (optional)', fg="bright_cyan"))
    click.echo(click.style('Easily compress images in your lottie assets folder before uploading them to your CDN. API key needed for this.', fg="bright_cyan"))
# Please enter your bucket name
bucket_set = False
while(bucket_set == False):
bucket_name = click.prompt('Please enter the name of your AWS bucket where assets will be stored')
try:
bucket = conn.get_bucket(bucket_name)
keyring.set_password(LOTTIE_KEYRING_SERVICE_ID, 's3_bucket_name', bucket_name)
bucket_name_final = bucket_name
bucket_set = True
except:
click.echo(click.style("Could not connect to '{}' bucket, please try again".format(bucket_name), fg="red"))
# Please enter your tinyPNG stuff
tiny_png_key = click.prompt('Please enter your TinyPNG Key or enter "skip" to disable image compression')
if(tiny_png_key.lower() != 'skip'):
keyring.set_password(LOTTIE_KEYRING_SERVICE_ID, 'tiny_png_credentials', tiny_png_key)
configured_tiny_png_key = tiny_png_key
compression_enabled = True
else:
compression_enabled = False
keyring.set_password(LOTTIE_KEYRING_SERVICE_ID, 'tiny_png_credentials', "false")
# Mark configuration as complete now
keyring.set_password(LOTTIE_KEYRING_SERVICE_ID, 'lottie_animation_manager_config_complete', "true")
main_menu()
@click.command()
def main_menu():
'''
Main Screen of the Lottie Animation Manager Command line utility. Choose from a menu of options!
'''
# Clear screen
click.clear()
# Quick "Up top logo"
click.echo(click.style("--------------------------------------------------------\n", fg='bright_green'))
click.echo(click.style(f.renderText("Lottie CDN"), fg='bright_green'))
click.echo(click.style("--------------------------------------------------------", fg='bright_green'))
# Welcome and project description
click.echo(click.style(emoji.emojize('Thanks for using Lottie Animation Manager :thumbs_up:'), fg='bright_green'))
click.echo(click.style("Lottie Animation Manager makes it easy to manage, compress, and upload Lottie assets to a CDN. \n", fg='bright_green'))
# Main menu with 3 options
click.echo(click.style("Choose an option below to get started: ", bold=True))
click.echo(click.style("1) Create a New Animation / Upload Current Directory", fg='bright_cyan'))
click.echo(click.style("2) List Hosted Animations", fg='bright_cyan'))
click.echo(click.style("3) Exit Lottie Animation Manager", fg='bright_cyan'))
# Ask the user what menu option they want
menu_choice = click.prompt('Please enter a value between 1-3', type=int)
    # Option 1: Create a new animation, i.e. upload the current working directory
if(menu_choice == 1):
upload_current_lottie_directory()
elif(menu_choice == 2):
list_hosted_animations()
elif(menu_choice == 3):
click.echo(click.style(emoji.emojize("Thanks for using Lottie Animation Manager, have a nice day :sun: \n"), fg='bright_green'))
        sys.exit()  # exit cleanly after the goodbye message
else:
click.echo(click.style("Please enter a valid menu choice number.", fg="red"))
if __name__ == '__main__':
initialize_configuration()
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x09\xab\
\x00\
\x00\x54\x9e\x78\x9c\xed\x9c\x6f\x8c\x9c\x45\x1d\xc7\x67\xf7\xb9\
\xb2\x5c\x09\x2e\xa4\x15\x1b\xac\x6c\x03\x27\x55\x1a\x53\x89\x5a\
\x1b\xaa\xb9\x5e\x31\x91\x14\x39\x23\x31\x39\x73\xd0\x36\x47\x7c\
\x45\x84\x68\x42\x8b\x12\x4c\x97\x17\x87\xad\x2f\x44\x12\x0c\x36\
\xb4\x31\x91\xc6\x56\x5e\xe0\x0b\x6d\x78\x21\x31\x6b\x48\xaa\x62\
\x7a\x6d\x68\x21\x01\x44\x96\xda\xa4\x69\xb4\xd7\xb5\xb1\xed\x42\
\xaf\x3b\x7e\xbf\xcf\x33\xcf\xde\xdc\xdc\x3c\x7f\xe6\xd9\xe7\xf6\
\xf6\xda\xfe\x9a\xcf\xdd\xaf\x77\xbb\xfb\x3c\xf3\xd9\x79\xe6\x99\
\x99\x9d\x39\x21\x0a\xf8\x77\xcf\x3d\x02\x5f\x57\x88\x1d\xdf\x16\
\xe2\x26\x21\xc4\x67\x00\x7e\x24\x7e\x2e\x82\x9f\xfb\xf1\x2d\x21\
\xc6\xca\x01\x57\x63\xee\x63\x62\xb3\x1b\x57\x42\xc4\x94\xbf\x68\
\xa1\x70\xa5\xb8\xb2\x94\xd1\x03\x03\x60\x04\xec\x04\xfb\xc0\x4b\
\x06\x7b\xc1\x53\x60\x18\x2c\xb7\xf9\xba\x1c\xc2\x28\xd3\x62\xb0\
\x11\xbc\x08\x8e\x83\x29\x20\x13\xf8\x10\xbc\x03\x7e\x01\xd6\x81\
\xbe\xcb\xc1\x91\xa5\xce\xb0\x6c\xbf\x03\xe7\x52\x38\x89\x62\x12\
\xbc\x00\x56\x39\xd5\xa5\x43\x32\x99\x2e\xc6\xd4\xf8\xad\xe2\xef\
\x0f\x0a\x71\x78\x4b\x81\xe7\x7e\x1d\x78\x1c\x9c\xea\xc0\x8b\xc9\
\x3f\xc0\xe6\xd4\x75\x29\x70\xb0\x18\xac\x03\x1b\xc0\x90\xc6\x27\
\xba\xe9\x47\xee\x18\xf0\x39\xf1\x83\x1b\xc4\xdf\x46\xc5\x52\x9c\
\xf7\xee\x94\xd7\x91\x2b\xe7\x54\xfb\xd4\x9f\xe8\x28\xf0\x73\x3b\
\x78\x1f\x5c\x00\xe7\x15\xe7\xc0\xc8\x7c\xf8\x91\x3f\xfb\xec\x92\
\xc9\x27\x96\xed\x3b\xfa\x50\x5f\x5c\x19\xe9\xed\x9f\xe0\x00\x78\
\x0e\x8c\x83\xa7\xc1\x2e\xf0\x2a\x38\x01\x5a\x31\xcf\xbf\x08\x7e\
\x9a\xe8\x28\xf0\xb3\x12\x9c\x04\x52\xa3\x05\x46\x23\xfd\xe4\x7c\
\x0d\xb6\xdd\xec\x18\xe8\x07\xcf\x03\x79\xe6\x89\x65\xf2\x8d\x87\
\x3c\xb3\x5c\x4d\xf0\x0a\x78\x10\x54\xc0\x35\x96\xf6\x8a\x65\x5e\
\x09\x1e\x06\x07\xc1\xa5\xcc\x8e\xb2\xf8\xc9\xb9\x9d\xd2\xdc\x90\
\xef\x81\x8f\xe8\xc7\xe2\xe8\x6d\xd5\x76\x5c\x67\x71\x12\xc5\x12\
\xf0\x18\x38\x99\xc9\x91\xab\x9f\x69\x1f\x05\xf0\x4d\xf0\xa5\x4e\
\x1d\x69\x6e\x56\x83\xe3\xa1\x1b\xdd\x11\xae\xb5\xda\xa1\x4d\xe2\
\x73\x66\xf9\xa3\xc2\xe2\x69\x03\x78\xd3\xd9\x91\x8b\x9f\x99\xf5\
\x65\x13\x98\x04\xc7\xc0\x9a\xac\x8e\x34\x37\x7d\x60\x8f\xe9\x46\
\xf1\xda\x7f\x9f\xbc\xf9\x36\xb6\xdb\x49\x5e\x12\x3c\xad\x01\x47\
\x9d\x1c\xa5\xf5\x33\xdb\xcd\x69\xed\xb1\x47\xb3\x3a\xd2\xfc\xac\
\x01\xff\xb1\xb8\x79\x1f\x7c\xe1\x7c\xf5\x53\x42\x3e\x73\x47\xa6\
\x7e\x5d\x27\x8e\x52\xf9\x89\x77\x13\x72\x6c\xc6\xb5\xe6\xee\x67\
\xa7\xc5\xcd\x25\xf0\x88\xde\x3e\x65\x8d\xac\x8e\xc4\x61\x27\x3f\
\x51\x6e\xa4\xba\xd6\x86\x33\xfa\x59\x02\x0e\x59\xfc\xbc\x0e\x96\
\x76\xea\xa6\x13\x47\xc3\x2f\xfe\x2a\xad\x9f\x38\x37\xa7\xd5\xef\
\x0b\x2e\x7e\xb4\x7a\x71\x17\x38\x6b\xf1\xb3\x2d\x2f\x37\x59\x1d\
\xc9\xb5\xa2\x24\x8e\xf8\xfd\xc3\x53\x96\x72\x3f\xe0\xe0\xa6\x93\
\xb6\xe7\xbb\x16\x37\x67\x54\x9b\x94\xab\x1f\x47\x47\x97\x8e\x6c\
\x12\xd5\x9b\xfe\x7c\x62\x29\xca\xf5\x30\xd8\xa6\xb1\x55\xd5\xab\
\x61\x75\xed\xe4\xea\xc6\xf0\xf3\x13\x8b\x9f\xa3\xe0\xe3\x79\xbb\
\x71\x74\x74\x06\x8c\x09\x14\xb5\xff\xaf\xff\x8b\xea\xf7\xad\x05\
\x6f\xe5\xed\x86\xa1\xf9\xd9\x6d\xf1\xf3\x47\x70\xed\x5c\xf9\x61\
\x24\x38\xe2\x78\x7f\x2c\xb2\xaf\x35\xdb\xd1\x9b\x79\xba\x61\x28\
\x37\x45\xb0\xdf\xe2\xe7\x65\xd5\x27\xea\x5c\x44\x4c\x58\x1c\x1d\
\x03\x8d\x58\x37\x61\xd8\xeb\xd1\x64\x1e\x6e\x18\xca\x4f\x01\xfc\
\xda\xe2\xe7\x0f\x60\xd1\x5c\xfb\x61\x58\x1c\xdd\x3f\xa1\xcd\x39\
\xc6\xc6\x4c\x47\x5f\x06\xf7\x45\x5c\x87\xce\xe3\x32\xed\xfa\x7a\
\xd6\xe2\xe7\x2f\xe0\xfa\x6e\xf8\x61\x18\x8e\xdc\xfa\xe9\x33\xcb\
\xbb\x52\xdd\xd7\x46\x13\xe0\xdc\x48\xec\xfc\x91\xe6\xe7\x31\x8b\
\x9f\x7f\x81\x81\xb9\xb8\x7f\x45\x45\x26\x37\x61\x4c\xfb\xd9\xaa\
\xf5\x8f\xe2\xe0\xfc\xd1\x50\x4a\x3f\xdf\x00\x1f\x1a\x7e\x2e\x82\
\x91\x6e\xfa\xe9\x38\x02\x3f\xdb\x22\xee\xf5\x26\xe7\x1d\xfc\xdc\
\x0a\xea\x96\x3a\xb4\x2f\x6c\x83\x16\x84\xa3\x9c\xfd\x30\xe4\xf4\
\xd8\xfd\x25\x8b\x9f\x49\x30\xa8\x79\xec\x52\x41\x33\x86\x9b\x1f\
\xce\xd5\x6e\x48\xe9\x87\x7c\x47\x5d\x53\xa6\xa3\x03\xa0\xdc\xeb\
\x7e\xd8\x5e\xa9\x3e\x24\xfb\xd9\x1c\x8b\x9c\x4c\x80\xf3\xd9\xeb\
\x1c\xfc\x70\x8c\x7a\xd0\xe2\x87\x63\xf8\xf1\xb0\x2f\xd4\x8b\x8e\
\xda\xed\x39\xaa\x85\x3f\x16\x09\xc6\x6b\x2b\x13\xe0\x63\x16\xa7\
\xe9\x1f\x69\x8e\x1e\xb0\xb4\xd3\xe4\x3c\xd8\x0a\xbc\x5e\x73\x64\
\xdc\xf3\xc6\x38\x5e\xe3\x98\xd6\x1f\xf7\x1f\xee\xac\xff\x13\x86\
\xe6\x87\x73\xf3\xbf\xb1\xf8\xd1\x1d\xf5\x4c\x3d\x32\xdd\xa8\xf1\
\xda\x25\x73\x8e\xcd\xb9\xaf\x60\x09\xcd\xd1\xed\xe0\x48\xaf\x3b\
\xd2\xca\x5e\x50\x6e\x26\xa3\xe6\xd8\xf2\x70\xa4\xdf\xa3\xc0\x7a\
\x19\xcc\xab\xf6\xac\x23\xad\xdc\xf7\xab\xf1\x5a\xb6\xcf\xd7\x1c\
\xc2\x70\xf4\xf5\x88\x3e\x51\x4f\x38\xb2\x8c\x69\xb3\x7f\x06\xe9\
\x10\x0b\xc5\x91\xc3\x1c\xdb\x55\x47\x57\x1d\x45\xc6\x55\x47\xc9\
\xb1\x40\x1c\x5d\x00\x8f\xf6\x40\x7b\xed\xfe\x19\x64\x87\x91\xd2\
\x11\x3f\xef\xd8\xd2\x23\xf7\xfc\x34\x8e\xb8\xfe\xa8\xaf\x4b\x8e\
\x38\xce\x1f\xd3\x1f\x33\x5f\xe1\xe0\x88\x6b\xb4\x36\xe7\x75\x9d\
\x31\x2c\x8e\xb8\xd6\xa3\xd1\x2b\x6e\xc2\x70\x70\xc4\xb5\x7e\xab\
\xd2\xfa\xf9\xda\x3b\x32\x15\xf7\xbd\x75\x4e\x8c\xff\xe9\xf7\x42\
\x8e\x7f\xf2\x6e\xf8\x18\x69\xed\xf8\x74\xa1\x57\xdc\x84\x61\x71\
\x14\xb5\xb6\xe6\x85\xb4\xd7\x99\x2a\x7f\x3f\x18\x02\x1b\x13\xb8\
\xf7\xab\xef\xc9\xd5\xf2\x11\x21\xfe\xfd\xec\x57\x7a\xca\x4d\x18\
\x86\xa3\xbb\x27\xec\x6b\xb4\x38\x6e\x5b\xe7\xe0\xa7\x02\xde\x03\
\x17\xc1\x47\x31\xb4\xc0\xde\xa1\x77\x65\x71\xc3\xbb\xdd\x5d\x63\
\xeb\x12\x86\x23\xae\x63\xb3\xad\xf5\xe3\xfa\xec\x42\x4a\x3f\x2b\
\xc0\x71\x20\x53\xb0\x1f\x14\xf9\xbc\x5e\x0e\xcd\x0f\xd7\xfa\x1d\
\xb4\xf8\xe1\x1a\xf6\xe5\x49\x75\x48\xf3\xf3\xc1\xe5\xe4\x87\xa1\
\x39\xe2\x9a\x51\x73\x5d\x2d\xd7\xf9\x0f\x3b\xf8\x49\x5b\x7f\x7e\
\xbb\x00\xfd\x70\x5d\xed\x09\x4b\x1d\x7a\x2a\xa5\x9f\x5b\xc0\x1b\
\xa0\x01\x26\x63\x38\x0b\xf6\x80\x42\x78\x5f\x4b\x1b\x46\x3f\x21\
\x87\xd2\xa7\x0b\x55\x7e\xf6\x9b\x5f\xb5\xf8\xd9\x9b\xd4\x06\xa9\
\x72\x5e\x03\x56\x81\x3b\xc1\xe7\x63\xb8\x53\xb5\xe5\x33\xee\xfd\
\x49\x61\xba\xe9\xa6\x23\xad\x0e\xed\xb2\xf8\xe1\xbe\xa3\x62\x0a\
\x3f\x2e\x8c\x82\xef\x83\x45\x69\x1c\x19\x4e\xb6\xa8\xf1\x5a\xd7\
\xc6\xb4\x9a\x9f\xa7\xb3\xf8\x49\x0a\xc3\xcd\x26\x75\x9d\x35\xc1\
\x8f\x40\x5f\x9c\x23\xc3\xcd\x98\x1a\xaf\x5d\xe8\xe6\xb8\x5f\xf3\
\x33\x9e\xb7\x1f\x8b\x9b\xd3\x5a\x3b\x7d\x41\x39\x5a\x64\xab\x67\
\xec\x1f\xb1\x0f\xa9\xfa\xd9\x63\x6a\xbc\xd6\xf5\xb9\x11\xcd\xcf\
\x73\x79\xfa\x49\x70\x13\xd2\x54\xd7\x9a\x50\x6d\x76\x31\x84\x7d\
\x48\xf6\xb3\x65\xb0\xde\xa1\x31\x5f\xf3\x47\xca\x0d\xf7\x8b\x1c\
\xb0\xf8\xd9\x97\xc5\x4f\x4a\x37\x52\x5d\x6b\xa3\xaa\xcd\xde\xa3\
\xee\xfd\xfb\x15\x7b\xfd\xb1\x48\x30\x5e\x9b\xb5\x7f\xc1\x70\x34\
\x27\x9f\x41\x6a\x75\xa7\x32\x11\xec\x3b\x32\xfd\xec\x4c\x33\xc6\
\xc8\xe8\xe6\xb4\xfa\xbd\x50\xf7\xb5\xb3\xc6\xef\x39\x16\xb9\xd7\
\x1f\xd3\xa6\x9b\x87\xcc\xdd\x91\xe6\x87\xfb\x8e\xcc\x3d\x6d\xfc\
\xff\x88\x8b\x1f\xcd\x4b\xd1\xc1\x8d\x50\xf7\xfe\x49\xe3\x31\x1c\
\xaf\x6d\xe4\xb8\x3f\xc5\x3c\x64\xee\x8e\x34\x37\xdc\x77\xf4\x8a\
\xa5\xee\x70\x3f\xed\x40\x46\x3f\x6b\xc1\xa9\x94\x6e\x62\xfd\xf0\
\xf7\x29\xe6\x21\x73\x75\xa4\xb9\x21\x9c\x0f\x6b\x5a\xfc\x70\xcf\
\xb1\x97\xf1\xda\xba\x11\xec\x52\xd7\x48\x92\x1b\x17\x3f\xce\x8e\
\x5c\x3d\xf9\xcf\x79\xe6\x0e\x7f\xbf\xa8\xda\x9b\xf5\xb6\xc5\x0d\
\xe7\x11\x37\x76\xd8\xf6\x7c\x0c\xfc\x52\x73\x34\xcb\x8d\xf6\x9c\
\x58\x3f\xed\xf3\xce\xf8\xb9\x48\x5a\x47\xe1\x63\xb9\xef\xe8\xec\
\x8f\x6f\xbe\x8d\xfb\xd7\x2c\x6e\x08\xf7\x65\x2f\x76\xf5\x13\xe1\
\x68\x97\xba\xd6\x36\x9b\x6e\x5c\xfc\x64\x70\xc4\xf5\x47\x65\xe3\
\x39\xb1\x5e\x34\xbe\x08\x5e\x8b\xd8\x2b\xca\x7d\xd9\xa9\xe6\xc6\
\x52\x3a\xba\x51\xb5\x47\x45\xd3\x8d\xab\x1f\x47\x47\x5c\xa3\xc5\
\x75\x6c\x5c\xeb\xb7\xc8\xe2\xc0\x06\xf7\x1d\x3d\xaa\xbf\xa6\xe1\
\x88\xf7\xac\xc7\xf5\xf6\x29\x6b\x58\xda\x18\xeb\x18\xc2\xd5\x8f\
\xa3\xa3\xf0\x73\x12\xae\x19\x65\x3f\x93\x6b\x8f\xaf\x57\xbe\x78\
\xfd\x71\x2f\x04\xf7\x8b\x70\x4f\x0d\xf7\x1d\xbd\xae\xbc\xce\x78\
\x0d\xb5\x57\x94\x7e\x76\x4f\x68\xfb\x68\x3b\x8d\x24\x37\xda\x63\
\xd8\xff\x69\x18\x7e\x2e\x46\xf9\x89\x70\x14\xb5\xb6\x26\x84\xeb\
\x24\xb9\x3e\x9b\x6b\xd8\xb9\xce\x9f\x7b\x21\xb8\x5f\x84\x7b\x6a\
\xce\x24\x3c\x57\x72\xef\x3a\xf7\xf7\xab\xbf\x81\xd0\xb5\x50\x7e\
\x38\x37\xc2\xf9\x23\xce\xb1\x7d\xa0\xbe\x73\x3e\x7b\x28\xca\x8f\
\xc5\xd1\x7a\x19\xbd\x46\xab\x13\xb8\x2f\xfb\x79\xee\xef\xe7\x3d\
\x8d\x7f\x27\x62\x1e\xfc\x70\xfe\x88\x73\x6c\x2b\x34\x38\xe6\xe8\
\x8f\xf3\xc3\x30\x1c\x71\x1d\x1b\xd7\xfa\xd9\xd6\x43\x66\x81\xe3\
\x1a\xee\x5d\xef\x0f\x8f\xc1\xbf\xa5\xd1\xcd\x88\x6a\xa7\x92\xae\
\x4b\x3d\x0c\x47\x2c\x0b\xd7\x8c\x1e\x94\xf6\xb5\xc7\x69\xe0\x9e\
\x63\xee\xcb\x5e\x6d\xbc\x76\x17\x8c\xcc\x4d\x98\xe5\x90\xc1\xda\
\x63\xae\xcf\xe6\x1a\xf6\x7a\x8a\x3a\xc5\xbd\xa2\xdc\x4f\xcb\x3d\
\xc7\x6c\xaf\x9d\xfb\x4f\x0b\x21\x2c\x9e\x58\x4e\xee\x85\xe0\x7e\
\x11\xee\xa9\xe1\xbe\x23\xee\xcd\xe2\xfe\x35\xee\xf1\xe3\x3e\x48\
\xee\x15\xbd\x4b\x39\x9d\xb7\xb9\xee\x6e\x86\xad\x9c\x1a\x9c\x73\
\x2b\x26\x3c\x66\xbe\x8b\xd0\xb5\x48\xf2\x70\xa5\xf9\x98\xaf\x90\
\x61\x30\x6f\x89\x4a\x3b\x6f\x08\xaf\x9d\xd7\xd4\xe3\x98\x57\x85\
\xd8\xae\xf2\x16\xa8\xd4\x2a\x7e\x3e\x05\xca\x2a\x6f\x82\x92\x96\
\x7b\xb5\x4a\x99\x79\x03\x14\x6a\xb7\x94\x98\xd7\x79\xf0\xda\x72\
\x2f\xc8\x07\x85\x58\xbf\xac\xc0\xbc\xe6\x1f\xed\x06\xe1\xe7\x05\
\xe9\x3f\xcc\xcf\x3d\xd9\x68\xe7\x25\xd9\x0c\xf3\x6a\x59\xf2\xd0\
\x05\x95\xf3\x94\xfc\xd7\xac\x06\x87\x2f\x85\x79\xb5\x24\xca\x61\
\x5e\x2b\xe3\xab\xca\xeb\x95\x9a\xa8\xa8\xbc\x31\x58\x0f\xf2\xb2\
\x94\xcd\xed\x8d\x76\x8e\xff\x05\x79\x69\x3a\xaf\x79\x66\x1e\x3c\
\x1e\xe7\x8c\x08\x5e\xa7\xee\x5b\xab\xa9\x9c\xd6\xaa\xca\xc3\x20\
\x05\x96\x82\x9c\xca\x83\x73\x6e\x0a\x1e\x2c\x28\xcb\x94\x28\xf9\
\x92\x03\xcf\x3c\x40\x55\x6c\xf7\xcb\x5c\xf0\x5f\x73\x30\x78\x2f\
\xda\xc7\xc2\x8f\xda\xe7\x20\xfd\x03\xe3\xd5\x82\xf7\x11\x39\x5e\
\x8d\x79\x93\xcf\x6d\xf9\x79\x33\x78\x7b\x85\xe7\xe7\x7e\xc1\xaa\
\x5e\xfb\x1c\x64\x4d\xcb\xeb\x85\xe9\xbc\x31\x3b\x2f\xb5\x50\x82\
\x66\x90\x7b\x53\xe5\x76\x5e\x68\x96\x54\x8e\x53\x69\x78\x61\xde\
\xe0\x9b\xa5\xf2\xba\x5f\x01\x83\x1c\xef\x3a\x14\xf9\xaf\xc9\x3a\
\x06\x45\x41\xce\x37\xba\x12\x9c\x83\x9f\x97\x83\x73\xf3\x73\x2f\
\x38\x67\x29\xae\x2d\xd2\x9a\x9f\x57\x97\xe1\xdb\x93\x41\x79\x6b\
\x6b\x3d\xbe\x94\x9f\xd7\x7f\xe8\xf1\x10\x81\x93\xa6\xc7\x43\x57\
\x82\xc3\x7b\x0d\x2d\x67\xc5\x1a\x54\x39\x2b\xd6\x76\x75\x8d\xb4\
\x54\x1d\xf3\xaf\x83\x6a\xf0\x7e\xc9\xe0\xad\x2e\xb7\xf3\x29\x21\
\xdb\xb9\xba\x46\xfe\x0f\x67\xe8\xe0\xa9\
\x00\x00\x1d\xbf\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x60\x08\x06\x00\x00\x00\xe2\x98\x77\x38\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\
\x01\x00\x9a\x9c\x18\x00\x00\x05\x16\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x36\x2d\x63\x31\x34\x38\x20\x37\x39\
\x2e\x31\x36\x34\x30\x33\x36\x2c\x20\x32\x30\x31\x39\x2f\x30\x38\
\x2f\x31\x33\x2d\x30\x31\x3a\x30\x36\x3a\x35\x37\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\
\x78\x6d\x6c\x6e\x73\x3a\x64\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\
\x6d\x65\x6e\x74\x73\x2f\x31\x2e\x31\x2f\x22\x20\x78\x6d\x6c\x6e\
\x73\x3a\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\
\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\
\x6d\x6c\x6e\x73\x3a\x73\x74\x45\x76\x74\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\
\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\
\x73\x6f\x75\x72\x63\x65\x45\x76\x65\x6e\x74\x23\x22\x20\x78\x6d\
\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\
\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x32\
\x31\x2e\x30\x20\x28\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x20\x78\
\x6d\x70\x3a\x43\x72\x65\x61\x74\x65\x44\x61\x74\x65\x3d\x22\x32\
\x30\x32\x31\x2d\x30\x31\x2d\x32\x30\x54\x31\x39\x3a\x33\x36\x3a\
\x33\x30\x2b\x30\x38\x3a\x30\x30\x22\x20\x78\x6d\x70\x3a\x4d\x6f\
\x64\x69\x66\x79\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\
\x31\x2d\x32\x39\x54\x31\x30\x3a\x31\x32\x3a\x34\x32\x2b\x30\x38\
\x3a\x30\x30\x22\x20\x78\x6d\x70\x3a\x4d\x65\x74\x61\x64\x61\x74\
\x61\x44\x61\x74\x65\x3d\x22\x32\x30\x32\x31\x2d\x30\x31\x2d\x32\
\x39\x54\x31\x30\x3a\x31\x32\x3a\x34\x32\x2b\x30\x38\x3a\x30\x30\
\x22\x20\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3d\x22\x69\x6d\x61\
\x67\x65\x2f\x70\x6e\x67\x22\x20\x70\x68\x6f\x74\x6f\x73\x68\x6f\
\x70\x3a\x43\x6f\x6c\x6f\x72\x4d\x6f\x64\x65\x3d\x22\x33\x22\x20\
\x70\x68\x6f\x74\x6f\x73\x68\x6f\x70\x3a\x49\x43\x43\x50\x72\x6f\
\x66\x69\x6c\x65\x3d\x22\x73\x52\x47\x42\x20\x49\x45\x43\x36\x31\
\x39\x36\x36\x2d\x32\x2e\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\
\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\
\x69\x64\x3a\x34\x30\x30\x30\x33\x32\x62\x61\x2d\x64\x39\x63\x35\
\x2d\x37\x31\x34\x61\x2d\x61\x63\x64\x32\x2d\x32\x30\x65\x64\x30\
\x30\x31\x64\x38\x62\x66\x39\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\
\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\
\x69\x64\x3a\x34\x30\x30\x30\x33\x32\x62\x61\x2d\x64\x39\x63\x35\
\x2d\x37\x31\x34\x61\x2d\x61\x63\x64\x32\x2d\x32\x30\x65\x64\x30\
\x30\x31\x64\x38\x62\x66\x39\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x4f\
\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\
\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x34\x30\x30\x30\x33\
\x32\x62\x61\x2d\x64\x39\x63\x35\x2d\x37\x31\x34\x61\x2d\x61\x63\
\x64\x32\x2d\x32\x30\x65\x64\x30\x30\x31\x64\x38\x62\x66\x39\x22\
\x3e\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x48\x69\x73\x74\x6f\x72\x79\
\x3e\x20\x3c\x72\x64\x66\x3a\x53\x65\x71\x3e\x20\x3c\x72\x64\x66\
\x3a\x6c\x69\x20\x73\x74\x45\x76\x74\x3a\x61\x63\x74\x69\x6f\x6e\
\x3d\x22\x63\x72\x65\x61\x74\x65\x64\x22\x20\x73\x74\x45\x76\x74\
\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\
\x2e\x69\x69\x64\x3a\x34\x30\x30\x30\x33\x32\x62\x61\x2d\x64\x39\
\x63\x35\x2d\x37\x31\x34\x61\x2d\x61\x63\x64\x32\x2d\x32\x30\x65\
\x64\x30\x30\x31\x64\x38\x62\x66\x39\x22\x20\x73\x74\x45\x76\x74\
\x3a\x77\x68\x65\x6e\x3d\x22\x32\x30\x32\x31\x2d\x30\x31\x2d\x32\
\x30\x54\x31\x39\x3a\x33\x36\x3a\x33\x30\x2b\x30\x38\x3a\x30\x30\
\x22\x20\x73\x74\x45\x76\x74\x3a\x73\x6f\x66\x74\x77\x61\x72\x65\
\x41\x67\x65\x6e\x74\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\
\x74\x6f\x73\x68\x6f\x70\x20\x32\x31\x2e\x30\x20\x28\x57\x69\x6e\
\x64\x6f\x77\x73\x29\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x53\
\x65\x71\x3e\x20\x3c\x2f\x78\x6d\x70\x4d\x4d\x3a\x48\x69\x73\x74\
\x6f\x72\x79\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\
\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\
\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\
\x22\x3f\x3e\x06\x2d\xb3\x26\x00\x00\x18\x4f\x49\x44\x41\x54\x78\
\xda\xed\x9d\x67\x8c\x64\xd9\x55\xc7\x7f\xf7\xdd\x57\x55\x5d\x9d\
\x6b\xba\xa7\x27\xed\xcc\x78\xf3\xce\x7a\x83\xd7\x6b\x83\xc1\x42\
\x60\x19\x1b\x61\x63\xbc\x18\x84\xb1\x48\x02\x91\x64\x30\xd8\xc6\
\x08\x81\x4d\x90\x00\x83\x70\xf8\x80\x8d\x64\xa2\x25\xb2\x30\xc1\
\xc0\x07\x96\x6c\xcb\x60\x8c\x91\x2d\xbc\x71\xf2\xce\x4e\x9e\xee\
\xe9\xea\x50\xd5\xa1\xaa\x5e\xe0\xc3\x39\xaf\x5f\xa8\x57\xd5\x55\
\xd5\x61\xda\xde\xbd\xd2\x53\xcf\x74\xd7\xbb\x55\x75\xce\x3d\xe9\
\x7f\xc2\x33\x61\x18\xf2\xe2\xba\x75\xcb\x79\x91\x04\x2f\x32\xe0\
\x05\xbd\xdc\xe4\x7f\x7e\xf7\x0b\x7b\xfe\xb0\x94\x81\x11\x60\xd4\
\x38\x54\x82\x16\x63\xd5\x53\x38\x8d\x45\xea\xc6\xb2\x08\xd4\x80\
\x3a\xb0\x06\x78\x7b\xf5\x8b\xfc\xd2\xdb\x3b\x30\x60\x8f\xae\x12\
\x70\x40\xaf\x7d\xc0\x14\x30\x09\x4c\x61\xa8\x84\x21\x0e\xb0\x0c\
\xdc\x04\x96\xf4\x67\x15\xb8\xa1\xd7\xca\x97\x8d\x04\xec\x91\x75\
\x17\xf0\xb0\x5e\xf7\x01\x77\x00\x07\x81\x09\x60\xb4\xed\xd5\xf9\
\x3e\xc4\x9a\x32\x63\x16\x78\x0e\x38\x09\x3c\xa1\xd7\x33\x40\xf0\
\x22\x03\xd2\xeb\x18\xf0\x6a\xe0\x4d\xc0\x6b\x81\x99\x2d\xee\x57\
\xd6\xeb\x20\xf0\x10\xf0\x66\xfd\xfd\x22\xf0\x9f\xc0\x27\xf5\xe7\
\xa9\x17\x32\x03\x86\x80\x6f\x03\x1e\x03\xbe\x0e\x38\xb4\x0b\xef\
\x39\x09\x7c\x8b\x5e\x8b\xc0\x67\x81\x7f\x00\x3e\x01\xcc\xbf\x50\
\x18\x50\x01\xbe\x13\xf8\x3e\xe0\x6b\x6f\xe1\x01\x98\x04\xde\xa0\
\xd7\x3b\x80\x3f\x02\xfe\x0c\xb8\xf4\x95\xea\x86\xba\xfa\x45\xff\
\x03\xf8\xd8\xa0\xc4\xf7\x43\xb9\xda\x4c\x41\x00\xc1\xe0\x7e\xcf\
\xfd\xc0\x6f\x00\x9f\x06\x7e\x45\x0f\xc9\x57\x14\x03\x5e\x0f\xfc\
\x33\xf0\x5b\x6a\x5c\xdb\x09\xa8\x84\x0d\x43\x30\x5d\x88\x3f\x64\
\x61\xac\x98\xb0\xbd\x46\x0c\x71\x61\x18\x4a\xe3\x10\xfa\xb2\x47\
\xee\x0a\x21\xf0\x85\x59\x1d\x8c\xf7\xed\xc0\xfb\x94\x11\xdf\xfb\
\x95\xc0\x80\x23\xc0\xef\x01\x8f\x03\xaf\xc9\x73\x60\xbc\x00\x5a\
\x81\x10\x72\x5f\x59\xae\x20\xe7\x75\x4d\x1f\x26\x4b\xf0\xd0\x41\
\x98\x1e\x06\x3f\x88\xe9\xef\xfb\x50\x1c\x83\x03\x0f\xc3\xd8\x11\
\x08\x3d\x21\x74\x56\x42\x4a\xe3\x30\x7a\x10\x1c\x57\xa4\x25\x68\
\x29\x33\xda\xd7\x83\xaa\x92\x1e\x07\x1e\xfd\x72\x65\xc0\x1b\x80\
\xbf\x03\x7e\x88\x9c\x43\xed\x07\x72\x95\x0b\x70\x78\x0c\xee\xdf\
\x0f\x77\xed\x13\x62\x07\x41\xfb\x6b\xa7\x87\xe1\x9e\x69\x18\x2b\
\xe9\xe1\x0d\x63\xe6\x18\x25\xb0\x5b\x82\x7d\x77\xc3\xc4\x31\x7d\
\xc3\xcc\x3e\x41\x0b\xc6\x8f\xc0\xc1\x87\x61\xf2\x25\x50\x9a\x50\
\xa9\xf0\x3a\x4a\xc4\x37\xe9\x77\xf8\xf1\x2f\x37\x23\xfc\xf3\xc0\
\xaf\xe5\xfd\x21\x50\x55\x53\x76\xe1\xd0\x18\xec\x1f\x16\x26\xb8\
\x0e\x9c\x5f\x80\x9b\xab\xf2\xef\x68\xb5\x7c\x98\x2c\xc3\xbd\xd3\
\x50\x72\xa1\xe9\xa9\x8a\x31\x19\xa2\x85\xe0\x37\xc1\x29\x08\x13\
\xc2\x10\x96\x2e\x82\x2d\xaa\xa4\x38\xd0\xa8\xc1\xe2\x05\x38\xf8\
\x10\x0c\x4d\x42\x6b\x0d\xd6\xaa\xb0\x7c\x19\x1a\x4b\xb2\xa7\x63\
\xdb\x8e\xcb\x11\xe0\xa3\xc0\xd7\x00\x3f\xb5\xdd\xde\xd2\x76\x33\
\x60\x0a\xf8\x6d\xe0\xad\xdd\x08\x7f\x60\x14\x66\x86\x61\x44\x75\
\xb9\x03\xcc\xaf\xc2\xa5\xe5\xf4\x97\xf7\x42\x18\x72\xe1\x8e\x8a\
\x10\xdf\xf3\xc1\x31\x5d\xde\xdd\x88\xfa\x31\x16\x26\x6f\x87\xd6\
\x2a\xac\xcd\x0b\x53\x40\x7e\xbf\x32\x07\x4b\x97\x45\x4a\x6c\x11\
\xc6\x0e\x43\x79\x1f\xac\xcc\x42\xed\x1a\x34\x6b\x60\x8c\xbc\x36\
\xb3\xbe\x1b\x38\x01\xfc\x08\xf0\x85\xbd\xa8\x82\xf6\x03\x7f\x91\
\x25\xbe\x89\xd4\x4d\x28\xa7\xfd\xc4\x7e\xb8\x7d\x52\x88\xef\x05\
\xc2\x98\x00\xb8\x5e\x87\x86\x07\xae\x89\x19\x06\x70\x7c\x12\x26\
\x87\x84\xf8\xbd\xae\xc0\x03\x77\x08\x2a\x77\x08\x91\x43\x2f\x96\
\x02\x03\xd4\xae\x83\xd7\x88\xa5\xc6\x16\x61\xe2\x28\x1c\x78\x10\
\xc6\x8f\x0a\x55\x82\x56\xee\xd6\x2f\x57\x95\xf4\x9a\xbd\xc6\x80\
\x3b\x81\x7f\x07\xbe\xb1\xcd\x78\x06\x72\x7a\xef\x9d\x86\xfb\xa6\
\x61\xbc\x14\x1b\xde\x10\xb0\x0e\x2c\x37\x44\x02\x92\xaa\xc7\x0b\
\x61\xaa\x0c\x33\xa3\xf2\xfa\x7e\xb3\x16\x7e\x53\x8d\xee\xe1\xb4\
\x57\xe4\xb8\xd0\x5c\x86\x95\xeb\xb1\xb4\x05\x1e\xf8\x2d\xb1\x21\
\x53\x77\xc3\xcc\x03\x50\xaa\x40\xd0\xcc\x35\xd2\x47\x34\x70\x7b\
\xcb\x5e\x61\xc0\x3d\x1a\x4d\x3e\x90\x75\x2b\xbd\x00\x26\x4a\x62\
\x60\x8f\x8c\x89\xfa\x48\x12\xd3\x18\x91\x8e\xab\x35\x61\x94\x4d\
\x9c\xfe\xa2\x15\xe3\xec\x9a\x58\x1a\xfa\x5a\xea\xd2\x8e\x1f\x11\
\x63\xbb\x11\x23\xe8\x7b\xd4\xae\x8a\x0d\x30\x09\x0a\x04\xbe\xb8\
\xb1\x23\x33\xc2\x84\xb1\xc3\xba\x8f\x9f\xab\x6a\xff\x12\xf8\x8e\
\x5b\xcd\x80\x7d\x1a\x3d\x9e\xc8\xea\x7b\x2f\x80\xc3\xa3\xf0\xd2\
\xfd\x72\xea\x5b\x7e\x7b\x00\x65\x0d\xd4\x9a\x50\x5d\x8b\x89\x1f\
\xf9\xfb\xfb\x87\xa1\x32\x24\x92\x30\xe8\x0a\x03\x89\x0f\xc6\x0e\
\x67\x4c\x85\x85\xe6\x0a\xac\x2f\x80\x2d\x89\x54\x18\x93\x88\x47\
\x1a\x2a\x0d\x27\xa0\x72\x57\x6c\x5b\x32\xcb\x02\x7f\xa0\x31\xce\
\x2d\x61\x40\x45\x75\xfe\xa3\x59\x97\x31\x54\xdd\x7d\xd7\x14\x14\
\xdd\x58\xdd\xe4\xad\xea\xaa\x30\x27\x32\xae\x01\x50\x70\x60\x66\
\x44\xd4\xd3\x96\x32\xa6\xa1\x30\xa1\x3c\x05\xc5\xd1\x58\xaf\x47\
\xc4\x5e\x99\x85\xf5\x2a\x78\x6b\x10\x1a\x65\x84\x13\xbb\xac\x84\
\x62\xac\xa7\xef\xd3\xd8\xa1\xdd\x2e\x8c\x03\x7f\x0a\xbc\xe2\x56\
\x30\xe0\x23\xc0\xeb\xb2\xc4\x07\x78\xc9\x24\xdc\xb1\x4f\xdd\xc8\
\x0e\xc0\xaf\x35\x42\xf8\xc5\xf5\xf4\xef\x23\xb5\x35\xae\x46\x7a\
\xab\x2b\xf0\xc5\x20\x8f\x1e\xd4\x93\x1c\xc6\xb6\x60\x7d\x09\xae\
\x7f\x09\xae\x7d\x09\xe6\x9e\x84\xba\xda\x05\xc7\x8d\x25\x28\xf4\
\x44\x82\xa6\xef\x97\x7d\x72\x98\x30\xad\x4c\x38\xb8\x9b\x0c\x78\
\xa7\xba\x65\x29\xb5\x83\x81\x97\x54\xe0\xe8\xb8\xa8\x91\xa0\x0b\
\xac\xe0\x18\x58\x6e\xc2\x4a\x2b\x3e\x91\x41\x28\x8c\x99\x1e\xde\
\x86\xd3\x9f\x90\x02\x63\xc4\x20\x3b\x85\xb4\x51\x35\x88\xf1\xf5\
\x56\x61\xf5\x26\xdc\x3c\x09\xd5\xd3\x1a\x4f\xd8\xd8\x8e\xf8\x2d\
\x18\x39\x20\x92\x60\x4b\xb9\x4c\xb8\x07\xf8\x7d\xa0\xb0\x1b\x0c\
\x78\x35\xf0\xc1\x36\x1c\x27\x10\xc2\x1f\x9f\x54\x62\x76\x39\xbd\
\x8e\x1a\xd6\x9b\x75\x91\x82\x48\xff\xfb\xa1\x9c\xfc\xa9\x61\xf0\
\xe9\xdf\xf3\xc9\x7d\x2f\x17\xbc\x75\x09\xc0\x42\x5f\x09\x9b\xe0\
\x80\x63\xe5\x35\x51\xac\xb0\x74\x49\x18\xe1\xb7\x12\xb1\x40\x28\
\x1e\xd1\xf0\x7e\xf1\x92\x1c\xb7\x1d\xea\x00\xde\x08\xfc\xf4\x4e\
\x07\x62\x93\xc0\x07\xd4\x00\x6d\xb8\x9a\x21\x70\x6c\x02\xee\xdc\
\x17\x7f\x31\xb7\x4b\xc0\xe4\x3a\x72\xf2\x97\x9b\xe0\x38\xf1\x3e\
\x8e\xe2\x41\xc3\x05\x51\x5d\x8e\xd3\x16\x67\xe1\x98\xfc\x60\xcc\
\x44\x3a\x9c\x58\x72\x8c\x91\x9b\x96\x2f\xca\x09\x8f\xa2\xe2\xe8\
\x80\xf8\x9e\xfc\xdd\x5a\x79\x2f\xe3\xc8\x1e\x2b\x37\xa0\x38\x02\
\xfb\xee\x89\xed\x48\x74\xd0\xc6\x6f\x93\x9f\xf3\x67\xe4\xf7\x26\
\xfd\x59\xde\x07\xfc\xb7\x82\x79\x3b\xc2\x80\x5f\xd4\x90\x3c\xa5\
\x7a\x46\x0a\x62\x34\x1b\x9e\x10\xce\x6c\xb2\x89\x75\x60\x61\x0d\
\xd6\xfd\x34\x27\xad\x91\x98\x61\xd5\xcb\x0f\xbc\x8c\x06\x53\x2d\
\x3f\x5f\xd7\xb7\x56\x85\x20\x11\xc1\x1c\x2b\xf0\x43\xfd\x7a\xac\
\xd7\x8d\x01\xcf\x13\x06\x0c\x8f\xca\x9e\x2b\x2b\xf2\xfe\x91\x11\
\x36\x16\xea\x37\xa0\x3c\x0d\x85\x72\x1a\xe6\xf6\x2d\x94\x2b\x30\
\xbc\x0f\xea\xb3\x6d\x11\xf3\x88\xc2\x16\xaf\x04\xd6\xb7\x9b\x01\
\x5f\x0d\xbc\x3d\xcf\x17\xf3\x02\x38\x55\xed\xae\xf3\xf3\xc0\x38\
\x93\xf0\x48\x8c\x91\xff\x5f\x5c\x52\x48\x22\x5f\x9d\x63\xf4\xfd\
\xac\x13\xc3\xca\x8e\x85\xc6\x32\xcc\x3e\x91\x56\x5b\x26\x0a\xb2\
\xfc\xd8\xbb\xf1\x7d\x28\x14\xe0\xc4\xfd\x70\xdb\x31\xf9\xdd\xf9\
\x73\x70\xfa\xa4\xa8\xa8\x48\x0a\xfc\x16\xcc\x3d\x13\xdb\x82\xb6\
\x93\x10\x64\xd4\x59\xbc\x1e\xd0\xbc\xc7\x07\xb6\x9b\x01\xef\x43\
\x2a\x14\xda\x3e\x4c\xcb\x97\xd3\xdf\x87\x5d\xcc\x55\x25\x21\xb0\
\xe6\x75\xcf\x09\x44\xf7\x5a\x9b\xce\x09\x84\x3e\x34\x9b\x39\x52\
\xe3\xc4\xc4\x0f\x43\x68\xb5\xe0\x8e\x3b\xe1\xa1\x47\x54\x55\x85\
\xf0\xc8\xcb\xa1\xd5\x84\xd3\xa7\xa0\x58\x4c\x04\x89\xeb\x9d\xe3\
\x90\xe4\xbe\x39\xeb\x67\x90\xbc\xf3\x99\xed\x32\xc2\xdf\x85\xe4\
\x51\xf3\x3f\x8c\x91\x13\xd9\xeb\xe5\x3a\x9d\x41\x35\x6b\xe4\xef\
\x7d\xdf\xab\x36\x20\x7b\x25\x89\x14\x86\xa2\xeb\xa7\xf7\xcb\x67\
\x5e\x5f\x83\xf5\x75\xb0\x2e\xec\x9f\x69\x67\xba\xb1\xf9\x7b\x66\
\xf7\xed\x80\x8b\xfd\xdc\x76\x79\x41\x65\x85\x61\xdb\x32\x58\x83\
\x5e\x9d\x4e\x76\xcf\x7b\x74\x92\x8e\xbc\xf7\x0a\xd3\x07\x25\x08\
\xa0\x56\x53\xf1\x77\xe5\x0a\x02\x58\x5a\x6a\x57\x35\x5b\xfc\x4e\
\x6f\x05\x1e\xd9\x0e\x06\xbc\x11\x78\x55\x9b\xcf\x9f\xf0\x32\xfa\
\xb9\xcc\x26\x70\x72\x2f\x7b\x9a\xce\xb7\x6f\xd8\x92\xe8\xdf\x49\
\x02\x19\x23\x04\x3f\x77\x06\xce\x9c\x8a\xff\x76\xf2\x69\x38\x73\
\x1a\x6c\x21\x4d\x7c\xb2\xfb\xe5\x5d\x26\x0e\xda\x32\x6b\x18\xf8\
\xb1\xad\xda\x00\x17\xf8\xe1\x94\xf1\x0c\xc5\x53\x39\x36\x2e\x80\
\x59\x3f\x40\x59\x94\x43\xb9\xbc\x2c\x18\x50\xe4\xff\x7b\x9a\xf1\
\x3a\x38\xda\xfd\x84\x47\x31\xc4\xdc\xaa\x5c\x46\x37\x0d\x7c\x28\
\x4f\x88\x8b\x98\x4c\xd4\x18\x07\x9a\x75\x49\xcc\x44\x16\xdc\x5a\
\x51\x3d\x5f\xf8\x3c\x3c\x7f\x41\x5e\x37\x7b\x43\x6c\x43\xa1\x18\
\x83\x6f\x8e\x2b\x39\x85\x42\x39\xd7\xe7\x4f\x49\x15\xc0\xf2\x55\
\xcd\x3d\xb8\x6d\xaa\xfb\xa3\xc0\x93\x83\x32\xe0\xf5\x49\xb0\x29\
\x52\x13\x87\x46\xe1\xe8\x44\x8c\xfb\xf4\x1c\x14\x19\x41\x3d\x2f\
\x2f\xc5\x59\xad\x28\x3f\x5e\x29\xc3\xc1\xb1\x7c\x17\x33\x1b\x07\
\xac\xb6\xe0\x46\x28\x1e\x58\x32\x1d\x39\x72\x20\x13\x07\x38\x30\
\x3c\x2d\xa8\x67\xed\x4a\x1c\x07\xb8\x05\x71\x73\xaf\x5c\x56\xbb\
\x63\xc5\x33\x4a\x46\xbe\xa3\x07\x05\x07\xea\x70\xba\x53\x1f\xca\
\xba\x60\x5c\x68\x2c\x0a\xb3\x12\xde\xd1\x38\xf0\x3d\xc0\xcf\x0e\
\xca\x80\x6f\xcf\xba\x8e\xe5\x82\x20\x95\x9e\xdf\x3f\x52\xe9\x3a\
\xb0\xda\x10\xff\xdf\x24\x44\xdd\xb5\x71\xc6\xab\x1b\xfe\x63\x12\
\x51\xb4\xd1\x93\x1e\x26\x54\x4d\xe0\xa5\x19\x10\x45\xc2\x13\xc7\
\xc5\x4d\x6d\x2e\x83\x53\xd4\xa0\xcd\x69\x0f\xf4\xa2\x04\x4d\xb9\
\x22\xf7\x44\xf0\x74\x2f\x78\xd3\xd0\x38\x94\x26\x61\xed\x66\x32\
\x4c\x05\x75\x5e\x7e\x15\x29\x1c\xee\xcb\x06\xec\xcf\x82\x6d\x21\
\x92\x9d\x2a\x17\xf2\x6b\x73\x36\x55\x41\x2a\x01\x7e\x98\xb0\x05\
\x21\x14\x8d\x94\x9b\xec\x44\xab\x48\xe0\x41\x71\x18\xf6\x9f\x90\
\x3c\x70\xd0\x8c\x91\xce\x3c\xf8\xba\x5c\x81\xe9\x13\x02\x63\x87\
\xbd\x66\xe1\x02\x61\xf4\xf0\x54\x7c\x30\x12\xeb\xfe\x6e\x68\x69\
\x37\x06\xbc\x0a\x38\x9a\x24\xbe\xeb\x08\x54\xc0\x16\x70\x9a\x86\
\xaf\x41\x98\x89\xf7\x29\x38\x72\x05\x3b\xd4\xac\x13\x78\x02\xc6\
\xcd\x3c\x20\xf8\x7e\x69\x42\x63\x87\xcc\x6b\x86\x26\x61\xe6\x41\
\x85\xae\xfb\x2c\xf2\x0a\x03\x18\xaa\x80\x3b\x9c\xab\xb2\xde\x32\
\x08\x03\xde\x94\xf5\x7c\x86\x0b\x52\x14\x15\x0c\x78\xfa\xc3\x50\
\x02\xb6\xec\xed\x05\x57\x03\xab\x1d\xec\x96\x4a\xe6\x89\x0f\xbe\
\x4c\x93\x34\x09\xc4\x2f\x0c\x04\x90\x73\x8b\x83\x7d\x8e\x40\x93\
\x3f\xc3\xd3\xb9\x0c\xf8\x56\x3a\xd4\xbe\x76\x62\xc0\x51\xe0\x9b\
\xb3\x3e\xf1\x74\x19\x4a\x03\x12\x2a\xd2\xcd\x2d\xfd\xd2\x49\x57\
\xb2\x68\xe5\x83\xec\x74\xb7\x5a\xe0\xc7\x8c\x28\x57\x24\xd0\x0a\
\x49\x7b\x4c\x11\x9e\x34\x10\xec\xed\xc8\xbe\x8e\xdb\x46\xa3\x63\
\x9d\xd4\x50\x27\x06\x3c\x04\xdc\x96\x54\x3f\xd6\x91\xd4\xa2\x31\
\x83\x13\xca\x0f\xd3\x09\x9a\x50\x6d\x41\xb9\x30\xe0\x97\x1e\x30\
\x3f\xe0\x7b\xe2\xb5\x18\x9b\x76\x59\xfd\xa6\x5c\x98\xc1\xf7\x2e\
\x8c\x08\x83\x69\xb7\x1f\x8f\xf6\xc3\x80\x47\xda\xbc\x1f\x57\x08\
\x35\x68\x92\xca\x49\x60\x46\x49\x28\xc1\x1a\x28\x5b\x76\x77\x29\
\x90\x96\x04\xd3\x22\x14\xd5\xf7\xb7\xc0\xdb\x40\x12\x36\x85\xb1\
\xdc\x7c\xc8\xc3\xfd\x30\xe0\xfe\xac\xfa\x29\x17\xa0\xb0\x05\x3d\
\x1d\xb9\x8f\x4d\x3f\x6d\x80\x1d\xb3\xb5\x7d\xb7\x92\xa8\x71\x5c\
\xda\xaa\xeb\x3a\xd4\x03\xf5\xc6\x00\x45\x66\x8b\x23\xb9\xde\xd0\
\xdd\xe4\x74\xf8\xe4\x31\xa0\x80\xb4\x09\xa5\xe3\xea\x82\x9c\xd6\
\x41\x08\x65\xf5\x5d\xea\xcd\xb4\x07\xb4\x91\x48\x51\x00\xce\xec\
\x22\x03\x8c\x55\x1b\x10\x26\x10\xd5\x50\xab\x1f\xc2\x01\xd5\x90\
\xee\x55\x2c\xeb\xde\x69\x29\x38\xa4\xb6\x60\x53\x06\x1c\x4c\x5a\
\xec\x40\x11\xc4\x21\xb7\xdd\x75\xeb\xc5\xf0\xba\x0e\xd4\x1a\xf0\
\xf4\x1c\x9c\x5b\x48\x63\x39\x8e\x91\xe0\xeb\x6c\x15\xae\xd5\x14\
\xa2\xde\x8d\x82\x79\xb5\x3d\x36\x27\x0c\xf5\xbd\xad\x49\x63\x18\
\x80\x1d\x12\x5c\x29\xb3\x4f\x05\x38\x9c\x85\xf4\xdd\x0e\x0c\xa8\
\xa4\x0c\xb0\x11\xef\xa7\x5f\xeb\xeb\x5a\xa9\x7a\x38\x35\x07\xf5\
\x96\xa4\x29\xad\xd3\xce\xa5\xe5\x75\xc1\x86\x56\x3d\x49\x6d\x3a\
\xec\x42\x17\x9d\xe9\x00\x29\xfb\xed\x75\xbf\xfd\x32\xc0\x2d\x4b\
\x3c\xe0\x35\xda\xce\xe3\x51\x0d\x70\x2f\x77\x92\x80\x11\xa4\xcc\
\x62\x38\xa9\xd7\xac\x06\x4a\xa1\xe9\x1d\xf8\x74\xb5\xe4\xf0\xf4\
\x3c\xac\xb5\x84\x81\xd6\x49\x63\x4a\x1b\x52\xa2\xc6\xf0\xe2\x92\
\xd4\x88\x5a\xa7\xfb\xde\x5d\x45\xae\x0f\x54\xd6\xd8\x76\x1b\x10\
\x86\xfa\x3d\x07\x40\x7a\x23\x35\x66\x5d\xc1\xa6\x72\xb8\xf8\x60\
\x56\xbd\x67\x25\x60\x4c\x19\x90\xfa\x9e\x43\x2e\x0c\x15\xc1\x84\
\x9b\x54\x27\x27\xf4\xba\x31\x70\x65\x59\xd4\x4f\x29\xe1\x6d\xb4\
\xb4\x2f\xc0\xd1\x0f\x5b\xb4\x9a\xe1\x52\x2f\xe9\xca\x92\x26\xe6\
\x8b\xed\x1e\x89\x41\x93\xe7\x26\xcd\xc0\x08\xd8\x73\x4c\xfa\x6f\
\x5d\x11\x4c\xa7\x0d\xb9\xdc\x08\xa8\x1c\xfd\xfb\xc0\x0e\x87\x9b\
\x28\x00\x48\xdb\x93\x19\xa4\x9a\xb0\x23\x03\x26\x90\xba\xc7\x94\
\xf7\xe2\xf9\x09\x04\xb3\xc7\xa8\xd7\x0f\x61\x71\x4d\x24\x27\x09\
\x43\x8c\x15\xe1\xc4\xb4\x9c\xfa\xcb\x4b\xc2\x24\x94\xa8\xae\x95\
\x94\xe4\xd9\xaa\xc4\x1c\x7e\x90\xbf\xf7\xf2\xba\x42\xd9\x0a\xc6\
\x39\x56\x02\xa8\x85\xf3\xf4\xae\x3f\x1c\x01\xe7\x92\x49\x75\x63\
\xe5\x77\xd5\x73\x6c\x09\x6f\x31\x56\xcb\xdc\xdb\x7b\x0d\xbc\x6c\
\x84\xe0\xe6\x00\x70\x87\xb2\x0c\x58\xf7\xe1\xf9\xc5\xc1\xbc\x9f\
\x48\x62\x5a\xbe\x54\x4f\xbc\xee\x0e\x38\x32\xae\xd1\xde\x01\xf8\
\x97\xb3\x62\x9c\x4b\x36\x4e\x6d\x56\x57\xa5\x5a\xba\x9b\x4b\x6b\
\xad\xfa\xda\x1a\x81\xb6\x56\x61\xb1\xd6\xa7\x21\xb6\x69\x3b\x60\
\x1c\xa9\x19\x6d\xd4\xb6\x68\x5f\x72\xf6\xee\x15\x8e\xae\xa8\x0a\
\xca\xf5\x66\xb6\xf0\x79\xf0\x43\xb8\x77\x2a\x26\x3e\x0a\xc2\x3d\
\x70\x00\x2e\xd6\xd4\xdb\xca\xb8\xad\xfd\x62\x4d\x66\x1b\xda\x4d\
\x36\x49\xb6\x6f\x7f\x3c\x92\xf3\xff\x1d\x8b\x4b\x5d\x9b\x2f\x25\
\x96\x9d\xc7\x81\xf6\xea\xca\x9e\x99\x1a\x32\x63\xa1\xfd\x04\x07\
\x83\xa9\xa0\xc8\x41\x70\x0c\x9c\xaf\x4a\xb9\xfa\x68\xa2\x42\xed\
\x6c\x15\xd6\x3d\xa9\xa2\x4e\x62\x46\xdd\xec\x8d\x63\xda\x4f\x49\
\x18\xf6\x81\xdf\x77\x39\xed\x83\xec\xb3\x15\x49\xca\x32\x60\x0e\
\xb8\x9a\x85\xa1\x87\x5c\x18\x2d\xf7\xa7\x0e\xfc\x00\x96\xd6\x85\
\x98\x8e\x11\x6f\xe7\xe6\x1a\x3c\x7e\x16\x1e\x9c\x91\x0c\xd8\xc5\
\x25\x38\x79\x53\x24\xc3\x24\x88\x3f\x5a\x14\x7b\x91\x07\x7b\x1b\
\x03\xab\x4d\x58\xf5\xd3\xbe\x77\x61\x58\x70\xfc\x5e\x8d\xb0\x31\
\x09\xf4\x33\x51\x37\xe4\x0e\xe9\x3e\x5b\x21\xbe\x81\xd6\x0a\x34\
\x7b\x40\x56\xb3\x0c\x58\x50\x26\xa4\x18\x50\xb4\xd2\x42\xea\xf6\
\x98\x34\x89\x10\xd3\x67\x66\xa5\xeb\xb1\xa8\xc7\xb5\x64\xe1\x7a\
\x4d\x12\xea\x8e\x91\x8e\x47\xd7\x4a\x80\x16\x22\x29\xce\x82\x03\
\x77\x56\xb4\x2f\x2c\xc8\x77\x43\x9f\x5f\x84\xfa\x92\xea\x4f\x05\
\xd1\x8a\x23\x92\xc9\xca\xa6\x24\xbb\xb9\xa1\x0b\x67\xd5\x5b\x89\
\x18\xa0\xa9\xc5\xa9\x7b\x63\x86\x0c\xea\x86\x56\x4f\x8b\x31\xcf\
\x78\x42\x6e\x56\xc5\xe7\xa9\xa0\x9b\xd9\x0d\x1b\xbe\xf8\xef\x45\
\xdb\x5b\x84\x1a\x45\xcf\x87\xc7\x25\xc2\x6d\x05\x71\xb1\x6e\xd1\
\x15\x26\x86\xa1\xfc\xdb\x24\x02\xb3\x30\x80\x43\xe3\x30\x31\xa4\
\x29\x4f\xd3\x39\xde\xca\x3b\xe9\x9b\x06\x6a\xc9\xfb\xc9\x51\x35\
\xa1\xc2\xd4\x5a\xf6\x38\x10\x36\x65\x24\x5e\x0a\xf2\x31\xa5\x25\
\x64\xa0\x54\x47\x23\xbc\xa2\x0c\x68\xa6\xc4\x29\x88\x4b\x0f\x7b\
\x2d\x54\xf2\x02\x69\xb2\xbb\xa3\x22\xcc\x68\x26\x46\x08\x44\x65\
\x89\xd1\x67\x8b\x9a\xf6\x0e\x8c\xc2\x6d\x13\xda\x39\xb9\x4d\x85\
\x59\x9d\xae\xc0\xd7\x72\x13\x93\x21\x9e\xd9\x7a\xd1\x99\xef\x41\
\xab\x91\x7b\x1a\x9e\x04\x4e\x77\x93\x00\x4f\x55\xd0\xa2\x46\x6d\
\x1b\x6d\xa6\xcd\x01\x8c\x70\xa0\x25\x2c\xd6\x48\xc1\x6d\xbd\x91\
\x26\x7c\x24\x2d\x25\xed\x1d\xbe\x6d\x5c\xd4\x9c\xbf\x4b\xe3\x94\
\xf2\xca\x4d\xa2\x56\xd6\x70\x0b\xc6\xb7\xb5\x2a\x4d\x1f\x39\xeb\
\x02\x32\x44\xaa\x2b\x1a\x7a\x15\x19\xf5\xb5\x21\x01\x81\xe6\x72\
\xfb\x95\xc9\xa8\x59\x6f\x66\x44\x82\xae\xa3\x13\xe9\x93\x12\x75\
\xc4\xdc\x3d\x15\x4b\xca\xae\x10\x5f\xed\x46\xd0\x6a\xff\x4a\xd6\
\x65\x4b\xb8\xb8\x71\xa4\x21\x24\x68\xb5\x19\xe0\x65\xa5\xed\xea\
\x66\x0c\x58\x41\xc6\x7c\xa5\xf4\xe5\x6a\x2b\x81\x93\xf4\x19\x84\
\xb5\x02\x31\xc0\x1b\x8d\x77\x19\x7d\x5c\x54\x5c\x3e\xd8\xc5\x60\
\x20\x0c\xda\x55\xd0\x06\x40\x37\xa8\x08\x44\x95\x7e\x6b\x6a\x03\
\xd2\xd4\xbd\x0e\x5c\xec\x25\x1f\x00\x32\x63\x2d\xb5\x56\x5b\xbd\
\x35\x5f\x74\x5a\x51\x2d\x50\x21\xc1\x00\xa3\x52\xd0\xbc\x05\xf3\
\x0d\xc3\x68\x6c\x8d\x49\x83\x66\x4e\x61\x0b\xa7\x5f\x25\xab\xb9\
\x12\xe7\xbb\x13\xeb\x9c\xaa\xf6\x9e\x18\xf0\x44\x4a\x2c\x8d\x04\
\x4b\x6b\xde\xe0\xc9\xf3\xc8\x9d\x1d\x72\x13\xf9\x52\x23\xc8\x54\
\xc3\xdf\x65\xea\x3b\xf1\xb8\x9a\xa4\x01\x37\x26\x1f\x21\xed\x47\
\xfd\xf8\x4d\x61\x40\x0e\x9d\x9e\xe8\x05\x8a\x88\xd6\xff\x25\x23\
\xe2\x08\x11\xad\x37\xd3\x6a\xa9\xef\xc8\x58\xf3\xbf\x49\x89\x0d\
\x02\x91\xae\xdd\x84\x22\x1c\x47\xbd\xa0\x8c\x04\xd8\x82\xc2\xc8\
\x03\x5b\x60\x31\xc0\xad\x95\xdc\x61\x1f\x5f\xec\x87\x01\x27\x81\
\xcf\x64\x75\xf9\xbc\x36\x55\x0f\xc2\x81\xa8\x86\xb3\x68\xdb\x7f\
\xdf\xf0\xfa\x6b\x6f\xda\xca\x09\x8d\x4a\xd0\x9b\xf5\x74\x1c\x10\
\x06\x5a\x52\x52\x1e\x30\x00\xd3\x0f\xdf\x58\x52\x03\xec\xb4\x21\
\x0c\xff\xdb\x0f\x03\x7c\xe0\x6f\xb3\xa7\xb7\xde\x94\xcb\x0e\xc2\
\x00\x25\x70\x41\x31\x92\x20\x91\x50\x69\xf9\x83\x19\xf8\xbe\xe8\
\xa3\x86\xbe\x76\x1d\x6e\x3c\x05\xcb\x97\x48\xf5\x2b\x84\x06\x42\
\x27\x4e\x26\x0d\xa2\xff\xbd\x35\x19\x87\x93\x43\xd5\x7f\x54\x17\
\xb4\x67\x06\x00\x7c\x2a\xa9\x86\x8c\x66\xac\x16\xd6\xb6\xe6\xfe\
\x95\x6c\xfb\x9b\xb6\x34\x10\xdb\xa9\xe2\xac\x68\xcc\xc0\xfc\x49\
\x98\x7b\x1a\xea\x57\x45\x57\x27\xdf\xcf\xba\xd0\xa8\xc6\x3d\xc2\
\xfd\xda\x02\x63\x61\x7d\x51\x24\x2b\x07\x84\xfb\x44\x27\xc5\xd6\
\x8d\x01\xe7\x91\xe1\xa6\xa9\x55\x6d\x88\xd7\x32\xa8\x14\x94\x5c\
\x09\xb6\x92\x13\x53\x9a\x41\x7f\x4d\x7e\xfd\x12\xc6\x6f\x09\x61\
\x6b\xd7\x34\xe9\x53\xcc\x47\x2a\xc3\x50\xa6\xa8\xcc\x9f\x92\x84\
\xba\xb1\xbd\x1f\xac\x30\x90\xe9\x5b\xa1\xdf\x76\x90\x2e\x20\xbd\
\xc3\xf4\xcb\x00\xb2\x6a\xc8\x71\x60\xa5\x09\xf3\x6b\x02\xa2\x59\
\x47\x18\xd1\xeb\xe5\x68\x19\x62\x31\x51\x8f\xe3\x20\x06\xbe\x91\
\x00\xe6\xba\xee\x93\x97\x13\x8e\x60\x04\x9b\xbe\x1c\x57\xd2\x95\
\xb5\xcb\xd2\x7c\x6d\x0b\x31\x51\xc3\x50\xfa\x85\x3d\x2f\xdd\xd0\
\xe1\xb8\x50\xbf\x26\x63\xcc\x36\x1a\xf2\x6c\xf7\xcb\x96\xe4\xe4\
\xaf\x55\x73\x29\xfa\x38\x5d\xc6\x9c\x6d\x26\x68\x7f\x0d\xbc\x1b\
\xad\x94\x73\xd4\x9d\xbc\x5a\x93\x42\xad\x82\xed\x3e\x92\x20\x4f\
\x4f\x06\xa4\x33\x5e\x51\x05\x59\xad\x29\x50\x85\x1f\x74\x36\x82\
\x11\x1a\xba\x51\x5d\x17\x26\x86\xf6\x79\x2a\xfe\xc9\x81\x1c\x5a\
\x70\x5b\xbb\xa6\xfe\xbd\xea\x77\xdf\x97\x6b\x6c\x5c\x27\x68\x2d\
\xab\x81\xb6\xe9\x6e\xf9\xf2\xa4\x94\x97\x6c\xd6\xa2\x64\x8c\xd8\
\x14\xbf\xd9\xa6\xba\x3c\xe0\x8f\xfb\x81\xa3\xb3\x6b\x11\xf8\x43\
\x12\xb3\x21\x5c\x47\xdc\xc6\x67\x6f\x0e\x6e\x34\x7d\x3f\x9d\xe2\
\xb4\x8e\x40\xd4\x0b\xeb\x3d\xde\xaf\x10\x46\x34\x3d\x31\xea\x88\
\x9f\x7b\xb2\x9d\x63\x81\xa7\x9d\x33\x89\x46\x6d\xc7\x81\x7b\xef\
\x83\xbb\x74\x14\xc1\xc9\x67\xe1\xec\x59\x45\x40\xb5\x53\xde\x6f\
\xc2\xdc\xc9\x0e\x8d\xda\xd9\x53\x11\x82\xd7\xcc\x6d\xdc\xfe\x24\
\x32\x1e\x79\x60\x06\x80\x8c\x62\x79\x07\x70\x3c\x19\x17\xb4\xb6\
\x10\x3c\x19\xd3\x06\x42\xc6\x78\x53\x8f\x1e\x9f\x93\xd3\xa8\xed\
\x79\x9d\xdd\xcf\x0d\x83\xdf\x82\x7b\xee\x85\x47\x5f\x19\xe3\x52\
\xaf\xf8\x2a\xf9\x79\xe6\x94\x36\xea\xe9\x3d\x41\xab\xf7\x5a\x51\
\xd3\x8e\x85\x7b\xc8\x00\xc3\xcd\x62\xc2\x4d\xd7\x75\x64\x14\x4b\
\xfa\x46\x33\xf8\x65\x3a\x11\xb5\xd7\xfb\x3b\x6c\x10\xa5\x01\xb3\
\xd7\x46\x34\xae\x9e\xd6\xd4\x7e\x91\x82\x46\x43\xba\xeb\x5d\x17\
\x66\x66\xda\x93\x30\x9d\xf6\xcb\x7d\x0f\x93\xab\xfb\x3f\xb5\x19\
\x71\x7b\x75\xb6\x3e\x82\x34\x1e\x3f\xd0\x29\xc8\xea\x37\x78\x31\
\x1d\x18\xb1\x59\x9b\x6a\xd7\x7b\x83\xee\x27\x33\xb2\x0f\xf5\x9a\
\x30\xa0\x50\x88\x7f\xbf\x5c\x8b\x83\xc5\x6e\xfb\x75\x39\xf1\x29\
\xcf\x9a\x0e\x73\x53\x07\x65\xc0\x12\xf0\x7e\x64\x3e\x5c\x2e\xc4\
\x10\x9a\xad\x47\xb3\xd1\x34\x94\x41\x53\x81\xb9\xae\x25\x92\xa1\
\x8a\x0c\xb0\xb5\xf0\xfc\x79\xa8\xec\x83\x23\x87\xe5\xef\xe7\xcf\
\x4b\xf3\xb6\xb5\xe9\x1b\x3b\x12\x59\xa5\xad\x4b\xf2\xfe\x23\xc0\
\xe7\xb6\x93\x01\x00\x7f\x0e\xfc\x00\xd9\x31\x65\xda\x3b\x76\xfb\
\xa4\x9c\x2a\xaf\x87\x80\xca\x35\x62\x70\x2f\x2e\xb2\x51\x2d\x1d\
\x6a\x81\xd5\xf1\x09\x18\x2d\x29\x7a\x9a\xa7\x33\x0d\xdc\xa8\xc3\
\x8d\x95\x74\xa3\xf6\x70\x45\x47\x16\x27\x5d\x54\xad\x74\x5b\x78\
\x2e\x36\xb0\xae\x0b\xab\xab\xf0\xb9\xcf\x42\xa5\x22\x7b\x54\xab\
\xea\x18\xb8\x31\x52\xea\xb8\xb0\xef\x2e\x81\x27\x92\x0d\x7b\x51\
\xfd\xd1\xf2\x25\x19\x69\x93\x63\x78\xaf\x02\xbf\xd9\x2b\x51\xfb\
\xc5\xfe\xde\xad\xba\xed\x48\xd2\x88\xac\xb6\x04\x29\x3d\x36\xa9\
\x63\x29\x37\x39\xc1\x05\x47\x7c\xfe\x6b\x35\x41\x42\xad\x9e\x34\
\x4f\xc1\xb1\xe9\x72\xfe\xac\x39\xa3\x1e\xd3\xf2\x7a\x66\xa2\x4a\
\x28\xc1\x55\x79\x2a\xc3\x00\x03\x43\x13\xd0\xa8\xa7\x1b\xb5\xad\
\x2b\x69\xc3\x1b\xd7\x75\x4f\x37\x26\x3e\x08\xc1\x47\x66\x64\x10\
\x78\x14\x64\x6d\x74\xda\x17\x60\x75\x4e\x30\x9f\x9c\x83\x16\x20\
\x4d\xd9\x37\xfa\x00\x66\xfb\x5a\x4f\x01\xbf\xd0\xe6\xdb\x87\x52\
\xdf\x73\x71\x29\xf6\x68\xbc\xa0\xf3\xd5\xf4\xa1\xe4\x48\xe9\x49\
\x72\x0a\x7a\x10\x0a\xe0\xb7\xae\xd8\x50\xde\xbd\x2d\x3f\x9e\x4f\
\xd7\x96\x60\xf1\x62\xcf\x25\x68\xc5\xfd\x5e\x93\xc7\x64\xba\x7a\
\xd2\x9f\xb7\x2e\x94\x4a\x50\x2c\xa5\x55\x8f\xdf\x80\xe2\xb8\x34\
\x6a\xa3\x23\x2c\x83\x96\xdc\x6b\x0c\xac\xce\xc3\xec\x33\xe9\x72\
\x96\xc4\xfa\x1d\xe0\x4f\xfa\x44\xc6\xfb\x5e\x1f\xd7\x2b\xa5\x16\
\x42\xe0\x5c\x55\xd4\x43\xb7\x71\x34\x1b\x7e\xbc\x13\x0f\xe7\x8b\
\x80\x39\xab\x85\xb7\x0b\x6b\x19\x7d\xbc\x85\x15\xb4\xa4\xce\x67\
\xdf\xdd\x42\xf4\xbc\xb1\xf6\xa1\x76\x4f\xfa\x4d\x91\xa2\x99\x97\
\xca\x3d\x7e\xc2\x05\xb5\x45\x8d\x35\x9e\x91\x7c\x6f\x4e\xe2\xe6\
\x49\xe0\xbd\x03\xa4\x26\x06\x5a\x3f\x99\x85\x57\xad\xaa\x90\x33\
\x55\x98\xad\x8b\x9e\xef\x66\x0b\x82\x10\x2a\xc3\x2a\x05\x89\x6a\
\x89\x56\x20\x41\xd9\xb6\xa2\xa3\x46\x3b\xe4\xfd\x76\x2f\x27\xb2\
\x3d\x85\x11\x98\x3c\x0e\xfb\xef\x17\x09\x48\xea\x7d\x5b\x10\x95\
\x33\x7f\x52\x22\xeb\x1c\xe2\x2f\x23\x63\xfa\x17\xfa\xfd\x68\x83\
\xe6\x7f\xea\xc8\x33\x60\xfe\x0d\x69\xbb\xd9\xd0\xed\x4d\x1f\x4e\
\xdd\x94\x22\xab\x43\xa3\x82\x6b\xe7\xe5\x7a\xfd\x50\x90\xd1\x89\
\x21\x81\x21\x92\x8c\x5c\x5c\x93\x8e\x9a\x41\x9b\xc2\x53\x27\xcc\
\x8a\x1a\xa9\x5d\x55\xe3\x5a\x88\xf5\x7c\x69\x4c\x86\x75\xdb\xa2\
\x54\xd6\xb9\x43\x9a\x9b\x6e\xc6\xea\xd5\x29\xc8\x7c\xd1\xb9\xa7\
\x85\xf8\xb6\x90\x0b\xdd\xff\x20\xf0\xf9\x81\x3e\xdf\x16\xbe\xdb\
\x49\x64\x76\xf2\x6c\xd6\xc0\xfa\x21\x9c\x99\x87\x8b\xcb\x31\x7c\
\xd1\x69\x55\xca\xe9\x31\x05\xd6\x11\xc3\x3c\xbf\xda\x7b\xa1\x55\
\x2f\x30\x71\x63\x39\x81\xd3\xe8\x20\xa7\xe1\x03\x32\x35\xbd\x3c\
\xa5\xf3\x40\xbd\xd8\xb5\x34\x56\xbc\x9d\xfa\x2c\xcc\x3d\x25\x69\
\x46\x5b\xcc\xfd\x40\xef\x52\xcc\x8c\xdd\x66\x00\x0a\xb3\x7e\x3f\
\x90\xea\x86\x8a\x2a\x1f\x9e\x5b\x90\x16\xa5\xa6\x1f\xd7\xff\x67\
\xd5\xd0\x44\x49\x9a\x31\x92\x27\xdd\x1a\x71\x33\x97\x1b\x83\x95\
\xaa\x27\xe3\x02\xbf\x25\x60\x5c\x98\xc8\xe4\x45\x7d\x5c\xc3\x15\
\xf0\xd4\x60\x27\x7d\x7a\x47\x09\xbd\xf8\x3c\xcc\xab\xc1\xb5\xf9\
\xc9\xfa\xf7\xaa\xcf\xcf\xad\x62\x40\x14\x72\x3f\x96\x95\x04\xab\
\xfe\xfd\xf5\x3a\x3c\x33\x27\x10\x76\x04\x29\x27\x19\x50\x70\x64\
\x4a\xba\x4d\x74\x60\x3a\x46\xfa\xca\xae\xd4\x12\x01\xd1\x20\x0c\
\x50\x68\x39\x3b\x48\x29\x0c\x64\xb6\x50\x61\x2c\x4d\x78\xe3\x88\
\xca\x69\xd6\xe1\xe6\xb3\x52\x3b\x1a\xf8\xb9\x3a\x3f\x50\xe2\xbf\
\x7f\xcb\xc9\xa2\x6d\x32\x73\x8f\xab\x3a\xba\x94\xf5\x8e\x0a\x8e\
\x74\x4a\x3e\x35\x2b\x12\xe1\x05\x02\x63\x47\x27\xdb\x0f\x45\x0d\
\x4d\x94\xd3\x00\x9f\x63\xa4\xb0\xb7\x9a\x69\x73\xea\xf9\x8b\x15\
\x24\x45\xb8\x74\x39\x5d\x7e\x12\x78\x02\x31\x8f\x1f\x56\xc6\x06\
\x31\xe1\x43\x24\x5e\xb8\xf1\x25\xf9\x19\xe1\xfd\x39\xeb\x5d\xdb\
\x41\xfc\xed\x64\x00\x48\x12\xff\x31\xa4\xa2\xa2\x2d\xf0\xf2\x03\
\x89\x13\x9e\x9e\x15\x57\xd5\x0f\xe2\x04\xbd\x35\x32\xae\xcc\x3a\
\xb1\x47\x64\x75\xc0\xde\x85\x25\x09\xf4\xfa\x61\x42\xd4\x24\xbd\
\xf8\x9c\x54\x28\x24\x75\x7f\x18\xc0\xe8\x01\x70\x47\x74\x42\x4a\
\x51\xa8\xb0\x5e\x15\x75\x33\x7f\x3a\x56\x39\x39\x92\x57\x07\xde\
\x86\x3c\x8e\x8b\xbd\xc6\x00\x90\xd2\x8b\xd7\x23\x0f\x37\x48\xbb\
\x5b\x4e\xfc\xb4\x8c\x67\xe7\xa4\x74\x7d\x6e\x25\x9e\x7e\x75\x40\
\x67\xc6\xa5\x6c\x81\x23\x3d\x06\x17\x16\xd4\x2d\x75\x7a\xfb\x46\
\x8e\x15\xa8\xa0\x76\x45\x89\x9f\x38\xfd\x43\xe3\xe2\xf9\x44\x10\
\x42\x63\x59\xd4\xcd\x8d\x27\x04\x5a\x88\xdc\xce\x0e\x85\xb5\xaf\
\x45\x46\xf6\x6f\xdb\xda\x89\xa7\x28\xcd\x21\xc3\xea\xfe\x07\x79\
\x9a\xd2\x54\x16\x4a\x08\x43\xa8\xae\xc3\x52\x43\x8c\x70\x65\x58\
\xc6\xa0\x1d\x9f\x90\xe6\x8b\xe5\x44\xe5\x45\xc1\xc2\xec\xaa\xdc\
\x77\xa7\x3e\xcc\xc7\x74\xd1\xf9\x51\x76\x6a\xe9\x02\x60\xd3\x53\
\x0d\xdd\xb2\x04\x64\x8e\xbe\x66\xed\xa6\x78\x48\xde\xba\xa6\x30\
\x3b\x57\xc5\x7d\x4c\xbf\xcb\xc2\x76\x13\x6b\xa7\x1e\x63\x15\x02\
\x1f\xd6\x60\xed\xd7\x91\x89\xeb\x29\xf8\xa2\xa0\xad\xac\x0b\xeb\
\xb0\xd0\x10\x5c\x68\x66\x44\x72\xc6\xc9\xb8\x20\x72\x45\xaf\xd4\
\xe4\xe7\x9d\xfb\x44\x12\xc2\x4c\x83\x75\x94\xb9\x5a\xbe\x04\x55\
\xcd\x6e\x65\x2b\x1b\x8a\x23\x82\x0b\x2d\x9c\x93\xa8\x36\xb2\x0d\
\x4e\xa1\xa3\xbf\x7b\x19\x99\x97\xfd\xf1\x1d\xa2\xd3\x8e\x3f\xcc\
\xf3\x33\xc8\x13\x87\x7e\x14\x78\x0f\x89\xac\x5a\xa4\xe7\xa3\xf9\
\x13\xeb\x9e\x74\xbe\xe4\xc1\x18\x51\x12\xe7\x6a\x5d\xec\x81\x13\
\x8d\x3c\x88\x86\x3f\x39\x32\x19\xf1\xe6\x53\xb0\xa2\xe9\xef\x2c\
\xf1\x8d\x23\xea\x66\xad\x9a\x56\x55\x1d\x56\x43\x4f\xfd\x07\x49\
\x8c\x15\xd8\x89\xb5\x1b\x0d\x99\x2d\x64\x76\xe6\x37\x20\x29\xba\
\x6a\x2e\xca\x69\xba\x07\x6c\xd1\x54\x95\x05\xc5\x8a\x92\x4c\x32\
\xd1\x94\xf4\x1b\x71\x4e\x81\x0e\x09\x9b\x8d\x6a\x86\xfc\x13\xef\
\x03\x7f\x8f\x3c\x41\xef\x9d\x3b\x4d\xfc\xdd\x62\x40\xb4\x2e\x00\
\x3f\x01\x7c\x3d\xf0\x21\x32\xbd\x68\xbd\x26\x6c\x5c\x27\xdf\x18\
\x9b\xee\xaa\x24\xde\xa0\xf3\x21\xf9\x2b\x64\x4c\xdb\x9b\xe9\x63\
\xfe\xff\x5e\x57\x41\x9d\x20\xed\xf7\x20\x0f\xf9\x7c\x1b\xf2\xcc\
\xc9\x57\x72\x6b\xd6\x39\xe0\x5f\x91\x64\xd3\xa7\x6f\xc5\x07\xb8\
\x95\x4f\xd4\x3e\x05\xfc\xb2\xea\xd9\x87\x91\xc9\x82\x8f\x21\x93\
\xa5\x76\xb2\x4e\x77\x16\xa9\xd5\xfc\x1b\xf5\xd4\x6e\xdc\x42\x1a\
\xec\x89\x67\xca\xd7\x81\xff\xd2\xeb\x43\xc8\xbc\xba\x47\x81\x97\
\x21\xa3\x5d\x8e\x20\x23\x14\x06\x69\x9d\x58\x50\x02\x9f\x47\xea\
\xf3\xbf\xa8\xd7\x39\xf6\xc8\x72\xd9\x5b\x6b\x16\xf8\x27\xbd\x40\
\x46\xe7\xdf\xa6\x4c\x38\x8c\x54\xe8\x1d\x27\x1e\x39\x9d\x5d\x56\
\x6d\xcb\x53\xc0\x15\x24\x3f\x7b\x89\x9c\xce\x94\x17\x19\xd0\xdb\
\x5a\x43\x9e\x42\x71\x11\x19\xa5\x73\x5e\x19\xd1\x8d\x01\x0b\x7a\
\x4f\x95\x4c\x43\xdc\x5e\x5c\x26\x0c\x5f\xa8\x63\x32\xf6\xc6\x72\
\x5e\x24\xc1\x8b\x0c\x78\x41\xaf\xff\x07\x45\x83\x2e\x88\x18\xab\
\x45\xbb\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x8e\
\x00\
\x00\x54\x9e\x78\x9c\xed\x9c\x51\x88\x54\x55\x18\xc7\xcf\xec\x6c\
\x4e\xab\xc8\x0d\xb4\x90\x75\xdb\x09\x5d\xca\x94\xf0\xa1\xf0\x41\
\xa1\x91\x7a\x48\x24\x84\x22\x30\xca\x34\xa5\xa7\xc8\x85\xc0\x12\
\x44\x77\x47\x62\x6d\xed\x21\x0b\x0c\x11\xed\x25\xa5\x0d\x1f\xea\
\xc1\x15\x1f\xf2\x61\x40\xd8\xc8\x70\x92\xa4\x07\x4b\x72\x30\x21\
\x82\x58\x27\x49\x19\x75\xdc\xd3\xf7\x3f\xf7\xdc\xdb\xe9\x78\xee\
\xcc\xbd\x77\xee\x9d\x3b\x57\xe7\x93\xff\xf2\xb9\xcc\x3d\xe7\x7c\
\xbf\x7b\xee\xb9\xf7\x9e\xf9\xbe\x65\x2c\x43\xff\xd6\xac\x61\xf4\
\xf3\x31\x36\xfe\x0a\x63\x8f\x30\xc6\x96\x90\xe8\x57\xec\x13\x66\
\xff\x5e\xd8\x4b\x8c\x6d\xb6\x6c\x75\x2d\x7e\x2b\x6f\x0c\xa6\xfb\
\xc1\x1a\xc4\xdf\x63\x50\xe6\x7e\x61\x65\x88\x31\x4b\x1a\x22\xad\
\x27\xed\x25\x4d\x90\x8e\x69\x3a\x4a\xda\x4d\x5a\x47\x1a\x30\xf1\
\xba\x17\x4c\x8b\x69\x36\x69\x2d\xe9\x08\xe9\x32\xa9\x4e\xe2\x4d\
\x74\x93\xf4\x0b\xe9\x33\xd2\x2a\x52\xef\xbd\xc0\xc8\x30\x67\x10\
\xdb\x37\xa4\xeb\x3e\x98\x78\x69\x9a\x74\x88\xb4\x2c\xcd\x73\xa9\
\x3e\xb6\x88\xfd\xb0\x81\xb1\x1f\x37\x65\x30\xf6\x39\xa4\xed\xa4\
\x3f\x5b\xe0\xa2\xeb\x22\x69\x63\x1a\xe7\x12\x1f\x1f\x12\xba\xf2\
\xee\x43\xec\xfb\xd7\xd8\x7c\x1a\xf7\x61\x9f\xd7\x51\x50\x5d\x97\
\xeb\x53\x5f\x9a\x18\x39\x7c\xf8\xc7\x4f\xce\x9b\xde\xb1\x60\xe2\
\xfc\x96\xde\x46\x31\x82\xdb\x6f\xa4\x13\xa4\xfd\xa4\x31\xd2\x1e\
\xd2\x41\xd2\x29\xd2\x15\xd2\x4c\x83\xe3\x6f\x93\x3e\x4a\x0b\x23\
\x97\xcd\xf8\x50\x1f\xe9\x00\x89\x5f\xdd\xb1\x80\xff\xb4\x25\xab\
\xc7\x55\x23\x9d\x24\x6d\x20\xe5\x49\xb3\x0c\xeb\x15\x62\x7e\x82\
\xf4\x36\x69\x8a\x74\xc7\x2f\xa3\x4e\x34\x85\x0d\xf4\x0e\xe9\x16\
\xf8\x18\x18\x5d\x90\x6b\xc7\x1c\x03\x13\x2f\xcd\x23\x6d\x23\xfd\
\x91\x56\x46\x0a\x9b\xe5\xa4\xcb\x0e\x1b\x95\x11\x5d\x6b\xa5\xb3\
\x6f\xb0\xa7\xf4\xf8\xbd\xcc\xc0\xe9\x39\xd2\xcf\x69\x63\xa4\xb0\
\xe9\x25\x7d\xae\xb3\x91\x3a\xfd\xf7\xce\xfe\xc5\x58\xb7\x83\x8e\
\x5f\x63\xb4\x82\x74\x3e\x4d\x8c\x14\x3e\x2b\x48\x7f\x19\xd8\x5c\
\x22\x3d\x7d\xa3\xf8\x28\xe3\xfb\x96\x86\x1a\x77\x2b\x8c\x92\xe6\
\xa4\xf0\xd9\x6b\x60\x73\x87\xb4\x55\x5d\x9f\xc2\x5a\x5a\x19\xc9\
\xb8\xe7\x91\xce\x1a\xf8\x9c\x21\xcd\x6f\x95\x8d\x63\x69\x63\xa4\
\xcc\x8b\x95\xa4\x6b\x06\x3e\xef\x47\xc5\xc6\xb1\x34\xad\x47\x0a\
\x9f\xb7\x0c\x6c\xae\xca\x35\x29\x52\x3e\xb0\xb4\x30\x52\xf8\x7c\
\x68\xe0\x73\x9e\xf4\x70\xd4\x6c\x1c\x4b\x03\x23\x85\xcf\x61\x03\
\x9f\x6f\x49\x0f\xc6\xc5\x07\xd6\xe9\x8c\x24\x9b\x1e\xd2\x57\x06\
\x3e\x5f\xcb\x67\xa2\x50\x6d\x6b\xb1\x7b\xc6\x14\x82\xd1\x9c\x76\
\x31\x92\x7c\x32\xa4\x2f\x0c\x7c\x26\x49\x0f\x84\xe1\xa3\xc4\x8b\
\x7d\xc3\x9e\x88\x19\x6d\x6e\x33\x1f\xe8\x53\x03\x9f\xef\x48\x73\
\x83\xf2\x51\xe2\x04\x97\xad\xa4\x5d\xa4\x5c\x44\x8c\x8e\x93\x06\
\x13\xe0\xb3\xcd\xc0\xe7\x77\xd2\x50\x90\xfb\x97\xc6\x66\xb8\x6c\
\xef\xf3\xe0\x9c\x7f\xa0\x32\xf2\x71\xbc\x89\x11\xd8\x2c\x34\x5d\
\xb7\x71\xb1\x52\xf8\xbc\x48\xba\xa9\xf1\xb9\x4d\x5a\xef\x97\x8f\
\x32\xd6\xac\x64\xf3\x8f\x12\xdb\xad\x16\x19\x4d\x36\x62\x13\x17\
\x23\x85\xcf\x22\x52\xc5\x30\x87\x26\x9c\x35\xa8\x19\x23\x65\x9c\
\x9b\xca\xe6\x7d\xea\xb0\x8c\x96\x90\xfa\x3d\x98\x0c\xca\xf5\x28\
\xb6\x35\x9b\xff\xf7\xee\x7e\xcc\xc0\x67\x9a\x54\x50\x38\xfa\x89\
\xa9\x50\xb6\xf7\x98\x4d\xeb\xeb\x5d\x8c\x7c\xae\x47\x26\x2d\x94\
\xd7\x5c\xac\xf7\x7e\x25\xf6\x57\xe5\x35\xa5\x33\x3a\x41\xb2\x02\
\xf0\x71\x18\xfd\xda\x2a\xa3\x06\x7d\x38\x6c\x62\x7f\x5f\x53\xf8\
\xe0\x1d\x75\xca\xc0\x07\xef\xf0\x63\xce\xb3\x50\x92\x8c\xe4\xe7\
\xfa\xe5\x7a\xd4\xb6\x77\x5a\x85\xd1\xeb\x86\x75\x1a\xba\x41\x7a\
\x8f\x94\x4d\x92\x91\xb2\x1e\xb5\xf5\xbd\x5f\xe1\x83\xbd\xf9\x2f\
\x0d\x7c\xda\xc6\xc8\x07\x9f\x44\xde\x45\x14\x46\x8f\x93\xce\x75\
\x22\x23\xad\xcd\xb6\x32\x52\xef\x51\xa4\xd5\xdc\xde\x57\xed\x32\
\xf2\x66\xf4\x82\xc7\x33\x51\x97\x51\x07\x31\xf2\x8a\xa9\x15\x46\
\xad\x72\xea\x32\x8a\x85\x51\x22\xcf\x47\x5d\x46\x5d\x46\x41\x18\
\xf9\x6c\xb3\xd3\xd7\xec\x34\x30\x42\xfe\x51\x6f\x97\x91\x27\x23\
\xec\xc1\x6c\x8c\xea\x3a\x0b\xc9\x68\x56\x87\xaf\x47\xd8\x87\x59\
\x16\x15\x9f\x10\x8c\x5e\x6e\xc6\xa7\x8d\x8c\xbc\x72\x6b\x0e\x45\
\x79\x9d\x05\x60\x84\xef\x3f\x06\xfd\xf0\x69\x13\xa3\xe7\xcb\xe6\
\x1c\x2d\xe4\xd5\xae\x8a\x92\x8f\x0f\x46\xc7\x49\x0b\xb5\xcf\x04\
\xd9\x83\x8c\x6b\x3d\x42\x1e\x9b\x29\xd7\x0f\xf9\xd9\x99\x28\xf9\
\x34\x60\x34\xd9\x88\x4d\x84\x8c\x70\x0f\xc2\x77\x47\x3d\xcd\xe2\
\x52\xda\x43\xae\xdf\x94\xa1\x2d\xe4\xb0\x0f\x44\x3d\x87\x0c\x8c\
\x9e\x21\x0d\x78\x30\x19\x94\xeb\x51\x54\x6b\x36\xee\x3f\x5b\xfd\
\x9e\x77\xa5\x3d\xe4\x8c\xea\x79\xb5\xc8\xf3\x5f\x17\x07\x1f\x58\
\xb3\xb9\x22\x99\x4d\xf2\xe8\xde\xd7\xf0\xdd\xd1\x70\x59\xf9\x6e\
\xb6\x99\x29\x6d\x21\xaf\xf6\x8a\x81\xf7\xee\xb8\xf8\x98\x4c\x63\
\x73\xdc\x70\xef\x0f\xc3\xe8\xa2\x9c\x37\x81\xd8\x68\xed\xe1\xb9\
\xf9\x94\x81\xcf\xd1\x38\xd6\x20\x2f\xd3\xe6\x4d\x94\xef\xfd\x6f\
\x96\xed\xef\x20\x03\x9f\x6b\xa5\x9d\x83\x06\x3e\xc7\xfc\xac\x65\
\x51\x99\xb2\x1e\x45\xbd\x37\x12\xfa\x79\x57\x39\x76\x4f\x87\xf0\
\x89\x6b\xff\x28\x94\x29\xc7\x8f\x75\x10\x9f\xc8\x18\xb5\x6a\x0a\
\x9f\xfd\x49\xf3\x81\x75\x1a\x23\xc9\x06\xf5\x22\x27\x0c\x7c\x26\
\xda\xcd\x07\xd6\x29\x8c\x94\xb9\x93\x2f\xdb\x75\x47\x3a\x9f\xbd\
\xed\xbc\xbf\xab\xd6\x09\x8c\x14\x3e\xa8\x3b\xd2\x6b\xda\xf0\xff\
\xf5\x49\xf1\x81\x25\xc9\x48\x61\x83\x3c\x98\x93\x86\xb9\x83\x7a\
\xda\xa1\x24\xf9\xc0\x92\x60\xa4\xdd\xf7\xb0\x1f\x56\x33\xf0\x39\
\xe2\x3c\x53\x25\x6d\xad\x30\x0a\xca\x49\x1c\xb3\x6f\xa9\xa8\x17\
\x95\xb5\x59\x17\x0c\x6c\xf0\x3c\xbe\x36\xe9\xb9\xa3\x5a\x08\x46\
\xbd\x41\x19\x39\x9f\x45\xdd\xd1\xb5\x5d\xfd\x8b\x51\xbf\x66\x60\
\x03\xa1\x2e\x7b\x76\x27\xf1\x81\x05\x64\x84\xfc\x23\x4b\x3b\xc6\
\x4f\xbb\xce\x33\xfc\x69\x8f\x5a\x51\xd4\x65\x47\xbe\x37\x16\x95\
\x05\x60\x84\x1c\x2d\xe4\xb1\x15\xb8\x92\x0f\xd9\x44\xa8\x3b\x1a\
\x56\xdb\xd4\x18\xe1\x9e\xb5\xbd\xd5\x67\xf2\xb8\x2d\x00\x23\x08\
\xf9\x90\xc8\x19\x45\x5e\x2d\x72\x8f\xe7\x4a\x5e\xb8\xfe\x50\x0b\
\x81\x7a\x11\xd4\xd4\xa0\xee\xe8\x8c\xe4\xfa\xbf\x36\x64\xad\x28\
\xf8\x1c\x2e\xb7\x31\x17\xbf\x15\x33\x30\xba\xd4\x80\x11\x84\x3c\
\x49\xe4\x67\x23\x87\x1d\xfb\x03\xa8\x85\x40\xbd\x08\x6a\x6a\xae\
\x36\x39\x96\xa3\x76\x1d\xf5\xfd\xf2\x6f\x20\xa4\xc2\x34\x46\xab\
\xb9\x77\x8e\x56\x2b\x42\x5d\xf6\x01\xd4\xf7\xe3\x9e\x86\xbf\x13\
\x91\x16\x3e\x30\x8d\x11\xf2\xd8\x90\xeb\x67\xca\x87\x0c\x23\xd4\
\x65\xa3\x76\xbd\xcf\xe9\x03\x7f\x4b\x23\x6d\xa6\x31\x42\x2c\xc8\
\x19\x9d\xe2\xe6\xdc\x63\x3f\x42\xcd\x31\xea\xb2\x97\x6b\x6d\x27\
\x1d\x6a\x68\xd3\xe3\xe0\x76\xee\x31\xf2\xb3\x91\xc3\x5e\xf1\x31\
\xa7\x50\x2b\x8a\x7a\x5a\xd4\x1c\x63\xbd\x0e\xfc\xfc\x94\x06\x33\
\x70\x42\x9c\xa8\x85\x40\xbd\x08\x6a\x6a\x50\x77\x84\xda\x2c\xd4\
\xaf\xa1\xc6\x0f\x75\x90\xa8\x15\x5d\x29\x99\xde\xd5\xc6\xbd\x68\
\xa6\x38\x15\xa1\x7e\xad\xa7\xc9\x67\x92\x0e\xa1\x6d\xd6\x8c\xc3\
\xfd\xc6\x23\x29\xe3\x8e\xc1\x9f\x61\x79\xd7\xaf\xb2\xac\xeb\x97\
\xe4\xe7\xe0\x17\x19\x1b\x95\xfe\x0c\x29\x2f\xfd\x3a\xc9\x92\x7e\
\x8d\x94\x53\xfc\xac\xf4\xab\x8a\x5f\x21\x65\x5c\xbf\x60\x77\x40\
\x7e\x09\xbd\x8d\x4a\x3f\xc3\x2b\xa2\x03\xf8\x59\x1a\x91\x25\xfd\
\x1c\xaf\x89\x0e\xc8\x2f\x5a\xbc\xae\xf8\x33\xa2\x03\xf8\x74\x9c\
\xe8\x40\xfa\x45\xe6\xfa\x39\xd1\x81\xf0\x67\xb2\x15\xd7\xaf\x67\
\xaa\x8e\x6f\xd5\x32\x35\x74\x26\xfc\x2a\x13\x1d\xc0\xcf\x55\xd9\
\x08\x3a\x10\xe3\xa9\xb2\x3c\x3a\x90\xbe\x55\xa3\x83\xc5\x98\x69\
\x68\x35\x56\xb0\xe3\xaa\xb3\x6c\x8d\x22\x10\xfe\x88\xeb\x57\x29\
\x5e\xd1\x81\xf0\xf3\x45\xd1\x81\x60\x65\x95\x44\x07\x82\x67\x8e\
\x62\x27\x2c\x82\x33\x3a\x28\xb2\x51\xf0\xc7\x91\x14\x41\xc1\x3e\
\x17\xd4\x81\xf8\x18\xa8\x8d\x30\x31\x26\xf2\x2b\xb2\x03\xfb\x3c\
\xda\x1d\xc0\x17\x1d\x3c\x6b\xfb\x75\x66\xe1\xc8\xac\x7d\xde\x2d\
\x74\x20\x7c\xd1\x81\x55\xb2\x7d\x1c\x69\x55\x32\xc2\x47\x07\x56\
\xd5\xf6\xd1\x81\xe3\x73\xea\x20\x5f\x73\x7c\xab\x54\x70\xfd\x5c\
\x65\xd4\xf5\xb3\x55\xee\xfa\x44\xcd\xf1\x71\x5a\x1c\x1f\xa7\xc5\
\x69\x93\x3a\x70\x7d\x8a\x80\xcb\x31\x20\x02\x2e\xc7\x46\x11\x64\
\xb9\x1c\x33\x45\x90\xe1\x4c\xfa\x84\x6c\xa7\x1d\xa3\xe8\x80\xb9\
\x7e\x09\x73\x54\xfa\x98\x7f\x79\xe9\x57\x15\x1f\xf3\xb5\x20\x7d\
\xcc\xe9\x51\xe9\xcf\x60\xba\x3a\xd7\x4b\xd1\x3e\x47\xdc\x6e\xc8\
\x72\xfd\x3a\xe3\xae\x2f\xe7\xf3\xbf\x2f\x4f\x0d\x68\
"
qt_resource_name = b"\
\x00\x03\
\x00\x00\x70\x37\
\x00\x69\
\x00\x6d\x00\x67\
\x00\x05\
\x00\x4f\xa6\x53\
\x00\x49\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x14\
\x09\xf6\x2c\x9f\
\x00\x62\
\x00\x72\x00\x6f\x00\x6b\x00\x65\x00\x6e\x00\x5f\x00\x6c\x00\x69\x00\x6e\x00\x6b\x00\x5f\x00\x37\x00\x32\x00\x70\x00\x78\x00\x2e\
\x00\x69\x00\x63\x00\x6f\
\x00\x0b\
\x06\x27\xc0\x87\
\x00\x4e\
\x00\x65\x00\x74\x00\x77\x00\x6f\x00\x72\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0d\
\x0f\x71\x6b\x1f\
\x00\x6c\
\x00\x69\x00\x6e\x00\x6b\x00\x5f\x00\x37\x00\x32\x00\x70\x00\x78\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x03\x00\x00\x00\x03\
\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x09\xaf\
\x00\x00\x00\x1c\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x66\x00\x01\x00\x00\x00\x01\x00\x00\x27\x72\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x03\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x09\xaf\
\x00\x00\x01\x77\x4b\xea\x00\x37\
\x00\x00\x00\x1c\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x77\x4c\xfd\xd4\x26\
\x00\x00\x00\x66\x00\x01\x00\x00\x00\x01\x00\x00\x27\x72\
\x00\x00\x01\x77\x4c\xfd\xf7\xe1\
"
qt_version = [int(v) for v in QtCore.qVersion().split(".")]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(
rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
)
def qCleanupResources():
QtCore.qUnregisterResourceData(
rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data
)
qInitResources()
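# Usage note: simply importing this generated module registers the embedded resources
# with Qt (qInitResources() runs at import time), so application code can then refer to
# them by resource path, e.g. QIcon(":/img/Icons/Network.png") for the entries listed above.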
|
class filters:
    """Minimal hook/filter registry: named lists of callables applied in order."""

    def __init__(self):
        self.filters = {}

    def set(self, name, function):
        # Register a callable under the given filter name.
        if name not in self.filters:
            self.filters[name] = []
        self.filters[name].append(function)

    def get(self, name):
        # Return the list of callables registered for a name, or None if there are none.
        if name in self.filters:
            return self.filters[name]

    def call(self, name, argv):
        # Run argv through every callable registered under name, feeding each
        # result into the next; unknown names return argv unchanged.
        r = argv
        if self.get(name) is not None:
            for function in self.get(name):
                try:
                    if callable(function):
                        r = function(r)
                except Exception:
                    pass
            return r
        return argv
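
# Example usage (illustrative sketch; the filter name and callables below are arbitrary):
if __name__ == '__main__':
    f = filters()
    f.set('title', str.strip)   # first filter: trim surrounding whitespace
    f.set('title', str.title)   # second filter: title-case the stripped result
    print(f.call('title', '  hello world  '))   # -> 'Hello World'
    print(f.call('missing', 'left as-is'))      # unregistered name -> 'left as-is'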
|
import os
import tarfile
import pickle
__author__ = "Marcin Stachowiak"
__version__ = "1.0"
__email__ = "marcin.stachowiak.ms@gmail.com"
def check_if_file_exists(path):
    return os.path.exists(path)

def create_dir_if_not_exists(path):
    if not os.path.exists(path):
        os.makedirs(path)

def unpack(path):
    # Extract a .tar.gz archive into the directory that contains it.
    folder, _ = os.path.split(path)
    with tarfile.open(path, "r:gz") as tar:
        tar.extractall(folder)
    print('File %s unpacked.' % path)

def unpickle(file):
    # Load a pickled file; encoding='bytes' keeps Python 2 pickles readable.
    with open(file, 'rb') as fo:
        data = pickle.load(fo, encoding='bytes')
    return data
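
# Example usage (illustrative sketch; the archive and batch paths below are hypothetical):
if __name__ == '__main__':
    archive = 'data/dataset.tar.gz'
    if check_if_file_exists(archive):
        unpack(archive)                    # extracts next to the archive, i.e. into 'data/'
        batch = unpickle('data/batch_1')   # a pickled file assumed to be inside the archive
        print(type(batch))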
|
"""
write2csv.py
Writes adjacency, chemical and electrical networks to edgelist file.
created: Christopher Brittin
date: 01 November 2018
Synopsis:
python write2csv.py db fout
Parameters:
db (str): Database name
fout (str): Path to output file
"""
import argparse
import networkx as nx
from connectome.load import from_db
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('db',
action="store",
help="Database name")
parser.add_argument('fout',
action="store",
help="Path to output file")
params = parser.parse_args()
C = from_db(params.db,adjacency=True,chemical=True,
electrical=True, dataType='networkx')
adj_out = params.fout.replace('.','_adjacency.')
chem_out = params.fout.replace('.','_chemical.')
elec_out = params.fout.replace('.','_electrical.')
nx.write_weighted_edgelist(C.A,adj_out,delimiter=',')
nx.write_weighted_edgelist(C.C,chem_out,delimiter=',')
nx.write_weighted_edgelist(C.E,elec_out,delimiter=',')
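    # Example: with fout='elegans.csv' the three edge lists are written to
    # 'elegans_adjacency.csv', 'elegans_chemical.csv' and 'elegans_electrical.csv'.
    # Note that replace() substitutes every '.' in fout, so pass a name with a single extension dot.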
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_profile_protocol_options
short_description: Configure protocol options in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall feature and profile_protocol_options category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
            - Ensures the FortiGate certificate is verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
              This attribute was already present in previous versions at a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
firewall_profile_protocol_options:
description:
- Configure protocol options.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
comment:
description:
- Optional comments.
type: str
dns:
description:
- Configure DNS protocol options.
type: dict
suboptions:
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
ftp:
description:
- Configure FTP protocol options.
type: dict
suboptions:
comfort_amount:
description:
- Amount of data to send in a transmission for client comforting (1 - 10240 bytes).
type: int
comfort_interval:
description:
- Period of time between start, or last transmission, and the next client comfort transmission of data (1 - 900 sec).
type: int
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- clientcomfort
- oversize
- splice
- bypass-rest-command
- bypass-mode-command
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
http:
description:
- Configure HTTP protocol options.
type: dict
suboptions:
block_page_status_code:
description:
- Code number returned for blocked HTTP pages (non-FortiGuard only) (100 - 599).
type: int
comfort_amount:
description:
- Amount of data to send in a transmission for client comforting (1 - 10240 bytes).
type: int
comfort_interval:
description:
- Period of time between start, or last transmission, and the next client comfort transmission of data (1 - 900 sec).
type: int
fortinet_bar:
description:
- Enable/disable Fortinet bar on HTML content.
type: str
choices:
- enable
- disable
fortinet_bar_port:
description:
- Port for use by Fortinet Bar (1 - 65535).
type: int
http_policy:
description:
- Enable/disable HTTP policy check.
type: str
choices:
- disable
- enable
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- clientcomfort
- servercomfort
- oversize
- chunkedbypass
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
post_lang:
description:
- ID codes for character sets to be used to convert to UTF-8 for banned words and DLP on HTTP posts (maximum of 5 character sets).
type: str
choices:
- jisx0201
- jisx0208
- jisx0212
- gb2312
- ksc5601-ex
- euc-jp
- sjis
- iso2022-jp
- iso2022-jp-1
- iso2022-jp-2
- euc-cn
- ces-gbk
- hz
- ces-big5
- euc-kr
- iso2022-jp-3
- iso8859-1
- tis620
- cp874
- cp1252
- cp1251
range_block:
description:
- Enable/disable blocking of partial downloads.
type: str
choices:
- disable
- enable
retry_count:
description:
- Number of attempts to retry HTTP connection (0 - 100).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
streaming_content_bypass:
description:
- Enable/disable bypassing of streaming content from buffering.
type: str
choices:
- enable
- disable
strip_x_forwarded_for:
description:
- Enable/disable stripping of HTTP X-Forwarded-For header.
type: str
choices:
- disable
- enable
switching_protocols:
description:
- Bypass from scanning, or block a connection that attempts to switch protocol.
type: str
choices:
- bypass
- block
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
imap:
description:
- Configure IMAP protocol options.
type: dict
suboptions:
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- fragmail
- oversize
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
mail_signature:
description:
- Configure Mail signature.
type: dict
suboptions:
signature:
description:
- Email signature to be added to outgoing email (if the signature contains spaces, enclose with quotation marks).
type: str
status:
description:
- Enable/disable adding an email signature to SMTP email messages as they pass through the FortiGate.
type: str
choices:
- disable
- enable
mapi:
description:
- Configure MAPI protocol options.
type: dict
suboptions:
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- fragmail
- oversize
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
name:
description:
- Name.
required: true
type: str
nntp:
description:
- Configure NNTP protocol options.
type: dict
suboptions:
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- oversize
- splice
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
oversize_log:
description:
- Enable/disable logging for antivirus oversize file blocking.
type: str
choices:
- disable
- enable
pop3:
description:
- Configure POP3 protocol options.
type: dict
suboptions:
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- fragmail
- oversize
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
replacemsg_group:
description:
                    - Name of the replacement message group to be used. Source system.replacemsg-group.name.
type: str
rpc_over_http:
description:
- Enable/disable inspection of RPC over HTTP.
type: str
choices:
- enable
- disable
smtp:
description:
- Configure SMTP protocol options.
type: dict
suboptions:
inspect_all:
description:
- Enable/disable the inspection of all ports for the protocol.
type: str
choices:
- enable
- disable
options:
description:
- One or more options that can be applied to the session.
type: str
choices:
- fragmail
- oversize
- splice
oversize_limit:
description:
- Maximum in-memory file size that can be scanned (1 - 383 MB).
type: int
ports:
description:
- Ports to scan for content (1 - 65535).
type: int
scan_bzip2:
description:
- Enable/disable scanning of BZip2 compressed files.
type: str
choices:
- enable
- disable
server_busy:
description:
                            - Enable/disable the SMTP server busy response when the server is not available.
type: str
choices:
- enable
- disable
status:
description:
- Enable/disable the active status of scanning for this protocol.
type: str
choices:
- enable
- disable
uncompressed_nest_limit:
description:
- Maximum nested levels of compression that can be uncompressed and scanned (2 - 100).
type: int
uncompressed_oversize_limit:
description:
- Maximum in-memory uncompressed file size that can be scanned (0 - 383 MB, 0 = unlimited).
type: int
switching_protocols_log:
description:
- Enable/disable logging for HTTP/HTTPS switching protocols.
type: str
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure protocol options.
fortios_firewall_profile_protocol_options:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_profile_protocol_options:
comment: "Optional comments."
dns:
ports: "5"
status: "enable"
ftp:
comfort_amount: "8"
comfort_interval: "9"
inspect_all: "enable"
options: "clientcomfort"
oversize_limit: "12"
ports: "13"
scan_bzip2: "enable"
status: "enable"
uncompressed_nest_limit: "16"
uncompressed_oversize_limit: "17"
http:
block_page_status_code: "19"
comfort_amount: "20"
comfort_interval: "21"
fortinet_bar: "enable"
fortinet_bar_port: "23"
http_policy: "disable"
inspect_all: "enable"
options: "clientcomfort"
oversize_limit: "27"
ports: "28"
post_lang: "jisx0201"
range_block: "disable"
retry_count: "31"
scan_bzip2: "enable"
status: "enable"
streaming_content_bypass: "enable"
strip_x_forwarded_for: "disable"
switching_protocols: "bypass"
uncompressed_nest_limit: "37"
uncompressed_oversize_limit: "38"
imap:
inspect_all: "enable"
options: "fragmail"
oversize_limit: "42"
ports: "43"
scan_bzip2: "enable"
status: "enable"
uncompressed_nest_limit: "46"
uncompressed_oversize_limit: "47"
mail_signature:
signature: "<your_own_value>"
status: "disable"
mapi:
options: "fragmail"
oversize_limit: "53"
ports: "54"
scan_bzip2: "enable"
status: "enable"
uncompressed_nest_limit: "57"
uncompressed_oversize_limit: "58"
name: "default_name_59"
nntp:
inspect_all: "enable"
options: "oversize"
oversize_limit: "63"
ports: "64"
scan_bzip2: "enable"
status: "enable"
uncompressed_nest_limit: "67"
uncompressed_oversize_limit: "68"
oversize_log: "disable"
pop3:
inspect_all: "enable"
options: "fragmail"
oversize_limit: "73"
ports: "74"
scan_bzip2: "enable"
status: "enable"
uncompressed_nest_limit: "77"
uncompressed_oversize_limit: "78"
replacemsg_group: "<your_own_value> (source system.replacemsg-group.name)"
rpc_over_http: "enable"
smtp:
inspect_all: "enable"
options: "fragmail"
oversize_limit: "84"
ports: "85"
scan_bzip2: "enable"
server_busy: "enable"
status: "enable"
uncompressed_nest_limit: "89"
uncompressed_oversize_limit: "90"
switching_protocols_log: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_profile_protocol_options_data(json):
option_list = ['comment', 'dns', 'ftp',
'http', 'imap', 'mail_signature',
'mapi', 'name', 'nntp',
'oversize_log', 'pop3', 'replacemsg_group',
'rpc_over_http', 'smtp', 'switching_protocols_log']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
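# Illustration (comments only, not executed): the two helpers above first drop unset
# options and then convert Ansible-style underscore keys into the hyphenated keys the
# FortiOS API expects. A hypothetical filtered payload such as
#     {'comment': 'demo', 'oversize_log': 'enable', 'rpc_over_http': 'disable'}
# would therefore be sent to the device as
#     {'comment': 'demo', 'oversize-log': 'enable', 'rpc-over-http': 'disable'}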
def firewall_profile_protocol_options(data, fos):
vdom = data['vdom']
if 'state' in data and data['state']:
state = data['state']
    elif data['firewall_profile_protocol_options'] and 'state' in data['firewall_profile_protocol_options']:
state = data['firewall_profile_protocol_options']['state']
else:
state = True
firewall_profile_protocol_options_data = data['firewall_profile_protocol_options']
filtered_data = underscore_to_hyphen(filter_firewall_profile_protocol_options_data(firewall_profile_protocol_options_data))
if state == "present":
return fos.set('firewall',
'profile-protocol-options',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'profile-protocol-options',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_firewall(data, fos):
if data['firewall_profile_protocol_options']:
resp = firewall_profile_protocol_options(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"firewall_profile_protocol_options": {
"required": False, "type": "dict", "default": None,
"options": {
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"comment": {"required": False, "type": "str"},
"dns": {"required": False, "type": "dict",
"options": {
"ports": {"required": False, "type": "int"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}},
"ftp": {"required": False, "type": "dict",
"options": {
"comfort_amount": {"required": False, "type": "int"},
"comfort_interval": {"required": False, "type": "int"},
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["clientcomfort", "oversize", "splice",
"bypass-rest-command", "bypass-mode-command"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"http": {"required": False, "type": "dict",
"options": {
"block_page_status_code": {"required": False, "type": "int"},
"comfort_amount": {"required": False, "type": "int"},
"comfort_interval": {"required": False, "type": "int"},
"fortinet_bar": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"fortinet_bar_port": {"required": False, "type": "int"},
"http_policy": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["clientcomfort", "servercomfort", "oversize",
"chunkedbypass"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"post_lang": {"required": False, "type": "str",
"choices": ["jisx0201", "jisx0208", "jisx0212",
"gb2312", "ksc5601-ex", "euc-jp",
"sjis", "iso2022-jp", "iso2022-jp-1",
"iso2022-jp-2", "euc-cn", "ces-gbk",
"hz", "ces-big5", "euc-kr",
"iso2022-jp-3", "iso8859-1", "tis620",
"cp874", "cp1252", "cp1251"]},
"range_block": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"retry_count": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"streaming_content_bypass": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"strip_x_forwarded_for": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"switching_protocols": {"required": False, "type": "str",
"choices": ["bypass", "block"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"imap": {"required": False, "type": "dict",
"options": {
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["fragmail", "oversize"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"mail_signature": {"required": False, "type": "dict",
"options": {
"signature": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}},
"mapi": {"required": False, "type": "dict",
"options": {
"options": {"required": False, "type": "str",
"choices": ["fragmail", "oversize"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"name": {"required": True, "type": "str"},
"nntp": {"required": False, "type": "dict",
"options": {
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["oversize", "splice"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"oversize_log": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"pop3": {"required": False, "type": "dict",
"options": {
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["fragmail", "oversize"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"replacemsg_group": {"required": False, "type": "str"},
"rpc_over_http": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"smtp": {"required": False, "type": "dict",
"options": {
"inspect_all": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"options": {"required": False, "type": "str",
"choices": ["fragmail", "oversize", "splice"]},
"oversize_limit": {"required": False, "type": "int"},
"ports": {"required": False, "type": "int"},
"scan_bzip2": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"server_busy": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uncompressed_nest_limit": {"required": False, "type": "int"},
"uncompressed_oversize_limit": {"required": False, "type": "int"}
}},
"switching_protocols_log": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Wrapper class for ``museval`` implementation of the BSS-Eval metrics (SDR, SIR, SAR).
Contains logic for loading ground truth AudioSignals and estimated
AudioSignals to compute BSS-Eval metrics. The ``museval`` module contains an
implementation of BSS-Eval version 4.
"""
import museval
import json
from . import bss_eval_base
from ..core import constants, utils
class BSSEvalV4(bss_eval_base.BSSEvalBase):
"""
Wrapper class for ``museval`` implementation of the BSS-Eval metrics (SDR, SIR, SAR).
Contains logic for loading ground truth AudioSignals and estimated
    AudioSignals to compute BSS-Eval metrics. The ``museval`` module contains an
implementation of BSS-Eval version 4.
The BSS-Eval metrics attempt to measure perceptual quality by comparing sources
estimated from a source separation algorithm to the ground truth, known sources.
These metrics evaluate the distortion (SDR) and artifacts (SAR) present in the
estimated signals as well as the interference (SIR) from other sources in a given
estimated source. Results are returned in units of dB, with higher values indicating
better quality.
Examples:
:class:`BSSEvalV4` can be initialized in two ways, either with a list or a dict.
See the example below for a demonstration:
.. code-block:: python
:linenos:
mir1k_dir = 'path/to/MIR-1K'
mix, vox, acc = next(nussl.datasets.mir1k(mir1k_dir))
mix.to_mono(overwrite=True)
r = nussl.RepetSim(mix)
r()
bg, fg = r.make_audio_signals()
# Method 1: Dictionary where sources are explicit
# Note that this dictionary exactly matches nussl.constants.VOX_ACC_DICT
est_dict = {'vocals': fg, 'accompaniment': bg}
gt_dict = {'vocals': vox, 'accompaniment': acc}
bss = nussl.evaluation.BSSEvalV4(mix, gt_dict, est_dict)
scores1 = bss.evaluate()
# Method 2: List
# Note that vocals are always expected to be first, then accompaniment.
bss = nussl.evaluation.BSSEvalV4(mix, [vox, acc], [fg, bg])
scores2 = bss.evaluate()
See Also:
* For more information on ``museval`` (python implementation of BSS-Eval v4) see
`its Github page <https://github.com/sigsep/sigsep-mus-eval>`_ or
          `its documentation <https://sigsep.github.io/sigsep-mus-eval/>`_.
* For more information on the BSS-Eval metrics, see the webpage for
`the original MATLAB implementation <http://bass-db.gforge.inria.fr/bss_eval/>`_.
* :class:`BSSEvalSources` and :class:`BSSEvalImages` for the ``mir_eval`` version 3
BSS-Eval implementations.
References:
* Emmanuel Vincent, Rémi Gribonval, Cédric Févotte. Performance measurement in blind
audio source separation. IEEE Transactions on Audio, Speech and Language Processing,
Institute of Electrical and Electronics Engineers, 2006, 14 (4), pp.1462–1469.
<inria-00544230>
* Fabian-Robert Stöter, Antoine Liutkus, and Nobutaka Ito. The 2018 Signal Separation
Evaluation Campaign. In International Conference on Latent Variable Analysis and Signal
Separation, pages 293–305. Springer, 2018.
Args:
true_sources_list (list): List of :class:`AudioSignal` objects that contain the ground
truth sources for the mixture.
estimated_sources_list (list): List of :class:`AudioSignal` objects that contain estimate
sources, output from source separation algorithms.
source_labels (list): List of strings that are labels for each source to be used as keys for
the scores. Default value is ``None`` and in that case labels are ``Source 0``,
``Source 1``, etc.
algorithm_name (str): Name of the algorithm if using this object to compute many
BSS-Eval metrics. Can be changed later.
do_mono (bool): Should flatten the audio to mono before calculating metrics.
compute_permutation (bool): Should try to find the best permutation for the estimated
sources.
"""
SDR_MEANS = 'sdr_means'
def __init__(self, mixture, true_sources, estimated_sources,
target_dict=constants.VOX_ACC_DICT,
mode='v4', output_dir=None, win=1.0, hop=1.0):
# try:
# super(BSSEvalV4, self).__init__(true_sources_list=true_sources_list,
# estimated_sources_list=estimated_sources_list,
# source_labels=source_labels, do_mono=do_mono)
# except evaluation_base.AudioSignalListMismatchError:
# pass
# if vox_acc and target_dict == constants.VOX_ACC_DICT:
# self.source_labels = ['accompaniment', 'vocals']
#
# if target_dict == constants.STEM_TARGET_DICT:
# self.source_labels = ['drums', 'bass', 'other', 'vocals']
#
# self.true_sources = {l: self.true_sources_list[i] for i, l in enumerate(self.source_labels)}
# self.estimates = {l: self.estimated_sources_list[i].audio_data.T
# for i, l in enumerate(self.source_labels)}
if type(true_sources) is not type(estimated_sources):
raise bss_eval_base.BssEvalException('true_sources and estimated_sources must both be '
'lists or both be dicts!')
have_list = type(true_sources) is list
self._scores = {}
self.target_dict = target_dict
self.is_vox_acc = target_dict == constants.VOX_ACC_DICT
self.is_stem = target_dict == constants.STEM_TARGET_DICT
self.mixture = mixture
self.mode = mode
self.output_dir = output_dir
self.win = win
self.hop = hop
# Set up the dictionaries for museval
# self.true_sources is filled with AudioSignals (b/c nussl converts it to a Track)
# & self.estimates is raw numpy arrays
if self.is_vox_acc:
if have_list:
self.estimates = {'vocals': estimated_sources[0].audio_data.T,
'accompaniment': estimated_sources[1].audio_data.T}
self.true_sources = {'vocals': true_sources[0],
'accompaniment': true_sources[1]}
else:
self.estimates = {'vocals': estimated_sources['vocals'].audio_data.T,
'accompaniment': estimated_sources['accompaniment'].audio_data.T}
self.true_sources = {'vocals': true_sources['vocals'],
'accompaniment': true_sources['accompaniment']}
else:
# Assume they know what they're doing...
self.true_sources = true_sources
self.estimates = estimated_sources
# TODO: STEM_TARGET_DICT logic
def _get_scores(self, scores):
s = scores.split()
v, a = s[0], s[6].replace('\\n', '')
i1, i2 = [2, 3, 4, 5], [8, 9, 10, 11]
return {v: {self._parse(s[i])[0]: self._parse(s[i])[1] for i in i1},
a: {self._parse(s[i])[0]: self._parse(s[i])[1] for i in i2}}
@staticmethod
def _parse(str_):
bss_type, val = str_.split(':')
val = float(val[:-3])
return bss_type, val
def _get_mean_scores(self, scores):
return self._get_scores(repr(scores))
def evaluate(self):
track = utils.audio_signals_to_musdb_track(self.mixture, self.true_sources,
self.target_dict)
bss_output = museval.eval_mus_track(track, self.estimates,
output_dir=self.output_dir, mode=self.mode,
win=self.win, hop=self.hop)
self._populate_scores_dict(bss_output)
return self.scores
def _populate_scores_dict(self, bss_output):
self.scores[self.RAW_VALUES] = json.loads(bss_output.json) # Hack to format dict correctly
self.scores[self.SDR_MEANS] = self._get_mean_scores(repr(bss_output))
|
import re
import yaml
import shlex
import logging
import subprocess
from libla.Extractors import RegularExtractor, UsnExtractor
class ArtifactMapping(object):
def __init__(self):
self._mapping = {}
def set_handler(self, name, handler):
self._mapping[name] = handler
@staticmethod
def from_file(filename):
artifact_mapping = ArtifactMapping()
with open(filename, u'rb') as fh:
            template = yaml.safe_load(fh)
handler_dict = template['Handlers']
for handler_name, handler_dict in handler_dict.items():
handler = ArtifactHandler.from_dict(
handler_name, handler_dict
)
artifact_mapping.set_handler(
handler_name, handler
)
return artifact_mapping
def iter_handlers(self, file_info):
for name, handler in self._mapping.items():
if handler.matches(file_info):
yield handler
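# Sketch of the YAML layout that ArtifactMapping.from_file() appears to expect, inferred
# from the parsing above and from ArtifactHandler/MatchFilter below. The handler name,
# attribute, pattern and tool command are illustrative placeholders; the command should
# emit JSON lines, since its stdout is passed to arango_handler.insert_jsonl():
#
#   Handlers:
#     event_logs:
#       match:
#         attribute: name
#         value: "\\.evtx$"
#       extractor: regular
#       tool_cmd: "evtx_dumper --jsonl {}"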
class MatchFilter(object):
def __init__(self, attribute, value):
self.attribute = attribute
self.value = value
self.regexp = re.compile(self.value, flags=re.I)
@staticmethod
def from_dict(dictionary):
return MatchFilter(**dictionary)
def matches(self, file_info):
attribute_value = getattr(file_info, self.attribute, None)
if attribute_value is not None:
if self.regexp.search(attribute_value):
return True
return False
class ArtifactHandler(object):
def __init__(self, name, match, extractor, tool_cmd):
self.name = name
self.match = match
self.extractor = extractor
self.tool_cmd = tool_cmd
if self.extractor == 'regular':
self._extractor_class = RegularExtractor
elif self.extractor == 'usn':
self._extractor_class = UsnExtractor
else:
raise Exception(u"Unknown extractor type: {}".format(self.extractor))
@staticmethod
def from_dict(name, dictionary):
match = MatchFilter.from_dict(
dictionary['match']
)
return ArtifactHandler(
name,
match,
dictionary['extractor'],
dictionary['tool_cmd']
)
def matches(self, file_info):
return self.match.matches(file_info)
def run(self, source_path, tsk_file, file_info, arango_handler, temp_dir):
logging.info(u"[starting] Processing: {}".format(source_path))
extractor = self._extractor_class(
source_path,
tsk_file,
file_info,
temp_dir
)
extractor.write_file()
temp_filename = extractor.get_temp_name()
temp_filename = temp_filename.replace(u"\\", u"\\\\")
arguments = self.tool_cmd.format(
temp_filename
)
arguments = shlex.split(arguments)
logging.debug(u"Command: {}".format(u" ".join(arguments)))
output, error = subprocess.Popen(
arguments,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
if output:
arango_handler.insert_jsonl(
self.name,
output
)
if error:
logging.error(error)
logging.info(u"[finished] Processing: {}".format(source_path))
|
#
import math
#
from steelpy import Formulas
from steelpy import Units
from steelpy import Materials
from steelpy import Sections
#
formulas = Formulas()
units = Units()
#
# -----------------------------------
# Define sections
section = Sections()
#
Dp = 219.10 * units.mm
tp = 14.3 * units.mm
section = Sections()
section["TUB"] = ['Tubular', Dp, tp]
#
# -----------------------------------
# define material
SMYS = 448 * units.MPa
SMTS = 517 * units.MPa
E = 207000.0 * units.MPa
alpha = (1.170E-05 + 1/273.15 ) / units.K
#
material = Materials()
material["X65"] = ['elastic', SMYS, SMTS]
#
# Input Element Identification
#BeamID = 'Bm2129'
#
# Select Section Type
# Sections currently supported:
# I, RECTANGULAR BAR, SOLID CIRCLE, TUBULAR, TEE, CHANNEL & BOX'
#SectionType = "rectangular bar"
#SectionType = "i"
#
#
#
#
#roak_case = 12
W = 40.5 * units.kN
phase = 180 * units.deg
theta = (180-15.893) * units.deg
print(f'theta : {math.degrees(theta.value)}')
phi = 0 * units.deg
#ring1.case(roak_case, apld, theta, phi, phase)
#
# load[name] = [case, load, theta, phi (case 18), phase]
### Input Data
#ring1 = formulas.ring(72)
#ring1.material = material["X65"]
#ring1.geometry = section["TUB"]
##ring1.load[1] = {'case':1, 'W':W, 'phase':90*units.deg}
#ring1.load[1] = [1, W] # , 90*units.deg
###ring1.load[1].view()
###
#forces = ring1.radial_forces
##forces[1].printout()
##forces[1].plot()
###
#stress = ring1.radial_stress
#stress.printout()
###
#
#
#section["TUB2"] = ['Tubular', 10*units.inch, 1.5*units.inch]
ring2 = formulas.ring(72)
ring2.material = material["X65"]
ring2.geometry = section["TUB"]
#
ring2.load[4] = [2, W, 90 * units.deg, 90* units.deg]
#ring2.load[4] = [18, W, 10 * units.deg, 20* units.deg, 90* units.deg]
#ring2.load[4] = {'case':18, 'load':W,
# 'theta':10 * units.deg,
# 'phi':20* units.deg,
# 'phase':90* units.deg}
#
#ring2.load[4] = [17, 1000 * units.kg / units.m**3, 45* units.deg, 90* units.deg]
#
#ring2.load[4] = [15, W]
#
#ring2.load[2] = [2, W, 90 * units.deg, 90 * units.deg]
#ring2.load[2].view()
#forces = ring2.radial_forces
#forces[2].printout()
#phi = 65 * units.deg
#W = 10000 * units.lbf
#ring2.load[4] = [7, W, phase*0, 5 * units.deg]
#ring2.load[4] = [19, W, phase*0, theta*0]
#ring2.load[5] = [20, W, phase*0]
#ring2.load[88] = [12, W*-0.50, phase, theta]
#ring2.load[12] = [12, W*0.50, phase, theta]
#
#
#forces2 = ring2.radial_forces
#forces2[8].plot()
##forces2[12].plot()
#forces2[8].printout()
##forces2[12].printout()
#
## Printing Results
force = ring2.radial_force
#force.printout()
#Mmax = force.Mmax * units.N * units.m
#print(f'Mmax = {Mmax.convert("lbf*inch").value : 1.3e} lbf-inch')
##
#Nmax = force.Nmax * units.N
#print(f'Nmax = {Nmax.convert("lbf").value : 1.3e} lbf')
##
#Vmax = force.Vmax * units.N
#print(f'Vmax = {Vmax.convert("lbf").value : 1.3e} lbf')
#force.plot2d()
#
stress = ring2.radial_stress
stress.printout()
##ring1.print_results()
#
print('-->')
#
|
#DeepPasta data examination with same methods for same regions and truth values
from __future__ import print_function
import keras
from keras.models import Sequential, Model, load_model
from keras import backend as K
import tensorflow as tf
import isolearn.keras as iso
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from keras.utils import plot_model
import time
import copy
from aparent.predictor import *
##################################################
#import bioPython for working with FASTA files
from Bio import SeqIO
##################################################
#use to correct the indexes for the RC predictions so that they can be matched with the true cluster labels (since those are indexed according to the forward strand)
def flipSequenceIndex(index, lenSeq):
b = lenSeq - 1
return -1 * index + b
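#quick sanity check (illustrative): for a sequence of length 100, position 0 on the
#reverse-complement strand maps back to forward-strand position 99, and vice versa
assert flipSequenceIndex(0, 100) == 99
assert flipSequenceIndex(99, 100) == 0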
def placePeaksWithTolerance(peaks, clusters, tolerance, sign, lenSeq):
clusterRanges = clusters.keys()
for peak in peaks:
if sign == "-":
peak = flipSequenceIndex(peak, lenSeq)
placed = False
for rng in clusterRanges:
if rng != 'Unplaced':
lower = rng[0] - tolerance
upper = rng[1] + tolerance
if peak >= lower and peak <= upper: #if the peak is in [start, end]
clusters[rng].append(peak)
placed = True
break
if not placed: #wasn't placed
clusters['Unplaced'].append(peak)
return clusters
#using the peak-cluster dictionaries
#need to count TP, FP, FN (placed peaks, unplaced peaks, empty clusters)
def fillConfMatrix(dictForward, dictRC):
countTP = 0 #peaks in cluster
countFP = 0 #peak outside of cluster (in ['Unplaced'])
countFN = 0 #those w/no peak in the clusters
for key in dictForward:
if key != 'Unplaced':
inCluster = len(dictForward[key])
if inCluster != 0:
countTP += inCluster
#print (key, " contains: ", inCluster)
else:
countFN += 1
else: #unplaced peaks
countFP += len(dictForward[key])
for key in dictRC:
if key != 'Unplaced':
inCluster = len(dictRC[key])
if inCluster != 0:
countTP += inCluster
#print (key, " contains: ", inCluster)
else:
countFN += 1
else: #unplaced peaks
countFP += len(dictRC[key])
return countTP, countFP, countFN
def calculatePrecisionRecall(tp, fp, fn):
    #guard against division by zero when there are no predicted or no actual positives
    precision = tp / (tp + fp) if (tp + fp) != 0 else 0.0
    recall = tp / (tp + fn) if (tp + fn) != 0 else 0.0
    if tp == 0: #F1 is undefined without any true positives
        return precision, recall, None
    else:
        f1 = (2 * (precision * recall))/(precision + recall)
        return precision, recall, f1
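#worked example with illustrative counts: 8 TP, 2 FP and 4 FN give precision 0.8,
#recall 8/12 (~0.667) and F1 ~0.727
_p, _r, _f1 = calculatePrecisionRecall(8, 2, 4)
assert abs(_p - 0.8) < 1e-9 and abs(_r - 8 / 12) < 1e-9 and abs(_f1 - 0.727) < 1e-3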
def openForwardReverse(stem, name):
totalNameFor = stem + name + ".npy"
print (totalNameFor)
forward = np.load(totalNameFor)
reverse = np.load(stem + name + "RC.npy")
return forward, reverse
def openTrueValuesForType(name, pasType):
#opening all the true values from PolyASite2.0
colnames = ["seqName", "start" , "end", "clusterID", "avgTPM", "strand", "percentSupporting", "protocolsSupporting", "avgTPM2", "type", "upstreamClusters"]
pas_stuff =pd.read_csv('atlas.clusters.hg38.2-0.bed',delimiter='\t', names = colnames, dtype = {"seqName": str})
trueValBoolMask = pas_stuff['seqName'] == name
#print (name)
currentTrueVals = pas_stuff[trueValBoolMask] #filtered true vals
#print (currentTrueVals)
#set up true value array
clustersForward = {}
clustersRC = {} #key of (Start, End) will use to track clusters with no peaks for the FN
if pasType == "All":
for index, row in currentTrueVals.iterrows():
if row['strand'] == "+": #forward strand
clustersForward[(row['start'], row['end'])] = []
else: #negative strand, RC cluster
clustersRC[(row['start'], row['end'])] = []
clustersForward['Unplaced'] = []
clustersRC['Unplaced'] = []
else:
maskType = currentTrueVals["type"] == pasType
maskedTrue = currentTrueVals[maskType]
for index, row in maskedTrue.iterrows():
if row['strand'] == "+": #forward strand
clustersForward[(row['start'], row['end'])] = []
else: #negative strand, RC cluster
clustersRC[(row['start'], row['end'])] = []
clustersForward['Unplaced'] = []
clustersRC['Unplaced'] = []
#print (clustersForward)
return clustersForward, clustersRC
def find_peaks_deepPASTA(chrname, allChrs):
    #for each chromosome, separate the + and - strand predictions
forwardPeaks = []
reversePeaks = []
trueValBoolMask = allChrs['seqName'] == chrname
#print (name)
currentTrueVals = allChrs[trueValBoolMask] #filtered true vals
for index, row in currentTrueVals.iterrows():
if row['strand'] == "+":
forwardPeaks.append(row['position'])
elif row['strand'] == "-":
reversePeaks.append(row['position'])
else:
print ("ERROR! No strand associated!")
return forwardPeaks, reversePeaks
#treat these as "peaks"
data = pd.read_csv("genome_wide_polyA_site_prediction_human_DeepPASTA.txt", sep="\t", header=None)
data.columns = ["seqName", "position", "strand", "score"]
chromosomes = [ 'chr15', 'chr16', 'chr17', 'chr18', 'chr19', 'chr20', 'chr21', 'chr22', 'chrX', 'chrY']
namesPolyA = ["15", "16", "17", "18", "19", "20", "21", "22", "X", "Y"]
tolerances = [0, 10, 20] #tolerances around clusters
types = ['All', 'IN', 'TE', 'IG', 'AI', 'EX', 'DS', 'AE', 'AU'] #for each type and subtype
pasTypeTotals = {}
f = open( "deepPASTAConfusionMatrices.txt", "w")
for pasType in types:
counterTypes = 0
for tolerance in tolerances:
countTPtotal = 0
countFPtotal = 0
countFNtotal = 0
for i, fname in enumerate(chromosomes):
print ("-------------------------------------------------------------")
print ("Chromosome: ", namesPolyA[i], " PAS Type: ", pasType)
clustersForward, clustersRC = openTrueValuesForType(namesPolyA[i], pasType)
print ("Forward size: ", len(clustersForward.keys()) - 1)
print ("Reverse size: ", len(clustersRC.keys()) -1 )
print ("All signals: ", len(clustersForward.keys()) + len(clustersRC.keys())- 2)
if tolerance == tolerances[0]:
counterTypes += len(clustersForward.keys()) + len(clustersRC.keys())- 2
forwardPeaks, reversePeaks = find_peaks_deepPASTA(fname, data)
print ("Number forward peaks: ", len(forwardPeaks))
print ("Number reverse peaks: ", len(reversePeaks))
print ("Total peaks: ", len(forwardPeaks)+ len(reversePeaks))
clustersForTol = placePeaksWithTolerance(forwardPeaks, clustersForward, tolerance, "+", len(forwardPeaks))
clustersRCTol = placePeaksWithTolerance(reversePeaks, clustersRC, tolerance, "-", len(forwardPeaks))
countTP, countFP, countFN = fillConfMatrix(clustersForTol, clustersRCTol)
print ("tolerance, fname: ", tolerance, " ", fname)
print (countTP, countFP, countFN)
countTPtotal += countTP
countFPtotal += countFP
countFNtotal += countFN
#print ("For min peak height: ", minh, " TP: ", countTP, " FP: ", countFP, " FN: ", countFN)
print ("tolerance: ", tolerance)
print ("Total TP: ", countTPtotal, " total FP: ", countFPtotal, " total FN: ", countFNtotal)
fileLine = "PAS: " + pasType + " tolerance: " + str(tolerance) + " TP: " + str(countTPtotal) + " FP: " + str(countFPtotal) + " FN: " + str(countFNtotal) + "\n"
f.write(fileLine)
pasTypeTotals[pasType] = counterTypes
f.close()
for k in pasTypeTotals.keys():
print (k, " " , pasTypeTotals[k])
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
class Trainer(object):
def __init__(self, data_manager, model, flags):
self.data_manager = data_manager
self.model = model
self._prepare_optimizer(flags)
self._prepare_summary()
def _prepare_optimizer(self, flags):
with tf.variable_scope("opt"):
output_vars = tf.trainable_variables("model/outputs")
# Apply L2 regularization to output linear layers
l2_reg_loss = tf.add_n([ tf.nn.l2_loss(v) for v in output_vars
if 'bias' not in v.name ]) * flags.l2_reg
optimizer = tf.train.RMSPropOptimizer(
learning_rate=flags.learning_rate,
momentum=flags.momentum
)
total_loss = self.model.place_loss + \
self.model.hd_loss + \
l2_reg_loss
# Apply gradient clipping
gvs = optimizer.compute_gradients(total_loss)
gradient_clipping = flags.gradient_clipping
clipped_gvs = []
for grad, var in gvs:
if "model/outputs" in var.name:
gv = (tf.clip_by_value(grad,
-flags.gradient_clipping,
flags.gradient_clipping), var)
else:
gv = (grad, var)
clipped_gvs.append(gv)
self.train_op = optimizer.apply_gradients(clipped_gvs)
def _prepare_summary(self):
with tf.name_scope("logs"):
tf.summary.scalar("place_loss", self.model.place_loss)
tf.summary.scalar("hd_loss", self.model.hd_loss)
self.summary_op = tf.summary.merge_all()
def train(self, sess, summary_writer, step, flags):
out = self.data_manager.get_train_batch(flags.batch_size,
flags.sequence_length)
inputs_batch, place_outputs_batch, hd_outputs_batch, place_init_batch, hd_init_batch = \
out
_, summary_str = sess.run(
[self.train_op, self.summary_op],
feed_dict = {
self.model.inputs : inputs_batch,
self.model.place_outputs : place_outputs_batch,
self.model.hd_outputs : hd_outputs_batch,
self.model.place_init : place_init_batch,
self.model.hd_init : hd_init_batch,
self.model.keep_prob : 0.5
})
if step % 10 == 0:
summary_writer.add_summary(summary_str, step)
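# Hypothetical driver loop (sketch only): DataManager, Model and the extra flag fields
# (save_dir, steps) are assumed to be defined elsewhere in this project and are not part
# of this file.
#
#   data_manager = DataManager(...)
#   model = Model(...)
#   trainer = Trainer(data_manager, model, flags)
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       summary_writer = tf.summary.FileWriter(flags.save_dir, sess.graph)
#       for step in range(flags.steps):
#           trainer.train(sess, summary_writer, step, flags)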
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
        def dfs(root):
            # Reached the end of a branch, or found p or q: return it.
            if root in (None, p, q):
                return root
            left, right = dfs(root.left), dfs(root.right)
            # p and q were found in different subtrees, so this node is their LCA.
            if left and right:
                return root
            # Otherwise propagate whichever side found something (or None).
            return left if left else right
        return dfs(root)
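# Illustrative usage (commented out; the LeetCode judge supplies TreeNode and drives the call):
#   root = TreeNode(3); root.left = TreeNode(5); root.right = TreeNode(1)
#   Solution().lowestCommonAncestor(root, root.left, root.right)  # -> root (value 3)
#   Solution().lowestCommonAncestor(root, root.left, root.left)   # -> root.left (value 5)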
|
from django.contrib import admin
from apps.ventas.models import todo_item, Cabecera_Venta
class medicamento_ventaInline(admin.TabularInline):
model = todo_item
class Detalle_VentaAdmin(admin.ModelAdmin):
inlines = (medicamento_ventaInline,)
admin.site.register(Cabecera_Venta, Detalle_VentaAdmin)
|
from flask import Flask, render_template, request, url_for
import requests
import json
app = Flask(__name__)
def weather_Forecast(city):
    # Query OpenWeatherMap's current-weather endpoint for the requested city.
    apiKey = 'YOUR_API_KEY'  # https://home.openweathermap.org/users/sign_up
    response = requests.get(f'http://api.openweathermap.org/data/2.5/weather?q={city}&appid={apiKey}')
    weatherData = response.json()
    skyDescription = weatherData['weather'][0]['description']
    cityName = weatherData['name']
    # The API reports temperatures in Kelvin; convert to Celsius.
    temp = round((weatherData['main']['temp'] - 273.15), 2)
    feels_temp = round((weatherData['main']['feels_like'] - 273.15), 2)
    temp_min = round((weatherData['main']['temp_min'] - 273.15), 2)
    temp_max = round((weatherData['main']['temp_max'] - 273.15), 2)
    weatherForecast = {
        "City": cityName,
        "Sky": skyDescription,
        "Temp": temp,
        "Feels": feels_temp,
        "Min": temp_min,
        "Max": temp_max
    }
    return weatherForecast
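# Illustrative call and result shape (commented out; the numbers depend on the live API):
#   weather_Forecast('London')
#   # {'City': 'London', 'Sky': 'broken clouds', 'Temp': 12.3, 'Feels': 11.1,
#   #  'Min': 10.5, 'Max': 13.8}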
@app.route("/", methods=['POST','GET'])
def index():
if request.method == 'POST':
city = request.form['data']
if not city:
return render_template('index.html')
a = weather_Forecast(city)
return render_template('result.html', result = a)
else:
return render_template('index.html')
if __name__=="__main__":
app.run(debug=True)
|