import numpy
# Read an N x M integer grid from stdin; print the product of the column sums.
N, M = map(int, input().split())
A = numpy.array([input().split() for _ in range(N)], int)
print(numpy.prod(numpy.sum(A, axis=0), axis=0))
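# Worked example: for N=2, M=2 with rows "1 2" and "3 4", the column sums
# are [4, 6], so the printed product is 24.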
|
import typemap_delete
r = typemap_delete.Rect(123)
if r.val != 123:
    raise RuntimeError('typemap_delete.Rect did not preserve its value')
|
from .TagContainer import TagContainer
from collections import deque
# Will only be one for a template. Will be the top level parent of all tags.
# Only there to hold the top level children.
class RootTag(TagContainer):
tag = None
def __init__(self, tagchar, template):
self._tagchar = tagchar
self._template = template
self._charpos = 0
self._linenum = 0
self._linepos = 0
self._children = []
        # Needed for blank line suppression, since this class does not call TagContainer.__init__
self._blankLineSuppressionCandidates = deque()
self._maybeSupressible = True
self._tagsOnLine = False
def markLineSuppressible(self, outputFormatter):
pass
|
#!/usr/bin/python
import sys
# Python 2 script: emits C++ template specializations bspline<K>(x) of the
# B-spline basis functions for K = 1 .. int(sys.argv[1]).
fact = lambda n: float(reduce(int.__mul__, range(1, n+1)) if n else 1)  # n! (reduce is a builtin in Python 2)
pow_xc = lambda c, n: Polinome(*[c**(n-i)*fact(n)/fact(i)/fact(n-i) for i in range(n+1)]) # (x+c)**n
extend = lambda L1, L2: map(lambda a, b: (0. if a is None else a, 0. if b is None else b), L1, L2)
class Polinome:
def __init__(self, *P): self.P = map(float, P)
def __str__(self): return str(reduce(lambda R, c: '%s+x*(%s)'%(c, R), reversed(self.P))).replace('(0.0+', '(').replace('+x*(1.0)', '+x').replace('+x*(-1.0)', '-x')
def __mul__(self, other): return Polinome(*[a*other for a in self.P])
def __add__(self, other): return Polinome(*[a+b for a, b in extend(self.P, other.P)])
def __sub__(self, other): return Polinome(*[a-b for a, b in extend(self.P, other.P)])
def offset(self, c): # ==> P(x+c)
return sum([pow_xc(c, n)*p for n, p in enumerate(self.P)], Polinome())
def __call__(self, x): return Polinome(sum([p*x**n for n, p in enumerate(self.P)]))
def integrate(self, i): #i - center polinome, ==> [P-, P+]
PI = Polinome(0, *[p/(n+1.) for n, p in enumerate(self.P)])
return [PI.offset(.5)-PI(i-.5), PI(i+.5)-PI.offset(-.5)]
print ' template <int K> inline double bspline(double x){ static_assert(K<=%i, "code not implemented"); return 0; }\n'%int(sys.argv[1])
Plist = [Polinome(1.)]
for k in range(1, int(sys.argv[1])+1):
print ' template <> inline double bspline<%i>(double x){\n x = fabs(x);'%k
##for i in range(not k%2, 1+k/2): print ' if(x<%g) return %s;'%(i+.5*(k%2), Plist[i])
for i in range(k/2, k): print ' if(x<%g) return %s;'%(i-k*.5+1, Plist[i])
print ' return 0.;\n }'
L = [P.integrate(i-k*.5+.5) for i, P in enumerate(Plist)]
Plist = [L[0][0]]+[a[1]+b[0] for a, b in zip(L[:-1], L[1:])]+[L[-1][1]]
#for i, P in enumerate(Plist):
# for j in range(100):
# x = -.5*len(Plist)+i+j*1e-2
# print x, P(x)
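# Usage sketch (Python 2; hypothetical file name):
#   python gen_bspline.py 4
# prints the generic C++ declaration plus specializations bspline<1>..bspline<4>.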
|
#!/usr/bin/env python
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015
import sys, time, collections, csv
from subprocess import call, Popen, PIPE
def exec_no_fail(seq):
p = Popen(seq, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print 'err: ' + stderr
print str(seq) + ' failed.'
return stdout
def exec_fail(seq):
p = Popen(seq, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print 'err: ' + stderr
print str(seq) + ' failed.'
sys.exit()
return stdout
def exec_raise(seq):
p = Popen(seq, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
print 'err: ' + stderr
print str(seq) + ' failed.'
        raise Exception('exec_raise failure')
return stdout
def finish_exec(p):
stdout, stderr = p.communicate()
if p.returncode != 0:
print 'err: ' + stderr
print "'" + str(p) + "' failed."
sys.exit()
return stdout
def run_standalone(exp, params):
exec_raise(['./output/' + exp + '/bin/standalone'] + [params])
Experiment = collections.namedtuple('Experiment', ['name', 'argument_name', 'argument_list'])
def main():
sample_size = 5
experiments = [Experiment('FileSourceOnly', '', ['']),
Experiment('FileSourceString', '', ['']),
Experiment('SequentialParse', '', ['']),
Experiment('ParallelParse', 'parallelism=', [str(i) for i in [1, 2, 4, 8, 16]]),
Experiment('ParallelParseOrdered', 'parallelism=', [str(i) for i in [1, 2, 4, 8, 16]])]
exec_raise(['make'])
results_file = open('results.csv', 'w')
results = csv.writer(results_file)
for exp in experiments:
for arg in exp.argument_list:
row = [exp.name, arg]
print exp.name, arg,
for i in range(sample_size):
before = time.time()
run_standalone(exp.name, exp.argument_name + arg)
after = time.time()
row.append(after - before)
print after - before,
results.writerow(row)
print
results_file.close()
if __name__ == "__main__":
main()
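# Each row written to results.csv has the form
#   [experiment name, argument value, t_1, ..., t_5]
# where the t_i are the five wall-clock timings collected per configuration.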
|
from pycnic.core import Handler, WSGI
from pycnic.utils import requires_validation
def has_proper_name(data):
if 'name' not in data or data['name'] != 'root':
raise ValueError('Expected \'root\' as name')
class NameHandler(Handler):
@requires_validation(has_proper_name)
def post(self):
return {'status': 'ok'}
class app(WSGI):
routes = [('/name', NameHandler())]
if __name__ == "__main__":
from wsgiref.simple_server import make_server
try:
print("Serving on 0.0.0.0:8080...")
make_server('0.0.0.0', 8080, app).serve_forever()
except KeyboardInterrupt:
pass
print("Done")
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Fecapi Decoders
# Generated: Mon Dec 17 16:34:10 2018
##################################################
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from PyQt4 import Qt
from gnuradio import blocks
from gnuradio import digital
from gnuradio import eng_notation
from gnuradio import fec
from gnuradio import gr
from gnuradio import qtgui
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from optparse import OptionParser
import sip
import sys
from gnuradio import qtgui
class fecapi_decoders(gr.top_block, Qt.QWidget):
def __init__(self, frame_size=30, puncpat='11'):
gr.top_block.__init__(self, "Fecapi Decoders")
Qt.QWidget.__init__(self)
self.setWindowTitle("Fecapi Decoders")
qtgui.util.check_set_qss()
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "fecapi_decoders")
self.restoreGeometry(self.settings.value("geometry").toByteArray())
##################################################
# Parameters
##################################################
self.frame_size = frame_size
self.puncpat = puncpat
##################################################
# Variables
##################################################
self.rate = rate = 2
self.polys = polys = [109, 79]
self.k = k = 7
self.samp_rate = samp_rate = 50000
self.enc_rep = enc_rep = fec.repetition_encoder_make(frame_size*8, 3)
self.enc_dummy = enc_dummy = fec.dummy_encoder_make(frame_size*8)
self.enc_ccsds = enc_ccsds = fec.ccsds_encoder_make(frame_size*8, 0, fec.CC_TAILBITING)
self.dec_rep = dec_rep = fec.repetition_decoder.make(frame_size*8, 3, 0.5)
self.dec_dummy = dec_dummy = fec.dummy_decoder.make(frame_size*8)
self.dec_cc = dec_cc = fec.cc_decoder.make(frame_size*8, k, rate, (polys), 0, -1, fec.CC_TAILBITING, False)
##################################################
# Blocks
##################################################
self.qtgui_time_sink_x_0 = qtgui.time_sink_f(
2048, #size
samp_rate, #samp_rate
'', #name
4 #number of inputs
)
self.qtgui_time_sink_x_0.set_update_time(0.01)
self.qtgui_time_sink_x_0.set_y_axis(-0.5, 1.5)
self.qtgui_time_sink_x_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0.enable_autoscale(False)
self.qtgui_time_sink_x_0.enable_grid(False)
self.qtgui_time_sink_x_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0.enable_control_panel(False)
self.qtgui_time_sink_x_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0.disable_legend()
labels = ['Input', 'Dummy', 'Rep. (Rate=3)', 'CC (K=7, Rate=2)', 'CCSDS',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 0.6, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(4):
if len(labels[i]) == 0:
self.qtgui_time_sink_x_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_time_sink_x_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0.pyqwidget(), Qt.QWidget)
self.top_grid_layout.addWidget(self._qtgui_time_sink_x_0_win)
self.fec_extended_encoder_1_0_0 = fec.extended_encoder(encoder_obj_list=enc_dummy, threading='capillary', puncpat=puncpat)
self.fec_extended_encoder_1_0 = fec.extended_encoder(encoder_obj_list=enc_rep, threading='capillary', puncpat=puncpat)
self.fec_extended_encoder_1 = fec.extended_encoder(encoder_obj_list=enc_ccsds, threading='capillary', puncpat=puncpat)
self.fec_extended_decoder_0_1_0 = fec.extended_decoder(decoder_obj_list=dec_dummy, threading= None, ann=None, puncpat=puncpat, integration_period=10000)
self.fec_extended_decoder_0_1 = fec.extended_decoder(decoder_obj_list=dec_rep, threading= None, ann=None, puncpat=puncpat, integration_period=10000)
self.fec_extended_decoder_0 = fec.extended_decoder(decoder_obj_list=dec_cc, threading= None, ann=None, puncpat=puncpat, integration_period=10000)
self.digital_map_bb_0_0_0_0 = digital.map_bb(([-1, 1]))
self.digital_map_bb_0_0_0 = digital.map_bb(([-1, 1]))
self.digital_map_bb_0_0 = digital.map_bb(([-1, 1]))
self.blocks_vector_source_x_0_1_0 = blocks.vector_source_b((frame_size/15)*[0, 0, 1, 0, 3, 0, 7, 0, 15, 0, 31, 0, 63, 0, 127], True, 1, [])
self.blocks_unpack_k_bits_bb_0 = blocks.unpack_k_bits_bb(8)
self.blocks_throttle_0 = blocks.throttle(gr.sizeof_char*1, samp_rate,True)
self.blocks_char_to_float_0_2_0 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0_2 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0_1 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0_0_0_0 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0_0_0 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0_0 = blocks.char_to_float(1, 1)
self.blocks_char_to_float_0 = blocks.char_to_float(1, 1)
##################################################
# Connections
##################################################
self.connect((self.blocks_char_to_float_0, 0), (self.fec_extended_decoder_0, 0))
self.connect((self.blocks_char_to_float_0_0, 0), (self.qtgui_time_sink_x_0, 3))
self.connect((self.blocks_char_to_float_0_0_0, 0), (self.qtgui_time_sink_x_0, 2))
self.connect((self.blocks_char_to_float_0_0_0_0, 0), (self.qtgui_time_sink_x_0, 1))
self.connect((self.blocks_char_to_float_0_1, 0), (self.qtgui_time_sink_x_0, 0))
self.connect((self.blocks_char_to_float_0_2, 0), (self.fec_extended_decoder_0_1, 0))
self.connect((self.blocks_char_to_float_0_2_0, 0), (self.fec_extended_decoder_0_1_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.blocks_unpack_k_bits_bb_0, 0))
self.connect((self.blocks_unpack_k_bits_bb_0, 0), (self.blocks_char_to_float_0_1, 0))
self.connect((self.blocks_unpack_k_bits_bb_0, 0), (self.fec_extended_encoder_1, 0))
self.connect((self.blocks_unpack_k_bits_bb_0, 0), (self.fec_extended_encoder_1_0, 0))
self.connect((self.blocks_unpack_k_bits_bb_0, 0), (self.fec_extended_encoder_1_0_0, 0))
self.connect((self.blocks_vector_source_x_0_1_0, 0), (self.blocks_throttle_0, 0))
self.connect((self.digital_map_bb_0_0, 0), (self.blocks_char_to_float_0, 0))
self.connect((self.digital_map_bb_0_0_0, 0), (self.blocks_char_to_float_0_2, 0))
self.connect((self.digital_map_bb_0_0_0_0, 0), (self.blocks_char_to_float_0_2_0, 0))
self.connect((self.fec_extended_decoder_0, 0), (self.blocks_char_to_float_0_0, 0))
self.connect((self.fec_extended_decoder_0_1, 0), (self.blocks_char_to_float_0_0_0, 0))
self.connect((self.fec_extended_decoder_0_1_0, 0), (self.blocks_char_to_float_0_0_0_0, 0))
self.connect((self.fec_extended_encoder_1, 0), (self.digital_map_bb_0_0, 0))
self.connect((self.fec_extended_encoder_1_0, 0), (self.digital_map_bb_0_0_0, 0))
self.connect((self.fec_extended_encoder_1_0_0, 0), (self.digital_map_bb_0_0_0_0, 0))
def closeEvent(self, event):
self.settings = Qt.QSettings("GNU Radio", "fecapi_decoders")
self.settings.setValue("geometry", self.saveGeometry())
event.accept()
def get_frame_size(self):
return self.frame_size
def set_frame_size(self, frame_size):
self.frame_size = frame_size
self.blocks_vector_source_x_0_1_0.set_data((self.frame_size/15)*[0, 0, 1, 0, 3, 0, 7, 0, 15, 0, 31, 0, 63, 0, 127], [])
def get_puncpat(self):
return self.puncpat
def set_puncpat(self, puncpat):
self.puncpat = puncpat
def get_rate(self):
return self.rate
def set_rate(self, rate):
self.rate = rate
def get_polys(self):
return self.polys
def set_polys(self, polys):
self.polys = polys
def get_k(self):
return self.k
def set_k(self, k):
self.k = k
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.qtgui_time_sink_x_0.set_samp_rate(self.samp_rate)
self.blocks_throttle_0.set_sample_rate(self.samp_rate)
def get_enc_rep(self):
return self.enc_rep
def set_enc_rep(self, enc_rep):
self.enc_rep = enc_rep
def get_enc_dummy(self):
return self.enc_dummy
def set_enc_dummy(self, enc_dummy):
self.enc_dummy = enc_dummy
def get_enc_ccsds(self):
return self.enc_ccsds
def set_enc_ccsds(self, enc_ccsds):
self.enc_ccsds = enc_ccsds
def get_dec_rep(self):
return self.dec_rep
def set_dec_rep(self, dec_rep):
self.dec_rep = dec_rep
def get_dec_dummy(self):
return self.dec_dummy
def set_dec_dummy(self, dec_dummy):
self.dec_dummy = dec_dummy
def get_dec_cc(self):
return self.dec_cc
def set_dec_cc(self, dec_cc):
self.dec_cc = dec_cc
def argument_parser():
parser = OptionParser(usage="%prog: [options]", option_class=eng_option)
parser.add_option(
"", "--frame-size", dest="frame_size", type="intx", default=30,
help="Set Frame Size [default=%default]")
parser.add_option(
"", "--puncpat", dest="puncpat", type="string", default='11',
help="Set puncpat [default=%default]")
return parser
def main(top_block_cls=fecapi_decoders, options=None):
if options is None:
options, _ = argument_parser().parse_args()
from distutils.version import StrictVersion
if StrictVersion(Qt.qVersion()) >= StrictVersion("4.5.0"):
style = gr.prefs().get_string('qtgui', 'style', 'raster')
Qt.QApplication.setGraphicsSystem(style)
qapp = Qt.QApplication(sys.argv)
tb = top_block_cls(frame_size=options.frame_size, puncpat=options.puncpat)
tb.start()
tb.show()
def quitting():
tb.stop()
tb.wait()
qapp.connect(qapp, Qt.SIGNAL("aboutToQuit()"), quitting)
qapp.exec_()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `sqlf` package."""
# import pytest
import sqlf
###############################################################################
# Data Serialisation UDFs
###############################################################################
def test_cbor_map():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_map() as m; """
assert {"m": b"\xa0"} == test()
def test_cbor_list():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_list() as m; """
assert {"m": b"\x80"} == test()
def test_cbor_map_insert():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_insert(cbor_map(), 'n', 3.14) as m; """
assert {"m": b"\xa1an\xfb@\t\x1e\xb8Q\xeb\x85\x1f"} == test()
def test_cbor_list_insert():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_insert(cbor_list(), 0, 3.14) as m; """
assert {"m": b"\x81\xfb@\t\x1e\xb8Q\xeb\x85\x1f"} == test()
def test_cbor_list_append():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_append(cbor_list(), 3.14) as m; """
assert {"m": b"\x81\xfb@\t\x1e\xb8Q\xeb\x85\x1f"} == test()
def test_cbor_has():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_has(cbor_insert(cbor_map(), 'n', 3.14), 'n') as m; """
assert {"m": 1} == test()
def test_cbor_get():
@sqlf.single_row
@sqlf.sqlf
def test():
""" select cbor_get(cbor_insert(cbor_map(), 'n', 3.14), 'n') as m; """
assert {"m": 3.14} == test()
|
import pytest
from mock import MagicMock
from airflow_monitor.data_fetcher import DbFetcher, decorate_fetcher
def test_db_fetcher_retries():
class TestException(Exception):
pass
db_fetcher = MagicMock(spec=DbFetcher)
func_mock = MagicMock(
side_effect=TestException(), __name__="get_airflow_dagruns_to_sync"
)
db_fetcher.get_airflow_dagruns_to_sync = func_mock
decorated_fetcher = decorate_fetcher(db_fetcher, "some label")
with pytest.raises(TestException):
decorated_fetcher.get_airflow_dagruns_to_sync()
    # the decorated fetcher is expected to retry the failing call (3 attempts in total)
assert func_mock.call_count == 3
|
'''
Style Transfer Network - Main network, which combines all rest
'''
import tensorflow as tf
# from utils import *
from functions import *
from encoder import Encoder
from decoder import Decoder
from samod import SAMod
class STNet:
def __init__(self, encoder_weights_path):
self.encoder = Encoder(encoder_weights_path)
self.decoder = Decoder()
self.SAModule = SAMod(512)
def transform(self, content, style):
# switch RGB to BGR
# content = tf.reverse(content, axis=[-1])
# style = tf.reverse(style, axis=[-1])
# preprocess image
# content = self.encoder.preprocess(content)
# style = self.encoder.preprocess(style)
# encode image
enc_c_layers = self.encoder.encode(content)
enc_s_layers = self.encoder.encode(style)
self.encoded_content_layers = enc_c_layers
self.encoded_style_layers = enc_s_layers
Fcsc_m = self.SAModule.map(enc_c_layers['relu4_1'], enc_c_layers['relu5_1'], enc_s_layers['relu4_1'], enc_s_layers['relu5_1'])
self.Fcsc_m = Fcsc_m
# decode target features back to image (generate image)
Ics = self.decoder.decode(Fcsc_m)
# deprocess image
# Ics = self.encoder.deprocess(Ics)
# switch BGR back to RGB
# Ics = tf.reverse(Ics, axis=[-1])
# clip to 0..255
# Ics = tf.clip_by_value(Ics, 0.0, 255.0)
return Ics
|
from django.urls import path
from .views import firstPage, site_redirection
urlpatterns = [
path('', firstPage, name='first_page'),
path('link/<slug:slug>/', site_redirection, name='site_redirection'),
]
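# For example, a request to /link/my-article/ resolves to
# site_redirection(request, slug='my-article'), while '' maps to firstPage.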
|
#
# PySNMP MIB module WWP-LEOS-PING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WWP-LEOS-PING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:38:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
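# Note: `mibBuilder` is injected into this module's namespace by pysnmp's
# MibBuilder when the compiled MIB is loaded; it is not imported here.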
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint")
AddressFamilyNumbers, = mibBuilder.importSymbols("IANA-ADDRESS-FAMILY-NUMBERS-MIB", "AddressFamilyNumbers")
InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, NotificationType, ModuleIdentity, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Counter32, iso, ObjectIdentity, TimeTicks, MibIdentifier, Counter64, Unsigned32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "ModuleIdentity", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Counter32", "iso", "ObjectIdentity", "TimeTicks", "MibIdentifier", "Counter64", "Unsigned32", "IpAddress")
DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention")
wwpModulesLeos, = mibBuilder.importSymbols("WWP-SMI", "wwpModulesLeos")
wwpLeosPingMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19))
wwpLeosPingMIB.setRevisions(('2012-04-02 00:00', '2001-07-03 12:57',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: wwpLeosPingMIB.setRevisionsDescriptions(('Add wwpLeosPingInetAddrType to support IP protocol version independent Inet addressing.', 'Initial Creation',))
if mibBuilder.loadTexts: wwpLeosPingMIB.setLastUpdated('201204020000Z')
if mibBuilder.loadTexts: wwpLeosPingMIB.setOrganization('Ciena, Inc')
if mibBuilder.loadTexts: wwpLeosPingMIB.setContactInfo(' Mib Meister 115 North Sullivan Road Spokane Valley, WA 99037 USA Phone: +1 509 242 9000 Email: support@ciena.com')
if mibBuilder.loadTexts: wwpLeosPingMIB.setDescription('The MIB for WWP Ping')
class PingFailCause(TextualConvention, Integer32):
description = 'The cause of the last ping failure.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))
namedValues = NamedValues(("unknownHost", 1), ("socketError", 2), ("bindError", 3), ("connectError", 4), ("missingHost", 5), ("asyncError", 6), ("nonBlockError", 7), ("mcastError", 8), ("ttlError", 9), ("mcastTtlError", 10), ("outputError", 11), ("unreachableError", 12), ("isAlive", 13), ("txRx", 14), ("commandCompleted", 15), ("noStatus", 16), ("sendRecvMismatch", 17))
class PingState(TextualConvention, Integer32):
description = 'The state of the last ping request.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("idle", 1), ("pinging", 2), ("pingComplete", 3), ("failed", 4))
wwpLeosPingMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1))
wwpLeosPingDelay = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingDelay.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingDelay.setDescription('The object specifies the minimum amount of time to wait before sending the next packet in a sequence after receiving a response or declaring a timeout for a previous packet.')
wwpLeosPingPacketSize = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1464)).clone(56)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketSize.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketSize.setDescription('The size of the ping packets to send to the target.')
wwpLeosPingActivate = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 3), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingActivate.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingActivate.setDescription("Ping can be activated by setting this object to true. Once the ping operation is completed, the object is set to 'false'. This object can be set to 'false' by the Management Station to stop the ping.")
wwpLeosPingAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 4), AddressFamilyNumbers()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingAddrType.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingAddrType.setDescription('The address type associated with wwpLeosPingAddr. With the new wwpLeosPingInetAddrType being introduced to support RFC 4001, this OID will only be used when wwpLeosPingAddr is a host name or an IPv4 address. Otherwise, it will be set to other(0).')
wwpLeosPingAddr = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingAddr.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingAddr.setDescription('The host name or IP address of the device to be pinged. wwpLeosPingAddrType determines if address is host name or IP address.')
wwpLeosPingPacketCount = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketCount.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketCount.setDescription('Specifies the number of ICMP requests to send to the target.')
wwpLeosPingPacketTimeout = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setDescription("Specifies the amount of time to wait for a response to a transmitted packet before declaring the packet 'dropped'.")
wwpLeosPingSentPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingSentPackets.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingSentPackets.setDescription('The number of ping packets that have been sent to the target.')
wwpLeosPingReceivedPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setDescription('The number of ping packets that have been received from the target.')
wwpLeosPingFailCause = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 10), PingFailCause()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingFailCause.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingFailCause.setDescription('The result of the ping.')
wwpLeosPingState = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 11), PingState().clone('idle')).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingState.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingState.setDescription('The state of the ping process. The possible states include pinging, idle, complete or failed.')
wwpLeosPingUntilStopped = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 12), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setDescription("Setting this object to true prior to wwpLeosPingActivate will cause the device to ping the specified host until wwpLeosPingActivate is set to false. The object cannot be modified once the ping is active. The object returns to 'false' once the ping is halted.")
wwpLeosPingInetAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 13), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setStatus('current')
if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setDescription('The Inet address type associated with wwpLeosPingAddr. When set to: ipv4 : wwpLeosPingAddr should be compliant with InetAddressIPv4 from RFC 4001 ipv6 : wwpLeosPingAddr should be compliant with InetAddressIPv6 from RFC 4001.')
mibBuilder.exportSymbols("WWP-LEOS-PING-MIB", wwpLeosPingMIB=wwpLeosPingMIB, wwpLeosPingDelay=wwpLeosPingDelay, wwpLeosPingPacketTimeout=wwpLeosPingPacketTimeout, wwpLeosPingPacketSize=wwpLeosPingPacketSize, wwpLeosPingFailCause=wwpLeosPingFailCause, wwpLeosPingSentPackets=wwpLeosPingSentPackets, PingState=PingState, wwpLeosPingPacketCount=wwpLeosPingPacketCount, wwpLeosPingState=wwpLeosPingState, wwpLeosPingMIBObjects=wwpLeosPingMIBObjects, wwpLeosPingInetAddrType=wwpLeosPingInetAddrType, PingFailCause=PingFailCause, wwpLeosPingReceivedPackets=wwpLeosPingReceivedPackets, PYSNMP_MODULE_ID=wwpLeosPingMIB, wwpLeosPingAddrType=wwpLeosPingAddrType, wwpLeosPingUntilStopped=wwpLeosPingUntilStopped, wwpLeosPingActivate=wwpLeosPingActivate, wwpLeosPingAddr=wwpLeosPingAddr)
|
#!/usr/bin/env python
from distutils.util import strtobool
from time import sleep
import configargparse
from github3 import login
from github3.exceptions import NotFoundError, GitHubException
def toggle_enforce_admin(options):
access_token, owner, repo_name, branch_name, retries, github_repository = options.access_token, options.owner, options.repo, options.branch, int(options.retries), options.github_repository
if not owner and not repo_name and github_repository and "/" in github_repository:
owner = github_repository.split("/")[0]
repo_name = github_repository.split("/")[1]
if owner == '' or repo_name == '':
print('Owner and repo or GITHUB_REPOSITORY not set')
raise RuntimeError
enforce_admins = bool(strtobool(options.enforce_admins)) if options.enforce_admins is not None and not options.enforce_admins == '' else None
# or using an access token
print(f"Getting branch protection settings for {owner}/{repo_name}")
protection = get_protection(access_token, branch_name, owner, repo_name)
print(f"Enforce admins branch protection enabled? {protection.enforce_admins.enabled}")
# save the current status for use later on if desired
print(f"::set-output name=initial_status::{protection.enforce_admins.enabled}")
print(f"Setting enforce admins branch protection to {enforce_admins if enforce_admins is not None else not protection.enforce_admins.enabled}")
for i in range(retries):
try:
if enforce_admins is False:
disable(protection)
return
elif enforce_admins is True:
enable(protection)
return
elif protection.enforce_admins.enabled:
disable(protection)
return
elif not protection.enforce_admins.enabled:
enable(protection)
return
except GitHubException:
print(f"Failed to set enforce admins to {not protection.enforce_admins.enabled}. Retrying...")
sleep(i ** 2) # Exponential back-off
print(f"Failed to set enforce admins to {not protection.enforce_admins.enabled}.")
exit(1)
def get_protection(access_token, branch_name, owner, repo_name):
gh = login(token=access_token)
if gh is None:
print(f"Could not login. Have you provided credentials?")
raise exit(1)
try:
repo = gh.repository(owner, repo_name)
except NotFoundError:
print(f"Could not find repo https://github.com/{owner}/{repo_name}")
raise
branch = repo.branch(branch_name)
protection = branch.protection()
return protection
def enable(protection):
protection.enforce_admins.enable()
def disable(protection):
protection.enforce_admins.disable()
if __name__ == '__main__':
p = configargparse.ArgParser()
p.add_argument('-t', '--access-token', env_var='ACCESS_TOKEN', required=True, help='Github access token. https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line')
p.add_argument('-o', '--owner', env_var='OWNER', required=False, default='', help='Owner. For example benjefferies for https://github.com/benjefferies/branch-protection-bot')
p.add_argument('-r', '--repo', env_var='REPO', required=False, default='', help='Repo. For example branch-protection-bot for https://github.com/benjefferies/branch-protection-bot')
p.add_argument('--github_repository', env_var='GITHUB_REPOSITORY', required=False, default='', help='Owner and repo. For example benjefferies/branch-protection-bot for https://github.com/benjefferies/branch-protection-bot')
p.add_argument('-b', '--branch', env_var='BRANCH', default='master', help='Branch name')
p.add_argument('--retries', env_var='RETRIES', default=5, help='Number of times to retry before exiting')
p.add_argument('--enforce-admins', env_var='ENFORCE_ADMINS', default=None, help='Flag to explicitly enable or disable "Include administrators"')
toggle_enforce_admin(p.parse_args())
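# Example invocation (hypothetical file name; token redacted):
#   ACCESS_TOKEN=<token> GITHUB_REPOSITORY=owner/repo \
#       python branch_protection_bot.py --branch master --enforce-admins false
# Each option can be passed as a flag or via the environment variable listed above.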
|
import os
token = os.getenv("TELEGRAM_BOT_TOKEN", '')
forum = {
'login': os.getenv('FORUM_LOGIN', ''),
'password': os.getenv('FORUM_PASSWORD', '')
}
|
def solution(array, commands):
answer = []
    for start, end, k in commands:
        answer.append(sorted(array[start - 1:end])[k - 1])
return answer
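# Worked example:
#   solution([1, 5, 2, 6, 3, 7, 4], [[2, 5, 3], [4, 4, 1], [1, 7, 3]]) -> [5, 6, 3]
# e.g. [2, 5, 3]: sort array[1:5] = [5, 2, 6, 3] and take the 3rd element, 5.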
|
import statistics
data = [9, 12, 6, 10, 9, 5, 8, 7, 13, 11]
mode = statistics.mode(data)
print(mode)
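# 9 is the only value that occurs twice, so statistics.mode(data) prints 9.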
|
from solution import get_file_info
from glob import glob
import os.path
import shutil
from pathlib import Path
def test_nothing(tmp_path):
d = tmp_path / 'sub'
d.mkdir()
assert len(list(d.iterdir())) == 0
assert get_file_info(d) == []
def test_three_good_files(tmp_path):
d = tmp_path / 'sub'
d.mkdir()
for i in [1, 500, 1000]:
with open(d / f'file{i}', 'w') as f:
f.write('abcd\n' * i)
assert len(list(d.iterdir())) == 3
file_info = get_file_info(d)
assert type(file_info) == list
assert len(file_info) == 3
assert {'file1', 'file500', 'file1000'} == {os.path.basename(one_item['filename'])
for one_item in file_info}
    assert {'819abca7eabfd860df0d96b850cd43d64fce35c4',
            'e31780bcdeb62dfd8b939fa9b77dc7412cc83399',
            '3330b4373640f9e4604991e73c7e86bfd8da2dc3'} == {one_item['sha1']
                                                           for one_item in file_info}
if __name__ == '__main__':
for func in [test_nothing, test_three_good_files]:
try:
os.mkdir('temp')
func(Path('temp'))
finally:
shutil.rmtree('temp')
|
import numpy as np
import cv2
import matplotlib.pyplot as plt
# read the input image
img = cv2.imread("city.jpg")
# convert from BGR to RGB so we can plot using matplotlib
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# disable x & y axis
plt.axis('off')
# show the image
plt.imshow(img)
plt.show()
# crop a 200x200 region: rows 100-300 and columns 100-300
# adjust as needed, just don't exceed the image's rows & cols
cropped_img = img[100:300, 100:300]
# disable x & y axis
plt.axis('off')
# show the resulting image
plt.imshow(cropped_img)
plt.show()
# save the resulting image to disk
plt.imsave("city_cropped.jpg", cropped_img)
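# Note: cropped_img is in RGB order here, which is what plt.imsave expects;
# to save with cv2.imwrite instead, convert back to BGR first:
#   cv2.imwrite("city_cropped.jpg", cv2.cvtColor(cropped_img, cv2.COLOR_RGB2BGR))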
|
import os
import unittest
from click.testing import CliRunner
from changelog.commands import cli
class CliIntegrationTestCase(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
os.environ.setdefault('LC_ALL', 'en_US.utf-8')
os.environ.setdefault('LANG', 'en_US.utf-8')
def test_cli_init(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['init'])
self.assertTrue(os.path.isfile('CHANGELOG.md'))
self.assertTrue(result)
def test_cli_current(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['current'])
self.assertEqual(result.output.strip(), '0.0.0')
def test_cli_current_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['current'])
self.assertEqual(result.output.strip(), '')
def test_cli_suggest(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['suggest'])
self.assertEqual(result.output.strip(), '0.0.1')
def test_cli_suggest_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['suggest'])
self.assertEqual(result.output.strip(), '')
def test_cli_version_flag(self):
result = self.runner.invoke(cli, ['--version'])
self.assertTrue(result)
def test_cli_added(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['added', 'Adding a new feature'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest'])
self.assertEqual(suggest.output.strip(), '0.1.0')
def test_cli_added_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['added', 'Adding a new feature'], input='y\n')
self.assertEqual(result.output.strip(), 'No CHANGELOG.md found, do you want to create one? [y/N]: y')
def test_cli_changed(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['changed', 'Changing a feature'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest'])
self.assertEqual(suggest.output.strip(), '0.0.1')
def test_cli_changed_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['changed', 'changing a feature'], input='y\n')
self.assertEqual(result.output.strip(), 'No CHANGELOG.md found, do you want to create one? [y/N]: y')
def test_cli_fixed(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['fixed', 'Fix a Bug'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest'])
self.assertEqual(suggest.output.strip(), '0.0.1')
def test_cli_suggest_type_fixed(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['fixed', 'Fix a Bug'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest', '--type'])
self.assertEqual(suggest.output.strip(), 'patch')
def test_cli_fixed_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['fixed', 'Fix a Bug'], input='y\n')
self.assertEqual(result.output.strip(), 'No CHANGELOG.md found, do you want to create one? [y/N]: y')
def test_cli_removed(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['removed', 'Breaking Change'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest'])
self.assertEqual(suggest.output.strip(), '1.0.0')
def test_cli_suggest_type_removed(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
result = self.runner.invoke(cli, ['removed', 'Breaking Change'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['suggest', '--type'])
self.assertEqual(suggest.output.strip(), 'major')
def test_cli_removed_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['removed', 'Breaking Change'], input='y\n')
self.assertEqual(result.output.strip(), 'No CHANGELOG.md found, do you want to create one? [y/N]: y')
def test_cli_release(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
self.runner.invoke(cli, ['added', 'Adding a new feature'])
result = self.runner.invoke(cli, ['release'])
self.assertEqual(result.output.strip(), 'Planning on releasing version 0.1.0. Proceed? [y/N]:')
def test_cli_release_y(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
self.runner.invoke(cli, ['added', 'Adding a new feature'])
result = self.runner.invoke(cli, ['release', '--yes'])
self.assertTrue(result)
suggest = self.runner.invoke(cli, ['current'])
self.assertEqual(suggest.output.strip(), '0.1.0')
def test_cli_release_missing(self):
with self.runner.isolated_filesystem():
result = self.runner.invoke(cli, ['release'])
self.assertEqual(result.output.strip(), 'No CHANGELOG.md found, do you want to create one? [y/N]:')
def test_cli_view(self):
with self.runner.isolated_filesystem():
self.runner.invoke(cli, ['init'])
self.runner.invoke(cli, ['added', 'Adding a new feature'])
result = self.runner.invoke(cli, ['view'])
self.assertTrue(result)
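# Taken together, the suggest tests encode the semver mapping used by the CLI:
# added -> minor bump, changed/fixed -> patch bump, removed -> major bump.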
|
#!/usr/bin/python
#coding=utf-8
#@+leo-ver=5-thin
#@+node:bob.20180206123613.1: * @file ../plugins/leo_babel/tests/idle_time.py
#@@first
#@@first
#@@language python
#@@tabwidth -4
#@+<< imports >>
#@+node:bob.20180206123613.2: ** << imports >>
import os
import time
# import traceback
from leo.core import leoGlobals as leoG
assert leoG
#@-<< imports >>
#@+<< version >>
#@+node:bob.20180206123613.3: ** << version >>
version = '1.0'
#@-<< version >>
#@+others
#@+node:bob.20180206123725.1: ** class IdleTime
class IdleTime:
""" This is an implementation of the Leo-Editor
class IdleTime() for use with Leo-Bridge.
"""
#@+others
#@+node:bob.20180206125732.1: *3* Class Parameters
list_active = list()
list_inactive = list()
#@+node:bob.20180206123842.1: *3* __init__()
def __init__(self, handler, delay=500, tag=None):
""" Create an Idle Time Object Instance
Arguments:
handler: Function to execute when idle
delay: Minimum time in milliseconds between
calls to handler
tag: Identifier for the purpose of the handler
Returns:
None
"""
self._handler = handler
self._delay = delay / 1000.
self._tag = tag
self._active = False
IdleTime.list_inactive.append(self)
#traceStk = [lix.strip() for lix in traceback.format_stack()]
#leoG.trace('Trace: {0}'.format(traceStk[-2]))
#leoG.trace('IdleTime() {0}'.format(id(self)))
#@+node:bob.20180206124140.1: *3* start()
def start(self):
""" Start an Idle Time Instance
Arguments:
self: IdleTime instance
Returns:
None
"""
#leoG.trace(id(self))
IdleTime.list_inactive.remove(self)
self._nexttime = time.process_time()
IdleTime.list_active.insert(0, self)
self._active = True
#@+node:bob.20180206125022.1: *3* stop()
def stop(self):
""" Stop an Idle Time Instance
Arguments:
self: IdleTime instance
Returns:
None
"""
#leoG.trace(id(self))
if self._active:
IdleTime.list_active.remove(self)
IdleTime.list_inactive.append(self)
self._active = False
#@+node:bob.20180206123934.1: *3* idle() Class method
@classmethod
def idle(cls):
""" Application idle -- Except for Idle Time
handler execution
Arguments:
cls: The IdleTime class object
Returns:
None
"""
#traceStk = [lix.strip() for lix in traceback.format_stack()]
#leoG.trace('Trace: {0}'.format(traceStk[-2]))
itoLast = 0
while True:
if not cls.list_active:
break
# pylint: disable=no-member
os.sched_yield()
timeCur = time.process_time()
idleTimeObj = cls.list_active.pop(0)
#leoG.trace('Popped {0} leaving {1}'.format(id(idleTimeObj), [id(ent) for ent in cls.list_active]))
if timeCur >= idleTimeObj._nexttime:
nexttime = timeCur + idleTimeObj._delay
idleTimeObj._nexttime = nexttime
for idx, idleTimeObj2 in enumerate(cls.list_active):
if nexttime < idleTimeObj2._nexttime:
cls.list_active.insert(idx, idleTimeObj)
break
else:
cls.list_active.append(idleTimeObj)
if itoLast != idleTimeObj:
itoLast = idleTimeObj
idleTimeObj._handler(idleTimeObj)
else:
# Nothing to run yet
cls.list_active.insert(0, idleTimeObj)
#@-others
#@+node:bob.20180206123613.16: ** main()
def main():
""" Command Line Program Entry point
"""
raise NotImplementedError('{0} is not a command line program.'.format(__file__))
#@-others
if __name__ == "__main__":
main()
#@-leo
|
# coding=utf-8
"""
Copyright 2012 Ali Ok (aliokATapacheDOTorg)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import unittest
from trnltk.morphology.contextless.parser.bruteforceverbrootfinder import BruteForceVerbRootFinder
from trnltk.morphology.contextless.parser.test.parser_test import ParserTest
from trnltk.morphology.contextless.parser.parser import ContextlessMorphologicalParser, logger as parser_logger
from trnltk.morphology.contextless.parser.suffixapplier import logger as suffix_applier_logger
from trnltk.morphology.morphotactics.basicsuffixgraph import BasicSuffixGraph
class ParserTestWithBruteForceVerbRootFinder(ParserTest):
@classmethod
def setUpClass(cls):
super(ParserTestWithBruteForceVerbRootFinder, cls).setUpClass()
def setUp(self):
logging.basicConfig(level=logging.INFO)
parser_logger.setLevel(logging.INFO)
suffix_applier_logger.setLevel(logging.INFO)
suffix_graph = BasicSuffixGraph()
suffix_graph.initialize()
        self.mock_brute_force_verb_root_finder = BruteForceVerbRootFinder()
        self.parser = ContextlessMorphologicalParser(suffix_graph, None, [self.mock_brute_force_verb_root_finder])
def test_should_mark_unparsable(self):
self.assert_not_parsable(u'd')
self.assert_not_parsable(u'dp')
self.assert_not_parsable(u'ayl')
self.assert_not_parsable(u'anf')
self.assert_not_parsable(u'azz')
self.assert_not_parsable(u'ddr')
self.assert_not_parsable(u'xxx')
def test_should_find_one_result_for_words_not_acceptable_by_suffix_graph(self):
self.assert_parse_correct(u'asdasmo', u'asdasmo(asdasmomak)+Verb+Pos+Imp+A2sg')
self.assert_parse_correct(u'balpaze', u'balpaze(balpazemek)+Verb+Pos+Imp+A2sg')
def test_should_parse_simple_verbs(self):
self.assert_parse_correct(u'de', u'de(demek)+Verb+Pos+Imp+A2sg')
self.assert_parse_correct(u'git', u'git(gitmek)+Verb+Pos+Imp+A2sg', u'gi(gimek)+Verb+Verb+Caus(t[t])+Pos+Imp+A2sg')
self.assert_parse_correct(u'sok', u'sok(sokmak)+Verb+Pos+Imp+A2sg')
self.assert_parse_correct(u'deyip',
u'deyip(deyipmek)+Verb+Pos+Imp+A2sg',
u'de(demek)+Verb+Pos+Adv+AfterDoingSo(+yIp[yip])',
u'dey(deymek)+Verb+Pos+Adv+AfterDoingSo(+yIp[ip])')
self.assert_parse_correct(u'sokacak',
u'sok(sokmak)+Verb+Pos+Fut(+yAcAk[acak])+A3sg',
u'sok(sokmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero',
u'sok(sokmak)+Verb+Pos+Adj+FutPart(+yAcAk[acak])+Pnon',
u'sok(sokmak)+Verb+Pos+Noun+FutPart(+yAcAk[acak])+A3sg+Pnon+Nom',
u'sok(sokmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'sokacak(sokacakmak)+Verb+Pos+Imp+A2sg')
self.assert_parse_correct(u'saldı', u'sal(salmak)+Verb+Pos+Past(dI[dı])+A3sg', u'saldı(saldımak)+Verb+Pos+Imp+A2sg')
def test_should_parse_verbs_with_progressive_vowel_drop(self):
self.assert_parse_correct(u'başlıyor',
u'başl(başlamak)+Verb+Pos+Prog(Iyor[ıyor])+A3sg',
u'başl(başlımak)+Verb+Pos+Prog(Iyor[ıyor])+A3sg',
u'başlıyo(başlıyomak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'başlıyor(başlıyormak)+Verb+Pos+Imp+A2sg',
u'başlıyo(başlıyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'başlıyo(başlıyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'elliyorduk',
u'ell(ellemek)+Verb+Pos+Prog(Iyor[iyor])+Past(dI[du])+A1pl(k[k])',
u'ell(ellimek)+Verb+Pos+Prog(Iyor[iyor])+Past(dI[du])+A1pl(k[k])',
u'elliyor(elliyormak)+Verb+Pos+Past(dI[du])+A1pl(k[k])',
u'elliyorduk(elliyordukmak)+Verb+Pos+Imp+A2sg',
u'elliyo(elliyomak)+Verb+Pos+Aor(+Ar[r])+Past(dI[du])+A1pl(k[k])',
u'elliyor(elliyormak)+Verb+Pos+Adj+PastPart(dIk[duk])+Pnon',
u'elliyor(elliyormak)+Verb+Pos+Noun+PastPart(dIk[duk])+A3sg+Pnon+Nom')
self.assert_parse_correct(u'oynuyorlar',
u'oyn(oynamak)+Verb+Pos+Prog(Iyor[uyor])+A3pl(lAr[lar])',
u'oyn(oynumak)+Verb+Pos+Prog(Iyor[uyor])+A3pl(lAr[lar])',
u'oynuyo(oynuyomak)+Verb+Pos+Aor(+Ar[r])+A3pl(lAr[lar])',
u'oynuyorla(oynuyorlamak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'oynuyorlar(oynuyorlarmak)+Verb+Pos+Imp+A2sg',
u'oynuyorla(oynuyorlamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'oynuyo(oynuyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3pl(lAr[lar])+Pnon+Nom',
u'oynuyorla(oynuyorlamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'söylüyorsun',
u'söyl(söylemek)+Verb+Pos+Prog(Iyor[üyor])+A2sg(sIn[sun])',
u'söyl(söylümek)+Verb+Pos+Prog(Iyor[üyor])+A2sg(sIn[sun])',
u'söylüyo(söylüyomak)+Verb+Pos+Aor(+Ar[r])+A2sg(sIn[sun])',
u'söylüyor(söylüyormak)+Verb+Pos+Imp+A3sg(sIn[sun])',
u'söylüyorsun(söylüyorsunmak)+Verb+Pos+Imp+A2sg',
u'söylüyorsu(söylüyorsumak)+Verb+Verb+Pass(+In[n])+Pos+Imp+A2sg')
self.assert_parse_correct(u'atlıyorsunuz',
u'atl(atlamak)+Verb+Pos+Prog(Iyor[ıyor])+A2pl(sInIz[sunuz])',
u'atl(atlımak)+Verb+Pos+Prog(Iyor[ıyor])+A2pl(sInIz[sunuz])',
u'atlıyo(atlıyomak)+Verb+Pos+Aor(+Ar[r])+A2pl(sInIz[sunuz])',
u'atlıyorsunuz(atlıyorsunuzmak)+Verb+Pos+Imp+A2sg')
self.assert_parse_correct(u'kazıyor',
u'kaz(kazmak)+Verb+Pos+Prog(Iyor[ıyor])+A3sg',
u'kaz(kazamak)+Verb+Pos+Prog(Iyor[ıyor])+A3sg',
u'kaz(kazımak)+Verb+Pos+Prog(Iyor[ıyor])+A3sg',
u'kazıyo(kazıyomak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'kazıyor(kazıyormak)+Verb+Pos+Imp+A2sg',
u'kazıyo(kazıyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'kazıyo(kazıyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'koruyor',
u'kor(kormak)+Verb+Pos+Prog(Iyor[uyor])+A3sg',
u'kor(koramak)+Verb+Pos+Prog(Iyor[uyor])+A3sg',
u'kor(korumak)+Verb+Pos+Prog(Iyor[uyor])+A3sg',
u'koruyo(koruyomak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'koruyor(koruyormak)+Verb+Pos+Imp+A2sg',
u'koruyo(koruyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'koruyo(koruyomak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
def test_should_parse_verbs_with_aorist_A_and_causative_Ar(self):
self.assert_parse_correct(u'çıkar',
u'çık(çıkmak)+Verb+Pos+Aor(+Ar[ar])+A3sg',
u'çıka(çıkamak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'çıkar(çıkarmak)+Verb+Pos+Imp+A2sg',
u'çık(çıkmak)+Verb+Pos+Aor(+Ar[ar])+Adj+Zero',
u'çıka(çıkamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'çık(çıkmak)+Verb+Verb+Caus(Ar[ar])+Pos+Imp+A2sg',
u'çık(çıkmak)+Verb+Pos+Aor(+Ar[ar])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'çıka(çıkamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'ötercesine',
u'ötercesin(ötercesinmek)+Verb+Pos+Opt(A[e])+A3sg',
u'ötercesine(ötercesinemek)+Verb+Pos+Imp+A2sg',
u'öterces(ötercesmek)+Verb+Verb+Pass(+In[in])+Pos+Opt(A[e])+A3sg',
u'ötercesi(ötercesimek)+Verb+Verb+Pass(+In[n])+Pos+Opt(A[e])+A3sg',
u'öt(ötmek)+Verb+Pos+Aor(+Ar[er])+Adv+AsIf(cAsI!nA[cesine])',
u'öte(ötemek)+Verb+Pos+Aor(+Ar[r])+Adv+AsIf(cAsI!nA[cesine])',
u'öt(ötmek)+Verb+Pos+Aor(+Ar[er])+Adj+Zero+Adj+Equ(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])',
u'öt(ötmek)+Verb+Pos+Aor(+Ar[er])+Adj+Zero+Adj+Quite(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])',
u'öte(ötemek)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Adj+Equ(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])',
u'öte(ötemek)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Adj+Quite(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])',
u'öt(ötmek)+Verb+Pos+Aor(+Ar[er])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])',
u'öte(ötemek)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom+Adj+Equ(cA[ce])+Noun+Zero+A3sg+P3sg(+sI[si])+Dat(nA[ne])')
self.assert_parse_correct(u'zebersin',
u'zeb(zebmek)+Verb+Pos+Aor(+Ar[er])+A2sg(sIn[sin])',
u'zebe(zebemek)+Verb+Pos+Aor(+Ar[r])+A2sg(sIn[sin])',
u'zeber(zebermek)+Verb+Pos+Imp+A3sg(sIn[sin])',
u'zebersin(zebersinmek)+Verb+Pos+Imp+A2sg',
u'zeb(zebmek)+Verb+Verb+Caus(Ar[er])+Pos+Imp+A3sg(sIn[sin])',
u'zebersi(zebersimek)+Verb+Verb+Pass(+In[n])+Pos+Imp+A2sg')
def test_should_parse_verbs_with_aorist_I(self):
self.assert_parse_correct(u'yatır',
u'ya(yamak)+Verb+Verb+Caus(t[t])+Pos+Aor(+Ir[ır])+A3sg',
u'ya(yamak)+Verb+Verb+Caus(t[t])+Verb+Caus(Ir[ır])+Pos+Imp+A2sg',
u'yat(yatmak)+Verb+Pos+Aor(+Ir[ır])+A3sg',
u'yat(yatmak)+Verb+Verb+Caus(Ir[ır])+Pos+Imp+A2sg',
u'yatı(yatımak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'yatır(yatırmak)+Verb+Pos+Imp+A2sg',
u'yat(yatmak)+Verb+Pos+Aor(+Ir[ır])+Adj+Zero',
u'yatı(yatımak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'ya(yamak)+Verb+Verb+Caus(t[t])+Pos+Aor(+Ir[ır])+Adj+Zero',
u'yat(yatmak)+Verb+Pos+Aor(+Ir[ır])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'yatı(yatımak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'ya(yamak)+Verb+Verb+Caus(t[t])+Pos+Aor(+Ir[ır])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'gelir',
u'gel(gelmek)+Verb+Pos+Aor(+Ir[ir])+A3sg',
u'geli(gelimek)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'gelir(gelirmek)+Verb+Pos+Imp+A2sg',
u'gel(gelmek)+Verb+Pos+Aor(+Ir[ir])+Adj+Zero',
u'geli(gelimek)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'gel(gelmek)+Verb+Verb+Caus(Ir[ir])+Pos+Imp+A2sg',
u'gel(gelmek)+Verb+Pos+Aor(+Ir[ir])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'geli(gelimek)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'zopuracak',
u'zopur(zopurmak)+Verb+Pos+Fut(+yAcAk[acak])+A3sg',
u'zopuracak(zopuracakmak)+Verb+Pos+Imp+A2sg',
u'zopur(zopurmak)+Verb+Pos+Adj+FutPart(+yAcAk[acak])+Pnon',
u'zopur(zopurmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero',
u'zop(zopmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+A3sg',
u'zop(zopmak)+Verb+Verb+Caus(Ir[ur])+Pos+Adj+FutPart(+yAcAk[acak])+Pnon',
u'zop(zopmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+Adj+Zero',
u'zopur(zopurmak)+Verb+Pos+Noun+FutPart(+yAcAk[acak])+A3sg+Pnon+Nom',
u'zop(zopmak)+Verb+Verb+Caus(Ir[ur])+Pos+Noun+FutPart(+yAcAk[acak])+A3sg+Pnon+Nom',
u'zopur(zopurmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'zop(zopmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
self.assert_parse_correct(u'zoburacak',
u'zobur(zoburmak)+Verb+Pos+Fut(+yAcAk[acak])+A3sg',
u'zoburacak(zoburacakmak)+Verb+Pos+Imp+A2sg',
u'zobur(zoburmak)+Verb+Pos+Adj+FutPart(+yAcAk[acak])+Pnon',
u'zobur(zoburmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero',
u'zob(zobmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+A3sg',
u'zob(zobmak)+Verb+Verb+Caus(Ir[ur])+Pos+Adj+FutPart(+yAcAk[acak])+Pnon',
u'zob(zobmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+Adj+Zero',
u'zobur(zoburmak)+Verb+Pos+Noun+FutPart(+yAcAk[acak])+A3sg+Pnon+Nom',
u'zob(zobmak)+Verb+Verb+Caus(Ir[ur])+Pos+Noun+FutPart(+yAcAk[acak])+A3sg+Pnon+Nom',
u'zobur(zoburmak)+Verb+Pos+Fut(+yAcAk[acak])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'zob(zobmak)+Verb+Verb+Caus(Ir[ur])+Pos+Fut(+yAcAk[acak])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
def test_should_parse_verbs_with_causative_t(self):
self.assert_parse_correct(u'kapattım',
u'kapat(kapatmak)+Verb+Pos+Past(dI[tı])+A1sg(+Im[m])',
u'kapattı(kapattımak)+Verb+Neg(m[m])+Imp+A2sg',
u'kapattım(kapattımmak)+Verb+Pos+Imp+A2sg',
u'kapa(kapamak)+Verb+Verb+Caus(t[t])+Pos+Past(dI[tı])+A1sg(+Im[m])')
self.assert_parse_correct(u'yürütecekmiş',
u'yürütecek(yürütecekmek)+Verb+Pos+Narr(mIş[miş])+A3sg',
u'yürütecekmiş(yürütecekmişmek)+Verb+Pos+Imp+A2sg',
u'yürüt(yürütmek)+Verb+Pos+Fut(+yAcAk[ecek])+Narr(mIş[miş])+A3sg',
u'yürütecek(yürütecekmek)+Verb+Pos+Narr(mIş[miş])+Adj+Zero',
u'yürütecekmi(yürütecekmimek)+Verb+Verb+Recip(+Iş[ş])+Pos+Imp+A2sg',
u'yür(yürmek)+Verb+Verb+Caus(It[üt])+Pos+Fut(+yAcAk[ecek])+Narr(mIş[miş])+A3sg',
u'yürü(yürümek)+Verb+Verb+Caus(t[t])+Pos+Fut(+yAcAk[ecek])+Narr(mIş[miş])+A3sg',
u'yürütecek(yürütecekmek)+Verb+Pos+Narr(mIş[miş])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom')
def test_should_parse_verbs_with_causative_It(self):
self.assert_parse_correct(u'akıtmışlar',
u'ak(akmak)+Verb+Verb+Caus(It[ıt])+Pos+Narr(mIş[mış])+A3pl(lAr[lar])',
u'akı(akımak)+Verb+Verb+Caus(t[t])+Pos+Narr(mIş[mış])+A3pl(lAr[lar])',
u'akıt(akıtmak)+Verb+Pos+Narr(mIş[mış])+A3pl(lAr[lar])',
u'akıtmışla(akıtmışlamak)+Verb+Pos+Aor(+Ar[r])+A3sg',
u'akıtmışlar(akıtmışlarmak)+Verb+Pos+Imp+A2sg',
u'akıtmışla(akıtmışlamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero',
u'akıt(akıtmak)+Verb+Pos+Narr(mIş[mış])+Adj+Zero+Noun+Zero+A3pl(lAr[lar])+Pnon+Nom',
u'akıtmışla(akıtmışlamak)+Verb+Pos+Aor(+Ar[r])+Adj+Zero+Noun+Zero+A3sg+Pnon+Nom',
u'ak(akmak)+Verb+Verb+Caus(It[ıt])+Pos+Narr(mIş[mış])+Adj+Zero+Noun+Zero+A3pl(lAr[lar])+Pnon+Nom',
u'akı(akımak)+Verb+Verb+Caus(t[t])+Pos+Narr(mIş[mış])+Adj+Zero+Noun+Zero+A3pl(lAr[lar])+Pnon+Nom')
self.assert_parse_correct(u'korkut',
u'korkut(korkutmak)+Verb+Pos+Imp+A2sg',
u'kork(korkmak)+Verb+Verb+Caus(It[ut])+Pos+Imp+A2sg',
u'korku(korkumak)+Verb+Verb+Caus(t[t])+Pos+Imp+A2sg')
def test_should_parse_verbs_with_causative_dIr(self):
self.assert_parse_correct(u'aldırsın',
u'ald(altmak)+Verb+Pos+Aor(+Ir[ır])+A2sg(sIn[sın])',
u'ald(aldmak)+Verb+Pos+Aor(+Ir[ır])+A2sg(sIn[sın])',
u'aldı(aldımak)+Verb+Pos+Aor(+Ar[r])+A2sg(sIn[sın])',
u'aldır(aldırmak)+Verb+Pos+Imp+A3sg(sIn[sın])',
u'aldırsın(aldırsınmak)+Verb+Pos+Imp+A2sg',
u'al(almak)+Verb+Verb+Caus(dIr[dır])+Pos+Imp+A3sg(sIn[sın])',
u'ald(altmak)+Verb+Verb+Caus(Ir[ır])+Pos+Imp+A3sg(sIn[sın])',
u'ald(aldmak)+Verb+Verb+Caus(Ir[ır])+Pos+Imp+A3sg(sIn[sın])',
u'aldırsı(aldırsımak)+Verb+Verb+Pass(+In[n])+Pos+Imp+A2sg')
self.assert_parse_correct(u'öldürelim',
u'öldür(öldürmek)+Verb+Pos+Opt(A[e])+A1pl(lIm[lim])',
u'öldüreli(öldürelimek)+Verb+Neg(m[m])+Imp+A2sg',
u'öldürelim(öldürelimmek)+Verb+Pos+Imp+A2sg',
u'öl(ölmek)+Verb+Verb+Caus(dIr[dür])+Pos+Opt(A[e])+A1pl(lIm[lim])',
u'öld(öldmek)+Verb+Verb+Caus(Ir[ür])+Pos+Opt(A[e])+A1pl(lIm[lim])',
u'öld(öltmek)+Verb+Verb+Caus(Ir[ür])+Pos+Opt(A[e])+A1pl(lIm[lim])')
self.assert_parse_correct(u'öttürsek',
u'öttür(öttürmek)+Verb+Pos+Cond(+ysA[se])+A1pl(k[k])',
u'öttür(öttürmek)+Verb+Pos+Desr(sA[se])+A1pl(k[k])',
u'öttürsek(öttürsekmek)+Verb+Pos+Imp+A2sg',
u'öttü(öttümek)+Verb+Pos+Aor(+Ar[r])+Cond(+ysA[se])+A1pl(k[k])',
u'öt(ötmek)+Verb+Verb+Caus(dIr[tür])+Pos+Cond(+ysA[se])+A1pl(k[k])',
u'öt(ötmek)+Verb+Verb+Caus(dIr[tür])+Pos+Desr(sA[se])+A1pl(k[k])',
u'öt(ötmek)+Verb+Verb+Caus(t[t])+Pos+Aor(+Ir[ür])+Cond(+ysA[se])+A1pl(k[k])',
u'öt(ötmek)+Verb+Verb+Caus(t[t])+Verb+Caus(Ir[ür])+Pos+Cond(+ysA[se])+A1pl(k[k])',
u'öt(ötmek)+Verb+Verb+Caus(t[t])+Verb+Caus(Ir[ür])+Pos+Desr(sA[se])+A1pl(k[k])')
def assert_parse_correct(self, word_to_parse, *args):
super(ParserTestWithBruteForceVerbRootFinder, self).assert_parse_correct(word_to_parse, *args)
        # Expected parses must be unique; duplicates indicate a test bug.
        assert len(args) == len(set(args))
if __name__ == '__main__':
unittest.main()
|
import json
from unittest.mock import call
import pytest
from flask_restalchemy import Api
from flask_restalchemy.decorators.request_hooks import before_request, after_request
from flask_restalchemy.tests.sample_model import Employee, Company, Address
@pytest.fixture
def sample_api(flask_app):
return Api(flask_app)
@pytest.fixture(autouse=True)
def create_test_sample(db_session):
company = Company(id=5, name="Terrans")
emp1 = Employee(id=1, firstname="Jim", lastname="Raynor", company=company)
addr1 = Address(street="5 Av", number="943", city="Tarsonis")
emp1.address = addr1
db_session.add(company)
db_session.add(emp1)
db_session.commit()
@pytest.mark.parametrize("decorator_verb", ["ALL", "GET"])
def test_get_item_preprocessor(sample_api, client, mocker, decorator_verb):
pre_processor_mock = mocker.Mock(return_value=None)
sample_api.add_model(
Employee,
request_decorators={decorator_verb: before_request(pre_processor_mock)},
)
resp = client.get("/employee/1")
assert resp.status_code == 200
pre_processor_mock.assert_called_once_with(id=1)
resp = client.post("/employee", data=json.dumps({"firstname": "Jeff"}))
assert resp.status_code == 201
    # 2 calls if all verbs were decorated, otherwise test only for GET call
    assert pre_processor_mock.call_count == (2 if decorator_verb == "ALL" else 1)
def test_get_collection_preprocessor(sample_api, client, mocker):
pre_processor_mock = mocker.Mock(return_value=None)
sample_api.add_model(
Employee, request_decorators=before_request(pre_processor_mock)
)
resp = client.get("/employee")
assert resp.status_code == 200
assert pre_processor_mock.call_args == call(id=None)
resp = client.post("/employee", data=json.dumps({"firstname": "Jeff"}))
assert resp.status_code == 201
assert pre_processor_mock.call_args == call()
resp = client.put("/employee/1", data=json.dumps({"lastname": "R."}))
assert resp.status_code == 200
assert pre_processor_mock.call_args == call(id=1)
assert pre_processor_mock.call_count == 3
def test_post_processors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_model(
Employee,
request_decorators={
"ALL": after_request(post_mock),
"POST": before_request(pre_mock),
},
)
data = {"firstname": "Ana", "lastname": "Queen"}
resp = client.post("/employee", data=json.dumps(data))
assert resp.status_code == 201
assert pre_mock.call_count == 1
employee_id = resp.get_json()["id"]
assert employee_id
assert post_mock.call_count == 1
post_mock_args = post_mock.call_args[0]
assert post_mock_args[0][1] == 201
assert post_mock_args[0][0].data == resp.data
def test_put_preprocessors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_model(
Employee,
request_decorators={
"PUT": [before_request(pre_mock), after_request(post_mock)]
},
)
data = {"firstname": "Ana", "lastname": "Queen"}
resp = client.put("/employee/1", data=json.dumps(data))
assert resp.status_code == 200
assert pre_mock.call_count == 1
assert pre_mock.call_args == call(id=1)
assert post_mock.call_count == 1
def test_delete_preprocessors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_model(
Employee,
request_decorators={
"DELETE": [before_request(pre_mock), after_request(post_mock)]
},
)
resp = client.delete("/employee/1")
assert resp.status_code == 204
assert pre_mock.call_args == call(id=1)
assert post_mock.call_args == call(("", 204), id=1)
def test_property_get_collection_processor(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
sample_api.add_property(
Employee,
Employee,
"colleagues",
request_decorators={"GET": before_request(pre_mock)},
)
resp = client.get("/employee/1/colleagues")
assert resp.status_code == 200
pre_mock.assert_called_once_with(id=None, relation_id=1)
def test_relation_get_item_preprocessor(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
sample_api.add_relation(
Company.employees, request_decorators={"GET": before_request(pre_mock)}
)
resp = client.get("/company/5/employees/1")
assert resp.status_code == 200
pre_mock.assert_called_once_with(relation_id=5, id=1)
def test_relation_get_collection_preprocessor(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
sample_api.add_relation(
Company.employees, request_decorators={"GET": before_request(pre_mock)}
)
resp = client.get("/company/5/employees")
assert resp.status_code == 200
pre_mock.assert_called_once_with(relation_id=5, id=None)
def test_relation_post_processors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_relation(
Company.employees,
request_decorators={
"POST": [before_request(pre_mock), after_request(post_mock)]
},
)
data = {"firstname": "Ana", "lastname": "Queen"}
resp = client.post("/company/5/employees", data=json.dumps(data))
assert resp.status_code == 201
pre_mock.assert_called_once_with(relation_id=5)
assert post_mock.call_count == 1
assert post_mock.call_args[1] == {"relation_id": 5}
def test_relation_put_preprocessors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_relation(
Company.employees,
request_decorators={
"PUT": [before_request(pre_mock), after_request(post_mock)]
},
)
data = {"firstname": "Ana", "lastname": "Queen"}
resp = client.put("/company/5/employees/1", data=json.dumps(data))
assert resp.status_code == 200
assert pre_mock.call_args == call(relation_id=5, id=1)
assert post_mock.call_count == 1
assert post_mock.call_args[1] == {"relation_id": 5, "id": 1}
def test_relation_delete_preprocessors(sample_api, client, mocker):
pre_mock = mocker.Mock(return_value=None)
post_mock = mocker.Mock(return_value=None)
sample_api.add_relation(
Company.employees,
request_decorators={
"DELETE": [before_request(pre_mock), after_request(post_mock)]
},
)
resp = client.delete("/company/5/employees/1")
assert resp.status_code == 204
assert pre_mock.call_count == 1
assert post_mock.call_count == 1
assert post_mock.call_args[1] == {"relation_id": 5, "id": 1}
|
# Copyright 2009-2017 Ram Rachum.
# This program is distributed under the MIT license.
'''Defines various tools for manipulating sequences.'''
import collections
import numbers
import types
import itertools
import random
from combi._python_toolbox import math_tools
from combi._python_toolbox import caching
from combi._python_toolbox import misc_tools
infinity = float('inf')
class UnorderedIterableException(Exception):
'''
An unordered iterable was encountered when we expected an orderable one.
'''
def are_equal_regardless_of_order(seq1, seq2):
'''
Do `seq1` and `seq2` contain the same elements, same number of times?
Disregards order of elements.
Currently will fail for items that have problems with comparing.
'''
from combi._python_toolbox import nifty_collections
return nifty_collections.Bag(seq1) == nifty_collections.Bag(seq2)
def flatten(iterable):
'''
Flatten a sequence, returning a sequence of all its items' items.
For example, `flatten([[1, 2], [3], [4, 'meow']]) == [1, 2, 3, 4, 'meow']`.
'''
# If that ain't a damn clever implementation, I don't know what is.
iterator = iter(iterable)
try:
return sum(iterator, next(iterator))
except StopIteration:
return []
class NO_FILL_VALUE(misc_tools.NonInstantiable):
'''
Sentinel that means: Don't fill last partition with default fill values.
'''
@misc_tools.limit_positional_arguments(2)
def partitions(sequence, partition_size=None, n_partitions=None,
allow_remainder=True, larger_on_remainder=False,
fill_value=NO_FILL_VALUE):
'''
Partition `sequence` into equal partitions of size `partition_size`, or
determine size automatically given the number of partitions as
`n_partitions`.
If the sequence can't be divided into precisely equal partitions, the last
partition will contain less members than all the other partitions.
Example:
>>> partitions([0, 1, 2, 3, 4], 2)
[[0, 1], [2, 3], [4]]
(You need to give *either* a `partition_size` *or* an `n_partitions`
argument, not both.)
Specify `allow_remainder=False` to enforce that the all the partition sizes
be equal; if there's a remainder while `allow_remainder=False`, an
exception will be raised.
By default, if there's a remainder, the last partition will be smaller than
the others. (e.g. a sequence of 7 items, when partitioned into pairs, will
have 3 pairs and then a partition with only 1 element.) Specify
`larger_on_remainder=True` to make the last partition be a bigger partition
in case there's a remainder. (e.g. a sequence of a 7 items divided into
pairs would result in 2 pairs and one triplet.)
If you want the remainder partition to be of equal size with the other
partitions, you can specify `fill_value` as the padding for the last
partition. A specified value for `fill_value` implies
`allow_remainder=True` and will cause an exception to be raised if
specified with `allow_remainder=False`.
Example:
>>> partitions([0, 1, 2, 3, 4], 3, fill_value='meow')
[[0, 1, 2], [3, 4, 'meow']]
'''
sequence = ensure_iterable_is_sequence(sequence)
sequence_length = len(sequence)
### Validating input: #####################################################
# #
if (partition_size is None) + (n_partitions is None) != 1:
raise Exception('You must specify *either* `partition_size` *or* '
                        '`n_partitions`.')
remainder_length = sequence_length % (partition_size if partition_size
is not None else n_partitions)
if not allow_remainder and remainder_length > 0:
raise Exception("You set `allow_remainder=False`, but there's a "
"remainder of %s left." % remainder_length)
# #
### Finished validating input. ############################################
if partition_size is None:
floored_partition_size, modulo = divmod(sequence_length,
n_partitions)
if modulo:
if larger_on_remainder:
partition_size = floored_partition_size
n_partitions += 1
# Extra partition will be joined into previous partition
else:
partition_size = floored_partition_size + 1
else: # modulo == 0
partition_size = floored_partition_size
if n_partitions is None:
n_partitions = math_tools.ceil_div(sequence_length, partition_size)
naive_length = partition_size * n_partitions
blocks = [sequence[i : i + partition_size] for i in
range(0, naive_length, partition_size)]
if naive_length != sequence_length:
assert blocks
if larger_on_remainder:
if len(blocks) >= 2:
small_block_to_append_back = blocks[-1]
del blocks[-1]
blocks[-1] += small_block_to_append_back
elif fill_value != NO_FILL_VALUE: # (We use elif because fill is never
# done if `larger_on_remainder=True`.)
filler = itertools.repeat(fill_value,
naive_length - sequence_length)
blocks[-1].extend(filler)
return blocks
def is_immutable_sequence(thing):
'''Is `thing` an immutable sequence, like `tuple`?'''
return isinstance(thing, collections.Sequence) and not \
isinstance(thing, collections.MutableSequence)
def to_tuple(single_or_sequence, item_type=None, item_test=None):
'''
Convert an item or a sequence of items into a tuple of items.
This is typically used in functions that request a sequence of items but
are considerate enough to accept a single item and wrap it in a tuple
`(item,)` themselves.
This function figures out whether the user entered a sequence of items, in
which case it will only be converted to a tuple and returned; or the user
entered a single item, in which case a tuple `(item,)` will be returned.
To aid this function in parsing, you may optionally specify `item_type`
which is the type of the items, or alternatively `item_test` which is a
callable that takes an object and returns whether it's a valid item. These
are necessary only when your items might be sequences themselves.
You may optionally put multiple types in `item_type`, and each object would
be required to match to at least one of them.
'''
if (item_type is not None) and (item_test is not None):
raise Exception('You may specify either `item_type` or '
'`item_test` but not both.')
if item_test is not None:
actual_item_test = item_test
elif item_type is not None:
actual_item_test = \
lambda candidate: isinstance(candidate, item_type)
else:
actual_item_test = None
if actual_item_test is None:
if isinstance(single_or_sequence, collections.Sequence):
return tuple(single_or_sequence)
elif single_or_sequence is None:
return tuple()
else:
return (single_or_sequence,)
else: # actual_item_test is not None
if actual_item_test(single_or_sequence):
return (single_or_sequence,)
elif single_or_sequence is None:
return ()
else:
return tuple(single_or_sequence)
def pop_until(sequence, condition=bool):
'''
Look for item in `sequence` that passes `condition`, popping away others.
When sequence is empty, propagates the `IndexError`.
'''
from combi._python_toolbox import cute_iter_tools
for item in cute_iter_tools.iterate_pop(sequence):
if condition(item):
return item
def get_recurrences(sequence):
'''
Get a `dict` of all items that repeat at least twice.
    The values of the dict are the numbers of repetitions of each item.
'''
from combi._python_toolbox import nifty_collections
return dict(
(item, n_recurrences) for item, n_recurrences in
nifty_collections.Bag(sequence).most_common() if n_recurrences >= 2
)
def ensure_iterable_is_immutable_sequence(iterable, default_type=tuple,
unallowed_types=(),
allow_unordered=True):
'''
Return a version of `iterable` that is an immutable sequence.
If `iterable` is already an immutable sequence, it returns it as is;
otherwise, it makes it into a `tuple`, or into any other data type
specified in `default_type`.
'''
from combi._python_toolbox import nifty_collections
assert isinstance(iterable, collections.Iterable)
if not allow_unordered and \
isinstance(iterable, nifty_collections.DefinitelyUnordered):
raise UnorderedIterableException
if isinstance(iterable, collections.MutableSequence) or \
isinstance(iterable, unallowed_types) or \
not isinstance(iterable, collections.Sequence):
return default_type(iterable)
else:
return iterable
def ensure_iterable_is_sequence(iterable, default_type=tuple,
unallowed_types=(bytes,),
allow_unordered=True):
'''
Return a version of `iterable` that is a sequence.
If `iterable` is already a sequence, it returns it as is; otherwise, it
makes it into a `tuple`, or into any other data type specified in
`default_type`.
'''
assert isinstance(iterable, collections.Iterable)
if not allow_unordered and isinstance(iterable, (set, frozenset)):
raise UnorderedIterableException
if isinstance(iterable, collections.Sequence) and \
not isinstance(iterable, unallowed_types):
return iterable
else:
return default_type(iterable)
class CuteSequenceMixin(misc_tools.AlternativeLengthMixin):
'''A sequence mixin that adds extra functionality.'''
def take_random(self):
'''Take a random item from the sequence.'''
return self[random.randint(0, get_length(self) - 1)]
def __contains__(self, item):
try: self.index(item)
except ValueError: return False
else: return True
class CuteSequence(CuteSequenceMixin, collections.Sequence):
'''A sequence type that adds extra functionality.'''
def get_length(sequence):
'''Get the length of a sequence.'''
return sequence.length if hasattr(sequence, 'length') else len(sequence)
def divide_to_slices(sequence, n_slices):
'''
Divide a sequence to slices.
Example:
>>> divide_to_slices(range(10), 3)
[range(0, 4), range(4, 7), range(7, 10)]
'''
from combi._python_toolbox import cute_iter_tools
assert isinstance(n_slices, numbers.Integral)
assert n_slices >= 1
sequence_length = get_length(sequence)
base_slice_length, remainder = divmod(sequence_length, n_slices)
indices = [0]
for i in range(n_slices):
indices.append(indices[-1] + base_slice_length + (remainder > i))
assert len(indices) == n_slices + 1
assert indices[0] == 0
assert indices[-1] == sequence_length
return [sequence[x:y] for x, y in
cute_iter_tools.iterate_overlapping_subsequences(indices)]
def is_subsequence(big_sequence, small_sequence):
'''
Check whether `small_sequence` is a subsequence of `big_sequence`.
For example:
>>> is_subsequence([1, 2, 3, 4], [2, 3])
True
>>> is_subsequence([1, 2, 3, 4], [4, 5])
False
This can be used on any kind of sequence, including tuples, lists and
strings.
'''
from combi._python_toolbox import nifty_collections
big_sequence = ensure_iterable_is_sequence(big_sequence,
allow_unordered=False)
small_sequence = ensure_iterable_is_sequence(small_sequence,
allow_unordered=False)
small_sequence_length = len(small_sequence)
last_index_that_subsequence_can_start = \
len(big_sequence) - len(small_sequence) + 1
matches = {}
for i, item in enumerate(big_sequence):
if matches:
new_matches = {}
for match_position, match_length in matches.items():
if small_sequence[match_length] == item:
new_matches[match_position] = match_length + 1
matches = new_matches
if (item == small_sequence[0]) and \
(i < last_index_that_subsequence_can_start):
matches[i] = 1
        for match_position, match_length in matches.items():
            if match_length == small_sequence_length:
                return True
    return False
|
if __name__ == '__main__':
N = int(input('Enter N: '))
tokens = []
print('Enter (token no, id): ')
for i in range(0, N):
token = input()
token = token.lstrip('(')
token = token.rstrip(')')
token = token.replace(' ', '')
token = token.split(',')
tokens.append(token)
print('Enter k: ')
bribe = input()
    # Compare values with == (not identity), and iterate over a copy since
    # the list is mutated inside the loop.
    for token in list(tokens):
        if token[1] == bribe:
            tokens.remove(token)
            tokens.insert(0, token)
print('The order is: ')
for token in tokens:
print('(%s, %s)' % (token[0], token[1]))
|
import io
import base64
import json
import os
import requests
from PIL import Image
HOST = "http://127.0.0.1:5000"
INPUT_IMAGE_PATH = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"images/input.jpg"
)
INPUT_TXT_PATH = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"images/input.txt"
)
def post_img():
inp_img = _load_img()
res = requests.post(
HOST + "/image",
json.dumps({"image": inp_img}),
headers={"Content-Type": "application/json"},
)
print("Response: ", res.json())
def _load_img() -> str:
img_pil = Image.open(INPUT_IMAGE_PATH)
img_pil = img_pil.convert("RGB")
buff = io.BytesIO()
img_pil.save(buff, format="JPEG")
img_b64 = base64.b64encode(buff.getvalue())
img_b64 = img_b64.decode()
with open(INPUT_TXT_PATH, "w") as f:
f.write(img_b64)
return img_b64
if __name__ == "__main__":
post_img()
|
class ExponentialMovingAverage(object):
    '''
    Exponential moving average of weights: recent data gets a higher weight.
    usage:
        # initialization
        ema = ExponentialMovingAverage(model, 0.999)
        # during training, sync the shadow weights right after each parameter update
        def train():
            optimizer.step()
            ema.update(model)
        # before eval, apply the shadow weights;
        # after eval (and after saving the model), restore the original parameters
        def evaluate():
            ema.apply_shadow(model)
            # evaluate
            ema.restore(model)
    '''
def __init__(self,model, decay):
self.decay = decay
self.shadow = {}
self.backup = {}
for name, param in model.named_parameters():
if param.requires_grad:
self.shadow[name] = param.data.clone()
def update(self,model):
for name, param in model.named_parameters():
if param.requires_grad:
assert name in self.shadow
new_average = (1.0 - self.decay) * param.data + self.decay * self.shadow[name]
self.shadow[name] = new_average.clone()
def apply_shadow(self,model):
for name, param in model.named_parameters():
if param.requires_grad:
assert name in self.shadow
self.backup[name] = param.data
param.data = self.shadow[name]
def restore(self,model):
for name, param in model.named_parameters():
if param.requires_grad:
assert name in self.backup
param.data = self.backup[name]
self.backup = {}
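# A minimal sketch of the flow described in the class docstring (assumes
# PyTorch; `model`, `optimizer`, `loader`, and `criterion` are placeholder
# names, not part of this module):
#
#   ema = ExponentialMovingAverage(model, decay=0.999)
#   for x, y in loader:
#       loss = criterion(model(x), y)
#       loss.backward()
#       optimizer.step()
#       ema.update(model)        # sync shadow weights after each update
#   ema.apply_shadow(model)      # evaluate with the averaged weights
#   # ... run evaluation / save checkpoint ...
#   ema.restore(model)           # put the raw training weights back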
|
import sys
import os.path
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from WizardVsWorld.classes.draw import *
from WizardVsWorld.classes.fsm import FSM
import WizardVsWorld.phases.player_movement_phase
import WizardVsWorld.phases.player_attack_phase
import WizardVsWorld.phases.enemy_attack_phase
import WizardVsWorld.phases.enemy_movement_phase
from WizardVsWorld.classes.entity import Player
def main():
pygame.init()
pygame.display.set_caption('Wizard vs. World v1.0.3')
pygame.display.set_icon(WIZ_LARGE_PNG)
player = Player()
player.currentTile = GRID.game_map[7][0]
ENTITIES.append(player)
GRID.generate_enemies(0)
fsm = FSM()
# Declare Phases
player_movement_phase = WizardVsWorld.phases.player_movement_phase.PlayerMovementPhase(player)
player_attack_phase = WizardVsWorld.phases.player_attack_phase.PlayerAttackPhase(player, player_movement_phase)
enemy_attack_phase = WizardVsWorld.phases.enemy_attack_phase.EnemyAICombatPhase()
enemy_movement_phase = WizardVsWorld.phases.enemy_movement_phase.EnemyAIMovement()
# Add Phases
fsm.add_phase(player_movement_phase)
fsm.add_phase(player_attack_phase)
fsm.add_phase(enemy_movement_phase)
fsm.add_phase(enemy_attack_phase)
# Start the FSM
fsm.restart()
# Gameplay Loop
while True:
fsm.update()
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit_game()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import os
import gzip
import sys
# Run this script under the eggNOG 5.0 per_tax_level folder,
# mirroring the following site.
# 33208_Metazoa
# http://eggnog5.embl.de/download/eggnog_5.0/per_tax_level/33208/
# 7742_Vertebrata
# http://eggnog5.embl.de/download/eggnog_5.0/per_tax_level/7742/
tax_list = {'Vertebrata': 7742, 'Metazoa': 33208}
tax_info = sys.argv[1] # either 'Vertebrata' or 'Metazoa'
if tax_info not in tax_list:
    sys.stderr.write("tax_info must be 'Vertebrata' or 'Metazoa'.\n")
sys.exit(1)
dirname_script = os.path.dirname(os.path.realpath(__file__))
dirname_output = 'MODtree_eggNOG50.%s.raw_alg' % tax_info
filename_out_base = 'MODtree_eggNOG50.%s' % tax_info
# Run under 33208_Metazoa with following file.
dirname_tax = '%d_%s' % (tax_list[tax_info], tax_info)
filename_members = '%d_members.tsv.gz' % tax_list[tax_info]
filename_members = os.path.join(dirname_tax, filename_members)
# Directory generated from 33208_raw_algs.tar
dirname_raw_alg = os.path.join(dirname_tax, '%d' % tax_list[tax_info])
# Make it by grep 'BLAST_UniProt_GN' e5.sequence_aliases.tsv
filename_GN = 'e5.sequence_aliases.BLAST_UniProt_GN.tsv.gz'
filename_species = os.path.join(dirname_script, 'MODtree_species.txt')
# UniProt_ID UP_taxId EN_taxId sp_code sp_name GOA_name
# UP000005640 9606 9606 HUMAN homo_sapiens 25.H_sapiens.goa
species_list = dict()
sys.stderr.write('Read %s ... ' % filename_species)
f_species = open(filename_species, 'r')
for line in f_species:
if line.startswith('#'):
continue
tokens = line.strip().split("\t")
tax_id = tokens[2]
sp_code = tokens[3]
species_list[tax_id] = sp_code
f_species.close()
sys.stderr.write('Done\n')
sys.stderr.write('Read gene names from %s...' % filename_GN)
gene_names = dict()
# 9541.XP_005587739.1 RFX2 BLAST_KEGG_NAME BLAST_UniProt_GN RefSeq_gene
f_GN = gzip.open(filename_GN, 'rt')
for line in f_GN:
tokens = line.strip().split("\t")
seq_id = tokens[0]
tmp_name = tokens[1]
# Refine the gene name
# because of DANRE name like si:ch211-151m7.6
tmp_name = tmp_name.replace(':', '_')
tmp_name = tmp_name.replace(' ', '_')
# because of CIOIN name like zf(cchc)-22
tmp_name = tmp_name.replace('(', '_').replace(')', '_')
tmp_name = tmp_name.replace('/', '_')
tax_id = seq_id.split('.')[0]
if tax_id in species_list:
gene_names[seq_id] = tmp_name
f_GN.close()
sys.stderr.write('Done.\n')
family2seq = dict()
exclude_family = dict()
f_members = open(filename_members, 'r')
if filename_members.endswith('.gz'):
f_members = gzip.open(filename_members, 'rt')
# In
# 7742 48URP 89 87 10029.XP_007630944.1,10036.XP_005064951.1,...
# Out
# E100F00001|ANOCA|NotAvail|ENSACAP00000018721 E100F00001 ENSACAP00000018721 ANOCA NotAvail
new_headers = dict()
f_out_txt = open('%s.raw.txt' % filename_out_base, 'w')
for line in f_members:
tokens = line.strip().split("\t")
family_id = tokens[1]
total_seqs = int(tokens[2])
total_species = int(tokens[3])
seq_list = []
sp_code_list = []
for tmp_id in tokens[4].split(','):
tmp_tax_id = tmp_id.split('.')[0]
tmp_prot_id = tmp_id.split('.')[1]
if tmp_tax_id in species_list:
tmp_sp_code = species_list[tmp_tax_id]
tmp_gene_name = 'NotAvail'
if tmp_id in gene_names:
tmp_gene_name = gene_names[tmp_id]
sp_code_list.append(tmp_sp_code)
seq_list.append(tmp_id)
seq_h = '%s|%s|%s|%s' % (family_id, tmp_sp_code,\
tmp_gene_name, tmp_prot_id)
new_headers[tmp_id] = seq_h
f_out_txt.write('%s\t%s\t%s\t%s\t%s\n' %
(seq_h, family_id, tmp_prot_id,
tmp_sp_code, tmp_gene_name))
count_seqs = len(seq_list)
if count_seqs == 0:
continue
if count_seqs > 1:
family2seq[family_id] = seq_list
f_members.close()
sys.stderr.write('Processed members.tsv.\n')
f_out_fa = open('%s.raw.fa' % filename_out_base, 'w')
for tmp_family_id in family2seq:
    tmp_filename_fa = os.path.join(dirname_raw_alg,
                                   '%s.raw_alg.faa.gz' % tmp_family_id)
    # Aligned sequences of this family, keyed by FASTA header. (The original
    # code reused the loop variable name here, shadowing the member list.)
    tmp_aligned_seqs = dict()
    f_fa = gzip.open(tmp_filename_fa, 'rt')
    for line in f_fa:
        if line.startswith('>'):
            tmp_h = line.strip().lstrip('>')
            tmp_aligned_seqs[tmp_h] = []
        else:
            tmp_aligned_seqs[tmp_h].append(line.strip())
    f_fa.close()
    for tmp_seq_id in family2seq[tmp_family_id]:
        if tmp_seq_id not in tmp_aligned_seqs:
            sys.stderr.write('%s has no sequences. (%s)\n' %
                             (tmp_seq_id, tmp_filename_fa))
            continue
        tmp_new_h = new_headers[tmp_seq_id]
        tmp_seq = ''.join(tmp_aligned_seqs[tmp_seq_id])
        # Strip alignment gaps so the raw (unaligned) sequence is written.
        tmp_new_seq = tmp_seq.replace('-', '')
        f_out_fa.write('>%s\n%s\n' % (tmp_new_h, tmp_new_seq))
f_out_fa.close()
|
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from jsonschema.exceptions import SchemaError
updateshift_schema = {
"type": "object",
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
},
"start_time": {
"type": "string"
},
"end_time": {
"type": "string"
},
"job_type": {
"type": "string"
},
"difficulty": {
"type": "integer"
},
"date": {
"type": "string"
},
"amount": {
"type": "integer"
},
"day_part": {
"type": "array"
},
"employees": {
"type": "array"
},
"note": {
"type": "string"
}
},
"required": ["id", "name", "start_time", "end_time", "job_type","difficulty", "date", "amount", "day_part"],
"additionalProperties": False
}
def validate_updateshift(data):
try:
validate(data, updateshift_schema)
    except ValidationError as e:
        # Return the message text rather than the exception object, so the
        # result stays JSON-serializable.
        return {"ok": False, "msg": str(e)}
    except SchemaError as e:
        return {"ok": False, "msg": str(e)}
return {"ok": True, 'data': data}
|
'''
Read a UTF-8 text file and return its contents as ASCII bytes, dropping
non-ASCII characters and replacing newlines with spaces.
'''
def openAsAscii(path):
    if path == "": path = "cipher.txt"
    with open(path, "rb") as file:
        data = file.read()
    udata = data.decode("utf-8")
    # Avoid shadowing the builtin name `bytearray`.
    ascii_bytes = udata.encode("ascii", errors="ignore")
    ascii_bytes = ascii_bytes.replace(b"\n", b" ")
    return ascii_bytes
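# Example (assumes a local "cipher.txt" next to the script):
#   data = openAsAscii("")   # an empty path falls back to "cipher.txt"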
|
import sys
import os
from subprocess import Popen, PIPE, STDOUT
RED = '\033[91m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
LIGHT_PURPLE = '\033[94m'
PURPLE = '\033[95m'
END = '\033[0m'
if __name__ == '__main__':
try:
src_dir = sys.argv[1]
new_dir = sys.argv[2]
except IndexError:
print "Usage: python do_compare.py src_dir new_idr"
sys.exit(0)
diff_dir = 'changes'
for root, dirs, files in os.walk('.'):
if src_dir in root:
for f in files:
if f.endswith('png'):
print "%(purple)sCompare%(end)s %(red)s%(file_name)s%(end)s\n" % {'purple': PURPLE, 'end': END, 'file_name': f, 'red': RED}
src_f = "%s/%s" % (src_dir, f)
new_f = "%s/%s" % (new_dir, f)
diff_f = "%s/%s" % (diff_dir, f)
# print src_f, new_f, diff_f
proccess = Popen(["compare", "-extract", "1024x768", "-verbose", "-metric", "PAE", src_f, new_f, diff_f], stdout=PIPE, stderr=STDOUT, close_fds=True)
stdoutdata, stderrdata = proccess.communicate()
print stdoutdata
proccess.stdout.close()
print "----------------------------\n"
|
from IPython.core.error import UsageError
from adlmagics.magics.adls.adls_folders_listing_magic import AdlsFoldersListingMagic
from adlmagics.exceptions import MagicArgumentError
from adlmagics.session_consts import session_adls_account, session_null_value
from adlmagics.models.adls_folder import AdlsFolder
from adlmagics.test.adls_magic_test_base import AdlsMagicTestBase
class AdlsFoldersListingMagicTest(AdlsMagicTestBase):
adls_account = "mock_adls_account"
folder_path = "test"
def test_execute_with_correct_arg_string(self):
arg_string = "--account %s --folder_path %s" % (AdlsFoldersListingMagicTest.adls_account, AdlsFoldersListingMagicTest.folder_path)
folders = self.__magic.execute(arg_string, None)
self.__validate(AdlsFoldersListingMagicTest.folder_path, folders)
arg_string = "--account %s" % (AdlsFoldersListingMagicTest.adls_account)
folders = self.__magic.execute(arg_string, None)
self.__validate("", folders)
def test_execute_with_incorrect_arg_string(self):
arg_string = "--account_1 %s --folder_path %s" % (AdlsFoldersListingMagicTest.adls_account, AdlsFoldersListingMagicTest.folder_path)
self.assertRaises(UsageError, self.__magic.execute, arg_string, None)
arg_string = "--account %s --folder_path_1 %s" % (AdlsFoldersListingMagicTest.adls_account, AdlsFoldersListingMagicTest.folder_path)
self.assertRaises(UsageError, self.__magic.execute, arg_string, None)
def test_execute_with_missing_account(self):
# account missing in arg_string, but not in session
arg_string = "--folder_path %s" % (AdlsFoldersListingMagicTest.folder_path)
self._session_service.set_session_item(session_adls_account.name, AdlsFoldersListingMagicTest.adls_account)
folders = self.__magic.execute(arg_string, None)
self.__validate(AdlsFoldersListingMagicTest.folder_path, folders)
# account missing in both arg_string and session
arg_string = "--folder_path %s" % (AdlsFoldersListingMagicTest.folder_path)
self._session_service.set_session_item(session_adls_account.name, session_null_value)
self.assertRaises(MagicArgumentError, self.__magic.execute, arg_string, None)
def setUp(self):
super(AdlsFoldersListingMagicTest, self).setUp()
self.__magic = AdlsFoldersListingMagic(self._session_service, self._presenter_factory, self._result_converter, self._adls_service)
def tearDown(self):
self.__magic = None
super(AdlsFoldersListingMagicTest, self).tearDown()
def __validate(self, folder_path, folders):
# Verify that the magic actually returns something
self.assertIsNotNone(folders)
        self.assertEqual([
"Listing azure data lake store folders under folder '%s' of account '%s'..." % (folder_path, AdlsFoldersListingMagicTest.adls_account),
"(%d) azure data lake store folder(s) listed." % (len(folders)),
"A list of %s" % (AdlsFolder.__name__)], self._presenter_factory.presented_logs)
self._presenter_factory.clear()
|
from pytorchtools.pytorchtools import EarlyStopping, save_model
|
from elasticsearch.helpers import bulk
from msg_archive import es_client, get_data
import utils
crs = utils.get_cursor()
sql = "select max(imsg) as highmsg from webdata.archive"
crs.execute(sql)
rec = crs.fetchone()
curr = rec["highmsg"]
with open(".highmessage") as ff:
last = int(ff.read().strip())
if curr > last:
success, failures = bulk(es_client, get_data(currmsg=last, verbose=True))
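    # Presumably the new high-water mark should be persisted after a
    # successful bulk load (this mirrors the commented-out loop below);
    # a minimal sketch:
    with open(".highmessage", "w") as ff:
        ff.write("%s" % curr)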
# num = 0
# data_gen = msg_archive.get_data(currmsg=last)
# for data in data_gen:
# num += 1
# vals = data["_source"]
# res = msg_archive.es_client.index(index="email", doc_type="mail",
# id=vals["id"], body=vals)
# if num % 100 == 0:
# print("Imported msg#", num)
# with open(".highmessage", "w") as ff:
# ff.write("%s" % curr)
|
cur_dir = ""
domain = ""
user_sid = ""
user_path = ""
system_root = ""
base_user_dir = ""
user_appdata_dir = ""
user_local_appdata_dir =""
user_roaming_appdata_dir = ""
user_temp_dir = ""
target = ""
elevated_username = ""
user_target = ""
admin_priv = ""
build = ""
execution_label = ""
artifact_file=""
execution_log=""
error_log=""
yaml_data_folder = ""
win_dir = ""
elevated_password = ""
|
from .connection import Connection
|
class RebarShapeMultiplanarDefinition(object, IDisposable):
"""
A specification for a simple 3D rebar shape.
RebarShapeMultiplanarDefinition(outOfPlaneBendDiameter: float)
"""
def Dispose(self):
""" Dispose(self: RebarShapeMultiplanarDefinition) """
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: RebarShapeMultiplanarDefinition,disposing: bool) """
pass
def SetPresenceOfSegments(
self, isDuplicateShapePresent, isStartConnectorPresent, isEndConnectorPresent
):
"""
SetPresenceOfSegments(self: RebarShapeMultiplanarDefinition,isDuplicateShapePresent: bool,isStartConnectorPresent: bool,isEndConnectorPresent: bool)
Simultaneously set the presence of all 3D segments.
"""
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self, outOfPlaneBendDiameter):
""" __new__(cls: type,outOfPlaneBendDiameter: float) """
pass
def __repr__(self, *args):
""" __repr__(self: object) -> str """
pass
DepthParamId = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Id of the parameter driving the multiplanar depth.
The depth is measured center-to-center of the bar.
A valid shape parameter must be assigned to DepthParamId before
the MultiplanarDefinition can be used in RebarShape creation.
Get: DepthParamId(self: RebarShapeMultiplanarDefinition) -> ElementId
Set: DepthParamId(self: RebarShapeMultiplanarDefinition)=value
"""
IsDuplicateShapePresent = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Whether the shape definition includes an offset
copy of the 2D shape.
Get: IsDuplicateShapePresent(self: RebarShapeMultiplanarDefinition) -> bool
"""
IsEndConnectorPresent = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Whether a perpendicular segment is constructed
from the end of the 2D shape.
Get: IsEndConnectorPresent(self: RebarShapeMultiplanarDefinition) -> bool
"""
IsStartConnectorPresent = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Whether a perpendicular segment is constructed
from the start of the 2D shape.
Get: IsStartConnectorPresent(self: RebarShapeMultiplanarDefinition) -> bool
"""
IsValidObject = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Specifies whether the .NET object represents a valid Revit entity.
Get: IsValidObject(self: RebarShapeMultiplanarDefinition) -> bool
"""
OutOfPlaneBendDiameter = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Bend diameter to be applied to the connector segments.
Get: OutOfPlaneBendDiameter(self: RebarShapeMultiplanarDefinition) -> float
Set: OutOfPlaneBendDiameter(self: RebarShapeMultiplanarDefinition)=value
"""
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from bigdl.orca.data.image.imagenet_dataset import *
from bigdl.orca.data.image.parquet_dataset import _check_arguments
def write_imagenet(imagenet_path: str,
output_path: str, **kwargs):
"""
Write ImageNet data to TFRecords file format. The train and validation data will be
converted into 1024 and 128 TFRecord files, respectively. Each train TFRecord file
contains ~1250 records. Each validation TFRecord file contains ~390 records.
Each record within the TFRecord file is a serialized Example proto. The Example proto
contains the following fields:
image/height: integer, image height in pixels
image/width: integer, image width in pixels
image/colorspace: string, specifying the colorspace, always 'RGB'
image/channels: integer, specifying the number of channels, always 3
image/class/label: integer, identifier for the ground truth for the network
image/class/synset: string, unique WordNet ID specifying the label, e.g., 'n02323233'
    image/format: string, specifying the format, always 'JPEG'
image/filename: string, path to an image file, e.g., '/path/to/example.JPG'
image/encoded: string containing JPEG encoded image in RGB colorspace
Args:
imagenet_path: ImageNet raw data path. Download raw ImageNet data from
http://image-net.org/download-images
            e.g., if you use ImageNet 2012, please extract ILSVRC2012_img_train.tar and
ILSVRC2012_img_val.tar. Download the validation image labels file
https://github.com/tensorflow/models/blob/master/research/slim/datasets/
imagenet_2012_validation_synset_labels.txt and rename as synset_labels.txt
provide imagenet path in this format:
- Training images: train/n03062245/n03062245_4620.JPEG
- Validation Images: validation/ILSVRC2012_val_00000001.JPEG
- Validation Labels: synset_labels.txt
output_path: Output data directory
"""
if not imagenet_path:
raise AssertionError('ImageNet data path should not be empty. Please download '
'from http://image-net.org/download-images and extract .tar '
'and provide raw data directory path')
return convert_imagenet_to_tf_records(imagenet_path, output_path, **kwargs)
def read_imagenet(path: str,
is_training: bool):
"""
Convert ImageNet TFRecords files to tf.data.Dataset
Args:
        path: ImageNet TFRecords data path. It supports a local path or an hdfs path. If you
            use an hdfs path, please make sure the LD_LIBRARY_PATH environment variable is set.
- Training images: train/train-00000-of-01024
- Validation Images: validation/validation-00000-of-00128
is_training: True or False. train dataset or val dataset
"""
import tensorflow as tf
filenames = get_filenames(is_training, path)
dataset = tf.data.Dataset.from_tensor_slices(filenames)
# Convert to individual records
dataset = dataset.interleave(tf.data.TFRecordDataset,
num_parallel_calls=tf.data.experimental.AUTOTUNE)
return dataset
def get_filenames(is_training, data_dir):
"""Return filenames for dataset."""
_NUM_IMAGENET_TRAIN_FILES = 1024
_NUM_IMAGENET_VAL_FILES = 128
if is_training:
return [
os.path.join(data_dir, 'train-%05d-of-01024' % i)
for i in range(_NUM_IMAGENET_TRAIN_FILES)]
else:
return [
os.path.join(data_dir, 'validation-%05d-of-00128' % i)
for i in range(_NUM_IMAGENET_VAL_FILES)]
def write_tfrecord(format, output_path, *args, **kwargs):
"""
Convert input dataset to TFRecords
Args:
format: String. Support "imagenet" format.
output_path: String. output path.
"""
supported_format = {"imagenet"}
if format not in supported_format:
raise ValueError(format + " is not supported, should be 'imagenet'. ")
format_to_function = {"imagenet": (write_imagenet, ["imagenet_path"])}
func, required_args = format_to_function[format]
_check_arguments(format, kwargs, required_args)
func(output_path=output_path, *args, **kwargs)
def read_tfrecord(format, path, *args, **kwargs):
"""
Read TFRecords files
Args:
format: String. Support "imagenet" format.
path: String. TFRecords files path.
"""
supported_format = {"imagenet"}
if format not in supported_format:
raise ValueError(format + " is not supported, should be 'imagenet'. ")
format_to_function = {"imagenet": (read_imagenet, ["is_training"])}
func, required_args = format_to_function[format]
_check_arguments(format, kwargs, required_args)
return func(path=path, *args, **kwargs)
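# A minimal usage sketch of the two entry points above (the paths are
# placeholders; assumes raw ImageNet data laid out as described in
# write_imagenet's docstring):
#
#   write_tfrecord(format="imagenet", output_path="/tmp/imagenet_tfrecords",
#                  imagenet_path="/data/imagenet_raw")
#   train_ds = read_tfrecord(format="imagenet",
#                            path="/tmp/imagenet_tfrecords/train",
#                            is_training=True)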
|
#!/usr/bin/env python3
import unittest
from hammingclasses import HammingEncoder, HammingChecker, random_word
from basictests import corrupt_one_bit, corrupt_two_bits
# ---- Unit test class --- #
class TestStringMethods(unittest.TestCase):
def setUp(self):
"""Sets up Hamming encoders and checkers for parameters 7 and 8"""
# for r = 7
self.encoder7 = HammingEncoder(7)
self.checker7 = HammingChecker(7)
# for r = 8
self.encoder8 = HammingEncoder(8)
self.checker8 = HammingChecker(8)
# set to print string of any length
self.maxDiff = None
# ---- Verifies that tests correctly identify uncorrupted codewords ---- #
def test_no_corruption_7(self):
# r = 7, n = 120
word = random_word(120)
codeword = self.encoder7.encode(word)
self.assertEqual(codeword, self.checker7.correct(codeword))
def test_no_corruption_8(self):
# r = 8, n = 247
word = random_word(247)
codeword = self.encoder8.encode(word)
self.assertEqual(codeword, self.checker8.correct(codeword))
# ---- Verifies that one corrupted bit can be successfully corrected ---- #
def test_corrupt_one_bit_7(self):
# r = 7, n = 120
word = random_word(120)
codeword = self.encoder7.encode(word)
corrupted = corrupt_one_bit(codeword)
self.assertEqual(codeword, self.checker7.correct(corrupted))
def test_corrupt_one_bit_8(self):
# r = 8, n = 247
word = random_word(247)
codeword = self.encoder8.encode(word)
corrupted = corrupt_one_bit(codeword)
self.assertEqual(codeword, self.checker8.correct(corrupted))
# ---- Verifies that correction fails when two bits are corrupted ---- #
def test_corrupt_two_bits_7(self):
# r = 7, n = 120
word = random_word(120)
codeword = self.encoder7.encode(word)
corrupted = corrupt_two_bits(codeword)
self.assertNotEqual(codeword, self.checker7.correct(corrupted))
def test_corrupt_two_bits_8(self):
# r = 8, n = 247
word = random_word(247)
codeword = self.encoder8.encode(word)
corrupted = corrupt_two_bits(codeword)
self.assertNotEqual(codeword, self.checker8.correct(corrupted))
if __name__ == '__main__':
unittest.main()
|
from django.db import models
# Create your models here.
class Messages(models.Model):
from_person = models.TextField()
to_person = models.TextField()
read = models.TextField()
title = models.TextField()
body = models.TextField()
time = models.TextField()
def __repr__(self):
return "Success"
|
from chronometry.date import get_today
import re
RESOLUTION_MARKERS0 = [
'2160p', '1440p', '1080p', '720p', '480p', '360p', '240p',
'4k', '8k'
]
RESOLUTION_MARKERS = RESOLUTION_MARKERS0 + [f'[{x}]' for x in RESOLUTION_MARKERS0]
def is_resolution(string):
return string.lower() in RESOLUTION_MARKERS
def is_year(string, year_min, year_max):
string = str(string)
if re.search(r'^\(\d{4}\)$', string):
return year_min <= int(string.strip('()')) <= year_max
elif re.search(r'^\d{4}$', string):
return year_min <= int(string) <= year_max
else:
return False
def get_title_year_and_resolution(name, sep=None, year_min=1950):
year_max = get_today().year
title_and_year_part = []
resolution_part = []
title_part = True
    if sep is None:
        # Detect the separator before dots are normalized away; otherwise
        # name.split('.') would always yield one token and '.' could never win.
        if len(name.split('.')) > len(name.split(' ')):
            sep = '.'
        else:
            sep = ' '
    # Dots are normalized to spaces, so dot-separated names split on ' ' too.
    name = name.replace('.', ' ')
    if sep == '.':
        sep = ' '
for x in name.split(sep):
if is_resolution(x):
title_part = False
if title_part:
title_and_year_part.append(x)
elif is_resolution(x):
resolution_part.append(x.lower().strip('[]'))
title_part_reversed = []
year = None
for x in reversed(title_and_year_part):
if year is None:
if is_year(string=x, year_min=year_min, year_max=year_max):
year = int(x.strip('()'))
else:
title_part_reversed.append(x)
title_part = reversed(title_part_reversed)
return {
'title': ' '.join(title_part),
'year': year,
'resolution': ' '.join(resolution_part)
}
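# Illustrative call (hypothetical release name, not from a real dataset):
if __name__ == '__main__':
    print(get_title_year_and_resolution('The.Matrix.1999.1080p'))
    # -> {'title': 'The Matrix', 'year': 1999, 'resolution': '1080p'}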
|
import io
ASCII = b'.' * 32 + bytes(range(32, 127)) + b'.' * 129
def dump(data, n=16, base=0):
data = bytes(data)
res = io.StringIO()
for i in range(0, len(data), n):
row = data[i : i + n].hex() + ' ' * n
left = ' '.join(row[k : k + 2] for k in range(0, n, 2))
right = ' '.join(row[k : k + 2] for k in range(n, n * 2, 2))
        text = data[i : i + n].translate(ASCII).decode()
res.write(f'{base + i:016x} | {left} | {right} | {text}\n')
return res.getvalue()
def memprint(data, n=16, base=0, file=None):
print(dump(data, n, base), file=file)
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser('memdump')
parser.add_argument('--address', type=lambda x: int(x, 0), default=0)
parser.add_argument('--offset', type=lambda x: int(x, 0), default=0)
parser.add_argument('--size', type=lambda x: int(x, 0), default=None)
parser.add_argument('filename')
args = parser.parse_args()
fd = sys.stdin.buffer if args.filename == '-' else open(args.filename, 'rb')
with fd:
if args.offset:
fd.seek(args.offset)
data = fd.read(args.size)
memprint(data, base=args.address)
|
""" DQN - Test-time attacks
============ Sample usage ============
No attack, testing a DQN model of Breakout trained without parameter noise:
$> python3 enjoy-adv.py --env Breakout --model-dir \
./data/Breakout/model-100 --video ./Breakout.mp4
No attack, testing a DQN model of Breakout trained with parameter noise
(NoisyNet implementation):
$> python3 enjoy-adv.py --env Breakout --noisy \
--model-dir ./data/Breakout/model-173000 --video ./Breakout.mp4
Whitebox FGSM attack, testing a DQN model of Breakout
trained without parameter noise:
$> python3 enjoy-adv.py --env Breakout --model-dir \
./data/Breakout/model-173000 --attack fgsm --video ./Breakout.mp4
Whitebox FGSM attack, testing a DQN model of Breakout
trained with parameter noise (NoisyNet implementation):
$> python3 enjoy-adv.py --env Breakout --noisy --model-dir \
./data/Breakout/model-173000 --attack fgsm --video ./Breakout.mp4
Blackbox FGSM attack, testing a DQN model of Breakout
trained without parameter noise:
$> python3 enjoy-adv.py --env Breakout --model-dir \
./data/Breakout/model-173000 --attack fgsm --blackbox \
--model-dir2 ./data/Breakout/model-173000-2 --video ./Breakout.mp4
Blackbox FGSM attack, testing a DQN model of Breakout
trained with parameter noise (NoisyNet implementation),
replica model trained without parameter noise:
$> python3 enjoy-adv.py --env Breakout --noisy \
--model-dir ./data/Breakout/model-173000 --attack fgsm --blackbox \
--model-dir2 ./data/Breakout/model2-173000-2 --video ./Breakout.mp4
Blackbox FGSM attack, testing a DQN model of Breakout
trained with parameter noise (NoisyNet implementation),
replica model trained with parameter noise:
$> python3 enjoy-adv.py --env Breakout --noisy --model-dir \
./data/Breakout/model-173000 --attack fgsm --blackbox \
--model-dir2 ./data/Breakout/model2-173000 --noisy2 --video ./Breakout.mp4
"""
import argparse
import gym
import os
import numpy as np
from gym.monitoring import VideoRecorder
import rlattack.common.tf_util as U
from rlattack import deepq
from rlattack.common.misc_util import (
boolean_flag,
SimpleMonitor,
)
from rlattack.common.atari_wrappers_deprecated import wrap_dqn
# V: imports#
import tensorflow as tf
import cv2
from collections import deque
from model import model, dueling_model
from statistics import statistics
class DQNModel:
"""
Creating Q-graph, FGSM graph
Supports loading multiple graphs - needed for blackbox attacks
"""
def __init__(self, env, dueling, noisy, fname):
self.g = tf.Graph()
self.noisy = noisy
self.dueling = dueling
self.env = env
with self.g.as_default():
self.act = deepq.build_act_enjoy(
make_obs_ph=lambda name: U.Uint8Input(
env.observation_space.shape, name=name),
q_func=dueling_model if dueling else model,
num_actions=env.action_space.n,
noisy=noisy
)
self.saver = tf.train.Saver()
self.sess = tf.Session(graph=self.g)
if fname is not None:
print('Loading Model...')
self.saver.restore(self.sess, fname)
def get_act(self):
return self.act
def get_session(self):
return self.sess
def craft_adv(self):
with self.sess.as_default():
with self.g.as_default():
craft_adv_obs = deepq.build_adv(
make_obs_tf=lambda name: U.Uint8Input(
self.env.observation_space.shape, name=name),
q_func=dueling_model if self.dueling else model,
num_actions=self.env.action_space.n,
epsilon=1.0 / 255.0,
noisy=self.noisy,
)
return craft_adv_obs
def parse_args():
parser = argparse.ArgumentParser("Run an already learned DQN model.")
# Environment
parser.add_argument("--env", type=str, required=True,
help="name of the game")
parser.add_argument("--model-dir", type=str, default=None,
help="load model from this directory. ")
parser.add_argument("--video", type=str, default=None,
help="Path to mp4 file where the \
video of first episode will be recorded.")
boolean_flag(parser, "stochastic", default=True,
help="whether or not to use stochastic \
actions according to models eps value")
boolean_flag(parser, "dueling", default=False,
help="whether or not to use dueling model")
# V: Attack Arguments#
parser.add_argument("--model-dir2", type=str, default=None,
help="load adversarial model from \
this directory (blackbox attacks). ")
parser.add_argument("--attack", type=str, default=None,
help="Method to attack the model.")
boolean_flag(parser, "noisy", default=False,
help="whether or not to NoisyNetwork")
boolean_flag(parser, "noisy2", default=False,
help="whether or not to NoisyNetwork")
boolean_flag(parser, "blackbox", default=False,
help="whether or not to NoisyNetwork")
return parser.parse_args()
def make_env(game_name):
env = gym.make(game_name + "NoFrameskip-v4")
env = SimpleMonitor(env)
env = wrap_dqn(env)
return env
def play(env, act, craft_adv_obs, craft_adv_obs2, stochastic, video_path,
attack, m_target, m_adv):
num_episodes = 0
num_moves = 0
num_transfer = 0
    video_recorder = VideoRecorder(
        env, video_path, enabled=video_path is not None)
obs = env.reset()
while True:
env.unwrapped.render()
video_recorder.capture_frame()
# V: Attack #
if attack is not None:
# Craft adv. examples
with m_adv.get_session().as_default():
adv_obs = \
craft_adv_obs(np.array(obs)[None],
stochastic_adv=stochastic)[0]
with m_target.get_session().as_default():
action = act(np.array(adv_obs)[None],
stochastic=stochastic)[0]
action2 = act(np.array(obs)[None], stochastic=stochastic)[0]
num_moves += 1
if action != action2:
num_transfer += 1
else:
# Normal
action = act(np.array(obs)[None], stochastic=stochastic)[0]
obs, rew, done, info = env.step(action)
if done:
obs = env.reset()
if len(info["rewards"]) > num_episodes:
if len(info["rewards"]) == 1 and video_recorder.enabled:
# save video of first episode
print("Saved video.")
video_recorder.close()
video_recorder.enabled = False
print('Reward: ' + str(info["rewards"][-1]))
num_episodes = len(info["rewards"])
print('Episode: ' + str(num_episodes))
                # Guard against division by zero when no attack was run.
                if num_moves:
                    success = float(num_transfer) / num_moves * 100.0
                    print("Percentage of successful attacks: " +
                          str(success))
num_moves = 0
num_transfer = 0
if __name__ == '__main__':
args = parse_args()
env = make_env(args.env)
g1 = tf.Graph()
g2 = tf.Graph()
with g1.as_default():
m1 = DQNModel(env, args.dueling, args.noisy,
os.path.join(args.model_dir, "saved"))
if args.blackbox:
with g2.as_default():
m2 = DQNModel(env, args.dueling, args.noisy2,
os.path.join(args.model_dir2, "saved"))
with m2.get_session().as_default():
craft_adv_obs = m2.craft_adv()
with m1.get_session().as_default():
craft_adv_obs2 = m1.craft_adv()
play(env, m1.get_act(), craft_adv_obs, craft_adv_obs2,
args.stochastic, args.video, args.attack, m1, m2)
else:
with m1.get_session().as_default():
craft_adv_obs = m1.craft_adv()
play(env, m1.get_act(), craft_adv_obs, None, args.stochastic,
args.video, args.attack, m1, m1)
|
import urllib.request, urllib.parse, urllib.error
import json
import ssl
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
address = input('Enter location: ')
serviceurl = "http://py4e-data.dr-chuck.net/geojson?"
url = serviceurl + urllib.parse.urlencode(
{'address': address})
print("Retrieving http: ", url)
connection = urllib.request.urlopen(url, context=ctx)
data = connection.read().decode()
print("Retrieved", len(data), "characters")
js = json.loads(data)
print("Place id ", js["results"][0]["place_id"])
|
# -*- coding: utf-8 -*-
"""
Содержит в себе информацию о доменах, запускаемых в других процессах / на других
серверах.
"""
from openre.domain.base import DomainBase
import logging
from openre.helpers import StatsMixin
from openre.layer import RemoteLayer
from copy import deepcopy
class RemoteDomainBase(DomainBase):
def __init__(self, *args, **kwargs):
super(RemoteDomainBase, self).__init__(*args, **kwargs)
self.layers = []
# domain layers config
self.layers_config = deepcopy(self.config['layers'])
def deploy_layers(self):
"""
Create layers
"""
logging.debug('Deploy domain (name: %s)', self.name)
for layer_config in self.layers_config:
layer = RemoteLayer(layer_config)
self.layers.append(layer)
layer_config['layer'] = layer
for layer_config in self.layers_config:
for connect in layer_config.get('connect', []):
connect['domain_layers'] = []
for layer in self.layers:
for connect in layer_config.get('connect', []):
if connect['name'] == layer.name:
connect['domain_layers'].append(layer)
def deploy_neurons(self):
pass
def pre_deploy_synapses(self):
pass
def deploy_synapses_async(self):
pass
def deploy_synapses(self):
pass
def post_deploy_synapses(self):
pass
def deploy_indexes(self):
pass
def deploy_device(self):
pass
def create_synapses(self):
pass
def connect_layers(self):
pass
def create_neurons(self):
pass
def connect_neurons(self, pre_address, post_address, synapse_address):
pass
def send_spikes(self):
pass
def receive_spikes(self):
pass
def register_spike(self, receiver_neuron_index):
pass
def tick(self):
pass
def register_input_layer_data(self, layer_index, data):
pass
class RemoteDomainDummy(StatsMixin):
"""
Do nothing
"""
def __init__(self, *args, **kwargs):
super(RemoteDomainDummy, self).__init__()
def __getattr__(self, name):
def api_call(*args, **kwargs):
pass
return api_call
def test_remote_domain():
from openre import OpenRE
from openre.domain import create_domain_factory, Domain
class RemoteDomainTest(StatsMixin):
"""
Тестовый прокси к удаленному домену.
"""
def __init__(self, config, net, domain_index):
super(RemoteDomainTest, self).__init__()
self.config = config
self.net = net
self.name = self.config['name']
logging.debug('Create remote domain (name: %s)', config['name'])
self.index = domain_index
def __setattr__(self, name, value):
super(RemoteDomainTest, self).__setattr__(name, value)
def __getattr__(self, name):
def api_call(*args, **kwargs):
self.stat_inc(name)
return
return api_call
config = {
'layers': [
{
'name': 'V1',
'threshold': 30000,
'relaxation': 1000,
'width': 30,
'height': 30,
'connect': [
{
'name': 'V2',
'radius': 3,
},
],
},
{
'name': 'V2',
'threshold': 30000,
'width': 10,
'height': 10,
'connect': [
{
'name': 'V2',
'radius': 3,
},
],
},
],
'domains': [
{
'name' : 'D1',
'layers' : [
{'name': 'V1'},
],
},
{
'name' : 'D2',
'layers' : [
{'name': 'V2'},
],
},
],
}
ore = OpenRE(config)
ore.deploy(create_domain_factory(Domain, RemoteDomainTest, ['D1']))
local = ore.domains[0]
remote = ore.domains[1]
assert local.name == 'D1'
assert local.index == 0
assert isinstance(local, Domain)
assert remote.name == 'D2'
assert remote.index == 1
assert isinstance(remote, RemoteDomainTest)
assert remote.stat('send_synapse') == 17424
assert remote.stat('deploy_layers') == 1
assert remote.stat('deploy_neurons') == 1
assert remote.stat('pre_deploy_synapses') == 1
assert remote.stat('deploy_indexes') == 1
assert remote.stat('deploy_device') == 1
config = {
'layers': [
{
'name': 'V1',
'threshold': 30000,
'relaxation': 1000,
'width': 2,
'height': 2,
'connect': [
{
'name': 'V2',
'radius': 2,
},
],
},
{
'name': 'V2',
'threshold': 30000,
'width': 2,
'height': 2,
},
],
'domains': [
{
'name' : 'D1',
'layers' : [
{'name': 'V1'},
],
},
{
'name' : 'D2',
'layers' : [
{'name': 'V2'},
],
},
],
}
ore = OpenRE(config)
ore.deploy(create_domain_factory(Domain, RemoteDomainTest, ['D1']))
local = ore.domains[0]
remote = ore.domains[1]
# 4 neurons in V1 connects to 4 neurons in V2 with radius 2
assert remote.stat('send_synapse') == 4*4
dummy = RemoteDomainDummy({'name':'D'}, None, 0)
assert dummy.deploy_layers() is None
|
"""AyudaEnPython: https://www.facebook.com/groups/ayudapython
Dadas N placas de automóvil, obtener la cantidad de placas que
contienen una secuencia determinada SEC de caracteres. Construya
el algoritmo.
Ejemplo:
Si N = 4 Placas: 649XGT, 1365SDG, 6789ERT, 1267SDG SEC = "SDG"
Entonces: Cantidad de placas = 2
"""
from typing import List
def contar_placas(placas: List[str], secuencia: str) -> int:
"""
>>> placas = ["649XGT", "1365SDG", "6789ERT", "1267SDG"]
>>> contar_placas(placas, "SDG")
2
"""
return len([placa for placa in placas if secuencia in placa])
def main():
n = int(input("Número de placas: "))
placas = [input(f"Placa N° {i}: ") for i in range(1, n + 1)]
secuencia = input("Secuencia: ")
print(f"Cantidad de placas: {contar_placas(placas, secuencia)}")
if __name__ == "__main__":
# import doctest
# doctest.testmod()
main()
|
#!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Unlimited developers
import asyncio
from test_framework.util import assert_equal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.loginit import logging
from test_framework.electrumutil import bitcoind_electrum_args, \
ElectrumConnection
def versiontuple(v):
v = tuple(map(int, (v.split("."))))
if len(v) == 2:
v = v + (0,)
return v
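# Example: two-part versions are padded with a trailing zero so comparisons
# are well-defined, e.g. versiontuple("1.4") == (1, 4, 0) and
# versiontuple("1.4.1") > versiontuple("1.4").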
class ElectrumBasicTests(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [bitcoind_electrum_args()]
def run_test(self):
n = self.nodes[0]
# Bump out of IBD
n.generate(1)
async def async_tests():
electrum_client = ElectrumConnection()
await electrum_client.connect()
res = await electrum_client.call("server.features")
# Keys that the server MUST support
assert_equal(n.getblockhash(0), res['genesis_hash'])
assert_equal("sha256", res['hash_function'])
assert(versiontuple(res['protocol_min']) >= versiontuple("1.4"))
assert(versiontuple(res['protocol_max']) >= versiontuple("1.4"))
assert(len(res['server_version']))
loop = asyncio.get_event_loop()
loop.run_until_complete(async_tests())
if __name__ == '__main__':
ElectrumBasicTests().main()
|
# -*- coding: utf8 -*-
import codecs
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
"""
打包的用的setup必须引入,
"""
def read(fname):
return codecs.open(os.path.join(os.path.dirname(__file__), fname)).read()
NAME = "openastro"
"""
名字,一般放你包的名字即可
"""
PACKAGES = ["openastro"]
DESCRIPTION = "An python package (mostly a wrapper) for xtelescope.net," \
" an open observatory project which started in Xinjiang, China."
LONG_DESCRIPTION = ""
"""
参见read方法说明
"""
KEYWORDS = ("astropy", "openastro", "astronomy",)
"""
关于当前包的一些关键字,方便PyPI进行分类。
"""
AUTHOR = "Wen Gu"
AUTHOR_EMAIL = "emptyset110@gmail.com"
URL = "http://www.xtelescope.net"
VERSION = "0.2"
LICENSE = "Apache Software License"
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
"astropy"
],
entry_points='''
[console_scripts]
''',
keywords=KEYWORDS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
license=LICENSE,
packages=PACKAGES,
include_package_data=True,
zip_safe=True
)
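# A standard setuptools workflow for this file (generic, not project-specific):
#   python setup.py sdist    # build a source distribution
#   pip install .            # install the package locally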
|
# Copyright (c) 2018 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module implements keywords to manipulate NSH-SFC data structures using
Honeycomb REST API."""
from resources.libraries.python.HTTPRequest import HTTPCodes
from resources.libraries.python.honeycomb.HoneycombSetup import HoneycombError
from resources.libraries.python.honeycomb.HoneycombUtil \
import HoneycombUtil as HcUtil
from resources.libraries.python.honeycomb.HoneycombUtil \
import DataRepresentation
class NSHKeywords(object):
"""Implementation of keywords which make it possible to:
- add and remove NSH entries,
- get operational data about NSH entries,
- add and remove NSH maps,
- get operational data about NSH maps.
"""
def __init__(self):
pass
@staticmethod
def _set_nsh_properties(node, path, data=None):
"""Set NSH properties and check the return code.
:param node: Honeycomb node.
:param path: Path which is added to the base path to identify the data.
:param data: The new data to be set. If None, the item will be removed.
:type node: dict
:type path: str
:type data: dict
:returns: Content of response.
:rtype: bytearray
:raises HoneycombError: If the status code in response to PUT is not
OK or ACCEPTED.
"""
if data:
status_code, resp = HcUtil. \
put_honeycomb_data(node, "config_nsh", data, path,
data_representation=DataRepresentation.JSON)
else:
status_code, resp = HcUtil. \
delete_honeycomb_data(node, "config_nsh", path)
if status_code not in (HTTPCodes.OK, HTTPCodes.ACCEPTED):
raise HoneycombError(
"The configuration of NSH-SFC was not successful. "
"Status code: {0}.".format(status_code))
return resp
@staticmethod
def add_nsh_entry(node, name, data):
"""Add an NSH entry to the list of entries. The keyword does
not validate given data.
:param node: Honeycomb node.
:param name: Name for the NSH entry.
:param data: Settings for the new entry.
:type node: dict
:type name: str
:type data: dict
:returns: Content of response.
:rtype: bytearray
"""
path = "/nsh-entries/nsh-entry/{0}".format(name)
return NSHKeywords._set_nsh_properties(node, path, data)
@staticmethod
def add_nsh_map(node, name, data):
"""Add an NSH map to the list of maps. The keyword does
not validate given data.
:param node: Honeycomb node.
:param name: Name for the NSH map.
:param data: Settings for the new map.
:type node: dict
:type name: str
:type data: dict
:returns: Content of response.
:rtype: bytearray
"""
path = "/nsh-maps/nsh-map/{0}".format(name)
return NSHKeywords._set_nsh_properties(node, path, data)
@staticmethod
def remove_nsh_entry(node, name):
"""Remove an NSH entry from the list of entries.
:param node: Honeycomb node.
:param name: Name of the NSH entry.
:type node: dict
:type name: str
:returns: Content of response.
:rtype: bytearray
"""
path = "/nsh-entries/nsh-entry/{0}".format(name)
return NSHKeywords._set_nsh_properties(node, path)
@staticmethod
def remove_nsh_map(node, name):
"""Remove an NSH map from the list of maps.
:param node: Honeycomb node.
:param name: Name of the NSH map.
:type node: dict
:type name: str
:returns: Content of response.
:rtype: bytearray
"""
path = "/nsh-maps/nsh-map/{0}".format(name)
return NSHKeywords._set_nsh_properties(node, path)
@staticmethod
def get_nsh_oper_data(node, entry_name=None, map_name=None):
"""Get all NSH operational data present on the node. Optionally
filter out data for a specific entry or map.
:param node: Honeycomb node.
:param entry_name: Name of a specific NSH entry. Optional.
:param map_name: Name of a specific NSH map. Optional. Do not use
together with entry_name.
:type node: dict
:type entry_name: str
:type map_name: str
        :returns: NSH operational data for the node (or the selected entry/map).
        :rtype: bytearray
"""
if entry_name:
path = "/nsh-entries/nsh-entry/{0}".format(entry_name)
elif map_name:
path = "/nsh-maps/nsh-map/{0}".format(map_name)
else:
path = ''
status_code, resp = HcUtil. \
get_honeycomb_data(node, "oper_nsh", path)
if status_code != HTTPCodes.OK:
raise HoneycombError(
"Not possible to get operational information about the "
"classify tables. Status code: {0}.".format(status_code))
return resp
@staticmethod
def clear_nsh_settings(node):
"""Remove the entire NSH container with all of its entries and maps.
:param node: Honeycomb node.
:type node: dict
:returns: Content of response.
:rtype: bytearray
"""
return NSHKeywords._set_nsh_properties(node, '')
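# A minimal usage sketch (the node dict below is hypothetical; its exact shape
# is whatever HoneycombUtil expects for a Honeycomb node):
#   node = {"host": "10.0.0.1"}  # hypothetical placeholder
#   NSHKeywords.add_nsh_entry(node, "entry1", {"nsh-entry": [...]})
#   NSHKeywords.get_nsh_oper_data(node, entry_name="entry1")
#   NSHKeywords.remove_nsh_entry(node, "entry1")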
|
"""
This script adds MQTT discovery support for Shellies devices.
"""
ATTR_MANUFACTURER = "Allterco Robotics"
ATTR_SHELLY = "Shelly"
ATTR_MODEL_SHELLY1 = "Shelly 1"
ATTR_MODEL_SHELLY1PM = "Shelly 1PM"
ATTR_MODEL_SHELLY2 = "Shelly 2"
ATTR_MODEL_SHELLY25 = "Shelly 2.5"
ATTR_MODEL_SHELLY3EM = "Shelly 3EM"
ATTR_MODEL_SHELLY4PRO = "Shelly 4Pro"
ATTR_MODEL_SHELLYAIR = "Shelly Air"
ATTR_MODEL_SHELLYBULB = "Shelly Bulb"
ATTR_MODEL_SHELLYBUTTON1 = "Shelly Button1"
ATTR_MODEL_SHELLYDIMMER = "Shelly Dimmer"
ATTR_MODEL_SHELLYDUO = "Shelly DUO"
ATTR_MODEL_SHELLYDW = "Shelly Door/Window"
ATTR_MODEL_SHELLYEM = "Shelly EM"
ATTR_MODEL_SHELLYFLOOD = "Shelly Flood"
ATTR_MODEL_SHELLYGAS = "Shelly Gas"
ATTR_MODEL_SHELLYHT = "Shelly H&T"
ATTR_MODEL_SHELLYI3 = "Shelly i3"
ATTR_MODEL_SHELLYPLUG = "Shelly Plug"
ATTR_MODEL_SHELLYPLUG_S = "Shelly Plug S"
ATTR_MODEL_SHELLYRGBW2 = "Shelly RGBW2"
ATTR_MODEL_SHELLYSENSE = "Shelly Sense"
ATTR_MODEL_SHELLYSMOKE = "Shelly Smoke"
ATTR_MODEL_SHELLYVINTAGE = "Shelly Vintage"
ATTR_BATTERY = "battery"
ATTR_CHARGER = "charger"
ATTR_COLOR_0_STATUS = "color/0/status"
ATTR_CONCENTRATION = "concentration"
ATTR_CURRENT = "current"
ATTR_ENERGY = "energy"
ATTR_EXT_TEMPERATURE = "ext_temperature"
ATTR_FAN = "fan"
ATTR_FLOOD = "flood"
ATTR_GAS = "gas"
ATTR_HEAT = "heat"
ATTR_HUMIDITY = "humidity"
ATTR_ILLUMINANCE = "illuminance"
ATTR_INPUT = "input"
ATTR_INPUT_0 = "input/0"
ATTR_INPUT_1 = "input/1"
ATTR_INPUT_2 = "input/2"
ATTR_LIGHT = "light"
ATTR_LOADERROR = "loaderror"
ATTR_LONGPUSH = "longpush"
ATTR_LONGPUSH_0 = "longpush/0"
ATTR_LONGPUSH_1 = "longpush/1"
ATTR_LUX = "lux"
ATTR_MOISTURE = "moisture"
ATTR_MOTION = "motion"
ATTR_OPENING = "opening"
ATTR_OPERATION = "operation"
ATTR_OVERLOAD = "overload"
ATTR_OVERPOWER = "overpower"
ATTR_OVERTEMPERATURE = "overtemperature"
ATTR_POWER = "power"
ATTR_POWER_FACTOR = "pf"
ATTR_PROBLEM = "problem"
ATTR_REACTIVE_POWER = "reactive_power"
ATTR_RELAY = "relay"
ATTR_RETURNED_ENERGY = "returned_energy"
ATTR_RGBW = "rgbw"
ATTR_ROLLER = "roller"
ATTR_SELF_TEST = "self_test"
ATTR_DOUBLE_SHORTPUSH = "double shortpush"
ATTR_TRIPLE_SHORTPUSH = "triple shortpush"
ATTR_SHORTPUSH = "shortpush"
ATTR_SHORTPUSH_0 = "shortpush/0"
ATTR_SHORTPUSH_1 = "shortpush/1"
ATTR_SMOKE = "smoke"
ATTR_SWITCH = "switch"
ATTR_TEMPERATURE = "temperature"
ATTR_TILT = "tilt"
ATTR_TOTAL = "total"
ATTR_TOTAL_RETURNED = "total_returned"
ATTR_TOTALWORKTIME = "totalworktime"
ATTR_VIBRATION = "vibration"
ATTR_VOLTAGE = "voltage"
ATTR_WHITE = "white"
ATTR_POWER_AC = "ac"
CONF_DEVELOP = "develop"
CONF_DISCOVERY_PREFIX = "discovery_prefix"
CONF_FORCE_UPDATE_SENSORS = "force_update_sensors"
CONF_FRIENDLY_NAME = "friendly_name"
CONF_FW_VER = "fw_ver"
CONF_ID = "id"
CONF_IGNORED_DEVICES = "ignored_devices"
CONF_MAC = "mac"
CONF_MODE = "mode"
CONF_POWERED = "powered"
CONF_PUSH_OFF_DELAY = "push_off_delay"
CONF_QOS = "qos"
DEFAULT_DISC_PREFIX = "homeassistant"
KEY_AVAILABILITY_TOPIC = "avty_t"
KEY_COMMAND_TOPIC = "cmd_t"
KEY_DEVICE = "dev"
KEY_DEVICE_CLASS = "dev_cla"
KEY_EXPIRE_AFTER = "exp_aft"
KEY_FORCE_UPDATE = "frc_upd"
KEY_IDENTIFIERS = "ids"
KEY_MANUFACTURER = "mf"
KEY_MODEL = "mdl"
KEY_NAME = "name"
KEY_OFF_DELAY = "off_delay"
KEY_OPTIMISTIC = "opt"
KEY_PAYLOAD = "payload"
KEY_PAYLOAD_AVAILABLE = "pl_avail"
KEY_PAYLOAD_CLOSE = "pl_cls"
KEY_PAYLOAD_NOT_AVAILABLE = "pl_not_avail"
KEY_PAYLOAD_OFF = "pl_off"
KEY_PAYLOAD_ON = "pl_on"
KEY_PAYLOAD_OPEN = "pl_open"
KEY_PAYLOAD_STOP = "pl_stop"
KEY_POSITION_TOPIC = "pos_t"
KEY_QOS = "qos"
KEY_RETAIN = "retain"
KEY_SET_POSITION_TOPIC = "set_pos_t"
KEY_STATE_TOPIC = "stat_t"
KEY_SW_VERSION = "sw"
KEY_TOPIC = "topic"
KEY_UNIQUE_ID = "uniq_id"
KEY_UNIT = "unit_of_meas"
KEY_VALUE_TEMPLATE = "val_tpl"
TOPIC_INPUT_EVENT_0 = "input_event/0"
TPL_BATTERY = "{{value|float|round}}"
TPL_CURRENT = "{{value|float|round(2)}}"
TPL_DOUBLE_SHORTPUSH = "{{value_json.event == ^SS^}}"
TPL_ENERGY_WH = "{{(value|float/1000)|round(2)}}"
TPL_ENERGY_WMIN = "{{(value|float/60/1000)|round(2)}}"
TPL_HUMIDITY = "{{value|float|round(1)}}"
TPL_LONGPUSH = "{{value_json.event == ^L^}}"
TPL_LUX = "{{value|float|round}}"
TPL_OVERPOWER = "{% if value_json.overpower == true %}ON{% else %}OFF{% endif %}"
TPL_OVERPOWER_RELAY = "{% if value == ^overpower^ %}ON{% else %}OFF{% endif %}"
TPL_POWER = "{{value|float|round(1)}}"
TPL_POWER_FACTOR = "{{value|float*100|round}}"
TPL_SHORTPUSH = "{{value_json.event == ^S^}}"
TPL_TEMPERATURE = "{{value|float|round(1)}}"
TPL_TILT = "{{value|float}}"
TPL_TRIPLE_SHORTPUSH = "{{value_json.event == ^SSS^}}"
TPL_VOLTAGE = "{{value|float|round(1)}}"
UNIT_AMPERE = "A"
UNIT_CELSIUS = "°C"
UNIT_DEGREE = "°"
UNIT_KWH = "kWh"
UNIT_LUX = "lx"
UNIT_PERCENT = "%"
UNIT_SECONDS = "s"
UNIT_VAR = "VAR"
UNIT_VOLT = "V"
UNIT_WATT = "W"
VALUE_CLOSE = "close"
VALUE_FALSE = "false"
VALUE_OFF = "off"
VALUE_ON = "on"
VALUE_OPEN = "open"
VALUE_STOP = "stop"
VALUE_TRUE = "true"
PL_0_1 = {VALUE_ON: "0", VALUE_OFF: "1"}
PL_1_0 = {VALUE_ON: "1", VALUE_OFF: "0"}
PL_OPEN_CLOSE = {VALUE_ON: VALUE_OPEN, VALUE_OFF: VALUE_CLOSE}
PL_TRUE_FALSE = {VALUE_ON: VALUE_TRUE, VALUE_OFF: VALUE_FALSE}
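# Each PL_* map pairs the Home Assistant on/off states with the payloads the
# device publishes, e.g. PL_1_0 means "1" is ON and "0" is OFF on the topic.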
expire_after = 43200
off_delay = 2
def get_device_config(id):
result = data.get(id, data.get(id.lower(), {}))
if not result:
result = {}
    try:
        # A list here means the device options were misconfigured in YAML.
        if isinstance(result, list):
            raise TypeError
        if len(result) > 0:
            result[0]
    except TypeError:
        logger.error("Wrong configuration for %s", id)
        result = {}
    finally:
        # Returning from 'finally' also swallows any other exception raised
        # above (e.g. KeyError from result[0] on a non-empty dict).
        return result
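# Usage sketch (assumption: this runs as a Home Assistant python_script, so
# `data`, `logger` and `hass` are injected by the host; the id and option
# below are hypothetical):
#   data = {"shelly1-AABBCC": {"relay-0-name": "Garage Door"}}
#   get_device_config("shelly1-AABBCC")  # -> {"relay-0-name": "Garage Door"}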
def mqtt_publish(topic, payload, retain, qos):
service_data = {
KEY_TOPIC: topic,
KEY_PAYLOAD: payload,
KEY_RETAIN: retain,
KEY_QOS: qos,
}
logger.debug("Sending to MQTT broker: %s %s", topic, payload)
hass.services.call("mqtt", "publish", service_data, False)
retain = True
qos = 0
roller_mode = False
no_battery_sensor = False
id = data.get(CONF_ID)
mac = data.get(CONF_MAC).lower()
fw_ver = data.get(CONF_FW_VER)
ignored = [element.lower() for element in data.get(CONF_IGNORED_DEVICES, [])]
if not id:
    raise ValueError(f"{id} is a wrong id argument")
if not mac:
    raise ValueError(f"{mac} is a wrong mac argument")
if not fw_ver:
    raise ValueError(f"{fw_ver} is a wrong fw_ver argument")
logger.debug("id: %s, mac: %s, fw_ver: %s", id, mac, fw_ver)
try:
if int(data.get(CONF_QOS, 0)) in [0, 1, 2]:
qos = int(data.get(CONF_QOS, 0))
else:
raise ValueError()
except ValueError:
logger.error("Wrong qos argument, the default value 0 was used")
disc_prefix = data.get(CONF_DISCOVERY_PREFIX, DEFAULT_DISC_PREFIX)
develop = data.get(CONF_DEVELOP, False)
if develop:
disc_prefix = "develop"
retain = False
logger.error("DEVELOP MODE !!!")
battery_powered = False
bin_sensors = []
bin_sensors_classes = []
bin_sensors_pl = []
bin_sensors_topics = []
bin_sensors_tpls = []
ext_sensor_type = None
ext_sensors = 0
lights_bin_sensors = []
lights_bin_sensors_pl = []
lights_sensors = []
lights_sensors_classes = []
lights_sensors_tpls = []
lights_sensors_units = []
meters = 0
relay_components = [ATTR_SWITCH, ATTR_LIGHT, ATTR_FAN]
relays = 0
relays_bin_sensors = []
relays_bin_sensors_classes = []
relays_bin_sensors_pl = []
relays_bin_sensors_topics = []
relays_bin_sensors_tpls = []
relays_sensors = []
relays_sensors_classes = []
relays_sensors_tpls = []
relays_sensors_units = []
rgbw_lights = 0
rollers = 0
sensors = []
sensors_classes = []
sensors_tpls = []
sensors_units = []
white_lights = 0
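# Note: the per-device blocks below fill parallel lists; index i of
# *_sensors, *_units, *_classes and *_tpls all describe the same sensor.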
if id.rsplit("-", 1)[0] == "shelly1":
model = ATTR_MODEL_SHELLY1
relays = 1
relays_bin_sensors = [ATTR_INPUT, ATTR_LONGPUSH, ATTR_SHORTPUSH]
relays_bin_sensors_pl = [PL_1_0, PL_1_0, PL_0_1]
relays_bin_sensors_topics = [None, ATTR_LONGPUSH, ATTR_LONGPUSH]
relays_bin_sensors_tpls = [None, None, None]
relays_bin_sensors_classes = [None, None, None]
ext_sensors = 3
if id.rsplit("-", 1)[0] == "shelly1pm":
model = ATTR_MODEL_SHELLY1PM
relays = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_INPUT, ATTR_LONGPUSH, ATTR_SHORTPUSH, ATTR_OVERPOWER]
relays_bin_sensors_pl = [PL_1_0, PL_1_0, PL_0_1, None]
relays_bin_sensors_topics = [None, ATTR_LONGPUSH, ATTR_LONGPUSH, ATTR_RELAY]
relays_bin_sensors_tpls = [None, None, None, TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [None, None, None, ATTR_POWER]
sensors = [ATTR_TEMPERATURE]
sensors_classes = sensors
sensors_units = [UNIT_CELSIUS]
sensors_tpls = [TPL_TEMPERATURE]
bin_sensors = [ATTR_OVERTEMPERATURE]
bin_sensors_classes = [ATTR_HEAT]
bin_sensors_pl = [PL_1_0]
ext_sensors = 3
if id.rsplit("-", 1)[0] == "shellyair":
model = ATTR_MODEL_SHELLYAIR
relays = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_INPUT]
relays_bin_sensors_pl = [PL_1_0]
relays_bin_sensors_tpls = [None]
relays_bin_sensors_classes = [None]
sensors = [ATTR_TEMPERATURE, ATTR_TOTALWORKTIME]
sensors_classes = [ATTR_TEMPERATURE, None]
sensors_units = [UNIT_CELSIUS, UNIT_SECONDS]
sensors_tpls = [TPL_TEMPERATURE, None]
bin_sensors = [ATTR_OVERTEMPERATURE]
bin_sensors_classes = [ATTR_HEAT]
bin_sensors_pl = [PL_1_0]
ext_sensors = 1
if id.rsplit("-", 1)[0] == "shellyswitch":
model = ATTR_MODEL_SHELLY2
relays = 2
rollers = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_INPUT, ATTR_LONGPUSH, ATTR_SHORTPUSH, ATTR_OVERPOWER]
relays_bin_sensors_pl = [PL_1_0, PL_1_0, PL_0_1, None]
relays_bin_sensors_topics = [None, ATTR_LONGPUSH, ATTR_LONGPUSH, ATTR_RELAY]
relays_bin_sensors_tpls = [None, None, None, TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [None, None, None, ATTR_POWER]
if id.rsplit("-", 1)[0] == "shellyswitch25":
model = ATTR_MODEL_SHELLY25
relays = 2
rollers = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_INPUT, ATTR_LONGPUSH, ATTR_SHORTPUSH, ATTR_OVERPOWER]
relays_bin_sensors_pl = [PL_1_0, PL_1_0, PL_0_1, None]
relays_bin_sensors_topics = [None, ATTR_LONGPUSH, ATTR_LONGPUSH, ATTR_RELAY]
relays_bin_sensors_tpls = [None, None, None, TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [None, None, None, ATTR_POWER]
sensors = [ATTR_TEMPERATURE]
sensors_classes = sensors
sensors_units = [UNIT_CELSIUS]
sensors_tpls = [TPL_TEMPERATURE]
bin_sensors = [ATTR_OVERTEMPERATURE]
bin_sensors_classes = [ATTR_HEAT]
bin_sensors_pl = [PL_1_0]
if id.rsplit("-", 1)[0] == "shellyplug":
model = ATTR_MODEL_SHELLYPLUG
relays = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_OVERPOWER]
relays_bin_sensors_pl = [None]
relays_bin_sensors_topics = [ATTR_RELAY]
relays_bin_sensors_tpls = [TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [ATTR_POWER]
if id.rsplit("-", 1)[0] == "shellyplug-s":
model = ATTR_MODEL_SHELLYPLUG_S
relays = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_OVERPOWER]
relays_bin_sensors_pl = [None]
relays_bin_sensors_topics = [ATTR_RELAY]
relays_bin_sensors_tpls = [TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [ATTR_POWER]
sensors = [ATTR_TEMPERATURE]
sensors_classes = [ATTR_TEMPERATURE]
sensors_units = [UNIT_CELSIUS]
sensors_tpls = [TPL_TEMPERATURE]
bin_sensors = [ATTR_OVERTEMPERATURE]
bin_sensors_classes = [ATTR_HEAT]
bin_sensors_pl = [PL_1_0]
if id.rsplit("-", 1)[0] == "shelly4pro":
model = ATTR_MODEL_SHELLY4PRO
relays = 4
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_OVERPOWER]
relays_bin_sensors_pl = [None]
relays_bin_sensors_topics = [ATTR_RELAY]
relays_bin_sensors_tpls = [TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [ATTR_POWER]
if id.rsplit("-", 1)[0] == "shellyht":
model = ATTR_MODEL_SHELLYHT
sensors = [ATTR_TEMPERATURE, ATTR_HUMIDITY, ATTR_BATTERY]
sensors_classes = sensors
sensors_units = [UNIT_CELSIUS, UNIT_PERCENT, UNIT_PERCENT]
sensors_tpls = [TPL_TEMPERATURE, TPL_HUMIDITY, TPL_BATTERY]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellygas":
model = ATTR_MODEL_SHELLYGAS
sensors = [ATTR_OPERATION, ATTR_GAS, ATTR_SELF_TEST, ATTR_CONCENTRATION]
sensors_classes = [None, None, None, None]
sensors_tpls = [None, None, None, None]
sensors_units = [None, None, None, None]
if id.rsplit("-", 1)[0] == "shellybutton1":
model = ATTR_MODEL_SHELLYBUTTON1
sensors = [ATTR_BATTERY]
sensors_classes = sensors
sensors_units = [UNIT_PERCENT]
sensors_tpls = [TPL_BATTERY]
bin_sensors = [
ATTR_INPUT_0,
ATTR_SHORTPUSH,
ATTR_DOUBLE_SHORTPUSH,
ATTR_TRIPLE_SHORTPUSH,
ATTR_LONGPUSH,
]
bin_sensors_tpls = [
None,
TPL_SHORTPUSH,
TPL_DOUBLE_SHORTPUSH,
TPL_TRIPLE_SHORTPUSH,
TPL_LONGPUSH,
]
bin_sensors_pl = [PL_1_0, None, None, None, None]
bin_sensors_topics = [
None,
TOPIC_INPUT_EVENT_0,
TOPIC_INPUT_EVENT_0,
TOPIC_INPUT_EVENT_0,
TOPIC_INPUT_EVENT_0,
]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellydw":
model = ATTR_MODEL_SHELLYDW
sensors = [ATTR_LUX, ATTR_BATTERY, ATTR_TILT]
sensors_classes = [ATTR_ILLUMINANCE, ATTR_BATTERY, None]
sensors_units = [UNIT_LUX, UNIT_PERCENT, UNIT_DEGREE]
sensors_tpls = [TPL_LUX, TPL_BATTERY, TPL_TILT]
bin_sensors = [ATTR_OPENING, ATTR_VIBRATION]
bin_sensors_classes = bin_sensors
bin_sensors_pl = [PL_OPEN_CLOSE, PL_1_0]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellysmoke":
model = ATTR_MODEL_SHELLYSMOKE
sensors = [ATTR_TEMPERATURE, ATTR_BATTERY]
sensors_classes = sensors
sensors_units = [UNIT_CELSIUS, UNIT_PERCENT]
sensors_tpls = [TPL_TEMPERATURE, TPL_BATTERY]
bin_sensors = [ATTR_SMOKE]
bin_sensors_classes = bin_sensors
bin_sensors_pl = [PL_TRUE_FALSE]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellysense":
model = ATTR_MODEL_SHELLYSENSE
sensors = [ATTR_TEMPERATURE, ATTR_HUMIDITY, ATTR_LUX, ATTR_BATTERY]
sensors_classes = [ATTR_TEMPERATURE, ATTR_HUMIDITY, ATTR_ILLUMINANCE, ATTR_BATTERY]
sensors_units = [UNIT_CELSIUS, UNIT_PERCENT, UNIT_LUX, UNIT_PERCENT]
sensors_tpls = [TPL_TEMPERATURE, TPL_HUMIDITY, TPL_LUX, TPL_BATTERY]
bin_sensors = [ATTR_MOTION, ATTR_CHARGER]
bin_sensors_classes = [ATTR_MOTION, ATTR_POWER]
bin_sensors_pl = [PL_TRUE_FALSE, PL_TRUE_FALSE]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellyrgbw2":
model = ATTR_MODEL_SHELLYRGBW2
rgbw_lights = 1
white_lights = 4
lights_sensors = [ATTR_POWER]
lights_sensors_classes = [ATTR_POWER]
lights_sensors_units = [UNIT_WATT]
lights_sensors_tpls = ["{{value_json.power|float|round(1)}}"]
bin_sensors = [ATTR_OVERPOWER, ATTR_INPUT_0, ATTR_LONGPUSH_0, ATTR_SHORTPUSH_0]
bin_sensors_classes = [ATTR_POWER, None, None, None]
bin_sensors_tpls = [TPL_OVERPOWER, None, None, None]
bin_sensors_pl = [None, PL_1_0, PL_1_0, PL_0_1]
bin_sensors_topics = [
ATTR_COLOR_0_STATUS,
ATTR_INPUT_0,
ATTR_LONGPUSH_0,
ATTR_LONGPUSH_0,
]
# to remove - compatibility
lights_bin_sensors = [ATTR_OVERPOWER, ATTR_INPUT]
lights_bin_sensors_classes = [ATTR_POWER, None]
lights_bin_sensors_tpls = [TPL_OVERPOWER, None]
lights_bin_sensors_pl = [None, PL_1_0]
if id.rsplit("-", 1)[0] == "shellydimmer":
model = ATTR_MODEL_SHELLYDIMMER
white_lights = 1
sensors = [ATTR_TEMPERATURE]
sensors_classes = [ATTR_TEMPERATURE]
sensors_units = [UNIT_CELSIUS]
sensors_tpls = [TPL_TEMPERATURE]
bin_sensors = [
ATTR_OVERTEMPERATURE,
ATTR_OVERLOAD,
ATTR_LOADERROR,
ATTR_INPUT_0,
ATTR_INPUT_1,
ATTR_LONGPUSH_0,
ATTR_LONGPUSH_1,
ATTR_SHORTPUSH_0,
ATTR_SHORTPUSH_1,
]
bin_sensors_classes = [
ATTR_HEAT,
ATTR_POWER,
ATTR_PROBLEM,
None,
None,
None,
None,
None,
None,
]
bin_sensors_pl = [
PL_1_0,
PL_1_0,
PL_1_0,
PL_1_0,
PL_1_0,
PL_1_0,
PL_1_0,
PL_0_1,
PL_0_1,
]
bin_sensors_topics = [
None,
None,
None,
ATTR_INPUT_0,
ATTR_INPUT_1,
ATTR_LONGPUSH_0,
ATTR_LONGPUSH_1,
ATTR_LONGPUSH_0,
ATTR_LONGPUSH_1,
]
lights_sensors = [ATTR_POWER, ATTR_ENERGY]
lights_sensors_units = [UNIT_WATT, UNIT_KWH]
lights_sensors_classes = [ATTR_POWER, ATTR_POWER]
lights_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
if id.rsplit("-", 1)[0] == "shellybulb":
model = ATTR_MODEL_SHELLYBULB
rgbw_lights = 1
if id.rsplit("-", 1)[0].lower() == "shellybulbduo":
model = ATTR_MODEL_SHELLYDUO
white_lights = 1
lights_sensors = [ATTR_ENERGY, ATTR_POWER]
lights_sensors_units = [UNIT_KWH, UNIT_WATT]
lights_sensors_classes = [ATTR_POWER, ATTR_POWER]
lights_sensors_tpls = [TPL_ENERGY_WMIN, TPL_POWER]
if id.rsplit("-", 1)[0].lower() == "shellyvintage":
model = ATTR_MODEL_SHELLYVINTAGE
white_lights = 1
lights_sensors = [ATTR_ENERGY, ATTR_POWER]
lights_sensors_units = [UNIT_KWH, UNIT_WATT]
lights_sensors_classes = [ATTR_POWER, ATTR_POWER]
lights_sensors_tpls = [TPL_ENERGY_WMIN, TPL_POWER]
if id.rsplit("-", 1)[0] == "shellyem":
model = ATTR_MODEL_SHELLYEM
relays = 1
relays_sensors = [ATTR_POWER, ATTR_ENERGY]
relays_sensors_units = [UNIT_WATT, UNIT_KWH]
relays_sensors_classes = [ATTR_POWER, ATTR_POWER]
relays_sensors_tpls = [TPL_POWER, TPL_ENERGY_WMIN]
relays_bin_sensors = [ATTR_OVERPOWER]
relays_bin_sensors_pl = [None]
relays_bin_sensors_topics = [ATTR_RELAY]
relays_bin_sensors_tpls = [TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [ATTR_POWER]
meters = 2
meters_sensors = [
ATTR_POWER,
ATTR_REACTIVE_POWER,
ATTR_VOLTAGE,
ATTR_ENERGY,
ATTR_RETURNED_ENERGY,
ATTR_TOTAL,
ATTR_TOTAL_RETURNED,
]
meters_sensors_units = [
UNIT_WATT,
UNIT_VAR,
UNIT_VOLT,
UNIT_KWH,
UNIT_KWH,
UNIT_KWH,
UNIT_KWH,
]
meters_sensors_classes = [
ATTR_POWER,
None,
None,
ATTR_POWER,
ATTR_POWER,
ATTR_POWER,
ATTR_POWER,
]
meters_sensors_tpls = [
TPL_POWER,
TPL_POWER,
TPL_VOLTAGE,
TPL_ENERGY_WMIN,
TPL_ENERGY_WMIN,
TPL_ENERGY_WH,
TPL_ENERGY_WH,
]
if id.rsplit("-", 1)[0] == "shellyem3":
model = ATTR_MODEL_SHELLY3EM
relays = 1
meters = 3
relays_bin_sensors = [ATTR_OVERPOWER]
relays_bin_sensors_pl = [None]
relays_bin_sensors_topics = [ATTR_RELAY]
relays_bin_sensors_tpls = [TPL_OVERPOWER_RELAY]
relays_bin_sensors_classes = [ATTR_POWER]
meters_sensors = [
ATTR_CURRENT,
ATTR_POWER,
ATTR_POWER_FACTOR,
ATTR_VOLTAGE,
ATTR_ENERGY,
ATTR_RETURNED_ENERGY,
ATTR_TOTAL,
ATTR_TOTAL_RETURNED,
]
meters_sensors_units = [
UNIT_AMPERE,
UNIT_WATT,
UNIT_PERCENT,
UNIT_VOLT,
UNIT_KWH,
UNIT_KWH,
UNIT_KWH,
UNIT_KWH,
]
meters_sensors_classes = [
None,
ATTR_POWER,
None,
None,
ATTR_POWER,
ATTR_POWER,
ATTR_POWER,
ATTR_POWER,
]
meters_sensors_tpls = [
TPL_CURRENT,
TPL_POWER,
TPL_POWER_FACTOR,
TPL_VOLTAGE,
TPL_ENERGY_WMIN,
TPL_ENERGY_WMIN,
TPL_ENERGY_WH,
TPL_ENERGY_WH,
]
if id.rsplit("-", 1)[0] == "shellyflood":
model = ATTR_MODEL_SHELLYFLOOD
sensors = [ATTR_TEMPERATURE, ATTR_BATTERY]
sensors_classes = sensors
sensors_units = [UNIT_CELSIUS, UNIT_PERCENT]
sensors_tpls = [TPL_TEMPERATURE, TPL_BATTERY]
bin_sensors = [ATTR_FLOOD]
bin_sensors_classes = [ATTR_MOISTURE]
bin_sensors_pl = [PL_TRUE_FALSE]
battery_powered = True
if id.rsplit("-", 1)[0] == "shellyix3":
model = ATTR_MODEL_SHELLYI3
bin_sensors = [ATTR_INPUT_0, ATTR_INPUT_1, ATTR_INPUT_2]
bin_sensors_classes = [None, None, None]
bin_sensors_tpls = [None, None, None]
bin_sensors_pl = [PL_1_0, PL_1_0, PL_0_1]
# rollers
for roller_id in range(0, rollers):
device_config = get_device_config(id)
config_mode = ATTR_RELAY
if device_config.get(CONF_MODE):
config_mode = device_config[CONF_MODE]
device_name = f"{model} {id.split('-')[-1]}"
if device_config.get(f"roller-{roller_id}-name"):
roller_name = device_config[f"roller-{roller_id}-name"]
else:
roller_name = f"{device_name} Roller {roller_id}"
default_topic = f"shellies/{id}/"
state_topic = f"~roller/{roller_id}"
command_topic = f"{state_topic}/command"
position_topic = f"{state_topic}/pos"
set_position_topic = f"{state_topic}/command/pos"
availability_topic = "~online"
unique_id = f"{id}-roller-{roller_id}".lower()
config_topic = f"{disc_prefix}/cover/{id}-roller-{roller_id}/config"
if config_mode == ATTR_ROLLER:
roller_mode = True
payload = {
KEY_NAME: roller_name,
KEY_COMMAND_TOPIC: command_topic,
KEY_POSITION_TOPIC: position_topic,
KEY_SET_POSITION_TOPIC: set_position_topic,
KEY_PAYLOAD_OPEN: VALUE_OPEN,
KEY_PAYLOAD_CLOSE: VALUE_CLOSE,
KEY_PAYLOAD_STOP: VALUE_STOP,
KEY_OPTIMISTIC: VALUE_FALSE,
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# relays
for relay_id in range(0, relays):
device_config = get_device_config(id)
device_name = f"{model} {id.split('-')[-1]}"
if device_config.get(f"relay-{relay_id}-name"):
relay_name = device_config[f"relay-{relay_id}-name"]
else:
relay_name = f"{device_name} Relay {relay_id}"
default_topic = f"shellies/{id}/"
state_topic = f"~relay/{relay_id}"
command_topic = f"{state_topic}/command"
availability_topic = "~online"
unique_id = f"{id}-relay-{relay_id}".lower()
config_component = ATTR_SWITCH
if device_config.get(f"relay-{relay_id}"):
config_component = device_config[f"relay-{relay_id}"]
for component in relay_components:
config_topic = f"{disc_prefix}/{component}/{id}-relay-{relay_id}/config"
if component == config_component and not roller_mode:
payload = {
KEY_NAME: relay_name,
KEY_COMMAND_TOPIC: command_topic,
KEY_STATE_TOPIC: state_topic,
KEY_PAYLOAD_OFF: VALUE_OFF,
KEY_PAYLOAD_ON: VALUE_ON,
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# relay's sensors
if relay_id == relays - 1:
for sensor_id in range(0, len(relays_sensors)):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
unique_id = f"{id}-relay-{relays_sensors[sensor_id]}".lower()
config_topic = (
f"{disc_prefix}/sensor/{id}-{relays_sensors[sensor_id]}/config"
)
if device_config.get(f"relay-{relay_id}-name"):
sensor_name = f"{device_config[f'relay-{relay_id}-name']} {relays_sensors[sensor_id].title()}"
else:
sensor_name = f"{device_name} {relays_sensors[sensor_id].title()}"
state_topic = f"~relay/{relays_sensors[sensor_id]}"
if model == ATTR_MODEL_SHELLY2 or roller_mode:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIT: relays_sensors_units[sensor_id],
KEY_VALUE_TEMPLATE: relays_sensors_tpls[sensor_id],
KEY_DEVICE_CLASS: relays_sensors_classes[sensor_id],
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# relay's sensors
for sensor_id in range(0, len(relays_sensors)):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
unique_id = f"{id}-relay-{relays_sensors[sensor_id]}-{relay_id}".lower()
config_topic = (
f"{disc_prefix}/sensor/{id}-{relays_sensors[sensor_id]}-{relay_id}/config"
)
if device_config.get(f"relay-{relay_id}-name"):
sensor_name = f"{device_config[f'relay-{relay_id}-name']} {relays_sensors[sensor_id].title()}"
else:
sensor_name = (
f"{device_name} {relays_sensors[sensor_id].title()} {relay_id}"
)
state_topic = f"~relay/{relay_id}/{relays_sensors[sensor_id]}"
if model != ATTR_MODEL_SHELLY2 and not roller_mode:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIT: relays_sensors_units[sensor_id],
KEY_VALUE_TEMPLATE: relays_sensors_tpls[sensor_id],
KEY_DEVICE_CLASS: relays_sensors_classes[sensor_id],
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# relay's binary sensors
for bin_sensor_id in range(0, len(relays_bin_sensors)):
device_config = get_device_config(id)
push_off_delay = True
if isinstance(device_config.get(CONF_PUSH_OFF_DELAY), bool):
push_off_delay = device_config.get(CONF_PUSH_OFF_DELAY)
unique_id = f"{id}-{relays_bin_sensors[bin_sensor_id]}-{relay_id}".lower()
config_topic = f"{disc_prefix}/binary_sensor/{id}-{relays_bin_sensors[bin_sensor_id]}-{relay_id}/config"
if device_config.get(f"relay-{relay_id}-name"):
sensor_name = f"{device_config[f'relay-{relay_id}-name']} {relays_bin_sensors[bin_sensor_id].title()}"
else:
sensor_name = (
f"{device_name} {relays_bin_sensors[bin_sensor_id].title()} {relay_id}"
)
if relays_bin_sensors_topics and relays_bin_sensors_topics[bin_sensor_id]:
state_topic = f"~{relays_bin_sensors_topics[bin_sensor_id]}/{relay_id}"
else:
state_topic = f"~{relays_bin_sensors[bin_sensor_id]}/{relay_id}"
if not roller_mode:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if (
relays_bin_sensors[bin_sensor_id] in [ATTR_LONGPUSH, ATTR_SHORTPUSH]
and push_off_delay
):
payload[KEY_OFF_DELAY] = off_delay
if relays_bin_sensors_tpls[bin_sensor_id]:
payload[KEY_VALUE_TEMPLATE] = relays_bin_sensors_tpls[bin_sensor_id]
else:
payload[KEY_PAYLOAD_ON] = relays_bin_sensors_pl[bin_sensor_id][VALUE_ON]
payload[KEY_PAYLOAD_OFF] = relays_bin_sensors_pl[bin_sensor_id][
VALUE_OFF
]
if relays_bin_sensors_classes[bin_sensor_id]:
payload[KEY_DEVICE_CLASS] = relays_bin_sensors_classes[bin_sensor_id]
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(
config_topic, str(payload).replace("'", '"').replace("^", "'"), retain, qos
)
# sensors
for sensor_id in range(0, len(sensors)):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
device_name = f"{model} {id.split('-')[-1]}"
unique_id = f"{id}-{sensors[sensor_id]}".lower()
config_topic = f"{disc_prefix}/sensor/{id}-{sensors[sensor_id]}/config"
default_topic = f"shellies/{id}/"
availability_topic = "~online"
sensor_name = f"{device_name} {sensors[sensor_id].title()}"
if relays > 0 or white_lights > 0:
state_topic = f"~{sensors[sensor_id]}"
else:
state_topic = f"~sensor/{sensors[sensor_id]}"
if device_config.get(CONF_POWERED) == ATTR_POWER_AC:
no_battery_sensor = True
expire_after = 7200
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_EXPIRE_AFTER: expire_after,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if sensors_units[sensor_id]:
payload[KEY_UNIT] = sensors_units[sensor_id]
if sensors_classes[sensor_id]:
payload[KEY_DEVICE_CLASS] = sensors_classes[sensor_id]
if not battery_powered:
payload[KEY_AVAILABILITY_TOPIC] = availability_topic
payload[KEY_PAYLOAD_AVAILABLE] = VALUE_TRUE
payload[KEY_PAYLOAD_NOT_AVAILABLE] = VALUE_FALSE
if sensors_tpls[sensor_id]:
payload[KEY_VALUE_TEMPLATE] = sensors_tpls[sensor_id]
if no_battery_sensor and sensors[sensor_id] == ATTR_BATTERY:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# external sensors
for sensor_id in range(0, ext_sensors):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
device_name = f"{model} {id.split('-')[-1]}"
unique_id = f"{id}-ext-{sensor_id}".lower()
if model == ATTR_MODEL_SHELLYAIR:
ext_sensor_type = ATTR_TEMPERATURE
else:
ext_sensor_type = device_config.get(f"ext-{sensor_id}")
if ext_sensor_type:
config_topic = f"{disc_prefix}/sensor/{id}-ext-{sensor_id}/config"
default_topic = f"shellies/{id}/"
availability_topic = "~online"
sensor_name = f"{device_name} External {sensor_id} {ext_sensor_type.title()}"
state_topic = f"~ext_{ext_sensor_type}/{sensor_id}"
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_EXPIRE_AFTER: expire_after,
KEY_FORCE_UPDATE: str(force_update),
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if ext_sensor_type == ATTR_TEMPERATURE:
payload[KEY_UNIT] = UNIT_CELSIUS
payload[KEY_DEVICE_CLASS] = ATTR_TEMPERATURE
elif ext_sensor_type == ATTR_HUMIDITY:
payload[KEY_UNIT] = UNIT_PERCENT
payload[KEY_DEVICE_CLASS] = ATTR_HUMIDITY
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# binary sensors
for bin_sensor_id in range(0, len(bin_sensors)):
device_config = get_device_config(id)
push_off_delay = True
if isinstance(device_config.get(CONF_PUSH_OFF_DELAY), bool):
push_off_delay = device_config.get(CONF_PUSH_OFF_DELAY)
config_mode = ATTR_RGBW
if device_config.get(CONF_MODE):
config_mode = device_config[CONF_MODE]
device_name = f"{model} {id.split('-')[-1]}"
unique_id = (
f"{id}-{bin_sensors[bin_sensor_id].replace(' ', '-').replace('/', '-')}".lower()
)
config_topic = f"{disc_prefix}/binary_sensor/{id}-{bin_sensors[bin_sensor_id].replace(' ', '-').replace('/', '-')}/config"
default_topic = f"shellies/{id}/"
availability_topic = "~online"
sensor_name = (
f"{device_name} {bin_sensors[bin_sensor_id].replace('/', ' ').title()}"
)
if bin_sensors_topics and bin_sensors_topics[bin_sensor_id]:
state_topic = f"~{bin_sensors_topics[bin_sensor_id]}"
elif relays > 0 or white_lights > 0:
state_topic = f"~{bin_sensors[bin_sensor_id]}"
elif bin_sensors[bin_sensor_id] == ATTR_OPENING:
state_topic = "~sensor/state"
else:
state_topic = f"~sensor/{bin_sensors[bin_sensor_id]}"
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if bin_sensors_tpls and bin_sensors_tpls[bin_sensor_id]:
payload[KEY_VALUE_TEMPLATE] = bin_sensors_tpls[bin_sensor_id]
else:
payload[KEY_PAYLOAD_ON] = bin_sensors_pl[bin_sensor_id][VALUE_ON]
payload[KEY_PAYLOAD_OFF] = bin_sensors_pl[bin_sensor_id][VALUE_OFF]
if battery_powered:
payload[KEY_EXPIRE_AFTER] = expire_after
else:
payload[KEY_AVAILABILITY_TOPIC] = availability_topic
payload[KEY_PAYLOAD_AVAILABLE] = VALUE_TRUE
payload[KEY_PAYLOAD_NOT_AVAILABLE] = VALUE_FALSE
if bin_sensors_classes and bin_sensors_classes[bin_sensor_id]:
payload[KEY_DEVICE_CLASS] = bin_sensors_classes[bin_sensor_id]
    if (
        bin_sensors[bin_sensor_id]
        in [ATTR_LONGPUSH_0, ATTR_LONGPUSH_1, ATTR_SHORTPUSH_0, ATTR_SHORTPUSH_1]
        and push_off_delay
    ):
payload[KEY_OFF_DELAY] = off_delay
if (
model == ATTR_MODEL_SHELLYRGBW2
and config_mode == ATTR_WHITE
and bin_sensors[bin_sensor_id] == ATTR_OVERPOWER
):
payload = ""
# to remove
if model == ATTR_MODEL_SHELLYBUTTON1 and bin_sensors[bin_sensor_id] == ATTR_INPUT_0:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(
config_topic, str(payload).replace("'", '"').replace("^", "'"), retain, qos
)
# color lights
for light_id in range(0, rgbw_lights):
device_name = f"{model} {id.split('-')[-1]}"
light_name = f"{device_name} Light {light_id}"
default_topic = f"shellies/{id}/"
state_topic = f"~color/{light_id}/status"
command_topic = f"~color/{light_id}/set"
availability_topic = "~online"
unique_id = f"{id}-light-{light_id}".lower()
config_topic = f"{disc_prefix}/light/{id}-{light_id}/config"
device_config = get_device_config(id)
config_mode = ATTR_RGBW
if device_config.get(CONF_MODE):
config_mode = device_config[CONF_MODE]
if config_mode == ATTR_RGBW and model == ATTR_MODEL_SHELLYRGBW2:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"fx_list":["Off", "Meteor Shower", "Gradual Change", "Flash"],'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\"{% if brightness is defined %},\\"gain\\":{{brightness|float|multiply(0.3922)|round}}{% endif %}{% if red is defined and green is defined and blue is defined %},\\"red\\":{{red}},\\"green\\":{{green}},\\"blue\\":{{blue}}{% endif %}{% if white_value is defined %},\\"white\\":{{white_value}}{% endif %}{% if effect is defined %}{% if effect == \\"Meteor Shower\\" %}\\"effect\\":1{% elif effect == \\"Gradual Change\\" %}\\"effect\\":2{% elif effect == \\"Flash\\" %}\\"effect\\":3{% else %}\\"effect\\":0{% endif %}{% else %}\\"effect\\":0{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\"}",'
'"stat_tpl":"{% if value_json.ison %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.gain|float|multiply(2.55)|round}}",'
'"r_tpl":"{{value_json.red}}",'
'"g_tpl":"{{value_json.green}}",'
'"b_tpl":"{{value_json.blue}}",'
'"whit_val_tpl":"{{value_json.white}}",'
'"fx_tpl":"{% if value_json.effect == 1 %}Meteor Shower{% elif value_json.effect == 2 %}Gradual Change{% elif value_json.effect == 3 %}Flash{% else %}Off{% endif %}",'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
elif config_mode == ATTR_RGBW and model == ATTR_MODEL_SHELLYBULB:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"fx_list":["Off", "Meteor Shower", "Gradual Change", "Breath", "Flash", "On/Off Gradual", "Red/Green Change"],'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\",\\"mode\\":\\"color\\",{% if red is defined and green is defined and blue is defined %}\\"red\\":{{red}},\\"green\\":{{green}},\\"blue\\":{{blue}},{% endif %}{% if white_value is defined %}\\"white\\":{{white_value}},{% endif %}{% if brightness is defined %}\\"gain\\":{{brightness|float|multiply(0.3922)|round}},{% endif %}{% if effect is defined %}{% if effect == \\"Meteor Shower\\" %}\\"effect\\":1{% elif effect == \\"Gradual Change\\" %}\\"effect\\":2{% elif effect == \\"Breath\\" %}\\"effect\\":3{% elif effect == \\"Flash\\" %}\\"effect\\":4{% elif effect == \\"On/Off Gradual\\" %}\\"effect\\":5{% elif effect == \\"Red/Green Change\\" %}\\"effect\\":6{% else %}\\"effect\\":0{% endif %}{% else %}\\"effect\\":0{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\",\\"mode\\":\\"color\\",\\"effect\\": 0}",'
'"stat_tpl":"{% if value_json.ison == true and value_json.mode == \\"color\\" %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.gain|float|multiply(2.55)|round}}",'
'"r_tpl":"{{value_json.red}}",'
'"g_tpl":"{{value_json.green}}",'
'"b_tpl":"{{value_json.blue}}",'
'"whit_val_tpl":"{{value_json.white}}",'
'"fx_tpl":"{% if value_json.effect == 1 %}Meteor Shower{% elif value_json.effect == 2 %}Gradual Change{% elif value_json.effect == 3 %}Breath{% elif value_json.effect == 4 %}Flash{% elif value_json.effect == 5 %}On/Off Gradual{% elif value_json.effect == 6 %}Red/Green Change{% else %}Off{% endif %}",'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, payload, retain, qos)
# color light's binary sensors
for bin_sensor_id in range(0, len(lights_bin_sensors)):
sensor_name = (
f"{device_name} {lights_bin_sensors[bin_sensor_id].title()} {light_id}"
)
config_topic = f"{disc_prefix}/binary_sensor/{id}-color-{lights_bin_sensors[bin_sensor_id]}-{light_id}/config"
unique_id = f"{id}-color-{lights_bin_sensors[bin_sensor_id]}-{light_id}".lower()
if lights_bin_sensors[bin_sensor_id] == ATTR_INPUT:
state_topic = f"~{lights_bin_sensors[bin_sensor_id]}/{light_id}"
else:
state_topic = f"~color/{light_id}/status"
# to remove - compatibility
if (
model == ATTR_MODEL_SHELLYRGBW2
and lights_bin_sensors[bin_sensor_id] == ATTR_INPUT
):
payload = ""
# to remove - compatibility
elif (
model == ATTR_MODEL_SHELLYRGBW2
and lights_bin_sensors[bin_sensor_id] == ATTR_OVERPOWER
):
payload = ""
elif config_mode == ATTR_RGBW:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if lights_bin_sensors_classes and lights_bin_sensors_classes[bin_sensor_id]:
payload[KEY_DEVICE_CLASS] = lights_bin_sensors_classes[bin_sensor_id]
if lights_bin_sensors_tpls and lights_bin_sensors_tpls[bin_sensor_id]:
payload[KEY_VALUE_TEMPLATE] = lights_bin_sensors_tpls[bin_sensor_id]
else:
payload[KEY_PAYLOAD_ON] = lights_bin_sensors_pl[bin_sensor_id][VALUE_ON]
payload[KEY_PAYLOAD_OFF] = lights_bin_sensors_pl[bin_sensor_id][
VALUE_OFF
]
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# color light's sensors
for sensor_id in range(0, len(lights_sensors)):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
unique_id = f"{id}-color-{lights_sensors[sensor_id]}-{light_id}".lower()
config_topic = f"{disc_prefix}/sensor/{id}-color-{lights_sensors[sensor_id]}-{light_id}/config"
sensor_name = f"{device_name} {lights_sensors[sensor_id].title()} {light_id}"
state_topic = f"~color/{light_id}/status"
if config_mode == ATTR_RGBW:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIT: lights_sensors_units[sensor_id],
KEY_VALUE_TEMPLATE: lights_sensors_tpls[sensor_id],
KEY_DEVICE_CLASS: lights_sensors_classes[sensor_id],
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# white lights
for light_id in range(0, white_lights):
device_name = f"{model} {id.split('-')[-1]}"
light_name = f"{device_name} Light {light_id}"
default_topic = f"shellies/{id}/"
if model in [
ATTR_MODEL_SHELLYDIMMER,
ATTR_MODEL_SHELLYDUO,
ATTR_MODEL_SHELLYVINTAGE,
]:
state_topic = f"~light/{light_id}/status"
command_topic = f"~light/{light_id}/set"
unique_id = f"{id}-light-{light_id}".lower()
config_topic = f"{disc_prefix}/light/{id}-{light_id}/config"
else:
state_topic = f"~white/{light_id}/status"
command_topic = f"~white/{light_id}/set"
unique_id = f"{id}-light-white-{light_id}".lower()
config_topic = f"{disc_prefix}/light/{id}-white-{light_id}/config"
availability_topic = "~online"
device_config = get_device_config(id)
config_mode = ATTR_RGBW
if device_config.get(CONF_MODE):
config_mode = device_config[CONF_MODE]
if config_mode == ATTR_WHITE and model == ATTR_MODEL_SHELLYRGBW2:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\"{% if brightness is defined %},\\"brightness\\":{{brightness|float|multiply(0.3922)|round}}{% endif %}{% if white_value is defined %},\\"white\\":{{white_value}}{% endif %}{% if effect is defined %},\\"effect\\":{{effect}}{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\"}",'
'"stat_tpl":"{% if value_json.ison %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.brightness|float|multiply(2.55)|round}}",'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
elif model == ATTR_MODEL_SHELLYDIMMER:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\"{% if brightness is defined %},\\"brightness\\":{{brightness|float|multiply(0.3922)|round}}{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\"}",'
'"stat_tpl":"{% if value_json.ison %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.brightness|float|multiply(2.55)|round}}",'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
elif model == ATTR_MODEL_SHELLYDUO:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\"{% if brightness is defined %},\\"brightness\\":{{brightness|float|multiply(0.3922)|round}}{% endif %}{% if color_temp is defined %},\\"temp\\":{{(1000000/(color_temp|int))|round(0,\\"floor\\")}}{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\"}",'
'"stat_tpl":"{% if value_json.ison %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.brightness|float|multiply(2.55)|round}}",'
'"clr_temp_tpl":"{{((1000000/(value_json.temp|int))|round(0,\\"floor\\"))}}",'
'"max_mireds":370,'
'"min_mireds":153,'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
elif model == ATTR_MODEL_SHELLYVINTAGE:
payload = (
'{"schema":"template",'
'"name":"' + light_name + '",'
'"cmd_t":"' + command_topic + '",'
'"stat_t":"' + state_topic + '",'
'"avty_t":"' + availability_topic + '",'
'"pl_avail":"true",'
'"pl_not_avail":"false",'
'"cmd_on_tpl":"{\\"turn\\":\\"on\\"{% if brightness is defined %},\\"brightness\\":{{brightness|float|multiply(0.3922)|round}}{% endif %}}",'
'"cmd_off_tpl":"{\\"turn\\":\\"off\\"}",'
'"stat_tpl":"{% if value_json.ison %}on{% else %}off{% endif %}",'
'"bri_tpl":"{{value_json.brightness|float|multiply(2.55)|round}}",'
'"uniq_id":"' + unique_id + '",'
'"qos":"' + str(qos) + '",'
'"dev": {"ids": ["' + mac + '"],'
'"name":"' + device_name + '",'
'"mdl":"' + model + '",'
'"sw":"' + fw_ver + '",'
'"mf":"' + ATTR_MANUFACTURER + '"},'
'"~":"' + default_topic + '"}'
)
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, payload, retain, qos)
# white light's binary sensors
for bin_sensor_id in range(0, len(lights_bin_sensors)):
if (
lights_bin_sensors[bin_sensor_id] == ATTR_INPUT and light_id == 0
) or lights_bin_sensors[bin_sensor_id] != ATTR_INPUT:
unique_id = (
f"{id}-white-{lights_bin_sensors[bin_sensor_id]}-{light_id}".lower()
)
config_topic = f"{disc_prefix}/binary_sensor/{id}-white-{lights_bin_sensors[bin_sensor_id]}-{light_id}/config"
if lights_bin_sensors[bin_sensor_id] == ATTR_INPUT:
state_topic = f"~{lights_bin_sensors[bin_sensor_id]}/{light_id}"
else:
state_topic = f"~white/{light_id}/status"
sensor_name = (
f"{device_name} {lights_bin_sensors[bin_sensor_id].title()} {light_id}"
)
# to remove - compatibility
if (
model == ATTR_MODEL_SHELLYRGBW2
and lights_bin_sensors[bin_sensor_id] == ATTR_INPUT
):
payload = ""
# to remove - compatibility
elif (
model == ATTR_MODEL_SHELLYRGBW2
and lights_bin_sensors[bin_sensor_id] == ATTR_OVERPOWER
):
payload = ""
elif config_mode != ATTR_RGBW:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if (
lights_bin_sensors_classes
and lights_bin_sensors_classes[bin_sensor_id]
):
payload[KEY_DEVICE_CLASS] = lights_bin_sensors_classes[
bin_sensor_id
]
if lights_bin_sensors_tpls and lights_bin_sensors_tpls[bin_sensor_id]:
payload[KEY_VALUE_TEMPLATE] = lights_bin_sensors_tpls[bin_sensor_id]
else:
payload[KEY_PAYLOAD_ON] = lights_bin_sensors_pl[bin_sensor_id][
VALUE_ON
]
payload[KEY_PAYLOAD_OFF] = lights_bin_sensors_pl[bin_sensor_id][
VALUE_OFF
]
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# white light's sensors
for sensor_id in range(0, len(lights_sensors)):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
unique_id = f"{id}-white-{lights_sensors[sensor_id]}-{light_id}".lower()
config_topic = f"{disc_prefix}/sensor/{id}-white-{lights_sensors[sensor_id]}-{light_id}/config"
sensor_name = f"{device_name} {lights_sensors[sensor_id].title()} {light_id}"
if model in [
ATTR_MODEL_SHELLYDIMMER,
ATTR_MODEL_SHELLYDUO,
ATTR_MODEL_SHELLYVINTAGE,
]:
state_topic = f"~light/{light_id}/{lights_sensors[sensor_id]}"
else:
state_topic = f"~white/{light_id}/status"
        if model in [
            ATTR_MODEL_SHELLYDIMMER,
            ATTR_MODEL_SHELLYDUO,
            ATTR_MODEL_SHELLYVINTAGE,
        ]:
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIT: lights_sensors_units[sensor_id],
KEY_VALUE_TEMPLATE: lights_sensors_tpls[sensor_id],
KEY_DEVICE_CLASS: lights_sensors_classes[sensor_id],
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
else:
payload = ""
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
# meters
for meter_id in range(0, meters):
device_config = get_device_config(id)
force_update = False
if isinstance(device_config.get(CONF_FORCE_UPDATE_SENSORS), bool):
force_update = device_config.get(CONF_FORCE_UPDATE_SENSORS)
device_name = f"{model} {id.split('-')[-1]}"
default_topic = f"shellies/{id}/"
availability_topic = "~online"
for sensor_id in range(0, len(meters_sensors)):
unique_id = f"{id}-emeter-{meters_sensors[sensor_id]}-{meter_id}".lower()
config_topic = f"{disc_prefix}/sensor/{id}-emeter-{meters_sensors[sensor_id]}-{meter_id}/config"
sensor_name = (
f"{device_name} Meter {meters_sensors[sensor_id].title()} {meter_id}"
)
state_topic = f"~emeter/{meter_id}/{meters_sensors[sensor_id]}"
payload = {
KEY_NAME: sensor_name,
KEY_STATE_TOPIC: state_topic,
KEY_UNIT: meters_sensors_units[sensor_id],
KEY_VALUE_TEMPLATE: meters_sensors_tpls[sensor_id],
KEY_AVAILABILITY_TOPIC: availability_topic,
KEY_PAYLOAD_AVAILABLE: VALUE_TRUE,
KEY_PAYLOAD_NOT_AVAILABLE: VALUE_FALSE,
KEY_FORCE_UPDATE: str(force_update),
KEY_UNIQUE_ID: unique_id,
KEY_QOS: qos,
KEY_DEVICE: {
KEY_IDENTIFIERS: [mac],
KEY_NAME: device_name,
KEY_MODEL: model,
KEY_SW_VERSION: fw_ver,
KEY_MANUFACTURER: ATTR_MANUFACTURER,
},
"~": default_topic,
}
if meters_sensors_classes and meters_sensors_classes[sensor_id]:
payload[KEY_DEVICE_CLASS] = meters_sensors_classes[sensor_id]
if id.lower() in ignored:
payload = ""
mqtt_publish(config_topic, str(payload).replace("'", '"'), retain, qos)
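        # Note: str(payload).replace("'", '"') only approximates JSON from the
        # dict repr; json.dumps(payload) would likely be the robust equivalent,
        # assuming no payload value itself contains a single quote.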
|
# -*- coding: utf-8 -*-
"""
Assembly
https://github.com/mardix/assembly
--------------------------------------------------------------------------------
"wsgi.py" is the application object. It's required by Assembly.
It sets up and initialize all the views per application
--------------------------------------------------------------------------------
# Environment variables
export ASSEMBLY_ENV=Development
export ASSEMBLY_APP=default
## ASSEMBLY_APP
By default, Assembly will attempt to load the 'default' app.
To specify a different app, set the environment variable 'ASSEMBLY_APP'
to the name of the app to use
ie: 'export ASSEMBLY_APP=default'
## ASSEMBLY_ENV
By default, Assembly will attempt to load the 'Development' config object from './config.py'
To specify a different environment, set the environment variable 'ASSEMBLY_ENV'
to the environment class name
ie: 'export ASSEMBLY_ENV=Production'
"""
"""
Import the base Assembly
"""
from assembly import Assembly
"""
Import scripts to enable their command line interface
"""
import run.scripts
"""
APPS = {}
a dict with list of apps to load by name
You can add as many apps as you want per application.
Set the environment variable 'ASSEMBLY_APP' to the name of the app to use
ie: 'export ASSEMBLY_APP=default'
"""
APPS = {
"default": [
"views"
]
}
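"""
Example (hypothetical): several apps can be registered side by side, e.g.
APPS = {"default": ["views"], "admin": ["admin.views"]}
and then selected with 'export ASSEMBLY_APP=admin'.
The 'admin' module names above are illustrative only.
"""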
"""
Initialize the application
the 'app' variable is required
"""
app = Assembly.init(__name__, APPS)
|
import typing as T
from dataclasses import dataclass
import moonleap.resource.props as P
from moonleap import MemFun, Resource, extend, register_add
from titan.project_pkg.service import Service, Tool
from . import props
@dataclass
class PkgDependency(Resource):
package_names: T.List[str]
is_dev: bool = False
@register_add(PkgDependency)
def add_pkg_dependency(resource, pkg_dependency):
resource.pkg_dependencies.add(pkg_dependency)
@extend(Service)
class ExtendService:
get_pkg_names = MemFun(props.get_pkg_names())
@extend(Tool)
class ExtendTool:
pkg_dependencies = P.tree("pkg_dependencies")
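# Usage sketch (hypothetical; assumes the moonleap registry wires
# add_pkg_dependency to PkgDependency via register_add as above):
#
#   dep = PkgDependency(package_names=["black", "flake8"], is_dev=True)
#   add_pkg_dependency(my_tool, dep)  # 'my_tool' stands for some Tool resource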
|
# --------------
#Importing required libraries
import pandas as pd
import scipy.stats as stats
import math
import numpy as np
import matplotlib.pyplot as plt
from statsmodels.stats.weightstats import ztest
from scipy.stats import chi2_contingency
import warnings
warnings.filterwarnings('ignore')
#Sample_Size
sample_size=2000
#Z_Critical Score
z_critical = stats.norm.ppf(q = 0.95)
# Critical Value
# critical_value = stats.chi2.ppf(q = 0.95, # Find the critical value for 95% confidence*
# df = 6) # Df = number of variable categories(in purpose) - 1
#Reading file
data=pd.read_csv(path)
#Code starts here
data_sample=data.sample(n=sample_size,random_state=0)
sample_mean=data_sample['installment'].mean()
sample_std=data_sample['installment'].std()
margin_of_error=z_critical*(sample_std/math.sqrt(sample_size))
confidence_interval=(sample_mean-margin_of_error, sample_mean+ margin_of_error)
print(confidence_interval)
true_mean=data['installment'].mean()
print(true_mean)
sample_size=np.array([20,50,100])
fig,axes=plt.subplots(3,1,figsize=(10,20))
for i in range(len(sample_size)):
m=[]
for j in range(1000):
mean=data['installment'].sample(sample_size[i]).mean()
m.append(mean)
mean_series=pd.Series(m)
    axes[i].hist(mean_series, density=True)  # 'normed' was removed from matplotlib; 'density' is its replacement
plt.show()
#Task 3
data['int.rate']=data['int.rate'].map(lambda x: str(x)[:-1])
data['int.rate']=data['int.rate'].astype(float)/100
z_statistic_1,p_value_1=ztest(x1=data[data['purpose']=='small_business']['int.rate'],value=data['int.rate'].mean(),alternative='larger')
print(z_statistic_1)
print(p_value_1)
#Installment vs Loan Defaulting
z_statistic_2,p_value_2=ztest(x1=data[data['paid.back.loan']=='No']['installment'],x2=data[data['paid.back.loan']=='Yes']['installment'])
print(z_statistic_2)
print(p_value_2)
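# Decision-rule sketch for the tests above: with alpha = 0.05 and
# z_critical = stats.norm.ppf(0.95) ≈ 1.645, reject the null hypothesis
# whenever the reported p-value (p_value_1 or p_value_2) is below 0.05.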
|
import json
import os
# Remove when transferred to server
from flask import Flask
from flask import render_template
from flask import request
# Remove when transferred to server
app = Flask(__name__)
# Database integration
# from cassandra.cluster import Cluster
# lsof -i :5000
"""
# Ingestion integration
from kafka import SimpleProducer, KafkaClient
# Start zookeeper: bin/zookeeper-server-start.sh config/zookeeper.properties
# Start kafka: bin/kafka-server-start.sh config/server.properties
# Start consumer test: bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic userevent --from-beginning
# Create topic bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic geodecode-request
# Send test data
# curl -H "Content-Type: application/json" -X POST -d '{"eventType":"user-click","userId":"123"}' http://127.0.0.1:5000/producer/
@app.route("/producer/", methods=['POST'])
def producer():
data = request.get_json()
# To send messages synchronously
kafka = KafkaClient('localhost:9092')
producer = SimpleProducer(kafka)
# listToSend = [d['eventType'] + ',' + d['userId'] for d in data]
# Note that the application is responsible for encoding messages to type bytes
dataToSend = data['geodecode-request'] + ',' + data['userId']
producer.send_messages(b'userevent', dataToSend.encode('utf-8') )
"""
@app.route("/")
def hello():
return render_template('index.html')
@app.route("/geodecode/", methods=['POST','GET'])
def geodecode():
# Input sanity checks
#ip_address = request.args.get('ipaddress', '')
#data = request.get_json()
#data['ip']
return json.dumps([{'address':'1 Infinite Loop', \
'city':'Paradise City', \
'organization':'SVDS', \
'longitude':0.0, \
'latitude':0.0}])
if __name__ == "__main__":
app.run(debug=True)
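# Quick local check (illustrative, against the stub response above):
#   curl http://127.0.0.1:5000/geodecode/
# should return the hard-coded JSON list from geodecode().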
|
import sys
from qshell.core import ctx
# Decorator for registering functions as commands
def command(func):
if callable(func):
def inner(*args, **kwargs):
return func(*args, **kwargs)
ctx.register(func.__name__, func)
return inner
else:
def decorator(fn):
def inner(*args, **kwargs):
                return fn(*args, **kwargs)  # call the decorated function, not the command-name argument
ctx.register(func, fn)
return inner
return decorator
# Decorator for init function. Run immediately
def init(func):
def inner():
args, kwargs = ctx._parse_args(' '.join(sys.argv[1:]))
return func(*args, **kwargs)
return inner()
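# Usage sketch (assumes a qshell context is configured elsewhere):
#
#   @command
#   def greet(name):           # registered under its own name, "greet"
#       print("hello", name)
#
#   @command("bye")            # registered under the explicit name "bye"
#   def farewell(name):
#       print("bye", name)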
|
# -*- coding: utf-8 -*-
# @File : xcache.py
# @Date : 2021/2/25
# @Desc :
import copy
import time
import uuid
from django.core.cache import cache
from Lib.log import logger
class Xcache(object):
"""缓存模块"""
XCACHE_MODULES_CONFIG = "XCACHE_MODULES_CONFIG"
XCACHE_SESSION_INFO = "XCACHE_SESSION_INFO"
XCACHE_HADLER_VIRTUAL_LIST = "XCACHE_HADLER_VIRTUAL_LIST"
XCACHE_HADLER_CACHE = "XCACHE_HADLER_CACHE"
XCACHE_NOTICES_LIST = "XCACHE_NOTICES_LIST"
XCACHE_MODULES_TASK_LIST = "XCACHE_MODULES_TASK_LIST"
XCACHE_BOT_MODULES_WAIT_LIST = "XCACHE_BOT_MODULES_WAIT_LIST"
XCACHE_MODULES_RESULT = "XCACHE_MODULES_RESULT"
XCACHE_MODULES_RESULT_HISTORY = "XCACHE_MODULES_RESULT_HISTORY"
XCACHE_HEARTBEAT_CACHE_RESULT_HISTORY = "XCACHE_HEARTBEAT_CACHE_RESULT_HISTORY"
XCACHE_HEARTBEAT_CACHE_NOTICES = "XCACHE_HEARTBEAT_CACHE_NOTICES"
XCACHE_HEARTBEAT_CACHE_JOBS = "XCACHE_HEARTBEAT_CACHE_JOBS"
XCACHE_HEARTBEAT_CACHE_BOT_WAIT_LIST = "XCACHE_HEARTBEAT_CACHE_BOT_WAIT_LIST"
XCACHE_HEARTBEAT_CACHE_HOSTS_SORTED = "XCACHE_HEARTBEAT_CACHE_HOSTS_SORTED"
XCACHE_MSFCONSOLE_INPUT_CACHE = "XCACHE_MSFCONSOLE_INPUT_CACHE"
XCACHE_MSFCONSOLE_CID = "XCACHE_MSFCONSOLE_CID"
XCACHE_MSFCONSOLE_HISTORY_CACHE = "XCACHE_MSFCONSOLE_HISTORY_CACHE"
XCACHE_MSFCONSOLE_HISTORY_CURSOR = "XCACHE_MSFCONSOLE_HISTORY_CURSOR"
XCACHE_MSF_JOB_CACHE = "XCACHE_MSF_JOB_CACHE"
XCACHE_MSF_SESSION_CACHE = "XCACHE_MSF_SESSION_CACHE"
XCACHE_TELEGRAM_CONFIG = "XCACHE_TELEGRAM_CONFIG"
XCACHE_DINGDING_CONFIG = "XCACHE_DINGDING_CONFIG"
XCACHE_SERVERCHAN_CONFIG = "XCACHE_SERVERCHAN_CONFIG"
XCACHE_FOFA_CONFIG = "XCACHE_FOFA_CONFIG"
XCACHE_SESSIONMONITOR_CONFIG = "XCACHE_SESSIONMONITOR_CONFIG"
XCACHE_SESSION_CONT = "XCACHE_SESSION_CONT"
XCACHE_AES_KEY = "XCACHE_AES_KEY"
XCACHE_NETWORK_TOPOLOGY = "XCACHE_NETWORK_TOPOLOGY"
XCACHE_GEOIP_CITYREADER = "XCACHE_GEOIP_CITYREADER"
XCACHE_GEOIP_ASNREADER = "XCACHE_GEOIP_ASNREADER"
XCACHE_MSFRPC_CONFIG = "XCACHE_MSFRPC_CONFIG"
XCACHE_SESSIONIO_CACHE = "XCACHE_SESSIONIO_CACHE"
XCACHE_LAZYLOADER_CACHE = "XCACHE_LAZYLOADER_CACHE"
def __init__(self):
pass
@staticmethod
def init_xcache_on_start():
        # Clear the module config cache
cache.set(Xcache.XCACHE_MODULES_CONFIG, None, None)
        # Clear the multi_module cache
re_key = "{}_*".format(Xcache.XCACHE_MODULES_TASK_LIST)
keys = cache.keys(re_key)
for key in keys:
try:
req = cache.get(key)
except Exception as _:
cache.delete(key)
continue
if req.get("job_id") is None:
cache.delete(key)
        # Clear the session_info cache
re_key = "{}_*".format(Xcache.XCACHE_SESSION_INFO)
keys = cache.keys(re_key)
for key in keys:
try:
cache.delete(key)
except Exception as _:
continue
        # Clear the session_count cache
cache.set(Xcache.XCACHE_SESSION_CONT, 0, None)
return True
@staticmethod
def get_heartbeat_cache_hosts_sorted():
result = cache.get(Xcache.XCACHE_HEARTBEAT_CACHE_HOSTS_SORTED)
return result
@staticmethod
def set_heartbeat_cache_hosts_sorted(result):
cache.set(Xcache.XCACHE_HEARTBEAT_CACHE_HOSTS_SORTED, result, None)
return True
@staticmethod
def get_heartbeat_cache_result_history():
result = cache.get(Xcache.XCACHE_HEARTBEAT_CACHE_RESULT_HISTORY)
return result
@staticmethod
def set_heartbeat_cache_result_history(result):
cache.set(Xcache.XCACHE_HEARTBEAT_CACHE_RESULT_HISTORY, result, None)
return True
@staticmethod
def get_heartbeat_cache_notices():
result = cache.get(Xcache.XCACHE_HEARTBEAT_CACHE_NOTICES)
return result
@staticmethod
def set_heartbeat_cache_notices(result):
cache.set(Xcache.XCACHE_HEARTBEAT_CACHE_NOTICES, result, None)
return True
@staticmethod
def get_heartbeat_cache_jobs():
result = cache.get(Xcache.XCACHE_HEARTBEAT_CACHE_JOBS)
return result
@staticmethod
def set_heartbeat_cache_jobs(result):
cache.set(Xcache.XCACHE_HEARTBEAT_CACHE_JOBS, result, None)
return True
@staticmethod
def get_heartbeat_cache_bot_wait_list():
result = cache.get(Xcache.XCACHE_HEARTBEAT_CACHE_BOT_WAIT_LIST)
return result
@staticmethod
def set_heartbeat_cache_bot_wait_list(result):
cache.set(Xcache.XCACHE_HEARTBEAT_CACHE_BOT_WAIT_LIST, result, None)
return True
@staticmethod
def get_msf_job_cache():
result = cache.get(Xcache.XCACHE_MSF_JOB_CACHE)
return result
@staticmethod
def set_msf_job_cache(msfjobs):
cache.set(Xcache.XCACHE_MSF_JOB_CACHE, msfjobs, None)
return True
@staticmethod
def get_module_task_by_uuid(task_uuid):
for i in range(2):
key = "{}_{}".format(Xcache.XCACHE_MODULES_TASK_LIST, task_uuid)
req = cache.get(key)
if req is not None:
return req
else:
pass
time.sleep(1)
@staticmethod
def get_module_task_by_uuid_nowait(task_uuid):
key = "{}_{}".format(Xcache.XCACHE_MODULES_TASK_LIST, task_uuid)
req = cache.get(key)
return req
@staticmethod
def list_module_tasks():
re_key = "{}_*".format(Xcache.XCACHE_MODULES_TASK_LIST)
keys = cache.keys(re_key)
reqs = []
for key in keys:
reqs.append(cache.get(key))
return reqs
@staticmethod
def create_module_task(req):
"""任务队列"""
for i in range(5):
key = "{}_{}".format(Xcache.XCACHE_MODULES_TASK_LIST, req.get("uuid"))
cache.set(key, req, None)
if cache.get(key) is not None:
break
else:
logger.error("redis 缓存失败!")
time.sleep(0.5)
return True
@staticmethod
def del_module_task_by_uuid(task_uuid):
key = "{}_{}".format(Xcache.XCACHE_MODULES_TASK_LIST, task_uuid)
cache.delete(key)
# XCACHE_BOT_MODULES_WAIT_LIST
@staticmethod
def pop_one_from_bot_wait():
re_key = "{}_*".format(Xcache.XCACHE_BOT_MODULES_WAIT_LIST)
keys = cache.keys(re_key)
for key in keys:
req = cache.get(key)
if req is not None:
cache.delete(key)
return req
return None
@staticmethod
def list_bot_wait():
re_key = "{}_*".format(Xcache.XCACHE_BOT_MODULES_WAIT_LIST)
keys = cache.keys(re_key)
reqs = []
for key in keys:
reqs.append(cache.get(key))
return reqs
@staticmethod
def putin_bot_wait(req):
"""任务队列"""
key = "{}_{}".format(Xcache.XCACHE_BOT_MODULES_WAIT_LIST, req.get("uuid"))
cache.set(key, req, None)
return True
@staticmethod
def del_bot_wait_by_group_uuid(group_uuid):
re_key = "{}_*".format(Xcache.XCACHE_BOT_MODULES_WAIT_LIST)
keys = cache.keys(re_key)
for key in keys:
req = cache.get(key)
if req.get("group_uuid") == group_uuid:
cache.delete(key)
return True
@staticmethod
def get_module_result(ipaddress, loadpath):
key = "{}_{}_{}".format(Xcache.XCACHE_MODULES_RESULT, ipaddress, loadpath)
result_dict = cache.get(key)
if result_dict is None:
return {"update_time": int(time.time()), "result": ""}
return result_dict
@staticmethod
def set_module_result(ipaddress, loadpath, result):
key = "{}_{}_{}".format(Xcache.XCACHE_MODULES_RESULT, ipaddress, loadpath)
cache.set(key, {"update_time": int(time.time()), "result": result}, None)
return True
@staticmethod
def add_module_result(ipaddress, loadpath, result):
key = "{}_{}_{}".format(Xcache.XCACHE_MODULES_RESULT, ipaddress, loadpath)
old_result = cache.get(key)
if old_result is None:
new_result = result
else:
new_result = old_result.get("result") + result
cache.set(key, {"update_time": int(time.time()), "result": new_result}, None)
return True
@staticmethod
def del_module_result_by_hid(ipaddress):
re_key = "{}_{}_*".format(Xcache.XCACHE_MODULES_RESULT, ipaddress)
keys = cache.keys(re_key)
for key in keys:
cache.set(key, None, None)
return True
@staticmethod
def list_module_result_history():
result = cache.get(Xcache.XCACHE_MODULES_RESULT_HISTORY)
if result is None:
return []
return result[::-1]
@staticmethod
def add_module_result_history(ipaddress=None, loadpath=None, opts=None, update_time=0, result=""):
if opts is None:
opts = []
one_result = {"ipaddress": ipaddress,
"loadpath": loadpath,
"opts": opts,
"update_time": update_time,
"result": result}
old_result = cache.get(Xcache.XCACHE_MODULES_RESULT_HISTORY)
if old_result is None:
cache.set(Xcache.XCACHE_MODULES_RESULT_HISTORY, [one_result], None)
else:
old_result.append(one_result)
cache.set(Xcache.XCACHE_MODULES_RESULT_HISTORY, old_result, None)
return True
@staticmethod
def del_module_result_history():
cache.set(Xcache.XCACHE_MODULES_RESULT_HISTORY, [], None)
return True
@staticmethod
def del_module_result_history_by_hid(ipaddress):
old_result = cache.get(Xcache.XCACHE_MODULES_RESULT_HISTORY)
if old_result is None:
return False
else:
new_result = []
for one_result in old_result:
if one_result.get("ipaddress") != ipaddress:
new_result.append(one_result)
cache.set(Xcache.XCACHE_MODULES_RESULT_HISTORY, new_result, None)
return True
@staticmethod
def get_module_task_length():
re_key = "{}_*".format(Xcache.XCACHE_MODULES_TASK_LIST)
keys = cache.keys(re_key)
return len(keys)
@staticmethod
def get_notices():
notices = cache.get(Xcache.XCACHE_NOTICES_LIST)
if notices is None:
return []
else:
notices.reverse()
return notices
@staticmethod
def clean_notices():
cache.set(Xcache.XCACHE_NOTICES_LIST, [], None)
return True
@staticmethod
def add_one_notice(notice):
notices = cache.get(Xcache.XCACHE_NOTICES_LIST)
if notices is None:
cache.set(Xcache.XCACHE_NOTICES_LIST, [notice], None)
else:
tempnotices = copy.deepcopy(notices)
tempnotices.append(notice)
cache.set(Xcache.XCACHE_NOTICES_LIST, tempnotices, None)
@staticmethod
def list_moduleconfigs():
modules_config = cache.get(Xcache.XCACHE_MODULES_CONFIG)
if modules_config is None:
return None
else:
return modules_config
@staticmethod
def update_moduleconfigs(all_modules_config):
cache.set(Xcache.XCACHE_MODULES_CONFIG, all_modules_config, None)
return True
@staticmethod
def get_moduleconfig(loadpath):
modules_config = cache.get(Xcache.XCACHE_MODULES_CONFIG)
try:
for config in modules_config:
if config.get("loadpath") == loadpath:
return config
return None
except Exception as E:
logger.error(E)
return None
@staticmethod
def set_session_info(sessionid, session_info):
key = "{}_{}".format(Xcache.XCACHE_SESSION_INFO, sessionid)
cache.set(key, session_info, None)
return True
@staticmethod
def get_session_info(sessionid):
key = "{}_{}".format(Xcache.XCACHE_SESSION_INFO, sessionid)
session_info = cache.get(key)
return session_info
@staticmethod
def get_virtual_handlers():
handler_list = cache.get(Xcache.XCACHE_HADLER_VIRTUAL_LIST)
if handler_list is None:
handler_list = []
return handler_list
@staticmethod
def set_cache_handlers(handler_list):
cache.set(Xcache.XCACHE_HADLER_CACHE, handler_list, None)
return True
@staticmethod
def get_cache_handlers():
handler_list = cache.get(Xcache.XCACHE_HADLER_CACHE)
if handler_list is None:
handler_list = []
return handler_list
@staticmethod
def add_virtual_handler(onehandler):
handler_list = cache.get(Xcache.XCACHE_HADLER_VIRTUAL_LIST)
if handler_list is None:
handler_list = []
minid = -1
for handler in handler_list:
tmphandler = copy.copy(handler)
tmpid = tmphandler.pop('ID')
tmphandler.pop('timestamp')
tmphandler1 = copy.copy(onehandler)
tmphandler1.pop('timestamp')
if tmphandler == tmphandler1:
return tmpid
if tmpid <= minid:
minid = tmpid
handler_id = minid - 1
onehandler['ID'] = handler_id
handler_list.append(onehandler)
cache.set(Xcache.XCACHE_HADLER_VIRTUAL_LIST, handler_list, None)
return handler_id
@staticmethod
def del_virtual_handler(virtual_id):
handler_list = cache.get(Xcache.XCACHE_HADLER_VIRTUAL_LIST)
if handler_list is None:
handler_list = []
cache.set(Xcache.XCACHE_HADLER_VIRTUAL_LIST, handler_list, None)
return False
for onehandler in handler_list:
if onehandler.get('ID') == virtual_id:
handler_list.remove(onehandler)
cache.set(Xcache.XCACHE_HADLER_VIRTUAL_LIST, handler_list, None)
return True
@staticmethod
def add_to_msfconsoleinputcache(data):
inputcache = cache.get(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE)
if inputcache is None:
cache.set(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE, data, None)
return data
else:
cache.set(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE, inputcache + data, None)
return inputcache + data
@staticmethod
def del_one_from_msfconsoleinputcache():
inputcache = cache.get(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE)
if inputcache is None or inputcache == "":
return "\u0007"
else:
cache.set(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE, inputcache[0:-1], None)
return "\b\u001b[K"
@staticmethod
def clear_oneline_from_msfconsoleinputcache():
inputcache = cache.get(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE)
if inputcache is None or inputcache == "":
return "\u0007"
else:
cache.set(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE, "", None)
return "\b\u001b[K" * len(inputcache)
@staticmethod
def get_msfconsoleinputcache():
inputcache = cache.get(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE)
if inputcache is None:
return ""
else:
return inputcache
@staticmethod
def clean_msfconsoleinputcache():
cache.set(Xcache.XCACHE_MSFCONSOLE_INPUT_CACHE, "", None)
return True
@staticmethod
def add_to_msfconsole_history_cache(cmd):
if cmd is None or cmd == "":
return True
historys = cache.get(Xcache.XCACHE_MSFCONSOLE_HISTORY_CACHE)
        cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR, 0, None)  # reset the cursor
if historys is None:
cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CACHE, [cmd], None)
return True
else:
historys.insert(0, cmd)
cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CACHE, historys, None)
return True
@staticmethod
def get_last_from_msfconsole_history_cache():
historys = cache.get(Xcache.XCACHE_MSFCONSOLE_HISTORY_CACHE)
if historys is None or len(historys) == 0:
return None
cursor = cache.get(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR)
if cursor is None:
cursor = 0
            cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR, 1, None)  # move the cursor forward
else:
            cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR, cursor + 1, None)  # move the cursor forward
cursor %= len(historys)
return historys[cursor]
@staticmethod
def get_next_from_msfconsole_history_cache():
historys = cache.get(Xcache.XCACHE_MSFCONSOLE_HISTORY_CACHE)
if historys is None or len(historys) == 0:
return None
cursor = cache.get(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR)
if cursor is None or cursor == 0:
cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR, 0, None)
return None
else:
            cache.set(Xcache.XCACHE_MSFCONSOLE_HISTORY_CURSOR, cursor - 1, None)  # move the cursor back
cursor %= len(historys)
return historys[cursor]
@staticmethod
def set_console_id(cid):
cache.set(Xcache.XCACHE_MSFCONSOLE_CID, cid, None)
return True
@staticmethod
def get_console_id():
inputcache = cache.get(Xcache.XCACHE_MSFCONSOLE_CID)
return inputcache
@staticmethod
def alive_token(token):
cache_user = cache.get(token)
return cache_user
@staticmethod
def set_telegram_conf(conf):
cache.set(Xcache.XCACHE_TELEGRAM_CONFIG, conf, None)
return True
@staticmethod
def get_telegram_conf():
conf = cache.get(Xcache.XCACHE_TELEGRAM_CONFIG)
return conf
@staticmethod
def set_dingding_conf(conf):
cache.set(Xcache.XCACHE_DINGDING_CONFIG, conf, None)
return True
@staticmethod
def get_dingding_conf():
conf = cache.get(Xcache.XCACHE_DINGDING_CONFIG)
return conf
@staticmethod
def set_serverchan_conf(conf):
cache.set(Xcache.XCACHE_SERVERCHAN_CONFIG, conf, None)
return True
@staticmethod
def get_serverchan_conf():
conf = cache.get(Xcache.XCACHE_SERVERCHAN_CONFIG)
return conf
@staticmethod
def set_fofa_conf(conf):
cache.set(Xcache.XCACHE_FOFA_CONFIG, conf, None)
return True
@staticmethod
def get_fofa_conf():
conf = cache.get(Xcache.XCACHE_FOFA_CONFIG)
return conf
@staticmethod
def set_sessionmonitor_conf(conf):
cache.set(Xcache.XCACHE_SESSIONMONITOR_CONFIG, conf, None)
return True
@staticmethod
def get_sessionmonitor_conf():
conf = cache.get(Xcache.XCACHE_SESSIONMONITOR_CONFIG)
if conf is None:
conf = {"flag": False}
cache.set(Xcache.XCACHE_SESSIONMONITOR_CONFIG, conf, None)
return conf
@staticmethod
def set_session_count(count):
cache.set(Xcache.XCACHE_SESSION_CONT, count, None)
return True
@staticmethod
def get_session_count():
conf = cache.get(Xcache.XCACHE_SESSION_CONT)
if conf is None:
return 0
return conf
@staticmethod
def get_lhost_config():
cache_data = cache.get(Xcache.XCACHE_MSFRPC_CONFIG)
return cache_data
@staticmethod
def set_lhost_config(cache_data):
cache.set(Xcache.XCACHE_MSFRPC_CONFIG, cache_data, None)
return True
@staticmethod
def get_aes_key():
conf = cache.get(Xcache.XCACHE_AES_KEY)
if conf is None:
tmp_self_uuid = str(uuid.uuid1()).replace('-', "")[0:16]
cache.set(Xcache.XCACHE_AES_KEY, tmp_self_uuid, None)
return tmp_self_uuid
else:
return conf
@staticmethod
def set_network_topology_cache(cache_data):
cache.set(Xcache.XCACHE_NETWORK_TOPOLOGY, cache_data)
return True
@staticmethod
def get_network_topology_cache():
cache_data = cache.get(Xcache.XCACHE_NETWORK_TOPOLOGY)
return cache_data
@staticmethod
def set_city_reader_cache(ip, cache_data):
cache.set(f"{Xcache.XCACHE_GEOIP_CITYREADER}:{ip}", cache_data, 3600 * 24)
return True
@staticmethod
def get_city_reader_cache(ip):
cache_data = cache.get(f"{Xcache.XCACHE_GEOIP_CITYREADER}:{ip}")
return cache_data
@staticmethod
def set_asn_reader_cache(ip, cache_data):
cache.set(f"{Xcache.XCACHE_GEOIP_ASNREADER}:{ip}", cache_data, 3600 * 24) # 24小时是为了mmdb更新时启用
return True
@staticmethod
def get_asn_reader_cache(ip):
cache_data = cache.get(f"{Xcache.XCACHE_GEOIP_ASNREADER}:{ip}")
return cache_data
@staticmethod
def get_sessionio_cache(hid):
cache_dict = cache.get(Xcache.XCACHE_SESSIONIO_CACHE)
if cache_dict is None:
cache_dict = {hid: ''}
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': ''}
if cache_dict.get(hid) is None:
cache_dict[hid] = ''
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': ''}
old_buffer = cache_dict.get(hid)
return {'hid': hid, 'buffer': old_buffer}
@staticmethod
def add_sessionio_cache(hid, buffer):
cache_dict = cache.get(Xcache.XCACHE_SESSIONIO_CACHE)
if cache_dict is None:
cache_dict = {hid: buffer}
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': buffer}
if cache_dict.get(hid) is None:
cache_dict[hid] = buffer
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': buffer}
new_buffer = cache_dict.get(hid) + buffer
cache_dict[hid] = new_buffer
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': new_buffer}
@staticmethod
def del_sessionio_cache(hid):
cache_dict = cache.get(Xcache.XCACHE_SESSIONIO_CACHE)
if cache_dict is None:
cache_dict = {hid: ''}
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': ''}
cache_dict[hid] = ''
cache.set(Xcache.XCACHE_SESSIONIO_CACHE, cache_dict)
return {'hid': hid, 'buffer': ''}
@staticmethod
def list_lazyloader():
re_key = "{}_*".format(Xcache.XCACHE_LAZYLOADER_CACHE)
keys = cache.keys(re_key)
reqs = []
for key in keys:
req = cache.get(key)
reqs.append(req)
return reqs
@staticmethod
def get_lazyloader_by_uuid(loader_uuid):
key = f"{Xcache.XCACHE_LAZYLOADER_CACHE}_{loader_uuid}"
data = cache.get(key)
return data
@staticmethod
def set_lazyloader_by_uuid(loader_uuid, data):
key = f"{Xcache.XCACHE_LAZYLOADER_CACHE}_{loader_uuid}"
cache.set(key, data, None)
return True
@staticmethod
def del_lazyloader_by_uuid(loader_uuid):
key = f"{Xcache.XCACHE_LAZYLOADER_CACHE}_{loader_uuid}"
cache.delete(key)
return True
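# Minimal usage sketch (assumes a Django cache backend that also offers a
# keys() API, e.g. django-redis, since the methods above rely on it):
#
#   Xcache.init_xcache_on_start()
#   Xcache.create_module_task({"uuid": "task-1", "job_id": None})
#   task = Xcache.get_module_task_by_uuid_nowait("task-1")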
|
import py
import sys, os, gc
from rpython.translator.c.test import test_newgc
from rpython.translator.translator import TranslationContext
from rpython.translator.c.genc import CStandaloneBuilder
from rpython.annotator.listdef import s_list_of_strings
from rpython.conftest import option
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.platform import platform as compiler
from rpython.rlib.rarithmetic import is_emulated_long
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rlib.entrypoint import entrypoint_highlevel, secondary_entrypoints
from rpython.rtyper.lltypesystem.lloperation import llop
_MSVC = compiler.name == "msvc"
_MINGW = compiler.name == "mingw32"
_WIN32 = _MSVC or _MINGW
_WIN64 = _WIN32 and is_emulated_long
# XXX get rid of 'is_emulated_long' and have a real config here.
class AbstractTestAsmGCRoot:
# the asmgcroot gc transformer doesn't generate gc_reload_possibly_moved
# instructions:
should_be_moving = False
@classmethod
def make_config(cls):
if _MSVC:
py.test.skip("all asmgcroot tests disabled for MSVC")
from rpython.config.translationoption import get_combined_translation_config
config = get_combined_translation_config(translating=True)
config.translation.gc = cls.gcpolicy
config.translation.gcrootfinder = "asmgcc"
config.translation.taggedpointers = getattr(cls, "taggedpointers", False)
return config
@classmethod
def _makefunc_str_int(cls, func):
def main(argv):
arg0 = argv[1]
arg1 = int(argv[2])
try:
res = func(arg0, arg1)
except MemoryError:
print 'Result: MemoryError'
else:
print 'Result: "%s"' % (res,)
return 0
config = cls.make_config()
t = TranslationContext(config=config)
a = t.buildannotator()
sec_ep = getattr(cls, 'secondary_entrypoints', [])
for f, inputtypes in sec_ep:
a.build_types(f, inputtypes, False)
a.build_types(main, [s_list_of_strings])
t.buildrtyper().specialize()
t.checkgraphs()
cbuilder = CStandaloneBuilder(t, main, config=config,
secondary_entrypoints=sec_ep)
c_source_filename = cbuilder.generate_source(
defines = cbuilder.DEBUG_DEFINES)
cls._patch_makefile(cbuilder.targetdir)
if option.view:
t.view()
exe_name = cbuilder.compile()
def run(arg0, arg1, runner=None):
if runner is not None:
py.test.skip("unsupported test: runner=%r" % (runner,))
lines = []
print >> sys.stderr, 'RUN: starting', exe_name, arg0, arg1
if sys.platform == 'win32':
redirect = ' 2> NUL'
else:
redirect = ''
if config.translation.shared and os.name == 'posix':
library_path = exe_name.dirpath()
if sys.platform == 'darwin':
env = 'DYLD_LIBRARY_PATH="%s" ' % library_path
else:
env = 'LD_LIBRARY_PATH="%s" ' % library_path
else:
env = ''
cwd = os.getcwd()
try:
os.chdir(str(exe_name.dirpath()))
g = os.popen(
'%s"%s" %s %d%s' % (env, exe_name, arg0, arg1, redirect), 'r')
finally:
os.chdir(cwd)
for line in g:
print >> sys.stderr, 'RUN:', line.rstrip()
lines.append(line)
g.close()
if not lines:
py.test.fail("no output from subprocess")
if not lines[-1].startswith('Result:'):
py.test.fail("unexpected output from subprocess")
result = lines[-1][len('Result:'):].strip()
if result == 'MemoryError':
raise MemoryError("subprocess got an RPython MemoryError")
if result.startswith('"') and result.endswith('"'):
return result[1:-1]
else:
return int(result)
return run
@classmethod
def _patch_makefile(cls, targetdir):
# for testing, patch the Makefile to add the -r option to
# trackgcroot.py.
makefile = targetdir.join('Makefile')
f = makefile.open()
lines = f.readlines()
f.close()
found = False
for i in range(len(lines)):
if 'trackgcroot.py' in lines[i]:
lines[i] = lines[i].replace('trackgcroot.py',
'trackgcroot.py -r')
found = True
assert found
f = makefile.open('w')
f.writelines(lines)
f.close()
if sys.platform == 'win32':
def test_callback_with_collect(self):
py.test.skip("No libffi yet with mingw32")
def define_callback_with_collect(cls):
return lambda: 0
class TestAsmGCRootWithSemiSpaceGC(AbstractTestAsmGCRoot,
test_newgc.TestSemiSpaceGC):
# for the individual tests see
# ====> ../../test/test_newgc.py
secondary_entrypoints = []
def define_large_function(cls):
class A(object):
def __init__(self):
self.x = 0
d = dict(A=A)
exec ("def g(a):\n" +
" a.x += 1\n" * 1000 +
" return A()\n"
) in d
g = d['g']
def f():
a = A()
g(a)
return a.x
return f
def test_large_function(self):
res = self.run('large_function')
assert res == 1000
def define_callback_simple(cls):
c_source = py.code.Source("""
int mystuff(int(*cb)(int, int))
{
return cb(40, 2) + cb(3, 4);
}
""")
eci = ExternalCompilationInfo(separate_module_sources=[c_source])
S = lltype.GcStruct('S', ('x', lltype.Signed))
CALLBACK = lltype.FuncType([lltype.Signed, lltype.Signed],
lltype.Signed)
z = rffi.llexternal('mystuff', [lltype.Ptr(CALLBACK)], lltype.Signed,
compilation_info=eci)
def mycallback(a, b):
gc.collect()
return a + b
def f():
p = lltype.malloc(S)
p.x = 100
result = z(mycallback)
return result * p.x
return f
def test_callback_simple(self):
res = self.run('callback_simple')
assert res == 4900
def define_secondary_entrypoint_callback(cls):
# XXX this is baaaad, cleanup global state
try:
del secondary_entrypoints["x42"]
except KeyError:
pass
@entrypoint_highlevel("x42", [lltype.Signed, lltype.Signed],
c_name='callback')
def mycallback(a, b):
gc.collect()
return a + b
c_source = py.code.Source("""
int mystuff2()
{
return callback(40, 2) + callback(3, 4);
}
""")
eci = ExternalCompilationInfo(separate_module_sources=[c_source])
z = rffi.llexternal('mystuff2', [], lltype.Signed,
compilation_info=eci)
S = lltype.GcStruct('S', ('x', lltype.Signed))
cls.secondary_entrypoints = secondary_entrypoints["x42"]
def f():
p = lltype.malloc(S)
p.x = 100
result = z()
return result * p.x
return f
def test_secondary_entrypoint_callback(self):
res = self.run('secondary_entrypoint_callback')
assert res == 4900
class TestAsmGCRootWithSemiSpaceGC_Mingw32(TestAsmGCRootWithSemiSpaceGC):
# for the individual tests see
# ====> ../../test/test_newgc.py
@classmethod
def setup_class(cls):
if sys.platform != 'win32':
py.test.skip("mingw32 specific test")
if not ('mingw' in os.popen('gcc --version').read() and
'GNU' in os.popen('make --version').read()):
py.test.skip("mingw32 and MSYS are required for this test")
test_newgc.TestSemiSpaceGC.setup_class.im_func(cls)
@classmethod
def make_config(cls):
config = TestAsmGCRootWithSemiSpaceGC.make_config()
config.translation.cc = 'mingw32'
return config
def test_callback_with_collect(self):
py.test.skip("No libffi yet with mingw32")
def define_callback_with_collect(cls):
return lambda: 0
#class TestAsmGCRootWithSemiSpaceGC_Shared(TestAsmGCRootWithSemiSpaceGC):
# @classmethod
# def make_config(cls):
# config = TestAsmGCRootWithSemiSpaceGC.make_config()
# config.translation.shared = True
# return config
class TestAsmGCRootWithHybridTagged(AbstractTestAsmGCRoot,
test_newgc.TestHybridTaggedPointers):
pass
class TestAsmGCRootWithIncrementalMinimark(AbstractTestAsmGCRoot,
test_newgc.TestIncrementalMiniMarkGC):
pass
|
import tensorflow as tf
import numpy as np
from tensorflow import python_io as tpio
import os
def _int64_features(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_features(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
class VOC_TFRecords(object):
def __init__(self, output_file):
self.output_file = output_file
def __enter__(self):
if os.path.exists(self.output_file):
raise IOError("file %s exists"
% self.output_file)
else:
options = tpio.TFRecordOptions(tpio.TFRecordCompressionType.GZIP)
self.writer = tpio.TFRecordWriter(self.output_file, options=options)
return self
def __exit__(self, type, value, trace):
self.close()
def add_example(self, image, annotations):
data = VOC_TFRecords.example(image, annotations)
self.writer.write(data.SerializeToString())
def close(self):
if self.writer:
self.writer.close()
@classmethod
def example(cls, image, annotations):
image_raw = image.tostring()
annotations_raw = annotations.tostring()
_example = tf.train.Example(features=tf.train.Features(
feature={
'image_raw': _bytes_features(image_raw),
'annotations_raw': _bytes_features(annotations_raw)
}
))
return _example
@classmethod
def parse_function_maker(cls, shape_img, shape_anno):
def _parse_function(example_proto):
features = tf.parse_single_example(
example_proto,
features={
'image_raw': tf.FixedLenFeature([], tf.string),
'annotations_raw': tf.FixedLenFeature([], tf.string),
})
image = tf.decode_raw(features['image_raw'], tf.uint8)
image = tf.cast(image, tf.float32)
            max_val = tf.reduce_max(image)
            min_val = tf.reduce_min(image)
            image = tf.div(tf.subtract(image, min_val), tf.subtract(max_val, min_val))
image.set_shape(np.prod(shape_img))
image = tf.reshape(image, shape_img)
annotations = tf.decode_raw(features['annotations_raw'], tf.float32)
annotations.set_shape(np.prod(shape_anno))
return image, annotations
return _parse_function
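# Usage sketch (illustrative shapes; requires TF 1.x for tf.python_io):
#
#   import numpy as np
#   with VOC_TFRecords("train.tfrecords.gz") as rec:
#       img = np.zeros((300, 300, 3), dtype=np.uint8)
#       anno = np.zeros((5,), dtype=np.float32)
#       rec.add_example(img, anno)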
|
import os
import logging
log = logging.getLogger("APTS.emit_changes")
def __main__(*args):
"""Emit file change on task completed even no one cares
Subscribers would lookup for changes and copy files to local drive if
any.
(NOTE) Post task script will not run if task has error.
Args:
(DeadlineRepository.Plugins): Plugin object
(str): Script type name, e.g. "post task"
"""
# (TODO) Where should I emit to ?
pass
|
from datetime import datetime
from background_worker.task.task import Task
class RemoveTask(Task):
    def __init__(self, block_id_to_remove=None, datetime_to_run=None, client=None, dic=None):
if client:
self.notion_client = client
if dic:
super().__init__(type="RemoveTask", dic = dic)
elif block_id_to_remove and datetime_to_run:
super().__init__(type="RemoveTask", task_id=block_id_to_remove, datetime_to_run=datetime_to_run)
self.block_id_to_remove = block_id_to_remove
    def __str__(self):
        return "Task of type {0} at this time: {1}".format(self.type, self.datetime_to_run)
def run_task(self):
print("Removing block with id {0}".format(self.block_id_to_remove))
block = self.notion_client.get_block(self.block_id_to_remove)
block.remove()
print("Block removed succesfully!")
def _from_dic(self, dic):
self.type = dic['type']
self.task_id = dic['task_id']
self.datetime_to_run = datetime.strptime(dic['datetime_to_run'], "%Y-%m-%d %H:%M:%S")
self.block_id_to_remove = dic['block_id_to_remove']
def to_dic(self):
dic = {
"type": self.type,
"task_id": self.task_id,
"block_id_to_remove": self.block_id_to_remove,
"datetime_to_run": str(self.datetime_to_run)
}
self.task_dictionary = dic
return self.task_dictionary
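# Usage sketch (assumes a notion-py style client exposing get_block()):
#
#   task = RemoveTask(block_id_to_remove="abc-123",
#                     datetime_to_run=datetime(2021, 1, 1, 12, 0),
#                     client=notion_client)  # 'notion_client' is illustrative
#   task.run_task()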
|
from setuptools import setup
setup(
name='amptrac',
version='0.1',
url='https://github.com/twisted-infra/amptrac',
description="Client for twisted's amp interface to trac",
license='MIT',
author='Tom Prince',
author_email='tom.prince@ualberta.net',
packages=['amptrac', 'amptrac.test'],
install_requires=[
'twisted >= 13.0.0',
'treq',
],
zip_safe=False,
)
|
import torch
from gensim.models import FastText
from dataset import AuxTables
from .featurizer import Featurizer
class LangModelFeaturizer(Featurizer):
def specific_setup(self):
self.name = 'LangModelFeaturizer'
self.emb_size = 10
self.all_attrs = self.ds.get_attributes()
self.attrs_number = len(self.all_attrs)
self.attr_language_model = {}
raw_data = self.ds.get_raw_data()
for attr in self.all_attrs:
attr_corpus = list(zip(raw_data[attr].tolist()))
model = FastText(attr_corpus, min_count=1, size=self.emb_size)
self.attr_language_model[attr] = model
def gen_feat_tensor(self, input, classes):
vid = int(input[0])
attribute = input[1]
domain = input[2].split('|||')
attr_idx = self.ds.attr_to_idx[attribute]
model = self.attr_language_model[attribute]
tensor = torch.zeros(1, classes, self.attrs_number*self.emb_size)
for idx, val in enumerate(domain):
emb_val = model.wv[val]
start = attr_idx*self.emb_size
end = start+self.emb_size
tensor[0][idx][start:end] = torch.tensor(emb_val)
return tensor
def create_tensor(self):
query = 'SELECT _vid_, attribute, domain FROM %s ORDER BY _vid_' % AuxTables.cell_domain.name
results = self.ds.engine.execute_query(query)
tensors = [self.gen_feat_tensor(res, self.classes) for res in results]
combined = torch.cat(tensors)
return combined
def feature_names(self):
return ["{}_emb_{}".format(attr, emb_idx) for attr in self.all_attrs for emb_idx in range(self.emb_size)]
|
from django.template import RequestContext, loader
from django.shortcuts import get_object_or_404, render_to_response, redirect
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse, Http404
from apps.hypervisor.models import Hypervisor
from apps.storagepool.models import StoragePool
from apps.instance.models import Instance
from apps.network.models import Network
from apps.installationdisk.models import InstallationDisk
from apps.hypervisor.forms import HypervisorForm
from apps.shared.forms import SizeForm
from apps.shared.models import Size
from django.contrib import messages
import persistent_messages
import simplejson
@staff_member_required
def view(request, pk):
instance = get_object_or_404(Hypervisor, pk=pk)
storagepools = StoragePool.objects.filter(hypervisor=instance)
total_storagepool_allocated = 0
for i in storagepools: total_storagepool_allocated += i.allocated
instances = Instance.objects.filter(volume__storagepool__hypervisor=instance)
instances_online = 0
instances_offline = 0
for i in instances:
if i.status == 1: instances_online += 1
else: instances_offline += 1
allocated_memory = 0
allocated_vcpus = 0
for i in instances:
allocated_memory += i.memory.size
allocated_vcpus += i.vcpu
maximum_memory_form = SizeForm(prefix="memory")
    maximum_hdd_form = SizeForm(prefix="hdd")  # distinct prefix; "memory" here looked like a copy-paste slip
size_array = []
for i in Size.objects.all():
size_array.append({'value': i.id, 'text': i.name})
installation_disks = InstallationDisk.objects.filter(hypervisor=instance).count()
networks = Network.objects.filter(hypervisor=instance).count()
return render_to_response('hypervisor/view.html',
{
'instance': instance,
'instances': instances,
'networks': networks,
'installation_disks': installation_disks,
'instances_online': instances_online,
'instances_offline': instances_offline,
'storagepools': storagepools,
'total_storagepool_allocated': total_storagepool_allocated,
'allocated_memory': allocated_memory,
'allocated_vcpus': allocated_vcpus,
'maximum_memory_form': maximum_memory_form,
'maximum_hdd_form': maximum_hdd_form,
'size_array': simplejson.dumps(size_array),
},
context_instance=RequestContext(request))
@staff_member_required
def index(request):
hypervisors = Hypervisor.objects.all()
return render_to_response('hypervisor/index.html', {
'hypervisors': hypervisors,
},
context_instance=RequestContext(request))
@staff_member_required
def initalize(request, pk):
instance = get_object_or_404(Hypervisor, pk=pk)
instance.status = 'IN'
instance.save()
return redirect('/hypervisor/%d/' % (instance.pk))
@staff_member_required
def add(request):
form = HypervisorForm()
if request.method == "POST":
form = HypervisorForm(request.POST)
if form.is_valid():
(hypervisor, created) = Hypervisor.objects.get_or_create(
name=form.cleaned_data['name'],
location=form.cleaned_data['location'],
address=form.cleaned_data['address'],
timeout=form.cleaned_data['timeout'],
libvirt_port=form.cleaned_data['libvirt_port'],
node_port=form.cleaned_data['node_port'],
install_medium_path=form.cleaned_data['install_medium_path'],
maximum_memory=form.cleaned_data['maximum_memory'],
maximum_vcpus=form.cleaned_data['maximum_vcpus'],
maximum_hdd=form.cleaned_data['maximum_hdd'],
)
if created: hypervisor.save()
return redirect('/hypervisor/')
return render_to_response('hypervisor/add.html', {
'form': form,
},
context_instance=RequestContext(request))
@staff_member_required
def edit(request):
if request.is_ajax() and request.method == 'POST':
json = request.POST
try:
hypervisor = Hypervisor.objects.get(pk=json['pk'])
orig_name = hypervisor.name
orig_value = None
if json['name'] == 'name':
orig_value = hypervisor.name
hypervisor.name = json['value']
elif json['name'] == 'status':
orig_value = hypervisor.status
hypervisor.status = json['value']
elif json['name'] == 'location':
orig_value = hypervisor.location
hypervisor.location = json['value']
elif json['name'] == 'address':
orig_value = hypervisor.address
hypervisor.address = json['value']
elif json['name'] == 'libvirt_port':
orig_value = hypervisor.libvirt_port
hypervisor.libvirt_port = json['value']
elif json['name'] == 'node_port':
orig_value = hypervisor.node_port
hypervisor.node_port = json['value']
elif json['name'] == 'maximum_hdd':
size = Size.objects.get(id=json['value'])
orig_value = hypervisor.maximum_hdd
hypervisor.maximum_hdd = size
elif json['name'] == 'maximum_memory':
size = Size.objects.get(id=json['value'])
orig_value = hypervisor.maximum_memory
hypervisor.maximum_memory = size
elif json['name'] == 'maximum_vcpus':
orig_value = hypervisor.maximum_vcpus
hypervisor.maximum_vcpus = json['value']
else:
raise Http404
hypervisor.save()
messages.add_message(request, persistent_messages.SUCCESS,
'Changed Hypervisor %s %s from %s to %s' % (orig_name, json['name'], orig_value, json['value']))
except Hypervisor.DoesNotExist:
raise Http404
return HttpResponse('{}', mimetype="application/json")
raise Http404
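# Example inline-edit payload handled by edit() above (illustrative,
# x-editable style): POST {'pk': 1, 'name': 'address', 'value': '10.0.0.2'}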
@staff_member_required
def start(request, pk):
hypervisor = get_object_or_404(Hypervisor, pk=pk)
hypervisor.start()
messages.add_message(request, persistent_messages.SUCCESS,
'Started Hypervisor %s' % (hypervisor))
return redirect('/hypervisor/')
@staff_member_required
def stop(request, pk):
hypervisor = get_object_or_404(Hypervisor, pk=pk)
hypervisor.stop()
messages.add_message(request, persistent_messages.SUCCESS,
'Stopped Hypervisor %s' % (hypervisor))
return redirect('/hypervisor/')
@staff_member_required
def delete(request, pk):
hypervisor = get_object_or_404(Hypervisor, pk=pk)
hypervisor.delete()
return redirect('/hypervisor/')
@staff_member_required
def update(request, pk):
hypervisor = get_object_or_404(Hypervisor, pk=pk)
conn = hypervisor.get_connection(True)
return redirect('/hypervisor/')
|
"""Model for the screens of protocol upload."""
import logging
from typing import Optional, Tuple
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from src.driver.highlight import highlight
logger = logging.getLogger(__name__)
"""Class for Protocol Upload landing page."""
class ProtocolUpload:
browse_our_protocol_library_button_locator: Tuple[str, str] = (
By.ID,
"UploadInput_protocolLibraryButton",
)
launch_protocol_designer_button_locator: Tuple[str, str] = (
By.ID,
"UploadInput_protocolDesignerButton",
)
    get_app: Tuple[str, str] = (By.XPATH, '//a[contains(@href,"#/more/app")]')
    click_robot: Tuple[str, str] = (By.XPATH, '//a[contains(@href,"#/robots")]')
file_upload_button_locator: Tuple[str, str] = (
By.XPATH,
"//button[text()='Choose File...']",
)
enable_developer_tool_toggle: Tuple[str, str] = (
By.XPATH,
"//p[text()='Enable Developer Tools']/following-sibling::button",
)
protocol: Tuple[str, str] = (By.XPATH, '//a[contains(@href,"#/protocol")]')
enable_pre_protocol_flow_without_rpc_toggle: Tuple[str, str] = (
By.XPATH,
"//p[text()='__DEV__ Pre Protocol Flow Without RPC']/following-sibling::button",
)
def __init__(self, driver: WebDriver) -> None:
"""Initialize with driver."""
self.driver: WebDriver = driver
@highlight
def get_file_upload_button(self) -> Optional[WebElement]:
"""Try to get the file_upload button.
        Returns None if it does not show up.
"""
try:
return WebDriverWait(self.driver, 2).until(
                EC.element_to_be_clickable(ProtocolUpload.file_upload_button_locator)  # wait on the locator tuple, not this method
)
except Exception: # pylint: disable=W0703
return None
def click_app_left_panel(self) -> None:
"""Linking to app link on the left panel."""
self.get_app_link().click()
@highlight
def get_app_link(self) -> WebElement:
"""Search for the app menu button."""
return WebDriverWait(self.driver, 2).until(
EC.element_to_be_clickable(ProtocolUpload.get_app)
)
@highlight
def get_enable_developer_tool_toggle(self) -> WebElement:
"""Locating the dev tool toggle button."""
toggle: WebElement = WebDriverWait(self.driver, 2).until(
EC.element_to_be_clickable(ProtocolUpload.enable_developer_tool_toggle)
)
actions = ActionChains(self.driver)
actions.move_to_element(toggle).perform()
return toggle
def click_enable_developer_toggle(self) -> None:
self.get_enable_developer_tool_toggle().click()
@highlight
def get_enable_protocol_flow_toggle(self) -> WebElement:
"""Locator for enable protocol flow toggle"""
toggle: WebElement = WebDriverWait(self.driver, 2).until(
EC.element_to_be_clickable(
ProtocolUpload.enable_pre_protocol_flow_without_rpc_toggle
)
)
actions = ActionChains(self.driver)
actions.move_to_element(toggle).perform()
return toggle
def click_enable_pur_feature(self) -> None:
self.get_enable_protocol_flow_toggle().click()
@highlight
def get_robot_page(self) -> WebElement:
"""Locator to get robot page."""
return WebDriverWait(self.driver, 100).until(
EC.element_to_be_clickable(ProtocolUpload.click_robot)
)
def goto_robots_page(self) -> None:
self.get_robot_page().click()
@highlight
def get_protocol_upload_button(self) -> WebElement:
"""Locator for file upload button."""
return WebDriverWait(self.driver, 100).until(
EC.element_to_be_clickable(ProtocolUpload.file_upload_button_locator)
)
def click_protocol_upload_link(self) -> None:
self.get_protocol_upload_button().click()
@highlight
def get_protocol_button(self) -> WebElement:
"""Locator for protocol button."""
return WebDriverWait(self.driver, 100).until(
EC.element_to_be_clickable(ProtocolUpload.protocol)
)
def click_protocol_left_menu(self) -> None:
self.get_protocol_button().click()
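# Usage sketch (assumes a running app under test and a configured chromedriver):
#
#   driver = WebDriver()
#   page = ProtocolUpload(driver)
#   page.click_app_left_panel()
#   page.click_enable_developer_toggle()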
|
from django.db import models
from rest_offlinesync.models import TrackedModel
class Document(TrackedModel):
user = models.ForeignKey('auth.User', to_field='username', on_delete=models.CASCADE)
title = models.CharField(max_length=128)
text = models.TextField(max_length=2048)
|
from livro import Livro
class Biblioteca:
def __init__(self):
self.__livros = []
@property
    def livros(self) -> "list[Livro]":
return self.__livros
def incluirLivro(self, livro: Livro) -> None:
if livro not in self.livros and isinstance(livro, Livro):
self.__livros.append(livro)
else:
            print('Failed to add the book to the Library!')
def excluirLivro(self, livro: Livro) -> None:
if livro in self.livros:
self.livros.remove(livro)
else:
            print('Failed to remove the book from the Library!')
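# Usage sketch (the Livro constructor signature is assumed for illustration):
#
#   biblioteca = Biblioteca()
#   livro = Livro("Dom Casmurro")
#   biblioteca.incluirLivro(livro)
#   biblioteca.excluirLivro(livro)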
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, include
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'PowerShellLexer', 'ShellSessionLexer']
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|)sh shell scripts.
.. versionadded:: 0.6
"""
name = 'Bash'
aliases = ['bash', 'sh', 'ksh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
(r'`', String.Backtick, 'backticks'),
include('data'),
include('interp'),
],
'interp': [
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
(r'\$#?(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)(\s*)\b',
bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)\s*\b(?!\.)',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<<', Operator), # here-string
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r'&&|\|\|', Operator),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
(r'"', String.Double, 'string'),
(r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'\d+(?= |\Z)', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'<', Text),
],
'string': [
(r'"', String.Double, '#pop'),
(r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
include('interp'),
],
'curly': [
(r'\}', String.Interpol, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$\\]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+#\d+', Number),
(r'\d+#(?! )', Number),
(r'\d+', Number),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
def analyse_text(text):
if shebang_matches(text, r'(ba|z|)sh'):
return 1
if text.startswith('$ '):
return 0.2
class BashSessionLexer(Lexer):
"""
Lexer for simplistic shell sessions.
.. versionadded:: 1.1
"""
name = 'Bash Session'
aliases = ['console']
filenames = ['*.sh-session']
mimetypes = ['application/x-shell-session']
def get_tokens_unprocessed(self, text):
bashlexer = BashLexer(**self.options)
pos = 0
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = re.match(r'^((?:\(\S+\))?(?:|sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)'
r'?|\[\S+[@:][^\n]+\].+)[$#%])(.*\n?)' , line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
elif line.startswith('>'):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:1])]))
curcode += line[1:]
else:
if insertions:
toks = bashlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
bashlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class ShellSessionLexer(Lexer):
"""
Lexer for shell sessions that works with different command prompts
.. versionadded:: 1.6
"""
name = 'Shell Session'
aliases = ['shell-session']
filenames = ['*.shell-session']
mimetypes = ['application/x-sh-session']
def get_tokens_unprocessed(self, text):
bashlexer = BashLexer(**self.options)
pos = 0
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = re.match(r'^((?:\[?\S+@[^$#%]+\]?\s*)[$#%])(.*\n?)', line)
if m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
else:
if insertions:
toks = bashlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
bashlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
.. versionadded:: 0.7
"""
name = 'Batchfile'
aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
# Lines can start with @ to prevent echo
(r'^\s*@', Punctuation),
(r'^(\s*)(rem\s.*)$', bygroups(Text, Comment)),
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
# If made more specific, make sure you still allow expansions
# like %~$VAR:zlt
(r'%%?[~$:\w]+%?', Name.Variable),
(r'::.*', Comment), # Technically :: only works at BOL
(r'\b(set)(\s+)(\w+)', bygroups(Keyword, Text, Name.Variable)),
(r'\b(call)(\s+)(:\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(goto)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
(r'\b(set|call|echo|on|off|endlocal|for|do|goto|if|pause|'
r'setlocal|shift|errorlevel|exist|defined|cmdextversion|'
r'errorlevel|else|cd|md|del|deltree|cls|choice)\b', Keyword),
(r'\b(equ|neq|lss|leq|gtr|geq)\b', Operator),
include('basic'),
(r'.', Text),
],
'echo': [
# Escapes only valid within echo args?
(r'\^\^|\^<|\^>|\^\|', String.Escape),
(r'\n', Text, '#pop'),
include('basic'),
(r'[^\'"^]+', Text),
],
'basic': [
(r'".*?"', String.Double),
(r"'.*?'", String.Single),
(r'`.*?`', String.Backtick),
(r'-?\d+', Number),
(r',', Punctuation),
(r'=', Operator),
(r'/\S+', Name),
(r':\w+', Name.Label),
(r'\w:\w+', Text),
(r'([<>|])(\s*)(\w+)', bygroups(Punctuation, Text, Name)),
],
}
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
.. versionadded:: 0.10
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
tokens = {
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|endif|else|while|then|foreach|case|default|'
r'continue|goto|breaksw|end|switch|endsw)\s*\b',
Keyword),
(r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
r'source|stop|suspend|source|suspend|telltc|time|'
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
(r'\}', Keyword, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
.. versionadded:: 1.5
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1','*.psm1']
mimetypes = ['text/x-powershell']
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
keywords = (
'while validateset validaterange validatepattern validatelength '
'validatecount until trap switch return ref process param parameter in '
'if global: function foreach for finally filter end elseif else '
'dynamicparam do default continue cmdletbinding break begin alias \\? '
'% #script #private #local #global mandatory parametersetname position '
'valuefrompipeline valuefrompipelinebypropertyname '
'valuefromremainingarguments helpmessage try catch throw').split()
operators = (
'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
'lt match ne not notcontains notlike notmatch or regex replace '
'wildcard').split()
verbs = (
'write where wait use update unregister undo trace test tee take '
'suspend stop start split sort skip show set send select scroll resume '
'restore restart resolve resize reset rename remove register receive '
'read push pop ping out new move measure limit join invoke import '
'group get format foreach export expand exit enter enable disconnect '
'disable debug cxnew copy convertto convertfrom convert connect '
'complete compare clear checkpoint aggregate add').split()
commenthelp = (
'component description example externalhelp forwardhelpcategory '
'forwardhelptargetname functionality inputs link '
'notes outputs parameter remotehelprunspace role synopsis').split()
tokens = {
'root': [
# we need to count pairs of parentheses for correct highlight
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
(r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
            (r'(<|&lt;)#', Comment.Multiline, 'multline'),
(r'@"\n', String.Heredoc, 'heredoc-double'),
(r"@'\n.*?\n'@", String.Heredoc),
# escaped syntax
(r'`[\'"$@-]', Punctuation),
(r'"', String.Double, 'string'),
(r"'([^']|'')*'", String.Single),
(r'(\$|@@|@)((global|script|private|env):)?\w+',
Name.Variable),
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
(r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
include('root'),
],
'multline': [
(r'[^#&.]+', Comment.Multiline),
            (r'#(>|&gt;)', Comment.Multiline, '#pop'),
(r'\.(%s)' % '|'.join(commenthelp), String.Doc),
(r'[#&.]', Comment.Multiline),
],
'string': [
(r"`[0abfnrtv'\"$`]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
(r'[`$]', String.Double),
(r'"', String.Double, '#pop'),
],
'heredoc-double': [
(r'\n"@', String.Heredoc, '#pop'),
(r'\$\(', Punctuation, 'child'),
(r'[^@\n]+"]', String.Heredoc),
(r".", String.Heredoc),
]
}
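# A minimal usage sketch of the lexers above (assumes the standard pygments API;
# this demo block is not part of the original module):
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    sample = 'Get-ChildItem | Where-Object { $_.Length -gt 10kb }'
    print(highlight(sample, PowerShellLexer(), TerminalFormatter()))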
|
from sqlglot import expressions as exp
from sqlglot.optimizer.normalize import normalized
from sqlglot.optimizer.scope import traverse_scope
from sqlglot.optimizer.simplify import simplify
def pushdown_predicates(expression):
"""
Rewrite sqlglot AST to pushdown predicates in FROMS and JOINS
Example:
>>> import sqlglot
>>> sql = "SELECT * FROM (SELECT * FROM x AS x) AS y WHERE y.a = 1"
>>> expression = sqlglot.parse_one(sql)
>>> pushdown_predicates(expression).sql()
'SELECT * FROM (SELECT * FROM x AS x WHERE y.a = 1) AS y WHERE TRUE'
Args:
expression (sqlglot.Expression): expression to optimize
Returns:
sqlglot.Expression: optimized expression
"""
expression = simplify(expression)
for scope in reversed(traverse_scope(expression)):
select = scope.expression
where = select.args.get("where")
if where:
pushdown(where.this, scope.selected_sources)
# joins should only pushdown into itself, not to other joins
# so we limit the selected sources to only itself
for join in select.args.get("joins") or []:
name = join.this.alias_or_name
pushdown(join.args.get("on"), {name: scope.selected_sources[name]})
return expression
def pushdown(condition, sources):
if not condition:
return
condition = condition.unnest()
cnf_like = normalized(condition) or not normalized(condition, dnf=True)
predicates = list(
condition.flatten()
if isinstance(condition, exp.And if cnf_like else exp.Or)
else [condition]
)
if cnf_like:
pushdown_cnf(predicates, sources)
else:
pushdown_dnf(predicates, sources)
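# Routing illustration (comment-only sketch): a conjunction such as
#   (a.x = 1) AND (b.y = 2)
# is CNF-like, so each conjunct can be pushed down independently by pushdown_cnf,
# while a disjunction such as
#   (a.x = 1) OR (b.y = 2)
# is DNF-like and goes through pushdown_dnf, which may only move predicates that
# appear in every OR branch.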
def pushdown_cnf(predicates, scope):
"""
If the predicates are in CNF like form, we can simply replace each block in the parent.
"""
for predicate in predicates:
for node in nodes_for_predicate(predicate, scope).values():
if isinstance(node, exp.Join):
predicate.replace(exp.TRUE)
node.on(predicate, copy=False)
break
if isinstance(node, exp.Select):
predicate.replace(exp.TRUE)
node.where(replace_aliases(node, predicate), copy=False)
def pushdown_dnf(predicates, scope):
"""
If the predicates are in DNF form, we can only push down conditions that are in all blocks.
Additionally, we can't remove predicates from their original form.
"""
    # find all the tables that predicates can be pushed down to
    # these are tables that are referenced in all blocks of a DNF
    # (a.x AND b.x) OR (a.y AND c.y)
    # only table a can be pushed down
pushdown_tables = set()
for a in predicates:
a_tables = set(exp.column_table_names(a))
for b in predicates:
a_tables &= set(exp.column_table_names(b))
pushdown_tables.update(a_tables)
conditions = {}
    # for every pushdown table, find all related conditions in all predicates
    # combine them with ORs
    # (a.x AND a.y AND b.x) OR (a.z AND c.y) -> (a.x AND a.y) OR (a.z)
for table in sorted(pushdown_tables):
for predicate in predicates:
nodes = nodes_for_predicate(predicate, scope)
if table not in nodes:
continue
predicate_condition = None
for column in predicate.find_all(exp.Column):
if column.text("table") == table:
condition = column.find_ancestor(exp.Condition)
predicate_condition = (
exp.and_(predicate_condition, condition)
if predicate_condition
else condition
)
if predicate_condition:
conditions[table] = (
exp.or_(conditions[table], predicate_condition)
if table in conditions
else predicate_condition
)
for name, node in nodes.items():
if name not in conditions:
continue
predicate = conditions[name]
if isinstance(node, exp.Join):
node.on(predicate, copy=False)
elif isinstance(node, exp.Select):
node.where(replace_aliases(node, predicate), copy=False)
def nodes_for_predicate(predicate, sources):
nodes = {}
tables = exp.column_table_names(predicate)
where_condition = isinstance(
predicate.find_ancestor(exp.Join, exp.Where), exp.Where
)
for table in tables:
node, source = sources.get(table) or (None, None)
# if the predicate is in a where statement we can try to push it down
# we want to find the root join or from statement
if node and where_condition:
node = node.find_ancestor(exp.Join, exp.From)
        # a node can reference a CTE, which predicates should be pushed down into
if isinstance(node, exp.From) and not isinstance(source, exp.Table):
node = source.expression
if isinstance(node, exp.Join):
nodes[table] = node
elif isinstance(node, exp.Select) and len(tables) == 1:
if not node.args.get("group"):
nodes[table] = node
return nodes
def replace_aliases(source, predicate):
aliases = {}
for select in source.selects:
if isinstance(select, exp.Alias):
aliases[select.alias] = select.this
else:
aliases[select.name] = select
def _replace_alias(column):
# pylint: disable=cell-var-from-loop
if isinstance(column, exp.Column) and column.name in aliases:
return aliases[column.name]
return column
return predicate.transform(_replace_alias)
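# Example of replace_aliases (illustrative): for a subquery source like
# `SELECT a AS b FROM x`, a pushed-down predicate `b = 1` is rewritten to
# `a = 1`, since the alias `b` does not exist inside the subquery's scope.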
|
from glloader.lang.c.loader.egl import EGLCLoader
from glloader.lang.c.loader.gl import OpenGLCLoader
from glloader.lang.c.loader.glx import GLXCLoader
from glloader.lang.c.loader.wgl import WGLCLoader
from glloader.lang.c.generator import CGenerator
from glloader.lang.c.debug import CDebugGenerator
_specs = {
'egl': EGLCLoader,
'gl': OpenGLCLoader,
'glx': GLXCLoader,
'wgl': WGLCLoader
}
_generators = {
'c': CGenerator,
'c-debug': CDebugGenerator
}
def get_generator(name, spec):
gen = _generators.get(name)
loader = _specs.get(spec)
return gen, loader
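# Example (a minimal sketch using the registries above):
#
# >>> gen, loader = get_generator('c', 'gl')
# >>> gen is CGenerator, loader is OpenGLCLoader
# (True, True)
#
# Unknown names yield None for either value, since dict.get is used.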
|
"""
Contains names of DB tables
"""
T_NAME_AUTHORS = "authors"
"""`str` : `__tablename__` value for `BaseAuthor` table"""
T_NAME_EVENTS = "events"
"""`str` : `__tablename__` value for `BaseEvent` table"""
T_NAME_KEYS = "keys"
"""`str` : `__tablename__` value for `BaseKey` table"""
T_NAME_SETTINGS = "settings"
"""`str` : `__tablename__` value for `BaseSetting` table"""
T_NAME_SYLLABLES = "syllables"
"""`str` : `__tablename__` value for `BaseSyllable` table"""
T_NAME_TYPES = "types"
"""`str` : `__tablename__` value for `BaseType` table"""
T_NAME_WORDS = "words"
"""`str` : `__tablename__` value for `BaseWord` table"""
T_NAME_DEFINITIONS = "definitions"
"""`str` : `__tablename__` value for `BaseDefinition` table"""
T_NAME_WORD_SPELLS = "word_spells"
"""`str` : `__tablename__` value for `BaseWordSpell` table"""
T_NAME_WORD_SOURCES = "word_sources"
"""`str` : `__tablename__` value for `BaseWordSource` table"""
T_NAME_CONNECT_AUTHORS = "connect_authors"
"""`str` : `__tablename__` value for `t_connect_authors` table"""
T_NAME_CONNECT_WORDS = "connect_words"
"""`str` : `__tablename__` value for `t_connect_words` table"""
T_NAME_CONNECT_KEYS = "connect_keys"
"""`str` : `__tablename__` value for `t_connect_keys` table"""
|
# Tensorflow mandates these.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import functools
import tensorflow as tf
slim = tf.contrib.slim
def ShuffleNet(images, bottleneck_layer_size, is_training, reuse=False, shuffle=True, base_ch=144, groups=1):
with tf.variable_scope('Stage1'):
net = slim.conv2d(images, 24, [3, 3], 2)
net = slim.max_pool2d(net, [3, 3], 2, padding='SAME')
net = shuffle_stage(net, base_ch, 3, groups, is_training, 'Stage2')
net = shuffle_stage(net, base_ch*2, 7, groups, is_training, 'Stage3')
net = shuffle_stage(net, base_ch*4, 3, groups, is_training, 'Stage4')
with tf.variable_scope('Stage5'):
net = slim.dropout(net, 0.5, is_training=is_training)
net = slim.conv2d(net, bottleneck_layer_size, [1, 1], stride=1,
padding="SAME",
scope="conv_stage5")
net = slim.avg_pool2d(net, kernel_size=4, stride=1)
net = tf.reduce_mean(net, [1, 2], name="logits")
return net, None
def shuffle_stage(net, output, repeat, group, is_training, scope="Stage"):
with tf.variable_scope(scope):
net = shuffle_bottleneck(net, output, 2, is_training, group, scope='Unit{}'.format(0))
for i in range(repeat):
net = shuffle_bottleneck(net, output, 1, is_training, group, scope='Unit{}'.format(i+1))
return net
def shuffle_bottleneck(net, output, stride, is_training, group=1, scope="Unit"):
if stride != 1:
_b, _h, _w, _c = net.get_shape().as_list()
output = output - _c
with tf.variable_scope(scope):
if stride != 1:
net_skip = slim.avg_pool2d(net, [3, 3], stride, padding="SAME", scope="3x3AVGPool")
else:
net_skip = net
net = group_conv(net, output, 1, group, is_training, relu=True, scope="1x1ConvIn")
net = channel_shuffle(net, output, group, is_training, scope="ChannelShuffle")
with tf.variable_scope("3x3DXConv"):
depthwise_filter = tf.get_variable("depth_conv_w", [3, 3, output, 1],
initializer=tf.truncated_normal_initializer(stddev=0.01))
net = tf.nn.depthwise_conv2d(net, depthwise_filter, [1, stride, stride, 1], 'SAME', name="DWConv")
net = group_conv(net, output, 1, group, is_training, relu=True, scope="1x1ConvOut")
if stride != 1:
net = tf.concat([net, net_skip], axis=3)
else:
net = net + net_skip
net = tf.nn.relu(net)
return net
def group_conv(net, output, stride, group, is_training, relu=True, scope="GConv"):
num_channels_in_group = output//group
with tf.variable_scope(scope):
net = tf.split(net, group, axis=3, name="split")
for i in range(group):
net[i] = slim.conv2d(net[i], num_channels_in_group, [1, 1], stride=stride,
activation_fn=tf.nn.relu if relu else None,
normalizer_fn=slim.batch_norm,
normalizer_params={'is_training':is_training})
net = tf.concat(net, axis=3, name="concat")
return net
def channel_shuffle(net, output, group, is_training, scope="ChannelShuffle"):
num_channels_in_group = output//group
with tf.variable_scope(scope):
net = tf.split(net, output, axis=3, name="split")
chs = []
for i in range(group):
for j in range(num_channels_in_group):
chs.append(net[i + j * group])
net = tf.concat(chs, axis=3, name="concat")
return net
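# Illustration of the shuffle order above: with group=3 and output=6, the
# per-channel split [0, 1, 2, 3, 4, 5] is concatenated back as
# [0, 3, 1, 4, 2, 5], i.e. channels are interleaved across groups.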
def shufflenet_arg_scope(is_training=True,
                         weight_decay=0.00005,
                         regularize_depthwise=False):
    """Defines the default ShuffleNet arg scope.
    Args:
      is_training: Whether or not we're training the model.
      weight_decay: The weight decay to use for regularizing the model.
      regularize_depthwise: Whether or not apply regularization on depthwise.
    Returns:
      An `arg_scope` to use for the ShuffleNet model.
    """
batch_norm_params = {
'is_training': is_training,
'center': True,
'scale': True,
'fused': True,
'decay': 0.995,
'epsilon': 2e-5,
# force in-place updates of mean and variance estimates
'updates_collections': None,
        # Moving averages end up in the trainable variables collection
'variables_collections': [ tf.GraphKeys.TRAINABLE_VARIABLES ],
}
# Set weight_decay for weights in Conv and InvResBlock layers.
#weights_init = tf.truncated_normal_initializer(stddev=stddev)
weights_init = tf.contrib.layers.xavier_initializer(uniform=False)
regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
if regularize_depthwise:
depthwise_regularizer = regularizer
else:
depthwise_regularizer = None
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
weights_initializer=weights_init,
activation_fn=tf.nn.relu, normalizer_fn=slim.batch_norm): #tf.keras.layers.PReLU
with slim.arg_scope([slim.batch_norm], **batch_norm_params):
with slim.arg_scope([slim.conv2d], weights_regularizer=regularizer):
with slim.arg_scope([slim.separable_conv2d],
weights_regularizer=depthwise_regularizer) as sc:
return sc
def inference(images, bottleneck_layer_size=128, phase_train=False,
weight_decay=0.00005, reuse=False):
    '''Build a ShuffleNet graph for training or inference.
Args:
images: a tensor of shape [batch_size, height, width, channels].
bottleneck_layer_size: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
phase_train: Whether or not we're training the model.
weight_decay: The weight decay to use for regularizing the model.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
Returns:
net: a 2D Tensor with the logits (pre-softmax activations) if bottleneck_layer_size
is a non-zero integer, or the non-dropped-out input to the logits layer
if bottleneck_layer_size is 0 or None.
end_points: a dictionary from components of the network to the corresponding
activation.
Raises:
ValueError: Input rank is invalid.
'''
# pdb.set_trace()
arg_scope = shufflenet_arg_scope(is_training=phase_train, weight_decay=weight_decay)
with slim.arg_scope(arg_scope):
return ShuffleNet(images, bottleneck_layer_size=bottleneck_layer_size, is_training=phase_train, reuse=reuse)
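# Minimal usage sketch (assumes TensorFlow 1.x with tf.contrib.slim, as above;
# the 112x112 input size is an assumption, chosen so Stage5's 4x4 average pool fits):
if __name__ == '__main__':
    images = tf.placeholder(tf.float32, [None, 112, 112, 3], name='input')
    embeddings, _ = inference(images, bottleneck_layer_size=128, phase_train=False)
    print(embeddings)  # a [batch_size, 128] embedding tensor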
|
from abc import ABC
from selenium.webdriver import ActionChains
from selenium.webdriver.common.alert import Alert
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.webelement import WebElement
from ..drivers.driver_manager import DriverManager
from ..general import Log
from ..mixins.javascript import JSBrowserMixin
from ..mixins.wait import WaitBrowserMixin
from ..support.driver_aware import DriverAware
class Browser(DriverAware, ABC):
def __new__(cls, *args, **kwargs):
obj = super(Browser, cls).__new__(cls)
obj.__before_instance = list()
obj.__after_instance = list()
return obj
def execute_script(self, script: str, element: WebElement, *args):
return self.get_driver().execute_script(script, element, *args)
@property
def action_chains(self) -> ActionChains:
return ActionChains(self.get_driver())
def get_driver(self, wait_time: int = 0):
return DriverManager.get_session(self)
def add_before(self, func):
self.__before_instance.append(func)
def add_after(self, func):
self.__after_instance.append(func)
@property
def wait_for(self) -> WaitBrowserMixin:
return WaitBrowserMixin(self)
@property
def switch_to(self) -> SwitchTo:
return SwitchTo(self.get_driver())
@property
def alert(self) -> Alert:
return Alert(self.get_driver())
@property
def js(self) -> JSBrowserMixin:
return JSBrowserMixin(self.get_driver())
def get(self, url: str, extensions: list = ()):
Log.info("Opening %s url" % url)
if not self.get_driver():
for func in self.__before_instance:
func()
DriverManager.create_session(self, extensions)
self.get_driver().get(url)
def refresh(self):
Log.info("Refreshing the browser")
self.get_driver().refresh()
self.wait_for.page_is_loaded()
def current_url(self):
return self.get_driver().current_url
def delete_all_cookies(self):
self.get_driver().delete_all_cookies()
def window_handles(self):
return self.get_driver().window_handles
def close(self):
self.get_driver().close()
def quit(self):
if self.get_driver():
Log.info("Closing the browser")
try:
self.get_driver().quit()
except Exception:
pass
finally:
DriverManager.destroy_session(self)
for func in self.__after_instance:
func()
def get_browser_log(self):
Log.info("Getting browser log")
logs = self.get_driver().get_log('browser')
list_logs = list()
for log_entry in logs:
log_str = ''
for key in log_entry.keys():
log_str += "%s: %s, " % (key, log_entry[key])
list_logs.append(log_str)
return list_logs
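# Typical lifecycle sketch (illustrative; ChromeBrowser is a hypothetical concrete subclass):
#
# browser = ChromeBrowser()
# browser.get("https://example.com")  # lazily creates the driver session and runs before-hooks
# browser.refresh()
# print(browser.current_url())
# browser.quit()                      # destroys the session and runs after-hooks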
|
# Generated by Django 3.0.4 on 2020-03-24 06:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('customers', '0002_auto_20200324_0620'),
('invoices', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='invoice',
name='customerid',
field=models.ForeignKey(blank=True, db_column='customerid', null=True, on_delete=django.db.models.deletion.SET_NULL, to='customers.Customer'),
),
]
|
import hashlib
import json
import re
from typing import List, Union, Optional
import aiohttp
from sentry_sdk.utils import Dsn
from . import Storage, AlreadyStoredError
from ..database_entry import DatabaseEntry
from ..exceptions import StorageError, ParserError
class SentryFrame:
"""
Represents a Sentry stack frame payload.
Mostly based on a mix of reading the Sentry SDK docs, GlitchTip example data and a lot of trial-and-error.
Implements the value object pattern.
"""
def __init__(
self, filename: str, function: str, package: str, lineno: Optional[int] = None
):
# all the attributes in stack frames are optional
# in case of NewPipe, we require filename and function and package
self.filename = filename
self.function = function
self.package = package
# line number is optional, as for builtins (java.*), we don't have any
self.lineno = lineno
def to_dict(self):
# GlitchTip doesn't care if optional data is set to null, so we don't even have to implement checks for that
rv = {
"filename": self.filename,
"function": self.function,
"package": self.package,
"lineno": self.lineno,
# for the sake of simplicity, we just say "every frame belongs to the app"
"in_app": True,
}
return rv
class SentryStacktrace:
"""
Represents a Sentry stacktrace payload.
Mostly based on a mix of reading the Sentry SDK docs, GlitchTip example data and a lot of trial-and-error.
Implements the value object pattern.
"""
def __init__(self, frames: List[SentryFrame]):
# the only mandatory element is the stack frames
# we don't require any register values
self.frames = frames
def to_dict(self):
return {
"frames": [f.to_dict() for f in self.frames],
}
class SentryException:
"""
Represents a Sentry exception payload.
Mostly based on a mix of reading the Sentry SDK docs, GlitchTip example data and a lot of trial-and-error.
Implements the value object pattern.
"""
def __init__(
self, type: str, value: str, module: str, stacktrace: SentryStacktrace
):
# these are mandatory per the format description
self.type = type
# value appears to be the exception's message
self.value = value
# the fields module, thread_id, mechanism and stacktrace are optional
# we send the java package name as module, and a parsed stacktrace via stacktrace
self.module = module
self.stacktrace = stacktrace
def to_json(self) -> dict:
# format description: https://develop.sentry.dev/sdk/event-payloads/exception/
return {
"type": self.type,
"value": self.value,
"stacktrace": self.stacktrace.to_dict(),
}
class SentryPayload:
"""
Represents a Sentry event payload, sent to the GlitchTip instance.
Mostly based on a mix of reading the Sentry SDK docs, GlitchTip example data and a lot of trial-and-error.
This class doesn't strictly implement the value object, as some attributes are optional and can and shall be
mutated by the caller. The list of attributes initialized below, however, is constant.
"""
def __init__(
self,
event_id: str,
timestamp: Union[str, int],
message: str,
exception: SentryException,
):
# as we calculate hashes anyway for the directory storage, we probably should just use those as IDs here, too
# this allows cross-referencing events in both storage implementations, which might be important for re-imports
# of the database
# first, try to make sure we receive an actual SHA256 hash
assert len(event_id) == 64
# this is supposed to be a UUID4 (i.e., random) identifier, hence the limit to 32 characters (without dashes)
# however, those are SHA256 hashes, which means their hex digests have a length of 64 characters
# therefore, we derive a 32-character size MD5 hash from the SHA256 one
self.event_id = hashlib.md5(event_id.encode()).hexdigest()
assert len(self.event_id) == 32
# this could either be implemented as a RFC3339 string, or some numeric UNIX epoch style timestamp
self.timestamp = timestamp
# will be used as the value for the "formatted" key in the message interface
self.message = message
        # the parsed exception payload (a SentryException instance)
self.exception = exception
# these are optional attributes according to the format description
        # IIRC, we had to explicitly set these to null in order to keep Sentry from guesstimating their values
# some of the values may be populated by users after initializing the object
self.extra = {
"user_comment": None,
"request": None,
"user_action": None,
}
self.tags = {
"os": None,
"service": None,
"content_language": None,
"content_country": None,
"app_language": None,
}
self.release: Optional[str] = None
@staticmethod
def _render_sdk():
# format description: https://develop.sentry.dev/sdk/event-payloads/sdk/
return {
"name": "newpipe.crashreportimporter",
# we don't really care at all about the version, but it's supposed to be semver
"version": "0.0.1",
}
def _render_exceptions(self):
return {"values": [self.exception.to_json()]}
def to_dict(self) -> dict:
# the Sentry API requires the keys event_id, timestamp and platform to be set
# optional keys we want to use for some additional convenience are release, tags, and extra
# future versions might use fingerprint as well to help with the deduplication of the events
rv = {
"event_id": self.event_id,
"timestamp": self.timestamp,
# setting the right platform apparently enables some convenience functionality in Sentry
# Java seems the most suitable for Android stuff
"platform": "java",
# doesn't seem to be contained in any of the examples in glitchtip-backend/events/test_data any more
# but still works, apparently (and is required by GlitchTip)
"message": self.message,
# Sentry apparently now allows for more than one exception to be passed (i.e., when an exception is
# caused by another exception)
# GlitchTip seems to support that, too, looking at their example data
# therefore, the singular is not really appropriate and misleading
"exception": self._render_exceptions(),
"extra": self.extra,
"tags": self.tags,
            # sending None/null in this case won't cause any issues, so we can be lazy here
"release": self.release,
# for some annoying reason, GlitchTip insists on us specifying an SDK
"sdk": self._render_sdk(),
# we only report errors to GlitchTip (it's also the default value)
"level": "error",
}
return rv
class GlitchtipError(Exception):
def __init__(self, status: int, text: str):
self.status = status
self.text = text
def __str__(self):
# default values, which we'll try to specify depending on the status code below
description = self.text
message = "Request failed for unknown reason"
def extract_detail_from_text():
json_data = json.loads(self.text)
return json_data["detail"]
# a code of 400 usually means our request couldn't be parsed, and GlitchTip should send some JSON that contains
# details about it
if self.status == 400:
message = "Request could not be processed"
description = extract_detail_from_text()
# according to the GlitchTip source code (and some Sentry docs), 401 will be returned when the token
# authentication didn't work
if self.status == 401:
message = "Authentication failed"
if self.status == 403:
message = "Call not allowed"
description = extract_detail_from_text()
return f"{message} (status {self.status}): {description}"
class GlitchtipStorage(Storage):
"""
Used to store incoming mails on a GlitchTip server.
https://app.glitchtip.com/docs/
Remembers already sent mail reports by putting their hash IDs in a file
in the application's working directory.
"""
def __init__(self, dsn: str, package: str):
self.sentry_auth = Dsn(dsn).to_auth()
self.package = package
def make_sentry_payload(self, entry: DatabaseEntry):
newpipe_exc_info = entry.newpipe_exception_info
frames: List[SentryFrame] = []
try:
raw_data = "".join(newpipe_exc_info["exceptions"])
except KeyError:
raise StorageError("'exceptions' key missing in JSON body")
raw_frames = raw_data.replace("\n", " ").replace("\r", " ").split("\tat")
# pretty ugly, but that's what we receive from NewPipe
# both message and exception name are contained in the first item in the frames
message = raw_frames[0]
for raw_frame in raw_frames[1:]:
# some very basic sanitation, as e-mail clients all suck
raw_frame = raw_frame.strip()
# _very_ basic but gets the job done well enough
frame_match = re.search(r"(.+)\(([a-zA-Z0-9:.\s]+)\)", raw_frame)
if frame_match:
module_path = frame_match.group(1).split(".")
filename_and_lineno = frame_match.group(2)
if ":" in filename_and_lineno:
# "unknown source" is shown for lambda functions
filename_and_lineno_match = re.search(
r"(Unknown\s+Source|(?:[a-zA-Z]+\.(?:kt|java)+)):([0-9]+)",
filename_and_lineno,
)
if not filename_and_lineno_match:
raise ValueError(
f"could not find filename and line number in string {frame_match.group(2)}"
)
# we want just two matches, anything else would be an error in the regex
assert len(filename_and_lineno_match.groups()) == 2
frame = SentryFrame(
filename_and_lineno_match.group(1),
module_path[-1],
".".join(module_path[:-1]),
lineno=int(filename_and_lineno_match.group(2)),
)
frames.append(frame)
else:
# apparently a native exception, so we don't have a line number
frame = SentryFrame(
frame_match.group(2),
module_path[-1],
".".join(module_path[:-1]),
)
frames.append(frame)
else:
raise ParserError("Could not parse frame: '{}'".format(raw_frame))
try:
type = message.split(":")[0].split(".")[-1]
value = message.split(":")[1]
module = ".".join(message.split(":")[0].split(".")[:-1])
except IndexError:
type = value = module = "<none>"
timestamp = entry.date.timestamp()
# set up the payload, with all intermediary value objects
stacktrace = SentryStacktrace(frames)
exception = SentryException(type, value, module, stacktrace)
# TODO: support multiple exceptions to support "Caused by:"
payload = SentryPayload(entry.hash_id(), timestamp, message, exception)
# try to fill in as much optional data as possible
try:
# in Sentry, releases are now supposed to be unique organization wide
# in GlitchTip, however, they seem to be regarded as tags, so this should work well enough
payload.release = entry.newpipe_exception_info["version"]
except KeyError:
pass
for key in [
"user_comment",
"request",
"user_action",
"content_country",
"app_language",
]:
try:
payload.extra[key] = newpipe_exc_info[key]
except KeyError:
pass
for key in ["os", "service", "content_language"]:
try:
payload.tags[key] = newpipe_exc_info[key]
except KeyError:
pass
try:
package = newpipe_exc_info["package"]
except KeyError:
package = None
if package is not None:
if package != self.package:
raise ValueError("Package name not allowed: %s" % package)
else:
payload.tags["package"] = newpipe_exc_info["package"]
return payload
async def save(self, entry: DatabaseEntry):
exception = self.make_sentry_payload(entry)
data = exception.to_dict()
# we use Sentry SDK's auth helper object to calculate both the required auth header as well as the URL from the
# DSN string we already created a Dsn object for
url = self.sentry_auth.store_api_url
# it would be great if the Auth object just had a method to create/update a headers dict
headers = {
"X-Sentry-Auth": str(self.sentry_auth.to_header()),
# user agent isn't really necessary, but sentry-sdk sets it, too, so... why not
"User-Agent": "NewPipe Crash Report Importer",
# it's recommended by the Sentry docs to send a valid MIME type
"Content-Type": "application/json",
}
async with aiohttp.ClientSession() as session:
async with session.post(
url, data=json.dumps(data), headers=headers
) as response:
# pretty crude way to recognize this issue, but it works well enough
if response.status == 403:
if "An event with the same ID already exists" in (
await response.text()
):
raise AlreadyStoredError()
if response.status != 200:
raise GlitchtipError(response.status, await response.text())
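# Minimal usage sketch (assumes a valid GlitchTip DSN and an already-parsed DatabaseEntry):
#
# storage = GlitchtipStorage("https://<key>@glitchtip.example.com/1", "org.schabi.newpipe")
# await storage.save(entry)  # raises AlreadyStoredError for duplicate event IDs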
|
# -*- coding: utf-8 -*-
import requests
import json
from multiprocessing import Pool
import time
import os
def send(data, url, key):
print('send ', data)
try:
headers = {'Content-type': 'application/json', 'Accept': 'text/plain', 'X-PYTILT-KEY': key}
r = requests.post(url, data=json.dumps(data), headers=headers, timeout=15)
return r.status_code == 200
except requests.exceptions.Timeout:
print('Connection Timeout.')
return False
except requests.exceptions.RequestException:
return False
class Sender(object):
def __init__(self, endpoint, batch_size=1):
self.queue = []
self.sending = []
self.batch_size = batch_size
self.url = "{}/{}".format(os.environ.get('PYTILT_URL', None), endpoint)
self.key = os.environ.get('PYTILT_KEY', None)
# self.key = '1234'
# self.url = 'http://127.0.0.1:5000/{}'.format(endpoint)
print("sending to {} with key {}".format(self.url,self.key))
def add_data(self, data):
self.queue.append(data)
if len(self.queue) >= self.batch_size:
print('Reached max len, sending batch')
self.send()
def send(self):
pool = Pool(processes=1)
self.sending = list(self.queue)
self.queue = []
result = pool.apply_async(send, args=[self.sending, self.url, self.key], callback=self.completed)
pool.close()
pool.join()
def completed(self, was_sent):
if was_sent:
self.sending = []
print('send success.')
else:
print('send failed.')
if len(self.queue) > 100:
self.queue = []
self.queue += self.sending
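# Usage sketch (assumes PYTILT_URL and PYTILT_KEY are set in the environment;
# the 'temperature' endpoint name is hypothetical):
if __name__ == '__main__':
    sender = Sender('temperature', batch_size=2)
    sender.add_data({'value': 20.5})
    sender.add_data({'value': 21.0})  # reaching batch_size triggers send()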
|
import os
from .base import *
ALLOWED_HOSTS = ['*']
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += ['debug_toolbar', 'django_extensions']
MIDDLEWARE = (
MIDDLEWARE[:-1]
+ [
'debug_toolbar.middleware.DebugToolbarMiddleware',
'querycount.middleware.QueryCountMiddleware',
]
+ MIDDLEWARE[-1:]
)
CACHE_MIDDLEWARE_SECONDS = 1
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
]
if not DISABLE_CACHALOT:
DEBUG_TOOLBAR_PANELS.append('cachalot.panels.CachalotPanel')
DEBUG_TOOLBAR_PANELS += [
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
# debug toolbar
INTERNAL_IPS = ['127.0.0.1']
# disable most periodic celery tasks
CELERY_BEAT_SCHEDULE = {
'sync_interest_task': {
'task': 'logs.tasks.sync_interest_task',
'schedule': schedule(run_every=timedelta(days=1)),
},
'find_and_renew_first_due_cached_query_task': {
'task': 'recache.tasks.find_and_renew_first_due_cached_query_task',
'schedule': schedule(run_every=timedelta(seconds=300)),
'options': {'expires': 4},
},
'process_outstanding_import_batch_sync_logs_task': {
'task': 'logs.tasks.process_outstanding_import_batch_sync_logs_task',
'schedule': schedule(run_every=timedelta(minutes=7)),
'options': {'expires': 7 * 60},
},
}
QUERYCOUNT = {'DISPLAY_DUPLICATES': 5}
|
import json
from logging import getLogger
import boto3
from django.apps import apps
from django.conf import settings
from domain_events.handler import HANDLERS
logger = getLogger(__name__)
def process_sqs_messages(event):
apps.populate(settings.INSTALLED_APPS)
for record in event["Records"]:
if record["eventSource"] != "aws:sqs":
logger.info(
"Skipping message_id=%s because it is not coming from SQS",
record["messageId"],
)
continue
process_message(record)
def process_message(record):
message_id = record["messageId"]
receipt_handle = record["receiptHandle"]
message_attributes = record["messageAttributes"]
message_type = message_attributes["MessageType"]["stringValue"]
handler = HANDLERS.get(message_type, None)
if not handler:
logger.info(
"Received SQS message_id=%s message_type=%s but no handler accepts it",
message_id,
message_type,
)
return
data = json.loads(record["body"])
logger.info("Processing sqs message_type=%s", message_type)
handler(data)
sqs = boto3.client("sqs")
sqs.delete_message(QueueUrl=settings.SQS_QUEUE_URL, ReceiptHandle=receipt_handle)
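# A minimal AWS Lambda entry point sketch (the handler name is hypothetical; wire it
# up in the Lambda configuration, and ensure settings.SQS_QUEUE_URL is set):
def handler(event, context):
    process_sqs_messages(event)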
|
from base64 import b64encode
from app import app
import unittest
from mock import patch
import os
import json
from twython import Twython
class TestApp(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
os.environ['SERVICE_KEY'] = 'test-key'
os.environ['SERVICE_PASS'] = 'test-secret'
os.environ['APP_KEY'] = 'test-key'
os.environ['APP_SECRET'] = 'test-secret'
os.environ['OAUTH_TOKEN'] = 'test-oauth-token'
os.environ['OAUTH_TOKEN_SECRET'] = 'test-oauth-token-secret'
@patch('app.Twython.update_status')
def test_publish_tweet(self, update_status_mock):
update_status_mock.return_value = True
auth = (os.environ['SERVICE_KEY'] + ':' + os.environ['SERVICE_PASS']).encode('utf-8')
headers = {
'Authorization': 'Basic ' + b64encode(auth).decode()
}
rv = self.app.post('/api/v1/tweets',
data = json.dumps(dict(id = 3, message = 'test tweet', profileId = '1')),
content_type = 'application/json',
headers = headers)
self.assertEqual(rv.status_code, 200)
self.assertEqual(update_status_mock.call_count, 1)
update_status_mock.assert_called_once()
def test_404(self):
auth = (os.environ['SERVICE_KEY'] + ':' + os.environ['SERVICE_PASS']).encode('utf-8')
headers = {
'Authorization': 'Basic ' + b64encode(auth).decode()
}
rv = self.app.get('/i-am-not-found', headers=headers)
self.assertEqual(rv.status_code, 404)
if __name__ == '__main__':
unittest.main()
|
# flake8: noqa
from .segmentation import *
from .resnet_encoder import *
from .sequential import *
__all__ = ['UNet', 'ResNetUnet', 'LinkNet']
|
import pytest
from freezegun import freeze_time
from flask import current_app
from notifications_utils import SMS_CHAR_COUNT_LIMIT
import app
from app.dao import templates_dao
from app.models import SMS_TYPE, EMAIL_TYPE, LETTER_TYPE
from app.notifications.process_notifications import create_content_for_notification
from app.notifications.validators import (
check_content_char_count,
check_if_service_can_send_files_by_email,
check_notification_content_is_not_empty,
check_service_over_daily_message_limit,
check_template_is_for_notification_type,
check_template_is_active,
check_service_over_api_rate_limit,
check_service_email_reply_to_id,
check_service_sms_sender_id,
check_service_letter_contact_id,
check_reply_to,
service_can_send_to_recipient,
validate_and_format_recipient,
validate_template,
)
from app.utils import get_template_instance
from app.v2.errors import (
BadRequestError,
TooManyRequestsError,
RateLimitError)
from tests.conftest import set_config
from tests.app.db import (
create_api_key,
create_letter_contact,
create_notification,
create_reply_to_email,
create_service,
create_service_sms_sender,
create_service_whitelist,
create_template,
)
# all of these tests should have redis enabled (except where we specifically disable it)
@pytest.fixture(scope='module', autouse=True)
def enable_redis(notify_api):
with set_config(notify_api, 'REDIS_ENABLED', True):
yield
@pytest.mark.parametrize('key_type', ['test', 'team', 'normal'])
def test_check_service_message_limit_in_cache_with_unrestricted_service_is_allowed(
key_type,
sample_service,
mocker):
mocker.patch('app.notifications.validators.redis_store.get', return_value=1)
mocker.patch('app.notifications.validators.redis_store.set')
mocker.patch('app.notifications.validators.services_dao')
check_service_over_daily_message_limit(key_type, sample_service)
app.notifications.validators.redis_store.set.assert_not_called()
assert not app.notifications.validators.services_dao.mock_calls
@pytest.mark.parametrize('key_type', ['test', 'team', 'normal'])
def test_check_service_message_limit_in_cache_under_message_limit_passes(
key_type,
sample_service,
mocker):
mocker.patch('app.notifications.validators.redis_store.get', return_value=1)
mocker.patch('app.notifications.validators.redis_store.set')
mocker.patch('app.notifications.validators.services_dao')
check_service_over_daily_message_limit(key_type, sample_service)
app.notifications.validators.redis_store.set.assert_not_called()
assert not app.notifications.validators.services_dao.mock_calls
def test_should_not_interact_with_cache_for_test_key(sample_service, mocker):
mocker.patch('app.notifications.validators.redis_store')
check_service_over_daily_message_limit('test', sample_service)
assert not app.notifications.validators.redis_store.mock_calls
@pytest.mark.parametrize('key_type', ['team', 'normal'])
def test_should_set_cache_value_as_value_from_database_if_cache_not_set(
key_type,
sample_template,
sample_service,
mocker
):
with freeze_time("2016-01-01 12:00:00.000000"):
for x in range(5):
create_notification(sample_template)
mocker.patch('app.notifications.validators.redis_store.get', return_value=None)
mocker.patch('app.notifications.validators.redis_store.set')
check_service_over_daily_message_limit(key_type, sample_service)
app.notifications.validators.redis_store.set.assert_called_with(
str(sample_service.id) + "-2016-01-01-count", 5, ex=3600
)
def test_should_not_access_database_if_redis_disabled(notify_api, sample_service, mocker):
with set_config(notify_api, 'REDIS_ENABLED', False):
db_mock = mocker.patch('app.notifications.validators.services_dao')
check_service_over_daily_message_limit('normal', sample_service)
assert db_mock.method_calls == []
@pytest.mark.parametrize('key_type', ['team', 'normal'])
def test_check_service_message_limit_over_message_limit_fails(key_type, sample_service, mocker):
with freeze_time("2016-01-01 12:00:00.000000"):
mocker.patch('app.redis_store.get', return_value=None)
mocker.patch('app.notifications.validators.redis_store.set')
sample_service.restricted = True
sample_service.message_limit = 4
template = create_template(sample_service)
for x in range(5):
create_notification(template)
with pytest.raises(TooManyRequestsError) as e:
check_service_over_daily_message_limit(key_type, sample_service)
assert e.value.status_code == 429
assert e.value.message == 'Exceeded send limits (4) for today'
assert e.value.fields == []
app.notifications.validators.redis_store.set.assert_called_with(
str(sample_service.id) + "-2016-01-01-count", 5, ex=3600
)
@pytest.mark.parametrize('key_type', ['team', 'normal'])
def test_check_service_message_limit_in_cache_over_message_limit_fails(
notify_db_session,
key_type,
mocker):
with freeze_time("2016-01-01 12:00:00.000000"):
mocker.patch('app.redis_store.get', return_value=5)
mocker.patch('app.notifications.validators.redis_store.set')
mocker.patch('app.notifications.validators.services_dao')
service = create_service(restricted=True, message_limit=4)
with pytest.raises(TooManyRequestsError) as e:
check_service_over_daily_message_limit(key_type, service)
assert e.value.status_code == 429
assert e.value.message == 'Exceeded send limits (4) for today'
assert e.value.fields == []
app.notifications.validators.redis_store.set.assert_not_called()
assert not app.notifications.validators.services_dao.mock_calls
@pytest.mark.parametrize('template_type, notification_type',
[(EMAIL_TYPE, EMAIL_TYPE),
(SMS_TYPE, SMS_TYPE)])
def test_check_template_is_for_notification_type_pass(template_type, notification_type):
assert check_template_is_for_notification_type(notification_type=notification_type,
template_type=template_type) is None
@pytest.mark.parametrize('template_type, notification_type',
[(SMS_TYPE, EMAIL_TYPE),
(EMAIL_TYPE, SMS_TYPE)])
def test_check_template_is_for_notification_type_fails_when_template_type_does_not_match_notification_type(
template_type, notification_type):
with pytest.raises(BadRequestError) as e:
check_template_is_for_notification_type(notification_type=notification_type,
template_type=template_type)
assert e.value.status_code == 400
error_message = '{0} template is not suitable for {1} notification'.format(template_type, notification_type)
assert e.value.message == error_message
assert e.value.fields == [{'template': error_message}]
def test_check_template_is_active_passes(sample_template):
assert check_template_is_active(sample_template) is None
def test_check_template_is_active_fails(sample_template):
sample_template.archived = True
from app.dao.templates_dao import dao_update_template
dao_update_template(sample_template)
with pytest.raises(BadRequestError) as e:
check_template_is_active(sample_template)
assert e.value.status_code == 400
assert e.value.message == 'Template has been deleted'
assert e.value.fields == [{'template': 'Template has been deleted'}]
@pytest.mark.parametrize('key_type',
['test', 'normal'])
def test_service_can_send_to_recipient_passes(key_type, notify_db_session):
trial_mode_service = create_service(service_name='trial mode', restricted=True)
assert service_can_send_to_recipient(trial_mode_service.users[0].email_address,
key_type,
trial_mode_service) is None
assert service_can_send_to_recipient(trial_mode_service.users[0].mobile_number,
key_type,
trial_mode_service) is None
@pytest.mark.parametrize('key_type',
['test', 'normal'])
def test_service_can_send_to_recipient_passes_for_live_service_non_team_member(key_type, sample_service):
assert service_can_send_to_recipient("some_other_email@test.com",
key_type,
sample_service) is None
assert service_can_send_to_recipient('07513332413',
key_type,
sample_service) is None
def test_service_can_send_to_recipient_passes_for_whitelisted_recipient_passes(sample_service):
create_service_whitelist(sample_service, email_address="some_other_email@test.com")
assert service_can_send_to_recipient("some_other_email@test.com",
'team',
sample_service) is None
create_service_whitelist(sample_service, mobile_number='07513332413')
assert service_can_send_to_recipient('07513332413',
'team',
sample_service) is None
@pytest.mark.parametrize('recipient', [
{"email_address": "some_other_email@test.com"},
{"mobile_number": "07513332413"},
])
def test_service_can_send_to_recipient_fails_when_ignoring_whitelist(
notify_db,
notify_db_session,
sample_service,
recipient,
):
create_service_whitelist(sample_service, **recipient)
with pytest.raises(BadRequestError) as exec_info:
service_can_send_to_recipient(
next(iter(recipient.values())),
'team',
sample_service,
allow_whitelisted_recipients=False,
)
assert exec_info.value.status_code == 400
assert exec_info.value.message == 'Can’t send to this recipient using a team-only API key'
assert exec_info.value.fields == []
@pytest.mark.parametrize('recipient', ['07513332413', 'some_other_email@test.com'])
@pytest.mark.parametrize('key_type, error_message',
[('team', 'Can’t send to this recipient using a team-only API key'),
('normal',
"Can’t send to this recipient when service is in trial mode – see https://www.notifications.service.gov.uk/trial-mode")]) # noqa
def test_service_can_send_to_recipient_fails_when_recipient_is_not_on_team(
recipient,
key_type,
error_message,
notify_db_session,
):
trial_mode_service = create_service(service_name='trial mode', restricted=True)
with pytest.raises(BadRequestError) as exec_info:
service_can_send_to_recipient(recipient,
key_type,
trial_mode_service)
assert exec_info.value.status_code == 400
assert exec_info.value.message == error_message
assert exec_info.value.fields == []
def test_service_can_send_to_recipient_fails_when_mobile_number_is_not_on_team(sample_service):
with pytest.raises(BadRequestError) as e:
service_can_send_to_recipient("0758964221",
'team',
sample_service)
assert e.value.status_code == 400
assert e.value.message == 'Can’t send to this recipient using a team-only API key'
assert e.value.fields == []
@pytest.mark.parametrize('char_count', [612, 0, 494, 200, 918])
@pytest.mark.parametrize('show_prefix', [True, False])
@pytest.mark.parametrize('template_type', ['sms', 'email', 'letter'])
def test_check_content_char_count_passes(notify_db_session, show_prefix, char_count, template_type):
service = create_service(prefix_sms=show_prefix)
t = create_template(service=service, content='a' * char_count, template_type=template_type)
template = templates_dao.dao_get_template_by_id_and_service_id(template_id=t.id, service_id=service.id)
template_with_content = get_template_instance(template=template.__dict__, values={})
assert check_content_char_count(template_with_content) is None
@pytest.mark.parametrize('char_count', [919, 6000])
@pytest.mark.parametrize('show_prefix', [True, False])
def test_check_content_char_count_fails(notify_db_session, show_prefix, char_count):
with pytest.raises(BadRequestError) as e:
service = create_service(prefix_sms=show_prefix)
t = create_template(service=service, content='a' * char_count, template_type='sms')
template = templates_dao.dao_get_template_by_id_and_service_id(template_id=t.id, service_id=service.id)
template_with_content = get_template_instance(template=template.__dict__, values={})
check_content_char_count(template_with_content)
assert e.value.status_code == 400
assert e.value.message == f'Text messages cannot be longer than {SMS_CHAR_COUNT_LIMIT} characters. ' \
f'Your message is {char_count} characters'
assert e.value.fields == []
@pytest.mark.parametrize('template_type', ['email', 'letter'])
def test_check_content_char_count_passes_for_long_email_or_letter(sample_service, template_type):
t = create_template(service=sample_service, content='a' * 1000, template_type=template_type)
template = templates_dao.dao_get_template_by_id_and_service_id(template_id=t.id,
service_id=t.service_id)
template_with_content = get_template_instance(template=template.__dict__, values={})
assert check_content_char_count(template_with_content) is None
def test_check_notification_content_is_not_empty_passes(notify_api, mocker, sample_service):
template_id = create_template(sample_service, content="Content is not empty").id
template = templates_dao.dao_get_template_by_id_and_service_id(
template_id=template_id,
service_id=sample_service.id
)
template_with_content = create_content_for_notification(template, {})
assert check_notification_content_is_not_empty(template_with_content) is None
@pytest.mark.parametrize('template_content,notification_values', [
("", {}),
("((placeholder))", {"placeholder": ""})
])
def test_check_notification_content_is_not_empty_fails(
notify_api, mocker, sample_service, template_content, notification_values
):
template_id = create_template(sample_service, content=template_content).id
template = templates_dao.dao_get_template_by_id_and_service_id(
template_id=template_id,
service_id=sample_service.id
)
template_with_content = create_content_for_notification(template, notification_values)
with pytest.raises(BadRequestError) as e:
check_notification_content_is_not_empty(template_with_content)
assert e.value.status_code == 400
assert e.value.message == 'Your message is empty.'
assert e.value.fields == []
def test_validate_template(sample_service):
template = create_template(sample_service, template_type="email")
validate_template(template.id, {}, sample_service, "email")
def test_validate_template_calls_all_validators(mocker, fake_uuid, sample_service):
template = create_template(sample_service, template_type="email")
mock_check_type = mocker.patch('app.notifications.validators.check_template_is_for_notification_type')
mock_check_if_active = mocker.patch('app.notifications.validators.check_template_is_active')
    mock_create_content = mocker.patch(
        'app.notifications.validators.create_content_for_notification', return_value="content"
    )
mock_check_not_empty = mocker.patch('app.notifications.validators.check_notification_content_is_not_empty')
mock_check_message_is_too_long = mocker.patch('app.notifications.validators.check_content_char_count')
validate_template(template.id, {}, sample_service, "email")
mock_check_type.assert_called_once_with("email", "email")
mock_check_if_active.assert_called_once_with(template)
    mock_create_content.assert_called_once_with(template, {})
mock_check_not_empty.assert_called_once_with("content")
mock_check_message_is_too_long.assert_called_once_with("content")
@pytest.mark.parametrize('key_type', ['team', 'live', 'test'])
def test_that_when_exceed_rate_limit_request_fails(
key_type,
sample_service,
mocker):
with freeze_time("2016-01-01 12:00:00.000000"):
if key_type == 'live':
api_key_type = 'normal'
else:
api_key_type = key_type
mocker.patch('app.redis_store.exceeded_rate_limit', return_value=True)
mocker.patch('app.notifications.validators.services_dao')
sample_service.restricted = True
api_key = create_api_key(sample_service, key_type=api_key_type)
with pytest.raises(RateLimitError) as e:
check_service_over_api_rate_limit(sample_service, api_key)
        app.redis_store.exceeded_rate_limit.assert_called_with(
            "{}-{}".format(str(sample_service.id), api_key.key_type),
            sample_service.rate_limit,
            60
        )
assert e.value.status_code == 429
assert e.value.message == 'Exceeded rate limit for key type {} of {} requests per {} seconds'.format(
key_type.upper(), sample_service.rate_limit, 60
)
assert e.value.fields == []
def test_that_when_not_exceeded_rate_limit_request_succeeds(
sample_service,
mocker):
with freeze_time("2016-01-01 12:00:00.000000"):
mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False)
mocker.patch('app.notifications.validators.services_dao')
sample_service.restricted = True
api_key = create_api_key(sample_service)
check_service_over_api_rate_limit(sample_service, api_key)
        app.redis_store.exceeded_rate_limit.assert_called_with(
            "{}-{}".format(str(sample_service.id), api_key.key_type),
            3000,
            60
        )
def test_should_not_rate_limit_if_limiting_is_disabled(
sample_service,
mocker):
with freeze_time("2016-01-01 12:00:00.000000"):
current_app.config['API_RATE_LIMIT_ENABLED'] = False
mocker.patch('app.redis_store.exceeded_rate_limit', return_value=False)
mocker.patch('app.notifications.validators.services_dao')
sample_service.restricted = True
api_key = create_api_key(sample_service)
check_service_over_api_rate_limit(sample_service, api_key)
assert not app.redis_store.exceeded_rate_limit.called
@pytest.mark.parametrize('key_type', ['test', 'normal'])
def test_rejects_api_calls_with_international_numbers_if_service_does_not_allow_int_sms(
key_type,
notify_db_session,
):
service = create_service(service_permissions=[SMS_TYPE])
with pytest.raises(BadRequestError) as e:
validate_and_format_recipient('20-12-1234-1234', key_type, service, SMS_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'Cannot send to international mobile numbers'
assert e.value.fields == []
@pytest.mark.parametrize('key_type', ['test', 'normal'])
def test_allows_api_calls_with_international_numbers_if_service_does_allow_int_sms(
key_type, sample_service_full_permissions):
result = validate_and_format_recipient('20-12-1234-1234', key_type, sample_service_full_permissions, SMS_TYPE)
assert result == '201212341234'
def test_rejects_api_calls_with_no_recipient():
with pytest.raises(BadRequestError) as e:
validate_and_format_recipient(None, 'key_type', 'service', 'SMS_TYPE')
assert e.value.status_code == 400
assert e.value.message == "Recipient can't be empty"
@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
def test_check_service_email_reply_to_id_where_reply_to_id_is_none(notification_type):
assert check_service_email_reply_to_id(None, None, notification_type) is None
def test_check_service_email_reply_to_where_email_reply_to_is_found(sample_service):
reply_to_address = create_reply_to_email(sample_service, "test@test.com")
assert check_service_email_reply_to_id(sample_service.id, reply_to_address.id, EMAIL_TYPE) == "test@test.com"
def test_check_service_email_reply_to_id_where_service_id_is_not_found(sample_service, fake_uuid):
reply_to_address = create_reply_to_email(sample_service, "test@test.com")
with pytest.raises(BadRequestError) as e:
check_service_email_reply_to_id(fake_uuid, reply_to_address.id, EMAIL_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'email_reply_to_id {} does not exist in database for service id {}' \
.format(reply_to_address.id, fake_uuid)
def test_check_service_email_reply_to_id_where_reply_to_id_is_not_found(sample_service, fake_uuid):
with pytest.raises(BadRequestError) as e:
check_service_email_reply_to_id(sample_service.id, fake_uuid, EMAIL_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'email_reply_to_id {} does not exist in database for service id {}' \
.format(fake_uuid, sample_service.id)
@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
def test_check_service_sms_sender_id_where_sms_sender_id_is_none(notification_type):
assert check_service_sms_sender_id(None, None, notification_type) is None
def test_check_service_sms_sender_id_where_sms_sender_id_is_found(sample_service):
sms_sender = create_service_sms_sender(service=sample_service, sms_sender='123456')
assert check_service_sms_sender_id(sample_service.id, sms_sender.id, SMS_TYPE) == '123456'
def test_check_service_sms_sender_id_where_service_id_is_not_found(sample_service, fake_uuid):
sms_sender = create_service_sms_sender(service=sample_service, sms_sender='123456')
with pytest.raises(BadRequestError) as e:
check_service_sms_sender_id(fake_uuid, sms_sender.id, SMS_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'sms_sender_id {} does not exist in database for service id {}' \
.format(sms_sender.id, fake_uuid)
def test_check_service_sms_sender_id_where_sms_sender_is_not_found(sample_service, fake_uuid):
with pytest.raises(BadRequestError) as e:
check_service_sms_sender_id(sample_service.id, fake_uuid, SMS_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'sms_sender_id {} does not exist in database for service id {}' \
.format(fake_uuid, sample_service.id)
def test_check_service_letter_contact_id_where_letter_contact_id_is_none():
assert check_service_letter_contact_id(None, None, 'letter') is None
def test_check_service_letter_contact_id_where_letter_contact_id_is_found(sample_service):
letter_contact = create_letter_contact(service=sample_service, contact_block='123456')
assert check_service_letter_contact_id(sample_service.id, letter_contact.id, LETTER_TYPE) == '123456'
def test_check_service_letter_contact_id_where_service_id_is_not_found(sample_service, fake_uuid):
letter_contact = create_letter_contact(service=sample_service, contact_block='123456')
with pytest.raises(BadRequestError) as e:
check_service_letter_contact_id(fake_uuid, letter_contact.id, LETTER_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'letter_contact_id {} does not exist in database for service id {}' \
.format(letter_contact.id, fake_uuid)
def test_check_service_letter_contact_id_where_letter_contact_is_not_found(sample_service, fake_uuid):
with pytest.raises(BadRequestError) as e:
check_service_letter_contact_id(sample_service.id, fake_uuid, LETTER_TYPE)
assert e.value.status_code == 400
assert e.value.message == 'letter_contact_id {} does not exist in database for service id {}' \
.format(fake_uuid, sample_service.id)
@pytest.mark.parametrize('notification_type', ['sms', 'email', 'letter'])
def test_check_reply_to_with_empty_reply_to(sample_service, notification_type):
assert check_reply_to(sample_service.id, None, notification_type) is None
def test_check_reply_to_email_type(sample_service):
reply_to_address = create_reply_to_email(sample_service, "test@test.com")
assert check_reply_to(sample_service.id, reply_to_address.id, EMAIL_TYPE) == 'test@test.com'
def test_check_reply_to_sms_type(sample_service):
sms_sender = create_service_sms_sender(service=sample_service, sms_sender='123456')
assert check_reply_to(sample_service.id, sms_sender.id, SMS_TYPE) == '123456'
def test_check_reply_to_letter_type(sample_service):
letter_contact = create_letter_contact(service=sample_service, contact_block='123456')
assert check_reply_to(sample_service.id, letter_contact.id, LETTER_TYPE) == '123456'
def test_check_if_service_can_send_files_by_email_raises_if_no_contact_link_set(sample_service):
with pytest.raises(BadRequestError) as e:
check_if_service_can_send_files_by_email(
service_contact_link=sample_service.contact_link,
service_id=sample_service.id
)
message = f"Send files by email has not been set up - add contact details for your service at " \
f"http://localhost:6012/services/{sample_service.id}/service-settings/send-files-by-email"
assert e.value.status_code == 400
assert e.value.message == message
def test_check_if_service_can_send_files_by_email_passes_if_contact_link_set(sample_service):
sample_service.contact_link = 'contact.me@gov.uk'
check_if_service_can_send_files_by_email(
service_contact_link=sample_service.contact_link,
service_id=sample_service.id
)
|
import pandas as pd
import numpy
dataset = pd.read_csv("zoo.data")
X = dataset.iloc[:, 1:17].values
y = dataset.iloc[:, 17].values
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
from sklearn.svm import SVC
classifier = SVC(kernel = 'linear', random_state = 0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
print(classifier.score(X_test, y_test))
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
from sklearn.model_selection import cross_val_score
# 5-fold cross-validation on the training set
scores = cross_val_score(classifier, X_train, y_train, cv=5)
print(scores)
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
from sklearn.model_selection import KFold
kf=KFold(n_splits=5)
score_linear_kernel=[]
def get_score(model,X_train,X_test,y_train,y_test):
model.fit(X_train,y_train)
return model.score(X_test,y_test)
score=get_score(classifier,X_train,X_test,y_train,y_test)
for train_index,test_index in kf.split(X_train,y_train):
x_train,x_test=X_train[train_index],X_train[test_index]
Y_train,Y_test=y_train[train_index],y_train[test_index]
score_linear_kernel.append(get_score(classifier,x_train,x_test,Y_train,Y_test))
print("After k fold for k=5 in linear kernel, the scores array is as follows:")
print(score_linear_kernel)
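# Summarize the per-fold scores as well (numpy is already imported above):
print("Mean k-fold accuracy: %0.2f" % numpy.mean(score_linear_kernel))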
from sklearn.metrics import classification_report
print(classification_report(y_test,y_pred))
|
from .Cross_Validation import Cross_Validation
from .BootstrapCV import BootstrapCV
from .KFold import KFold
from .NxM import NxM
from .TestCV import TestCV
from .loadercv import LoaderCV
from .VersionDropout import VersionDropout
__all__ = [
"Cross_Validation",
"BootstrapCV",
"KFold",
"NxM",
"TestCV",
"LoaderCV"
"VersionDropout"
]
|
import ast
import io
import json
import tokenize
from collections import namedtuple
import asttokens
import six
def parse_acl_formula(acl_formula):
"""
Parse an ACL formula expression into a parse tree that we can interpret in JS, e.g.
"rec.office == 'Seattle' and user.email in ['sally@', 'xie@']".
The idea is to support enough to express ACL rules flexibly, but we don't need to support too
much, since rules should be reasonably simple.
The returned tree has the form [NODE_TYPE, arguments...], with these NODE_TYPEs supported:
And|Or ...values
Add|Sub|Mult|Div|Mod left, right
Not operand
Eq|NotEq|Lt|LtE|Gt|GtE left, right
Is|IsNot|In|NotIn left, right
List ...elements
Const value (number, string, bool)
Name name (string)
Attr node, attr_name
Comment node, comment
"""
if isinstance(acl_formula, six.binary_type):
acl_formula = acl_formula.decode('utf8')
try:
tree = ast.parse(acl_formula, mode='eval')
result = _TreeConverter().visit(tree)
for part in tokenize.generate_tokens(io.StringIO(acl_formula).readline):
if part[0] == tokenize.COMMENT and part[1].startswith('#'):
result = ['Comment', result, part[1][1:].strip()]
break
return result
except SyntaxError as err:
# In case of an error, include line and offset.
raise SyntaxError("%s on line %s col %s" % (err.args[0], err.lineno, err.offset))
def parse_acl_formula_json(acl_formula):
"""
  As parse_acl_formula(), but stringifies the result, and converts a falsy (empty) formula
  to an empty string.
"""
return json.dumps(parse_acl_formula(acl_formula)) if acl_formula else ""
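# Illustrative sketch (not part of the module): the tree shape produced by the
# visitors defined below for a simple formula:
#   parse_acl_formula("rec.office == 'Seattle'")
#   -> ['Eq', ['Attr', ['Name', 'rec'], 'office'], ['Const', 'Seattle']]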
# Entities encountered in ACL formulas, which may get renamed.
# type : 'recCol'|'userAttr'|'userAttrCol',
# start_pos: number, # start position of the token in the code.
# name: string, # the name that may be updated by a rename.
# extra: string|None, # name of userAttr in case of userAttrCol; otherwise None.
NamedEntity = namedtuple('NamedEntity', ('type', 'start_pos', 'name', 'extra'))
def parse_acl_grist_entities(acl_formula):
"""
Parse the ACL formula collecting any entities that may be subject to renaming. Returns a
NamedEntity list.
"""
try:
atok = asttokens.ASTTokens(acl_formula, tree=ast.parse(acl_formula, mode='eval'))
converter = _EntityCollector()
converter.visit(atok.tree)
return converter.entities
  except SyntaxError:
    return []
named_constants = {
'True': True,
'False': False,
'None': None,
}
class _TreeConverter(ast.NodeVisitor):
# AST nodes are documented here: https://docs.python.org/2/library/ast.html#abstract-grammar
# pylint:disable=no-self-use
def visit_Expression(self, node):
return self.visit(node.body)
def visit_BoolOp(self, node):
return [node.op.__class__.__name__] + [self.visit(v) for v in node.values]
def visit_BinOp(self, node):
if not isinstance(node.op, (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)):
return self.generic_visit(node)
return [node.op.__class__.__name__, self.visit(node.left), self.visit(node.right)]
def visit_UnaryOp(self, node):
    if not isinstance(node.op, ast.Not):
return self.generic_visit(node)
return [node.op.__class__.__name__, self.visit(node.operand)]
def visit_Compare(self, node):
# We don't try to support chained comparisons like "1 < 2 < 3" (though it wouldn't be hard).
if len(node.ops) != 1 or len(node.comparators) != 1:
raise ValueError("Can't use chained comparisons")
return [node.ops[0].__class__.__name__, self.visit(node.left), self.visit(node.comparators[0])]
def visit_Name(self, node):
if node.id in named_constants:
return ["Const", named_constants[node.id]]
return ["Name", node.id]
def visit_Constant(self, node):
return ["Const", node.value]
visit_NameConstant = visit_Constant
def visit_Attribute(self, node):
return ["Attr", self.visit(node.value), node.attr]
def visit_Num(self, node):
return ["Const", node.n]
def visit_Str(self, node):
return ["Const", node.s]
def visit_List(self, node):
return ["List"] + [self.visit(e) for e in node.elts]
def visit_Tuple(self, node):
return self.visit_List(node) # We don't distinguish tuples and lists
def generic_visit(self, node):
raise ValueError("Unsupported syntax at %s:%s" % (node.lineno, node.col_offset + 1))
class _EntityCollector(_TreeConverter):
def __init__(self):
self.entities = [] # NamedEntity list
def visit_Attribute(self, node):
parent = self.visit(node.value)
# We recognize a couple of specific patterns for entities that may be affected by renames.
if parent == ['Name', 'rec'] or parent == ['Name', 'newRec']:
# rec.COL refers to the column from the table that the rule is on.
self.entities.append(NamedEntity('recCol', node.last_token.startpos, node.attr, None))
if parent == ['Name', 'user']:
# user.ATTR is a user attribute.
self.entities.append(NamedEntity('userAttr', node.last_token.startpos, node.attr, None))
elif parent[0] == 'Attr' and parent[1] == ['Name', 'user']:
# user.ATTR.COL is a column from the lookup table of the UserAttribute ATTR.
self.entities.append(
NamedEntity('userAttrCol', node.last_token.startpos, node.attr, parent[2]))
return ["Attr", parent, node.attr]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
##
QC
##
*Created on Mon, Mar 26 by A. Pahl*
Tools for interactive plate quality control in the notebook.
Uses Holoviews and its Bokeh backend for visualization.
"""
import os.path as op
import pandas as pd
import numpy as np
import matplotlib as mpl
import holoviews as hv
hv.extension("bokeh")
from bokeh.models import HoverTool
from cellpainting2 import processing as cpp
from cellpainting2 import reporting as cpr
from cellpainting2 import tools as cpt
cp_config = cpt.load_config("config")
ACT_PROF_PARAMETERS = cp_config["Parameters"]
ACT_CUTOFF_PERC = cp_config["Cutoffs"]["ActCutoffPerc"]
ACT_CUTOFF_PERC_H = cp_config["Cutoffs"]["ActCutoffPercH"]
ACT_CUTOFF_PERC_REF = cp_config["Cutoffs"]["ActCutoffPercRef"]
ACT_CUTOFF_PERC_REF_H = cp_config["Cutoffs"]["ActCutoffPercRefH"]
OVERACT_H = cp_config["Cutoffs"]["OverActH"]
LIMIT_ACTIVITY_H = cp_config["Cutoffs"]["LimitActivityH"]
LIMIT_ACTIVITY_L = cp_config["Cutoffs"]["LimitActivityL"]
LIMIT_CELL_COUNT_H = cp_config["Cutoffs"]["LimitCellCountH"]
LIMIT_CELL_COUNT_L = cp_config["Cutoffs"]["LimitCellCountL"]
LIMIT_SIMILARITY_H = cp_config["Cutoffs"]["LimitSimilarityH"]
LIMIT_SIMILARITY_L = cp_config["Cutoffs"]["LimitSimilarityL"]
IGNORE_SKIPPED = cp_config["Options"]["IgnoreSkipped"]
def add_images(df):
"""Adds an Image column to the MolFrame, used for structure tooltips in plotting.
Only works on Pandas DataFrames, does not work for Dask DataFrames
(call `.compute()` first)."""
assert cpp.is_pandas(df), "Only works when the data object is a Pandas DataFrame. Consider running `.compute()` first."
def _img_method(x):
return "data:image/png;base64,{}".format(cpr.b64_mol(cpt.mol_from_smiles(x)))
result = df.copy()
result["Image"] = result["Smiles"].apply(_img_method)
return result
def contained_in_list(s, lst):
for el in lst:
if el in s:
return True
return False
def process_plate_for_qc(plate_full_name, structures=True, act_cutoff=1.585):
plate = cpt.split_plate_name(plate_full_name)
src_templ = op.join(cp_config["Dirs"]["PlatesDir"], "{}-{}")
src_dir = src_templ.format(plate.date, plate.name)
# process as Pandas
ds_plate = cpp.read_csv(op.join(src_dir, "Results.tsv")).compute()
ds_plate = ds_plate.group_on_well()
ds_plate.position_from_well() # inplace
if contained_in_list(plate.name, IGNORE_SKIPPED):
print("* skipping Echo filter step.")
else:
ds_plate = ds_plate.remove_skipped_echo_direct_transfer(op.join(src_dir, "*_print.xml"))
ds_plate.well_type_from_position()
ds_plate.flag_toxic()
    # print(sorted(set(ds_plate.keys()) - set(cpp.ACT_PROF_PARAMETERS)))
ds_plate.data["Plate"] = "{}-{}".format(plate.date, plate.name)
ds_plate.qc_stats()
# ds_plate = ds_plate.activity_profile(act_cutoff=1.585) # current production cutoff
ds_plate = ds_plate.activity_profile(act_cutoff=act_cutoff)
ds_plate = ds_plate.keep_cols(cpp.FINAL_PARAMETERS) # JUST FOR QC
ds_plate = ds_plate.join_layout_1536(plate.name, keep_ctrls=True)
ds_plate.data["Plate"] = "{}-{}".format(plate.date, plate.name)
ds_plate = ds_plate.join_smiles()
ds_plate = ds_plate.join_batch_data()
if structures:
data = add_images(ds_plate.data)
else:
data = ds_plate.data
data["Image"] = "no struct"
return data
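# A minimal usage sketch (hypothetical plate name; the expected format is
# whatever cpt.split_plate_name() splits into .date and .name):
#   data = process_plate_for_qc("180326-S0100", structures=False)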
def struct_hover():
"""Create a structure tooltip that can be used in Holoviews.
Takes a MolFrame instance as parameter."""
hover = HoverTool(
tooltips="""
<div>
<div>
<img src="@Image" alt="Mol" width="70%"><br>
<div>
<div>
<span style="font-size: 12px; font-weight: bold;">@Metadata_Well</span>
</div>
<div>
<span style="font-size: 12px; font-weight: bold;">@{}</span>
<span style="font-size: 12px;"> (@Producer)</span>
</div>
<div>
<span style="font-size: 12px;">Induction: @Activity</span>
</div>
<div>
<span style="font-size: 12px;">Rel_Cell_Count: @Rel_Cell_Count</span>
</div>
</div>
""".format("Batch_Id")
)
return hover
def view_plate(plate, parm="Activity",
cmap="gist_heat_r", low=0, high=50, show=True,
title="Plate View"):
if isinstance(plate, str):
data = process_plate_for_qc(plate)
else:
data = plate.copy() # already processed, Pandas DF
id_prop = "Batch_Id"
hover = struct_hover()
plot_options = {
"width": 800, "height": 450, "legend_position": "top_left",
"tools": [hover], "invert_yaxis": True,
"colorbar": True,
"colorbar_opts": {"width": 10},
}
plot_styles = {"size": 20, "cmap": cmap}
vdims = ["plateRow", id_prop, "Image", "Producer", "Activity", "Rel_Cell_Count", "Metadata_Well"]
if parm == "Activity" or parm == "Induction":
plot_options["color_index"] = 5
else:
plot_options["color_index"] = 6
opts = {'Scatter': {'plot': plot_options, "style": plot_styles}}
scatter_plot = hv.Scatter(data, "plateColumn", vdims=vdims, label=title)
range_args = {"plateRow": (0.5, 16.5), "plateColumn": (0.5, 24.5),
parm: (low, high)}
scatter_plot = scatter_plot.redim.range(**range_args)
# return data
return scatter_plot(opts)
def ecdf(data, formal=False, x_range=None):
"""
taken from the "DataFramed" podcast, episode 14 (https://www.datacamp.com/community/podcast/text-mining-nlproc)
code: https://github.com/justinbois/dataframed-plot-examples/blob/master/ecdf.ipynb
Get x, y, values of an ECDF for plotting.
Parameters
----------
data : ndarray
        One-dimensional Numpy array with data.
formal : bool, default False
If True, generate x and y values for formal ECDF (staircase). If
False, generate x and y values for ECDF as dots.
x_range : 2-tuple, default None
If not None and `formal` is True, then specifies range of plot
on x-axis.
Returns
-------
x : ndarray
x-values for plot
y : ndarray
y-values for plot
"""
x = np.sort(data)
y = np.arange(1, len(data) + 1) / len(data)
if formal:
# Set up output arrays
x_formal = np.empty(2 * (len(x) + 1))
y_formal = np.empty(2 * (len(x) + 1))
# y-values for steps
y_formal[:2] = 0
y_formal[2::2] = y
y_formal[3::2] = y
# x- values for steps
x_formal[0] = x[0]
x_formal[1] = x[0]
x_formal[2::2] = x
x_formal[3:-1:2] = x[1:]
x_formal[-1] = x[-1]
if x_range is not None:
if np.all(x >= x_range[0]) and np.all(x <= x_range[1]):
x_formal = np.concatenate(((x_range[0],), x_formal, (x_range[1],)))
y_formal = np.concatenate(((0,), y_formal, (1,)))
else:
raise RuntimeError('Some data values outside of `x_range`.')
return x_formal, y_formal
else:
return x, y
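# A minimal usage sketch (assumes a notebook context with numpy as np, as
# imported above):
#   samples = np.random.normal(size=100)
#   x, y = ecdf(samples)                  # dot-style ECDF
#   xf, yf = ecdf(samples, formal=True)   # staircase ECDF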
def view_control_stats(full_plate_name):
assert isinstance(full_plate_name, str), "`full_plate_name` has to be the full plate name"
what_stats = ["Min_rel", "Max_rel", "MAD_rel"]
qc_stats = cpp.read_resource("QCSTATS")
plate_stats = qc_stats[qc_stats["Plate"] == full_plate_name]
melt = plate_stats.copy()
melt = melt.drop("Plate", axis=1)
melt = pd.melt(melt, id_vars="Stat",
var_name="Parameter", value_name="Value")
melt = melt.reset_index().drop("index", axis=1)
title = "{} Controls Stats".format(full_plate_name)
df_d = {"Stat": [], "x": [], "ECDF": []}
for stat in what_stats:
x, y = ecdf(melt.loc[(melt['Stat'] == stat), 'Value'])
df_d["Stat"].extend([stat] * len(x))
df_d["x"].extend(x)
df_d["ECDF"].extend(y)
data = pd.DataFrame(df_d)
cmap_ecdf = mpl.colors.ListedColormap(colors=["#e5ae38", "#fc4f30", "#30a2da"]) # , "#55aa00"])
plot_opts = dict(show_legend=True, width=350, height=350, toolbar='right',
color_index=2, legend_position="bottom_right")
plot_styles = dict(size=5, cmap=cmap_ecdf)
vdims = ["ECDF", "Stat"]
ecdf_plot = hv.Scatter(
data, "x", vdims=vdims, label=title,
).redim.range(x=(0, 1), ECDF=(0, 1.02)).redim.label(x='Deviation from Median')
return ecdf_plot(plot=plot_opts, style=plot_styles)
|
from django.contrib import admin
from .models import Actor
admin.site.register(Actor)
|
from flask import Flask, request, send_file
app = Flask(__name__)
@app.route('/files/<path>/<filename>')
def test(path, filename):
print(request.url)
print(path, filename)
return send_file('/home/ej/tmp.txt', as_attachment=True, mimetype='text/csv; charset=x-EBCDIC-KoreanAndKoreanExtended'
)
if __name__=='__main__':
app.run(host='0.0.0.0')
|
import logging
import random
from tornado import gen
log = logging.getLogger()
def arguments(parser):
parser.add_argument(
"--workers", "-w", type=int, default=5,
help="Number of workers to launch."
)
parser.add_argument(
"--znode-path", "-p", type=str, default="examplelock",
help="ZNode path to use for the election."
)
@gen.coroutine
def run(client, args):
log.info("Launching %d workers.", args.workers)
yield client.start()
yield [
worker(i, client, args)
for i in range(args.workers)
]
yield client.close()
@gen.coroutine
def worker(number, client, args):
party = client.recipes.Party(args.znode_path, "worker_%d" % number)
log.info("[WORKER #%d] Joining the party", number)
yield party.join()
for _ in range(10):
log.info("[WORKER #%d] Members I see: %s", number, party.members)
yield gen.sleep(.5)
should_leave = random.choice([False, False, True])
if should_leave:
log.info("[WORKER #%d] Leaving the party temporarily", number)
yield party.leave()
yield gen.sleep(1)
log.info("[WORKER #%d] Rejoining the party", number)
yield party.join()
yield party.leave()
|
from django.forms import ModelForm
from .models import Automate_text
class TimetableForm(ModelForm):
class Meta:
model = Automate_text
fields = ['title', 'message', 'number', 'important']
|
from typing import List
from marshmallow import Schema, fields, post_load
from src.dto.common.base_dto import BaseDto
class ConsoleGamesListDto(BaseDto):
def __init__(self, console_code: str = None,
reference_id: str = None,
title: str = None,
link: str = None):
self.console_code = console_code
self.reference_id = reference_id
self.title = title
self.link = link
@staticmethod
def mapper(data):
return ConsoleGamesListDto(**data)
class ConsoleGamesListDtoSchema(Schema):
console_code = fields.Str()
reference_id = fields.Str()
title = fields.Str()
link = fields.Str()
    @post_load
    def make_dto(self, data, **kwargs):
        return ConsoleGamesListDto(**data)
class ConsoleGamesDto(BaseDto):
def __init__(self, data: List[ConsoleGamesListDto] = None):
self.data = data
@staticmethod
def mapper(data):
return ConsoleGamesDto(**data)
class ConsoleGamesDtoSchema(Schema):
data = fields.List(fields.Nested(ConsoleGamesListDtoSchema))
code_transfer = fields.Int()
    @post_load
    def make_model(self, data, **kwargs):
        return ConsoleGamesDto.mapper(data)
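# A minimal usage sketch (hypothetical payload; make_dto above returns the DTO
# from load, so this yields a ConsoleGamesListDto — under marshmallow 2 the
# result is wrapped in an UnmarshalResult):
#   dto = ConsoleGamesListDtoSchema().load({"console_code": "NES", "title": "Metroid"})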
|
#!/usr/bin/python3.6
# coding=utf-8
# Please use python 3.6
"""
__synopsis__ : Produces pdfs over the support set classes for the target set sample.
__description__ : Produces pdfs over the support set classes for the target set sample.
__project__ : MNXC
__author__ : Samujjwal Ghosh <cs16resch01001@iith.ac.in>
__version__ : "0.1"
__date__ : "08-11-2018"
__copyright__ : "Copyright (c) 2019"
__license__ : This source code is licensed under the MIT-style license found in the LICENSE file in the root
directory of this source tree.
__classes__ : Attn
__variables__ :
__methods__ :
"""
import torch
import numpy as np
import torch.nn as nn
from logger.logger import logger
from models_orig import PairCosineSim as C
class Attn(nn.Module):
def __init__(self):
super(Attn,self).__init__()
def forward(self,similarities,supports_hots,dim=1):
"""
Produces pdfs over the support set classes for the target samples.
:param dim: Dimension along which Softmax will be computed (so every slice along dim will sum to 1).
:param similarities: A tensor with cosine similarities of size [sequence_length, batch_size]
:param supports_hots: A tensor with the one hot vectors of the targets for each support set image
[sequence_length, batch_size, num_classes]
:return: Softmax pdf
"""
# logger.debug(("similarities.shape: ",similarities.shape))
# logger.debug(("supports_hots.shape: ",supports_hots.shape))
# logger.debug(("supports_hots: ",supports_hots))
softmax = nn.Softmax(dim=dim)
softmax_similarities = softmax(similarities)
# logger.debug(("softmax_similarities.shape: ",softmax_similarities.shape))
softmax_similarities = softmax_similarities.unsqueeze(1)
# logger.debug(("softmax_similarities.unsqueeze(1).shape: ",softmax_similarities.shape))
# preds = softmax_similarities.unsqueeze(1).bmm(supports_hots).squeeze()
preds = softmax_similarities.bmm(supports_hots)
# logger.debug(("preds.shape: ",preds.shape))
preds = preds.squeeze()
# logger.debug(("preds.squeeze().shape: ",preds.shape))
# logger.debug(("preds: ",preds))
return preds
if __name__ == '__main__':
a = torch.tensor([[[1.,0.4],
[1.,1.]],
[[1.,0.4],
[0.,1.5]],
[[1.,0.4],
[1.,1.5]]])
b = torch.tensor([[[1.,0.4],
[0.,1.5]],
[[1.,0.4],
[1.,1.5]]])
# a = torch.ones(1,4,7)
# logger.debug(a)
logger.debug(a.shape)
# b = torch.ones(1,2,7)
# logger.debug(b)
logger.debug(b.shape)
test_DN = C.PairCosineSim()
sim = test_DN(a,b)
logger.debug(sim.shape)
logger.debug("sim: {}".format(sim))
# y = torch.tensor([[1., 0.],
# [0., 1.],
# [1., 0.]])
y = torch.tensor([[[1.,0.],
[0.,1.]],
[[0.,1.],
[1.,0.]]])
# y = torch.ones(2, 2, 5)
# y = torch.ones(1, 4, 5)
logger.debug("y.shape: {}".format(y.shape))
logger.debug("y: {}".format(y))
test_attn = Attn()
result = test_attn(sim,y)
logger.debug("Attention: {}".format(result))
logger.debug("Attention.shape: {}".format(result.shape))
# result = test_attn.forward2(sim, y)
# logger.debug("Attention: {}".format(result))
# logger.debug("Attention.shape: {}".format(result.shape))
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging as log
class Singleton:
    instances = {}
    def __init__(self, decorated):
        log.debug("Singleton Init %s" % decorated)
        self._decorated = decorated
def getInstance(self):
try:
return self._instance
except AttributeError:
self._instance = self._decorated()
return self._instance
    def __new__(class_, *args, **kwargs):
        print("__new__")
        class_.instances[class_] = super(Singleton, class_).__new__(class_)
        return class_.instances[class_]
def __call__(self):
raise TypeError("Singletons must be accessed through 'getInstance()'")
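# A minimal usage sketch (assumption: Singleton is meant as a class decorator,
# matching the __init__ above which stores the decorated class):
#   @Singleton
#   class Config(object):
#       pass
#   cfg = Config.getInstance()  # same instance on every call
#   Config()                    # raises TypeError, per __call__ above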
class SingletonInstance:
__instance = None
@classmethod
def __getInstance(cls):
return cls.__instance
@classmethod
def instance(cls, *args, **kargs):
cls.__instance = cls(*args, **kargs)
cls.instance = cls.__getInstance
return cls.__instance
|
from urllib.request import urlopen
from bs4 import BeautifulSoup
htmlparent = urlopen("http://www.pythonscraping.com/pages/page3.html")
bsObj = BeautifulSoup(htmlparent.read(), "html.parser")
print(bsObj.find("img",{"src":"../img/gifts/img1.jpg"}).parent.previous_sibling.get_text())
|
# Copyright (c) 2021 - present, Timur Shenkao
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from typing import Optional
from python_code.helper.binary_trees import TreeNode
# 617. Merge Two Binary Trees https://leetcode.com/problems/merge-two-binary-trees/
# You are given two binary trees root1 and root2.
# Imagine that when you put one of them to cover the other, some nodes of the two trees are overlapped while
# the others are not. You need to merge the two trees into a new binary tree.
# The merge rule is that if two nodes overlap, then sum node values up as the new value of the merged node.
# Otherwise, the NOT null node will be used as the node of the new tree.
# Return the merged tree.
# Note: The merging process must start from the root nodes of both trees.
class Solution:
def merge_trees_recursion(self, root1: Optional[TreeNode], root2: Optional[TreeNode]) -> Optional[TreeNode]:
""" Time complexity: O(N). We check / visit every node
Space complexity: if we don't count recursion stack then O(1).
Else O(log N) in case of balanced tree or O(N) in case of unbalanced tree.
"""
# if both trees are non-empty: add root values; call recursively on left and right children
# assign the returned values as left and right children of the first tree
if root1 and root2:
root1.val += root2.val
root1.left = self.merge_trees_recursion(root1.left, root2.left)
root1.right = self.merge_trees_recursion(root1.right, root2.right)
# if the first tree is empty then use nodes from the second tree
if not root1:
root1 = root2
# return the first tree (its root) as a result
# subtlety: if the second tree is empty then use nodes from the first tree
return root1
def merge_trees_iteration(self, root1: Optional[TreeNode], root2: Optional[TreeNode]) -> Optional[TreeNode]:
""" Time complexity: O(N). We check / visit every node
Space complexity: if we don't count recursion stack then O(1).
Else O(log N) in case of balanced tree or O(N) in case of unbalanced tree.
"""
# empty trees
if not root1:
return root2
if not root2:
return root1
# both roots are non-empty
stack = list()
# use Depth First Search style to traverse
stack.append((root1, root2))
while stack:
curr1, curr2 = stack.pop()
# nodes from both trees are present
if curr1 and curr2:
curr1.val += curr2.val
# there is no left subtree in the first tree
if not curr1.left:
curr1.left = curr2.left
else:
stack.append((curr1.left, curr2.left))
if not curr1.right:
curr1.right = curr2.right
else:
stack.append((curr1.right, curr2.right))
# there is no second tree: do nothing as we return modified first tree as a result
# don't add to stack anything
else:
continue
return root1
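# A minimal usage sketch (assumption: TreeNode takes (val, left, right) as in
# the usual LeetCode definition; the helper module is not shown here):
#   t1 = TreeNode(1, TreeNode(3, TreeNode(5)), TreeNode(2))
#   t2 = TreeNode(2, TreeNode(1, None, TreeNode(4)), TreeNode(3))
#   merged = Solution().merge_trees_recursion(t1, t2)  # merged.val == 3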
|
from django.contrib.contenttypes.models import ContentType
from django.dispatch import receiver
from django.db.models.signals import pre_delete
from . import utils
ModelFieldSchema = utils.get_model_field_schema_model()
@receiver(pre_delete, sender=ModelFieldSchema)
def drop_table_column(sender, instance, **kwargs): # pylint: disable=unused-argument
instance.drop_column()
instance.update_last_modified()
@receiver(pre_delete, sender=ModelFieldSchema.ModelSchema)
def drop_relation_by_model(sender, instance, **kwargs): # pylint: disable=unused-argument
instance.destroy_model()
content_type = ContentType.objects.get_for_model(instance)
content_type.model_field_columns.filter(model_id=instance.pk).delete()
@receiver(pre_delete, sender=ModelFieldSchema.FieldSchema)
def drop_relation_by_field(sender, instance, **kwargs): # pylint: disable=unused-argument
content_type = ContentType.objects.get_for_model(instance)
content_type.model_field_tables.filter(field_id=instance.pk).delete()
|
__author__ = 'at'
from app import AfricasTalkingGateway, AfricasTalkingGatewayException
from app import settings
from app import logging
from urllib import urlencode
import requests
from redis import Redis
from rq.decorators import job
redis_conn = Redis()
class FetchUrl(object):
"""
Form urls, parse and
"""
def __init__(self, base_url, metric, username, apikey, granularity, start_date, end_date):
"""
:param base_url:
:param metric:
:param username:
:param apikey:
:param granularity:
:param start_date:
:param end_date:
:return: url object
"""
self.base_url = base_url
self.metric = metric
self.username = username
self.apikey = apikey
self.granularity = granularity
self.start_date = start_date
self.end_date = end_date
def form_url(self):
query_args = {'granularity': self.granularity, 'startDate': self.start_date,
'endDate': self.end_date, 'metric': self.metric, 'username': self.username}
_url = urlencode(query_args)
return self.base_url + _url
def get_apikey(self):
return self.apikey
class MakeRequests(object):
"""
Some request helper classes
"""
def __init__(self, url, apikey, method='GET'):
"""
:param url:
:param apikey:
:param method:
:return: None
"""
self.method = method
self.url = url
self.apikey = apikey
def send_(self):
"""
:param self
:return: response object
"""
        if self.method == 'GET':
headers = {'apikey': self.apikey}
r = requests.get(self.url, headers=headers)
return r
@job('high', connection=redis_conn, timeout=5)
def consume_call(from_, to):
api = AfricasTalkingGateway(apiKey_=settings.api_key, username_=settings.username)
try:
api.call(from_, to)
except AfricasTalkingGatewayException:
logging.warning("call init failed")
@job('high', connection=redis_conn, timeout=5)
def get_witty_intent(text):
try:
intent = ask_wit(text)
# pull game info (game info for scores and results)
return intent
except Exception as e:
logging.error("call init failed", e)
# send_message job
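# A minimal enqueue sketch (standard rq semantics: the @job decorator adds a
# .delay() helper that queues the call on the 'high' queue; phone numbers are
# hypothetical):
#   consume_call.delay("+254711000000", "+254722000000")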
|
from django.urls import path, include
from . import views
urlpatterns = [
# Articles Home/Index page path:
path('', views.blog_homepage, name='blog_homepage'),
# Articles Home page path w/ specific category search:
path('blog_category/<str:category>', views.blog_homepage, name='blog_homepage_category'),
# Papers & PDF Home/Index page path:
path('papers', views.papers_homepage, name='papers_homepage'),
# Papers Home Page w/ specific category search:
path('papers/<str:category>', views.papers_homepage, name='papers_homepage_category'),
# Path to individual papers w/ pdf path url param:
path('papers/download/<str:pdf_slug>', views.paper_download, name='paper_download'),
# Path to individual article w/ slug url param:
path('article/<str:slug>', views.blog_post, name='article')
]
|
import itasca
from itasca import wall, ballfacetarray
import statistic.models
def get_stress_mpa(project: statistic.models.Project):
"""获取荷载板的正应力,单位为MPa"""
width = project.specimen_width / 1000
top_position = wall.find('top').pos_y()
bottom_position = wall.find('bottom').pos_y()
top_position_array = abs(ballfacetarray.pos()[:, 1] - top_position) < 0.001
bottom_position_array = abs(ballfacetarray.pos()[:, 1] - bottom_position) < 0.001
top_normal_force = ballfacetarray.force_normal()[top_position_array]
bottom_normal_force = ballfacetarray.force_normal()[bottom_position_array]
normal_force_average = (sum(top_normal_force) + sum(bottom_normal_force)) / 2 / 1E6
stress = normal_force_average / width
cycle = itasca.cycle()
if statistic.models.Statistic.objects.filter(project=project, cycle=cycle).count():
state = statistic.models.Statistic.objects.get(project=project, cycle=itasca.cycle())
state.stress = stress
state.save()
else:
statistic.models.Statistic.objects.create(project=project, cycle=cycle, stress=stress)
return stress
def get_strain(project: statistic.models.Project):
"""获取试样的应变"""
height = project.specimen_height / 1000
top_position = wall.find('top').pos_y()
bottom_position = wall.find('bottom').pos_y()
strain = (height - (top_position - bottom_position)) / height
cycle = itasca.cycle()
if statistic.models.Statistic.objects.filter(project=project, cycle=cycle).count():
state = statistic.models.Statistic.objects.get(project=project, cycle=itasca.cycle())
state.strain = strain
state.save()
else:
statistic.models.Statistic.objects.create(project=project, cycle=cycle, strain=strain)
return strain
def cycle(project: statistic.models.Project, number):
"""
前进给定的帧数
:param project:
:param number: 帧数
:return:
"""
itasca.command(f'model cycle {number}')
state, created = statistic.models.Statistic.objects.get_or_create(project=project, cycle=itasca.cycle())
state.save_state()
def save(path: str):
"""
将当前状态保存为给定的路径
:param name: 存档文件的名字
:return:
"""
itasca.command(f'model save "{path}"')
def restore(path: str):
"""
从给定的路径中恢复状态
:param path: sav文件的路径
:return:
"""
itasca.command(f'model restore "{path}"')
def clear():
"""清空模型"""
itasca.command('model new')
|
import unittest
from garminworkouts.models.power import Power
class PowerTestCase(unittest.TestCase):
def test_valid_power_to_watts_conversion(self):
ftp = 200
diff = 0
valid_powers = [
("0", 0),
("0%", 0),
("10", 20),
("10%", 20),
("100", 200),
("100%", 200),
("120", 240),
("120%", 240),
("150", 300),
("150%", 300),
("0W", 0),
("0w", 0),
("100W", 100),
("100w", 100),
("1000W", 1000),
("1000w", 1000)
]
for power, watts in valid_powers:
with self.subTest(msg="Expected %d watts for '%s' (ftp=%s, diff=%s)" % (watts, power, ftp, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_invalid_power_to_watts_conversion(self):
ftp = 200
diff = 0
invalid_powers = ["-1", "-1%", "2500", "2500%", "-1W", "5000W", "foo", "foo%", "fooW"]
for power in invalid_powers:
with self.subTest(msg="Expected ValueError for '%s" % power):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_diff(self):
power = "100"
ftp = 200
valid_diffs = [
(0.5, 300),
(0.05, 210),
(-0.05, 190),
(-0.5, 100)
]
for diff, watts in valid_diffs:
with self.subTest(msg="Expected %d watts for diff '%s' (power=%s, ftp=%s)" % (watts, diff, power, ftp)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_diff(self):
power = "100"
ftp = 200
invalid_diffs = [-1.0, 1.0, "foo"]
for diff in invalid_diffs:
with self.subTest(msg="Expected ValueError for '%s" % diff):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
def test_power_to_watts_conversion_with_valid_ftp(self):
power = "50"
diff = 0
valid_ftps = [
(0, 0),
(100, 50),
(250, 125),
(999, 500)
]
for ftp, watts in valid_ftps:
with self.subTest(msg="Expected %d watts for ftp '%s' (power=%s, diff=%s)" % (watts, ftp, power, diff)):
self.assertEqual(Power(power).to_watts(ftp, diff), watts)
def test_power_to_watts_conversion_with_invalid_ftp(self):
power = "100"
diff = 0
invalid_ftps = [-1, 1000, "foo"]
for ftp in invalid_ftps:
with self.subTest(msg="Expected ValueError for '%s" % ftp):
with self.assertRaises(ValueError):
Power(power).to_watts(ftp, diff)
if __name__ == '__main__':
unittest.main()
|
import setlibspath
from builder import *
a = Builder()
|
from app.core.config import settings
def test_fetch_ideas_reddit_sync(client):
# When
response = client.get(f"{settings.API_V1_STR}/recipes/ideas/")
data = response.json()
# Then
assert response.status_code == 200
for key in data.keys():
assert key in ["recipes", "easyrecipes", "TopSecretRecipes"]
|