from flask import Flask, abort, request
from flask_cors import CORS
import redis as red
import serial, serial.tools.list_ports
import json, struct, sys, time
import threading
####* DEFAULT ARGUMENTS *####
try:
sys.argv[1]
except IndexError:
baudrate = 115200
else:
    baudrate = int(sys.argv[1])  # command-line arguments arrive as strings
try:
sys.argv[2]
except IndexError:
# For use in desktop environment:
#ports = serial.tools.list_ports.comports()
#print(ports)
#com_list = []
#for p in ports:
# com_list.append(p.device)
#print(com_list)
#port = com_list[1]
#print(port)
# For use in live environment
    port = '/dev/controller_valve' # default value
else:
port = sys.argv[2]
try:
sys.argv[3]
except IndexError:
caching_stream_name = "sensor_stream"
else:
caching_stream_name = sys.argv[3]
try:
sys.argv[4]
except IndexError:
valve_stream_name = "valve_stream"
else:
valve_stream_name = sys.argv[4]
####* END DEFAULT ARGUMENTS *####
# Flask app setup
app = Flask(__name__)
CORS(app) # Enables cross-origin requests (e.g. POSTs from the browser UI)
# Locks for thread-safe access to shared state
lock = threading.Lock()
serial_lock = threading.Lock()
# Creating redis client instance
redis = red.Redis(host='redis-database', port=6379)
# Loop control variable
CACHING = False
# Serial port setup
ser = serial.Serial(timeout=1)
ser.baudrate = baudrate
ser.port = port
ser.open()
# JSON Key list for sensor data
Sensor_Keys = [
"Timestamp",
"PT_HE",
"PT_Purge",
"PT_Pneu",
"PT_FUEL_PV",
"PT_LOX_PV",
#"PT_FUEL_INJ",
"PT_CHAM",
"TC_FUEL_PV",
"TC_LOX_PV",
"TC_LOX_Valve_Main",
"TC_WATER_In",
"TC_WATER_Out",
"TC_CHAM",
#"RC_LOX_Level",
"FT_Thrust",
"FL_WATER"
]
# JSON Key list for valve status
Valve_Keys = [
"Timestamp",
"FUEL_Press",
"LOX_Press",
"FUEL_Vent",
"LOX_Vent",
"MAIN",
"FUEL_Purge",
"LOX_Purge",
]
def run_app():
app.run(debug=False, threaded=True, host='0.0.0.0', port=3005)
def Cache(ser, redis, caching_stream_name, valve_stream_name):
# Function for sorting out which type of message is being received
global CACHING
# Sensor serial messages are of length 40 bytes
SENSOR_BUFFER_LENGTH = 40
# Valve serial messages are of length 19 bytes
VALVE_BUFFER_LENGTH = 19
# Flush the input buffer to avoid overflow and get fresh data
ser.reset_input_buffer()
# Both are padded with a starter sequence of 4 zero bytes
# and terminated with a sequence of 4 max bytes
# Start the loop in the right place by finding a terminator string in the buffer
# serial_buffer = ser.read_until(b'\xFF\xFF\xFF\xFF\x00\x00\x00\x00')
while True:
print(time.time()*1000)
if CACHING:
#print("LOOPING")
# Extract the next sequence of serial data until the terminator/starter packets
serial_buffer = ser.read_until(b'\xFF\xFF\xFF\xFF\x00\x00\x00\x00')
#print(serial_buffer)
# If the serial buffer has a length of 40 bytes, it is a sensor data package
if len(serial_buffer) == SENSOR_BUFFER_LENGTH:
# Unpack the struct that is the serial message
# Arduino is little-endian
unpack_data = struct.unpack('<i h h h h h h h h h h h h h h d', serial_buffer)
# Build the JSON with struct method
data = {}
for item in range(len(Sensor_Keys)):
data[Sensor_Keys[item]] = str(unpack_data[item])
#print(data)
json_data = json.dumps(data)
                json_data = json.loads(json_data) # Round-trip through JSON to normalize the dict for xadd
# Insert to redis
if json_data:
redis.xadd(caching_stream_name, json_data)
# print("Added to redis stream")
elif len(serial_buffer) == VALVE_BUFFER_LENGTH:
# Unpack the struct that is the serial message
# Arduino is little-endian
unpack_data = struct.unpack('<i b b b b b b b d', serial_buffer)
print(unpack_data)
# Build the JSON with struct method
data = {}
for item in range(len(Valve_Keys)):
data[Valve_Keys[item]] = str(unpack_data[item])
print(data)
json_data = json.dumps(data)
                json_data = json.loads(json_data) # Round-trip through JSON to normalize the dict for xadd
print(json_data)
# Insert to redis
if json_data:
redis.xadd(valve_stream_name, json_data)
print("Added to redis stream")
else:
# If the buffer length is improper, the message is invalid and should be discarded
print("=====INVALID MESSAGE=====")
print(serial_buffer)
# else:
# print("Not caching...")
def compose_pair(key, state, instruction):
    # Map each valve key to its single-character command byte
    if key == Valve_Keys[1]:
        leadByte = b'\x53' # FUEL_Pres(S)
    elif key == Valve_Keys[2]:
        leadByte = b'\x73' # LOX_Pres(s)
    elif key == Valve_Keys[3]:
        leadByte = b'\x54' # FUEL_Ven(T)
    elif key == Valve_Keys[4]:
        leadByte = b'\x74' # LOX_Ven(t)
    elif key == Valve_Keys[5]:
        leadByte = b'\x4D' # (M)ain
    elif key == Valve_Keys[6]:
        leadByte = b'\x45' # FUEL_Purg(E)
    elif key == Valve_Keys[7]:
        leadByte = b'\x65' # LOX_Purg(e)
    # int() coerces JSON values sent as 0/1 or "0"/"1"
    if int(state):
        stateByte = b'\x31' # True (1)
    else:
        stateByte = b'\x30' # False (0)
    instruction += leadByte + stateByte
    return instruction
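# Worked example (sketch): with all seven valves commanded ON, the assembled
# instruction frame is '<' followed by seven lead-byte/state-byte pairs and '>':
#
#   instruction = b'\x3C'                      # '<'
#   for key in Valve_Keys[1:]:
#       instruction = compose_pair(key, True, instruction)
#   instruction += b'\x3E'                     # '>'
#   assert instruction == b'<S1s1T1t1M1E1e1>'  # 16 bytes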
if __name__ == "__main__":
# Threading the routes
flaskApp_thread = threading.Thread(target=run_app)
caching_thread = threading.Thread(target=Cache, args=[ser, redis, caching_stream_name, valve_stream_name])
flaskApp_thread.start()
caching_thread.start()
@app.route('/serial/caching/START')
def cachingStart():
print('ACTION START')
# Begin pumping data into the redis database
try:
ser.open()
    except serial.SerialException:
        print("SERIAL ALREADY OPEN")
global CACHING
with lock:
CACHING = True
# This unblocks the Cache() while loop
return 'Caching Started'
@app.route('/serial/caching/STOP')
def cachingStop():
global CACHING
with lock:
CACHING = False
# ser.close()
return 'Caching Closed'
# Defining the valve data caching route
# Has two options...
# POST: sends a command downstream to the Arduino to execute
# GET: polls the Arduino for the current status
@app.route('/serial/valve/update', methods=['POST', 'GET'])
def serialSend():
print("ROUTE REACHED")
print(request.method)
if request.method == 'POST':
# Command is sent in as a JSON object
message = request.get_json(force=True)
print(message)
# Instruction starter character '<'
instruction = b'\x3C'
# Pull the items out of the JSON object using the key list
for key in Valve_Keys[1:]:
print(key)
print(int(message[key]))
# Pair up the keys and the instruction values
instruction = compose_pair(key,message[key],instruction)
# Instruction terminator character '>'
instruction += b'\x3E'
ser.write(instruction)
print(instruction)
if request.method == 'GET':
# Generate a polling message for the Arduino
# A string of same length as the instruction message for simplicity
status_request_char = b'\x3F'
status_request = b'\x3C'
for i in range(0,14):
status_request += status_request_char
status_request += b'\x3E'
ser.write(status_request)
print(status_request)
return "Message Sent"
# ======================================================================
def decode(data):
    # Parse lines of the form "x1,y1 -> x2,y2" into ((x1, y1), (x2, y2)) tuples
    strings = [x for x in data.split('\n') if len(x) > 0]
    # Split each line on the arrow, then strip whitespace from both endpoints
    pairs = [l.split('->') for l in strings]
    pairs = [[i.strip() for i in x] for x in pairs]
    # Convert each "x,y" endpoint into a tuple of ints
    pairs = [tuple([tuple([int(i) for i in j.split(',')]) for j in x]) for x in pairs]
    return pairs
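# e.g. decode("0,9 -> 5,9") == [((0, 9), (5, 9))]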
def print_map(grid):
    # `grid` maps (x, y) -> number of lines covering that point
    if len(grid) > 0:
        dx = max([v[0] for v in grid.keys()])
        dy = max([v[1] for v in grid.keys()])
        print(f'dx = {dx}, dy = {dy}')
        for y in range(0, dy+1):
            for x in range(0, dx+1):
                print(grid.get((x, y), 0), end='')
            print('')
        print('')
    else:
        print('empty map')
def compute(data):
    lines = decode(data)
    print(lines)
    grid = {}
    for ((sx, sy), (ex, ey)) in lines:
        print(f'line = {sx},{sy} to {ex},{ey}')
        if sx != ex and sy == ey:
            # fill in the x direction
            if sx > ex:
                sx, ex = ex, sx
            for x in range(sx, ex+1):
                grid[(x, sy)] = grid.get((x, sy), 0) + 1
        elif sy != ey and sx == ex:
            # fill in the y direction
            if sy > ey:
                sy, ey = ey, sy
            for y in range(sy, ey+1):
                grid[(sx, y)] = grid.get((sx, y), 0) + 1
        else:
            # fill along the 45-degree diagonal, stepping x as y advances
            if sy > ey:
                sy, ey = ey, sy
                sx, ex = ex, sx
            xinc = 1
            if sx > ex:
                xinc = -1
            x = sx
            for y in range(sy, ey+1):
                grid[(x, y)] = grid.get((x, y), 0) + 1
                x = x + xinc
    # print_map(grid)
    print(grid)
    # Count the points covered by two or more lines
    ones = [1 for v in grid.values() if v > 1]
    print(ones)
    print(sum(ones))
test_data="""
0,9 -> 5,9
8,0 -> 0,8
9,4 -> 3,4
2,2 -> 2,1
7,0 -> 7,4
6,4 -> 2,0
0,9 -> 2,9
3,4 -> 1,4
0,0 -> 8,8
5,5 -> 8,2"""
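# compute(test_data)  # with diagonals included, the sample grid should report 12 overlap points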
real_data="""
941,230 -> 322,849
762,196 -> 701,257
656,197 -> 595,136
687,692 -> 57,692
37,953 -> 903,87
674,102 -> 84,102
952,323 -> 786,157
807,948 -> 430,948
280,66 -> 514,66
810,381 -> 928,263
41,278 -> 112,207
754,11 -> 754,574
499,830 -> 725,604
713,172 -> 658,172
805,54 -> 594,54
442,910 -> 40,508
160,170 -> 925,935
265,899 -> 265,313
960,976 -> 77,93
820,244 -> 877,187
883,501 -> 345,501
12,978 -> 941,49
988,46 -> 988,572
285,775 -> 285,298
718,69 -> 121,69
218,641 -> 146,641
857,277 -> 124,277
32,36 -> 657,36
964,280 -> 609,280
739,981 -> 910,981
960,794 -> 243,794
447,682 -> 751,378
813,103 -> 813,240
568,705 -> 497,705
888,47 -> 888,231
936,95 -> 336,695
305,349 -> 18,636
54,240 -> 54,222
28,704 -> 625,107
680,325 -> 680,623
209,405 -> 209,123
947,906 -> 947,721
149,810 -> 834,125
897,875 -> 146,124
928,267 -> 928,484
871,516 -> 871,136
954,725 -> 706,725
680,645 -> 958,645
680,326 -> 908,326
173,157 -> 890,874
842,802 -> 166,126
750,442 -> 270,922
567,891 -> 567,784
374,623 -> 374,174
979,725 -> 765,511
336,440 -> 82,440
214,213 -> 939,938
652,815 -> 763,815
220,48 -> 331,159
580,522 -> 141,522
286,685 -> 286,779
865,343 -> 865,257
738,898 -> 405,565
703,571 -> 420,571
792,368 -> 792,955
738,905 -> 738,79
646,95 -> 737,95
930,908 -> 72,50
310,933 -> 310,243
192,22 -> 918,748
245,803 -> 81,639
567,218 -> 901,218
148,950 -> 965,133
147,772 -> 159,772
774,84 -> 774,960
860,798 -> 372,798
856,131 -> 856,703
368,603 -> 247,603
587,533 -> 301,533
832,461 -> 832,506
164,709 -> 960,709
874,471 -> 327,471
346,237 -> 346,921
683,300 -> 910,527
353,717 -> 353,575
586,578 -> 798,366
27,813 -> 27,434
311,391 -> 418,391
369,304 -> 33,304
591,226 -> 591,558
634,545 -> 513,545
439,257 -> 207,257
42,791 -> 581,252
155,801 -> 155,294
599,603 -> 599,182
48,607 -> 337,896
199,828 -> 506,828
28,147 -> 733,852
799,563 -> 799,22
206,625 -> 455,874
185,330 -> 335,480
161,746 -> 590,746
932,13 -> 269,13
649,746 -> 649,309
463,169 -> 930,636
568,251 -> 386,251
739,692 -> 233,692
941,989 -> 84,132
513,356 -> 513,628
534,168 -> 285,168
447,563 -> 447,698
898,915 -> 791,808
339,405 -> 432,405
414,940 -> 335,940
591,741 -> 59,741
347,330 -> 347,341
186,40 -> 438,292
849,872 -> 295,318
406,620 -> 938,620
346,226 -> 864,226
609,40 -> 478,171
820,900 -> 947,900
201,63 -> 201,107
984,652 -> 47,652
193,204 -> 776,204
173,892 -> 740,892
389,675 -> 709,355
489,954 -> 546,954
18,82 -> 587,651
646,150 -> 675,150
618,805 -> 618,592
178,617 -> 178,606
179,30 -> 505,30
984,21 -> 21,984
172,167 -> 15,167
17,209 -> 192,209
814,945 -> 814,18
385,632 -> 161,632
126,41 -> 474,389
575,778 -> 737,778
74,270 -> 147,270
891,248 -> 467,672
95,426 -> 95,728
235,73 -> 235,583
730,302 -> 730,466
388,587 -> 377,598
525,155 -> 184,155
370,278 -> 966,874
950,150 -> 444,656
644,935 -> 401,935
798,515 -> 506,807
976,562 -> 253,562
674,350 -> 603,421
686,653 -> 576,653
691,278 -> 593,180
964,961 -> 76,73
735,582 -> 735,389
786,885 -> 76,885
402,732 -> 231,732
660,881 -> 660,525
683,383 -> 683,364
174,20 -> 174,75
692,819 -> 107,819
344,669 -> 577,902
562,126 -> 697,261
621,344 -> 621,707
731,892 -> 213,374
216,828 -> 663,828
990,534 -> 990,356
973,714 -> 519,714
25,981 -> 983,23
659,399 -> 535,275
967,885 -> 183,101
612,684 -> 732,684
955,485 -> 955,806
582,714 -> 582,719
342,203 -> 905,203
188,488 -> 272,488
659,65 -> 659,679
306,85 -> 605,384
975,847 -> 975,353
742,989 -> 742,652
917,524 -> 934,524
890,571 -> 662,799
901,791 -> 901,118
631,447 -> 114,447
850,28 -> 797,28
842,759 -> 91,759
659,538 -> 253,944
693,69 -> 693,452
161,515 -> 789,515
892,630 -> 892,785
78,947 -> 931,947
561,728 -> 11,178
138,842 -> 138,133
890,373 -> 628,373
509,370 -> 592,370
982,41 -> 185,838
184,210 -> 184,218
390,525 -> 390,558
387,151 -> 387,39
718,808 -> 833,808
206,234 -> 206,620
84,150 -> 84,959
336,468 -> 307,468
764,19 -> 739,44
752,607 -> 643,607
233,149 -> 112,149
368,612 -> 725,255
929,497 -> 909,477
829,274 -> 829,190
312,268 -> 312,128
519,18 -> 519,552
896,19 -> 140,19
368,727 -> 368,114
233,813 -> 750,813
477,758 -> 477,213
615,171 -> 615,530
38,461 -> 301,461
862,107 -> 154,815
271,52 -> 271,517
203,936 -> 365,936
96,700 -> 13,617
290,554 -> 389,455
377,923 -> 377,890
347,511 -> 147,511
889,412 -> 762,412
558,412 -> 424,412
45,838 -> 45,845
958,27 -> 958,454
154,244 -> 20,244
315,154 -> 315,173
135,618 -> 135,71
380,422 -> 131,671
314,500 -> 314,873
915,320 -> 915,159
213,772 -> 977,772
14,22 -> 978,986
444,759 -> 444,385
730,650 -> 730,210
532,551 -> 633,652
547,426 -> 335,426
868,191 -> 156,903
462,599 -> 611,748
729,709 -> 729,714
665,229 -> 849,413
880,947 -> 880,159
249,837 -> 249,604
575,205 -> 196,584
960,665 -> 320,25
617,853 -> 412,853
224,60 -> 224,467
226,741 -> 226,47
371,595 -> 118,342
371,708 -> 371,561
236,141 -> 955,860
55,509 -> 55,938
684,885 -> 684,670
93,509 -> 497,105
284,61 -> 812,61
438,353 -> 242,353
77,716 -> 363,430
283,769 -> 905,147
56,799 -> 551,799
804,637 -> 804,526
476,54 -> 154,54
686,400 -> 686,145
740,905 -> 417,905
21,113 -> 823,915
286,132 -> 880,726
923,378 -> 771,378
924,922 -> 36,34
801,609 -> 801,407
465,671 -> 550,756
628,235 -> 628,842
684,840 -> 716,808
841,366 -> 495,712
740,208 -> 740,174
657,370 -> 657,731
817,781 -> 466,781
308,894 -> 308,370
497,233 -> 755,233
35,145 -> 35,398
383,163 -> 578,163
620,985 -> 620,849
178,253 -> 178,724
556,51 -> 556,525
650,187 -> 706,243
161,988 -> 599,550
861,256 -> 501,616
46,555 -> 181,555
980,975 -> 980,916
345,751 -> 479,617
534,642 -> 534,202
901,240 -> 901,490
984,280 -> 337,927
578,663 -> 578,298
377,943 -> 259,943
975,38 -> 39,974
697,870 -> 387,560
147,520 -> 218,520
683,711 -> 486,711
825,26 -> 122,729
855,84 -> 751,84
558,945 -> 989,945
660,195 -> 597,195
889,696 -> 317,696
969,248 -> 240,977
598,625 -> 598,148
176,151 -> 256,151
939,70 -> 648,70
645,431 -> 411,431
502,518 -> 221,518
821,988 -> 213,988
361,850 -> 684,850
506,173 -> 506,405
323,151 -> 726,151
131,519 -> 35,519
164,445 -> 798,445
425,989 -> 425,133
18,739 -> 684,73
138,545 -> 138,155
401,104 -> 766,104
864,855 -> 203,855
636,361 -> 604,361
820,970 -> 820,882
866,859 -> 835,859
112,507 -> 112,715
529,494 -> 529,928
104,469 -> 193,469
82,841 -> 831,92
258,518 -> 258,778
34,917 -> 135,917
777,553 -> 985,345
64,952 -> 719,297
341,224 -> 902,224
87,128 -> 525,566
951,400 -> 448,903
344,963 -> 21,963
983,244 -> 983,503
938,771 -> 635,771
560,262 -> 560,974
46,386 -> 75,386
898,747 -> 898,17
239,929 -> 149,929
849,881 -> 849,251
204,204 -> 204,753
830,33 -> 830,130
304,339 -> 42,339
565,312 -> 773,312
387,523 -> 234,523
239,421 -> 543,725
197,433 -> 197,723
595,21 -> 370,21
547,171 -> 480,104
639,910 -> 639,241
908,185 -> 560,185
947,565 -> 947,411
211,670 -> 588,293
753,708 -> 753,624
36,147 -> 859,970
423,94 -> 930,94
613,680 -> 607,680
277,263 -> 836,822
186,413 -> 827,413
483,173 -> 142,173
25,771 -> 409,387
328,916 -> 613,631
267,604 -> 724,147
430,616 -> 150,896
692,463 -> 50,463
306,360 -> 306,653
736,948 -> 736,174
797,529 -> 774,529
492,486 -> 492,812
659,429 -> 102,429
582,503 -> 695,616
780,62 -> 780,164
58,318 -> 387,318
286,694 -> 286,396
248,241 -> 248,361
112,963 -> 707,963
771,722 -> 636,722
508,76 -> 389,76
435,307 -> 201,541
167,312 -> 618,763
721,407 -> 305,823
57,203 -> 516,203
83,239 -> 83,607
810,686 -> 137,13
817,268 -> 101,984
379,975 -> 379,631
597,38 -> 611,38
56,504 -> 56,900
108,587 -> 261,740
625,426 -> 476,426
248,486 -> 643,881
932,25 -> 21,936
388,613 -> 388,296
644,188 -> 644,273
871,425 -> 871,791
722,866 -> 722,39
96,579 -> 96,97
876,64 -> 297,643
581,633 -> 59,633
11,10 -> 989,988
947,55 -> 266,736
532,553 -> 735,756
898,855 -> 83,40
533,289 -> 306,62
497,736 -> 332,571
871,201 -> 345,727
550,686 -> 256,686
858,585 -> 607,836
380,171 -> 15,171
864,112 -> 864,686
791,857 -> 305,857
898,579 -> 741,579
479,713 -> 113,713
19,143 -> 779,903
347,161 -> 140,368
479,395 -> 534,340
929,37 -> 77,889
128,958 -> 884,202
921,18 -> 921,650
263,550 -> 263,280
155,592 -> 235,592
565,34 -> 565,454
913,371 -> 173,371
199,158 -> 974,933
98,775 -> 98,234
649,576 -> 649,444
801,855 -> 548,855
859,913 -> 363,913
274,487 -> 274,654
729,982 -> 443,982
664,827 -> 77,240
656,885 -> 656,350
916,74 -> 284,706
439,31 -> 439,175
423,753 -> 280,753
424,914 -> 948,914
980,723 -> 980,674
656,437 -> 626,407
577,654 -> 423,654
19,224 -> 424,224
310,181 -> 704,575
828,296 -> 828,308
905,151 -> 955,151
319,178 -> 892,178
972,939 -> 65,32
497,98 -> 91,98
987,402 -> 943,446
904,19 -> 174,749
265,885 -> 265,835
475,414 -> 658,597
610,93 -> 938,93
961,892 -> 661,892
297,600 -> 378,600
405,637 -> 52,284
439,874 -> 439,612
275,185 -> 275,218
220,840 -> 220,735
372,153 -> 644,425
896,964 -> 896,461
916,484 -> 951,449
485,355 -> 456,355
198,793 -> 198,132
614,735 -> 561,735
181,591 -> 147,591
175,289 -> 159,289
899,758 -> 962,695
506,647 -> 506,858
443,828 -> 720,828
623,641 -> 623,631
202,409 -> 891,409
486,751 -> 80,345
781,73 -> 781,710
911,643 -> 911,571
799,151 -> 89,861
716,815 -> 810,815
947,517 -> 947,575
704,260 -> 704,727
113,581 -> 113,606
408,252 -> 408,761
601,753 -> 457,609
851,424 -> 501,774
670,941 -> 916,941
480,839 -> 205,564
912,949 -> 38,75
477,39 -> 925,487
139,898 -> 309,898
93,386 -> 93,194
184,132 -> 943,891
247,557 -> 247,182
832,22 -> 76,778
61,814 -> 806,69
816,640 -> 604,428
214,561 -> 623,152
698,858 -> 389,858"""
compute(real_data)
# ======================================================================
import os
import time
import argparse
import logging
import socket
import getpass
import multiprocessing as mp
from glob import glob
import numpy as np
import pandas as pd
import pyarrow.parquet as pq
import torch
import torch.nn.functional as F
import onnx
import onnxruntime as ort
from transformers import AutoTokenizer
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
print('libs loaded')
parser = argparse.ArgumentParser()
parser.add_argument("--input_path", type=str, help="path to input data")
parser.add_argument("--output_path", type=str, help="path where inference csv is saved")
parser.add_argument("--country_code", type=str, help="path where inference csv is saved")
parser.add_argument("--iteration_number", type=int)
args = parser.parse_args()
print(args)
####################################################################################################################################
# HELPER FUNCTIONS
####################################################################################################################################
# inference
def get_tokens(tokens_dict, i):
i_tokens_dict = dict()
for key in ['input_ids', 'token_type_ids', 'attention_mask']:
i_tokens_dict[key] = tokens_dict[key][i]
tokens = {name: np.atleast_2d(value) for name, value in i_tokens_dict.items()}
return tokens
def chunkIt(seq, num):
avg = len(seq) / float(num)
out = []
last = 0.0
while last < len(seq):
out.append(seq[int(last):int(last + avg)])
last += avg
return out
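# e.g. chunkIt(list(range(5)), 2) -> [[0, 1], [2, 3, 4]]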
def run_inference(ort_session, tokens):
ort_outs = ort_session.run(None, tokens)
torch_onnx_output = torch.tensor(ort_outs[0], dtype=torch.float32)
onnx_logits = F.softmax(torch_onnx_output, dim=1)
return onnx_logits.detach().cpu().numpy()[0].tolist()
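# A Pool worker cannot receive an ort.InferenceSession through pickling (the
# session wraps native state), so the sketch below gives each worker process
# its own session, built once by a Pool initializer and wired to the existing
# run_inference():
_worker_session = None
def _init_worker(onnx_model):
    # Build one InferenceSession per worker process
    global _worker_session
    options = ort.SessionOptions()
    options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL
    options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL
    options.intra_op_num_threads = 1
    _worker_session = ort.InferenceSession(onnx_model, options)
def _run_inference_on_tokens(tokens):
    return run_inference(_worker_session, tokens)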
def inference(onnx_model, model_dir, examples):
quantized_str = ''
if 'quantized' in onnx_model:
quantized_str = 'quantized'
    # The session options and InferenceSession are created per worker in
    # _init_worker; the parent process only needs the model path
    print(onnx_model)
# pytorch pretrained model and tokenizer
if 'bertweet' in onnx_model:
tokenizer = AutoTokenizer.from_pretrained(model_dir, normalization=True)
else:
tokenizer = AutoTokenizer.from_pretrained(model_dir)
tokenizer_str = "TokenizerFast"
print("**************** {} ONNX inference with batch tokenization and with {} tokenizer****************".format(
quantized_str, tokenizer_str))
    tokens_dict = tokenizer.batch_encode_plus(examples, max_length=128, truncation=True)
tokens_dict_list = [dict(zip(tokens_dict, t)) for t in zip(*tokens_dict.values())]
    with mp.Pool(mp.cpu_count(), initializer=_init_worker, initargs=(onnx_model,)) as pool:
        onnx_inference = pool.map(_run_inference_on_tokens, tokens_dict_list)
return onnx_inference
def get_env_var(varname, default):
    if os.environ.get(varname) is not None:
var = int(os.environ.get(varname))
print(varname, ':', var)
else:
var = default
print(varname, ':', var, '(Default)')
return var
# Choose Number of Nodes To Distribute Credentials: e.g. jobarray=0-4, cpu_per_task=20, credentials = 90 (<100)
SLURM_ARRAY_TASK_ID = get_env_var('SLURM_ARRAY_TASK_ID', 0)
SLURM_ARRAY_TASK_COUNT = get_env_var('SLURM_ARRAY_TASK_COUNT', 1)
SLURM_JOB_ID = get_env_var('SLURM_JOB_ID', 1)
print('Hostname:', socket.gethostname())
print('SLURM_ARRAY_TASK_ID', SLURM_ARRAY_TASK_ID)
print('SLURM_ARRAY_TASK_COUNT', SLURM_ARRAY_TASK_COUNT)
print('Number of CPUs per task:', mp.cpu_count())
# ####################################################################################################################################
# # loading data
# ####################################################################################################################################
path_to_data = args.input_path
print('Load random Tweets:')
start_time = time.time()
paths_to_random = list(np.array_split(
glob(os.path.join(path_to_data, '*.parquet')),
SLURM_ARRAY_TASK_COUNT)[SLURM_ARRAY_TASK_ID])
print('#files:', len(paths_to_random))
tweets_random = pd.DataFrame()
for file in paths_to_random:
print(file)
tweets_random = pd.concat([tweets_random, pd.read_parquet(file)[['tweet_id', 'text']]])
print(tweets_random.shape)
print('load random sample:', str(time.time() - start_time), 'seconds')
print(tweets_random.shape)
print('dropping duplicates:')
# random contains 7.3G of data!!
start_time = time.time()
tweets_random = tweets_random.drop_duplicates('text')
print('drop duplicates:', str(time.time() - start_time), 'seconds')
print(tweets_random.shape)
start_time = time.time()
print('converting to list')
examples = tweets_random.text.values.tolist()
print('convert to list:', str(time.time() - start_time), 'seconds')
best_model_folders_dict = {'iter0': { 'US': {
'lost_job_1mo': 'DeepPavlov_bert-base-cased-conversational_jan5_iter0_928497_SEED_14',
'is_hired_1mo': 'DeepPavlov_bert-base-cased-conversational_jan5_iter0_928488_SEED_5',
'is_unemployed': 'DeepPavlov_bert-base-cased-conversational_jan5_iter0_928498_SEED_15',
'job_offer': 'DeepPavlov_bert-base-cased-conversational_jan5_iter0_928493_SEED_10',
'job_search': 'DeepPavlov_bert-base-cased-conversational_jan5_iter0_928486_SEED_3'
# 'lost_job_1mo': 'vinai_bertweet-base_jan5_iter0_928517_SEED_7',
# 'is_hired_1mo': 'vinai_bertweet-base_jan5_iter0_928525_SEED_15',
# 'is_unemployed': 'vinai_bertweet-base_jan5_iter0_928513_SEED_3',
# 'job_offer': 'vinai_bertweet-base_jan5_iter0_928513_SEED_3',
# 'job_search': 'vinai_bertweet-base_jan5_iter0_928513_SEED_3'
}}}
for column in ["is_unemployed", "lost_job_1mo", "job_search", "is_hired_1mo", "job_offer"]:
print('\n\n!!!!!', column)
loop_start = time.time()
best_model_folder = best_model_folders_dict[f'iter{str(args.iteration_number)}'][args.country_code][column]
model_path = os.path.join('/scratch/mt4493/twitter_labor/trained_models', args.country_code, best_model_folder, column, 'models', 'best_model')
print(model_path)
onnx_path = os.path.join(model_path, 'onnx')
print(onnx_path)
####################################################################################################################################
# TOKENIZATION and INFERENCE
####################################################################################################################################
print('Predictions of random Tweets:')
start_time = time.time()
onnx_labels = inference(os.path.join(onnx_path, 'converted-optimized-quantized.onnx'),
model_path,
examples)
print('time taken:', str(time.time() - start_time), 'seconds')
print('per tweet:', (time.time() - start_time) / tweets_random.shape[0], 'seconds')
####################################################################################################################################
# SAVING
####################################################################################################################################
print('Save Predictions of random Tweets:')
start_time = time.time()
final_output_path = args.output_path
if not os.path.exists(os.path.join(final_output_path, column)):
        print('>>>> directory does not exist, creating it')
os.makedirs(os.path.join(final_output_path, column))
# create dataframe containing tweet id and probabilities
predictions_random_df = pd.DataFrame(data=onnx_labels, columns=['first', 'second'])
predictions_random_df = predictions_random_df.set_index(tweets_random.tweet_id)
# reformat dataframe
predictions_random_df = predictions_random_df[['second']]
predictions_random_df.columns = ['score']
print(predictions_random_df.head())
predictions_random_df.to_parquet(
os.path.join(final_output_path, column,
str(getpass.getuser()) + '_random' + '-' + str(SLURM_ARRAY_TASK_ID) + '.parquet'))
print('saved to:\n', os.path.join(final_output_path, column,
str(getpass.getuser()) + '_random' + '-' + str(SLURM_ARRAY_TASK_ID) + '.parquet'),
'saved')
print('save time taken:', str(time.time() - start_time), 'seconds')
print('full loop:', str(time.time() - loop_start), 'seconds', (time.time() - loop_start) / len(examples))
# ======================================================================
# -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Declarative hooks for Cloud Identity Groups CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.command_lib.organizations import org_utils
# request hooks
def SetOrganization(unused_ref, args, request):
"""Set organization ID to request.organizationId.
Args:
unused_ref: A string representing the operation reference. Unused and may
be None.
args: The argparse namespace.
request: The request to modify.
Returns:
The updated request.
"""
org_id = org_utils.GetOrganizationId(args.organization)
if org_id:
request.organizationsId = org_id
return request
else:
raise org_utils.UnknownOrganizationError(args.organization)
# ======================================================================
import pandas as pd
from datetime import datetime, timedelta
from comet_utils.processor.processor import Processor as p
from time import sleep
from comet_utils.analyzer.entry_strategy import EntryStrategy
from comet_utils.analyzer.exit_strategy import ExitStrategy
import pickle
class Backtester(object):
    @classmethod
    def backtest(cls, start, end, params, prices, models):
status = "loads"
symbols = params["symbols"]
if "ALL" in symbols:
symbols = prices["crypto"].unique()
rt = params["retrack_days"]
s = params["signal"]
r = params["req"]
value = params["value"]
positions = params["positions"]
conservative = params["conservative"]
entry_strat = params["entry_strategy"]
exit_strat = params["exit_strategy"]
if exit_strat == "ai" or entry_strat == "ai":
rt = 7
market = prices.pivot_table(index="date",columns="crypto",values="close").reset_index()
market = p.column_date_processing(market)
market = market.fillna(method="ffill")
sim = market.melt(id_vars="date").copy()
ns = []
sim = sim[sim["value"] > 0]
for crypto in [x.lower() for x in symbols]:
try:
crypto_sim = sim[sim["crypto"]==crypto].copy()
crypto_sim.sort_values("date",inplace=True)
crypto_sim["signal"] = crypto_sim["value"].pct_change(rt)
crypto_sim["velocity"] = crypto_sim["signal"].pct_change(rt)
crypto_sim["inflection"] = crypto_sim["velocity"].pct_change(rt)
crypto_sim["p_sign_change"] = [row[1]["velocity"] * row[1]["inflection"] < 0 for row in crypto_sim.iterrows()]
crypto_sim.rename(columns={"inflection":"concavity"},inplace=True)
model = models[models["symbol"]==crypto.upper()]["model"].item()
crypto_sim["prediction"] = pickle.loads(model).predict(crypto_sim[["signal","velocity","concavity"]])
crypto_sim.rename(columns={"concavity":"inflection"},inplace=True)
ns.append(crypto_sim)
            except Exception:
                # Skip symbols without a trained model or enough price history
                continue
final = pd.concat(ns)
final = final[(final["date"] < end) & (final["value"] > 0)].dropna()
final.rename(columns={"value":"close"},inplace=True)
signal = float(s/100)
req = float(r/100)
date = start
trades = []
print("backtest_started")
for position in range(positions):
date = start
while date < end:
try:
status = "entries"
offerings = EntryStrategy.backtest_entry_analysis(date,entry_strat,final,signal,value,conservative)
if offerings.index.size < 1:
date = date + timedelta(days=1)
else:
status = "exits"
if offerings.index.size < position:
date = date + timedelta(days=1)
continue
else:
trade = offerings.iloc[position]
trade = ExitStrategy.backtest_exit_analysis(exit_strat,final,trade,rt,req)
trade["signal"] = signal
trade["req"] = req
trade["retrack_days"] = rt
trade["value"] = value
trade["conservative"] = conservative
trade["entry_strategy"] = entry_strat
trade["exit_strategy"] = exit_strat
trade["position"] = position
trade["positions"] = positions
trades.append(trade)
status = "date adding"
date = trade["sell_date"] + timedelta(days=1)
except Exception as e:
# print(date,status,trade,str(e))
date = date + timedelta(days=1)
return pd.DataFrame(trades)
    @classmethod
    def pairs_trading_backtest(cls, start, end, params, prices, correlations):
status = "loads"
rt = params["retrack_days"]
s = params["signal"]
r = params["req"]
value = params["value"]
conservative = params["conservative"]
entry_strat = params["entry_strategy"]
exit_strat = params["exit_strategy"]
market = prices.pivot_table(index="date",columns="crypto",values="close").reset_index()
market = p.column_date_processing(market)
market = market.fillna(method="ffill")
sim = market.melt(id_vars="date").copy()
ns = []
sim = sim[sim["value"] > 0]
for crypto in sim["crypto"].unique():
crypto_sim = sim[sim["crypto"]==crypto].copy()
crypto_sim.sort_values("date",inplace=True)
crypto_sim["signal"] = crypto_sim["value"].pct_change(rt)
crypto_sim["velocity"] = crypto_sim["signal"].pct_change(rt)
crypto_sim["inflection"] = crypto_sim["velocity"].pct_change(rt)
crypto_sim["p_sign_change"] = [row[1]["velocity"] * row[1]["inflection"] < 0 for row in crypto_sim.iterrows()]
ns.append(crypto_sim)
final = pd.concat(ns)
final = final[(final["date"] < end) & (final["value"] > 0)].dropna()
final.rename(columns={"value":"close"},inplace=True)
signal = float(s/100)
req = float(r/100)
date = start
trades = []
while date < end:
try:
status = "entries"
offerings = EntryStrategy.backtest_entry_analysis(date,entry_strat,final,signal,value,conservative)
if offerings.index.size < 1:
date = date + timedelta(days=1)
else:
status = "exits"
trade_i = offerings.iloc[0]
trade_i = ExitStrategy.backtest_exit_analysis(exit_strat,final,trade_i,rt,req)
trade_i["signal"] = signal
trade_i["req"] = req
trade_i["retrack_days"] = rt
trade_i["value"] = value
trade_i["conservative"] = conservative
trade_i["entry_strategy"] = entry_strat
trade_i["exit_strategy"] = exit_strat
trade_i["position"] = 0
trades.append(trade_i)
date_2 = date
while date_2 < trade_i["sell_date"] and date_2 < end:
try:
status = "entries"
symbol_correlations = correlations[(correlations["crypto"]==trade_i["crypto"]) & (correlations["value"]<=-0.7)]["crypto_ii"].unique()
second_final = final[final["crypto"].isin(list(symbol_correlations)) & (final["date"] <= trade_i["sell_date"])].sort_values("date")
offerings = EntryStrategy.backtest_entry_analysis(date_2,entry_strat,second_final,float(signal/3),value,conservative)
if offerings.index.size < 1:
date_2 = date_2 + timedelta(days=1)
else:
status = "exits"
trade_ii = offerings.iloc[0]
trade_ii = ExitStrategy.backtest_exit_analysis(exit_strat,second_final,trade_ii,rt,float(req/3))
trade_ii["signal"] = signal
trade_ii["req"] = req
trade_ii["retrack_days"] = rt
trade_ii["value"] = value
trade_ii["conservative"] = conservative
trade_ii["entry_strategy"] = entry_strat
trade_ii["exit_strategy"] = exit_strat
trade_ii["position"] = 1
trades.append(trade_ii)
date_2 = trade_ii["sell_date"] + timedelta(days=1)
except Exception as e:
# print(date,status,trade_ii,str(e))
date_2 = date_2 + timedelta(days=1)
status = "date adding"
date = trade_i["sell_date"] + timedelta(days=1)
except Exception as e:
print(date,status,trade_i,str(e))
date = date + timedelta(days=1)
return pd.DataFrame(trades)
    @classmethod
    def analyze(cls, current_trades, final):
viz = []
row = current_trades.iloc[0]
pv = 100
start_date = row["date"]
symbol = row["crypto"]
amount = float(pv/row["buy_price"])
end_date = row["sell_date"]
pv2 = amount * row["sell_price"]
viz.append({"date":start_date,"crypto":symbol,"amount":amount})
viz.append({"date":end_date,"crypto":symbol,"amount":amount})
track_date = start_date
while track_date < end_date - timedelta(days=1):
track_date = track_date + timedelta(days=1)
viz.append({"date":track_date,"crypto":symbol,"amount":amount})
for i in range(1,current_trades.index.size-1):
row = current_trades.iloc[i]
symbol = current_trades.iloc[i]["crypto"]
start_date = row["date"]
pv = pv2
amount = pv /row["buy_price"]
viz.append({"date":start_date,"crypto":symbol,"amount":amount})
track_date = start_date
end_date = row["sell_date"]
while track_date < end_date:
track_date = track_date + timedelta(days=1)
viz.append({"date":track_date,"crypto":symbol,"amount":amount})
pv2 = amount * row["sell_price"]
viz.append({"date":end_date,"crypto":symbol,"amount":amount})
window = pd.DataFrame(viz)
window["crypto"] = [x.upper() for x in window["crypto"]]
example = final.merge(window,how="left",on=["date","crypto"])
example = example.dropna().sort_values("date")
example["actual"] = example["amount"] * example["close"]
example["actual_delta"] = (example["actual"] - example["actual"].iloc[0]) / example["actual"].iloc[0]
return example[["date","actual_delta"]]
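# Hypothetical usage sketch (parameter names taken from the code above; the
# concrete values and strategy names are placeholders, and `prices`/`models`
# are assumed to be DataFrames with date/crypto/close and symbol/model columns):
#
#   params = {"symbols": ["BTC"], "retrack_days": 5, "signal": 10, "req": 5,
#             "value": 100, "positions": 1, "conservative": True,
#             "entry_strategy": "classic", "exit_strategy": "classic"}
#   trades = Backtester.backtest(start, end, params, prices, models)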
# ======================================================================
# Copyright (c) 2013, Kevin Greenan (kmgreen2@gmail.com)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution. THIS SOFTWARE IS
# PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
# NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# PyEClib Companion tool
# Goal: When defining an EC pool, help cluster admin make an informed choice
# between available EC implementations. Generate sample swift.conf + swift-
# ring-builder hints.
#
# Suggested features:
#
# - List the "EC types" supported - EC algorithms
# - List implementations of each EC type available on the platform
# (dumb-software-only, software with SIMD acceleration,
# specialized hardware, etc).
# - Benchmark each algorithm with possible implementation and display
# performance numbers.
# - Generate sample EC policy entry (for inclusion in swift.conf) for the
# best performing algorithm + implementation. (And optionally provide swift-
# ring-builder hints).
#
# Suggested EC policy entry format:
#
# ======== swift.conf ============
# [storage-policy:10]
# type = erasure_coding
# name = ec_jerasure_rs_cauchy_12_2
# ec_type = jerasure_rs_cauchy
# ec_k = 12
# ec_m = 2
# ============================
#
# (ec_type values are one of those available within PyEClib)
#
# User input: Num data, num parity, average file size
# Output: Ordered list of options and their corresponding conf entries
# (limit 10)
#
from pyeclib.ec_iface import ECDriver
import random
import string
import sys
import argparse
import time
import math
class Timer:
def __init__(self):
self.start_time = 0
self.end_time = 0
def reset(self):
self.start_time = 0
self.end_time = 0
def start(self):
self.start_time = time.time()
def stop(self):
self.end_time = time.time()
def curr_delta(self):
return self.end_time - self.start_time
def stop_and_return(self):
self.end_time = time.time()
return self.curr_delta()
def nCr(n, r):
f = math.factorial
    return f(n) // f(r) // f(n - r)  # integer division; the result is always exact
class ECScheme:
def __init__(self, k, m, ec_type):
self.k = k
self.m = m
self.ec_type = ec_type
def __str__(self):
return "k=%d m=%d ec_type=%s" % (self.k, self.m, self.ec_type)
valid_flat_xor_hd_3 = [(6, 6), (7, 6), (8, 6), (9, 6),
(10, 6), (11, 6), (12, 6), (13, 6),
(14, 6), (15, 6)]
valid_flat_xor_hd_4 = [(6, 6), (7, 6), (8, 6), (9, 6),
(10, 6), (11, 6), (12, 6), (13, 6),
(14, 6), (15, 6), (16, 6), (17, 6),
(18, 6), (19, 6), (20, 6)]
def get_viable_schemes(
max_num_frags, minimum_rate, avg_stripe_size, fault_tolerance):
list_of_schemes = []
#
# Get min_k from (minimum_rate * max_num_frags)
#
min_k = int(math.ceil(minimum_rate * max_num_frags))
#
# Get min_m from the fault tolerance
#
min_m = fault_tolerance
#
# Is not information theoretically possible
#
if (min_k + min_m) > max_num_frags:
return list_of_schemes
#
# Iterate over EC(k, max_num_frags-k) k \in [min_k, n-min_m]
#
for k in range(min_k, max_num_frags - min_m + 1):
list_of_schemes.append(
ECScheme(k, max_num_frags - k, "jerasure_rs_vand"))
list_of_schemes.append(
ECScheme(k, max_num_frags - k, "jerasure_rs_cauchy"))
#
# The XOR codes are a little tricker
# (only check if fault_tolerance = 2 or 3)
#
# Constraint for 2: k <= (m choose 2)
# Constraint for 3: k <= (m choose 3)
#
# The '3' flat_xor_hd_3 (and '4' in flat_xor_hd_4) refers to the Hamming
# distance, which means the code guarantees the reconstruction of any
# 2 lost fragments (or 3 in the case of flat_xor_hd_4).
#
# So, only consider the XOR code if the fault_tolerance matches and
# the additional constraint is met
#
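        # Worked example: with max_num_frags = 12, k = 6 and fault_tolerance = 2,
        # EC(6, 6) qualifies because nCr(6, 2) = 15 >= k and (6, 6) appears in
        # valid_flat_xor_hd_3.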
if fault_tolerance == 2:
max_k = nCr(max_num_frags - k, 2)
if k <= max_k and (k, max_num_frags - k) in valid_flat_xor_hd_3:
list_of_schemes.append(
ECScheme(k, max_num_frags - k, "flat_xor_hd_3"))
if fault_tolerance == 3:
max_k = nCr(max_num_frags - k, 3)
if k <= max_k and (k, max_num_frags - k) in valid_flat_xor_hd_4:
list_of_schemes.append(
ECScheme(k, max_num_frags - k, "flat_xor_hd_4"))
return list_of_schemes
parser = argparse.ArgumentParser(
description='PyECLib tool to evaluate viable EC options, benchmark them '
'and report results with the appropriate conf entries.')
parser.add_argument(
'-n',
type=int,
help='max number of fragments',
required=True)
parser.add_argument('-f', type=int, help='fault tolerance', required=True)
parser.add_argument(
'-r',
type=float,
help='minimum coding rate (num_data / num_data+num_parity)',
required=True)
parser.add_argument('-s', type=int, help='average stripe size', required=True)
parser.add_argument(
'-l',
type=int,
help='set limit on number of entries returned (default = 10)',
default=10,
)
args = parser.parse_args(sys.argv[1:])
MB = 1024 * 1024
# Generate a buffer of size 's'
if args.s > 10 * MB:
print("s must be smaller than 10 MB.")
sys.exit(1)
# Instantiate the timer
timer = Timer()
return_limit = args.l
schemes = get_viable_schemes(args.n, args.r, args.s, args.f)
# Results will be List[(ec_type, throughput)]
results = []
# Num iterations
num_iterations = 10
for scheme in schemes:
print(scheme)
# Generate a new string for each test
    # pyeclib's encode() expects bytes under Python 3
    file_bytes = ''.join(
        random.choice(
            string.ascii_uppercase + string.digits) for x in range(args.s)).encode('utf-8')
try:
ec_driver = ECDriver(k=scheme.k, m=scheme.m, ec_type=scheme.ec_type)
except Exception as e:
print("Scheme %s is not defined (%s)." % (scheme, e))
continue
timer.start()
for i in range(num_iterations):
        ec_driver.encode(file_bytes)
duration = timer.stop_and_return()
results.append((scheme, duration))
timer.reset()
print(results)
# Sort by encode duration, fastest first (Python 3 sorts take a key, not a cmp function)
results.sort(key=lambda x: x[1])
for i in range(len(results)):
    if i >= return_limit:
        break
print("\n\nPerf Rank #%d:" % i)
print(" ======== To Use this Policy, Copy and Paste Text (not including "
"this header and footer) to Swift Conf ========")
print(" type = erasure_coding")
print(" name = %s_%d_%d" % (results[i][0].ec_type,
results[i][0].k, results[i][0].m))
print(" ec_type = %s" % results[i][0].ec_type)
print(" ec_k = %s" % results[i][0].k)
print(" ec_m = %s" % results[i][0].m)
print(" ================================================================"
"==============================================")
# ======================================================================
from __future__ import absolute_import
from sentry.tasks.signals import signal
from sentry.testutils import TestCase
class SignalTest(TestCase):
def test_task_persistent_name(self):
assert signal.name == "sentry.tasks.signal"
# ======================================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
def getNGrams(n, text):
ngram_set = set()
text_length = len(text)
max_index_ngram_start = text_length - n
for i in range (max_index_ngram_start + 1):
ngram_set.add(tuple(text[i:i+n]))
#ngram_set.add(text[i])
return ngram_set
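# e.g. getNGrams(2, ['a', 'b', 'c']) == {('a', 'b'), ('b', 'c')}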
def splitIntoWords(sentences):
fullTextWords = sentences.strip().split(' ')
#for s in sentences:
# fullTextWords.extend(s.words)
return fullTextWords
def getWordNGrams(n, sentences):
assert (len(sentences) > 0)
assert (n > 0)
words = splitIntoWords(sentences)
return getNGrams(n, words)
def filterText(text):
    # Strip every character that is not alphanumeric or a space
    notAlnum = re.compile(r'[^0-9a-zA-Z ]+')
    text = notAlnum.sub('', text)
    return text
def Rouge_n(candidate_sentences, reference_sentences, n=3):
    # ROUGE-N: the n-gram recall of the candidate against the reference
candidate_sentences = filterText(candidate_sentences)
reference_sentences = filterText(reference_sentences)
if len(candidate_sentences) <= 0 or len(reference_sentences) <= 0:
raise (ValueError("Collections must contain at least 1 sentence."))
candidate_ngrams = getWordNGrams(n, candidate_sentences)
reference_ngrams = getWordNGrams(n, reference_sentences)
reference_count = float(len(reference_ngrams))
# Gets the overlapping ngrams between candidate and reference
overlapping_ngrams = candidate_ngrams.intersection(reference_ngrams)
overlapping_count = float(len(overlapping_ngrams))
return overlapping_count / reference_count
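# e.g. Rouge_n('the cat sat on the mat', 'the cat sat', n=2) == 1.0
# (both reference bigrams, "the cat" and "cat sat", appear in the candidate)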
def writeRougeScore(id,score_1,score_2,increase,fp):
fp.write(str(id) + '\t' + str(score_1) + '\t' + str(score_2) + '\t' + str(increase) + '\n')
#reference_sentences = 'Ese héroe de la Patria que se sacrificó precisamente, para que nosotros podamos cumplirle al pueblo colombiano y hacer realidad los sueños que desde hace dos siglos tiene el pueblo colombiano, de tener cada vez más un país justo, un país moderno, un país seguro, a eso aspira cualquier sociedad, a eso aspira cualquier país.Y que hoy ya podemos decir que no ocupamos ese segundo vergonzoso lugar entre los países más desiguales de toda la región, después de Haití.No hay un camino más efectivo para conseguir un país más justo que la educación.Si a Daniel José o cualquier niño o a cualquier niña le da uno una buena educación, inmediatamente se nivela el punto de partida y vamos a tener un país mucho más justo.Ninguna familia en Colombia, ningún padre o madre tiene por qué pagar un solo peso por la educación de sus hijos, del grado cero al grado once, eso es una revolución ya de por sí.Porque, qué hay más importante para una sociedad que los niños y las niñas, que van a ser el futuro, aprendan bien.'
#candidate_sentences = 'Maritza Frías es la rectora de una institución que se llama Almirante Padilla, yo también estuve en una institución que se llama Almirante Padilla, en la Escuela Naval en Cartagena, de allá me gradúe y por eso es tan importante cumplirle al Almirante Padilla, que nos está viendo en este momento.Ese héroe de la Patria que se sacrificó precisamente, para que nosotros podamos cumplirle al pueblo colombiano y hacer realidad los sueños que desde hace dos siglos tiene el pueblo colombiano, de tener cada vez más un país justo, un país moderno, un país seguro, a eso aspira cualquier sociedad, a eso aspira cualquier país.Y hemos puesto en marcha iniciativas para generar cada vez más empleos, óigase bien, Colombia ha sido el país de América Latina que en estos dos últimos años más empleos ha generado, más de dos millones de empleos, un número similar de empleos ha creado Brasil, pero Brasil tiene cuatro veces la población nuestra.'
#print Rouge_n(filterText(candidate_sentences),filterText(reference_sentences))
# ======================================================================
'''Write a program that reads several integers from the keyboard.
At the end of execution, show the average of all the values plus the largest and smallest values read.
The program must ask the user whether or not they want to keep entering values.'''
n = 0
med = 0
cont = 0
soma = 0
maior = 0
menor = 0
continuar = 'Y'
while continuar in 'Yy':
    n = int(input('Enter a number: '))
cont = cont + 1
soma = soma + n
if cont == 1:
maior = menor = n
else:
if n > maior:
maior = n
elif n < menor:
menor = n
    continuar = str(input('Do you want to continue [Y/N]? ')).strip().upper()
med = soma / cont
print('The average of the {} values entered was {:.2f}'.format(cont, med))
print('The smallest value was {} and the largest {}.'.format(menor, maior))
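# Sample run: entering 5, 2 and 9 and then answering N prints an average of
# 5.33, smallest value 2 and largest 9.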
# ======================================================================
"""Test pyscripts test module."""
from custom_components.pyscript.state import State
from pytest_homeassistant_custom_component.async_mock import patch
from homeassistant.core import Context
from homeassistant.helpers.state import State as HassState
async def test_service_call(hass):
"""Test calling a service using the entity_id as a property."""
with patch(
"custom_components.pyscript.state.async_get_all_descriptions",
return_value={
"test": {
"test": {"description": None, "fields": {"entity_id": "blah", "other_service_data": "blah"}}
}
},
), patch.object(hass.states, "get", return_value=HassState("test.entity", "True")), patch.object(
hass.services, "async_call"
) as call:
State.init(hass)
await State.get_service_params()
func = State.get("test.entity.test")
await func(context=Context(id="test"), blocking=True, limit=1, other_service_data="test")
assert call.called
assert call.call_args[0] == (
"test",
"test",
{"other_service_data": "test", "entity_id": "test.entity"},
)
assert call.call_args[1] == {"context": Context(id="test"), "blocking": True, "limit": 1}
call.reset_mock()
func = State.get("test.entity.test")
await func(context=Context(id="test"), blocking=False, other_service_data="test")
assert call.called
assert call.call_args[0] == (
"test",
"test",
{"other_service_data": "test", "entity_id": "test.entity"},
)
assert call.call_args[1] == {"context": Context(id="test"), "blocking": False}
# ======================================================================
# /*
# * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# *
# * Licensed under the Apache License, Version 2.0 (the "License").
# * You may not use this file except in compliance with the License.
# * A copy of the License is located at
# *
# * http://aws.amazon.com/apache2.0
# *
# * or in the "license" file accompanying this file. This file is distributed
# * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# * express or implied. See the License for the specific language governing
# * permissions and limitations under the License.
# */
import json
import logging
import uuid
from threading import Timer, Lock, Thread
# Flow:
# 1. Listen to notify or notify-next to trigger the next job
# 2. Call a function to get the next job to be executed and set its status to IN_PROGRESS
# 3. Set self._jobId accordingly
# 4. Pass the job description to the callback
# 5. Run job-handling logic
# 6. Call a function to update the job status to FAILED, SUCCEEDED, or REJECTED
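# Hypothetical usage sketch (assumes a jobManager object that implements the
# basicJobSubscribe/basicJobPublish/basicJobUnsubscribe calls used below):
#
#   job = deviceJob("myThing", True, jobManager)
#   job.jobRegisterNotifyNextCallback(onNextJob)       # step 1
#   token = job.jobStartNext(onStartNext, 10)          # steps 2-4
#   ...                                                # step 5: job-handling logic
#   token = job.jobUpdate("SUCCEEDED", onUpdate, 10)   # step 6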
class _jobRequestToken:
URN_PREFIX_LENGTH = 9
def getNextToken(self):
return uuid.uuid4().urn[self.URN_PREFIX_LENGTH:] # We only need the uuid digits, not the urn prefix
class _basicJSONParser:
def setString(self, srcString):
self._rawString = srcString
        self._dictionaryObject = None
def regenerateString(self):
return json.dumps(self._dictionaryObject)
def getAttributeValue(self, srcAttributeKey):
return self._dictionaryObject.get(srcAttributeKey)
def setAttributeValue(self, srcAttributeKey, srcAttributeValue):
self._dictionaryObject[srcAttributeKey] = srcAttributeValue
def validateJSON(self):
try:
self._dictionaryObject = json.loads(self._rawString)
except ValueError:
return False
return True
class deviceJob:
_logger = logging.getLogger(__name__)
_statusType = ["IN_PROGRESS", "QUEUED", "FAILED", "SUCCEEDED", "CANCELED", "REJECTED", "REMOVED"]
_finishedStatusType = ["FAILED", "SUCCEEDED", "CANCELED", "REJECTED", "REMOVED"]
def __init__(self, srcThingName, srcIsPersistentSubscribe, srcJobManager):
if srcThingName is None or srcIsPersistentSubscribe is None or srcJobManager is None:
raise TypeError("None type inputs detected.")
self._thingName = srcThingName
# Tool handler
self._jobManagerHandler = srcJobManager
self._basicJSONParserHandler = _basicJSONParser()
self._tokenHandler = _jobRequestToken()
# Properties
self._isPersistentSubscribe = srcIsPersistentSubscribe
self._lastVersionInSync = -1 # -1 means not initialized
self._isStartNextSubscribed = False
self._isUpdateSubscribed = False
self._currentJobId = None
self._jobSubscribeCallbackTable = dict()
self._jobSubscribeCallbackTable["update"] = None
self._jobSubscribeCallbackTable["start-next"] = None
self._jobSubscribeCallbackTable["notify"] = None
self._jobSubscribeCallbackTable["notify-next"] = None
self._jobSubscribeStatusTable = dict()
self._jobSubscribeStatusTable["update"] = 0
self._jobSubscribeStatusTable["start-next"] = 0
self._tokenPool = dict()
self._dataStructureLock = Lock()
def _doNonPersistentUnsubscribe(self, currentAction):
self._jobManagerHandler.basicJobUnsubscribe(self._thingName, currentAction)
self._logger.info("Unsubscribed to " + currentAction + " accepted/rejected topics for device: " + self._thingName)
def generalCallback(self, client, userdata, message):
# In Py3.x, message.payload comes in as a bytes(string)
# json.loads needs a string input
self._logger.debug("job message topic: " + message.topic)
self._logger.debug("job message payload: " + message.payload)
with self._dataStructureLock:
currentTopic = message.topic
currentAction = self._parseTopicAction(currentTopic) # start-next/update/notify-next
currentType = self._parseTopicType(currentTopic) # accepted/rejected/notify-next
payloadUTF8String = message.payload.decode('utf-8')
# start-next/update: Need to deal with token, timer and unsubscribe
if currentAction in ["start-next", "update"]:
# Check for token
self._basicJSONParserHandler.setString(payloadUTF8String)
if self._basicJSONParserHandler.validateJSON(): # Filter out invalid JSON
currentToken = self._basicJSONParserHandler.getAttributeValue(u"clientToken")
if currentToken is not None:
self._logger.debug("job message clientToken: " + currentToken)
if currentToken is not None and currentToken in self._tokenPool.keys(): # Filter out JSON without the desired token
# Sync local version when it is an accepted response
self._logger.debug("Token is in the pool. Type: " + currentType)
if currentType == "accepted":
if currentAction == "start-next":
incomingExecution = self._basicJSONParserHandler.getAttributeValue(u"execution")
else:
incomingExecution = self._basicJSONParserHandler.getAttributeValue(u"executionState")
if incomingExecution is not None:
# If it is accepted response, we need to sync the local version and jobId
if u"jobId" in incomingExecution.keys():
self._currentJobId = incomingExecution[u"jobId"]
if u"versionNumber" in incomingExecution.keys():
incomingVersion = incomingExecution[u"versionNumber"]
if incomingVersion > self._lastVersionInSync:
self._lastVersionInSync = incomingVersion
# Reset version and jobId if job is finished
if u"status" in incomingExecution.keys():
incomingStatus = incomingExecution[u"status"]
if incomingStatus in self._finishedStatusType:
self._lastVersionInSync = -1 # The version will always be synced for the next incoming accepted response
self._currentJobId = None
# Cancel the timer and clear the token
self._tokenPool[currentToken].cancel()
del self._tokenPool[currentToken]
# Need to unsubscribe?
self._jobSubscribeStatusTable[currentAction] -= 1
if not self._isPersistentSubscribe and self._jobSubscribeStatusTable.get(currentAction) <= 0:
self._jobSubscribeStatusTable[currentAction] = 0
processNonPersistentUnsubscribe = Thread(target=self._doNonPersistentUnsubscribe, args=[currentAction])
processNonPersistentUnsubscribe.start()
# Custom callback
if self._jobSubscribeCallbackTable.get(currentAction) is not None:
processCustomCallback = Thread(target=self._jobSubscribeCallbackTable[currentAction], args=[payloadUTF8String, currentType, currentToken])
processCustomCallback.start()
# notify-next: Watch execution data
else:
# currentType += "/" + self._parseTopicThingName(currentTopic)
# Sync local version
self._basicJSONParserHandler.setString(payloadUTF8String)
if self._basicJSONParserHandler.validateJSON(): # Filter out invalid JSON
# Custom callback
if self._jobSubscribeCallbackTable.get(currentAction) is not None:
processCustomCallback = Thread(target=self._jobSubscribeCallbackTable[currentAction], args=[payloadUTF8String, currentType, None])
processCustomCallback.start()
def _parseTopicAction(self, srcTopic):
ret = None
fragments = srcTopic.split('/')
if fragments[-1] in ["accepted", "rejected"]:
ret = fragments[-2]
else:
ret = fragments[-1]
return ret
def _parseTopicJobId(self, srcTopic):
fragments = srcTopic.split('/')
return fragments[4]
def _parseTopicType(self, srcTopic):
fragments = srcTopic.split('/')
return fragments[-1]
def _parseTopicThingName(self, srcTopic):
fragments = srcTopic.split('/')
return fragments[2]
def _timerHandler(self, srcActionName, srcToken):
with self._dataStructureLock:
# Don't crash if we try to remove an unknown token
if srcToken not in self._tokenPool:
self._logger.warning('Tried to remove non-existent token from pool: %s' % str(srcToken))
return
# Remove the token
del self._tokenPool[srcToken]
# Need to unsubscribe?
self._jobSubscribeStatusTable[srcActionName] -= 1
if not self._isPersistentSubscribe and self._jobSubscribeStatusTable.get(srcActionName) <= 0:
self._jobSubscribeStatusTable[srcActionName] = 0
self._jobManagerHandler.basicJobUnsubscribe(self._thingName, srcActionName)
# Notify time-out issue
if self._jobSubscribeCallbackTable.get(srcActionName) is not None:
self._logger.info("Job request with token: " + str(srcToken) + " has timed out.")
self._jobSubscribeCallbackTable[srcActionName]("REQUEST TIME OUT", "timeout", srcToken)
def jobStartNext(self, srcCallback, srcTimeout):
with self._dataStructureLock:
# Update callback data structure
self._jobSubscribeCallbackTable["start-next"] = srcCallback
# Update number of pending feedback
self._jobSubscribeStatusTable["start-next"] += 1
# clientToken
currentToken = self._tokenHandler.getNextToken()
self._tokenPool[currentToken] = Timer(srcTimeout, self._timerHandler, ["start-next", currentToken])
self._basicJSONParserHandler.setString("{}")
self._basicJSONParserHandler.validateJSON()
self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
currentPayload = self._basicJSONParserHandler.regenerateString()
# Two subscriptions
if not self._isPersistentSubscribe or not self._isStartNextSubscribed:
self._jobManagerHandler.basicJobSubscribe(self._thingName, "start-next", self.generalCallback)
self._isStartNextSubscribed = True
self._logger.info("Subscribed to start-next accepted/rejected topics for device: " + self._thingName)
# One publish
self._jobManagerHandler.basicJobPublish(self._thingName, "start-next", currentPayload)
# Start the timer
self._tokenPool[currentToken].start()
return currentToken
def jobUpdate(self, srcStatus, srcCallback, srcTimeout):
if srcStatus not in self._statusType:
raise TypeError("Invalid job status.")
if self._currentJobId is None:
raise TypeError("No job in progress to update.")
with self._dataStructureLock:
# Update callback data structure
self._jobSubscribeCallbackTable["update"] = srcCallback
# Update number of pending feedback
self._jobSubscribeStatusTable["update"] += 1
# clientToken
currentToken = self._tokenHandler.getNextToken()
self._tokenPool[currentToken] = Timer(srcTimeout, self._timerHandler, ["update", currentToken])
self._basicJSONParserHandler.setString("{}")
self._basicJSONParserHandler.validateJSON()
self._basicJSONParserHandler.setAttributeValue("status", srcStatus)
self._basicJSONParserHandler.setAttributeValue("expectedVersion", self._lastVersionInSync)
self._basicJSONParserHandler.setAttributeValue("includeJobExecutionState", True)
self._basicJSONParserHandler.setAttributeValue("clientToken", currentToken)
currentPayload = self._basicJSONParserHandler.regenerateString()
# Two subscriptions
if not self._isPersistentSubscribe or not self._isUpdateSubscribed:
self._jobManagerHandler.basicJobSubscribe(self._thingName, "update", self.generalCallback, srcJobId=self._currentJobId)
self._isUpdateSubscribed = True
self._logger.info("Subscribed to update accepted/rejected topics for device/job: " + self._thingName + "/" + self._currentJobId)
# One publish
self._jobManagerHandler.basicJobPublish(self._thingName, "update", currentPayload, srcJobId=self._currentJobId)
# Start the timer
self._tokenPool[currentToken].start()
return currentToken
def jobRegisterNotifyNextCallback(self, srcCallback):
with self._dataStructureLock:
# Update callback data structure
self._jobSubscribeCallbackTable["notify-next"] = srcCallback
# One subscription
self._jobManagerHandler.basicJobSubscribe(self._thingName, "notify-next", self.generalCallback)
self._logger.info("Subscribed to notify-next topic for device: " + self._thingName)
def jobUnregisterNotifyNextCallback(self):
with self._dataStructureLock:
# Update callback data structure
del self._jobSubscribeCallbackTable["notify-next"]
# One unsubscription
self._jobManagerHandler.basicJobUnsubscribe(self._thingName, "notify-next")
self._logger.info("Unsubscribed from notify-next topic for device: " + self._thingName)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""File with mock classes."""
class MockRepo(object):
"""MockClass to represent a GitRepository."""
def __init__(self, full_name, description):
"""Constructor.
:param full_name: Name of the repository.
:param description: Description of the repository.
"""
self.full_name = full_name
self.description = description
    def __eq__(self, other):
        if not isinstance(other, MockRepo):
            return NotImplemented
        return self.full_name == other.full_name and self.description == other.description
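# Usage sketch: two mocks compare equal exactly when both fields match.
#   assert MockRepo("org/repo", "A repo") == MockRepo("org/repo", "A repo")
#   assert MockRepo("org/repo", "A repo") != MockRepo("org/repo", "Other")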
|
import gnssShadowing
import pytest
import numpy as np
def test_MapNode():
a=gnssShadowing.MapNode()
assert a.x == 0
assert a.y == 0
assert type(a.x) == int
assert type(a.y) == int
a=gnssShadowing.MapNode(1,2)
assert a.x == 1
assert a.y == 2
a.x = 3
a.y = 4
assert a.x == 3
assert a.y == 4
def test_MapTimeNode():
a=gnssShadowing.MapTimeNode()
assert a.timestep == 0
assert a.position.x == 0
assert a.position.y == 0
assert type(a.timestep) == int
assert type(a.position) == gnssShadowing.MapNode
a=gnssShadowing.MapTimeNode(1,gnssShadowing.MapNode(2,3))
assert a.timestep == 1
assert a.position.x == 2
assert a.position.y == 3
a.timestep = 4
a.position.x = 5
a.position.y = 6
assert a.timestep == 4
assert a.position.x == 5
assert a.position.y == 6
def test_MapNodeList():
a=gnssShadowing.MapNodeList()
assert a.size() == 0
a.append(gnssShadowing.MapNode(1,2))
assert a.at(0).x == 1
assert a.at(0).y == 2
# at returns by value:
a.at(0).x = 3
a.at(0).y = 4
assert a.at(0).x == 1
assert a.at(0).y == 2
assert a.size() == 1
a.append(gnssShadowing.MapNode(5,6))
b = a.at(1)
assert b.x == 5
assert b.y == 6
assert a.size() == 2
a.clear()
assert a.size() == 0
def test_MapTimeNodeList():
a=gnssShadowing.MapTimeNodeList()
assert a.size() == 0
a.append(gnssShadowing.MapTimeNode(1,gnssShadowing.MapNode(2,3)))
assert a.at(0).timestep == 1
assert a.at(0).position.x == 2
assert a.at(0).position.y == 3
# at returns by value:
a.at(0).timestep = 4
a.at(0).position.x = 5
a.at(0).position.y = 6
assert a.at(0).timestep == 1
assert a.at(0).position.x == 2
assert a.at(0).position.y == 3
a.append(gnssShadowing.MapTimeNode(7,gnssShadowing.MapNode(8,9)))
b = a.at(1)
assert b.timestep == 7
assert b.position.x == 8
assert b.position.y == 9
assert a.size() == 2
a.clear()
assert a.size() == 0
def test_MapProblem():
world=gnssShadowing.World("data/2017-03-28.tle","data/uni.obj","Building")
planeLevels = gnssShadowing.PlaneLevelList()
planeLevels.extend([56.0])
width_m = 800
height_m = 400
res_x = res_y = 10.0
    width = int(width_m / res_x)
    height = int(height_m / res_y)
min_x = -width_m/2.
min_y = -height_m/2.
mapProperties = gnssShadowing.MapProperties(min_x,min_y,width,height,res_x,res_y,planeLevels)
minimumElevation = gnssShadowing.deg2rad(5)
mapper = gnssShadowing.Mapper(world, mapProperties, minimumElevation)
dopMap = mapper.computeDOPMap(gnssShadowing.mkSeconds(2017,3,28,12,0,0))
maxHorizontalDOP = 5
start = gnssShadowing.MapNode(0,0)
target = gnssShadowing.MapNode(1,1)
problem = gnssShadowing.MapProblem(maxHorizontalDOP,dopMap,
start,target)
assert type(problem.m_map) == gnssShadowing.DOPMap
assert type(problem.m_maxHorizontalDOP) == float
assert problem.targetReached(target) == True
assert problem.targetReached(start) == False
problem.computeCost(start,target)
neighbors = problem.computeNeighbors(start)
assert type(neighbors) == gnssShadowing.MapNodeList
problem.computeHeuristic(start)
def test_MapTimeProblem():
world=gnssShadowing.World("data/2017-03-28.tle","data/uni.obj","Building")
planeLevels = gnssShadowing.PlaneLevelList()
planeLevels.extend([56.0])
width_m = 800
height_m = 400
res_x = res_y = 10.0
    width = int(width_m / res_x)
    height = int(height_m / res_y)
min_x = -width_m/2.
min_y = -height_m/2.
mapProperties = gnssShadowing.MapProperties(min_x,min_y,width,height,res_x,res_y,planeLevels)
minimumElevation = gnssShadowing.deg2rad(5)
time = gnssShadowing.mkSeconds(2017,3,28,12,0,0)
timePerStep = 5 # in seconds
mapper = gnssShadowing.MapperLazyTimesteps(world,mapProperties,time,timePerStep,minimumElevation)
    start = gnssShadowing.MapTimeNode(0,gnssShadowing.MapNode(0,0))
    target = gnssShadowing.MapTimeNode(0,gnssShadowing.MapNode(1,1))
    maxHorizontalDOP = 5.
costPerHorizontalDOP = 1.
costPerGridStep = 1.
costPerTimeStep = 1.
costPerTimeTotal = 0.
timeStepsPerStep = 1
problem = gnssShadowing.MapTimeProblem(
maxHorizontalDOP,costPerHorizontalDOP,costPerGridStep,
costPerTimeStep,costPerTimeTotal,timeStepsPerStep,
mapper,
start,target)
assert type(problem.m_maps) == gnssShadowing.MapperLazyTimesteps
assert type(problem.m_maxHorizontalDOP) == float
assert type(problem.m_costPerHorizontalDOP) == float
assert type(problem.m_costPerGridStep) == float
assert type(problem.m_costPerTimeStep) == float
assert type(problem.m_costPerTimeTotal) == float
assert type(problem.m_timeStepsPerStep) == int
assert problem.targetReached(target) == True
assert problem.targetReached(start) == False
problem.computeCost(start,target)
neighbors = problem.computeNeighbors(start)
assert type(neighbors) == gnssShadowing.MapTimeNodeList
problem.computeHeuristic(start)
def test_MapProblemSolver():
world=gnssShadowing.World("data/2017-03-28.tle","data/uni.obj","Building")
planeLevels = gnssShadowing.PlaneLevelList()
planeLevels.extend([56.0])
width_m = 800
height_m = 400
res_x = res_y = 10.0
    width = int(width_m / res_x)
    height = int(height_m / res_y)
min_x = -width_m/2.
min_y = -height_m/2.
mapProperties = gnssShadowing.MapProperties(min_x,min_y,width,height,res_x,res_y,planeLevels)
minimumElevation = gnssShadowing.deg2rad(5)
mapper = gnssShadowing.Mapper(world, mapProperties, minimumElevation)
dopMap = mapper.computeDOPMap(gnssShadowing.mkSeconds(2017,3,28,12,0,0))
maxHorizontalDOP = 5
start = gnssShadowing.MapNode(0,0)
target = gnssShadowing.MapNode(1,1)
problem = gnssShadowing.MapProblem(maxHorizontalDOP,dopMap,
start,target)
solver = gnssShadowing.MapProblemSolver()
path = solver.findShortestPath(problem)
assert type(path) == gnssShadowing.MapNodeList
assert path.size() == 3
assert path.at(0).x == start.x
assert path.at(0).y == start.y
assert path.at(path.size()-1).x == target.x
assert path.at(path.size()-1).y == target.y
def test_MapTimeProblemSolver():
world=gnssShadowing.World("data/2017-03-28.tle","data/uni.obj","Building")
planeLevels = gnssShadowing.PlaneLevelList()
planeLevels.extend([56.0])
width_m = 800
height_m = 400
res_x = res_y = 10.0
    width = int(width_m / res_x)
    height = int(height_m / res_y)
min_x = -width_m/2.
min_y = -height_m/2.
mapProperties = gnssShadowing.MapProperties(min_x,min_y,width,height,res_x,res_y,planeLevels)
minimumElevation = gnssShadowing.deg2rad(5)
time = gnssShadowing.mkSeconds(2017,3,28,12,0,0)
timePerStep = 5 # in seconds
mapper = gnssShadowing.MapperLazyTimesteps(world,mapProperties,time,timePerStep,minimumElevation)
    start = gnssShadowing.MapTimeNode(0,gnssShadowing.MapNode(0,0))
    target = gnssShadowing.MapTimeNode(0,gnssShadowing.MapNode(1,1))
    maxHorizontalDOP = 5.
costPerHorizontalDOP = 1.
costPerGridStep = 1.
costPerTimeStep = 1.
costPerTimeTotal = 0.
timeStepsPerStep = 1
problem = gnssShadowing.MapTimeProblem(
maxHorizontalDOP,costPerHorizontalDOP,costPerGridStep,
costPerTimeStep,costPerTimeTotal,timeStepsPerStep,
mapper,
start,target)
solver = gnssShadowing.MapTimeProblemSolver()
path = solver.findShortestPath(problem)
assert type(path) == gnssShadowing.MapTimeNodeList
assert path.size() == 3
assert path.at(0).position.x == start.position.x
assert path.at(0).position.y == start.position.y
assert path.at(path.size()-1).position.x == target.position.x
assert path.at(path.size()-1).position.y == target.position.y
|
'''
In this exercise, assume that you are looking to start a business in the city of Chicago. Your perfect idea is to start a company that uses goats to mow the lawn for other businesses. However, you have to choose a location in the city to put your goat farm. You need a location with a great deal of space and relatively few businesses and people around to avoid complaints about the smell. You will need to merge three tables to help you choose your location. The land_use table has info on the percentage of vacant land by city ward. The census table has population by ward, and the licenses table lists businesses by ward.
The land_use, census, and licenses tables have been loaded for you.
'''
# Merge land_use and census and merge result with licenses including suffixes
land_cen_lic = land_use.merge(census, on='ward') \
.merge(licenses, on='ward', suffixes=('_cen','_lic'))
# Group by ward, pop_2010, and vacant, then count the # of accounts
pop_vac_lic = land_cen_lic.groupby(['ward','pop_2010','vacant'],
as_index=False).agg({'account':'count'})
# Sort pop_vac_lic and print the results
sorted_pop_vac_lic = pop_vac_lic.sort_values(['vacant', 'account', 'pop_2010'],
ascending=[False, True, True])
# Print the top few rows of sorted_pop_vac_lic
print(sorted_pop_vac_lic.head())
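# Note (illustrative, beyond the exercise): suffixes=('_cen', '_lic') only renames
# columns that collide between the census and licenses tables; e.g. if both carried
# an 'address' column, the merged frame would expose 'address_cen' and 'address_lic'.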
|
import numpy as np
from mvn import *
from utils import kernel_covariance
class FactorGP:
"""
Latent factor Gaussian process model for multivariate time series
Data: n epochs, t time points, q dimensional, r latent factors
Parameters: loading matrix (r x q), variance vector (q), length scale (r)
Priors: Conjugate Normal, inverse-Gamma, and Gamma (needs to be informative)
"""
def __init__(self, dims, mu_var=[0, 1], inverse_gamma=[1, 1], gamma=[10, 1], F=None):
n, t, q, r = dims
self.dims = dims
        self.x = np.linspace(1, t, t)  # time points are indexed by integers from 1 to t
        self.loading_prior_params = mu_var  # prior mean and variance for loading coefficients
        self.variance_prior_params = inverse_gamma  # inverse-Gamma prior for variance
        self.length_prior_params = gamma  # Gamma prior on length scale
# self.kernel_type = 'default'
self.loading, self.variance, self.theta = self.__initiate_params(dims, mu_var, inverse_gamma, gamma)
self.F = F
def __initiate_params(self, dims, mu_var, inverse_gamma, gamma):
n, t, q, r = dims
loading = np.random.normal(mu_var[0], np.sqrt(mu_var[1]), [r, q])
variance = np.random.normal(0, 0.5, q) ** 2 # TODO: implement inverse-Gamma prior
theta = np.repeat(gamma[0] * gamma[1], r) # set length scale to gamma mean
return loading, variance, theta
def update_conditional_latent(self, Y):
n, t, q, r = self.dims
covs = []
for l in self.theta:
covs.append(kernel_covariance(self.x, l, 1.0))
prod, covariance = conditional_F_dist(covs, self.loading, self.variance) # only invert covariance once
F = np.zeros((n * t, r))
for i in range(n): # sample from F conditional distribution for each epoch independently
F[(i * t):(i * t + t), :] = sample_conditonal_F_dist(Y[(i * t):(i * t + t), :], prod, covariance)
self.F = F
def predict(self):
return np.matmul(self.F, self.loading)
class IterFactorGP:
"""
Update latent factors iteratively.
"""
def __init__(self, dims, mu_var=[0, 1], inverse_gamma=[1, 1], gamma=[10, 1], F=None):
n, t, q, r = dims
self.dims = dims
        self.x = np.linspace(1, t, t)  # time points are indexed by integers from 1 to t
        self.loading_prior_params = mu_var  # prior mean and variance for loading coefficients
        self.variance_prior_params = inverse_gamma  # inverse-Gamma prior for variance
        self.length_prior_params = gamma  # Gamma prior on length scale
# self.kernel_type = 'default'
self.loading, self.variance, self.theta = self.__initiate_params(dims, mu_var, inverse_gamma, gamma)
self.F = F
def __initiate_params(self, dims, mu_var, inverse_gamma, gamma):
n, t, q, r = dims
loading = np.random.normal(mu_var[0], np.sqrt(mu_var[1]), [r, q])
variance = np.random.normal(0, 0.5, q) ** 2 # TODO: implement inverse-Gamma prior
theta = np.repeat(gamma[0] * gamma[1], r) # set length scale to gamma mean
return loading, variance, theta
def update_conditional_latent(self, Y):
n, t, q, r = self.dims
covs = []
for l in self.theta:
covs.append(kernel_covariance(self.x, l, 1.0))
F = np.zeros((n * t, r))
residuals = Y.copy()
for j in range(r): # update factors iteratively
prod, covariance = conditional_factor_dist(covs, self.loading, self.variance, j)
for i in range(n): # sample from F conditional distribution for each epoch independently
F[(i * t):(i * t + t), j] = sample_conditonal_factor_dist(residuals[(i * t):(i * t + t), :], prod, covariance)
hat = np.matmul(F[:, j].reshape((n * t, 1)), self.loading[j, :].reshape((1, q)))
residuals = residuals - hat
self.F = F
def predict(self):
return np.matmul(self.F, self.loading)
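# A minimal usage sketch (assumes the `mvn` and `utils` modules above are importable;
# Y stacks the n epochs along the rows, giving shape (n * t, q)):
if __name__ == "__main__":
    n, t, q, r = 5, 50, 8, 2
    Y = np.random.randn(n * t, q)
    model = FactorGP(dims=(n, t, q, r))
    model.update_conditional_latent(Y)  # draw latent factors F given Y
    Y_hat = model.predict()             # reconstruct Y from F and the loading matrix
    assert Y_hat.shape == (n * t, q)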
|
#!/usr/bin/env python
import os
import sys
import time
import datetime
from numpy import base_repr
from PIL import Image
#Argparse
import argparse
# beautiful soup
import bs4 as bs
# Selenium
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
# Standard Libs
from random import *
import random
import subprocess  # to get clipboard data
browser = webdriver.Chrome()
# browser = webdriver.Firefox(executable_path = '/usr/local/bin/geckodriver')
# browser = webdriver.Safari()
# Args
parser = argparse.ArgumentParser()
# parser.parse_args()
parser.add_argument("-v", "--verbose", help="increase output verbosity",action="store_true")
args = parser.parse_args()
if args.verbose:
print("verbosity turned on")
# global variables
mailservice_email = "entre-fd6968@inbox.mailtrap.io"
mailservice_email_user = mailservice_email.split('@')[0]
mailservice_domain = mailservice_email.split('@')[1]
#========================================== functions ===========================
#=================================== does element exist =========================
def element_exists(locator_attribute, locator_text):
possible_locators = ["id", "xpath", "link text", "partial link text", "name", "class name", "css selector"]
if locator_attribute not in possible_locators:
        raise ValueError("locator provided is not one of the allowed locators: %s" % possible_locators)
try:
browser.find_element(locator_attribute, locator_text)
return True
except:
return False
def assert_element_exists(locator_attribute, locator_text):
if not element_exists(locator_attribute, locator_text):
raise AssertionError("The requested element with '%s' of '%s' does not exist" % (locator_attribute, locator_text))
return
def element_visible(element):
if element.is_displayed(): # <- selenium
return True
else:
return False
def assert_element_visible(element):
if not element_visible(element):
raise AssertionError("Element is not displayed")
def find_and_assert_element_visible(locator_type, search_term):
element = browser.find_element(locator_type, search_term)
if not element.is_displayed():
raise AssertionError("Element with locator type '%s' and named '%s' is not displayed" % (locator_type))
else:
print("Element %s visible" % search_term)
# assert_element_exists('id', )
def list_all_images_on_page():
# images = browser.find_elements_by_tag_name('img')
images = browser.find_element_by_xpath("//img[contains(@src,'/assets/el_logo_rev-1304b11dc193fcbe45259eda7f2c00705646a4a3cb029963ba33f583a7462620.svg')]")
# for image in images:
print(images.get_attribute('src'))
#================================================================================
def waitUntilExists(locatorType, element):
    # Wait up to 10 seconds for the element to be present in the DOM
    WebDriverWait(browser, 10).until(EC.presence_of_element_located((locatorType, element)))
def loginWeeklyReport(url, email, pwd):
browser.get (url)
time.sleep(0.5)
browser.find_element_by_name("email").send_keys(email)
browser.find_element_by_name("password").send_keys(pwd)
browser.find_element_by_class_name("Button").click()
def loginEntre(url, email, pwd):
browser.get (url)
browser.find_element_by_link_text("Log In").click()
browser.find_element_by_name("email").send_keys(email)
browser.find_element_by_name("password").send_keys(pwd)
browser.find_element_by_name("commit").click()
def select_dropdown_value(id, value):
selectOption = Select(browser.find_element_by_id(id))
option_selected = selectOption.select_by_value(value)
def elementExists(url, locator_attr, locator_text):
# Description: return true if element exists
browser.get (url)
try:
browser.find_element(locator_attr, locator_text)
print("true")
return True
except:
print("false")
return False
def getClipboardData():
p = subprocess.Popen(['pbpaste'], stdout=subprocess.PIPE)
retcode = p.wait()
data = p.stdout.read()
return data
def setClipboardData(data):
p = subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE)
p.stdin.write(data)
p.stdin.close()
retcode = p.wait()
def signUpAAuserForWRT(url, email, password, firstLogin, am_i_signed_in, num_users_to_create):
browser.get (url)
if am_i_signed_in == True:
pass
else:
# WRT Sign In
browser.find_element_by_name("email").send_keys(email)
browser.find_element_by_name("password").send_keys(password)
browser.find_element_by_class_name("Button").click()
# # Get started
if firstLogin == True:
try:
browser.implicitly_wait(10)
browser.find_element_by_xpath("//a[contains(text(), 'Get Started')]").click()
# Continue Sign-up
browser.find_element_by_name("title").send_keys("Nerf Herder")
browser.implicitly_wait(10)
time.sleep(0.5)
browser.find_element_by_xpath("//button[contains(text(), 'Continue')]").click()
except Exception:
print("Not my first login")
pass
# Invite Team
# copy link
browser.implicitly_wait(10)
time.sleep(10)
browser.find_element_by_xpath("//button[contains(text(), 'Copy Link')]").click()
    companyLink = str(getClipboardData())
    # drop un-needed chars on clipboard content
    companyLink = companyLink[2:-1]
randUser = email.split('@')[0]
# Add n number users to the Company WRT
for n in range(0, num_users_to_create):
print("n = %s" % n)
time.sleep(1)
# browser.switch_to.window(browser.window_handles[(n-1)])
        browser.execute_script("window.open('%s', 'tab%s')" % (companyLink, (n)))
browser.switch_to.window('tab%s' % (n))
time.sleep(2)
try:
browser.find_element_by_xpath("//input[@class='SelectBox PersistEmail-field']").send_keys("%s-%s@%s" % (randUser, n, mailservice_domain))
browser.find_element_by_xpath("//button[contains(text(), 'Sign Up')]").click()
browser.find_element_by_xpath("//input[@name='firstName']").send_keys("fName")
browser.find_element_by_xpath("//input[@name='lastName']").send_keys("%s-%s" % (randUser, n))
browser.find_element_by_xpath("//input[@name='title']").send_keys("Nerf Herder")
browser.find_element_by_xpath("//input[@name='password']").send_keys("password")
browser.find_element_by_xpath("//button[contains(text(), 'Continue')]").click()
browser.implicitly_wait(20)
time.sleep(10)
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
except:
pass
browser.switch_to.window(browser.window_handles[0])
# Go to "Edit Team" page
print("before sleep before clicking weekly report tool")
time.sleep(1)
print("before clicking weekly report tool")
print("about to exit program")
sys.exit()
# os.system("pause")
# time.sleep(45)
browser.find_element_by_xpath("//a[contains(text(), 'Weekly Report Tool')]").click()
time.sleep(2)
browser.refresh()
browser.refresh()
time.sleep(2)
browser.refresh()
print("after clicking weekly report tool")
time.sleep(1)
print("after sleep after clicking weekly report tool")
browser.find_element_by_xpath("//a[contains(text(), 'Edit Team')]").click()
print("after clicking Edit Team")
time.sleep(3)
print("after waiting 3 sec after clicking Edit Team")
# CRUD members of team
browser.find_element_by_xpath("//div[@class='TeamMemberPanel TeamMemberPanel--admin']").click()
print("after clicking team meber")
browser.find_element_by_xpath("//button[contains(text(), 'Edit')]").click()
# edit team member first name
browser.find_element_by_name("firstName").click()
time.sleep(1)
browser.find_element_by_name("firstName").clear()
time.sleep(1)
timestamp = datetime.datetime.now().strftime('%b-%d_%I:%M:%S')
time.sleep(1)
browser.find_element_by_name("firstName").send_keys("botWroteThis-%s" % timestamp)
time.sleep(2)
# same user new name
browser.find_element_by_class_name("Button").click()
# delete team member from team
    findTeamMemberToBeDeleted = browser.find_elements_by_xpath("//div[@class='TeamMemberPanel TeamMemberPanel--admin']")  # clickable by selenium
    findTeamMemberNameToBeDeleted = browser.find_elements_by_xpath("//div[@class='TeamMemberPanel-label']")  # list of names of users on the page
    print("findTeamMemberToBeDeleted = %s" % len(findTeamMemberToBeDeleted))
    time.sleep(2)
    teamMemberThreeToBeDeleted = findTeamMemberToBeDeleted[2]
    teamMemberThreeToBeDeleted.click()
    print("after clicking 3rd member")
    browser.find_element_by_xpath("//button[contains(text(), 'Delete')]").click()
    browser.find_element_by_xpath("//button[contains(text(), 'Delete')]").click()
    print("Delete attempted for user %s" % findTeamMemberNameToBeDeleted[2].text)
    findTeamMemberNameAfterDeletion = browser.find_elements_by_xpath("//div[@class='TeamMemberPanel-label']")  # list of names of users on the page
    # time.sleep(2)
    # print("teamMemberThreeToBeDeleted = %s" % findTeamMemberNameToBeDeleted[2].text)
    # print("findTeamMemberNameAfterDeletion[2].text = %s" % findTeamMemberNameAfterDeletion[2].text)
    if findTeamMemberNameToBeDeleted[2].text != findTeamMemberNameAfterDeletion[2].text:
        print("User %s has been deleted" % findTeamMemberNameToBeDeleted[2].text)
    else:
        print("problem occurred: user %s was not deleted" % findTeamMemberNameToBeDeleted[2].text)
def loginWRT(email, password, env):
browser.get ("https://weeklyreport.%s.entreleadership.com/get-started" % (env))
browser.implicitly_wait(30)
time.sleep(3)
browser.find_element_by_name("email").send_keys(email)
browser.find_element_by_name("password").send_keys(password)
browser.find_element_by_class_name("Button").click()
time.sleep(2)
# workflows
# =====================================================================
# ===================================================
# Create many team members using company sign up link
# ===================================================
def createUserByCompanyLink(companyURL, num_users_to_create):
    companyLink = companyURL
    email = "makeManyUsers9998@mailinator.com"  # mailbox used to derive generated usernames
    randUser = email.split('@')[0]
# Add n number users to the Company WRT
for n in range(0, num_users_to_create):
print("n = %s" % n)
time.sleep(1)
# browser.switch_to.window(browser.window_handles[(n-1)])
        browser.execute_script("window.open('%s', 'tab%s')" % (companyLink, (n)))
browser.switch_to.window('tab%s' % (n))
time.sleep(2)
try:
browser.find_element_by_xpath("//input[@class='SelectBox PersistEmail-field']").send_keys("%s-%s@%s" % (randUser, n, mailservice_domain))
browser.find_element_by_xpath("//button[contains(text(), 'Sign Up')]").click()
browser.find_element_by_xpath("//input[@name='firstName']").send_keys("fName")
browser.find_element_by_xpath("//input[@name='lastName']").send_keys(randUser)
browser.find_element_by_xpath("//input[@name='title']").send_keys("Nerf Herder")
browser.find_element_by_xpath("//input[@name='password']").send_keys("password")
browser.find_element_by_xpath("//button[contains(text(), 'Continue')]").click()
browser.implicitly_wait(20)
time.sleep(10)
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
except:
pass
browser.switch_to.window(browser.window_handles[0])
# ===================================================
def newDashBoardOnboardingSteps(whichEnv):
browser.find_element_by_xpath("//span[@class='GetStarted-title']").click()
browser.find_element_by_link_text("Set Up Weekly Report Tool").click()
time.sleep(0.2)
browser.find_element_by_link_text("Set up the Weekly Report Tool").click()
time.sleep(1)
browser.switch_to.window(browser.window_handles[3])
# Sign Up for WRT
print("before clicking sign up")
browser.find_element_by_xpath("//a[contains(text(), 'Sign Up')]").click()
print("after clicking sign up")
# Fill out /company/sign-ups page
browser.find_element_by_xpath("//input[@id='company_sign_up_company_name']").send_keys("Dance Gavin Dance")
browser.find_element_by_xpath("//input[@id='company_sign_up_first_name']").send_keys("myNameIs")
browser.find_element_by_xpath("//input[@id='company_sign_up_last_name']").send_keys("NF")
browser.find_element_by_xpath("//input[@id='company_sign_up_title']").send_keys("Nerf Herder")
browser.find_element_by_xpath("//input[@id='SignUp-submitBtn']").click()
def onboardFB():
# # signup for Facebook
browser.switch_to.window(browser.window_handles[0])
browser.find_element_by_xpath("//a[contains(text(), 'Dashboard')]").click()
browser.find_element_by_link_text("Join our Facebook Commmunity").click()
browser.find_element_by_xpath("//a[contains(text(), 'Join our Facebook Community')]").click()
def inviteWrtTeamMembers(email, start_num, end_num):
# Invite Team by copying link
time.sleep(3)
browser.find_element_by_xpath("//button[contains(text(), 'Copy Link')]").click()
    companyLink = str(getClipboardData())
    # drop un-needed chars on clipboard content
    companyLink = companyLink[2:-1]
randUser = email.split('@')[0]
# Add n number users to the Company WRT
for n in range(start_num, end_num):
print("n = %s" % n)
time.sleep(1)
# browser.switch_to.window(browser.window_handles[(n-1)])
        browser.execute_script("window.open('%s', 'tab%s')" % (companyLink, (n)))
browser.switch_to.window('tab%s' % (n))
time.sleep(2)
try:
browser.find_element_by_xpath("//input[@class='SelectBox PersistEmail-field']").send_keys("%s-%s@%s" % (randUser, n, mailservice_domain))
browser.find_element_by_xpath("//button[contains(text(), 'Sign Up')]").click()
browser.find_element_by_xpath("//input[@name='firstName']").send_keys("fName")
browser.find_element_by_xpath("//input[@name='lastName']").send_keys("%s-%s" % (randUser, n))
browser.find_element_by_xpath("//input[@name='title']").send_keys("Nerf Herder")
browser.find_element_by_xpath("//input[@name='password']").send_keys("password")
browser.find_element_by_xpath("//button[contains(text(), 'Continue')]").click()
browser.implicitly_wait(20)
time.sleep(2)
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
browser.find_element_by_xpath("//div[@id='addeventatc1']").click()
except:
pass
browser.switch_to.window(browser.window_handles[0])
def teamMemberLoginSelectLeader(email, num_start, num_end, password, env):
# open new browser and login via
# browser = webdriver.Chrome()
for memberNum in range(num_start, num_end):
# browser = webdriver.Chrome()
        print("%s, team number = %s" % (email, memberNum))
randUser = email.split('@')[0]
teamMemberEmail = "%s-%s@%s" % (randUser, memberNum, mailservice_domain)
# browser.get("https://weeklyreport.qa.entreleadership.com/sign-in")
time.sleep(1)
# Sign out of Owner Account
browser.get("https://weeklyreport.%s.entreleadership.com/sign-out" % env)
# sign in to WRT
loginWeeklyReport('https://weeklyreport.%s.entreleadership.com' % env, teamMemberEmail, 'password')
browser.implicitly_wait(20)
time.sleep(2)
# Select a leader
browser.find_element_by_xpath("//div[@style='cursor: pointer; height: 100%; position: relative; width: 100%;']").click()
time.sleep(2)
cleanRandUser = randUser.replace('-', '')
cleanRandUser = cleanRandUser.replace('+', '')
if memberNum == 0:
browser.find_element_by_xpath("//span[@name='testFirstName %s']" % cleanRandUser).click()
elif memberNum == 1:
browser.find_element_by_xpath("//span[@name='fName %s-0']" % randUser).click()
elif memberNum == 2:
browser.find_element_by_xpath("//span[@name='fName %s-1']" % randUser).click()
time.sleep(0.5 )
browser.find_element_by_xpath("//span[@name='fName %s-0']" % randUser).click()
elif memberNum == 3:
browser.find_element_by_xpath("//span[@name='fName %s-1']" % randUser).click()
time.sleep(0.5 )
browser.find_element_by_xpath("//span[@name='fName %s-0']" % randUser).click()
elif memberNum > 3:
browser.find_element_by_xpath("//span[@name='fName %s-1']" % randUser).click()
time.sleep(2)
webdriver.ActionChains(browser).send_keys(Keys.ESCAPE).perform()
print("after pressing the escape key")
time.sleep(2)
browser.find_element_by_xpath("//button[contains(text(), 'Continue')]").click()
browser.implicitly_wait(15)
time.sleep(6)
# fill out WRT weekly form Previous Week
print("signing out")
browser.find_element_by_xpath("//a[contains(text(), 'sign out')]").click()
browser.implicitly_wait(15)
time.sleep(5)
print("logging back in")
loginWeeklyReport('https://weeklyreport.%s.entreleadership.com' % env, teamMemberEmail, 'password')
time.sleep(4)
completeWRTform('previousWeek', memberNum, teamMemberEmail, env)
# browser.get("https://weeklyreport.qa.entreleadership.com")
completeWRTform('currentWeek', memberNum, teamMemberEmail, env)
def completeWRTform(whichWeek, memberNum, teamMemberEmail, env):
browser.implicitly_wait(3)
time.sleep(1)
print("browser refresh y'all")
score = [20,40,60,80,100]
randStressScore = random.choice(score)
randMoraleScore = random.choice(score)
randWorkloadScore = random.choice(score)
if whichWeek == 'previousWeek':
print("inside previousWeek")
print("whichWeek = %s" % whichWeek)
week = 'previous -%s \n\n memberName = %s' % (memberNum, teamMemberEmail)
additional = 'nope'
dateRange = 'previous'
elif whichWeek == "currentWeek":
print("inside currentWeek")
print("whichWeek = %s" % whichWeek)
week = 'current -%s \n\n memberName = %s' % (memberNum, teamMemberEmail)
additional = 'nada'
dateRange = 'current'
    # print variables
print("randStressScore = %s" % randStressScore)
print("randMoraleScore = %s" % randMoraleScore)
print("randWorkloadScore = %s" % randWorkloadScore)
print("whichWeek = %s" % whichWeek)
# fill out your high for the week
time.sleep(3)
print("go to wrt form page")
browser.get ("https://weeklyreport.%s.entreleadership.com" % env)
# browser.find_element_by_name("high").click()
# browser.find_element_by_name("high").send_keys(week)
browser.find_element_by_xpath("//textarea[@name='high']").send_keys(week)
# low for the week
browser.find_element_by_xpath("//textarea[@name='low']").send_keys(week)
# stress level
stressElement = browser.find_element_by_xpath("//input[@name='stress'][@value='%s']" % randStressScore)
browser.execute_script("arguments[0].click();",stressElement)
# morale level
moraleElement = browser.find_element_by_xpath("//input[@name='morale'][@value='%s']" % randMoraleScore)
browser.execute_script("arguments[0].click();",moraleElement)
# workload level
workloadElement = browser.find_element_by_xpath("//input[@name='workload'][@value='%s']" % randWorkloadScore)
browser.execute_script("arguments[0].click();",workloadElement)
# Anything Else
browser.find_element_by_xpath("//textarea[@name='extra']").send_keys(additional)
# select which date range to submit
browser.find_element_by_xpath("//div[@value='%s']" % dateRange).click()
# Submit Report
time.sleep(0.25)
# pause()
browser.find_element_by_xpath("//button[contains(text(), 'Submit Report')]").click()
time.sleep(1)
def createNewUserAA_NewOnboarding(url, randUser, randEmail, pwd, term):
whichEnv = url.split('.')[1]
# root of domain
browser.get (url)
# pause()
# go to allaccess
browser.find_element_by_tag_name('body').send_keys(Keys.ESCAPE)
browser.implicitly_wait(3)
browser.find_element_by_xpath("//a[@class='HeroButter-cta btn-primary btn-yellow']").click()
# go to sign up page for allaccess
browser.find_element_by_xpath("//a[contains(text(), 'Become a Member')]").click()
# fill out new All Access member form
browser.find_element_by_id("user_first_name").send_keys("testFirstName")
randUser = randUser.replace('-', '')
randUser = randUser.replace('+', '')
browser.find_element_by_id("user_last_name").send_keys("%s" % randUser)
browser.find_element_by_id("user_email").send_keys(randEmail)
browser.find_element_by_id("user_phone_number").send_keys("6155551234")
browser.find_element_by_id("user_company_name").send_keys("King of The Nerds")
# browser.find_element_by_name("password").send_keys(pwd)
browser.find_element_by_id("user_agreed_to_tos").click() # agree to terms checkbox
# screenShot()
browser.find_element_by_name("commit").click() # submit create new user
# pause()
if whichEnv == 'qa':
print("TestEnv = %s" % whichEnv)
if term == "annual":
browser.find_element_by_xpath("//input[@id='term_yearly']").click()
browser.find_element_by_id("coupon_code").send_keys("YRSAVE") # enter discount code value
elif term == "monthly":
browser.find_element_by_xpath("//input[@id='term_monthly']").click()
browser.find_element_by_id("coupon_code").send_keys("321") # enter discount code value
# browser.find_element_by_id("coupon_code").send_keys("REACTIVATE") # enter discount code value
# browser.find_element_by_id("coupon_code").send_keys("321") # enter discount code value
# pause()
browser.find_element_by_id("coupon_submit").click() # apply discount code
time.sleep(1)
else:
print("TestEnv = %s \nUsing discount code test" % whichEnv)
browser.find_element_by_id("coupon_code").send_keys("lapin") # enter discount code value
browser.find_element_by_id("coupon_submit").click() # apply discount code
browser.find_element_by_xpath("//input[@value='Next Page']").click() # submit
# pause()
# payment page
iframe = browser.find_element_by_id("z_hppm_iframe")
browser.switch_to.frame(iframe)
browser.find_element_by_id("input-creditCardNumber").send_keys("5454545454545454")
# browser.find_element_by_id("input-creditCardNumber").send_keys("4470330769941000")
select_dropdown_value('input-creditCardExpirationMonth', '03')
select_dropdown_value('input-creditCardExpirationYear', '2037')
browser.find_element_by_id("input-cardSecurityCode").send_keys("989")
select_dropdown_value('input-creditCardState', 'Tennessee')
browser.find_element_by_id("input-creditCardAddress1").send_keys("123 Test Dr")
browser.find_element_by_id("input-creditCardCity").send_keys("Nashville")
browser.find_element_by_id("input-creditCardPostalCode").send_keys("37214")
browser.find_element_by_id("submitButton").click()
# initial user login
browser.implicitly_wait(35)
# browser.find_element_by_name("email").send_keys(randEmail)
browser.find_element_by_xpath("//input[@type='password']").send_keys(pwd)
browser.find_element_by_xpath("//button[@type='submit']").click()
def verifyEmailAddress(email):
# Login to mailtrap.io
browser.execute_script("window.open('%s', 'tab%s')" % ("https://mailtrap.io/inboxes/397636/messages", (0)))
browser.switch_to.window('tab%s' % (0))
browser.find_element_by_id("user_email").send_keys("paul.campbell@daveramsey.com")
browser.find_element_by_id("user_password").send_keys("Tommyboy2!")
browser.find_element_by_xpath("//input[@type='submit']").click()
# Open up email
time.sleep(4)
browser.refresh()
time.sleep(2)
browser.find_element_by_xpath("//span[contains(text(), '%s')]" % (email)).click()
# Move inside iframe
browser.switch_to.frame(browser.find_element_by_tag_name("iframe"))
time.sleep(2)
browser.find_element_by_xpath("//a[contains(text(), 'Verify Email')]").click()
# Click Continue
# browser.switch_to_window(main_window)
# browser.find_element_by_tag_name('body').send_keys(Keys.CONTROL + Keys.TAB)
browser.switch_to.window(browser.window_handles[2])
browser.find_element_by_xpath("//a[contains(text(), 'Continue')]").click()
def getstartedURL():
browser.find_element_by_link_text("Get Started Now").click()
# pause()
def newUserTestNewOnboarding(env, email, pwd, start_num, end_num, term):
randUser = email.split('@')[0]
try:
createNewUserAA_NewOnboarding("https://www.%s.entreleadership.com" % (env), randUser, email, pwd, term)
except:
print("creating new AA user failed")
pause()
browser.quit()
try:
verifyEmailAddress(email)
except:
print("verifying email failed")
pause()
browser.quit()
try:
cookieChecker()
except:
print("cookieCheckerFailed")
pause()
browser.quit()
try:
getstartedURL()
except:
print("failed to click the get started URL")
pause()
browser.quit()
try:
cookieChecker()
except:
print("cookieCheckerFailed")
pause()
browser.quit()
try:
newDashBoardOnboardingSteps(env)
except:
print("new onbaording failed")
pause()
browser.quit()
try:
cookieChecker()
except:
print("cookieCheckerFailed")
pause()
browser.quit()
try:
browser.implicitly_wait(35)
inviteWrtTeamMembers(email, start_num, end_num)
except:
print("failed to invite team mebers to join the company")
pause()
browser.quit()
try:
cookieChecker()
except:
print("cookieCheckerFailed")
pause()
browser.quit()
try:
onboardFB()
except:
print("FB onboard failed")
pause()
browser.quit()
try:
cookieChecker()
except:
print("cookieCheckerFailed")
pause()
browser.quit()
try:
teamMemberLoginSelectLeader(email, start_num, end_num, "password", env)
except:
print("self selecting leader or submiting weekly report errored")
pause()
browser.quit()
try:
cookieChecker()
# print("killing browser")
except:
print("cookieCheckerFailed")
pause()
browser.quit()
def loginAndTestNewOnboarding(env, email, pwd, num_users_to_create):
loginEntre("https://www.%s.entreleadership.com" % (env), email, pwd)
time.sleep(2)
newDashBoardOnboardingSteps(env)
inviteWrtTeamMembers(email, num_users_to_create)
onboardFB()
def cookieChecker():
cookieSizeShowThreshold = 700
cookieSizeWarningThreshold = 1000
cookieSizeTopThreshold = 1550
cookies_list = browser.get_cookies()
cookies_dict = {}
for cookie in cookies_list:
cookies_dict[cookie['name']] = cookie['value']
for each in cookies_dict:
cookieSize = sys.getsizeof(cookies_dict[each])
if cookieSize > cookieSizeShowThreshold and cookieSize < cookieSizeWarningThreshold:
print(" cookieSize: %s CookieName: %s" % ( cookieSize, each))
elif cookieSize >= cookieSizeWarningThreshold and cookieSize < cookieSizeTopThreshold:
print("\nWARNING exceeding cookieSizeWarningThreshold of %s: \n ==========================================\n cookieSize: %s CookieName: %s\n ==========================================\n" % (cookieSizeWarningThreshold, cookieSize, each))
elif cookieSize >= cookieSizeTopThreshold:
print("\nHOUSTON WE HAVE A PROBLEM we are exceeding cookieSizeTopThreshold of %s: \n ==========================================\n cookieSize: %s CookieName: %s\n ==========================================\n" % (cookieSizeTopThreshold, cookieSize, each))
def randEmailUser():
randUser = "%s+%s" % (mailservice_email_user, base_repr(int(time.time()), 36).lower())
# randUser = "%s+%s" % (mailservice_email_user, "AA-user-w-payment")
print(randUser)
randEmail = "%s@%s" % (randUser, mailservice_domain)
print(randEmail)
return randEmail
def testForgotPasswordAA(env, email):
browser.implicitly_wait(5)
pwdURL = "https://www.%s.entreleadership.com/users/password/reset" % (env)
browser.get (pwdURL)
browser.find_element_by_xpath("//input[@id='email']").send_keys(email)
browser.find_element_by_xpath("//input[@name='commit']").click()
def pause():
programPause = input("Press the <ENTER> key to continue...")
def screenShot():
whatTimeIsIt = datetime.datetime.now().strftime('%b-%d_%I-%M-%S')
print("time = %s" % whatTimeIsIt)
browser.get_screenshot_as_file("/shots/screenShots.%s.png" % whatTimeIsIt)
print("after screenShot")
# =====================================================================
#========================================== main =================================
# AA forgot password
# testForgotPasswordAA ("qa", "entre-fd6968@inbox.mailtrap.io")
# ***********************************
newUserTestNewOnboarding("qa", randEmailUser(), "Password1!", 0,3, "annual")
|
# Copyright 2022 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import numpy as np
import re
import tensorflow as tf
import torch
from argparse import ArgumentParser
from os.path import abspath
from transformers.utils import logging
from ..config.megatron import PreTrained
from ...models.megatron import ForPreTraining
import os
import zipfile
logging.set_verbosity_info()
log = logging.get_logger(__name__)
def load_src_weights(model, config, tf_checkpoint_path):
tf_path = abspath(tf_checkpoint_path)
log.info("Converting TensorFlow checkpoint from {}".format(tf_path))
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, shape in init_vars:
log.info(f"Loading TF weight {name} with shape {shape}")
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array)
for name, array in zip(names, arrays):
name = name.split("/")
if any(
n
in [
"adam_v",
"adam_m",
"AdamWeightDecayOptimizer",
"AdamWeightDecayOptimizer_1",
"global_step",
]
for n in name
):
log.info(f"Skipping {'/'.join(name)}")
continue
pointer = model
for m_name in name:
if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
scope_names = re.split(r"_(\d+)", m_name)
else:
scope_names = [m_name]
if scope_names[0] == "kernel" or scope_names[0] == "gamma":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
pointer = getattr(pointer, "bias")
elif scope_names[0] == "output_weights":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "squad":
pointer = getattr(pointer, "classifier")
else:
try:
pointer = getattr(pointer, scope_names[0])
except AttributeError:
log.info(f"Skipping {'/'.join(name)}")
continue
if len(scope_names) >= 2:
num = int(scope_names[1])
pointer = pointer[num]
if m_name[-11:] == "_embeddings":
pointer = getattr(pointer, "weight")
elif m_name == "kernel":
array = np.transpose(array)
if pointer.shape != array.shape:
raise ValueError(
f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
)
log.info("Initialize PyTorch weight {}".format(name))
pointer.data = torch.from_numpy(array)
return model
def recursive_print(name, val, spaces=0):
if name is None:
msg = None
else:
fmt = "." * max(0, spaces - 2) + "# {:" + str(50 - spaces) + "s}"
msg = fmt.format(name)
if isinstance(val, dict):
if msg is not None:
print(msg)
for k in val.keys():
recursive_print(k, val[k], spaces + 2)
elif isinstance(val, torch.Tensor):
print(msg, ":", val.size())
else:
print(msg, ":", val)
def fix_query_key_value_ordering(param, checkpoint_version, num_splits, n_heads, d_hidden):
input_shape = param.size()
if checkpoint_version == 1.0:
# version 1.0 stores [n_heads * d_hidden * num_splits, :]
saved_shape = (n_heads, d_hidden, num_splits) + input_shape[1:]
param = param.view(*saved_shape)
param = param.transpose(0, 2)
param = param.transpose(1, 2).contiguous()
elif checkpoint_version >= 2.0:
# other versions store [n_heads * num_splits * d_hidden, :]
saved_shape = (n_heads, num_splits, d_hidden) + input_shape[1:]
param = param.view(*saved_shape)
param = param.transpose(0, 1).contiguous()
param = param.view(*input_shape)
return param
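# Shape sketch for fix_query_key_value_ordering above (assumed values): with
# n_heads=16, d_hidden=64, num_splits=3 and a fused QKV weight of shape [3072, 1024],
# a version >= 2.0 checkpoint is viewed as [16, 3, 64, 1024], transposed to
# [3, 16, 64, 1024], and flattened back to [3072, 1024]; the rows end up grouped as
# all-Q, then all-K, then all-V instead of interleaved per attention head.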
def convert_megatron_checkpoint(args, input_state_dict, config):
output_state_dict = {}
ds_args = input_state_dict.get("args", None)
if ds_args is not None:
config.tokenizer_type = ds_args.tokenizer_type
config.s_vocab = ds_args.padded_vocab_size
config.n_pos = ds_args.n_pos
config.d_hidden = ds_args.d_hidden
config.n_lays = ds_args.n_lays
config.n_heads = ds_args.n_heads
config.d_ff = (
ds_args.ffn_hidden_size if "ffn_hidden_size" in ds_args else 4 * ds_args.d_hidden
)
heads = config.n_heads
hidden_size_per_head = config.d_hidden // heads
if "checkpoint_version" in input_state_dict.keys():
checkpoint_version = input_state_dict["checkpoint_version"]
else:
checkpoint_version = 0.0
# The model.
model = input_state_dict["model"]
# The language model.
lm = model["language_model"]
# The embeddings.
embeddings = lm["embedding"]
# The word embeddings.
word_embeddings = embeddings["word_embeddings"]["weight"]
# Truncate the embedding table to s_vocab rows.
word_embeddings = word_embeddings[: config.s_vocab, :]
# Store the word embeddings.
output_state_dict["bert.embeddings.word_embeddings.weight"] = word_embeddings
# The position embeddings.
pos_embeddings = embeddings["position_embeddings"]["weight"]
assert pos_embeddings.size(0) == config.n_pos and pos_embeddings.size(1) == config.d_hidden
# Store the position embeddings.
output_state_dict["bert.embeddings.position_embeddings.weight"] = pos_embeddings
# The token-type embeddings.
tokentype_embeddings = embeddings["tokentype_embeddings"]["weight"]
# Store the position embeddings.
output_state_dict["bert.embeddings.token_type_embeddings.weight"] = tokentype_embeddings
# The transformer.
transformer = lm["transformer"] if "transformer" in lm.keys() else lm["encoder"]
# The regex to extract layer names.
    layer_re = re.compile(r"layers\.(\d+)\.([a-z0-9_.]+)\.([a-z]+)")
# The simple map of names for "automated" rules.
megatron_to_transformers = {
"attention.dense": ".attention.output.dense.",
"mlp.dense_h_to_4h": ".intermediate.dense.",
"mlp.dense_4h_to_h": ".output.dense.",
}
# Keep track of the attention/query/value tensor.
attention_qkv_weight = None
# Extract the layers.
for key, val in transformer.items():
# Match the name.
m = layer_re.match(key)
# Stop if that's not a layer
if m is None:
break
# The index of the layer.
layer_idx = int(m.group(1))
# The name of the operation.
op_name = m.group(2)
# Is it a weight or a bias?
weight_or_bias = m.group(3)
# The name of the layer.
layer_name = f"bert.encoder.layer.{layer_idx}"
# For layernorm(s), simply store the layer norm.
if op_name.endswith("layernorm"):
ln_name = "attention.ln" if op_name.startswith("input") else "ln"
output_state_dict[layer_name + "." + ln_name + "." + weight_or_bias] = val
# Transpose the QKV matrix.
elif op_name == "attention.query_key_value" and weight_or_bias == "weight":
            # Make sure the QKV pointer is nil.
            assert attention_qkv_weight is None, "Found a second QKV weight before the matching bias"
out_val = fix_query_key_value_ordering(
val, checkpoint_version, 3, heads, hidden_size_per_head
)
# Store the tensor as we need the bias as well to interleave QKV and biases.
attention_qkv_weight = out_val
# Transpose the bias.
elif op_name == "attention.query_key_value" and weight_or_bias == "bias":
            # Make sure we read the weight tensor.
            assert attention_qkv_weight is not None, "Found the QKV bias before its weight"
# Split the QKV matrix into Q, K and V. Megatron stores Q,K,V interleaved.
q = attention_qkv_weight[0 * config.d_hidden : 1 * config.d_hidden, :]
k = attention_qkv_weight[1 * config.d_hidden : 2 * config.d_hidden, :]
v = attention_qkv_weight[2 * config.d_hidden : 3 * config.d_hidden, :]
out_val = fix_query_key_value_ordering(
val, checkpoint_version, 3, heads, hidden_size_per_head
)
# Split the bias.
q_bias = out_val[0 * config.d_hidden : 1 * config.d_hidden]
k_bias = out_val[1 * config.d_hidden : 2 * config.d_hidden]
v_bias = out_val[2 * config.d_hidden : 3 * config.d_hidden]
# Store.
output_state_dict[f"{layer_name}.attention.self.query.weight"] = q
output_state_dict[f"{layer_name}.attention.self.query.bias"] = q_bias
output_state_dict[f"{layer_name}.attention.self.key.weight"] = k
output_state_dict[f"{layer_name}.attention.self.key.bias"] = k_bias
output_state_dict[f"{layer_name}.attention.self.value.weight"] = v
output_state_dict[f"{layer_name}.attention.self.value.bias"] = v_bias
# Clear the stored tensor.
attention_qkv_weight = None
# Copy weights and biases as is.
elif weight_or_bias in ["weight", "bias"]:
out_name = megatron_to_transformers[op_name]
output_state_dict[layer_name + out_name + weight_or_bias] = val
# The final layernorm.
output_state_dict["bert.encoder.ln.weight"] = transformer["final_layernorm.weight"]
output_state_dict["bert.encoder.ln.bias"] = transformer["final_layernorm.bias"]
# The pooler.
pooler = lm["pooler"]
# Store the matrix and the bias.
output_state_dict["bert.pooler.dense.weight"] = pooler["dense.weight"]
output_state_dict["bert.pooler.dense.bias"] = pooler["dense.bias"]
# The LM head from Megatron (for RACE).
lm_head = model["lm_head"]
# The transform matrix.
output_state_dict["cls.predictions.transform.dense.weight"] = lm_head["dense.weight"]
output_state_dict["cls.predictions.transform.dense.bias"] = lm_head["dense.bias"]
# The transform LN.
output_state_dict["cls.predictions.transform.LayerNorm.weight"] = lm_head["layernorm.weight"]
output_state_dict["cls.predictions.transform.LayerNorm.bias"] = lm_head["layernorm.bias"]
# For the decoder, we replicate the weights.
output_state_dict["cls.predictions.decoder.weight"] = word_embeddings
output_state_dict["cls.predictions.bias"] = lm_head["bias"]
    # The classifier from Megatron (for MNLI).
binary_head = model["binary_head"]
# Store the classifier.
output_state_dict["cls.seq_relationship.weight"] = binary_head["weight"]
output_state_dict["cls.seq_relationship.bias"] = binary_head["bias"]
# It should be done!
return output_state_dict
def main():
parser = ArgumentParser()
parser.add_argument("--print-checkpoint-structure", action="store_true")
parser.add_argument(
"path_to_checkpoint", type=str, help="Path to the ZIP file containing the checkpoint"
)
parser.add_argument(
"--config_file",
default="",
type=str,
help="An optional config json file describing the pre-trained model.",
)
args = parser.parse_args()
basename = os.path.dirname(args.path_to_checkpoint)
print(f'Extracting PyTorch state dictionary from "{args.path_to_checkpoint}"')
if args.path_to_checkpoint.endswith(".zip"):
with zipfile.ZipFile(args.path_to_checkpoint, "r") as checkpoint:
with checkpoint.open("release/mp_rank_00/model_optim_rng.pt") as pytorch_dict:
input_state_dict = torch.load(pytorch_dict, map_location="cpu")
else:
input_state_dict = torch.load(args.path_to_checkpoint, map_location="cpu")
if args.config_file == "":
config = PreTrained()
config.s_vocab = input_state_dict["model"]["lm_head"]["bias"].numel()
else:
config = PreTrained.from_json_file(args.config_file)
print("Converting")
output_state_dict = convert_megatron_checkpoint(args, input_state_dict, config)
if args.print_checkpoint_structure:
recursive_print(None, output_state_dict)
print("Saving config")
config.save_pretrained(basename)
output_checkpoint_file = os.path.join(basename, "pytorch_model.bin")
print(f'Saving checkpoint to "{output_checkpoint_file}"')
torch.save(output_state_dict, output_checkpoint_file)
if __name__ == "__main__":
main()
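# Example invocation (hypothetical file and checkpoint paths; the ZIP is expected to
# contain "release/mp_rank_00/model_optim_rng.pt" as opened above):
#   python convert_megatron_bert.py --print-checkpoint-structure /path/to/checkpoint.zip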
|
# manos.py
"""
td - todos diferentes
1p - 1 par
2p - 2 pares
tercia - 3 iguales
full - 1 trio y 1 par
poker - 4 iguales
quintilla - todos iguales
"""
probabilidad = {'td':0.30240, '1p':0.50400, '2p':0.10800, 'tercia':0.07200, 'full':0.00900, 'poker':0.00450, 'quintilla':0.00010}
def quintilla(numero):
digito1 = numero[0]
for digito in numero:
if digito != digito1:
return False
return True
def full(numero):
    # Count occurrences of each digit
guia = dict.fromkeys(numero, 0)
for digito in numero:
guia[digito]+=1
if(2 in guia.values() and 3 in guia.values()):
return True
return False
def poker(numero):
if(tercia(numero)):
        # Count occurrences of each digit
guia = dict.fromkeys(numero, 0)
for digito in numero:
guia[digito]+=1
for conteo in guia.values():
if conteo >= 4:
return True
return False
else:
return False
def tercia(numero):
    # Count occurrences of each digit
guia = dict.fromkeys(numero, 0)
for digito in numero:
guia[digito]+=1
    # Any digit appearing at least three times
for conteo in guia.values():
if conteo >= 3:
return True
return False
def onep(numero):
    # Count occurrences of each digit
guia = dict.fromkeys(numero, 0)
for digito in numero:
guia[digito]+=1
    # Any digit appearing at least twice (a pair)
for conteo in guia.values():
if conteo >= 2:
return True
return False
def twop(numero):
    # Count occurrences of each digit
guia = dict.fromkeys(numero, 0)
for digito in numero:
guia[digito]+=1
    # First pair
    # Only proceed if we know there is at least one pair
if onep(numero):
par = None
for conteo in guia.items():
if conteo[1] >= 2:
par = conteo[0]
break
        # Remove the pair we already found
del guia[par]
        # Look for a second pair
for conteo in guia.values():
if conteo >= 2:
return True
return False
else:
return False
def td(numero):
    return len(numero) == len(set(numero))
def tipo(numero):
if quintilla(numero):
return 'quintilla'
elif poker(numero):
return 'poker'
elif full(numero):
return 'full'
elif tercia(numero):
return 'tercia'
elif twop(numero):
return '2p'
elif onep(numero):
return '1p'
else:
return 'td'
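# A minimal usage sketch (hypothetical hand; `numero` can be any sequence of digits,
# e.g. a 5-character string):
if __name__ == "__main__":
    mano = "33322"
    clase = tipo(mano)
    print(clase, probabilidad[clase])  # -> full 0.009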
|
"""
# These files originate from the "PyPipes" project:
# https://github.com/ZENULI/PyPipes
# Created by ZENULI at University Paul Sabatier III :
# https://github.com/BastienKovac
# https://github.com/Ulynor
# https://github.com/seb2s
# License:
# MIT License Copyright (c) 2022 ZENULI
"""
import sys
sys.path.append('../PyPipes')
import argparse
from src.graph_creator.GraphCreator import GraphCreator
from src.pipeline_constructor.PipelineConstructor import PipelineConstructor
def main(args):
print("building model...")
creator = GraphCreator()
test_graph = creator.create_random_graph(10)
constructor = PipelineConstructor()
model = constructor.construct_pipeline(test_graph)
model.visualize()
if __name__ == '__main__':
#input arguments
parser = argparse.ArgumentParser()
#parser.add_argument('--nb_models', type=str, required=True, help='input image for generating caption')
#parser.add_argument('--encoder_path', type=str, default='models/encoder-5-3000.pkl', help='path for trained encoder')
args = parser.parse_args()
main(args)
|
minNum = int(input('What\'s your minimum number? '))
maxNum = int(input('What\'s your maximum number? '))
def myNumbers(minNum, maxNum):
myList = []
for num in range(minNum, maxNum + 1):
myList.append(num)
print(myList)
myNumbers(minNum, maxNum)
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 16 18:25:31 2017
@author: nchitta
"""
import csv
import time
import visa
import matplotlib.pyplot as plt
# Initialize the resource manager so we can talk to the devices
rm=visa.ResourceManager()
# List the available resources so we can check them for reference later on.
rm.list_resources()
# Check that we can access all of the instruments correctly.
fluke6060b=rm.open_resource('GPIB0::2::INSTR')
sr844=rm.open_resource('GPIB0::8::INSTR')
agi8648a=rm.open_resource('GPIB0::19::INSTR')
output_dir = "Data/SR844/"+time.strftime('%Y%m%d-%H%M%S')
def mkdir_p(mypath):
'''Creates a directory. equivalent to using mkdir -p on the command line'''
from errno import EEXIST
from os import makedirs,path
try:
makedirs(mypath)
except OSError as exc: # Python >2.5
if exc.errno == EEXIST and path.isdir(mypath):
pass
else: raise
#
mkdir_p(output_dir)
sr844.timeout = 50000
reading=[]
raw_input("Please make sure Fluke 6060B, Agilent 8648A are in Sync and ensure Fluke RF Out is connected to Ref In and Agilent RF Out at Signal Input - Press Enter to Continue")
time.sleep(5)
freq1=int(raw_input('Please Input Frequency in MHZ..\n'))
fluke6060b.write("FR %s MZ" % freq1)
sr844.write('FMOD 0')# Setting the SR844 to run on reference input frequency
time.sleep(10)
pow1=-100
pin=[]
pout=[]
refphase=0
rdbmphase=0
fig=plt.figure(figsize=(12,8))
while pow1<=0:
freq2=float(sr844.query("FRAQ?"))#Query the external frequency setting on the frequency tab in SR844
print('sr844 is running on external frequency %s' %freq2)
#agi8648a.write("OUTPUT 719;FREQ:CW %s HZ" % freq2) # Commenting until we figure out the FRAQ problem
agi8648a.write("OUTPUT 719;FREQ:CW %s MHZ" %freq1)
agi8648a.write("POW:AMPL %s DBM" % pow1)
#fluke6060b.write("AP %s DBM" %pow1)
agi8648a.write(":OUTP:STAT ON")
#fluke6060b.write("RO 1")
#agi4396b.write("INIT:IMM;*WAI")
    # For the 4396B there is a small difference; to get the above we would use:
    # :INITiate:CONTinuous {OFF|0}
    # :ABORt
    # :SENSe:SWEep:COUNt 1
    # :INITiate
time.sleep(5)
    sr844.write('AWRS')# Run auto-select wide reserve mode.
sr844.write('AGAN')#Run auto sensitivity mode.
time.sleep(5)
sr844.write('APHS')#Run Autophase function
time.sleep(5)
refphase=sr844.query('PHAS?')#Adjusted Phase Value.
time.sleep(1)
rdbmphase=sr844.query("SNAP?4,5")
data=[float(i) for i in rdbmphase.strip('{}').split(',')]
reading.append([refphase,data[0],data[1]])
pin.append(pow1)
pout.append(data[0])
pow1=pow1+1
agi8648a.write(":OUTP:STAT OFF")
#fluke6060b.write("RO 0")
else:
#if pow1>=10:
if pow1>=0:
with open(output_dir+'/SR844_3DeviceTest_Freq'+str(freq2)+time.strftime('%Y%m%d-%H%M%S')+'.csv',"wb") as f:
writer = csv.writer(f)
writer.writerows(reading)
ax = fig.add_subplot(1,1,1)
ax.plot(pin, pin, 'g')
ax.plot(pin, pout, 'r')
ax.set_xlabel('Input Power')
ax.set_ylabel('Output Power')
else:
        print('There is some trouble, please recheck the results')
plt.savefig(output_dir+'/SR844_3DeviceTest_Freq_'+str(freq2)+time.strftime('%Y%m%d-%H%M%S')+'.png')
|
"""This module provides test for the pca task."""
import pandas as pd
import numpy as np
from fractalis.analytics.tasks.pca.main import PCATask
# noinspection PyMissingTypeHints
class TestPCATask:
task = PCATask()
def test_correct_output(self):
features = [
pd.DataFrame([[101, 'foo', 5], [101, 'bar', 6],
[102, 'foo', 10], [102, 'bar', 11],
[103, 'foo', 15], [103, 'bar', 16],
[104, 'foo', 20], [104, 'bar', 21]],
columns=['id', 'feature', 'value']),
pd.DataFrame([[101, 'baz', 5],
[102, 'baz', 10],
[104, 'baz', 20],
[105, 'baz', 100]],
columns=['id', 'feature', 'value'])
]
categories = [
pd.DataFrame([[101, '_', 'a'],
[102, '_', 'a'],
[104, '_', 'a']],
columns=['id', 'feature', 'value'])
]
result = self.task.main(features=features,
categories=categories,
whiten=False,
id_filter=[],
subsets=[],
subset_labels=[])
data = result['data']
assert 0 in data
assert 1 in data
assert 'category' in data
assert 'subset' in data
assert 'id' in data
assert data['id'] == [101, 102, 103, 104, 105]
assert data['subset'] == [0, 0, 0, 0, 0]
assert data['subset_label'] == ['s1', 's1', 's1', 's1', 's1']
np.testing.assert_equal(data['category'],
['a', 'a', float('nan'), 'a', float('nan')])
def test_id_filter_works(self):
features = [
pd.DataFrame([[101, 'foo', 5], [101, 'bar', 6],
[102, 'foo', 10], [102, 'bar', 11],
[103, 'foo', 15], [103, 'bar', 16],
[104, 'foo', 20], [104, 'bar', 21]],
columns=['id', 'feature', 'value'])
]
result = self.task.main(features=features,
categories=[],
whiten=False,
id_filter=[101, 104],
subsets=[],
subset_labels=[])
data = result['data']
assert all(np.unique(data['id']) == [101, 104])
def test_correct_loadings(self):
features = [
pd.DataFrame([[101, 'foo', 5], [101, 'bar', 20],
[102, 'foo', 10], [102, 'bar', 15],
[103, 'foo', 15], [103, 'bar', 10],
[104, 'foo', 20], [104, 'bar', 5]],
columns=['id', 'feature', 'value'])
]
result = self.task.main(features=features,
categories=[],
whiten=False,
id_filter=[],
subsets=[],
subset_labels=[])
loadings = result['loadings']
assert loadings[0][0] == -loadings[0][1]
assert loadings[1][0] == loadings[1][1]
def test_correct_variance_ratios(self):
features = [
pd.DataFrame([[101, 'foo', 5], [101, 'bar', 5],
[102, 'foo', 10], [102, 'bar', 5],
[103, 'foo', 15], [103, 'bar', 5],
[104, 'foo', 20], [104, 'bar', 5]],
columns=['id', 'feature', 'value'])
]
result = self.task.main(features=features,
categories=[],
whiten=False,
id_filter=[],
subsets=[],
subset_labels=[])
variance_ratios = result['variance_ratios']
assert variance_ratios == [1, 0]
|
# -*- coding: utf-8 -*-
import torch
import numpy as np
from linearlayers import cmultiply
import sys
import os
def random_point_on_disk(N):
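    # N is a shape tuple; the output has shape N + (2,). Taking sqrt(r) as the
    # radius makes the samples uniform over the disk's area rather than
    # clustered towards the centre.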
r = torch.rand(N)
t = torch.rand(N)*2*np.pi
output=torch.empty(N+tuple([2]))
output[...,0] = torch.sqrt(r)*torch.cos(t)
output[...,1] = torch.sqrt(r)*torch.sin(t)
return output
def generate_noisy_triangles(outlier_ratio, nbr_points, nbr_clouds,inlier_noise=.01):
with torch.no_grad():
# generate the three corners defining the triangle
# Note: currently in format (clouds,2,nbr_points)!
corners = random_point_on_disk((nbr_clouds,3)).transpose(1,2)
# embed them in R^3, z=1
corners = torch.cat((corners, torch.ones(nbr_clouds,1,3)),1)
# for picking out pairs of points
choice = torch.zeros(3,1,1,3)
choice[0,:,:,0]=1
choice[0,:,:,1]=1
choice[1,:,:,0]=1
choice[1,:,:,2]=1
choice[2,:,:,1]=1
choice[2,:,:,2]=1
# for rotating
# generate rotation
theta = torch.randn(nbr_clouds,1,2)
theta = theta/torch.sqrt((theta**2).sum(2)).unsqueeze(2)
        # create inliers
inliers = random_point_on_disk((3,nbr_clouds,nbr_points)).transpose(2,3)
inliers = torch.cat((inliers,torch.ones(3,nbr_clouds,1,nbr_points)),2)
for k in range(3):
definers = choice[k,...]*corners
U , _, _ = torch.svd(definers@definers.transpose(1,2))
# orthogonal line
y = U[:,:,-1].unsqueeze(2)
# project inliers
cof = (inliers[k,...]*y).sum(1)/(y[:,:2,:]**2).sum(1)
inliers[k,...] = inliers[k,...] - cof.unsqueeze(1)*y
# choose lines for inliers
crit = torch.rand(nbr_clouds,nbr_points).unsqueeze(1)
inliers = inliers[0,...]*(crit<1/3) + inliers[1,...]*(crit>1/3)*(crit<2/3) +\
inliers[2,...]*(crit>2/3)
# project back to R^2 and add noise
inliersA = inliers[:,:-1,:] + inlier_noise*torch.randn(nbr_clouds,2,nbr_points)
inliersB = cmultiply(theta,inliersA.transpose(1,2)).transpose(1,2) + inlier_noise*torch.randn(nbr_clouds,2,nbr_points)
# create outliers
outliersA = random_point_on_disk((nbr_clouds,nbr_points)).transpose(1,2)
outliersB = random_point_on_disk((nbr_clouds,nbr_points)).transpose(1,2)
# create mask
mask = (torch.rand(nbr_clouds,nbr_points)>outlier_ratio).float().unsqueeze(1)
# choose points and reshape
pointsA = (1.0 - mask)*outliersA + mask*inliersA
pointsB = (1.0 - mask)*outliersB + mask*inliersB
cloud1 = pointsA.transpose(1,2)
cloud2 = pointsB.transpose(1,2)
return (cloud1,cloud2),(theta,mask.transpose(1,2))
def save_data(train_data,
train_labels,
test_data,
test_labels,
filename):
torch.save((train_data, train_labels, test_data, test_labels), filename)
def generate_and_save_pair_data(user_input):
name, outlier_ratios, nbr_points, nbr_train, nbr_val, nbr_test,inlier_noise = user_input
for ratio in outlier_ratios:
#train and validation data
train_data, train_labels = generate_noisy_triangles(outlier_ratio = ratio,
nbr_points = nbr_points,
nbr_clouds = nbr_train,
inlier_noise = inlier_noise)
val_data, val_labels = generate_noisy_triangles(outlier_ratio = ratio,
nbr_points = nbr_points,
nbr_clouds = nbr_val,
inlier_noise = inlier_noise)
# test_data
test_data, test_labels = generate_noisy_triangles(outlier_ratio = ratio,
nbr_points = nbr_points,
nbr_clouds = nbr_test,
inlier_noise = inlier_noise)
        # save train and test data (note: val_data above is generated but never saved)
torch.save((train_data, train_labels, test_data, test_labels), name+'_'+str(ratio) +'.pt')
        # save test data only
torch.save(( test_data, test_labels), name+'_'+str(ratio)+ '_test.pt')
if __name__ == '__main__':
    #python generate_point_cloud_data.py name ratio nbr_points nbr_train nbr_val nbr_test inlier_noise
    # take in user input;
    # if the first argument is 'paper', use the same parameters as in the paper
if sys.argv[1]=='paper':
name = 'control_rotated_pairs_'
ratios = [0.4, 0.6, 0.8, 0.85]
nbr_points = 100
nbr_train = 2000
nbr_val = 500
nbr_test =300
inlier_noise = .03
else:
if len(sys.argv)<2:
name = 'rotated_pairs_'
else:
name = sys.argv[1]
if len(sys.argv)<3:
ratios = [0.4,0.6,0.8,0.85]
else:
ratios = [ float(sys.argv[2])]
if len(sys.argv)<4:
nbr_points = 100
else:
nbr_points = int(sys.argv[3])
if len(sys.argv)<5:
nbr_train = 2000
else:
nbr_train = int(sys.argv[4])
if len(sys.argv)<6:
nbr_val =500
else:
nbr_val = int(sys.argv[5])
if len(sys.argv)<7:
nbr_test =300
else:
nbr_test = int(sys.argv[6])
if len(sys.argv)<8:
inlier_noise = 0.03
else:
inlier_noise = float(sys.argv[7])
user_input = [None]*7
user_input[0] = os.path.join('data',name)
user_input[1] = ratios
user_input[2] = nbr_points
user_input[3] = nbr_train
user_input[4] = nbr_val
user_input[5] = nbr_test
user_input[6] = inlier_noise
generate_and_save_pair_data(user_input)
|
# Generated by Django 3.1.1 on 2021-04-03 18:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("charity", "0010_auto_20210403_1729"),
]
operations = [
migrations.CreateModel(
name="CCEWCharity",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(db_index=True)),
(
"charity_name",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_type",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_registration_status",
models.CharField(blank=True, max_length=255, null=True),
),
("date_of_registration", models.DateField(blank=True, null=True)),
("date_of_removal", models.DateField(blank=True, null=True)),
(
"charity_reporting_status",
models.CharField(blank=True, max_length=255, null=True),
),
(
"latest_acc_fin_period_start_date",
models.DateField(blank=True, null=True),
),
(
"latest_acc_fin_period_end_date",
models.DateField(blank=True, null=True),
),
("latest_income", models.FloatField(blank=True, null=True)),
("latest_expenditure", models.FloatField(blank=True, null=True)),
(
"charity_contact_address1",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_address2",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_address3",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_address4",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_address5",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_postcode",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_phone",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_email",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_contact_web",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_company_registration_number",
models.CharField(blank=True, max_length=255, null=True),
),
("charity_insolvent", models.BooleanField(blank=True, null=True)),
(
"charity_in_administration",
models.BooleanField(blank=True, null=True),
),
(
"charity_previously_excepted",
models.BooleanField(blank=True, null=True),
),
(
"charity_is_cdf_or_cif",
models.CharField(blank=True, max_length=255, null=True),
),
("charity_is_cio", models.BooleanField(blank=True, null=True)),
("cio_is_dissolved", models.BooleanField(blank=True, null=True)),
(
"date_cio_dissolution_notice",
models.DateField(blank=True, null=True),
),
("charity_activities", models.TextField(blank=True, null=True)),
("charity_gift_aid", models.BooleanField(blank=True, null=True)),
("charity_has_land", models.BooleanField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityAnnualReturnHistory",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("fin_period_start_date", models.DateField(blank=True, null=True)),
("fin_period_end_date", models.DateField(blank=True, null=True)),
(
"ar_cycle_reference",
models.CharField(blank=True, max_length=255, null=True),
),
("reporting_due_date", models.DateField(blank=True, null=True)),
(
"date_annual_return_received",
models.DateField(blank=True, null=True),
),
("date_accounts_received", models.DateField(blank=True, null=True)),
("total_gross_income", models.BigIntegerField(blank=True, null=True)),
(
"total_gross_expenditure",
models.BigIntegerField(blank=True, null=True),
),
("accounts_qualified", models.BooleanField(blank=True, null=True)),
("suppression_ind", models.BooleanField(blank=True, null=True)),
(
"suppression_type",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityAreaOfOperation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
(
"geographic_area_type",
models.CharField(blank=True, max_length=255, null=True),
),
(
"geographic_area_description",
models.CharField(blank=True, max_length=255, null=True),
),
(
"parent_geographic_area_type",
models.CharField(blank=True, max_length=255, null=True),
),
(
"parent_geographic_area_description",
models.CharField(blank=True, max_length=255, null=True),
),
("welsh_ind", models.BooleanField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityARPartA",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
(
"latest_fin_period_submitted_ind",
models.BooleanField(blank=True, null=True),
),
("fin_period_order_number", models.IntegerField(blank=True, null=True)),
(
"ar_cycle_reference",
models.CharField(blank=True, max_length=255, null=True),
),
("fin_period_start_date", models.DateField(blank=True, null=True)),
("fin_period_end_date", models.DateField(blank=True, null=True)),
("ar_due_date", models.DateField(blank=True, null=True)),
("ar_received_date", models.DateField(blank=True, null=True)),
("total_gross_income", models.BigIntegerField(blank=True, null=True)),
(
"total_gross_expenditure",
models.BigIntegerField(blank=True, null=True),
),
(
"charity_raises_funds_from_public",
models.BooleanField(blank=True, null=True),
),
(
"charity_professional_fundraiser",
models.BooleanField(blank=True, null=True),
),
(
"charity_agreement_professional_fundraiser",
models.BooleanField(blank=True, null=True),
),
(
"charity_commercial_participator",
models.BooleanField(blank=True, null=True),
),
(
"charity_agreement_commerical_participator",
models.BooleanField(blank=True, null=True),
),
(
"grant_making_is_main_activity",
models.BooleanField(blank=True, null=True),
),
(
"charity_receives_govt_funding_contracts",
models.BooleanField(blank=True, null=True),
),
("count_govt_contracts", models.IntegerField(blank=True, null=True)),
(
"charity_receives_govt_funding_grants",
models.BooleanField(blank=True, null=True),
),
("count_govt_grants", models.IntegerField(blank=True, null=True)),
(
"income_from_government_contracts",
models.BigIntegerField(blank=True, null=True),
),
(
"income_from_government_grants",
models.BigIntegerField(blank=True, null=True),
),
(
"charity_has_trading_subsidiary",
models.BooleanField(blank=True, null=True),
),
(
"trustee_also_director_of_subsidiary",
models.BooleanField(blank=True, null=True),
),
(
"does_trustee_receive_any_benefit",
models.BooleanField(blank=True, null=True),
),
(
"trustee_payments_acting_as_trustee",
models.BooleanField(blank=True, null=True),
),
(
"trustee_receives_payments_services",
models.BooleanField(blank=True, null=True),
),
(
"trustee_receives_other_benefit",
models.BooleanField(blank=True, null=True),
),
(
"trustee_resigned_employment",
models.BooleanField(blank=True, null=True),
),
(
"employees_salary_over_60k",
models.BooleanField(blank=True, null=True),
),
(
"count_salary_band_60001_70000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_70001_80000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_80001_90000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_90001_100000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_100001_110000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_110001_120000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_120001_130000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_130001_140000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_140001_150000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_150001_200000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_200001_250000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_250001_300000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_300001_350000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_350001_400000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_400001_450000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_450001_500000",
models.IntegerField(blank=True, null=True),
),
(
"count_salary_band_over_500000",
models.IntegerField(blank=True, null=True),
),
("count_volunteers", models.IntegerField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityARPartB",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
(
"latest_fin_period_submitted_ind",
models.BooleanField(blank=True, null=True),
),
("fin_period_order_number", models.IntegerField(blank=True, null=True)),
("ar_cycle_reference", models.CharField(max_length=255)),
("fin_period_start_date", models.DateField(blank=True, null=True)),
("fin_period_end_date", models.DateField(blank=True, null=True)),
("ar_due_date", models.DateField(blank=True, null=True)),
("ar_received_date", models.DateField(blank=True, null=True)),
(
"income_donations_and_legacies",
models.BigIntegerField(blank=True, null=True),
),
(
"income_charitable_activities",
models.BigIntegerField(blank=True, null=True),
),
(
"income_other_trading_activities",
models.BigIntegerField(blank=True, null=True),
),
("income_investments", models.BigIntegerField(blank=True, null=True)),
("income_other", models.BigIntegerField(blank=True, null=True)),
(
"income_total_income_and_endowments",
models.BigIntegerField(blank=True, null=True),
),
("income_legacies", models.BigIntegerField(blank=True, null=True)),
("income_endowments", models.BigIntegerField(blank=True, null=True)),
(
"expenditure_raising_funds",
models.BigIntegerField(blank=True, null=True),
),
(
"expenditure_charitable_expenditure",
models.BigIntegerField(blank=True, null=True),
),
("expenditure_other", models.BigIntegerField(blank=True, null=True)),
("expenditure_total", models.BigIntegerField(blank=True, null=True)),
(
"expenditure_investment_management",
models.BigIntegerField(blank=True, null=True),
),
(
"expenditure_grants_institution",
models.BigIntegerField(blank=True, null=True),
),
(
"expenditure_governance",
models.BigIntegerField(blank=True, null=True),
),
(
"expenditure_support_costs",
models.BigIntegerField(blank=True, null=True),
),
(
"expenditure_depreciation",
models.BigIntegerField(blank=True, null=True),
),
("gain_loss_investment", models.BigIntegerField(blank=True, null=True)),
(
"gain_loss_pension_fund",
models.BigIntegerField(blank=True, null=True),
),
(
"gain_loss_revaluation_fixed_investment",
models.BigIntegerField(blank=True, null=True),
),
("gain_loss_other", models.BigIntegerField(blank=True, null=True)),
("reserves", models.BigIntegerField(blank=True, null=True)),
("assets_total_fixed", models.BigIntegerField(blank=True, null=True)),
("assets_own_use", models.BigIntegerField(blank=True, null=True)),
(
"assets_long_term_investment",
models.BigIntegerField(blank=True, null=True),
),
(
"defined_benefit_pension_scheme",
models.BigIntegerField(blank=True, null=True),
),
("assets_other_assets", models.BigIntegerField(blank=True, null=True)),
(
"assets_total_liabilities",
models.BigIntegerField(blank=True, null=True),
),
(
"assets_current_investment",
models.BigIntegerField(blank=True, null=True),
),
(
"assets_total_assets_and_liabilities",
models.BigIntegerField(blank=True, null=True),
),
(
"creditors_one_year_total_current",
models.BigIntegerField(blank=True, null=True),
),
(
"creditors_falling_due_after_one_year",
models.BigIntegerField(blank=True, null=True),
),
("assets_cash", models.BigIntegerField(blank=True, null=True)),
("funds_endowment", models.BigIntegerField(blank=True, null=True)),
("funds_unrestricted", models.BigIntegerField(blank=True, null=True)),
("funds_restricted", models.BigIntegerField(blank=True, null=True)),
("funds_total", models.BigIntegerField(blank=True, null=True)),
("count_employees", models.IntegerField(blank=True, null=True)),
("charity_only_accounts", models.BooleanField(blank=True, null=True)),
("consolidated_accounts", models.BooleanField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityClassification",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
("classification_code", models.IntegerField(blank=True, null=True)),
(
"classification_type",
models.CharField(blank=True, max_length=255, null=True),
),
(
"classification_description",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityEventHistory",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
("charity_name", models.CharField(max_length=255)),
("charity_event_order", models.IntegerField(blank=True, null=True)),
("event_type", models.CharField(blank=True, max_length=255, null=True)),
("date_of_event", models.DateField(blank=True, null=True)),
("reason", models.CharField(blank=True, max_length=255, null=True)),
(
"assoc_organisation_number",
models.IntegerField(blank=True, null=True),
),
(
"assoc_registered_charity_number",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"assoc_charity_name",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityGoverningDocument",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
(
"governing_document_description",
models.TextField(blank=True, null=True),
),
("charitable_objects", models.TextField(blank=True, null=True)),
("area_of_benefit", models.TextField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityOtherNames",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
("charity_name_id", models.IntegerField(blank=True, null=True)),
(
"charity_name_type",
models.CharField(blank=True, max_length=255, null=True),
),
(
"charity_name",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityOtherRegulators",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("regulator_order", models.IntegerField(blank=True, null=True)),
(
"regulator_name",
models.CharField(blank=True, max_length=255, null=True),
),
(
"regulator_web_url",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityPolicy",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField()),
(
"policy_name",
models.CharField(blank=True, max_length=255, null=True),
),
],
),
migrations.CreateModel(
name="CCEWCharityPublishedReport",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("charity_id", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField(blank=True, null=True)),
(
"report_name",
models.CharField(blank=True, max_length=255, null=True),
),
(
"report_location",
models.CharField(blank=True, max_length=255, null=True),
),
("date_published", models.DateField(blank=True, null=True)),
],
),
migrations.CreateModel(
name="CCEWCharityTrustee",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("date_of_extract", models.DateField(blank=True, null=True)),
("organisation_number", models.IntegerField()),
("registered_charity_number", models.IntegerField(db_index=True)),
("linked_charity_number", models.IntegerField()),
("trustee_id", models.IntegerField(blank=True, null=True)),
(
"trustee_name",
models.CharField(blank=True, max_length=255, null=True),
),
("trustee_is_chair", models.BooleanField(blank=True, null=True)),
(
"individual_or_organisation",
models.CharField(
blank=True,
choices=[("P", "Individual"), ("O", "Organisation")],
max_length=1,
null=True,
),
),
(
"trustee_date_of_appointment",
models.DateField(blank=True, null=True),
),
],
),
]
|
from unittest.mock import MagicMock
from mpf.platforms.interfaces.driver_platform_interface import PulseSettings
from mpf.core.platform import SwitchSettings, DriverSettings
from mpf.tests.MpfTestCase import MpfTestCase
class TestKickback(MpfTestCase):
def get_config_file(self):
return 'config.yaml'
def get_machine_path(self):
return 'tests/machine_files/kickback/'
def test_kickback_with_ball_save(self):
self.machine.default_platform.set_pulse_on_hit_rule = MagicMock()
self.mock_event("kickback_kickback_test_fired")
self.assertFalse(self.machine.ball_saves["kickback_save"].enabled)
# kickback is not enabled. nothing should happen
self.hit_and_release_switch("s_kickback")
self.advance_time_and_run(.01)
self.assertEventNotCalled("kickback_kickback_test_fired")
# enable kickback
self.post_event("kickback_enable")
self.advance_time_and_run(.01)
# should write a hw rule
self.machine.default_platform.set_pulse_on_hit_rule.assert_called_once_with(
SwitchSettings(hw_switch=self.machine.switches["s_kickback"].hw_switch, invert=False, debounce=False),
DriverSettings(hw_driver=self.machine.coils["kickback_coil"].hw_driver,
pulse_settings=PulseSettings(power=1.0, duration=100), hold_settings=None, recycle=True)
)
# a hit should fire it
self.hit_and_release_switch("s_kickback")
self.advance_time_and_run(.01)
self.assertEventCalled("kickback_kickback_test_fired")
# ball save should be enabled just in case
self.assertTrue(self.machine.ball_saves["kickback_save"].enabled)
# but disable after 6s
self.advance_time_and_run(6.1)
self.assertFalse(self.machine.ball_saves["kickback_save"].enabled)
# it only works once though
self.mock_event("kickback_kickback_test_fired")
self.hit_and_release_switch("s_kickback")
self.advance_time_and_run(.01)
self.assertEventNotCalled("kickback_kickback_test_fired")
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: utf-8 -*-
"""This module executes post model stopped acitvities.
It is used by stop_model cloud function to execute any necessary
post model stopped activity: house keeping, write data into another location...
"""
from typing import Any, Dict, Optional
from google.cloud.functions_v1.context import Context
# Add your imports here, e.g.
#
# import os
# from google.cloud import bigquery
# from datetime import datetime
# Declare the module global variables here, e.g.
#
# PREDICTIONS_TABLE_GCP_PROJECT = str(
# os.getenv('PREDICTIONS_TABLE_GCP_PROJECT',
# '')
# )
def hook_post_stop_action(event: Dict[str, Any],
                          context: Optional[Context] = None):
"""Executes after the model is stopped.
The only information available at this execution moment is the pub/sub event
from the scheduler which triggered the stop_model cloud function.
Args:
event (dict): The dictionary with data specific to this type of event. The
`data` field contains the PubsubMessage message. The `attributes` field
will contain custom attributes if there are any.
context (google.cloud.functions.Context): The Cloud Functions event
metadata. The `event_id` field contains the Pub/Sub message ID. The
`timestamp` field contains the publish time.
  Example of use (writing data into another project):

    publish_date = get_date(context)
    print('Received: {}'.format(publish_date))
    predictions_table = '{}_{}'.format(BQ_LTV_PREDICTIONS_TABLE, publish_date)
    target_table_full_path = '{}.{}.{}'.format(PREDICTIONS_TABLE_GCP_PROJECT,
                                               PREDICTIONS_TABLE_DATASET,
                                               predictions_table)
    write_on_external_project(BQ_LTV_GCP_PROJECT, BQ_LTV_DATASET,
                              predictions_table, target_table_full_path)
"""
del event, context # Unused by default
|
import multiprocessing
import sys
def worker_with(lock, f):
with lock:
fs = open(f, 'a+')
n = 10
while n > 1:
fs.write("Locked acquired via with\n")
n -= 1
fs.close()
def worker_no_with(lock, f):
lock.acquire()
try:
fs = open(f, 'a+')
n = 10
while n > 1:
fs.write("Locked acquired via directly\n")
n -= 1
fs.close()
finally:
lock.release()
if __name__ == "__main__":
lock = multiprocessing.Lock()
f = 'file.txt'
w = multiprocessing.Process(target=worker_with, args=(lock, f))
nw = multiprocessing.Process(target=worker_no_with, args=(lock, f))
w.start()
nw.start()
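    # Note: without w.join() and nw.join() here, 'end' is printed as soon as
    # the workers have been started, not after they have finished writing.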
print('end')
|
import pickle
import json
import spacy
import random
from spacy.util import minibatch, compounding
from pathlib import Path
import os
import training.config as config
def custom_nlp_train(filename, output_dir):
with open(filename, 'rb') as fp:
doc = pickle.load(fp)
## Creating a blank spacy model
nlp = spacy.blank('en')
# nlp = spacy.load(config.base_model_dir)
# print("Created a blank en model")
if 'ner' not in nlp.pipe_names:
ner = nlp.create_pipe('ner')
nlp.add_pipe(ner)
print("Added ner pipe")
else:
ner = nlp.get_pipe('ner')
# for i in LABEL:
# ner.add_label(i)
# add labels
for _, annotations in doc:
for ent in annotations.get('entities'):
ner.add_label(ent[2])
optimizer = nlp.begin_training()
for itn in range(1):
random.shuffle(doc)
losses = {}
# batch up the examples using spaCy's minibatch
batches = minibatch(doc, size=compounding(4.0, 32.0, 1.001))
for batch in batches:
texts, annotations = zip(*batch)
nlp.update(
texts, # batch of texts
annotations, # batch of annotations
drop=0.3, # dropout - make it harder to memorise data
losses=losses,
)
print("Losses", losses)
# output_dir = os.path.dirname(os.path.realpath(__file__)) + "/training_data"
new_model_name="customspacy"
output_dir = Path(output_dir)
if not output_dir.exists():
output_dir.mkdir()
nlp.meta["name"] = new_model_name # rename model
nlp.to_disk(output_dir)
print("Saved model to", output_dir)
if __name__ == "__main__":
custom_nlp_train('/home/accubits/Documents/Projects/AI/resume/training/Models/EducationForTraining', \
'/home/accubits/Documents/Projects/AI/resume/training/Models/Education')
|
from arekit.common.data import const
from arekit.common.data.input.providers.columns.base import BaseColumnsProvider
class SampleColumnsProvider(BaseColumnsProvider):
"""
[id, label, text_a] -- for train
[id, text_a] -- for test
"""
def __init__(self, store_labels):
super(SampleColumnsProvider, self).__init__()
self.__store_labels = store_labels
self.__text_column_names = None
# region properties
@property
def StoreLabels(self):
return self.__store_labels
@property
def TextColumnNames(self):
return self.__text_column_names
# endregion
def get_columns_list_with_types(self):
"""
Composing df with the following columns:
[id, label, type, text_a]
"""
dtypes_list = super(SampleColumnsProvider, self).get_columns_list_with_types()
dtypes_list.append((const.ID, str))
dtypes_list.append((const.DOC_ID, 'int32'))
# insert labels
if self.__store_labels:
dtypes_list.append((const.LABEL, 'int32'))
# insert text columns
for col_name in self.__text_column_names:
dtypes_list.append((col_name, str))
# insert indices
dtypes_list.append((const.S_IND, 'int32'))
dtypes_list.append((const.T_IND, 'int32'))
return dtypes_list
def set_text_column_names(self, text_column_names):
assert(isinstance(text_column_names, list))
self.__text_column_names = text_column_names
|
from rest_framework.exceptions import APIException
from django.utils.encoding import force_text
from rest_framework import status
class CustomValidation(APIException):
status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
default_detail = 'A server error occurred.'
    def __init__(self, detail, field, status_code):
        if status_code is not None:
            self.status_code = status_code
        if detail is not None:
            self.detail = {field: force_text(detail)}
        else:
            self.detail = {'detail': force_text(self.default_detail)}
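
# A minimal usage sketch (hypothetical serializer method; the field name,
# message and value_already_taken() check are placeholders):
#
#   from rest_framework import status
#
#   def validate_email(self, value):
#       if value_already_taken(value):
#           raise CustomValidation('This email is already in use.',
#                                  'email', status.HTTP_400_BAD_REQUEST)
#       return value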
|
"""Script used to split the UrbanSound8K dataset by class into separate folders"""
import os
import shutil
FOLDS = ['9', '10'] # the folds to be split
csv = open("../data/UrbanSound8K.tar/UrbanSound8K/metadata/UrbanSound8K.csv")
csv.readline()
for line in csv.readlines():
split = line.split(",")
slice_file_name = split[0]
fold = split[5]
if fold in FOLDS:
class_name = split[7].rstrip()
src_path = "../data/UrbanSound8K.tar/UrbanSound8K/audio/fold" + fold + "/" + slice_file_name
dest_path = "../data/ByClass2/" + class_name + "/"
if not os.path.isdir(dest_path):
os.makedirs(dest_path)
shutil.copy2(src_path, dest_path)
print(slice_file_name)
|
from sklearn.linear_model import Perceptron
from import_data import import_raw_data, get_features_and_target
from data_cleaning import complete_cleaning
train_chunks = import_raw_data('train', by_chunk=True)
model = Perceptron(penalty='l2',
alpha=0.0001,
fit_intercept=True,
n_iter=5000,
shuffle=True,
random_state=0,
verbose=1000,
n_jobs=-1,
eta0=0.9,
warm_start=True,)
for (i, train_chunk) in enumerate(train_chunks):
    print('Chunk %s ' % i + '-' * 25)
train_chunk = complete_cleaning(train_chunk)
X, y = get_features_and_target(train_chunk)
model.fit(X, y)
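# With warm_start=True, each fit() call above starts from the coefficients
# learned on the previous chunk instead of reinitialising them, which is what
# turns this chunk-by-chunk loop into incremental training.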
|
import time
class Simulator():
__sample_rate: float
__time_step: float # sample_rate converted for time.
__simulating: bool
def __init__(self, sample_rate: float = 1):
self.__sample_rate = sample_rate
self.__time_step = 1 / sample_rate
def __execute(self):
print("simulate")
def __step(self):
self.__execute()
time.sleep(self.__time_step)
def simulate(self):
self.__simulating = True
        while self.__simulating:
self.__step()
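
# A minimal usage sketch (this runs forever at 2 samples per second, since
# nothing in the class ever sets __simulating back to False):
#
#   sim = Simulator(sample_rate=2)
#   sim.simulate()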
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayBusinessOrderCancelResponse(AlipayResponse):
def __init__(self):
super(AlipayBusinessOrderCancelResponse, self).__init__()
self._merchant_order_no = None
self._order_no = None
@property
def merchant_order_no(self):
return self._merchant_order_no
@merchant_order_no.setter
def merchant_order_no(self, value):
self._merchant_order_no = value
@property
def order_no(self):
return self._order_no
@order_no.setter
def order_no(self, value):
self._order_no = value
def parse_response_content(self, response_content):
response = super(AlipayBusinessOrderCancelResponse, self).parse_response_content(response_content)
if 'merchant_order_no' in response:
self.merchant_order_no = response['merchant_order_no']
if 'order_no' in response:
self.order_no = response['order_no']
|
'''
Miscellaneous functions for SpiralDraw.py
Developed by Richie Zhang for the 2016 IB Math SL Internal Assessment.
The class 'SpiralDrawMisc' was mostly used for testing during development
but was also used occasionally when writing the IA.
MIT Licensed, Copyright (c) 2016 Richie Zhang
'''
import turtle
class SpiralDrawMisc:
'''
Miscellaneous functions for SpiralDraw.py
Mostly used for testing during initial development
'''
def circle(radius=1):
'''
        Draw a circle without the use of turtle.circle()
First moves the pen forward "radius" pixels, and left by 1 degree.
Repeat 360 times, causing the curve to loop back around into a circle.
'''
for a in range(360):
# Move forward by "radius".
# "radius" is actually how far to move each time, which *indirectly*
# controls the actual radius of the circle.
turtle.forward(radius)
# Move left by 1 degree
turtle.left(1)
def spiral(radius=1, advance=1, loops=3):
'''
Draw a straight spiral
When the pen begins to draw a circle, also move the
pen left (relative to the canvas) {advance} units during
each step of the circle drawing process. This creates
        one loop of the spiral. Simply repeat this action to
achieve the desired spiral length.
'''
for loop_main in range(loops):
spiral_heading = 0
for a in range(360):
# Begin drawing a circle
turtle.forward(radius)
turtle.left(1)
# Save the heading of the circle drawing process
spiral_heading = turtle.heading()
# Move the pen right by {advance / 10}
# Because advancing by 1 pixel is too large
turtle.setheading(0)
turtle.forward(advance / 10)
# Restore the original heading of the pen
# Allows the circle drawing process to continue
turtle.setheading(spiral_heading)
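
# A minimal usage sketch (opens a turtle window and draws a three-loop spiral;
# note the methods take no self, so they are called on the class itself):
#
#   SpiralDrawMisc.spiral(radius=1, advance=1, loops=3)
#   turtle.done()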
|
# encoding: utf-8
from http import HTTPStatus
from flask_restx import Resource
from hash_chain.app.modules.auth.decorators import admin_token_required
from hash_chain.app.modules.ledger.ddl.dto import DdlDto
from hash_chain.app.modules.ledger.ddl.services import DdlServices
from hash_chain.app.modules.ledger.parameters import ledger_name_parser_plain, table_name_parser_plain, \
ledger_name_parser_choices_or_plain, table_index_parser_plain
api = DdlDto.api
ledgers = DdlDto.ledgers
ledger = DdlDto.ledger
ledger_name_plain = ledger_name_parser_plain(default=None)
ledger_name_choices = ledger_name_parser_choices_or_plain(location='args')
table_name = table_name_parser_plain()
table_index_create = table_index_parser_plain()
@api.route('/ledgers/all')
class ListLedgers(Resource):
"""Handles HTTP requests to URL: /api/v1/ledger/dml/ledgers."""
@api.doc('List all QLDB ledgers in a given account')
@api.doc(security="jwt_token")
@admin_token_required
@api.marshal_list_with(ledgers, envelope='data')
@api.response(int(HTTPStatus.OK), "Found all ledger.")
@api.response(int(HTTPStatus.UNAUTHORIZED), "Token is invalid or expired.")
@api.response(int(HTTPStatus.BAD_REQUEST), "Validation error.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to create the ledger.")
@api.response(int(HTTPStatus.INTERNAL_SERVER_ERROR), "Internal server error.")
def get(self):
"""List all QLDB ledgers in a given account"""
return DdlServices.get_ledger_list()
@api.route('/ledgers')
@api.doc(security="jwt_token")
@api.response(int(HTTPStatus.UNAUTHORIZED), "Token is invalid or expired.")
@api.response(int(HTTPStatus.BAD_REQUEST), "Validation error.")
@api.response(int(HTTPStatus.INTERNAL_SERVER_ERROR), "Internal server error.")
class Ledger(Resource):
"""Handles HTTP requests to URL: /api/v1/ledger/ddl/."""
@api.doc('Returns information about a ledger, including its state and when it was created')
@admin_token_required
@api.expect(ledger_name_choices, validate=True)
@api.marshal_with(ledger, envelope='data')
@api.response(int(HTTPStatus.NOT_FOUND), "Ledger Not Found.")
@api.response(int(HTTPStatus.OK), "Retrieving the digest.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to connect ledgers.")
def get(self):
"""Returns information about a ledger, including its state and when it was created."""
args = ledger_name_choices.parse_args(req=None, strict=False)
return DdlServices.describe_ledger(**args)
@api.doc('Create QLDB Ledger')
@admin_token_required
@api.expect(ledger_name_plain, validate=True)
@api.response(int(HTTPStatus.CREATED), "Ledger is active and ready to use.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to create the ledger.")
def post(self):
"""Create QLDB Ledger"""
args = ledger_name_plain.parse_args(req=None, strict=False)
return DdlServices.create_ledger(**args)
@api.doc('Delete QLDB Ledger')
@admin_token_required
@api.expect(ledger_name_choices, validate=True)
@api.response(int(HTTPStatus.ACCEPTED), "The ledger is successfully deleted.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to delete the ledger.")
def delete(self):
"""Delete QLDB Ledger"""
args = ledger_name_choices.parse_args(req=None, strict=False)
return DdlServices.delete_ledger(**args)
@api.route('/tables')
@api.doc(security="jwt_token")
@api.response(int(HTTPStatus.UNAUTHORIZED), "Token is invalid or expired.")
@api.response(int(HTTPStatus.BAD_REQUEST), "Validation error.")
@api.response(int(HTTPStatus.INTERNAL_SERVER_ERROR), "Internal server error.")
class LedgerTable(Resource):
"""Handles HTTP requests to URL: /api/v1/ledger/ddl/tables."""
@api.doc('List all the tables in the configured ledger in QLDB')
@admin_token_required
@api.expect(ledger_name_choices, validate=True)
@api.response(int(HTTPStatus.OK), "List of all tables.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to connect ledgers.")
def get(self):
"""List all the tables in the configured ledger in QLDB"""
args = ledger_name_choices.parse_args(req=None, strict=False)
return DdlServices.list_tables(**args)
@api.doc('Create QLDB Table')
@api.expect(table_name, validate=True)
@api.response(int(HTTPStatus.CREATED), "Tables created successfully.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to create the table.")
@admin_token_required
def post(self):
"""Create QLDB Table"""
args = table_name.parse_args(req=None, strict=False)
return DdlServices.create_table(**args)
@api.doc('Drop QLDB Table')
@api.expect(table_name, validate=True)
@api.response(int(HTTPStatus.ACCEPTED), "Tables dropped successfully.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to drop the table.")
@admin_token_required
def delete(self):
"""Drop QLDB Table"""
args = table_name.parse_args(req=None, strict=False)
return DdlServices.drop_table(**args)
@api.route('/table_index')
@api.doc(security="jwt_token")
@api.response(int(HTTPStatus.UNAUTHORIZED), "Token is invalid or expired.")
@api.response(int(HTTPStatus.BAD_REQUEST), "Validation error.")
@api.response(int(HTTPStatus.INTERNAL_SERVER_ERROR), "Internal server error.")
class LedgerTableIndex(Resource):
"""Handles HTTP requests to URL: /api/v1/ledger/ddl/table_index."""
@api.doc('Create index on table in a particular ledger')
@api.expect(table_index_create, validate=True)
@admin_token_required
@api.response(int(HTTPStatus.CREATED), "Index created successfully.")
@api.response(int(HTTPStatus.UNPROCESSABLE_ENTITY), "Unable to create the index.")
def post(self):
"""Create index on table in a particular ledger"""
args = table_index_create.parse_args(req=None, strict=False)
return DdlServices.create_table_index(**args)
|
##########################################################################################################
#Author: Amit Shinde
#Date: 30 March 2020
#Desc: This Python code is AI-based; it accepts a text file as an argument and generates questions from it.
#Type: Supervised Learning
###########################################################################################################
from textblob import TextBlob
import nltk
from textblob import Word
import sys
def parse(string):
"""
    Parse a paragraph. Divide it into sentences and try to generate questions from each sentence.
"""
try:
txt = TextBlob(string)
for sentence in txt.sentences:
genQuestion(sentence)
except Exception as e:
raise e
def genQuestion(line):
"""print outputs question from the given text"""
print(line+ "##########")
if type(line) is str:
line = TextBlob(line)
# print(line+ "@@@@@@@@@@@")
bucket = {} # Create an empty dictionary
    for i,j in enumerate(line.tags): # line.tags are the parts-of-speech in English
if j[1] not in bucket:
bucket[j[1]] = i
# print(i)
# print(bucket[j[1]])
    if verbvar: # In verbose mode, print the sentence, its tags, and the bucket dictionary
print('\n','-'*20)
print(line ,'\n')
print("TAGS:",line.tags, '\n')
print(bucket)
question = '' # Create an empty string
    ######################################### Training Data ############################################################
    # These are the English part-of-speech tags used in this demo program.
#.....................................................................
# NNS Noun, plural
# JJ Adjective
# NNP Proper noun, singular
# VBG Verb, gerund or present participle
# VBN Verb, past participle
# VBZ Verb, 3rd person singular present
# VBD Verb, past tense
# IN Preposition or subordinating conjunction
# PRP Personal pronoun
# NN Noun, singular or mass
#.....................................................................
# Create a list of tag-combination
l1 = ['NNP', 'VBG', 'VBZ', 'IN']
l2 = ['NNP', 'VBG', 'VBZ']
l3 = ['PRP', 'VBG', 'VBZ', 'IN']
l4 = ['PRP', 'VBG', 'VBZ']
l5 = ['PRP', 'VBG', 'VBD']
l6 = ['NNP', 'VBG', 'VBD']
l7 = ['NN', 'VBG', 'VBZ']
l8 = ['NNP', 'VBZ', 'JJ']
l9 = ['NNP', 'VBZ', 'NN']
l10 = ['NNP', 'VBZ']
l11 = ['PRP', 'VBZ']
l12 = ['NNP', 'NN', 'IN']
l13 = ['NN', 'VBZ']
    ########################################################################################################################
    # Mapping between input and training data
if all(key in bucket for key in l1): #'NNP', 'VBG', 'VBZ', 'IN' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] +' '+ line.words[bucket['NNP']]+ ' '+ line.words[bucket['VBG']] + '?'
elif all(key in bucket for key in l2): #'NNP', 'VBG', 'VBZ' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] +' '+ line.words[bucket['NNP']] +' '+ line.words[bucket['VBG']] + '?'
elif all(key in bucket for key in l3): #'PRP', 'VBG', 'VBZ', 'IN' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] +' '+ line.words[bucket['PRP']]+ ' '+ line.words[bucket['VBG']] + '?'
elif all(key in bucket for key in l4): #'PRP', 'VBG', 'VBZ' in sentence.
question = 'What ' + line.words[bucket['PRP']] +' '+ ' does ' + line.words[bucket['VBG']]+ ' '+ line.words[bucket['VBG']] + '?'
elif all(key in bucket for key in l7): #'NN', 'VBG', 'VBZ' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] +' '+ line.words[bucket['NN']] +' '+ line.words[bucket['VBG']] + '?'
elif all(key in bucket for key in l8): #'NNP', 'VBZ', 'JJ' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] + ' ' + line.words[bucket['NNP']] + '?'
elif all(key in bucket for key in l9): #'NNP', 'VBZ', 'NN' in sentence
question = 'What' + ' ' + line.words[bucket['VBZ']] + ' ' + line.words[bucket['NNP']] + '?'
elif all(key in bucket for key in l11): #'PRP', 'VBZ' in sentence.
if line.words[bucket['PRP']] in ['she','he']:
question = 'What' + ' does ' + line.words[bucket['PRP']].lower() + ' ' + line.words[bucket['VBZ']].singularize() + '?'
elif all(key in bucket for key in l10): #'NNP', 'VBZ' in sentence.
question = 'What' + ' does ' + line.words[bucket['NNP']] + ' ' + line.words[bucket['VBZ']].singularize() + '?'
elif all(key in bucket for key in l13): #'NN', 'VBZ' in sentence.
question = 'What' + ' ' + line.words[bucket['VBZ']] + ' ' + line.words[bucket['NN']] + '?'
if 'VBZ' in bucket and line.words[bucket['VBZ']] == "’":
question = question.replace(" ’ ","'s ")
if question != '':
print('\n', 'Question: ' + question )
####################################################################################################################
def main():
"""
Accepts a text file as an argument and generates questions from it.
"""
    # Verbose mode (verbvar) is activated when we pass -v as an argument.
global verbvar
verbvar = False
if len(sys.argv) >= 3:
if sys.argv[2] == '-v':
            print('Verbose mode activated\n')
verbvar = True
# Open the file given as argument in read-only mode.
filehandle = open(sys.argv[1], 'r')
textinput = filehandle.read()
print('\n-----------INPUT TEXT-------------\n')
print(textinput,'\n')
print('\n-----------INPUT END---------------\n')
parse(textinput)
if __name__ == "__main__":
main()
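# Example invocation (hypothetical file names; -v enables verbose output):
#   python genquestion.py input.txt -v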
|
import numpy as np
from .track import Track
from .linear_assignment import LinearAssignment, iou_distance, cosine_distance
class Tracker(object):
def __init__(self,
metric,
min_distance=0.5,
n_init=6,
max_disappeared=5,
max_age=8):
self.n_init = n_init
self.max_disappeared = max_disappeared
self.max_age = max_age
self.tracks = []
self.iou_assignment = LinearAssignment(metric, min_distance=min_distance)
def update(self, rgb_image, detections):
matches, unmatched_detections, unmatched_tracks = self.iou_assignment.match(self.tracks, detections)
for detection_indice, track_indice in matches:
self.tracks[track_indice].update(rgb_image, detections[detection_indice])
for track_indice in unmatched_tracks:
if self.tracks[track_indice].is_confirmed():
self.tracks[track_indice].predict(rgb_image)
else:
self.tracks[track_indice].mark_missed()
for detection_indice in unmatched_detections:
self.start_track(rgb_image, detections[detection_indice])
self.tracks = [t for t in self.tracks if not t.is_deleted()]
return self.tracks
def start_track(self, rgb_image, detection):
if detection.class_label == "person":
self.tracks.append(Track(rgb_image,
detection,
self.n_init,
self.max_disappeared,
self.max_age,
use_correlation_tracker=False))
else:
self.tracks.append(Track(rgb_image,
detection,
self.n_init,
self.max_disappeared,
self.max_age,
use_correlation_tracker=True))
return len(self.tracks)-1
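
# A minimal usage sketch (detector() and its detection objects are
# hypothetical; Track, iou_distance and cosine_distance come from the sibling
# modules imported above):
#
#   tracker = Tracker(metric=iou_distance)
#   for frame in frames:
#       tracks = tracker.update(frame, detector(frame))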
|
import sys, os
path = os.path.dirname(__file__)
sys.path.append(path)
from core import src
if __name__ == "__main":
src.run()
|
#!/usr/bin/env python3
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Update the firebase project hosting the Super Size UI."""
import os
import shutil
import subprocess
import sys
import tempfile
import uuid
FIREBASE_PROJECT = 'chrome-supersize'
def _FirebaseLogin():
"""Login into the Firebase CLI"""
subprocess.check_call(['firebase', 'login'])
def _CheckFirebaseCLI():
"""Fail with a proper error message, if Firebase CLI is not installed."""
if subprocess.call(['firebase', '--version'], stdout=subprocess.DEVNULL) != 0:
link = 'https://firebase.google.com/docs/cli#install_the_firebase_cli'
raise Exception('Firebase CLI not installed or not on your PATH. Follow '
'the instructions at ' + link + ' to install')
def _FirebaseInitProjectDir(project_dir):
"""Create a firebase.json file that is needed for deployment."""
static_dir = os.path.join(project_dir, 'public')
with open(os.path.join(project_dir, 'firebase.json'), 'w') as f:
f.write("""
{
"hosting": {
"public": "public",
"ignore": [
"firebase.json",
"**/README*",
"**/.*"
]
}
}
""")
return static_dir
def _FirebaseDeploy(project_dir):
"""Deploy the project to firebase hosting."""
subprocess.check_call(['firebase', 'deploy', '-P', FIREBASE_PROJECT],
cwd=project_dir)
def _CopyStaticFiles(project_static_dir):
"""Copy over static files from the static directory."""
static_files = os.path.join(os.path.dirname(__file__), 'static')
shutil.copytree(static_files, project_static_dir)
def _FillInAndCopyTemplates(project_static_dir):
"""Generate and copy over the templates/sw.js file."""
template_file = os.path.join(os.path.dirname(__file__), 'templates', 'sw.js')
cache_hash = uuid.uuid4().hex
with open(template_file, 'r') as in_file:
with open(os.path.join(project_static_dir, 'sw.js'), 'w') as out_file:
out_file.write(in_file.read().replace('{{cache_hash}}', cache_hash))
def _Prompt(message):
"""Prompt the user with a message and request affirmative outcome."""
choice = input(message + ' [y/N] ').lower()
return choice and choice[0] == 'y'
def main():
message = (
"""This script deploys the contents of //tools/binary_size/libsupersize/static
to firebase hosting at chrome-supersize.firebaseapp.com. Please ensure you have
read the instructions at //tools/binary_size/libsupersize/static/README.md first
before running this. Are you sure you want to continue?""")
if _Prompt(message):
_CheckFirebaseCLI()
_FirebaseLogin()
with tempfile.TemporaryDirectory(prefix='firebase-') as project_dir:
static_dir = _FirebaseInitProjectDir(project_dir)
_CopyStaticFiles(static_dir)
_FillInAndCopyTemplates(static_dir)
_FirebaseDeploy(project_dir)
else:
print('Nothing was deployed.')
if __name__ == '__main__':
main()
|
from __future__ import print_function
from CGAL.CGAL_Kernel import Point_3
from CGAL.CGAL_Kernel import Plane_3
from CGAL import CGAL_Convex_hull_3
from CGAL.CGAL_Polyhedron_3 import Polyhedron_3
pts = []
pts.append(Point_3(0, 0, 0))
pts.append(Point_3(0, 1, 0))
pts.append(Point_3(1, 1, 0))
pts.append(Point_3(1, 0, 0))
pts.append(Point_3(0, 0, 1))
pts.append(Point_3(0, 1, 1))
pts.append(Point_3(1, 1, 1))
pts.append(Point_3(1, 0, 1))
res = Polyhedron_3()
CGAL_Convex_hull_3.convex_hull_3(pts, res)
print("convex hull has ", res.size_of_vertices(), " vertices")
print("is strongly convex: ", CGAL_Convex_hull_3.is_strongly_convex_3(res))
planes = []
planes.append(Plane_3(-1, 0, 0, 0))
planes.append(Plane_3(1, 0, 0, -1))
planes.append(Plane_3(0, -1, 0, 0))
planes.append(Plane_3(0, 1, 0, -1))
planes.append(Plane_3(0, 0, -1, 0))
planes.append(Plane_3(0, 0, 1, -1))
res.clear()
CGAL_Convex_hull_3.halfspace_intersection_3(planes, res)
print("halfspace intersection has ", res.size_of_vertices(), " vertices")
|
"""
@brief test log(time=1s)
You should indicate a time in seconds. The program ``run_unittests.py``
will sort all test files by increasing time and run them.
"""
import unittest
from pyquickhelper.pycode import ExtTestCase
from csharpy.cparts import version_c
from csharpy import __version__
class TestCModule(ExtTestCase):
"""Test dynamic compilation."""
def test_version_c(self):
ver = version_c()
self.assertEqual(ver.split('.')[:2], __version__.split('.')[:2])
if __name__ == "__main__":
unittest.main()
|
#
# Copyright (C) 2008, Brian Tanner
#
#http://rl-glue-ext.googlecode.com/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# $Revision: 446 $
# $Date: 2009-01-22 20:20:21 -0700 (Thu, 22 Jan 2009) $
# $Author: brian@tannerpages.com $
# $HeadURL: http://rl-glue-ext.googlecode.com/svn/trunk/projects/codecs/Python/src/tests/test_1_agent.py $
import random
import sys
from rlglue.agent.Agent import Agent
from rlglue.agent import AgentLoader as AgentLoader
from rlglue.types import Action
from rlglue.types import Observation
class test_1_agent(Agent):
stepCount=0
def agent_init(self,taskSpec):
self.stepCount=0
def agent_start(self,observation):
self.stepCount=0
action=Action()
action.intArray=observation.intArray
action.doubleArray=observation.doubleArray
action.charArray=observation.charArray
return action
def agent_step(self,reward, observation):
self.stepCount=self.stepCount+1
action=Action()
action.intArray=observation.intArray
action.doubleArray=observation.doubleArray
action.charArray=observation.charArray
return action
def agent_end(self,reward):
pass
def agent_cleanup(self):
pass
def agent_message(self,inMessage):
timesToPrint=self.stepCount%3
outMessage=inMessage+"|"
for i in range(0, timesToPrint):
outMessage=outMessage+"%d" % (self.stepCount)
outMessage=outMessage+"."
outMessage=outMessage+"|"+inMessage
return outMessage
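# For example, after 5 steps agent_message("ping") returns "ping|55.|ping":
# stepCount % 3 == 2 copies of "5", then ".", then "|" plus the original message.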
if __name__=="__main__":
AgentLoader.loadAgent(test_1_agent())
|
import random
from main_lib.AppointmentSchedule import AppointmentSchedule
from main_lib.Appointment import Appointment
from main_lib.Doctor import Doctor
from main_lib.HealthcareProfessional import HealthcareProfessional
from main_lib.Nurse import Nurse
from main_lib.Patient import Patient
from main_lib.Prescription import Prescription
from main_lib.Receptionist import Receptionist
from pprint import pprint
def get_random_appointment(_appointment_schedule):
"""
Custom method to get random appointment from _appointment_schedule
:param _appointment_schedule: The AppointmentSchedule object
:type _appointment_schedule: AppointmentSchedule
:return: Random appointment ID
:rtype: str
"""
random_x = random.randint(0, len(_appointment_schedule.appointments) - 1)
random_id = _appointment_schedule.appointments[random_x].id
return random_id
if __name__ == '__main__':
appointment_schedule = AppointmentSchedule()
patient_1 = Patient(
name='Patient 1',
address='Address 1',
phone='Phone 1',
appointment_schedule=appointment_schedule
)
patient_2 = Patient(
name='Patient 2',
address='Address 2',
phone='Phone 2',
appointment_schedule=appointment_schedule
)
hp_doctor_1 = HealthcareProfessional(
name='Doctor 1',
appointment_schedule=appointment_schedule
)
hp_doctor_2 = HealthcareProfessional(
name='Doctor 2',
appointment_schedule=appointment_schedule
)
doctor_1 = Doctor(
name='Doctor 1',
appointment_schedule=appointment_schedule
)
receptionist = Receptionist(
name='Receptionist 1',
appointment_schedule=appointment_schedule,
)
# Test 1
    # Manually create an appointment
appointment_1 = Appointment(
staff=hp_doctor_1,
patient=patient_1,
appointment_schedule=appointment_schedule
)
    # Print all appointments
appointment_schedule.print_appointment_list()
# Test 2
# Test creating appointment by receptionist
receptionist.make_appointment(
staff=hp_doctor_2,
patient=patient_2
)
    # Print all appointments
appointment_schedule.print_appointment_list()
# Test 3
# Cancel appointment by receptionist
receptionist.cancel_appointment(
appointment_id=get_random_appointment(appointment_schedule)
)
    # Print all appointments
appointment_schedule.print_appointment_list()
# Test 4
# Doctor issuing prescriptions
prescription_1 = doctor_1.issue_prescription(
_type='Type 1',
patient=patient_1,
quantity=30,
dosage=1.5
)
print("Doctor issuing prescriptions:")
pprint(prescription_1.__dict__)
print("\n")
# Test 5
    # Request a repeat prescription
print("Request repeat prescription: ")
repeated_prescription = patient_1.request_repeat(prescription_1)
if repeated_prescription is not None:
pprint({
"type": type(repeated_prescription),
"data": repeated_prescription.__dict__
})
print('\n')
# Test 6
    # Request a repeat of another patient's prescription
    print("Request a repeat of another patient's prescription:")
repeated_prescription = patient_2.request_repeat(prescription_1)
if repeated_prescription is not None:
pprint({
"type": type(repeated_prescription),
"data": repeated_prescription
})
print('\n')
# Test 7
# Patient request appointment
before = len(appointment_schedule.appointments)
patient_1.request_appointment(
staff=doctor_1
)
appointment_schedule.print_appointment_list()
after = len(appointment_schedule.appointments)
print(
"Before and after patient appointment request: {0} & {1}"
.format(before, after)
)
# Test 8
# After consultation, remove from appointment schedule
before = len(appointment_schedule.appointments)
doctor_1.consultation(get_random_appointment(appointment_schedule))
after = len(appointment_schedule.appointments)
print("Before and after consultation: {0} & {1}".format(
before, after
))
# Final list of all appointments
appointment_schedule.print_appointment_list()
|
#!/usr/bin/env python
"""
Network server to receive GfxTablet events based
on version 1 of this network tablet protocol.
https://github.com/rfc2822/GfxTablet/blob/master/doc/protocol.txt
Version 1
---------
GfxTablet app sends UDP packets to port 40118 of the destination host.
Packet structure, uses network byte order (big endian):
9 bytes "GfxTablet"
2 bytes version number
1 byte event:
0 motion (hovering)
1 control (finger, pen etc. touches surface)
2 bytes x (using full range: 0..65535)
2 bytes y (using full range: 0..65535)
2 bytes pressure (using full range 0..65535, 32768 == pressure 1.0f on Android device)
when type == control event:
1 byte number of control, starting with 0
1 byte control status:
0 control is off
1 control is active
Comments:
- use netcat to test server `nc -u 127.0.0.1 40118`
"""
__author__ = "anatoly techtonik <techtonik@gmail.com>"
__license__ = "MIT/Public Domain/CC0"
__version__ = "1.0.beta2"
# --- python helpers ---
def _none(msg):
pass
def _printer(msg):
print(msg)
# debugging helper, which can be turned off with
# echo = _none
echo = _printer
# --- communicating.. networking.. ---
import socket
IP4 = socket.AF_INET
UDP = socket.SOCK_DGRAM
def getmyips():
"""Return all IP addresses that belong to current machine
[x] Windows returns only real LAN IP, no 127.x.x.x
[ ] Linux, [ ] OS X - unknown
"""
return socket.gethostbyname_ex(socket.gethostname())[2]
class UDPSocketStream(object):
""" Convert network socket endpoint to a readable stream object """
def __init__(self, host='0.0.0.0', port=40118):
# reading from socket blocks keyboard input, so CtrlC/CtrlBreak
# may not work until read operation completes
sock = socket.socket(IP4, UDP)
#sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
self.sock = sock
def read(self, size):
return self.sock.recvfrom(size)[0]
def close(self):
try:
self.sock.shutdown(socket.SHUT_RDWR)
except:
pass
self.sock.close()
# --- /networking ---
# --- packet processing ---
import ctypes
CHAR = ctypes.c_ubyte
BYTE = ctypes.c_ubyte
WORD = ctypes.c_ushort # two bytes
ENUM = ctypes.c_ubyte # one byte, fixed set of values
INT = ctypes.c_ushort # two bytes, integer value
class Packet(ctypes.BigEndianStructure):
_pack_ = 1
_fields_ = [
# 9 bytes "GfxTablet"
('magic', CHAR*9), # string(9), [ ] ability to output .value
# 2 bytes version number
('version', WORD), # word
# [ ] ability to output as hex
# [ ] to int
# [ ] to version tuple
# 1 byte event:
# 0 motion event (hovering)
# 1 control event (finger, pen etc. touches surface)
        ('event', ENUM),       # byte
# [ ] to type string
# [ ] to some corresponding object
('x', INT), # WORD, using full range: 0..65535
('y', INT), # WORD, using full range: 0..65535
('pressure', INT), # WORD, full range 0..65535,
# 32768 == pressure 1.0f on Android device
# when event == control event,
('control', BYTE), # number of control, starting with 0
('state', BYTE), # control status - 0 off, 1 active
]
def parse(self, bdata):
fit = min(len(bdata), ctypes.sizeof(self))
ctypes.memmove(ctypes.addressof(self), bdata, fit)
def __len__(self):
return ctypes.sizeof(self)
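# Minimal layout check (a sketch, not part of the original server): the
# structure above is 20 bytes and matches the big-endian struct format
# '>9sHBHHHBB' from the protocol description.
def _packet_selftest():
    import struct
    raw = struct.pack('>9sHBHHHBB', b'GfxTablet', 1, 1, 100, 200, 32768, 0, 1)
    pkt = Packet()
    pkt.parse(raw)
    assert ctypes.sizeof(pkt) == 20
    assert (pkt.event, pkt.x, pkt.y, pkt.pressure) == (1, 100, 200, 32768)
    assert (pkt.control, pkt.state) == (0, 1)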
class Processor(object):
def __init__(self):
self.count = 0
self.packet = Packet()
def process(self, b): # b is a binary string
self.count += 1
        if not b.startswith(b'GfxTablet'):  # bytes prefix so the check works on Python 3
echo('#%3s (discarded) invalid signature' % self.count)
return
# packet accepted
msg1 = '#%3s (got %s bytes)' % (self.count, len(b))
#debug echo(msg1)
#debug import hexdump; hexdump.hexdump(b)
self.packet.parse(b)
if self.packet.version != 1:
echo(' warn: only version 1 of protocol is supported')
msg2 = 'event:%s x,y:%s,%s pressure:%s' % (self.packet.event,
self.packet.x, self.packet.y, self.packet.pressure)
if len(b) == len(self.packet):
state = 'active' if self.packet.state else 'inactive'
msg2 += ' control:%s %s' % (self.packet.control, state)
echo(msg1 + ' ' + msg2)
return self.packet
# --- /parsing ---
# Get last IP from all available
ip = getmyips()[-1]
print('GfxTablet Server IP: %s' % ip)
s = UDPSocketStream()
p = Processor()
try:
import autopy
except ImportError:
autopy = None
print('..autopy is not installed, mouse control is disabled')
while True:
    res = p.process(s.read(1024))
    # process() returns None for discarded packets, so guard before using res
    if autopy and res:
        # normalize tablet coordinates (full range 0..65535) to screen size
        width, height = autopy.screen.get_size()
        x, y = res.x * width // 65536, res.y * height // 65536
        #print(x, y)
        autopy.mouse.move(x, y)
        if res.event == 1 and res.state == 1:
            autopy.mouse.click()
|
import json
from ..tobject import TObject
class Base(TObject):
@property
def as_json(self):
return json.dumps(self.__dict__)
    @classmethod
    def from_json(cls, raw):
        # parse the JSON string and return the constructed instance
        return cls(json.loads(raw))
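# Hypothetical round-trip (assumes TObject can be constructed from the parsed
# JSON payload):
#   raw = some_base.as_json
#   clone = Base.from_json(raw)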
|
#Import Packages
import tkinter
import random
from random import randint
from tkinter import Button
import matplotlib.pyplot as plt
import numpy as np
#Interface layout
root = tkinter.Tk()
root.title('Basic GUI for Machines 2')
root.geometry("400x200")
correct_result = 0
correct_answers = 0
total_questions = 0
incorrect_answer = 0
#evaluate the result
def evaluate(event):
global correct_result
global user_input
user_input_given = user_input.get()
if str(user_input_given) == str(correct_result):
global correct_answers
correct_answers += 1
nextQuestion()
else:
global incorrect_answer
incorrect_answer += 1
        result = tkinter.Label(root, text="Hard Luck!!\nThe correct answer is : "+str(correct_result), font=('Helvetica', 10))
result.pack()
nextQuestion()
root.after(1500, result.destroy)
def nextQuestion():
user_input.focus_set()
user_input.delete(0, tkinter.END)
global first_num
first_num = randint(1, 15)
global second_num
second_num = randint(1, 15)
global character
character = random.choice('+-*')
global correct_result
if character == '*':
correct_result = first_num*second_num
if character == '+':
correct_result = first_num+second_num
if character == '-':
correct_result = first_num-second_num
text="Enter the value of "+str(first_num)+" "+character+" "+str(second_num)
global total_questions
total_questions += 1
user_question.config(text=text)
user_question.pack()
def exitThis():
print("Total Questions attended : "+str(total_questions))
print("Total Correct Answers : "+str(correct_answers))
print("Total Incorrect Answers : "+str(incorrect_answer))
root.destroy()
first_num = randint(1,15)
second_num = randint(1,15)
character = random.choice("+-*")
if character == '*':
correct_result = first_num*second_num
if character == '+':
correct_result = first_num+second_num
if character == '-':
correct_result = first_num-second_num
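# Possible refactor sketch (not wired in): the operator chain above and the one
# in nextQuestion() could share a single lookup table, e.g.:
def compute_result(a, op, b):
    return {'+': a + b, '-': a - b, '*': a * b}[op]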
user_question = tkinter.Label(root, text="Enter the value of "+str(first_num)+" "+character+" "+str(second_num), font=('Helvetica', 10))
user_question.pack()
user_input = tkinter.Entry(root)
root.bind('<Return>',evaluate)
user_input.pack()
user_input.focus_set()
exitButton = Button(root, text="EXIT and Check Result", command=exitThis)
exitButton.pack(side="top", expand=True, padx=4, pady=4)
root.mainloop()
#Plotting the bar graph
plt.figure(0)
objects = ('Total Number of Questions','Correct Answers','Incorrect answers')
y_pos = np.arange(len(objects))
stats = [total_questions,correct_answers,incorrect_answer]
plt.bar(y_pos, stats, align='center', alpha=0.5)
plt.xticks(y_pos, objects)
plt.ylabel('Numbers')
plt.title('Your Result!')
#Plotting the pie chart
if str(total_questions) != "0":
plt.figure(1)
labels = 'Correct Answers','Incorrect answers'
sizes = [correct_answers,incorrect_answer]
colors = ['green', 'red']
explode = (0.1, 0) # explode 1st slice
plt.pie(sizes, explode=explode, labels=labels, colors=colors,
autopct='%1.1f%%', shadow=True, startangle=140)
plt.axis('equal')
#Show both the graphs
plt.show()
|
#!/usr/bin/env python
'''
given tumour and normal vcf pairs, explore msi status
'''
import argparse
import csv
import logging
import sys
def main(vep_header):
logging.info('starting...')
vep_fields = vep_header.split('|')
header = None
writer = csv.writer(sys.stdout, delimiter='\t')
for row_count, row in enumerate(csv.reader(sys.stdin, delimiter='\t')):
if header is None:
header = row
csq = header.index('CSQ')
vep_fields = ['vep_{}'.format(x) for x in vep_fields]
logging.debug('%i vep columns', len(vep_fields))
            new_header = header[:csq] + vep_fields + header[csq+1:]  # replace the CSQ column itself
writer.writerow(new_header)
logging.debug('new header has %i columns', len(new_header))
continue
for tx in row[csq].split(','):
vep_cols = tx.split('|')
if vep_cols[-1] == '1':
break
            new_row = row[:csq] + vep_cols + row[csq+1:]
writer.writerow(new_row)
if row_count % 10000 == 0:
logging.info('%i records read...', row_count)
logging.info('done')
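# Worked example with hypothetical columns: given --header 'Gene|Consequence'
# and an input header of CHROM, POS, CSQ, FILTER, the output header becomes
# CHROM, POS, vep_Gene, vep_Consequence, FILTER; each data row is emitted once
# per comma-separated transcript in its CSQ field.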
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Assess MSI')
parser.add_argument('--header', required=True, help='vep header')
parser.add_argument('--verbose', action='store_true', help='more logging')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG)
else:
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
main(args.header)
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: lift_zone.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from biostarPython.service import zone_pb2 as zone__pb2
from biostarPython.service import action_pb2 as action__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='lift_zone.proto',
package='lift_zone',
syntax='proto3',
serialized_options=_b('\n\034com.supremainc.sdk.lift_zoneP\001Z\030biostar/service/liftZone'),
serialized_pb=_b('\n\x0flift_zone.proto\x12\tlift_zone\x1a\nzone.proto\x1a\x0c\x61\x63tion.proto\",\n\x04Lift\x12\x0e\n\x06liftID\x18\x01 \x01(\r\x12\x14\n\x0c\x66loorIndexes\x18\x03 \x03(\r\"\xf6\x01\n\x08ZoneInfo\x12\x0e\n\x06zoneID\x18\x01 \x01(\r\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1a\n\x12\x61\x63tivateScheduleID\x18\x03 \x01(\r\x12\x1c\n\x14\x64\x65\x61\x63tivateScheduleID\x18\x04 \x01(\r\x12\x10\n\x08\x64isabled\x18\x05 \x01(\x08\x12\x0f\n\x07\x61larmed\x18\x06 \x01(\x08\x12\x1e\n\x05lifts\x18\x07 \x03(\x0b\x32\x0f.lift_zone.Lift\x12\x1f\n\x07\x61\x63tions\x18\x08 \x03(\x0b\x32\x0e.action.Action\x12\x16\n\x0e\x62ypassGroupIDs\x18\t \x03(\r\x12\x16\n\x0eunlockGroupIDs\x18\n \x03(\r\"\x1e\n\nGetRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\"1\n\x0bGetResponse\x12\"\n\x05zones\x18\x01 \x03(\x0b\x32\x13.lift_zone.ZoneInfo\"5\n\x10GetStatusRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\x12\x0f\n\x07zoneIDs\x18\x02 \x03(\r\"5\n\x11GetStatusResponse\x12 \n\x06status\x18\x01 \x03(\x0b\x32\x10.zone.ZoneStatus\"B\n\nAddRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\x12\"\n\x05zones\x18\x02 \x03(\x0b\x32\x13.lift_zone.ZoneInfo\"\r\n\x0b\x41\x64\x64Response\"2\n\rDeleteRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\x12\x0f\n\x07zoneIDs\x18\x02 \x03(\r\"\x10\n\x0e\x44\x65leteResponse\"$\n\x10\x44\x65leteAllRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\"\x13\n\x11\x44\x65leteAllResponse\"E\n\x0fSetAlarmRequest\x12\x10\n\x08\x64\x65viceID\x18\x01 \x01(\r\x12\x0f\n\x07zoneIDs\x18\x02 \x03(\r\x12\x0f\n\x07\x61larmed\x18\x03 \x01(\x08\"\x12\n\x10SetAlarmResponse2\x8a\x03\n\x08LiftZone\x12\x34\n\x03Get\x12\x15.lift_zone.GetRequest\x1a\x16.lift_zone.GetResponse\x12\x46\n\tGetStatus\x12\x1b.lift_zone.GetStatusRequest\x1a\x1c.lift_zone.GetStatusResponse\x12\x34\n\x03\x41\x64\x64\x12\x15.lift_zone.AddRequest\x1a\x16.lift_zone.AddResponse\x12=\n\x06\x44\x65lete\x12\x18.lift_zone.DeleteRequest\x1a\x19.lift_zone.DeleteResponse\x12\x46\n\tDeleteAll\x12\x1b.lift_zone.DeleteAllRequest\x1a\x1c.lift_zone.DeleteAllResponse\x12\x43\n\x08SetAlarm\x12\x1a.lift_zone.SetAlarmRequest\x1a\x1b.lift_zone.SetAlarmResponseB:\n\x1c\x63om.supremainc.sdk.lift_zoneP\x01Z\x18\x62iostar/service/liftZoneb\x06proto3')
,
dependencies=[zone__pb2.DESCRIPTOR,action__pb2.DESCRIPTOR,])
_LIFT = _descriptor.Descriptor(
name='Lift',
full_name='lift_zone.Lift',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='liftID', full_name='lift_zone.Lift.liftID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='floorIndexes', full_name='lift_zone.Lift.floorIndexes', index=1,
number=3, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=56,
serialized_end=100,
)
_ZONEINFO = _descriptor.Descriptor(
name='ZoneInfo',
full_name='lift_zone.ZoneInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='zoneID', full_name='lift_zone.ZoneInfo.zoneID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='lift_zone.ZoneInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='activateScheduleID', full_name='lift_zone.ZoneInfo.activateScheduleID', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deactivateScheduleID', full_name='lift_zone.ZoneInfo.deactivateScheduleID', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='disabled', full_name='lift_zone.ZoneInfo.disabled', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alarmed', full_name='lift_zone.ZoneInfo.alarmed', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lifts', full_name='lift_zone.ZoneInfo.lifts', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actions', full_name='lift_zone.ZoneInfo.actions', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bypassGroupIDs', full_name='lift_zone.ZoneInfo.bypassGroupIDs', index=8,
number=9, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='unlockGroupIDs', full_name='lift_zone.ZoneInfo.unlockGroupIDs', index=9,
number=10, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=103,
serialized_end=349,
)
_GETREQUEST = _descriptor.Descriptor(
name='GetRequest',
full_name='lift_zone.GetRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.GetRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=351,
serialized_end=381,
)
_GETRESPONSE = _descriptor.Descriptor(
name='GetResponse',
full_name='lift_zone.GetResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='zones', full_name='lift_zone.GetResponse.zones', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=383,
serialized_end=432,
)
_GETSTATUSREQUEST = _descriptor.Descriptor(
name='GetStatusRequest',
full_name='lift_zone.GetStatusRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.GetStatusRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zoneIDs', full_name='lift_zone.GetStatusRequest.zoneIDs', index=1,
number=2, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=434,
serialized_end=487,
)
_GETSTATUSRESPONSE = _descriptor.Descriptor(
name='GetStatusResponse',
full_name='lift_zone.GetStatusResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='lift_zone.GetStatusResponse.status', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=489,
serialized_end=542,
)
_ADDREQUEST = _descriptor.Descriptor(
name='AddRequest',
full_name='lift_zone.AddRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.AddRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zones', full_name='lift_zone.AddRequest.zones', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=544,
serialized_end=610,
)
_ADDRESPONSE = _descriptor.Descriptor(
name='AddResponse',
full_name='lift_zone.AddResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=612,
serialized_end=625,
)
_DELETEREQUEST = _descriptor.Descriptor(
name='DeleteRequest',
full_name='lift_zone.DeleteRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.DeleteRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zoneIDs', full_name='lift_zone.DeleteRequest.zoneIDs', index=1,
number=2, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=627,
serialized_end=677,
)
_DELETERESPONSE = _descriptor.Descriptor(
name='DeleteResponse',
full_name='lift_zone.DeleteResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=679,
serialized_end=695,
)
_DELETEALLREQUEST = _descriptor.Descriptor(
name='DeleteAllRequest',
full_name='lift_zone.DeleteAllRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.DeleteAllRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=697,
serialized_end=733,
)
_DELETEALLRESPONSE = _descriptor.Descriptor(
name='DeleteAllResponse',
full_name='lift_zone.DeleteAllResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=735,
serialized_end=754,
)
_SETALARMREQUEST = _descriptor.Descriptor(
name='SetAlarmRequest',
full_name='lift_zone.SetAlarmRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deviceID', full_name='lift_zone.SetAlarmRequest.deviceID', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='zoneIDs', full_name='lift_zone.SetAlarmRequest.zoneIDs', index=1,
number=2, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alarmed', full_name='lift_zone.SetAlarmRequest.alarmed', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=756,
serialized_end=825,
)
_SETALARMRESPONSE = _descriptor.Descriptor(
name='SetAlarmResponse',
full_name='lift_zone.SetAlarmResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=827,
serialized_end=845,
)
_ZONEINFO.fields_by_name['lifts'].message_type = _LIFT
_ZONEINFO.fields_by_name['actions'].message_type = action__pb2._ACTION
_GETRESPONSE.fields_by_name['zones'].message_type = _ZONEINFO
_GETSTATUSRESPONSE.fields_by_name['status'].message_type = zone__pb2._ZONESTATUS
_ADDREQUEST.fields_by_name['zones'].message_type = _ZONEINFO
DESCRIPTOR.message_types_by_name['Lift'] = _LIFT
DESCRIPTOR.message_types_by_name['ZoneInfo'] = _ZONEINFO
DESCRIPTOR.message_types_by_name['GetRequest'] = _GETREQUEST
DESCRIPTOR.message_types_by_name['GetResponse'] = _GETRESPONSE
DESCRIPTOR.message_types_by_name['GetStatusRequest'] = _GETSTATUSREQUEST
DESCRIPTOR.message_types_by_name['GetStatusResponse'] = _GETSTATUSRESPONSE
DESCRIPTOR.message_types_by_name['AddRequest'] = _ADDREQUEST
DESCRIPTOR.message_types_by_name['AddResponse'] = _ADDRESPONSE
DESCRIPTOR.message_types_by_name['DeleteRequest'] = _DELETEREQUEST
DESCRIPTOR.message_types_by_name['DeleteResponse'] = _DELETERESPONSE
DESCRIPTOR.message_types_by_name['DeleteAllRequest'] = _DELETEALLREQUEST
DESCRIPTOR.message_types_by_name['DeleteAllResponse'] = _DELETEALLRESPONSE
DESCRIPTOR.message_types_by_name['SetAlarmRequest'] = _SETALARMREQUEST
DESCRIPTOR.message_types_by_name['SetAlarmResponse'] = _SETALARMRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Lift = _reflection.GeneratedProtocolMessageType('Lift', (_message.Message,), dict(
DESCRIPTOR = _LIFT,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.Lift)
))
_sym_db.RegisterMessage(Lift)
ZoneInfo = _reflection.GeneratedProtocolMessageType('ZoneInfo', (_message.Message,), dict(
DESCRIPTOR = _ZONEINFO,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.ZoneInfo)
))
_sym_db.RegisterMessage(ZoneInfo)
GetRequest = _reflection.GeneratedProtocolMessageType('GetRequest', (_message.Message,), dict(
DESCRIPTOR = _GETREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.GetRequest)
))
_sym_db.RegisterMessage(GetRequest)
GetResponse = _reflection.GeneratedProtocolMessageType('GetResponse', (_message.Message,), dict(
DESCRIPTOR = _GETRESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.GetResponse)
))
_sym_db.RegisterMessage(GetResponse)
GetStatusRequest = _reflection.GeneratedProtocolMessageType('GetStatusRequest', (_message.Message,), dict(
DESCRIPTOR = _GETSTATUSREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.GetStatusRequest)
))
_sym_db.RegisterMessage(GetStatusRequest)
GetStatusResponse = _reflection.GeneratedProtocolMessageType('GetStatusResponse', (_message.Message,), dict(
DESCRIPTOR = _GETSTATUSRESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.GetStatusResponse)
))
_sym_db.RegisterMessage(GetStatusResponse)
AddRequest = _reflection.GeneratedProtocolMessageType('AddRequest', (_message.Message,), dict(
DESCRIPTOR = _ADDREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.AddRequest)
))
_sym_db.RegisterMessage(AddRequest)
AddResponse = _reflection.GeneratedProtocolMessageType('AddResponse', (_message.Message,), dict(
DESCRIPTOR = _ADDRESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.AddResponse)
))
_sym_db.RegisterMessage(AddResponse)
DeleteRequest = _reflection.GeneratedProtocolMessageType('DeleteRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETEREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.DeleteRequest)
))
_sym_db.RegisterMessage(DeleteRequest)
DeleteResponse = _reflection.GeneratedProtocolMessageType('DeleteResponse', (_message.Message,), dict(
DESCRIPTOR = _DELETERESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.DeleteResponse)
))
_sym_db.RegisterMessage(DeleteResponse)
DeleteAllRequest = _reflection.GeneratedProtocolMessageType('DeleteAllRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETEALLREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.DeleteAllRequest)
))
_sym_db.RegisterMessage(DeleteAllRequest)
DeleteAllResponse = _reflection.GeneratedProtocolMessageType('DeleteAllResponse', (_message.Message,), dict(
DESCRIPTOR = _DELETEALLRESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.DeleteAllResponse)
))
_sym_db.RegisterMessage(DeleteAllResponse)
SetAlarmRequest = _reflection.GeneratedProtocolMessageType('SetAlarmRequest', (_message.Message,), dict(
DESCRIPTOR = _SETALARMREQUEST,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.SetAlarmRequest)
))
_sym_db.RegisterMessage(SetAlarmRequest)
SetAlarmResponse = _reflection.GeneratedProtocolMessageType('SetAlarmResponse', (_message.Message,), dict(
DESCRIPTOR = _SETALARMRESPONSE,
__module__ = 'lift_zone_pb2'
# @@protoc_insertion_point(class_scope:lift_zone.SetAlarmResponse)
))
_sym_db.RegisterMessage(SetAlarmResponse)
DESCRIPTOR._options = None
_LIFTZONE = _descriptor.ServiceDescriptor(
name='LiftZone',
full_name='lift_zone.LiftZone',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=848,
serialized_end=1242,
methods=[
_descriptor.MethodDescriptor(
name='Get',
full_name='lift_zone.LiftZone.Get',
index=0,
containing_service=None,
input_type=_GETREQUEST,
output_type=_GETRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='GetStatus',
full_name='lift_zone.LiftZone.GetStatus',
index=1,
containing_service=None,
input_type=_GETSTATUSREQUEST,
output_type=_GETSTATUSRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='Add',
full_name='lift_zone.LiftZone.Add',
index=2,
containing_service=None,
input_type=_ADDREQUEST,
output_type=_ADDRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='Delete',
full_name='lift_zone.LiftZone.Delete',
index=3,
containing_service=None,
input_type=_DELETEREQUEST,
output_type=_DELETERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DeleteAll',
full_name='lift_zone.LiftZone.DeleteAll',
index=4,
containing_service=None,
input_type=_DELETEALLREQUEST,
output_type=_DELETEALLRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SetAlarm',
full_name='lift_zone.LiftZone.SetAlarm',
index=5,
containing_service=None,
input_type=_SETALARMREQUEST,
output_type=_SETALARMRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_LIFTZONE)
DESCRIPTOR.services_by_name['LiftZone'] = _LIFTZONE
# @@protoc_insertion_point(module_scope)
|
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestConstantPadNd(TestCase):
def op_exec_cpu(self, input1, pad_shape):
output = torch.constant_pad_nd(input1, pad_shape)
output = output.numpy()
return output
def op_exec_npu(self, input1, pad_shape):
input1 = input1.to("npu")
output = torch.constant_pad_nd(input1, pad_shape)
output = output.to("cpu")
output = output.numpy()
return output
def test_constant_pad_nd_shape_format(self, device):
shape_format = [
[[np.float32, 3, (25, 32, 1, 1)], (1,1)],
[[np.float32, 0, [25, 32, 11, 11]], (2,2,2,2)],
[[np.float32, 0, [25, 3, 22, 22]],(2,2,2,2,20,20)],
[[np.float16, 3, [25, 12, 7, 7]], (20,20,20,20)],
[[np.float16, 0, [25, 3, 22, 22]], (20,20,20,20,5,5,5,5)],
[[np.float16, 4, (2, 3, 3, 3)], (1,1,1,20,5,5,5,5)],
[[np.float16, 4, [100, 20, 7, 7]], (0,0,0,0,0,0,0,0)],
[[np.float16, 0, [2,3,4,5]], (1,0,1,0,1,0,1,0)],
[[np.float16, 4, [2]],(0,1)],
[[np.float16, 0, [20,20]],(0,1,0,2)],
[[np.float16, 0, [20,20,20]],(1,1,1,1) ],
[[np.float16, 3, [1,1,1,1]], (1,1)],
[[np.float16, 3, [1]], (1,1)],
[[np.float16, 0, [50, 24, 56, 56]], (100, 100, 100, 100, 100, 100, 100, 100)],
]
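        # Pad tuples follow torch.constant_pad_nd's convention: (left, right)
        # pairs starting from the last dimension, so (1, 1) pads dim -1 and
        # (2, 2, 2, 2) pads dims -1 and -2.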
for item in shape_format:
input_cpu, input_npu = create_common_tensor(item[0], 1, 1)
pad_shape = item[1]
cpu_output = self.op_exec_cpu(input_cpu, pad_shape)
npu_output = self.op_exec_npu(input_npu, pad_shape)
self.assertRtolEqual(cpu_output, npu_output)
instantiate_device_type_tests(TestConstantPadNd, globals(), except_for='cpu')
if __name__ == "__main__":
run_tests()
|
# Constants for console colors
W = '\033[0m' # white (normal)
R = '\033[1;41m' # bold red background
G = '\033[1;42m' # bold green background
O = '\033[1;33m' # orange
B = '\033[1;34m' # blue
P = '\033[1;35m' # purple
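# Usage example: print(R + ' FAIL ' + W) prints the text on a red background,
# then resets the console to normal.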
|
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
import math
import warnings
import weakref
import numpy as np
from jax import lax, ops, tree_flatten, tree_map, vmap
from jax.dtypes import canonicalize_dtype
from jax.flatten_util import ravel_pytree
from jax.nn import softplus
import jax.numpy as jnp
from jax.scipy.linalg import solve_triangular
from jax.scipy.special import expit, logit
from numpyro.distributions import constraints
from numpyro.distributions.util import (
get_dtype,
matrix_to_tril_vec,
signed_stick_breaking_tril,
sum_rightmost,
vec_to_tril_matrix
)
from numpyro.util import not_jax_tracer
__all__ = [
'biject_to',
'AbsTransform',
'AffineTransform',
'ComposeTransform',
'CorrCholeskyTransform',
'ExpTransform',
'IdentityTransform',
'InvCholeskyTransform',
'LowerCholeskyTransform',
'LowerCholeskyAffine',
'PermuteTransform',
'PowerTransform',
'SigmoidTransform',
'StickBreakingTransform',
'Transform',
'UnpackTransform',
]
def _clipped_expit(x):
finfo = jnp.finfo(get_dtype(x))
return jnp.clip(expit(x), a_min=finfo.tiny, a_max=1. - finfo.eps)
class Transform(object):
domain = constraints.real
codomain = constraints.real
_inv = None
@property
def event_dim(self):
warnings.warn("transform.event_dim is deprecated. Please use Transform.domain.event_dim to "
"get input event dim or Transform.codomain.event_dim to get output event dim.",
FutureWarning)
return self.domain.event_dim
@property
def inv(self):
inv = None
if self._inv is not None:
inv = self._inv()
if inv is None:
inv = _InverseTransform(self)
self._inv = weakref.ref(inv)
return inv
    def __call__(self, x):
        raise NotImplementedError
def _inverse(self, y):
raise NotImplementedError
def log_abs_det_jacobian(self, x, y, intermediates=None):
raise NotImplementedError
def call_with_intermediates(self, x):
return self(x), None
def forward_shape(self, shape):
"""
Infers the shape of the forward computation, given the input shape.
Defaults to preserving shape.
"""
return shape
def inverse_shape(self, shape):
"""
Infers the shapes of the inverse computation, given the output shape.
Defaults to preserving shape.
"""
return shape
class _InverseTransform(Transform):
def __init__(self, transform):
super().__init__()
self._inv = transform
@property
def domain(self):
return self._inv.codomain
@property
def codomain(self):
return self._inv.domain
@property
def inv(self):
return self._inv
def __call__(self, x):
return self._inv._inverse(x)
def log_abs_det_jacobian(self, x, y, intermediates=None):
# NB: we don't use intermediates for inverse transform
return -self._inv.log_abs_det_jacobian(y, x, None)
def forward_shape(self, shape):
return self._inv.inverse_shape(shape)
def inverse_shape(self, shape):
return self._inv.forward_shape(shape)
class AbsTransform(Transform):
domain = constraints.real
codomain = constraints.positive
def __eq__(self, other):
return isinstance(other, AbsTransform)
def __call__(self, x):
return jnp.abs(x)
def _inverse(self, y):
return y
class AffineTransform(Transform):
"""
.. note:: When `scale` is a JAX tracer, we always assume that `scale > 0`
when calculating `codomain`.
"""
def __init__(self, loc, scale, domain=constraints.real):
self.loc = loc
self.scale = scale
self.domain = domain
@property
def codomain(self):
if self.domain is constraints.real:
return constraints.real
elif isinstance(self.domain, constraints.greater_than):
if not_jax_tracer(self.scale) and np.all(np.less(self.scale, 0)):
return constraints.less_than(self(self.domain.lower_bound))
# we suppose scale > 0 for any tracer
else:
return constraints.greater_than(self(self.domain.lower_bound))
elif isinstance(self.domain, constraints.less_than):
if not_jax_tracer(self.scale) and np.all(np.less(self.scale, 0)):
return constraints.greater_than(self(self.domain.upper_bound))
# we suppose scale > 0 for any tracer
else:
return constraints.less_than(self(self.domain.upper_bound))
elif isinstance(self.domain, constraints.interval):
if not_jax_tracer(self.scale) and np.all(np.less(self.scale, 0)):
return constraints.interval(self(self.domain.upper_bound),
self(self.domain.lower_bound))
else:
return constraints.interval(self(self.domain.lower_bound),
self(self.domain.upper_bound))
else:
raise NotImplementedError
def __call__(self, x):
return self.loc + self.scale * x
def _inverse(self, y):
return (y - self.loc) / self.scale
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.broadcast_to(jnp.log(jnp.abs(self.scale)), jnp.shape(x))
def forward_shape(self, shape):
return lax.broadcast_shapes(shape,
getattr(self.loc, "shape", ()),
getattr(self.scale, "shape", ()))
def inverse_shape(self, shape):
return lax.broadcast_shapes(shape,
getattr(self.loc, "shape", ()),
getattr(self.scale, "shape", ()))
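# Small usage sketch (illustration, not part of the library API): the affine
# map is x -> loc + scale * x, so log|det J| is log|scale| broadcast over x.
def _affine_transform_demo():
    t = AffineTransform(2., 3.)
    x = jnp.array([0., 1.])
    y = t(x)  # [2., 5.]
    assert jnp.allclose(t._inverse(y), x)
    assert jnp.allclose(t.log_abs_det_jacobian(x, y), jnp.log(3.))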
def _get_compose_transform_input_event_dim(parts):
input_event_dim = parts[-1].domain.event_dim
for part in parts[len(parts) - 1::-1]:
input_event_dim = part.domain.event_dim + max(input_event_dim - part.codomain.event_dim, 0)
return input_event_dim
def _get_compose_transform_output_event_dim(parts):
output_event_dim = parts[0].codomain.event_dim
for part in parts[1:]:
output_event_dim = part.codomain.event_dim + max(output_event_dim - part.domain.event_dim, 0)
return output_event_dim
class ComposeTransform(Transform):
def __init__(self, parts):
self.parts = parts
@property
def domain(self):
input_event_dim = _get_compose_transform_input_event_dim(self.parts)
first_input_event_dim = self.parts[0].domain.event_dim
assert input_event_dim >= first_input_event_dim
if input_event_dim == first_input_event_dim:
return self.parts[0].domain
else:
return constraints.independent(self.parts[0].domain, input_event_dim - first_input_event_dim)
@property
def codomain(self):
output_event_dim = _get_compose_transform_output_event_dim(self.parts)
last_output_event_dim = self.parts[-1].codomain.event_dim
assert output_event_dim >= last_output_event_dim
if output_event_dim == last_output_event_dim:
return self.parts[-1].codomain
else:
return constraints.independent(self.parts[-1].codomain, output_event_dim - last_output_event_dim)
def __call__(self, x):
for part in self.parts:
x = part(x)
return x
def _inverse(self, y):
for part in self.parts[::-1]:
y = part.inv(y)
return y
def log_abs_det_jacobian(self, x, y, intermediates=None):
if intermediates is not None:
if len(intermediates) != len(self.parts):
raise ValueError('Intermediates array has length = {}. Expected = {}.'
.format(len(intermediates), len(self.parts)))
result = 0.
input_event_dim = self.domain.event_dim
for i, part in enumerate(self.parts[:-1]):
y_tmp = part(x) if intermediates is None else intermediates[i][0]
inter = None if intermediates is None else intermediates[i][1]
logdet = part.log_abs_det_jacobian(x, y_tmp, intermediates=inter)
batch_ndim = input_event_dim - part.domain.event_dim
result = result + sum_rightmost(logdet, batch_ndim)
input_event_dim = part.codomain.event_dim + batch_ndim
x = y_tmp
        # account for the last transform, where y is available
inter = None if intermediates is None else intermediates[-1]
part = self.parts[-1]
logdet = part.log_abs_det_jacobian(x, y, intermediates=inter)
result = result + sum_rightmost(logdet, input_event_dim - part.domain.event_dim)
return result
def call_with_intermediates(self, x):
intermediates = []
for part in self.parts[:-1]:
x, inter = part.call_with_intermediates(x)
intermediates.append([x, inter])
# NB: we don't need to hold the last output value in `intermediates`
x, inter = self.parts[-1].call_with_intermediates(x)
intermediates.append(inter)
return x, intermediates
def forward_shape(self, shape):
for part in self.parts:
shape = part.forward_shape(shape)
return shape
def inverse_shape(self, shape):
for part in reversed(self.parts):
shape = part.inverse_shape(shape)
return shape
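# Brief sketch (illustration only): parts apply left to right and the inverse
# runs right to left through each part's .inv.
def _compose_transform_demo():
    t = ComposeTransform([ExpTransform(), AffineTransform(1., 2.)])
    y = t(jnp.array(0.))  # 1 + 2 * exp(0) == 3
    assert jnp.allclose(y, 3.)
    assert jnp.allclose(t._inverse(y), 0.)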
def _matrix_forward_shape(shape, offset=0):
# Reshape from (..., N) to (..., D, D).
if len(shape) < 1:
raise ValueError("Too few dimensions in input")
N = shape[-1]
D = round((0.25 + 2 * N) ** 0.5 - 0.5)
if D * (D + 1) // 2 != N:
raise ValueError("Input is not a flattend lower-diagonal number")
D = D - offset
return shape[:-1] + (D, D)
def _matrix_inverse_shape(shape, offset=0):
# Reshape from (..., D, D) to (..., N).
if len(shape) < 2:
raise ValueError("Too few dimensions on input")
if shape[-2] != shape[-1]:
raise ValueError("Input is not square")
D = shape[-1] + offset
N = D * (D + 1) // 2
return shape[:-2] + (N,)
class CorrCholeskyTransform(Transform):
r"""
    Transforms an unconstrained real vector :math:`x` with length :math:`D*(D-1)/2` into the
    Cholesky factor of a D-dimensional correlation matrix. This Cholesky factor is a lower
triangular matrix with positive diagonals and unit Euclidean norm for each row.
The transform is processed as follows:
1. First we convert :math:`x` into a lower triangular matrix with the following order:
.. math::
\begin{bmatrix}
1 & 0 & 0 & 0 \\
x_0 & 1 & 0 & 0 \\
x_1 & x_2 & 1 & 0 \\
x_3 & x_4 & x_5 & 1
\end{bmatrix}
2. For each row :math:`X_i` of the lower triangular part, we apply a *signed* version of
class :class:`StickBreakingTransform` to transform :math:`X_i` into a
unit Euclidean length vector using the following steps:
a. Scales into the interval :math:`(-1, 1)` domain: :math:`r_i = \tanh(X_i)`.
b. Transforms into an unsigned domain: :math:`z_i = r_i^2`.
c. Applies :math:`s_i = StickBreakingTransform(z_i)`.
d. Transforms back into signed domain: :math:`y_i = (sign(r_i), 1) * \sqrt{s_i}`.
"""
domain = constraints.real_vector
codomain = constraints.corr_cholesky
def __call__(self, x):
# we interchange step 1 and step 2.a for a better performance
t = jnp.tanh(x)
return signed_stick_breaking_tril(t)
def _inverse(self, y):
# inverse stick-breaking
z1m_cumprod = 1 - jnp.cumsum(y * y, axis=-1)
pad_width = [(0, 0)] * y.ndim
pad_width[-1] = (1, 0)
z1m_cumprod_shifted = jnp.pad(z1m_cumprod[..., :-1], pad_width,
mode="constant", constant_values=1.)
t = matrix_to_tril_vec(y, diagonal=-1) / jnp.sqrt(
matrix_to_tril_vec(z1m_cumprod_shifted, diagonal=-1))
# inverse of tanh
x = jnp.log((1 + t) / (1 - t)) / 2
return x
def log_abs_det_jacobian(self, x, y, intermediates=None):
# NB: because domain and codomain are two spaces with different dimensions, determinant of
# Jacobian is not well-defined. Here we return `log_abs_det_jacobian` of `x` and the
# flatten lower triangular part of `y`.
# stick_breaking_logdet = log(y / r) = log(z_cumprod) (modulo right shifted)
z1m_cumprod = 1 - jnp.cumsum(y * y, axis=-1)
# by taking diagonal=-2, we don't need to shift z_cumprod to the right
# NB: diagonal=-2 works fine for (2 x 2) matrix, where we get an empty array
z1m_cumprod_tril = matrix_to_tril_vec(z1m_cumprod, diagonal=-2)
stick_breaking_logdet = 0.5 * jnp.sum(jnp.log(z1m_cumprod_tril), axis=-1)
tanh_logdet = -2 * jnp.sum(x + softplus(-2 * x) - jnp.log(2.), axis=-1)
return stick_breaking_logdet + tanh_logdet
def forward_shape(self, shape):
return _matrix_forward_shape(shape, offset=-1)
def inverse_shape(self, shape):
return _matrix_inverse_shape(shape, offset=-1)
class ExpTransform(Transform):
# TODO: refine domain/codomain logic through setters, especially when
# transforms for inverses are supported
def __init__(self, domain=constraints.real):
self.domain = domain
@property
def codomain(self):
if self.domain is constraints.real:
return constraints.positive
elif isinstance(self.domain, constraints.greater_than):
return constraints.greater_than(self.__call__(self.domain.lower_bound))
elif isinstance(self.domain, constraints.interval):
return constraints.interval(self.__call__(self.domain.lower_bound),
self.__call__(self.domain.upper_bound))
else:
raise NotImplementedError
def __call__(self, x):
# XXX consider to clamp from below for stability if necessary
return jnp.exp(x)
def _inverse(self, y):
return jnp.log(y)
def log_abs_det_jacobian(self, x, y, intermediates=None):
return x
class IdentityTransform(Transform):
def __call__(self, x):
return x
def _inverse(self, y):
return y
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.zeros_like(x)
class IndependentTransform(Transform):
"""
Wraps a transform by aggregating over ``reinterpreted_batch_ndims``-many
dims in :meth:`check`, so that an event is valid only if all its
independent entries are valid.
"""
def __init__(self, base_transform, reinterpreted_batch_ndims):
assert isinstance(base_transform, Transform)
assert isinstance(reinterpreted_batch_ndims, int)
assert reinterpreted_batch_ndims >= 0
self.base_transform = base_transform
self.reinterpreted_batch_ndims = reinterpreted_batch_ndims
super().__init__()
@property
def domain(self):
return constraints.independent(self.base_transform.domain, self.reinterpreted_batch_ndims)
@property
def codomain(self):
return constraints.independent(self.base_transform.codomain, self.reinterpreted_batch_ndims)
def __call__(self, x):
return self.base_transform(x)
def _inverse(self, y):
return self.base_transform._inverse(y)
def log_abs_det_jacobian(self, x, y, intermediates=None):
result = self.base_transform.log_abs_det_jacobian(x, y, intermediates=intermediates)
if jnp.ndim(result) < self.reinterpreted_batch_ndims:
expected = self.domain.event_dim
raise ValueError(f"Expected x.dim() >= {expected} but got {jnp.ndim(x)}")
return sum_rightmost(result, self.reinterpreted_batch_ndims)
def call_with_intermediates(self, x):
return self.base_transform.call_with_intermediates(x)
def forward_shape(self, shape):
return self.base_transform.forward_shape(shape)
def inverse_shape(self, shape):
return self.base_transform.inverse_shape(shape)
class InvCholeskyTransform(Transform):
r"""
Transform via the mapping :math:`y = x @ x.T`, where `x` is a lower
triangular matrix with positive diagonal.
"""
def __init__(self, domain=constraints.lower_cholesky):
assert domain in [constraints.lower_cholesky, constraints.corr_cholesky]
self.domain = domain
@property
def codomain(self):
if self.domain is constraints.lower_cholesky:
return constraints.positive_definite
elif self.domain is constraints.corr_cholesky:
return constraints.corr_matrix
def __call__(self, x):
return jnp.matmul(x, jnp.swapaxes(x, -2, -1))
def _inverse(self, y):
return jnp.linalg.cholesky(y)
def log_abs_det_jacobian(self, x, y, intermediates=None):
if self.domain is constraints.lower_cholesky:
# Ref: http://web.mit.edu/18.325/www/handouts/handout2.pdf page 13
n = jnp.shape(x)[-1]
order = jnp.arange(n, 0, -1)
return n * jnp.log(2) + jnp.sum(order * jnp.log(jnp.diagonal(x, axis1=-2, axis2=-1)), axis=-1)
else:
# NB: see derivation in LKJCholesky implementation
n = jnp.shape(x)[-1]
order = jnp.arange(n - 1, -1, -1)
return jnp.sum(order * jnp.log(jnp.diagonal(x, axis1=-2, axis2=-1)), axis=-1)
class LowerCholeskyAffine(Transform):
r"""
Transform via the mapping :math:`y = loc + scale\_tril\ @\ x`.
:param loc: a real vector.
:param scale_tril: a lower triangular matrix with positive diagonal.
"""
domain = constraints.real_vector
codomain = constraints.real_vector
def __init__(self, loc, scale_tril):
if jnp.ndim(scale_tril) != 2:
raise ValueError("Only support 2-dimensional scale_tril matrix. "
"Please make a feature request if you need to "
"use this transform with batched scale_tril.")
self.loc = loc
self.scale_tril = scale_tril
def __call__(self, x):
return self.loc + jnp.squeeze(jnp.matmul(self.scale_tril, x[..., jnp.newaxis]), axis=-1)
def _inverse(self, y):
y = y - self.loc
original_shape = jnp.shape(y)
yt = jnp.reshape(y, (-1, original_shape[-1])).T
xt = solve_triangular(self.scale_tril, yt, lower=True)
return jnp.reshape(xt.T, original_shape)
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.broadcast_to(jnp.log(jnp.diagonal(self.scale_tril, axis1=-2, axis2=-1)).sum(-1),
jnp.shape(x)[:-1])
def forward_shape(self, shape):
if len(shape) < 1:
raise ValueError("Too few dimensions on input")
return lax.broadcast_shapes(shape, self.loc.shape, self.scale_tril.shape[:-1])
def inverse_shape(self, shape):
if len(shape) < 1:
raise ValueError("Too few dimensions on input")
return lax.broadcast_shapes(shape, self.loc.shape, self.scale_tril.shape[:-1])
class LowerCholeskyTransform(Transform):
domain = constraints.real_vector
codomain = constraints.lower_cholesky
def __call__(self, x):
n = round((math.sqrt(1 + 8 * x.shape[-1]) - 1) / 2)
z = vec_to_tril_matrix(x[..., :-n], diagonal=-1)
diag = jnp.exp(x[..., -n:])
return z + jnp.expand_dims(diag, axis=-1) * jnp.identity(n)
def _inverse(self, y):
z = matrix_to_tril_vec(y, diagonal=-1)
return jnp.concatenate([z, jnp.log(jnp.diagonal(y, axis1=-2, axis2=-1))], axis=-1)
def log_abs_det_jacobian(self, x, y, intermediates=None):
# the jacobian is diagonal, so logdet is the sum of diagonal `exp` transform
n = round((math.sqrt(1 + 8 * x.shape[-1]) - 1) / 2)
return x[..., -n:].sum(-1)
def forward_shape(self, shape):
return _matrix_forward_shape(shape)
def inverse_shape(self, shape):
return _matrix_inverse_shape(shape)
class OrderedTransform(Transform):
"""
Transform a real vector to an ordered vector.
**References:**
1. *Stan Reference Manual v2.20, section 10.6*,
Stan Development Team
"""
domain = constraints.real_vector
codomain = constraints.ordered_vector
def __call__(self, x):
z = jnp.concatenate([x[..., :1], jnp.exp(x[..., 1:])], axis=-1)
return jnp.cumsum(z, axis=-1)
def _inverse(self, y):
x = jnp.log(y[..., 1:] - y[..., :-1])
return jnp.concatenate([y[..., :1], x], axis=-1)
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.sum(x[..., 1:], -1)
class PermuteTransform(Transform):
domain = constraints.real_vector
codomain = constraints.real_vector
def __init__(self, permutation):
self.permutation = permutation
def __call__(self, x):
return x[..., self.permutation]
def _inverse(self, y):
size = self.permutation.size
permutation_inv = ops.index_update(jnp.zeros(size, dtype=canonicalize_dtype(jnp.int64)),
self.permutation,
jnp.arange(size))
return y[..., permutation_inv]
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.full(jnp.shape(x)[:-1], 0.)
class PowerTransform(Transform):
domain = constraints.positive
codomain = constraints.positive
def __init__(self, exponent):
self.exponent = exponent
def __call__(self, x):
return jnp.power(x, self.exponent)
def _inverse(self, y):
return jnp.power(y, 1 / self.exponent)
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.log(jnp.abs(self.exponent * y / x))
def forward_shape(self, shape):
return lax.broadcast_shapes(shape, getattr(self.exponent, "shape", ()))
def inverse_shape(self, shape):
return lax.broadcast_shapes(shape, getattr(self.exponent, "shape", ()))
class SigmoidTransform(Transform):
codomain = constraints.unit_interval
def __call__(self, x):
return _clipped_expit(x)
def _inverse(self, y):
return logit(y)
def log_abs_det_jacobian(self, x, y, intermediates=None):
x_abs = jnp.abs(x)
return -x_abs - 2 * jnp.log1p(jnp.exp(-x_abs))
class StickBreakingTransform(Transform):
domain = constraints.real_vector
codomain = constraints.simplex
def __call__(self, x):
# we shift x to obtain a balanced mapping (0, 0, ..., 0) -> (1/K, 1/K, ..., 1/K)
x = x - jnp.log(x.shape[-1] - jnp.arange(x.shape[-1]))
# convert to probabilities (relative to the remaining) of each fraction of the stick
z = _clipped_expit(x)
z1m_cumprod = jnp.cumprod(1 - z, axis=-1)
pad_width = [(0, 0)] * x.ndim
pad_width[-1] = (0, 1)
z_padded = jnp.pad(z, pad_width, mode="constant", constant_values=1.)
pad_width = [(0, 0)] * x.ndim
pad_width[-1] = (1, 0)
z1m_cumprod_shifted = jnp.pad(z1m_cumprod, pad_width, mode="constant", constant_values=1.)
return z_padded * z1m_cumprod_shifted
def _inverse(self, y):
y_crop = y[..., :-1]
z1m_cumprod = jnp.clip(1 - jnp.cumsum(y_crop, axis=-1), a_min=jnp.finfo(y.dtype).tiny)
        # hence x = logit(z) = log(z / (1 - z)) = log(y_crop / z1m_cumprod)
x = jnp.log(y_crop / z1m_cumprod)
return x + jnp.log(x.shape[-1] - jnp.arange(x.shape[-1]))
def log_abs_det_jacobian(self, x, y, intermediates=None):
# Ref: https://mc-stan.org/docs/2_19/reference-manual/simplex-transform-section.html
# |det|(J) = Product(y * (1 - z))
x = x - jnp.log(x.shape[-1] - jnp.arange(x.shape[-1]))
z = jnp.clip(expit(x), a_min=jnp.finfo(x.dtype).tiny)
# XXX we use the identity 1 - z = z * exp(-x) to not worry about
# the case z ~ 1
return jnp.sum(jnp.log(y[..., :-1] * z) - x, axis=-1)
def forward_shape(self, shape):
if len(shape) < 1:
raise ValueError("Too few dimensions on input")
return shape[:-1] + (shape[-1] + 1,)
def inverse_shape(self, shape):
if len(shape) < 1:
raise ValueError("Too few dimensions on input")
return shape[:-1] + (shape[-1] - 1,)
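# Illustrative sketch (not part of the library source): the transform maps R^{K-1}
# onto the K-simplex, and the shift applied above sends the zero vector to the
# uniform point; e.g. StickBreakingTransform()(jnp.zeros(2)) gives [1/3, 1/3, 1/3].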
class UnpackTransform(Transform):
"""
Transforms a contiguous array to a pytree of subarrays.
:param unpack_fn: callable used to unpack a contiguous array.
"""
domain = constraints.real_vector
codomain = constraints.dependent
def __init__(self, unpack_fn):
self.unpack_fn = unpack_fn
def __call__(self, x):
batch_shape = x.shape[:-1]
if batch_shape:
unpacked = vmap(self.unpack_fn)(x.reshape((-1,) + x.shape[-1:]))
return tree_map(lambda z: jnp.reshape(z, batch_shape + z.shape[1:]), unpacked)
else:
return self.unpack_fn(x)
def _inverse(self, y):
leading_dims = [v.shape[0] if jnp.ndim(v) > 0 else 0
for v in tree_flatten(y)[0]]
d0 = leading_dims[0]
not_scalar = d0 > 0 or len(leading_dims) > 1
if not_scalar and all(d == d0 for d in leading_dims[1:]):
warnings.warn("UnpackTransform.inv might lead to an unexpected behavior because it"
" cannot transform a batch of unpacked arrays.")
return ravel_pytree(y)[0]
def log_abs_det_jacobian(self, x, y, intermediates=None):
return jnp.zeros(jnp.shape(x)[:-1])
def forward_shape(self, shape):
raise NotImplementedError
def inverse_shape(self, shape):
raise NotImplementedError
##########################################################
# CONSTRAINT_REGISTRY
##########################################################
class ConstraintRegistry(object):
def __init__(self):
self._registry = {}
def register(self, constraint, factory=None):
if factory is None:
return lambda factory: self.register(constraint, factory)
if isinstance(constraint, constraints.Constraint):
constraint = type(constraint)
self._registry[constraint] = factory
def __call__(self, constraint):
try:
factory = self._registry[type(constraint)]
except KeyError as e:
raise NotImplementedError from e
return factory(constraint)
biject_to = ConstraintRegistry()
@biject_to.register(constraints.corr_cholesky)
def _transform_to_corr_cholesky(constraint):
return CorrCholeskyTransform()
@biject_to.register(constraints.corr_matrix)
def _transform_to_corr_matrix(constraint):
return ComposeTransform([CorrCholeskyTransform(),
InvCholeskyTransform(domain=constraints.corr_cholesky)])
@biject_to.register(constraints.greater_than)
def _transform_to_greater_than(constraint):
if constraint is constraints.positive:
return ExpTransform()
return ComposeTransform([ExpTransform(),
AffineTransform(constraint.lower_bound, 1,
domain=constraints.positive)])
@biject_to.register(constraints.less_than)
def _transform_to_less_than(constraint):
return ComposeTransform([ExpTransform(),
AffineTransform(constraint.upper_bound, -1,
domain=constraints.positive)])
@biject_to.register(constraints.independent)
def _biject_to_independent(constraint):
return IndependentTransform(biject_to(constraint.base_constraint),
constraint.reinterpreted_batch_ndims)
@biject_to.register(constraints.interval)
def _transform_to_interval(constraint):
if constraint is constraints.unit_interval:
return SigmoidTransform()
scale = constraint.upper_bound - constraint.lower_bound
return ComposeTransform([SigmoidTransform(),
AffineTransform(constraint.lower_bound, scale,
domain=constraints.unit_interval)])
@biject_to.register(constraints.lower_cholesky)
def _transform_to_lower_cholesky(constraint):
return LowerCholeskyTransform()
@biject_to.register(constraints.ordered_vector)
def _transform_to_ordered_vector(constraint):
return OrderedTransform()
@biject_to.register(constraints.positive_definite)
def _transform_to_positive_definite(constraint):
return ComposeTransform([LowerCholeskyTransform(), InvCholeskyTransform()])
@biject_to.register(constraints.real)
def _transform_to_real(constraint):
return IdentityTransform()
@biject_to.register(constraints.simplex)
def _transform_to_simplex(constraint):
return StickBreakingTransform()
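# Illustrative usage sketch (assuming only the registrations above): biject_to maps
# a constraint to a bijection from an unconstrained space onto its support, e.g.
#   biject_to(constraints.positive)       # -> ExpTransform()
#   biject_to(constraints.unit_interval)  # -> SigmoidTransform()
# Constraint types without a registered factory raise NotImplementedError.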
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
"""Contain logic to help format logging messages in a consistent way."""
from datetime import datetime
import logging, os
from typing import Callable, Dict, Optional, TYPE_CHECKING
from cdm.enums import CdmStatusLevel, CdmLogCode
from cdm.utilities import time_utils, storage_utils
if TYPE_CHECKING:
from cdm.objectmodel import CdmCorpusContext, CdmManifestDefinition, CdmEntityDefinition
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
default_logger = logging.getLogger('cdm-python')
# Log to console by default if handler is not specified.
handler = logging.StreamHandler()
handler.setLevel(default_logger.level)
handler.setFormatter(
logging.Formatter('%(asctime)s\t%(levelname)s\t%(filename)s:%(lineno)s\t%(funcName)s\t%(message)s'))
default_logger.handlers = [handler] # Overwrite existing handler.
resource_file_path = os.path.abspath(os.path.join(ROOT_PATH, '..', '..', 'resx', 'logmessages.txt'))
with open(resource_file_path, 'r') as resource_file:
log_messages = dict(line.strip().split(': ', 1) for line in resource_file)
def debug(ctx: 'CdmCorpusContext', class_name: str, method: str, corpus_path: str, message: str, ingest_telemetry: Optional[bool] = False) -> None:
_log(CdmStatusLevel.PROGRESS, ctx, class_name, message, method, default_logger.debug, corpus_path, CdmLogCode.NONE, ingest_telemetry)
def info(ctx: 'CdmCorpusContext', class_name: str, method: str, corpus_path: str, message: str) -> None:
_log(CdmStatusLevel.INFO, ctx, class_name, message, method, default_logger.info, corpus_path, CdmLogCode.NONE)
def warning(ctx: 'CdmCorpusContext', class_name: str, method: str, corpus_path: str, code: 'CdmLogCode', *args) -> None:
# Get message from resource for the code enum.
message = _get_message_from_resource_file(code, args)
_log(CdmStatusLevel.WARNING, ctx, class_name, message, method, default_logger.warning, corpus_path, code)
def error(ctx: 'CdmCorpusContext', class_name: str, method: str, corpus_path: str, code: 'CdmLogCode', *args) -> None:
# Get message from resource for the code enum.
message = _get_message_from_resource_file(code, args)
_log(CdmStatusLevel.ERROR, ctx, class_name, message, method, default_logger.error, corpus_path, code)
def _log(level: 'CdmStatusLevel', ctx: 'CdmCorpusContext', class_name: str, message: str, method: str,
default_status_event: Callable, corpus_path: str, code: 'CdmLogCode', ingest_telemetry: Optional[bool] = False) -> None:
"""
Log to the specified status level by using the status event on the corpus context (if it exists) or to the default logger.
The log level, class_name, message and path values are also added as part of a new entry to the log recorder.
"""
    if code in ctx.suppressed_log_codes:
return
# Write message to the configured logger
if level >= ctx.report_at_level:
timestamp = time_utils._get_formatted_date_string(datetime.utcnow())
# Store a record of the event.
# Save some dict init and string formatting cycles by checking
# whether the recording is actually enabled.
if ctx.events.is_recording:
event = {
'timestamp': timestamp,
'level': level.name,
'class': class_name,
'message': message,
'method': method
}
if CdmStatusLevel.ERROR == level or CdmStatusLevel.WARNING == level:
event['code'] = code.name
if ctx.correlation_id is not None:
event['cid'] = ctx.correlation_id
if corpus_path is not None:
event['path'] = corpus_path
ctx.events.append(event)
        formatted_message = _format_message(timestamp, class_name, message, method, ctx.correlation_id, corpus_path)
if ctx and ctx.status_event:
ctx.status_event(level, formatted_message)
else:
default_status_event(formatted_message)
# Ingest the logs into telemetry database
if ctx.corpus.telemetry_client:
ctx.corpus.telemetry_client.add_to_ingestion_queue(timestamp, level, class_name, method, corpus_path,
message, ingest_telemetry, code)
def _get_message_from_resource_file(code: 'CdmLogCode', args) -> str:
"""
Loads the string from resource file for particular enum and inserts arguments in it.
"""
message = log_messages[code.name]
i = 0
for x in args:
string = '{' + str(i) + '}'
message = message.replace(string, str(x))
i = i + 1
return message
def _format_message(timestamp: str, class_name: str, message: str, method: Optional[str] = None,
correlation_id: Optional[str] = None, corpus_path: Optional[str] = None) -> str:
method = ' | {}'.format(method) if method is not None else ''
correlation_id = ' | {}'.format(correlation_id) if correlation_id is not None else ''
corpus_path = ' | {}'.format(corpus_path) if corpus_path is not None else ''
return '{} | {} | {}'.format(timestamp, class_name, message) + method + correlation_id + corpus_path
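# Example with illustrative values:
#   _format_message('2021-06-18T12:30:00', 'CdmCorpus', 'some message',
#                   method='fetch', corpus_path='local:/x.cdm.json')
# returns '2021-06-18T12:30:00 | CdmCorpus | some message | fetch | local:/x.cdm.json';
# None-valued fields are omitted from the pipe-separated output.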
class _TState:
"""
Helper struct to keep few needed bits of information about the logging scope.
"""
def __init__(self, class_name: str, ctx: 'CdmCorpusContext', path: str):
self.class_name = class_name # type: str
self.ctx = ctx # type: CdmCorpusContext
self.path = path # type: str
class _LoggerScope:
"""
LoggerScope class is responsible for enabling/disabling event recording
and will log the scope entry/exit debug events.
"""
def __init__(self, state: _TState):
self.state = state # type: _TState
        self.time = datetime.utcnow()  # type: datetime
self.is_top_level_method = False # type: bool
def __enter__(self):
self.state.ctx.events._enable()
# Check if the method is at the outermost level
if self.state.ctx.events.nesting_level == 1:
self.is_top_level_method = True
self.time = datetime.utcnow()
debug(self.state.ctx, self.state.class_name, self.state.path, None, 'Entering scope')
def __exit__(self, exc_type, exc_value, exc_traceback):
        message = 'Leaving scope. Time elapsed: {0} ms.'\
            .format((datetime.utcnow() - self.time).total_seconds() * 1000)
# Commenting out to keep consistent with C#
# In C# - Cache is a concurrent dict, and getting the Count on it is getting blocked by other cache updates
# message = 'Leaving scope. Time elapsed: {0} ms; Cache memory used: {1}.'\
# .format((datetime.utcnow() - self.time).microseconds / 1000, len(self.state.ctx._attribute_cache))
debug(self.state.ctx, self.state.class_name, self.state.path, None, message, self.is_top_level_method)
self.state.ctx.events._disable()
def _enter_scope(class_name: str, ctx: 'CdmCorpusContext', path: str) -> _LoggerScope:
"""
Creates a new LoggerScope instance with the provided details of the scope being entered.
    To be used at the beginning of functions via the resource wrapper: 'with _enter_scope(...): # function body'.
"""
return _LoggerScope(_TState(class_name, ctx, path))
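# Usage sketch (illustrative; the class and method names are hypothetical):
#
#   def fetch_entity(self, path):
#       with _enter_scope('CdmCorpusDefinition', self.ctx, 'fetch_entity'):
#           ...  # function body; scope entry/exit debug events are recorded around it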
def _ingest_manifest_telemetry(manifest: 'CdmManifestDefinition', ctx: 'CdmCorpusContext',
class_name: str, method: str, corpus_path: str) -> None:
if ctx.corpus.telemetry_client is None:
return
# Get the namespace of the storage for the manifest
storage_namespace = manifest.namespace or manifest.ctx.corpus.storage.default_namespace
# Get storage adapter type
adapter = manifest.ctx.corpus.storage.fetch_adapter(storage_namespace)
adapter_type = type(adapter).__name__
message = 'ManifestStorage:{0};'.format(adapter_type)
entity_num = len(manifest.entities)
manifest_info = {'RelationshipNum': len(manifest.relationships),
'EntityNum': entity_num} # type: Dict[str, int]
# Counts the total number partitions in the manifest
partition_num = 0
# Counts the number of different partition patterns in all the entities
partition_glob_pattern_num = 0
partition_regex_pattern_num = 0
# Counts the number of standard entities
standard_entity_num = 0
# Get detailed info for each entity
for entity_dec in manifest.entities:
# Get data partition info, if any
if entity_dec.data_partitions:
partition_num += len(entity_dec.data_partitions)
for pattern in entity_dec.data_partition_patterns:
# If both globPattern and regularExpression is set, globPattern will be used
if pattern.glob_pattern:
partition_glob_pattern_num += 1
elif pattern.regular_expression:
partition_regex_pattern_num += 1
# Check if entity is standard
entity_namespace = storage_utils.StorageUtils.split_namespace_path(entity_dec.entity_path)[0]
if entity_namespace == 'cdm':
standard_entity_num += 1
manifest_info['PartitionNum'] = partition_num
manifest_info['PartitionGlobPatternNum'] = partition_glob_pattern_num
manifest_info['PartitionRegExPatternNum'] = partition_regex_pattern_num
manifest_info['StandardEntityNum'] = standard_entity_num
manifest_info['CustomEntityNum'] = entity_num - standard_entity_num
# Serialize manifest info dictionary
message += _serialize_dictionary(manifest_info)
debug(ctx, class_name, method, corpus_path, 'Manifest Info: {{{0}}}'.format(message), True)
def _ingest_entity_telemetry(entity: 'CdmEntityDefinition', ctx: 'CdmCorpusContext',
class_name: str, method: str, corpus_path: str) -> None:
if ctx.corpus.telemetry_client is None:
return
# Get entity storage namespace
entity_namespace = entity.in_document.namespace or entity.ctx.corpus.storage.default_namespace
# Get storage adapter type
adapter = entity.ctx.corpus.storage.fetch_adapter(entity_namespace)
adapter_type = type(adapter).__name__
message = 'EntityStorage:{0};EntityNamespace:{1};'.format(adapter_type, entity_namespace)
# Collect all entity info
entity_info = _form_entity_info_dict(entity)
message += _serialize_dictionary(entity_info)
debug(ctx, class_name, method, corpus_path, 'Entity Info: {{{0}}}'.format(message), True)
def _form_entity_info_dict(entity: 'CdmEntityDefinition'):
entity_info = {} # type: Dict[str, int]
# Check whether entity is resolved
is_resolved = 0
if entity.attribute_context:
is_resolved = 1
entity_info['ResolvedEntity'] = is_resolved
entity_info['ExhibitsTraitNum'] = len(entity.exhibits_traits)
entity_info['AttributeNum'] = len(entity.attributes)
# The number of traits whose name starts with "means."
semantics_trait_num = 0
for trait in entity.exhibits_traits:
if trait.fetch_object_definition_name().startswith('means.'):
semantics_trait_num += 1
entity_info['SemanticsTraitNum'] = semantics_trait_num
return entity_info
def _serialize_dictionary(dictionary: Dict[str, int]):
"""
Serialize the map and return a string.
:param dictionary: The dictionary to be serialized.
:return: The serialized dictionary.
"""
dict_str = ''
for key, val in dictionary.items():
if not val:
val = 'None'
dict_str += key + ':' + str(val) + ';'
return dict_str
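# Example (illustrative): _serialize_dictionary({'EntityNum': 3, 'PartitionNum': 0})
# returns 'EntityNum:3;PartitionNum:None;' -- note that falsy values (0, None, '')
# are rendered as 'None' by the check above.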
|
# Copyright 2018 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from apiclient.errors import HttpError
import cloudstorage
from google.appengine.ext import testbed
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.table import Table
from google.cloud.exceptions import ClientError
import mock
from core import workers
class TestAbstractWorker(unittest.TestCase):
def setUp(self):
super(TestAbstractWorker, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# Activate which service we want to stub
self.testbed.init_taskqueue_stub()
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
def tearDown(self):
super(TestAbstractWorker, self).tearDown()
self.testbed.deactivate()
def test_default_params_values(self):
class DummyWorker(workers.Worker):
PARAMS = [
('int_with_default', 'number', True, 20, 'Description'),
]
worker = DummyWorker({}, 1, 1)
self.assertIsInstance(worker._params['int_with_default'], int)
self.assertEqual(worker._params['int_with_default'], 20)
@mock.patch('core.cloud_logging.logger')
def test_log_info_succeeds(self, patched_logger):
patched_logger.log_struct.__name__ = 'foo'
worker = workers.Worker({}, 1, 1)
self.assertEqual(patched_logger.log_struct.call_count, 0)
worker.log_info('Hi there!')
self.assertEqual(patched_logger.log_struct.call_count, 1)
call_first_arg = patched_logger.log_struct.call_args[0][0]
self.assertEqual(call_first_arg.get('log_level'), 'INFO')
@mock.patch('core.cloud_logging.logger')
def test_log_warn_succeeds(self, patched_logger):
patched_logger.log_struct.__name__ = 'foo'
worker = workers.Worker({}, 1, 1)
self.assertEqual(patched_logger.log_struct.call_count, 0)
worker.log_warn('Hi there!')
self.assertEqual(patched_logger.log_struct.call_count, 1)
call_first_arg = patched_logger.log_struct.call_args[0][0]
self.assertEqual(call_first_arg.get('log_level'), 'WARNING')
@mock.patch('core.cloud_logging.logger')
def test_log_error_succeeds(self, patched_logger):
patched_logger.log_struct.__name__ = 'foo'
worker = workers.Worker({}, 1, 1)
self.assertEqual(patched_logger.log_struct.call_count, 0)
worker.log_error('Hi there!')
self.assertEqual(patched_logger.log_struct.call_count, 1)
call_first_arg = patched_logger.log_struct.call_args[0][0]
self.assertEqual(call_first_arg.get('log_level'), 'ERROR')
@mock.patch('core.cloud_logging.logger')
def test_execute_client_error_raises_worker_exception(self, patched_logger):
patched_logger.log_struct.__name__ = 'foo'
class DummyWorker(workers.Worker):
def _execute(self):
raise ClientError('There has been an issue here.')
worker = DummyWorker({}, 1, 1)
with self.assertRaises(workers.WorkerException):
worker.execute()
  def test_enqueue_successfully_adds_to_the_list(self):
worker = workers.Worker({}, 1, 1)
self.assertEqual(len(worker._workers_to_enqueue), 0)
worker._enqueue('DummyClass', 'params')
self.assertEqual(len(worker._workers_to_enqueue), 1)
self.assertEqual(worker._workers_to_enqueue[0][0], 'DummyClass')
self.assertEqual(worker._workers_to_enqueue[0][1], 'params')
@mock.patch('time.sleep')
@mock.patch('core.cloud_logging.logger')
def test_retry_until_a_finite_number_of_times(self, patched_logger,
patched_time_sleep):
patched_logger.log_struct.__name__ = 'foo'
# NB: bypass the time.sleep wait, otherwise the test will take ages
patched_time_sleep.side_effect = lambda delay: delay
worker = workers.Worker({}, 1, 1)
def _raise_value_error_exception(*args, **kwargs):
raise ValueError('Wrong value.')
fake_request = mock.Mock()
fake_request.__name__ = 'foo'
fake_request.side_effect = _raise_value_error_exception
with self.assertRaises(ValueError):
worker.retry(fake_request)()
self.assertGreaterEqual(fake_request.call_count, 2)
def test_retry_raises_error_if_bad_request_error(self):
worker = workers.Worker({}, 1, 1)
def _raise_value_error_exception(*args, **kwargs):
raise HttpError(mock.Mock(status=400), '')
fake_request = mock.Mock()
fake_request.__name__ = 'foo'
fake_request.side_effect = _raise_value_error_exception
with self.assertRaises(HttpError):
worker.retry(fake_request)()
self.assertEqual(fake_request.call_count, 1)
class TestBQWorker(unittest.TestCase):
@mock.patch('time.sleep')
@mock.patch('google.cloud.bigquery.job.QueryJob')
def test_begin_and_wait_start_jobs(self, patched_bigquery_QueryJob,
patched_time_sleep):
# NB: bypass the time.sleep wait, otherwise the test will take ages
patched_time_sleep.side_effect = lambda delay: delay
worker = workers.BQWorker({}, 1, 1)
job0 = patched_bigquery_QueryJob()
job0.begin.side_effect = lambda: True
def _mark_as_done():
job0.state = 'DONE'
job0.reload.side_effect = _mark_as_done
job0.error_result = None
worker._begin_and_wait(job0)
job0.begin.assert_called_once()
@mock.patch('time.sleep')
@mock.patch('google.cloud.bigquery.job.QueryJob')
@mock.patch('core.workers.BQWorker._enqueue')
def test_begin_and_wait_enqueue_bqwaiter_after_some_time(self,
patched_BQWorker_enqueue, patched_bigquery_QueryJob, patched_time_sleep):
# NB: bypass the time.sleep wait, otherwise the test will take ages
patched_time_sleep.side_effect = lambda delay: delay
def _fake_enqueue(*args, **kwargs):
# Do Nothing
return True
patched_BQWorker_enqueue.side_effect = _fake_enqueue
worker = workers.BQWorker({'bq_project_id': 'BQID'}, 1, 1)
job0 = patched_bigquery_QueryJob()
job0.error_result = None
worker._begin_and_wait(job0)
patched_BQWorker_enqueue.assert_called_once()
self.assertEqual(patched_BQWorker_enqueue.call_args[0][0], 'BQWaiter')
self.assertIsInstance(patched_BQWorker_enqueue.call_args[0][1], dict)
class TestBQWaiter(unittest.TestCase):
def test_execute_enqueue_job_if_done(self):
patcher_get_client = mock.patch.object(workers.BQWaiter, '_get_client',
return_value=None)
self.addCleanup(patcher_get_client.stop)
patcher_get_client.start()
mockAsyncJob = mock.Mock()
mockAsyncJob.error_result = None
patcher_async_job = mock.patch('google.cloud.bigquery.job._AsyncJob',
return_value=mockAsyncJob)
self.addCleanup(patcher_async_job.stop)
patcher_async_job.start()
patcher_worker_enqueue = mock.patch('core.workers.BQWaiter._enqueue')
self.addCleanup(patcher_worker_enqueue.stop)
patched_enqueue = patcher_worker_enqueue.start()
worker = workers.BQWaiter(
{
'bq_project_id': 'BQID',
'job_names': ['Job1', 'Job2'],
},
1,
1)
worker._client = mock.Mock()
worker._execute()
patched_enqueue.assert_called_once()
self.assertEqual(patched_enqueue.call_args[0][0], 'BQWaiter')
class TestStorageToBQImporter(unittest.TestCase):
def setUp(self):
super(TestStorageToBQImporter, self).setUp()
self.testbed = testbed.Testbed()
self.testbed.activate()
# Activate which service we want to stub
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_app_identity_stub()
self.testbed.init_blobstore_stub()
self.testbed.init_datastore_v3_stub()
patcher_listbucket = mock.patch('cloudstorage.listbucket')
patched_listbucket = patcher_listbucket.start()
self.addCleanup(patcher_listbucket.stop)
def _fake_listbucket(bucket_prefix):
filenames = [
'input.csv',
'subdir/input.csv',
'data.csv',
'subdir/data.csv',
]
for suffix in filenames:
filename = os.path.join(bucket_prefix, suffix)
stat = cloudstorage.GCSFileStat(
filename,
0,
'686897696a7c876b7e',
0)
yield stat
patched_listbucket.side_effect = _fake_listbucket
def tearDown(self):
super(TestStorageToBQImporter, self).tearDown()
self.testbed.deactivate()
def test_get_source_uris_succeeds(self):
worker = workers.StorageToBQImporter(
{
'source_uris': [
'gs://bucket/data.csv',
'gs://bucket/subdir/data.csv',
]
},
1,
1)
source_uris = worker._get_source_uris()
self.assertEqual(len(source_uris), 2)
self.assertEqual(source_uris[0], 'gs://bucket/data.csv')
self.assertEqual(source_uris[1], 'gs://bucket/subdir/data.csv')
def test_get_source_uris_with_pattern(self):
worker = workers.StorageToBQImporter(
{
'source_uris': [
'gs://bucket/subdir/*.csv',
]
},
1,
1)
source_uris = worker._get_source_uris()
self.assertEqual(len(source_uris), 2)
self.assertEqual(source_uris[0], 'gs://bucket/subdir/input.csv')
self.assertEqual(source_uris[1], 'gs://bucket/subdir/data.csv')
class TestBQToMeasurementProtocolMixin(object):
def _use_query_results(self, response_json):
# NB: be sure to remove the jobReference from the api response used to
# create the Table instance.
response_json_copy = response_json.copy()
del response_json_copy['jobReference']
mock_dataset = mock.Mock()
mock_dataset._client = self._client
mock_table = Table('mock_table', mock_dataset)
self._client._connection.api_request.return_value = response_json
self._client.dataset.return_value = mock_dataset
mock_dataset.table.return_value = mock_table
class TestBQToMeasurementProtocolProcessor(TestBQToMeasurementProtocolMixin, unittest.TestCase):
def setUp(self):
super(TestBQToMeasurementProtocolProcessor, self).setUp()
self._client = mock.Mock()
patcher_get_client = mock.patch.object(
workers.BQToMeasurementProtocolProcessor,
'_get_client',
return_value=self._client)
self.addCleanup(patcher_get_client.stop)
patcher_get_client.start()
patcher_requests_post = mock.patch('requests.post')
self.addCleanup(patcher_requests_post.stop)
self._patched_post = patcher_requests_post.start()
self.maxDiff = None # This is to see full diff when self.assertEqual fails.
@mock.patch('time.sleep')
def test_success_with_one_post_request(self, patched_time_sleep):
# Bypass the time.sleep wait
patched_time_sleep.return_value = 1
self._worker = workers.BQToMeasurementProtocolProcessor(
{
'bq_project_id': 'BQID',
'bq_dataset_id': 'DTID',
'bq_table_id': 'table_id',
'bq_page_token': None,
'bq_batch_size': 10,
'mp_batch_size': 20,
},
1,
1)
self._use_query_results({
'tableReference': {
'tableId': 'mock_table',
},
'jobReference': {
'jobId': 'two-rows-query',
},
'rows': [
{
'f': [
{'v': 'UA-12345-1'},
{'v': '35009a79-1a05-49d7-b876-2b884d0f825b'},
{'v': 'event'},
{'v': 1},
{'v': 'category'},
{'v': 'action'},
{'v': 'label'},
{'v': 0.9},
{'v': 'User Agent / 1.0'},
{'v': None},
]
},
{
'f': [
{'v': 'UA-12345-1'},
{'v': '35009a79-1a05-49d7-b876-2b884d0f825b'},
{'v': 'event'},
{'v': 1},
{'v': 'category'},
{'v': 'action'},
{'v': u'\u043c\u0435\u0442\u043a\u0430'},
{'v': 0.8},
{'v': 'User Agent / 1.0'},
{'v': 'segment1'},
]
}
],
'schema': {
'fields': [
{'name': 'tid', 'type': 'STRING'},
{'name': 'cid', 'type': 'STRING'},
{'name': 't', 'type': 'STRING'},
{'name': 'ni', 'type': 'INTEGER'},
{'name': 'ec', 'type': 'STRING'},
{'name': 'ea', 'type': 'STRING'},
{'name': 'el', 'type': 'STRING'},
{'name': 'ev', 'type': 'FLOAT'},
{'name': 'ua', 'type': 'STRING'},
{'name': 'cd1', 'type': 'STRING'},
]
}
})
mock_response = mock.Mock()
mock_response.status_code = 200
self._patched_post.return_value = mock_response
self._worker._execute()
self._patched_post.assert_called_once()
self.assertEqual(
self._patched_post.call_args[0][0],
'https://www.google-analytics.com/batch')
self.assertEqual(
self._patched_post.call_args[1],
{
'headers': {'user-agent': 'CRMint / 0.1'},
'data':
"""cid=35009a79-1a05-49d7-b876-2b884d0f825b&ea=action&ec=category&el=label&ev=0.9&ni=1&t=event&tid=UA-12345-1&ua=User+Agent+%2F+1.0&v=1
cd1=segment1&cid=35009a79-1a05-49d7-b876-2b884d0f825b&ea=action&ec=category&el=%D0%BC%D0%B5%D1%82%D0%BA%D0%B0&ev=0.8&ni=1&t=event&tid=UA-12345-1&ua=User+Agent+%2F+1.0&v=1""",
})
@mock.patch('time.sleep')
def test_success_with_enhanced_ecommerce_request(self, patched_time_sleep):
# Bypass the time.sleep wait
patched_time_sleep.return_value = 1
self._worker = workers.BQToMeasurementProtocolProcessor(
{
'bq_project_id': 'BQID',
'bq_dataset_id': 'DTID',
'bq_table_id': 'table_id',
'bq_page_token': None,
'bq_batch_size': 10,
'mp_batch_size': 20,
},
1,
1)
self._use_query_results({
'tableReference': {
'tableId': 'mock_table',
},
'jobReference': {
'jobId': 'one-row-with-array-of-structs-query',
},
'rows': [
{
'f': [
{'v': 'UA-12345-6'}, # tid
{'v': '123456789.1234567890'}, # cid
{'v': 'pageview'}, # t
{'v': 'purchase'}, # pa
{'v': '987654321'}, # ti
{'v': 'Moscow'}, # ta
{'v': '1540.0'}, # tr
{'v': 'RUB'}, # cu
{
'v': [ # pr
{
'v': { # pr1
'f': [
{'v': 'SKU1'}, # pr1id
{'v': 'Product1'}, # pr1nm
{'v': 'Brand1'}, # pr1br
{'v': 'Cat1'}, # pr1ca
{'v': '110.0'}, # pr1pr
{'v': '1'} # pr1qt
]
}
},
{
'v': { # pr2
'f': [
{'v': 'SKU2'}, # pr2id
{'v': 'Product2'}, # pr2nm
{'v': 'Brand2'}, # pr2br
{'v': 'Cat2'}, # pr2ca
{'v': '220.0'}, # pr2pr
{'v': '2'} # pr2qt
]
}
},
{
'v': { # pr3
'f': [
{'v': 'SKU3'}, # pr3id
{'v': 'Product3'}, # pr3nm
{'v': 'Brand3'}, # pr3br
{'v': 'Cat3'}, # pr3ca
{'v': '330.0'}, # pr3pr
{'v': '3'} # pr3qt
]
}
}
]
},
{
'v': [ # il
{ # il1
'v': {
'f': [
{'v': 'List1'}, # il1nm
{
'v': [ # il1pi
{
'v': { # il1pi1
'f': [
{'v': 'SKU11'}, # il1pi1id
{'v': 'Product11'}, # il1pi1nm
{'v': 'Brand11'}, # il1pi1br
{'v': 'Cat11'}, # il1pi1ca
{'v': '1110.0'} # il1pi1pr
]
}
},
{
'v': { # il1pi2
'f': [
{'v': 'SKU12'}, # il1pi2id
{'v': 'Product12'}, # il1pi2nm
{'v': 'Brand12'}, # il1pi2br
{'v': 'Cat12'}, # il1pi2ca
{'v': '1220.0'} # il1pi2pr
]
}
},
{
'v': { # il1pi3
'f': [
{'v': 'SKU13'}, # il1pi3id
{'v': 'Product13'}, # il1pi3nm
{'v': 'Brand13'}, # il1pi3br
{'v': 'Cat13'}, # il1pi3ca
{'v': '1330.0'} # il1pi3pr
]
}
}
]
}
]
}
},
{ # il2
'v': {
'f': [
{'v': 'List2'}, # il2nm
{
'v': [ # il2pi
{
'v': { # il2pi1
'f': [
{'v': 'SKU21'}, # il2pi1id
{'v': 'Product21'}, # il2pi1nm
{'v': 'Brand21'}, # il2pi1br
{'v': 'Cat21'}, # il2pi1ca
{'v': '2110.0'} # il2pi1pr
]
}
},
{
'v': { # il2pi2
'f': [
{'v': 'SKU22'}, # il2pi2id
{'v': 'Product22'}, # il2pi2nm
{'v': 'Brand22'}, # il2pi2br
{'v': None}, # il2pi2ca
{'v': '2220.0'} # il2pi2pr
]
}
},
{
'v': { # il2pi3
'f': [
{'v': 'SKU23'}, # il2pi3id
{'v': 'Product23'}, # il2pi3nm
{'v': 'Brand23'}, # il2pi3br
{'v': 'Cat23'}, # il2pi3ca
{'v': '2330.0'} # il2pi3pr
]
}
}
]
}
]
}
}
]
}
]
}
],
'schema': {
'fields': [
{'name': 'tid', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'cid', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 't', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'pa', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'ti', 'type': 'INTEGER', 'mode': 'NULLABLE'},
{'name': 'ta', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'tr', 'type': 'FLOAT', 'mode': 'NULLABLE'},
{'name': 'cu', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'pr', 'type': 'RECORD', 'mode': 'REPEATED', 'fields': [
{'name': 'id', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'nm', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'br', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'ca', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'pr', 'type': 'FLOAT', 'mode': 'NULLABLE'},
{'name': 'qt', 'type': 'INTEGER', 'mode': 'NULLABLE'}
]},
{'name': 'il', 'type': 'RECORD', 'mode': 'REPEATED', 'fields': [
{'name': 'nm', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'pi', 'type': 'RECORD', 'mode': 'REPEATED', 'fields': [
{'name': 'id', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'nm', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'br', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'ca', 'type': 'STRING', 'mode': 'NULLABLE'},
{'name': 'pr', 'type': 'FLOAT', 'mode': 'NULLABLE'}
]},
]}
]
}
})
mock_response = mock.Mock()
mock_response.status_code = 200
self._patched_post.return_value = mock_response
self._worker._execute()
self._patched_post.assert_called_once()
self.assertEqual(
self._patched_post.call_args[0][0],
'https://www.google-analytics.com/batch')
self.assertEqual(
self._patched_post.call_args[1],
{
'headers': {'user-agent': 'CRMint / 0.1'},
'data': 'cid=123456789.1234567890&cu=RUB&il1nm=List1&il1pi1br=Brand11&il1pi1ca=Cat11&il1pi1id=SKU11&il1pi1nm=Product11&il1pi1pr=1110.0&il1pi2br=Brand12&il1pi2ca=Cat12&il1pi2id=SKU12&il1pi2nm=Product12&il1pi2pr=1220.0&il1pi3br=Brand13&il1pi3ca=Cat13&il1pi3id=SKU13&il1pi3nm=Product13&il1pi3pr=1330.0&il2nm=List2&il2pi1br=Brand21&il2pi1ca=Cat21&il2pi1id=SKU21&il2pi1nm=Product21&il2pi1pr=2110.0&il2pi2br=Brand22&il2pi2id=SKU22&il2pi2nm=Product22&il2pi2pr=2220.0&il2pi3br=Brand23&il2pi3ca=Cat23&il2pi3id=SKU23&il2pi3nm=Product23&il2pi3pr=2330.0&pa=purchase&pr1br=Brand1&pr1ca=Cat1&pr1id=SKU1&pr1nm=Product1&pr1pr=110.0&pr1qt=1&pr2br=Brand2&pr2ca=Cat2&pr2id=SKU2&pr2nm=Product2&pr2pr=220.0&pr2qt=2&pr3br=Brand3&pr3ca=Cat3&pr3id=SKU3&pr3nm=Product3&pr3pr=330.0&pr3qt=3&t=pageview&ta=Moscow&ti=987654321&tid=UA-12345-6&tr=1540.0&v=1'
})
@mock.patch('core.cloud_logging.logger')
@mock.patch('time.sleep')
def test_log_exception_if_http_fails(self, patched_time_sleep, patched_logger):
# Bypass the time.sleep wait
patched_time_sleep.return_value = 1
# NB: patching the StackDriver logger is needed because there is no
# testbed service available for now
patched_logger.log_struct.__name__ = 'foo'
patched_logger.log_struct.return_value = "patched_log_struct"
self._worker = workers.BQToMeasurementProtocolProcessor(
{
'bq_project_id': 'BQID',
'bq_dataset_id': 'DTID',
'bq_table_id': 'table_id',
'bq_page_token': None,
'bq_batch_size': 10,
'mp_batch_size': 20,
},
1,
1)
self._use_query_results({
'tableReference': {
'tableId': 'mock_table',
},
'jobReference': {
'jobId': 'one-row-query',
},
'rows': [
{
'f': [
{'v': 'UA-12345-1'},
{'v': '35009a79-1a05-49d7-b876-2b884d0f825b'},
{'v': 'event'},
{'v': 1},
{'v': 'category'},
{'v': 'action'},
{'v': 'label'},
{'v': 'value'},
{'v': 'User Agent / 1.0'},
]
}
],
'schema': {
'fields': [
{'name': 'tid', 'type': 'STRING'},
{'name': 'cid', 'type': 'STRING'},
{'name': 't', 'type': 'STRING'},
{'name': 'ni', 'type': 'INTEGER'},
{'name': 'ec', 'type': 'STRING'},
{'name': 'ea', 'type': 'STRING'},
{'name': 'el', 'type': 'STRING'},
{'name': 'ev', 'type': 'STRING'},
{'name': 'ua', 'type': 'STRING'},
]
}
})
mock_response = mock.Mock()
mock_response.status_code = 500
self._patched_post.return_value = mock_response
self._worker._execute()
# Called 2 times because of 1 retry.
self.assertEqual(self._patched_post.call_count, 2)
# When retry stops it should log the message as an error.
    self.assertTrue(patched_logger.log_struct.called)
class TestBQToMeasurementProtocol(TestBQToMeasurementProtocolMixin, unittest.TestCase):
def setUp(self):
super(TestBQToMeasurementProtocol, self).setUp()
self._client = mock.Mock()
patcher_get_client = mock.patch.object(
workers.BQToMeasurementProtocol,
'_get_client',
return_value=self._client)
self.addCleanup(patcher_get_client.stop)
patcher_get_client.start()
@mock.patch('time.sleep')
def test_success_with_spawning_new_worker(self, patched_time_sleep):
# Bypass the time.sleep wait
patched_time_sleep.return_value = 1
self._worker = workers.BQToMeasurementProtocol(
{
'bq_project_id': 'BQID',
'bq_dataset_id': 'DTID',
'bq_table_id': 'table_id',
'bq_page_token': None,
'mp_batch_size': 20,
},
1,
1)
self._worker.MAX_ENQUEUED_JOBS = 1
api_response = {
'tableReference': {
'tableId': 'mock_table',
},
'jobReference': {
'jobId': 'one-row-query',
},
'pageToken': 'abc',
'rows': [
{
'f': [
{'v': 'UA-12345-1'},
{'v': '35009a79-1a05-49d7-b876-2b884d0f825b'},
{'v': 'event'},
{'v': 1},
{'v': 'category'},
{'v': 'action'},
{'v': 'label'},
{'v': 0.9},
{'v': 'User Agent / 1.0'},
]
},
{
'f': [
{'v': 'UA-12345-1'},
{'v': '35009a79-1a05-49d7-b876-2b884d0f825b'},
{'v': 'event'},
{'v': 1},
{'v': 'category'},
{'v': 'action'},
{'v': 'label'},
{'v': 0.8},
{'v': 'User Agent / 1.0'},
]
},
],
'schema': {
'fields': [
{'name': 'tid', 'type': 'STRING'},
{'name': 'cid', 'type': 'STRING'},
{'name': 't', 'type': 'STRING'},
{'name': 'ni', 'type': 'INTEGER'},
{'name': 'ec', 'type': 'STRING'},
{'name': 'ea', 'type': 'STRING'},
{'name': 'el', 'type': 'STRING'},
{'name': 'ev', 'type': 'FLOAT'},
{'name': 'ua', 'type': 'STRING'},
]
}
}
self._use_query_results(api_response)
patcher_worker_enqueue = mock.patch.object(workers.BQToMeasurementProtocol, '_enqueue')
self.addCleanup(patcher_worker_enqueue.stop)
patched_enqueue = patcher_worker_enqueue.start()
def _remove_next_page_token(worker_name, *args, **kwargs):
if worker_name == 'BQToMeasurementProtocol':
del api_response['pageToken']
self._use_query_results(api_response)
patched_enqueue.side_effect = _remove_next_page_token
self._worker._execute()
self.assertEqual(patched_enqueue.call_count, 2)
self.assertEqual(patched_enqueue.call_args_list[0][0][0], 'BQToMeasurementProtocolProcessor')
self.assertEqual(patched_enqueue.call_args_list[0][0][1]['bq_page_token'], None)
self.assertEqual(patched_enqueue.call_args_list[1][0][0], 'BQToMeasurementProtocol')
self.assertEqual(patched_enqueue.call_args_list[1][0][1]['bq_page_token'], 'abc')
|
from rest_framework.response import Response
from rest_framework import status
class ResponseFail(Response):
def __init__(self, data=""):
data = {"code":200, "error":True, "status": "fail", "result": data}
        super().__init__(data, status=200)
class ResponseSuccess(Response):
def __init__(self, data="", filter_fields=None):
if isinstance(data, Response):
data = data.data
data = {"code":200, "error":False, "status": "success", "result": data}
if filter_fields:
data["filter_fields"] = filter_fields
super().__init__(data, status=200)
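# Usage sketch (illustrative; ThingView, Thing and ThingSerializer are hypothetical):
#
#   class ThingView(APIView):
#       def get(self, request):
#           try:
#               data = ThingSerializer(Thing.objects.all(), many=True).data
#               return ResponseSuccess(data)
#           except Exception as exc:
#               return ResponseFail(str(exc))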
|
import sys, logging
from socketserver import ThreadingMixIn
from http.server import HTTPServer
import socket, ssl
import config, features
import handlers
# _SERVICES = [ 'ksp', handlers.TODO, handlers.CDE, handlers.FIRS, handlers.FIRS_TA ]
class Server (ThreadingMixIn, HTTPServer):
"""
actual HTTP server, though is more set-up and configuration than anything else
"""
def __init__(self):
if config.disconnected:
self._handlers = []
else:
self._handlers = self._setup_handlers()
from server.http_handler import Handler
HTTPServer.__init__(self, (config.server_host, config.server_port), Handler, False)
def _setup_handlers(self):
# the order is important when paths might collide, e.g. for -ta- services
hlist = [
handlers.KSP_Handler(),
handlers.TODO_SyncMetadata(),
handlers.TODO_GetItems(),
handlers.TODO_RemoveItems(),
handlers.CDE_DownloadContent(),
handlers.CDE_UploadSnapshot(),
handlers.CDE_Sidecar(),
handlers.CDE_GetPageNumbers(),
handlers.CDE_GetAnnotations(),
handlers.CDE_ShareAnnotations(),
handlers.CDE_DevicesWithCollections(),
handlers.CDE_GetCollections(),
# catch-all for todo and cde services
handlers.Upstream(handlers.TODO, handlers.TODO_PATH[:-1]), # all other todo calls
handlers.Upstream(handlers.CDE, handlers.CDE_PATH[:-1]), # all other cde calls
# device (de)registration
handlers.FIRS_GetNamesForFiona(),
handlers.FIRS_NewDevice(),
handlers.FIRS_TA_NewDevice(),
handlers.Upstream(handlers.FIRS, handlers.FIRS_PATH[:-1]),
# handlers.Store(), # book infos?
# handlers.Dummy(handlers.WWW, handlers.EMBER_PATH), # ads?
handlers.ECX_Images(),
]
# for h in hlist:
# if h.service not in _SERVICES:
# raise Exception("tried to register handler %s for unknown service %s", h, h.service)
return hlist
def find_handler(self, request):
for h in self._handlers: # first volunteer wins
if h.accept(request):
return h
logging.warn("no handler found for %s", request.requestline)
def run(self):
self.server_bind()
protocol = 'HTTP'
if config.server_certificate:
# self.socket = ssl.SSLSocket(self.socket, certfile = config.server_certificate, server_side = True)
protocol = 'HTTP+HTTPS'
self.server_activate()
logging.info("started on %s:%s (%s)", self.server_name, self.server_port, protocol)
import select
try:
self.serve_forever(1)
except select.error as err:
logging.critical("select.error %s", err)
except KeyboardInterrupt: # ^C
logging.warn("received ^C")
pass
logging.info("shutdown")
self.server_close()
def get_request(self):
sock, client_address = HTTPServer.get_request(self)
if sock and config.server_certificate:
# this is this kind of smart shit that gets you in really deep trouble later
peek = sock.recv(5, socket.MSG_PEEK)
if peek and len(peek) == 5:
if peek[0] == 0x16:
logging.debug("socket %s from %s appears to start with a TSL handshake, version %d.%d", sock, client_address, peek[1], peek[2])
try:
sock = ssl.SSLSocket(sock=sock, certfile=config.server_certificate, server_side=True)
                    except Exception:
logging.exception("failed to SSL wrap socket %s from %s", sock, client_address)
elif peek[0] == 0x80 and peek[2] == 0x01:
logging.debug("socket %s from %s appears to start with a SSLv2 handshake, supported version %d.%d", sock, client_address, peek[3], peek[4])
try:
sock = ssl.SSLSocket(sock=sock, certfile=config.server_certificate, server_side=True)
                    except Exception:
logging.exception("failed to SSL wrap socket %s from %s", sock, client_address)
return sock, client_address
# def verify_request(self, request, client_address):
# logging.debug("verify %s %s", request, client_address)
# if type(request) == ssl.SSLSocket:
# logging.debug("peer certificate %s", request.getpeercert(binary_form = False))
# return True
def handle_error(self, request, client_address):
etype, evalue = sys.exc_info()[:2]
logging.warn("exception %s %s", etype, evalue)
if etype == socket.error and evalue.errno in (104, 10054):
return
logging.exception("request from %s : %s", client_address, request)
|
import Levenshtein
from math import sqrt
import sys
l_distance = Levenshtein.distance
match = {"inv-match", "sub-match", "sub-match-norm"}
def e_s(x, is_eu):
if is_eu:
return x**2
else:
return x
def e_f(x, is_eu):
if is_eu:
return sqrt(x)
else:
return x
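# Together e_s/e_f switch the accumulation mode: with is_eu=True each per-clue
# Levenshtein distance is squared before summing (e_s) and a square root is taken
# at the end (e_f), giving an L2-style norm -- e.g. per-clue distances [3, 4]
# combine to e_f(e_s(3) + e_s(4)) = sqrt(25) = 5; with is_eu=False they are summed.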
def distance(msg1, msg2, opt=None, is_eu=True):
if opt in match:
msg1.clues.sort()
msg2.clues.sort()
words_1 = msg1.clues
words_2 = msg2.clues
i = 0
j = 0
counter = 0
while i < len(words_1) and j < len(words_2):
clue_1 = words_1[i]
clue_2 = words_2[j]
if clue_1[0] < clue_2[0]:
i += 1
elif clue_1[0] > clue_2[0]:
j += 1
else:
counter += 1
i += 1
j += 1
if opt == "inv-match":
if counter == 0:
return 2
else:
return float(1) / float(counter)
elif opt == "sub-match":
return max(len(msg1.clues), len(msg2.clues)) - counter
elif opt == "sub-match-norm":
denominator = max(len(msg1.clues), len(msg2.clues))
try:
return float(denominator - counter) / denominator
except ZeroDivisionError:
if len(msg1.clues) == 0 & len(msg2.clues) == 0:
return 0
else:
raise AssertionError
if opt is None:
s = 0
for i in range(min(len(msg1.clues), len(msg2.clues))):
s += e_s(l_distance(msg1.clues[len(msg1.clues) - 1 - i][1], msg2.clues[len(msg2.clues) - 1 - i][1]), is_eu)
# This basically adds for overlap; we can try removing this to see
# if distance is a bit more accurate
if len(msg1.clues) != len(msg2.clues):
if len(msg1.clues) > len(msg2.clues):
for i in range(len(msg1.clues) - len(msg2.clues)):
s += e_s(l_distance(msg1.clues[i][1], ""), is_eu)
else:
for i in range(len(msg2.clues) - len(msg1.clues)):
s += e_s(l_distance(msg2.clues[i][1], ""), is_eu)
return e_f(s, is_eu)
if opt == "ac":
s = 0
for i in range(min(len(msg1.allclues), len(msg2.allclues))):
            s += e_s(l_distance(msg1.allclues[len(msg1.allclues) - 1 - i][1], msg2.allclues[len(msg2.allclues) - 1 - i][1]),
                     is_eu)
if len(msg1.allclues) != len(msg2.allclues):
if len(msg1.allclues) > len(msg2.allclues):
for i in range(len(msg1.allclues) - len(msg2.allclues)):
s += e_s(l_distance(msg1.allclues[i][1], ""), is_eu)
else:
for i in range(len(msg2.allclues) - len(msg1.allclues)):
s += e_s(l_distance(msg2.allclues[i][1], ""), is_eu)
return e_f(s, is_eu)
if opt == "ac-trunc":
s = 0
for i in range(min(len(msg1.allclues), len(msg2.allclues))):
s += e_s(l_distance(msg1.allclues[i][1], msg2.allclues[i][1]), is_eu)
return e_f(s, is_eu)
if opt == "trunc":
s = 0
for i in range(min(len(msg1.clues), len(msg2.clues))):
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
return e_f(s, is_eu)
if opt == "extreme":
msg1.clues.sort()
msg2.clues.sort()
s = 0
if len(msg1.clues) >= len(msg2.clues):
i = 0
j = len(msg2.clues) - 1
while i < j:
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
s += e_s(l_distance(msg1.clues[j - len(msg2.clues) + len(msg1.clues)][1], msg2.clues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
i += 1
j -= 1
for i in range(i, j - len(msg2.clues) + len(msg1.clues) + 1):
s += e_s(l_distance(msg1.clues[i][1], ""), is_eu)
else:
i = 0
j = len(msg1.clues) - 1
while i < j:
s += e_s(l_distance(msg2.clues[i][1], msg1.clues[i][1]), is_eu)
s += e_s(l_distance(msg2.clues[j - len(msg1.clues) + len(msg2.clues)][1], msg1.clues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
                s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
                i += 1
                j -= 1
for i in range(i, j - len(msg1.clues) + len(msg2.clues) + 1):
s += e_s(l_distance(msg2.clues[i][1], ""), is_eu)
return e_f(s, is_eu)
if opt == "extreme-trunc":
        msg1.clues.sort()
        msg2.clues.sort()
s = 0
if len(msg1.clues) >= len(msg2.clues):
i = 0
j = len(msg2.clues) - 1
while i < j:
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
s += e_s(l_distance(msg1.clues[j - len(msg2.clues) + len(msg1.clues)][1], msg2.clues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
i += 1
j -= 1
else:
i = 0
j = len(msg1.clues) - 1
while i < j:
s += e_s(l_distance(msg2.clues[i][1], msg1.clues[i][1]), is_eu)
s += e_s(l_distance(msg2.clues[j - len(msg1.clues) + len(msg2.clues)][1], msg1.clues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
s += e_s(l_distance(msg1.clues[i][1], msg2.clues[i][1]), is_eu)
i += 1
j -= 1
return e_f(s, is_eu)
if opt == "ac-extreme":
msg1.allclues.sort()
msg2.allclues.sort()
s = 0
if len(msg1.allclues) >= len(msg2.allclues):
i = 0
j = len(msg2.allclues) - 1
while i < j:
s += e_s(l_distance(msg1.allclues[i][1], msg2.allclues[i][1]), is_eu)
s += e_s(l_distance(msg1.allclues[j - len(msg2.allclues) + len(msg1.allclues)][1],
msg2.allclues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
s += e_s(l_distance(msg1.allclues[i][1], msg2.allclues[i][1]), is_eu)
i += 1
j -= 1
for i in range(i, j - len(msg2.allclues) + len(msg1.allclues) + 1):
s += e_s(l_distance(msg1.allclues[i][1], ""), is_eu)
else:
i = 0
j = len(msg1.allclues) - 1
while i < j:
s += e_s(l_distance(msg2.allclues[i][1], msg1.allclues[i][1]), is_eu)
s += e_s(l_distance(msg2.allclues[j - len(msg1.allclues) + len(msg2.allclues)][1],
msg1.allclues[j][1]), is_eu)
i += 1
j -= 1
if i == j:
s += e_s(l_distance(msg1.allclues[i][1], msg2.allclues[i][1]), is_eu)
for i in range(i, j - len(msg1.allclues) + len(msg2.allclues) + 1):
s += e_s(l_distance(msg2.allclues[i][1], ""), is_eu)
return e_f(s, is_eu)
|
from django.apps import AppConfig
class DescriptionConfig(AppConfig):
name = 'description'
|
def add(x, y):
return x+y
def subtract(x, y):
return abs(x-y)
|
# -*- coding: utf-8 -*-
"""
Conversion extensions for basic LOD models
"""
from datetime import datetime
class ConvertBase:
"""
Common convert extension for basic LOD models
"""
__index_sort_import__ = None
__index_sort_export__ = None
__load_from_file__ = True
__load_to_file__ = True
__load_to_db__ = True
file_name = None
@classmethod
def export_file_name(cls) -> str:
"""
Generate text file name for export class data
:return:
"""
return f"PG_{datetime.now().strftime('%y%m%d%H%M')}_{cls.file_name}"
@classmethod
def export_file_path(cls, export_directory) -> str:
"""
        Return the full file path according to export_file_name
:param export_directory:
:return:
"""
return export_directory + cls.export_file_name()
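    # Example (illustrative): for a subclass such as ConvertAuthor (below), a run at
    # 2021-06-18 12:30 yields export_file_name() == "PG_2106181230_Author.txt" and
    # export_file_path("/tmp/") == "/tmp/PG_2106181230_Author.txt".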
class ConvertAuthor(ConvertBase):
"""
Convert extension for basic Author LOD model
"""
__index_sort_import__ = 1
__index_sort_export__ = 1
file_name = "Author.txt"
class ConvertEvent(ConvertBase):
"""
Convert extension for basic Event LOD model
"""
__index_sort_import__ = 2
__index_sort_export__ = 3
file_name = "LexEvent.txt"
class ConvertKey(ConvertBase):
"""
Convert extension for basic Key LOD model
"""
__index_sort_import__ = 3
__index_sort_export__ = 8
__load_from_file__ = False
__load_to_file__ = False
file_name = "LexEvent.txt"
class ConvertSetting(ConvertBase):
"""
Convert extension for basic Setting LOD model
"""
__index_sort_import__ = 4
__index_sort_export__ = 4
file_name = "Settings.txt"
class ConvertSyllable(ConvertBase):
"""
Convert extension for basic Syllable LOD model
"""
__index_sort_import__ = 5
__index_sort_export__ = 5
file_name = "Syllable.txt"
class ConvertType(ConvertBase):
"""
Convert extension for basic Type LOD model
"""
__index_sort_import__ = 6
__index_sort_export__ = 6
file_name = "Type.txt"
class ConvertDefinition(ConvertBase):
"""
Convert extension for basic Definition LOD model
"""
__index_sort_import__ = 8
__index_sort_export__ = 2
file_name = "WordDefinition.txt"
class ConvertWord(ConvertBase):
"""
Convert extension for basic Word LOD model
"""
__index_sort_import__ = 7
__index_sort_export__ = 8
file_name = "Words.txt"
class ConvertWordSpell(ConvertBase):
"""
Convert extension for basic WordSpell LOD model
"""
__index_sort_import__ = 9
__index_sort_export__ = 7
file_name = "WordSpell.txt"
__load_to_db__ = False
all_models_convert = sorted(
ConvertBase.__subclasses__(),
key=lambda model: model.__index_sort_import__)
|
from django.http import HttpResponse
from rnk.forms import LinkForm
from django.shortcuts import render
from rnk.classifier import callit
from rq import Queue
#import gc
def index(request):
return render(request, 'test.html')
def result(request):
link = "No Link"
#cls = ""
#prb = 0.0
if request.method == "POST":
#Get the posted form
MyLinkForm = LinkForm(data=request.POST)
#print("errors=")
#print(MyLinkForm.errors)
if MyLinkForm.is_valid():
#print("Form is Valid")
link = MyLinkForm.cleaned_data['link']
cls, prb = callit(link)
else:
MyLinkForm = LinkForm()
print("Form Not Valid")
#print("Before delete views")
#print(dir())
#print("Local files")
#print(locals())
#for name in dir():
# if name!='link' and name!='cls' and name!='prb' and not name.startswith('__'):
# del locals()[name]
#print("After delete views")
#print(dir())
#gc.collect()
return render(request, 'test1.html', {"link" : link,
"cls": cls,
"prb": prb})
|
#!/usr/bin/env python
import argparse
import commands
import getpass
import os
import os.path
import sys
import time
from env import gidgetConfigVars
import miscIO
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getFeatureIndex(indexString, featureMatrixFile):
print " <%s> <%s> " % (indexString, featureMatrixFile)
matchList = []
indexList = []
fh = file(featureMatrixFile)
ii = 0
for aLine in fh:
if (aLine.find(indexString) >= 0):
tokenList = aLine.split('\t')
if (tokenList[0].find(indexString) >= 0):
matchList += [tokenList[0]]
indexList += [(ii - 1)]
ii += 1
if (len(matchList) == 0):
print " no matching feature ??? ", indexString
sys.exit(-1)
if (len(matchList) == 1):
return (indexList[0])
for ii in range(len(matchList)):
if (matchList[ii] == indexString):
return (indexList[ii])
for ii in range(len(matchList)):
tokenList = matchList[ii].split(':')
if (tokenList[2] == indexString):
return (indexList[ii])
print " in getFeatureIndex ... too many possible matches ??? "
print matchList
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getNumFeat(featureMatrixFile):
fh = file(featureMatrixFile)
numLines = miscIO.num_lines(fh)
numFeat = numLines - 1
fh.close()
return (numFeat)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getNumSamples(featureMatrixFile):
fh = file(featureMatrixFile)
numCols = miscIO.num_cols(fh, '\t')
numSamples = numCols - 1
fh.close()
return (numSamples)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# input file is assumed to end in .tsv
# this function checks to see if the binFile exists and is up to date
# with respect to the tsvFile ... if necessary, it will call prep4pairwise
# to create the bin file
def preProcessTSV(tsvFile):
tsvTime = os.path.getmtime(tsvFile)
# print tsvTime
binFile = tsvFile[:-4] + ".bin"
catFile = tsvFile[:-4] + ".cat"
try:
binTime = os.path.getmtime(binFile)
# print binTime
except:
binTime = 0
if (tsvTime > binTime):
# just to be sure, delete the *.bin and *.cat files ...
cmdString = "rm -fr %s" % binFile
(status, output) = commands.getstatusoutput(cmdString)
cmdString = "rm -fr %s" % catFile
(status, output) = commands.getstatusoutput(cmdString)
print " creating bin file "
cmdString = "%s %s/prep4pairwise.py %s" % (gidgetConfigVars['TCGAFMP_PYTHON3'], gidgetConfigVars['TCGAFMP_PAIRWISE_ROOT'], tsvFile)
(status, output) = commands.getstatusoutput(cmdString)
if (status != 0):
print " (a) ERROR ??? failed to execute command ??? "
print cmdString
print status
print output
sys.exit(-1)
print " --> bin file created "
# verify that the bin file actually exists now, otherwise bail ...
try:
binTime = os.path.getmtime(binFile)
except:
print " "
print " FATAL ERROR ... prep4pairwise has failed "
print " "
print cmdString
print status
print output
sys.exit(-1)
else:
print " bin file already up to date "
return (binFile)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
#
if __name__ == "__main__":
# ALL necessary inputs should be handled using this ArgumentParser ... there shouldn't
# be any 'left-over' arguments ... any unrecognized command-line inputs will result
# in an error like:
# rkpw_list_gen.py: error: unrecognized arguments: abc def
parser = argparse.ArgumentParser(
description='Create runlist for pairwise')
parser.add_argument('--min-ct-cell', '-minct',
action='store', default=5, type=int)
parser.add_argument('--min-mx-cell', '-minmx',
action='store', default=5, type=int)
parser.add_argument('--min-samples', '-M',
action='store', default=30, type=int)
parser.add_argument('--pvalue', '-p', action='store',
default=0.000001, type=float)
parser.add_argument('--adjP', '-a', action='store_true')
parser.add_argument('--all', '-A', action='store_true')
parser.add_argument('--one', '-O', action='store')
parser.add_argument('--verbosity', '-v',
action='store', default=0, type=int)
parser.add_argument('--tsvFile', '-f', action='store', required=True)
parser.add_argument('--forRE', '-R', action='store_true')
parser.add_argument('--forLisa', '-L', action='store_true')
args = parser.parse_args()
print args
# at this point we should have a Namespace called 'args' that looks something like this:
# Namespace ( tsvFile=['test.tsv'],
# runFile=['test.run'],
## byname=False, input=None,
# min_ct_cell=5, one=None, all=True,
# pvalue=1e-06, tail=0, verbosity=0 )
if (0):
# NEW 19feb13 : need to have either "forRE" or "forLisa" specified so that we know
# what type of post-processing to invoke ...
if (args.forRE):
if (args.forLisa):
print " ERROR : must choose either --forRE or --forLisa, not both "
sys.exit(-1)
else:
if (not args.forLisa):
print " ERROR : must specify either --forRE or --forLisa "
sys.exit(-1)
# note that we must either have an integer value in 'one' or else 'all'
# must be TRUE
print args
indexString = ''
if (not args.all):
        if (args.one is None):
args.all = True
if (0):
print " ERROR: either --all or --one must be specified "
sys.exit(-1)
else:
try:
indexString = str(args.one)
except:
print " could not get index ??? "
print " ERROR: either --all or --one must be specified "
sys.exit(-1)
else:
        if (args.one is not None):
print " ERROR: either --all or --one must be specified, NOT both "
sys.exit(-1)
# get the tsv feature matrix file and also the number of features it
# contains
tsvFile = args.tsvFile
print " input tsv file name <%s> " % tsvFile
if (not os.path.exists(tsvFile)):
print " <%s> is not a valid file, exiting ... " % tsvFile
sys.exit(-1)
if (not tsvFile.endswith(".tsv")):
print " <%s> input file should be a TSV file " % tsvFile
sys.exit(-1)
if (tsvFile[0] != "/"):
print " absolute path name for input file <%s> is required " % tsvFile
sys.exit(-1)
numFeat = getNumFeat(tsvFile)
print " --> number of features : ", numFeat
numSamples = getNumSamples(tsvFile)
print " --> number of samples : ", numSamples
# if the user wants to use the "adjP" option, then we set the p-value based on
# the number of samples ... right now the approach is to do 1.e-X where X=5+(N/100)
# and N is the number of samples
if (args.adjP):
args.pvalue = (1. / 100000.) / float(10. ** (int(numSamples / 100)))
print " --> setting pvalue threshold to : ", args.pvalue
# we need to pre-process the tsv file (unless it appears to have already
# been done)
binFile = preProcessTSV(tsvFile)
# create a random name for this particular run ...
# and then make a subdirectory for the outputs ...
curJobName = miscIO.make_random_fname()
print " "
print " randomly generated job name : <%s> " % curJobName
print " "
tmpDir = "%s/%s" % (gidgetConfigVars['TCGAFMP_CLUSTER_SCRATCH'], curJobName)
cmdString = "mkdir %s" % tmpDir
(status, output) = commands.getstatusoutput(cmdString)
if (not os.path.exists(tmpDir)):
print " mkdir command failed ??? "
print cmdString
sys.exit(-1)
# open the jobInfo file ...
jobFile = tmpDir + "/jobInfo.txt"
try:
fh = file(jobFile, 'w')
except:
print " failed to open output file <%s>, exiting ... " % jobFile
sys.exit(-1)
fh.write("tsvFile = %s\n" % args.tsvFile)
if (args.all):
fh.write("all = TRUE\n")
else:
try:
index = int(args.one)
except:
index = getFeatureIndex(args.one, args.tsvFile)
fh.write("one = %d\n" % index)
if (args.adjP):
fh.write("adjP = TRUE\n")
fh.write("pvalue = %f\n" % args.pvalue)
fh.write("min-samples = %d\n" % args.min_samples)
fh.write("min-ct-cell = %d\n" % args.min_ct_cell)
fh.write("min-mx-cell = %d\n" % args.min_mx_cell)
fh.close()
# next open the runFile ...
runFile = tmpDir + "/runList.txt"
try:
fh = file(runFile, 'w')
except:
print " failed to open output file <%s>, exiting ... " % runFile
sys.exit(-1)
pythonbin = sys.executable
golempwd = "PASSWD_HERE"
fhC = file ( gidgetConfigVars['TCGAFMP_CLUSTER_SCRATCH'] + "/config", 'r' )
aLine = fhC.readline()
fhC.close()
aLine = aLine.strip()
golempwd = aLine
print " got this ... <%s> " % golempwd
if (args.all):
# handle the all by all option ...
# calling with these options:
# --outer index:index:1 --inner +1::1
numJobs = 0
for index in range(numFeat - 1):
outName = tmpDir + "/" + str(index) + ".pw"
cmdString = "1 " + gidgetConfigVars['TCGAFMP_PAIRWISE_ROOT'] + "/pairwise-2.1.2"
cmdString += " --pvalue %g --min-ct-cell %d --min-mx-cell %d --min-samples %d" \
% (args.pvalue, args.min_ct_cell, args.min_mx_cell, args.min_samples)
cmdString += " --outer %d:%d:1 --inner +1::1 %s %s " \
% (index, index + 1, binFile, outName)
fh.write("%s\n" % cmdString)
numJobs += 1
else:
# handle the single index vs all option ...
outName = tmpDir + "/" + str(index) + ".pw"
cmdString = "1 " + gidgetConfigVars['TCGAFMP_PAIRWISE_ROOT'] + "/pairwise-2.1.2"
cmdString += " --pvalue %g --min-ct-cell %d --min-mx-cell %d --min-samples %d" \
% (args.pvalue, args.min_ct_cell, args.min_mx_cell, args.min_samples)
cmdString += " --outer %d:%d:1 --inner 0::1 %s %s " \
% (index, index + 1, binFile, outName)
fh.write("%s\n" % cmdString)
numJobs = 1
fh.close()
# ok, now we want to actually launch the jobs ...
cmdString = "python %s/main/golem.py " % gidgetConfigVars['TCGAFMP_ROOT_DIR']
cmdString += "http://glados.systemsbiology.net:7083 -p " + golempwd + " "
cmdString += "-L pairwiseRK -u "
cmdString += getpass.getuser() + " "
cmdString += "runlist " + runFile
print cmdString
(status, output) = commands.getstatusoutput(cmdString)
print status
print output
print " "
print " "
print " --------------- "
done = 0
while not done:
numOutFiles = 0
for aName in os.listdir(tmpDir):
if (aName.endswith(".pw")):
numOutFiles += 1
print numOutFiles
if (numOutFiles == numJobs):
done = 1
tSleep = max(5, int((numJobs - numOutFiles) / 200))
time.sleep(tSleep)
print " should be done !!! ", numOutFiles, numJobs
tSleep = 5
time.sleep(tSleep)
# if there was only one job, then we're done now ...
if (numJobs == 1):
print " handling a one-by-all run ... "
# first we run post_rkpw.py which writes
# out something that looks like the output from runPWPV
iOne = index
cmdString = "python %s/main/post_pwRK2.py %s %s %d" % (gidgetConfigVars['TCGAFMP_ROOT_DIR'], tmpDir,
tsvFile, iOne)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
print status, output
cmdString = "sort -grk 5 --temporary-directory=%s/ %s/post_proc_all.tsv >& %s/%d.all.pwpv.sort" % (
gidgetConfigVars['TCGAFMP_CLUSTER_SCRATCH'], tmpDir, tmpDir, iOne)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
print status, output
cmdString = "mv %s/%d.all.pwpv.sort %s.%d.all.pwpv.sort" % (tmpDir,
iOne, tsvFile[:-4], iOne)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
print status, output
print "\n\n DONE \n\n"
        sys.exit(0)
# now that the job is finished, we need to handle the post-processing
if (args.forRE):
print " post-processing for RE ... "
# first we run post_rkpw.py which concatenates them all and writes
# out something that looks like the output from runPWPV
cmdString = "python %s/main/post_pwRK2.py %s %s" % (gidgetConfigVars['TCGAFMP_ROOT_DIR'], tmpDir,
tsvFile)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
# and then we run the script that sorts and trims the output file
cmdString = "%s/shscript/proc_pwpv2.sh %s" % (gidgetConfigVars['TCGAFMP_ROOT_DIR'], tmpDir)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
# and now we move the files that we want to keep ...
cmdString = "mv %s/post_proc_all.short.sort.mapped.noPathway %s.pwpv.forRE" % (
tmpDir, tsvFile[:-4])
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
cmdString = "mv %s/post_proc_all.tsv %s.pwpv" % (tmpDir, tsvFile[:-4])
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
elif (args.forLisa):
print " post-processing for Lisa's pancan analysis ... "
# first we run post_rkpw.py which concatenates them all and writes
# out something that looks like the output from runPWPV
cmdString = "python %s/main/post_pwRK2.py %s %s" % (gidgetConfigVars['TCGAFMP_ROOT_DIR'], tmpDir,
tsvFile)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
# at this point we have post_proc_all.tsv
# and post_proc_all.NGEXP.NGEXP.tmp
cmdString = "%s/shscript/proc_pancan.sh %s" % (gidgetConfigVars['TCGAFMP_ROOT_DIR'], tmpDir)
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
# and now we move the files that we want to keep ...
cmdString = "mv %s/post_proc_all.NGEXP.NGEXP.tmp.sort.top1M %s.pwpv.NGEXP.NGEXP.top1M" % (
tmpDir, tsvFile[:-4])
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
cmdString = "mv %s/post_proc_all.NGEXP.NGEXP.tmp.sort %s.pwpv.NGEXP.NGEXP.all" % (
tmpDir, tsvFile[:-4])
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
cmdString = "mv %s/post_proc_all.tsv %s.pwpv" % (tmpDir, tsvFile[:-4])
print " < %s > " % cmdString
(status, output) = commands.getstatusoutput(cmdString)
print "\n\n DONE \n\n"
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
|
from __future__ import absolute_import, division, print_function
# XXX most of this code is unused when run from the command line, but the
# PHENIX GUI includes a simple frontend that uses the phil interface.
import libtbx.phil
import libtbx.phil.command_line
from libtbx.utils import Sorry
#from libtbx import easy_run
import sys
import os
import iotbx.pdb
from mmtbx import conformation_dependent_library
from mmtbx.conformation_dependent_library import cdl_utils
from mmtbx.conformation_dependent_library.cdl_database import cdl_database
master_phil = libtbx.phil.parse("""
cdl_lookup
.caption = CDL
{
residue_names = None
.type = str
residue_group_class = None
.type = choice
phi_psi_angles = None
.type = str
}""")
def run2(args=(), out=sys.stdout):
argument_interpreter = libtbx.phil.command_line.argument_interpreter(
master_phil=master_phil,
home_scope="cdl_lookup")
phils = []
phil_args = []
pdbs = []
for arg in args:
if os.path.isfile(arg):
if iotbx.pdb.is_pdb_file(arg):
pdbs.append(arg)
continue
try :
        file_phil = libtbx.phil.parse(file_name=arg)
except RuntimeError :
pass
else :
phils.append(file_phil)
else :
phil_args.append(arg)
phils.append(argument_interpreter.process(arg))
working_phil = master_phil.fetch(sources=phils)
working_phil.show()
working_params = working_phil.extract()
if working_params.cdl_lookup.residue_group_class is None:
working_params.cdl_lookup.residue_group_class = cdl_utils.get_res_type_group(
*tuple(working_params.cdl_lookup.residue_names.split(",")[1:])
)
print("\nPeptide triplet class : %s" % working_params.cdl_lookup.residue_group_class)
key = working_params.cdl_lookup.phi_psi_angles.split(",")
key[0] = int(key[0])
key[1] = int(key[1])
key = tuple(key)
restraints_values = cdl_database[working_params.cdl_lookup.residue_group_class][key]
outl = conformation_dependent_library.restraints_show(restraints_values)
print("\nCDL values\n%s" % outl)
return restraints_values
def validate_params(params):
  if (params.cdl_lookup.residue_names is None):
    raise Sorry("No residue names specified!")
  return True
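# Illustrative invocation (hypothetical values, matching the phil scope defined above):
#   python cdl_lookup.py residue_names=GLY,ALA,GLY phi_psi_angles=-60,-40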
if __name__ == "__main__" :
run2(sys.argv[1:])
|
from django.http import HttpResponse
import logging
logger = logging.getLogger(__name__)
# Create your views here.
def helloworld(request):
    logger.error('Hello world DJ4E in the log...')
print('Hello world f231f210 DJ4E in a print statement...')
visits = request.session.get('visits', 0) + 1
request.session['visits'] = visits
if visits > 3:
del (request.session['visits'])
response = """<html><body><p>f231f210 Hello world DJ4E in HTML</p>
<p>view count="""+str(visits)+"""</p>
</body></html>"""
resp = HttpResponse(response)
resp.set_cookie('dj4e_cookie', 'f231f210', max_age=1000)
return resp
|
# -*- coding: utf-8 -*-
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', '')
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
# built in
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# elvanto subgroups
'elvanto_subgroups',
# 3rd party
'social.apps.django_app.default',
'bootstrap3',
'django_extensions',
'rest_framework',
]
MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.request",
'django.core.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'social.apps.django_app.context_processors.backends',
'social.apps.django_app.context_processors.login_redirect',
)
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'elvanto_subgroups.urls'
WSGI_APPLICATION = 'elvanto_subgroups.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'GMT'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Static files (CSS, JavaScript, Images)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, '..', 'elvanto_subgroups', 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Elvanto
ELVANTO_KEY = os.environ.get('ELVANTO_KEY', '')
ELVANTO_PEOPLE_PAGE_SIZE = 1000 # must be 10 or larger
# social login settings
SOCIAL_AUTH_URL_NAMESPACE = 'social'
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_MODEL = 'elvanto_subgroups'
SOCIAL_AUTH_USER_MODEL = 'auth.User'
SOCIAL_AUTH_STRATEGY = 'social.strategies.django_strategy.DjangoStrategy'
LOGIN_URL = '/login/google-oauth2'
LOGIN_ERROR_URL = '/'
LOGIN_REDIRECT_URL = '/'
# Google auth credentials
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', '')
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET', '')
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS', '').replace(' ', '').split(',')
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_EMAILS = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_EMAILS', '').replace(' ', '').split(',')
|
from __future__ import print_function
import tensorflow as tf
# Explicitly create a computation graph
graph = tf.Graph()
with graph.as_default():
# Declare one-dimensional tensors (vectors)
input1 = tf.constant([1.0, 2.0])
input2 = tf.constant([3.0, 4.0])
# Add the two tensors
output = tf.add(input1, input2)
# print the operations stored by the graph
print(graph.get_operations())
# Evaluate the graph in a session
with tf.Session(graph = graph):
result = output.eval()
print("result: ", result)
# Evaluate using the default graph
with tf.Session():
input1 = tf.constant([1.0, 2.0])
input2 = tf.constant([3.0, 4.0])
output = tf.add(input1, input2)
# Show the operations in the default graph
print(tf.get_default_graph().get_operations())
result = output.eval()
print("result: ", result)
# Evaluate a matrix-vector multiplication
matmul_graph = tf.Graph()
with matmul_graph.as_default():
# Declare a 2x2 matrix and a 2x1 vector
matrix = tf.constant([[1.0, 2.0], [3.0, 4.0]])
vector = tf.constant([[1.0], [2.0]])
# Matrix multiply (matmul) the two tensors
output = tf.matmul(matrix, vector)
with tf.Session(graph = matmul_graph):
result = output.eval()
print(result)
# Evaluate repeated matrix-vector multiplications
var_graph = tf.Graph()
with var_graph.as_default():
# Declare a constant 2x2 matrix and a variable 2x1 vector
matrix = tf.constant([[1.0, 1.0], [1.0, 1.0]])
vector = tf.Variable([[1.0], [1.0]])
# Multiply the matrix and vector 4 times
for _ in range(4):
vector = tf.matmul(matrix, vector)
with tf.Session(graph = var_graph):
# Initialize the variables we defined above.
tf.global_variables_initializer().run()
result = vector.eval()
print(result)
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import os
from datadog_checks.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
COMPOSE = os.path.join(HERE, 'compose')
ROOT = os.path.dirname(os.path.dirname(HERE))
CHECK_NAME = 'gunicorn'
HOST = get_docker_hostname()
PORT = 18000
PROC_NAME = 'dd-test-gunicorn'
CONTAINER_NAME = 'dd-test-gunicorn'
INSTANCE = {'proc_name': PROC_NAME}
GUNICORN_VERSION = os.getenv('GUNICORN_VERSION')
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import math
import random
#y^2=x^3+ax+b mod n
prime=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271 ]
# ax+by=gcd(a,b). This function returns [gcd(a,b),x,y]. Source Wikipedia
def extended_gcd(a,b):
x,y,lastx,lasty=0,1,1,0
while b!=0:
q=a/b
a,b=b,a%b
x,lastx=(lastx-q*x,x)
y,lasty=(lasty-q*y,y)
if a<0:
return (-a,-lastx,-lasty)
else:
return (a,lastx,lasty)
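# Worked example: extended_gcd(240, 46) returns (2, -9, 47), since 240*(-9) + 46*47 = 2 = gcd(240, 46).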
# pick first a point P=(u,v) with random non-zero coordinates u,v (mod N), then pick a random non-zero A (mod N),
# then take B = v^2 - u^3 - A*u (mod N), so that P lies on the curve.
# http://en.wikipedia.org/wiki/Lenstra_elliptic_curve_factorization
def randomCurve(N):
A,u,v=random.randrange(N),random.randrange(N),random.randrange(N)
B=(v*v-u*u*u-A*u)%N
return [(A,B,N),(u,v)]
# Given the curve y^2 = x^3 + ax + b over the field K (whose characteristic we assume to be
# neither 2 nor 3), and points P = (xP, yP) and Q = (xQ, yQ) on the curve, assume first that
# xP != xQ. Let the slope of the line s = (yP - yQ)/(xP - xQ); since K is a field, s is
# well-defined. Then we can define R = P + Q = (xR, -yR) by
#   s  = (yP - yQ)/(xP - xQ)  Mod N
#   xR = s^2 - xP - xQ        Mod N
#   yR = yP + s(xR - xP)      Mod N
# If xP = xQ, there are two options: if yP = -yQ, including the case where yP = yQ = 0, then
# the sum is defined as 0 [Identity]; thus, the inverse of each point on the curve is found by
# reflecting it across the x-axis. If yP = yQ != 0, then R = P + P = 2P = (xR, -yR) is given by
#   s  = (3 xP^2 + a)/(2 yP)  Mod N
#   xR = s^2 - 2 xP           Mod N
#   yR = yP + s(xR - xP)      Mod N
# http://en.wikipedia.org/wiki/Elliptic_curve#The_group_law
def addPoint(E,p_1,p_2):
if p_1=="Identity": return [p_2,1]
if p_2=="Identity": return [p_1,1]
a,b,n=E
(x_1,y_1)=p_1
(x_2,y_2)=p_2
x_1%=n
y_1%=n
x_2%=n
y_2%=n
if x_1 != x_2 :
d,u,v=extended_gcd(x_1-x_2,n)
s=((y_1-y_2)*u)%n
x_3=(s*s-x_1-x_2)%n
y_3=(-y_1-s*(x_3-x_1))%n
else:
if (y_1+y_2)%n==0:return ["Identity",1]
else:
d,u,v=extended_gcd(2*y_1,n)
s=((3*x_1*x_1+a)*u)%n
x_3=(s*s-2*x_1)%n
y_3=(-y_1-s*(x_3-x_1))%n
return [(x_3,y_3),d]
# http://en.wikipedia.org/wiki/Elliptic_curve_point_multiplication
# Q=0 [Identity element]
# while m:
# if (m is odd) Q+=P
# P+=P
# m/=2
# return Q
def mulPoint(E,P,m):
Ret="Identity"
d=1
while m!=0:
if m%2!=0: Ret,d=addPoint(E,Ret,P)
        if d!=1 : return [Ret,d] # as soon as we get anything other than 1, return it
P,d=addPoint(E,P,P)
if d!=1 : return [Ret,d]
m>>=1
return [Ret,d]
def ellipticFactor(N,m,times=5):
for i in xrange(times):
E,P=randomCurve(N);
Q,d=mulPoint(E,P,m)
if d!=1 : return d
return N
if __name__=="__main__":
n=input()
for p in prime:#preprocessing
while n%p==0:
print p
n/=p
m=int(math.factorial(2000))
while n!=1:
k=ellipticFactor(n,m)
n/=k
print k
|
import random
import json
import pickle
import numpy as np
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation, Dropout
from tensorflow.keras.optimizers import SGD
lemmatizer = WordNetLemmatizer()
intents = json.loads(open('intents.json').read())
words = []
classes = []
documents = []
ignore_letters = ['!', '?', '.', ',']
for intent in intents['intents']:
    for pattern in intent['patterns']:
        word_list = nltk.word_tokenize(pattern)
        words.extend(word_list)
        documents.append((word_list, intent['tag']))
        if intent['tag'] not in classes:
            classes.append(intent['tag'])
words = [lemmatizer.lemmatize(word) for word in words if word not in ignore_letters]
words = sorted(set(words))
classes = sorted(set(classes))
pickle.dump(words, open('words.pkl', 'wb'))
pickle.dump(classes, open('classes.pkl', 'wb'))
training = []
output_empty = [0] * len(classes)
for document in documents:
    bag = []
    word_patterns = document[0]
    word_patterns = [lemmatizer.lemmatize(word.lower()) for word in word_patterns]
    for word in words:
        bag.append(1 if word in word_patterns else 0)
    output_row = list(output_empty)
    output_row[classes.index(document[1])] = 1
    training.append([bag, output_row])
random.shuffle(training)
training = np.array(training, dtype=object)
train_x = list(training[:, 0])
train_y = list(training[:, 1])
model = Sequential()
model.add(Dense(128, input_shape=(len(train_x[0]), ), activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(len(train_y[0]), activation='softmax'))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
hist = model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)
model.save('chatbotmodel.h5')
print("finally, you did it")
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
def content_file_name(instance, filename):
return '/'.join(['pics', instance.user.username, filename])
class appField(models.Model):
appField = models.CharField(max_length=30)
def __str__(self):
return (self.appField)
class UserProfile(models.Model):
#Use case selection
USE_CASE = (
('1', 'Use Case 1: Comparison between stratospheric ozone model output and satellite observations'),
('2', 'Use Case 2: Model validation tool'),
('3', 'Use Case 3: Characterization of optical and microphysical properties of aerosol'),
('4', 'Use Case 4: ECARE lidar/ CALIPSO Simulation'),
('5', 'Use Case 5: Development of Scientific L2 products based on OMI instruments'),
('6', 'Use Case 6: Model Quality Assessment'),
('7', 'Use Case 7: Re-grid and time average satellite data'),
('8', 'Use Case 8: Model Validation against satellite data (Aerosol NO2, trace gases)'),
)
#Group selection
GROUP = (
('G1', 'Group 1'),
('G2', 'Group 2'),
('G3', 'Group 3'),
)
user = models.OneToOneField(User)
affiliation = models.TextField(blank=True, null=True)
use_case = models.CharField(max_length=3, choices=USE_CASE, blank=True, null=True)
application_field = models.ManyToManyField(appField, blank=True)
group = models.CharField(max_length=2, choices=GROUP, blank=True, null=True)
user_pic = models.ImageField(upload_to=content_file_name, blank=True, null=True)
def __unicode__(self):
return self.user.username
|
# -*- coding: utf-8 -*-
"""`sphinx_minoo_theme` on `Github`_.
.. _github: https://github.com/saeiddrv/SphinxMinooTheme
"""
from setuptools import setup
from sphinx_minoo_theme import __version__
setup(
name = "sphinx_minoo_theme",
version = __version__.info(),
    url = "https://github.com/saeiddrv/SphinxMinooTheme",
license = "MIT",
author = "Saeid Darvishi",
author_email = "saeid.dq@gmail.com",
description = "A simple Sphinx theme with RTL language support.",
long_description = open("README.rst").read(),
zip_safe = False,
packages = ["sphinx_minoo_theme"],
package_data = {"sphinx_minoo_theme": [
"theme.conf",
"*.html",
"includes/*.html",
"static/*.css",
"static/*.js",
"static/*.jpg",
"static/fonts/*.*"
]},
include_package_data = True,
install_requires = open("requirements.txt").read().splitlines(),
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
"Intended Audience :: System Administrators",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
'Topic :: Documentation',
'Topic :: Software Development :: Documentation',
],
)
|
"""Lovelace04_PartB.py"""
"""Licensed Under the MIT License: CHECK IN REPO: https://github.com/Rongmario/FoP-T1-Assignment-2020"""
__author__ = "Rongmario"
def main(expression: str): # Expects user's input in the function (string)
if len(expression) == 0:
return "Empty string." # Output when user's input is empty
first, signs = expression[0], {'+', '-'} # Using tuple unpacking to assign variables
if not expression[-1].isdigit() or not first.isdigit() and first not in signs:
# Output when final char is not a number or when first char is an illegal character
return "Invalid expression."
splits = [first] # Store first character as starting element in list
for i in expression[1:]:
# Make new element with current index if it is a sign and if last element's last character is a number
if splits[-1][-1].isdigit() and i in signs:
splits.append(i)
elif i.isdigit() or i in signs:
splits[-1] += i # If current is an accepted character, append onto last element
else:
return "Invalid expression." # Output when an illegal character is found
    ''' Sum of everything in the list; a list comprehension converts each 'str' element to 'int'.
    Bundles of operators are replaced with what they evaluate to ('--' collapses to nothing,
    since a double negative is a plus, and '+' is stripped anyway because Python's int()
    doesn't allow multiple + signs to be cast '''
return sum([int(x.replace('+', '').replace('--', '')) for x in splits])
user_input = input("Please enter an arithmetic expression: ")
result = main(user_input)
if type(result) == int:
    # Format is used here so I can put a full stop straight after the variable without a space dividing it
print("The result is {0}.\nGoodbye.".format(result))
else:
print(result)
|
'''ips .txt'''
def ip_ok(ip):
ip = ip.split('.')
for byte in ip:
if int(byte) > 255:
return False
return True
arq=open('ips.txt')
validos=open('válidos.txt','w')
invalidos=open('inválidos.txt','w')
for linha in arq.readlines():
if ip_ok(linha):
validos.write(linha)
else:
invalidos.write(linha)
arq.close()
validos.close()
invalidos.close()
|
import sys
import random
import os
import re
import time
def set_option(option):
if option == 'R': return '⊚ - Rock'
if option == 'P': return '▮ - Paper'
if option == 'S': return '✂ - Scissors'
return 'x'
def user_won(result):
if result:
print('┌───────────────────────────────────┐')
print('│ 🎉 🎉 YOU WIN!!! │')
print('└───────────────────────────────────┘')
else:
print('┌───────────────────────────────────┐')
print('│ 😥 😥 You lose... │')
print('└───────────────────────────────────┘')
input('Exit game...')
def rock_paper():
os.system('cls' if os.name=='nt' else 'clear')
print('┌───────────────────────────────────┐')
print('│ 3. Rock, Paper, Scissors! │')
print('└───────────────────────────────────┘')
print()
print("I'ts a best of five game")
print('The first one in reach 3 core points wins')
input('Start...')
user_points = 0
cpu_points = 0
playing = True
while playing:
if user_points == 3:
user_won(True)
break
if cpu_points == 3:
user_won(False)
break
os.system('cls' if os.name=='nt' else 'clear')
print('┌───────────────────────────────────┐')
print('│ USER CPU │')
print('│ Score: {} {} │'.format(user_points, cpu_points))
print('└───────────────────────────────────┘')
print('Rock, Paper, Scissors - Shoot!')
        user_pick = input('→ Choose your weapon [R]ock, [P]aper, or [S]cissors: ')
        if not re.fullmatch('[SsRrPp]', user_pick):
            print('Oops! \nSorry, that is an invalid character :(')
input()
continue
print('Preparing my choice...')
time.sleep(1)
options = ['R', 'P', 'S']
npc_pick = random.choice(options)
user_pick = str.upper(user_pick)
print('\nUSER: {}'.format(set_option(user_pick)))
print('\nNPC: {}'.format(set_option(npc_pick)))
print('\nSo the winner is...')
time.sleep(3)
if npc_pick == user_pick:
print("It's a DRAW! \nNo points distribution")
input('Continue...')
elif (npc_pick == 'R' and user_pick == 'S') or (npc_pick == 'P' and user_pick == 'R') or (npc_pick == 'S' and user_pick == 'P'):
print('I win! \n1 Point for CPU')
cpu_points += 1
input('Continue...')
else:
print('You win! \n1 Point for USER')
user_points += 1
input('Continue...')
sys.modules[__name__] = rock_paper
|
import requests
import bs4
import jk_pypiorgapi
from errors.errors_api import UrlNotFound, SiteError
from errors.errors_pack import MultiplePackageFound, PackageNotFound
EXT_BUILD = {"bdist_wheel": "whl", "sdist": ("tar.gz", "zip")}
URL_SIMPLE_PYPI = "https://pypi.org/simple/"
URL_PYPI = "https://pypi.org/"
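# get_end_ext: return a filename's archive extension, keeping compound "tar.gz" intact;
# get_file: strip zip/tar/gz components from a filename, leaving the base name.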
get_end_ext = lambda h : h.split(".", len(h.split("."))-2)[-1] if list(filter(lambda a:a in["tar", "gz"], h.split("."))) else h.split('.')[-1]
get_file = lambda h : ".".join(filter(lambda i: i not in ["zip", "tar", "gz"], h.split('.')))
def get_all_url(sitename, prefix_url='', suffix_url=''):
"""get all href in site"""
resp = requests.get(sitename)
if resp.status_code == 404:
raise UrlNotFound("url not found: %s" % resp.url)
elif resp.status_code == 500:
        raise SiteError("an error occurred at %s" % sitename)
soup = bs4.BeautifulSoup(resp.text, 'html.parser')
urls = []
for a in soup.find_all('a'):
if a.has_attr("href"):
urls.append({'href': prefix_url + a['href'] + suffix_url, 'text': a.text})
return urls
def get_list_module():
"""get list module of pypi"""
api = jk_pypiorgapi.PyPiOrgAPI()
packageNames = api.listAllPackages()
return [i[1] for i in packageNames]
def get_module_url_src(name_module):
"""get url source of module"""
list_module = get_list_module()
list_module_find = list(filter(lambda v: v["text"] == name_module, list_module))
if len(list_module_find) > 1:
raise MultiplePackageFound("find multiple package")
elif len(list_module_find) < 1:
raise PackageNotFound("not found: %s " % name_module)
module = list_module_find[0]
return module
def no_letter(string):
    """Return True if the string contains no alphabetic character."""
    return not any(i.isalpha() for i in string)
|
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Configure the system logger for the default pw command log format."""
import logging
from typing import NamedTuple, Optional
import pw_cli.color
import pw_cli.env
import pw_cli.plugins
# Log level used for captured output of a subprocess run through pw.
LOGLEVEL_STDOUT = 21
class LogLevel(NamedTuple):
level: int
color: str
ascii: str
emoji: str
# Shorten all the log levels to 3 characters for column-aligned logs.
# Color the logs using ANSI codes.
# pylint: disable=bad-whitespace
# yapf: disable
_LOG_LEVELS = (
LogLevel(logging.CRITICAL, 'bold_red', 'CRT', '☠️ '),
LogLevel(logging.ERROR, 'red', 'ERR', '❌'),
LogLevel(logging.WARNING, 'yellow', 'WRN', '⚠️ '),
LogLevel(logging.INFO, 'magenta', 'INF', 'ℹ️ '),
LogLevel(LOGLEVEL_STDOUT, 'cyan', 'OUT', '💬'),
LogLevel(logging.DEBUG, 'blue', 'DBG', '👾'),
)
# yapf: enable
# pylint: enable=bad-whitespace
_LOG = logging.getLogger(__name__)
_STDERR_HANDLER = logging.StreamHandler()
def main():
# Force the log level to make sure all logs are shown.
_LOG.setLevel(logging.DEBUG)
# Log one message for every log level.
_LOG.critical('Something terrible has happened!')
_LOG.error('There was an error on our last operation')
_LOG.warning('Looks like something is amiss; consider investigating')
_LOG.info('The operation went as expected')
_LOG.log(LOGLEVEL_STDOUT, 'Standard output of subprocess')
_LOG.debug('Adding 1 to i')
def install(level: int = logging.INFO,
use_color: Optional[bool] = None,
hide_timestamp: bool = False) -> None:
"""Configure the system logger for the default pw command log format."""
colors = pw_cli.color.colors(use_color)
env = pw_cli.env.pigweed_environment()
if env.PW_SUBPROCESS or hide_timestamp:
# If the logger is being run in the context of a pw subprocess, the
# time and date are omitted (since pw_cli.process will provide them).
timestamp_fmt = ''
else:
# This applies a gray background to the time to make the log lines
# distinct from other input, in a way that's easier to see than plain
# colored text.
timestamp_fmt = colors.black_on_white('%(asctime)s') + ' '
# Set log level on root logger to debug, otherwise any higher levels
# elsewhere are ignored.
root = logging.getLogger()
root.setLevel(logging.DEBUG)
_STDERR_HANDLER.setLevel(level)
_STDERR_HANDLER.setFormatter(
logging.Formatter(timestamp_fmt + '%(levelname)s %(message)s',
'%Y%m%d %H:%M:%S'))
root.addHandler(_STDERR_HANDLER)
if env.PW_EMOJI:
name_attr = 'emoji'
colorize = lambda ll: str
else:
name_attr = 'ascii'
colorize = lambda ll: getattr(colors, ll.color)
for log_level in _LOG_LEVELS:
name = getattr(log_level, name_attr)
logging.addLevelName(log_level.level, colorize(log_level)(name))
def set_level(log_level: int):
"""Sets the log level for logs to stderr."""
_STDERR_HANDLER.setLevel(log_level)
# Note: normally this shouldn't be done at the top level without a try/catch
# around the pw_cli.plugins registry import, since pw_cli might not be
# installed.
pw_cli.plugins.register(
name='logdemo',
    short_help='Show how logs look at various levels',
command_function=main,
)
if __name__ == '__main__':
install()
main()
|
# uu.formlibrary package
import sys
import logging
from zope.i18nmessageid import MessageFactory
PKGNAME = 'uu.formlibrary'
_ = MessageFactory(PKGNAME)
product_log = logging.getLogger(PKGNAME)
product_log.addHandler(logging.StreamHandler(sys.stderr))
|
from bar_simulation.person import Person
def main():
p1 = Person()
print("Number of instances in Person:", Person.get_no_instances())
p2 = Person()
p3 = Person()
print("Number of instances in Person:", Person.get_no_instances())
del p3
print("Number of instances in Person:", Person.get_no_instances())
if __name__ == "__main__":
main()
|
import json
def get_dso_id(data, dso_name):
for index, keys in enumerate(data['features']):
if dso_name == keys['id']:
return index
return -1
def add_dso_to_catalog(catalog, dso_id, source_catalog_name):
# Search if the object already is on the catalog
for dso in catalog:
if dso["id"] == dso_id:
dso["appears_on"].append(source_catalog_name)
break
# Else add the object to the catalog
else:
catalog.append({
"id": dso_id,
"appears_on": [source_catalog_name],
})
# Names of DSOs by catalog
source_catalogs = [
{
"name": "Binosky",
"dso_names": ["NGC104", "M31", "NGC292", "NGC869", "NGC884", "Mel20",
"M45", "Mel25", "M42", "NGC1981", "M43", "M35", "NGC2232", "M41",
"M47", "NGC2451", "NGC2516", "M44", "IC2391", "NGC3114", "IC2602",
"NGC3532", "Mel111", "NGC4755", "NGC5139", "NGC6231", "M6", "M7",
"M8", "NGC6530", "M39"],
},
{
"name": "Caldwell",
"dso_names": ["NGC188", "NGC40", "NGC4236", "NGC7023", "IC 342",
"NGC6543", "Hyades", "CoalSack", "NGC2403", "NGC559", "Sh2-155",
"NGC663", "NGC7635", "NGC6946", "NGC457", "NGC869", "NGC6826",
"NGC7243", "NGC147", "NGC185", "IC 5146", "NGC7000", "NGC4449",
"NGC7662", "NGC891", "NGC1275", "NGC2419", "NGC4244", "NGC6888",
"NGC752", "NGC5005", "NGC7331", "IC 405", "NGC4631", "NGC6992",
"NGC6960", "NGC4889", "NGC4559", "NGC6885", "NGC4565", "NGC2392",
"NGC3626", "NGC7006", "NGC7814", "NGC7479", "NGC5248", "NGC2261",
"NGC6934", "NGC2775", "NGC2237", "NGC2244", "IC 1613", "NGC4697",
"NGC3115", "NGC2506", "NGC7009", "NGC246", "NGC6822", "NGC2360",
"NGC3242", "NGC4038", "NGC4039", "NGC247", "NGC7293", "NGC2362",
"NGC253", "NGC5694", "NGC1097", "NGC6729", "NGC6302", "NGC300",
"NGC2477", "NGC55", "NGC1851", "NGC3132", "NGC6124", "NGC6231",
"NGC5128", "NGC6541", "NGC3201", "NGC5139", "NGC6352", "NGC6193",
"NGC4945", "NGC5286", "IC 2391", "NGC6397", "NGC1261", "NGC5823",
"NGC6087", "NGC2867", "NGC3532", "NGC3372", "NGC6752", "NGC4755",
"NGC6025", "NGC2516", "NGC3766", "NGC4609", "IC 2944", "NGC6744",
"IC 2602", "NGC2070", "NGC362", "NGC4833", "NGC104", "NGC6101",
"NGC4372", "NGC3195"],
},
{
"name": "110 Deep-Sky Highlights",
"dso_names": ["NGC55", "NGC104", "M31", "NGC253", "NGC300", "NGC362",
"NGC457", "SMC", "LMC", "M33", "NGC869", "NGC884", "M77", "NGC1291",
"NGC1313", "NGC1316", "NGC1435", "NGC1535", "NGC1851", "M1",
"NGC1977", "M42", "NGC2024", "M78", "M37", "NGC2070", "M35", "M41",
"NGC2392", "NGC2403", "M46", "NGC2440", "M93", "NGC2477", "NGC2516",
"NGC2547", "M67", "NGC2808", "NGC2903", "M81", "M82", "NGC3114",
"NGC3115", "NGC3201", "NGC3242", "NGC3293", "NGC3372", "NGC3532",
"NGC3521", "M66", "NGC3766", "NGC3918", "M106", "M86", "M49", "M87",
"M104", "M60", "M94", "NGC4755", "M64", "NGC4833", "NGC4945", "M63",
"NGC5128", "NGC5139", "M51", "M83", "M3", "M101", "M5", "NGC6067",
"M4", "M13", "M12", "NGC6231", "M10", "M62", "M19", "M92",
"NGC6388", "M6", "NGC6397", "M7", "M23", "M20", "M8", "NGC6541",
"M16", "M17", "M28", "M22", "M11", "M57", "NGC6744", "NGC6752",
"M55", "NGC6818", "NGC6822", "M27", "NGC6946", "NGC6992", "NGC7009",
"NGC7027", "M15", "M2", "M52", "NGC7662", "NGC7789"]
},
{
"name": "Benett",
"dso_names": ["NGC55", "NGC104", "NGC247", "NGC253", "NGC288", "NGC300",
"NGC362", "NGC613", "NGC1068", "NGC1097", "NGC1232", "NGC1261",
"NGC1291", "NGC1313", "NGC1316", "NGC1350", "NGC1360", "NGC1365",
"NGC1380", "NGC1387", "NGC1399", "NGC1398", "NGC1404", "NGC1433",
"NGC1512", "NGC1535", "NGC1549", "NGC1553", "NGC1566", "NGC1617",
"NGC1672", "NGC1763", "NGC1783", "NGC1792", "NGC1818", "NGC1808",
"NGC1851", "NGC1866", "NGC1904", "NGC2070", "NGC2214", "NGC2243",
"NGC2298", "NGC2467", "NGC2489", "NGC2506", "NGC2627", "NGC2671",
"NGC2808", "NGC2972", "NGC2997", "NGC3115", "NGC3132", "NGC3201",
"NGC3242", "NGC3621", "Mel105", "NGC3960", "NGC3923", "NGC4372",
"NGC4590", "NGC4594", "NGC4697", "NGC4699", "NGC4753", "NGC4833",
"NGC4945", "NGC4976", "NGC5061", "NGC5068", "NGC5128", "NGC5139",
"NGC5189", "NGC5236", "NGC5253", "NGC5286", "NGC5617", "NGC5634",
"NGC5824", "NGC5897", "NGC5927", "NGC5986", "NGC5999", "NGC6005",
"Tr23", "NGC6093", "NGC6101", "NGC6121", "NGC6134", "NGC6144",
"NGC6139", "NGC6171", "NGC6167", "NGC6192", "NGC6218", "NGC6216",
"NGC6235", "NGC6254", "NGC6253", "NGC6266", "NGC6273", "NGC6284",
"NGC6287", "NGC6293", "NGC6304", "NGC6316", "NGC6318", "NGC6333",
"NGC6356", "NGC6352", "NGC6362", "NGC6388", "NGC6402", "NGC6397",
"NGC6440", "NGC6445", "NGC6441", "NGC6496", "NGC6522", "NGC6528",
"NGC6544", "NGC6541", "NGC6553", "NGC6569", "NGC6584", "NGC6603",
"NGC6618", "NGC6624", "NGC6626", "NGC6638", "NGC6637", "NGC6642",
"NGC6652", "NGC6656", "NGC6681", "NGC6705", "NGC6712", "NGC6715",
"NGC6723", "NGC6744", "NGC6752", "NGC6809", "NGC6818", "NGC6864",
"NGC6981", "NGC7009", "NGC7089", "NGC7099", "NGC7293", "NGC7410",
"IC1459", "NGC7793"],
},
{
"name": "AAAA Northern Urban",
"dso_names": ["NGC129", "NGC221", "NGC224", "NGC457", "NGC663", "Cr463",
"NGC752", "Stock2", "NGC869", "NGC884", "Tr2", "NGC1068", "Tr3",
"Stock23", "Mel20", "NGC1342", "M45", "Hyades", "NGC1647",
"NGC1807", "NGC1817", "NGC1912", "NGC1960", "NGC1976", "NGC1981",
"NGC2099", "NGC2168", "NGC2169", "NGC2232", "NGC2244", "NGC2264",
"NGC2281", "NGC2287", "NGC2301", "NGC2323", "NGC2392", "NGC2539",
"NGC2548", "NGC2632", "NGC2682", "NGC3031", "NGC3034", "NGC3242",
"Mel111", "NGC4374", "NGC4406", "NGC4486", "NGC4594", "NGC4736",
"NGC4826", "NGC5272", "NGC5904", "NGC6121", "NGC6205", "NGC6210",
"NGC6218", "NGC6254", "NGC6266", "NGC6341", "NGC6405", "IC4665",
"NGC6475", "NGC6520", "NGC6523", "NGC6618", "NGC6633", "NGC6656",
"IC4756", "NGC6705", "NGC6709", "NGC6720", "Cr399", "NGC6818",
"NGC6826", "NGC6853", "NGC6910", "NGC6934", "NGC6940", "NGC7009",
"NGC7078", "NGC7089", "NGC7092", "NGC7160", "NGC7209", "NGC7243",
"NGC7662", "NGC7789"],
},
{
"name": "Southern Sky Binocular",
"dso_names": ["NGC104", "SMC", "NGC362", "NGC1261", "NGC1851", "LMC",
"NGC2070", "NGC2451", "NGC2477", "NGC2516", "NGC2547", "NGC2546",
"NGC2627", "IC2391", "IC2395", "NGC2659", "NGC2670", "NGC2808",
"IC2488", "NGC2910", "NGC2925", "NGC3114", "NGC3201", "NGC3228",
"NGC3293", "Mel101", "IC2602", "NGC3372", "NGC3532", "IC2714",
"Mel105", "NGC3766", "NGC4052", "NGC4103", "NGC4337", "NGC4349",
"H5", "NGC4463", "H6", "NGC4609", "COALSACK", "NGC4755", "NGC4815",
"NGC4833", "NGC4852", "NGC5128", "NGC5139", "NGC5286", "NGC5316",
"NGC5460", "NGC5617", "NGC5662", "NGC5822", "NGC5823", "NGC5925",
"NGC6025", "NGC6067", "H10", "NGC6087", "NGC6124", "NGC6134",
"NGC6152", "NGC6167", "NGC6208", "NGC6231", "H13", "IC4651",
"NGC6352", "NGC6362", "NGC6397", "NGC6541", "NGC6584", "NGC6752"],
},
]
if __name__ == "__main__":
# Catalog to export on json format, e.g.:
# catalog = [
# {
# id: 6217,
# appears_on: ["Binosky", "Caldwell"],
# },
# {
# id: 37,
# appears_on: ["Binosky"],
# },
# ]
catalog = []
with open('../app/www/data/dsos.14.json', 'r') as f:
data = json.load(f)
for source_catalog in source_catalogs:
for dso_name in source_catalog["dso_names"]:
dso_id = get_dso_id(data, dso_name)
if dso_id == -1:
print("dso_name {} not found on data file".format(dso_name))
else:
add_dso_to_catalog(catalog, dso_id, source_catalog["name"])
with open('../app/www/app/catalog.js', 'w') as f:
catalog_json = json.dumps(catalog)
f.write(
"// File generated using tools/generate_catalog.py, do not touch\n"
"export const catalog = " + catalog_json + ";"
)
|
from factory import Factory, Faker
from pss_project.api.models.rest.metadata.JenkinsMetadata import JenkinsMetadata
class JenkinsMetadataFactory(Factory):
class Meta:
model = JenkinsMetadata
jenkins_job_id = Faker('pystr_format', string_format='###')
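# Illustrative usage (not in the original): calling the factory builds a
# JenkinsMetadata instance with a random three-digit jenkins_job_id string.
#   metadata = JenkinsMetadataFactory()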
|
import sqlalchemy as sa
from sqlalchemy.orm import relationship
import geoalchemy2 as ga
from .database import Base
from api.schemas import Gender, InterestedIn
class User(Base):
__tablename__ = "users"
id = sa.Column(sa.String, primary_key=True, index=True)
gender = sa.Column(sa.Enum(Gender, name="gender_enum", create_type=False), nullable=False)
dob = sa.Column(sa.Date(), nullable=False)
location = sa.Column(ga.Geography(geometry_type='POINT', srid=4326), nullable=False)
pref_interested_in = sa.Column(sa.Enum(InterestedIn, name="pref_interested_in_enum",
create_type=False), nullable=False)
pref_age_min = sa.Column(sa.Integer, nullable=False, default=18)
pref_age_max = sa.Column(sa.Integer, nullable=False, default=60)
pref_distance = sa.Column(sa.Integer, nullable=False, default=20)
registered_at = sa.Column(sa.DateTime(timezone=True), server_default=sa.func.now())
tracks = relationship("Track", back_populates="users", cascade="all, delete")
tastes = relationship("MusicTaste", back_populates="users", cascade="all, delete")
class Track(Base):
__tablename__ = 'tracks'
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
user_id = sa.Column(sa.String, sa.ForeignKey("users.id", ondelete="CASCADE"))
track = sa.Column(sa.String(length=40), nullable=False)
users = relationship("User", back_populates="tracks")
class MusicTaste(Base):
__tablename__ = 'musictaste'
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
user_id = sa.Column(sa.String, sa.ForeignKey("users.id", ondelete="CASCADE"))
vector = sa.Column(sa.ARRAY(sa.Float), nullable=False)
created_on = sa.Column(sa.DateTime(timezone=True), server_default=sa.func.now())
users = relationship("User", back_populates="tastes")
class RightSwipe(Base):
__tablename__ = 'rightswipes'
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
swiper = sa.Column(
sa.String,
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=False
)
swipee = sa.Column(
sa.String,
sa.ForeignKey("users.id", ondelete="CASCADE"),
nullable=False
)
created_on = sa.Column(sa.DateTime(timezone=True), server_default=sa.func.now())
|
# Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of the
# License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import print_function
import base64
import boto3
# Encrypt data key
# Replace the fictitious key ARN with a valid key ID
key_id = 'arn:aws:kms:us-west-2:111122223333:key/0987dcba-09fe-87dc-65ba-ab0987654321'
region_name = 'us-west-2'
client = boto3.client('kms', region_name=region_name)
text = '1234567890'
response = client.encrypt(
KeyId=key_id,
Plaintext=text,
)
print('Encrypted ciphertext:', response['CiphertextBlob'])
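# Illustrative counterpart (not in the original): decrypt the ciphertext with the
# same client; KMS infers the key from metadata embedded in the ciphertext blob.
decrypted = client.decrypt(CiphertextBlob=response['CiphertextBlob'])
print('Decrypted plaintext:', decrypted['Plaintext'])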
|
#!/usr/bin/python3
''' Main interface for Candida '''
from arguments import parse_arguments
# process the command-line input and the help
cli_arguments = parse_arguments()
print("*** Candida ***")
if cli_arguments.a is not None:
    print("* Searching for '%s' *" % cli_arguments.a)
else:
print("Use -a to say what software you want to remove")
print("Use -h for help")
|
# Generated by Django 3.2.4 on 2021-06-17 14:09
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("geneinfo", "0023_materialized_view_geneidinhpo_fix"),
]
operations = [
migrations.AlterField(
model_name="ncbigenerif",
name="pubmed_ids",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=16), default=list, size=None
),
),
]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from pymongo import MongoClient
conn = MongoClient('127.0.0.1', 27017)
db = conn.mydb
my_set = db.test_set
my_set.insert({"name":"joe001","age":3})
my_set.insert({"name":"joe002","age":4})
my_set.insert({"name":"joe003","age":5})
for i in my_set.find():
print(i)
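# Illustrative follow-up (not in the original): query with a filter document.
for doc in my_set.find({"age": {"$gt": 3}}):
    print(doc)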
|
# AUTHOR: Sanket Khadse
# Python3 Concept: Find first and last positions of an element in a sorted array
# GITHUB: https://github.com/edusanketdk
def sol_1(ar: list, x: int) -> tuple:
"""using index function"""
return (ar.index(x), len(ar)-ar[::-1].index(x)-1) if x in ar else (-1, -1)
def sol_2(ar: list, x: int) -> tuple:
"""using single traversal"""
f, l = -1, -1
for i in range(len(ar)):
if ar[i] == x:
if f == -1: f = i
l = i
return (f, l)
def sol_3(ar: list, x: int) -> list:
"""using binary search"""
    ans = [-1, -1]
    n = len(ar)
    l, r = 0, n-1
while l <= r:
m = l + (r-l)//2
if ar[m] == x and (m == 0 or ar[m-1] < x):
ans[0] = m
break
elif ar[m] < x:
l, r = m+1, r
else:
l, r = l, m-1
l, r = 0, n-1
while l <= r:
m = l + (r-l)//2
if ar[m] == x and (m == n-1 or ar[(m+1)%n] > x):
ans[1] = m
break
elif ar[m] > x:
l, r = l, m-1
else:
l, r = m+1, r
return ans
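# Quick illustrative check (not in the original): all three solutions agree.
if __name__ == "__main__":
    ar = [1, 2, 2, 2, 3, 4, 4, 5]
    print(sol_1(ar, 2), sol_2(ar, 2), sol_3(ar, 2))  # (1, 3) (1, 3) [1, 3]
    print(sol_1(ar, 4), sol_2(ar, 4), sol_3(ar, 4))  # (5, 6) (5, 6) [5, 6]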
|
import versioneer
import os
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
# Dependencies.
with open('requirements.txt') as f:
requirements = f.readlines()
install_requires = [t.strip() for t in requirements]
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dinosar',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='SAR processing on the Cloud',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/scottyhq/dinosar',
author='Scott Henderson',
author_email='scottyh@uw.edu',
maintainer='Scott Henderson',
maintainer_email='scottyh@uw.edu',
python_requires='>=3.6',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering'
],
keywords=['SAR', 'Cloud', 'Batch', 'AWS'],
packages=find_packages(),
install_requires=install_requires,
scripts=['bin/get_inventory_asf.py'],
)
|
# Copyright 2020 Marcello Yesca
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from threading import Thread
from queue import Queue
from enum import Flag, auto
from mycroft.util.log import LOG
class PrinterCommand(Flag):
RESET = auto()
NEW_LINE = auto()
LETTER_ON = auto()
LETTER_OFF = auto()
UNDERLINE_ON = auto()
UNDERLINE_OFF = auto()
SUPERSCRIPT_ON = auto()
SUPERSCRIPT_OFF = auto()
SUBSCRIPT_ON = auto()
SUBSCRIPT_OFF = auto()
CONDENSED_ON = auto()
CONDENSED_OFF = auto()
EXPANDED_ON = auto()
EXPANDED_OFF = auto()
class Printer(Thread):
"""
Writes data to Printer port.
#. Enqueues all commands received from Mycroft enclosures
implementation
#. Process them on the received order by writing on the Printer port
"""
def __init__(self, bus, size=16):
super(Printer, self).__init__(target=self.flush)
self.alive = True
self.daemon = True
self.bus = bus
self.chuncks = Queue(size)
self.bus.on("mycroft.stop", self.stop)
LOG.debug("Starting printer thread")
self.start()
def flush(self):
raise NotImplementedError
def print(self, chunck):
self.chuncks.put(chunck)
def command(self, cmd):
if cmd == PrinterCommand.RESET:
chunck = b'\x1B\x40\x1B\x51\x50' # \x1B\x43\x5A form size 90
elif cmd == PrinterCommand.NEW_LINE:
chunck = b'\x0A'
elif cmd == PrinterCommand.LETTER_ON:
chunck = b'\x1B\x47'
elif cmd == PrinterCommand.LETTER_OFF:
chunck = b'\x1B\x48'
elif cmd == PrinterCommand.UNDERLINE_ON:
chunck = b'\x1B\x5F\x01'
elif cmd == PrinterCommand.UNDERLINE_OFF:
chunck = b'\x1B\x5F\x00'
elif cmd == PrinterCommand.SUPERSCRIPT_ON:
chunck = b'\x1B\x53\x00'
elif cmd == PrinterCommand.SUPERSCRIPT_OFF:
chunck = b'\x1B\x54'
elif cmd == PrinterCommand.SUBSCRIPT_ON:
chunck = b'\x1B\x53\x01'
elif cmd == PrinterCommand.SUBSCRIPT_OFF:
chunck = b'\x1B\x54'
elif cmd == PrinterCommand.CONDENSED_ON:
chunck = b'\x0F'
elif cmd == PrinterCommand.CONDENSED_OFF:
chunck = b'\x12'
elif cmd == PrinterCommand.EXPANDED_ON:
chunck = b'\x1B\x57\x01'
elif cmd == PrinterCommand.EXPANDED_OFF:
chunck = b'\x1B\x57\x00'
else:
chunck = None
if chunck is not None:
self.chuncks.put(chunck)
    def stop(self, message=None):
        self.alive = False
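# A minimal sketch (not part of the original): a concrete subclass whose flush()
# drains the queue and writes each chunk to a hypothetical file-like port object,
# assumed to expose write(bytes).
class FilePrinter(Printer):
    def __init__(self, bus, port, size=16):
        self.port = port  # must be set before the base class starts the thread
        super(FilePrinter, self).__init__(bus, size)
    def flush(self):
        # Process queued chunks in arrival order until stopped.
        while self.alive:
            self.port.write(self.chuncks.get())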
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# pylint: disable=C0103,W0703
"""
yaml_check.py
Check if there is syntax error in YAML file. If it's good, do a pretty printing in JSON format.
"""
import sys
import os
import argparse
import json
import yaml
parser: argparse.ArgumentParser = argparse.ArgumentParser(prog=os.path.basename(__file__)
, description="yaml_check.py: YAML syntax check")
parser.add_argument("yaml_file", help='specify the yaml file')
args = parser.parse_args()
fn_yaml = args.yaml_file
fp_yaml = open(fn_yaml, "r")
try:
    doc = yaml.full_load(fp_yaml)
except yaml.YAMLError as e:
print("YAML file syntax error: %s"%fn_yaml)
print(e)
sys.exit(-1)
json.dump(doc, sys.stdout, indent=4, ensure_ascii=False)
print("\n\n==== Syntax of this YAML is good! ====\n")
|
# Generated by Django 2.2.2 on 2019-06-28 18:53
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('oracle_db', '0002_auto_20190627_1541'),
]
operations = [
migrations.AlterModelOptions(
name='xxtmp_po_headers',
options={'managed': False},
),
migrations.AlterModelOptions(
name='xxtmp_po_lines',
options={'managed': False},
),
]
|
# -*- coding: utf-8 -*-
import urllib, urllib2, json
def coord(address):
params = {
'address' : address,
'sensor' : 'false',
}
url = 'https://maps.googleapis.com/maps/api/geocode/json?' + urllib.urlencode(params)
response = urllib2.urlopen(url)
result = json.load(response)
try:
        coords = result['results'][0]['geometry']['location']
        return "%s,%s" % (coords['lat'], coords['lng'])
except:
return None
def address(latitude_and_longitude):
    url = 'https://maps.googleapis.com/maps/api/geocode/json?latlng=%s' % latitude_and_longitude
response = urllib2.urlopen(url)
result = json.load(response)
try:
return str(result['results'][0]['formatted_address'].encode('utf-8'))
except:
return None
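# Illustrative usage (not in the original; requires network access and, for
# current Google APIs, an API key appended to the request):
#   print coord('1600 Amphitheatre Parkway, Mountain View, CA')
#   print address('37.4224,-122.0842')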
|
#
# PySNMP MIB module BDCOM-QOS-PIB-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BDCOM-QOS-PIB-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:36:45 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
bdcomPibToMib, = mibBuilder.importSymbols("BDCOM-SMI", "bdcomPibToMib")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
NotificationType, Bits, ModuleIdentity, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Counter64, iso, Integer32, IpAddress, Gauge32, TimeTicks, MibIdentifier, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Bits", "ModuleIdentity", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Counter64", "iso", "Integer32", "IpAddress", "Gauge32", "TimeTicks", "MibIdentifier", "Unsigned32")
TextualConvention, TruthValue, MacAddress, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "TruthValue", "MacAddress", "DisplayString")
bdcomQosPIBMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1))
bdcomQosPIBMIB.setRevisions(('2003-10-16 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: bdcomQosPIBMIB.setRevisionsDescriptions(('Initial version of this MIB.',))
if mibBuilder.loadTexts: bdcomQosPIBMIB.setLastUpdated('200310160000Z')
if mibBuilder.loadTexts: bdcomQosPIBMIB.setOrganization('BDCOM, Inc.')
if mibBuilder.loadTexts: bdcomQosPIBMIB.setContactInfo(' Tel: +86-21-50800666 Postal: No.123,Juli RD,Zhangjiang Hitech Park, Shanghai Baud Data Communication Corporation Inc, Shanghai City 201203, P.R.C ')
if mibBuilder.loadTexts: bdcomQosPIBMIB.setDescription('The BDCOM QOS Policy PIB for provisioning QOS policy.')
class Dscp(TextualConvention, Integer32):
description = 'An integer that is in the range of the DiffServ codepoint values.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 63)
class QosLayer2Cos(TextualConvention, Integer32):
description = 'An integer that is in the range of the layer 2 CoS values. This corresponds to the 802.1p and ISL CoS values.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 7)
class QueueRange(TextualConvention, Integer32):
description = 'An integer that is limited to the number of queues per interface supported by the PIB. Limited to 64 which is the number of codepoints.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 8, 16, 32, 64))
namedValues = NamedValues(("oneQ", 1), ("twoQ", 2), ("threeQ", 3), ("fourQ", 4), ("eightQ", 8), ("sixteenQ", 16), ("thirtyTwoQ", 32), ("sixtyFourQ", 64))
class ThresholdSetRange(TextualConvention, Integer32):
description = 'An integer that is limited to the number of threshold sets per queue supported by the PIB. A threshold set is a collection of parameters describing queue threshold. The parameters of a threshold set depend on the drop mechanism the queue implements. For example, the threshold set for tail-drop comprises a single parameter, the percentage of queue size at which dropping occurs. The threshold set for WRED comprises two parameters; within the range of the two parameters packets are randomly dropped.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 4, 8))
namedValues = NamedValues(("zeroT", 0), ("oneT", 1), ("twoT", 2), ("fourT", 4), ("eightT", 8))
class Percent(TextualConvention, Integer32):
description = 'An integer that is in the range of a percent value.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 100)
class QosInterfaceQueueType(TextualConvention, Integer32):
    description = 'An enumerated type for all the known interface types. The interface types are currently limited to a predefined combination of queues and thresholds such that the product of queues and thresholds does not exceed 64 (i.e., the total number of DSCPs).'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33))
namedValues = NamedValues(("oneQ1t", 1), ("oneQ2t", 2), ("oneQ4t", 3), ("oneQ8t", 4), ("twoQ1t", 5), ("twoQ2t", 6), ("twoQ4t", 7), ("twoQ8t", 8), ("threeQ1t", 9), ("threeQ2t", 10), ("threeQ4t", 11), ("threeQ8t", 12), ("fourQ1t", 13), ("fourQ2t", 14), ("fourQ4t", 15), ("fourQ8t", 16), ("eightQ1t", 17), ("eightQ2t", 18), ("eightQ4t", 19), ("eightQ8t", 20), ("sixteenQ1t", 21), ("sixteenQ2t", 22), ("sixteenQ4t", 23), ("sixtyfourQ1t", 24), ("sixtyfourQ2t", 25), ("sixtyfourQ4t", 26), ("oneP1Q0t", 27), ("oneP1Q4t", 28), ("oneP1Q8t", 29), ("oneP2Q1t", 30), ("oneP2Q2t", 31), ("oneP3Q1t", 32), ("oneP7Q8t", 33))
class QosInterfaceTypeCapabilities(TextualConvention, Bits):
description = 'An enumeration of interface capabilities. Used by the PDP to select policies and configuration to push to the PEP.'
status = 'current'
namedValues = NamedValues(("unspecified", 0), ("inputL2Classification", 1), ("inputIpClassification", 2), ("outputL2Classification", 3), ("outputIpClassification", 4), ("inputPortClassification", 19), ("outputPortClassification", 20), ("inputUflowPolicing", 5), ("inputAggregatePolicing", 6), ("outputUflowPolicing", 7), ("outputAggregatePolicing", 8), ("policeByMarkingDown", 9), ("policeByDropping", 10), ("inputUflowShaping", 21), ("inputAggregateShaping", 22), ("outputUflowShaping", 23), ("outputAggregateShaping", 24), ("fifo", 11), ("wrr", 12), ("wfq", 13), ("cq", 14), ("pq", 15), ("cbwfq", 16), ("pqWrr", 25), ("pqCbwfq", 26), ("tailDrop", 17), ("wred", 18))
class RoleCombination(TextualConvention, OctetString):
description = "A Display string consisting of a set of roles concatenated with a '+' character where the roles are in lexicographic order from minimum to maximum."
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 255)
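# --- Illustrative sketch (not part of the generated MIB) -------------------
# Per the RoleCombination description, roles are concatenated with '+' in
# lexicographic order. A hypothetical encoder/decoder pair under that
# assumption:
def make_role_combination(roles):
    """Encode a set of role names as a RoleCombination display string."""
    return '+'.join(sorted(roles))

def split_role_combination(combo):
    """Decode a RoleCombination display string back into a list of roles."""
    return combo.split('+') if combo else []

# e.g. make_role_combination({'frame-relay', 'backbone'}) yields
# 'backbone+frame-relay'.
# ---------------------------------------------------------------------------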
class PolicyInstanceId(TextualConvention, Unsigned32):
description = 'A textual convention for an unsigned integer index attribute of a policy class. It is used for attributes that exist for the purpose of providing an integer index of an instance. Any integer index that refers to another policy instance must refer to an instance that exists. Furthermore, it is an error to try to delete a policy instance that is referred to by another instance without first deleting the referring instance.'
status = 'current'
class Unsigned64(TextualConvention, Counter64):
description = 'An unsigned 64 bit integer. We use SYNTAX Counter64 for the encoding rules.'
status = 'current'
qosPIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1))
qosDeviceConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2))
qosDevicePibIncarnationTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1), )
if mibBuilder.loadTexts: qosDevicePibIncarnationTable.setStatus('current')
if mibBuilder.loadTexts: qosDevicePibIncarnationTable.setDescription('This class contains a single policy instance that identifies the current incarnation of the PIB and the PDP that installed this incarnation. The instance of this class is reported to the PDP at client connect time so that the PDP can (attempt to) ascertain the current state of the PIB.')
qosDevicePibIncarnationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosDeviceIncarnationId"))
if mibBuilder.loadTexts: qosDevicePibIncarnationEntry.setStatus('current')
if mibBuilder.loadTexts: qosDevicePibIncarnationEntry.setDescription('The single policy instance of this class identifies the current incarnation of the PIB and the PDP that installed this incarnation.')
qosDeviceIncarnationId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosDeviceIncarnationId.setStatus('current')
if mibBuilder.loadTexts: qosDeviceIncarnationId.setDescription('An integer index to identify the instance of the policy class.')
qosDevicePdpName = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDevicePdpName.setStatus('current')
if mibBuilder.loadTexts: qosDevicePdpName.setDescription('The name of the PDP that installed the current incarnation of the PIB into the device. By default it is the zero length string.')
qosDevicePibIncarnation = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(128, 128)).setFixedLength(128)).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDevicePibIncarnation.setStatus('current')
if mibBuilder.loadTexts: qosDevicePibIncarnation.setDescription('An octet string to identify the current incarnation. It has meaning to the PDP that installed the PIB and perhaps its standby PDPs. By default the empty string.')
qosDevicePibTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 1, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDevicePibTtl.setStatus('current')
if mibBuilder.loadTexts: qosDevicePibTtl.setDescription('The number of seconds after a client close or TCP timeout for which the PEP continues to enforce the policy in the PIB. After this interval, the PIB is considered expired and the device no longer enforces the policy installed in the PIB.')
qosDeviceAttributeTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2), )
if mibBuilder.loadTexts: qosDeviceAttributeTable.setStatus('current')
if mibBuilder.loadTexts: qosDeviceAttributeTable.setDescription('The single instance of this class indicates specific attributes of the device. These include configuration values such as the configured PDP addresses, the maximum message size, and specific device capabilities. The latter include input port-based and output port-based classification and/or policing, support for flow based policing, aggregate based policing, traffic shaping capabilities, etc.')
qosDeviceAttributeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosDeviceAttributeId"))
if mibBuilder.loadTexts: qosDeviceAttributeEntry.setStatus('current')
if mibBuilder.loadTexts: qosDeviceAttributeEntry.setDescription('The single instance of this class indicates specific attributes of the device.')
qosDeviceAttributeId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosDeviceAttributeId.setStatus('current')
if mibBuilder.loadTexts: qosDeviceAttributeId.setDescription('An integer index to identify the instance of the policy class.')
qosDevicePepDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDevicePepDomain.setStatus('current')
if mibBuilder.loadTexts: qosDevicePepDomain.setDescription('The QoS domain that this device belongs to. This is configured locally on the device (perhaps by some management protocol such as SNMP). By default, it is the zero-length string.')
qosDevicePrimaryPdp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDevicePrimaryPdp.setStatus('current')
if mibBuilder.loadTexts: qosDevicePrimaryPdp.setDescription('The address of the PDP configured to be the primary PDP for the device.')
qosDeviceSecondaryPdp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDeviceSecondaryPdp.setStatus('current')
if mibBuilder.loadTexts: qosDeviceSecondaryPdp.setDescription('The address of the PDP configured to be the secondary PDP for the device. An address of zero indicates no secondary is configured.')
qosDeviceMaxMessageSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDeviceMaxMessageSize.setStatus('current')
if mibBuilder.loadTexts: qosDeviceMaxMessageSize.setDescription("The maximum size message that this PEP is capable of receiving in bytes. A value of zero means that the maximum message size is unspecified (but does not mean it is unlimited). A message greater than this maximum results in a MessageTooBig error on a 'no commit' REP.")
qosDeviceCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 2, 1, 6), Bits().clone(namedValues=NamedValues(("unspecified", 0), ("layer2Cos", 1), ("ipPrecedence", 2), ("dscp", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDeviceCapabilities.setStatus('current')
if mibBuilder.loadTexts: qosDeviceCapabilities.setDescription('An enumeration of device capabilities. Used by the PDP to select policies and configuration to push to the PEP.')
qosInterfaceTypeTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3), )
if mibBuilder.loadTexts: qosInterfaceTypeTable.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeTable.setDescription('This class describes the interface types of the interfaces that exist on the device. It includes the queue type, role combination and capabilities of interfaces. The PEP does not report which specific interfaces have which characteristics.')
qosInterfaceTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosInterfaceTypeId"))
if mibBuilder.loadTexts: qosInterfaceTypeEntry.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeEntry.setDescription('An instance of this class describes a role combination for an interface type of an interface that exists on the device.')
qosInterfaceTypeId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosInterfaceTypeId.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeId.setDescription('An integer index to identify the instance of the policy class.')
qosInterfaceQueueType = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3, 1, 2), QosInterfaceQueueType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosInterfaceQueueType.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceQueueType.setDescription('The interface type in terms of number of queues and thresholds.')
qosInterfaceTypeRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3, 1, 3), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosInterfaceTypeRoles.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeRoles.setDescription('A combination of roles on at least one interface of type qosInterfaceType.')
qosInterfaceTypeCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 2, 3, 1, 4), QosInterfaceTypeCapabilities()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosInterfaceTypeCapabilities.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeCapabilities.setDescription('An enumeration of interface capabilities. Used by the PDP to select policies and configuration to push to the PEP.')
qosDomainConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3))
qosDiffServMappingTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 1), )
if mibBuilder.loadTexts: qosDiffServMappingTable.setStatus('current')
if mibBuilder.loadTexts: qosDiffServMappingTable.setDescription('Maps each DSCP to a marked-down DSCP. Also maps each DSCP to an IP precedence and QosLayer2Cos. When configured for the first time, all 64 entries of the table must be specified. Thereafter, instances may be modified (with a delete and install in a single decision) but not deleted unless all instances are deleted.')
qosDiffServMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosDscp"))
if mibBuilder.loadTexts: qosDiffServMappingEntry.setStatus('current')
if mibBuilder.loadTexts: qosDiffServMappingEntry.setDescription('An instance of this class represents mappings from a DSCP.')
qosDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 1, 1, 1), Dscp())
if mibBuilder.loadTexts: qosDscp.setStatus('current')
if mibBuilder.loadTexts: qosDscp.setDescription('A DSCP for which this entry contains mappings.')
qosMarkedDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 1, 1, 2), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosMarkedDscp.setStatus('current')
if mibBuilder.loadTexts: qosMarkedDscp.setDescription('The DSCP to use instead of the qosDscp when the packet is out of profile and hence marked as such.')
qosL2Cos = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 1, 1, 3), QosLayer2Cos()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosL2Cos.setStatus('current')
if mibBuilder.loadTexts: qosL2Cos.setDescription('The L2 CoS value to use when mapping this DSCP to layer 2 CoS.')
qosCosToDscpTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 2), )
if mibBuilder.loadTexts: qosCosToDscpTable.setStatus('current')
if mibBuilder.loadTexts: qosCosToDscpTable.setDescription('Maps each of eight CoS values to a DSCP. When configured for the first time, all 8 entries of the table must be specified. Thereafter, instances may be modified (with a delete and install in a single decision) but not deleted unless all instances are deleted.')
qosCosToDscpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 2, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosCosToDscpCos"))
if mibBuilder.loadTexts: qosCosToDscpEntry.setStatus('current')
if mibBuilder.loadTexts: qosCosToDscpEntry.setDescription('An instance of this class maps a CoS value to a DSCP.')
qosCosToDscpCos = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 2, 1, 1), QosLayer2Cos())
if mibBuilder.loadTexts: qosCosToDscpCos.setStatus('current')
if mibBuilder.loadTexts: qosCosToDscpCos.setDescription('The L2 CoS value that is being mapped.')
qosCosToDscpDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 3, 2, 1, 2), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosCosToDscpDscp.setStatus('current')
if mibBuilder.loadTexts: qosCosToDscpDscp.setDescription('The DSCP value to use when mapping the L2 CoS to a DSCP.')
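# --- Illustrative sketch (not part of the generated MIB) -------------------
# qosDiffServMappingTable and qosCosToDscpTable behave like dictionaries
# keyed by DSCP (0..63) and CoS (0..7). A hypothetical in-memory model of a
# PEP's mapping state, with method names invented for illustration:
class DiffServMappings(object):
    def __init__(self):
        self.marked_down = {}   # qosDscp -> qosMarkedDscp
        self.dscp_to_cos = {}   # qosDscp -> qosL2Cos
        self.cos_to_dscp = {}   # qosCosToDscpCos -> qosCosToDscpDscp

    def mark_down(self, dscp):
        """DSCP to use when a packet with this DSCP is out of profile."""
        return self.marked_down[dscp]

    def l2_cos(self, dscp):
        """L2 CoS to use when mapping this DSCP to layer 2."""
        return self.dscp_to_cos[dscp]

    def dscp_for_cos(self, cos):
        """DSCP to assign when mapping an L2 CoS to a DSCP."""
        return self.cos_to_dscp[cos]
# ---------------------------------------------------------------------------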
qosUnmatchedPolicy = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4))
qosUnmatchedPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1), )
if mibBuilder.loadTexts: qosUnmatchedPolicyTable.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyTable.setDescription('A policy class that specifies what QoS to apply to a packet that does not match any other policy configured for this role combination for a particular direction of traffic.')
qosUnmatchedPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyId"))
if mibBuilder.loadTexts: qosUnmatchedPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyEntry.setDescription('An instance of this class specifies the unmatched policy for a particular role combination for incoming or outgoing traffic.')
qosUnmatchedPolicyId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosUnmatchedPolicyId.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyId.setDescription('An integer index to identify the instance of the policy class.')
qosUnmatchedPolicyRole = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchedPolicyRole.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyRole.setDescription('Role combination for which this instance applies.')
qosUnmatchedPolicyDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("in", 0), ("out", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchedPolicyDirection.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyDirection.setDescription('The direction of packet flow at the interface in question to which this instance applies.')
qosUnmatchedPolicyDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 4), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchedPolicyDscp.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyDscp.setDescription('The DSCP to classify the unmatched packet with. This must be specified even if qosUnmatchedPolicyDscpTrusted is true.')
qosUnmatchedPolicyDscpTrusted = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 5), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchedPolicyDscpTrusted.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyDscpTrusted.setDescription('If this attribute is true, then the Dscp associated with the packet is trusted, i.e., it is assumed to have already been set. In this case, the Dscp is not rewritten with qosUnmatchedPolicyDscp (qosUnmatchedPolicyDscp is ignored) unless this is a non-IP packet and arrives untagged. The packet is still policed as part of its micro flow and its aggregate flow. When a trusted action is applied to an input interface, the Dscp (for an IP packet) or CoS (for a non-IP packet) associated with the packet is the one contained in the packet. When a trusted action is applied to an output interface, the Dscp associated with the packet is the one that is the result of the input classification and policing.')
qosUnmatchPolMicroFlowPolicerId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 6), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchPolMicroFlowPolicerId.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchPolMicroFlowPolicerId.setDescription('An index identifying the instance of policer to apply to unmatched packets. It must correspond to the integer index of an instance of class qosPolicerTable or be zero. If zero, the microflow is not policed.')
qosUnmatchedPolicyAggregateId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 4, 1, 1, 7), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosUnmatchedPolicyAggregateId.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyAggregateId.setDescription('An index identifying the aggregate that the packet belongs to. It must correspond to the integer index of an instance of class qosAggregateTable or be zero. If zero, the microflow does not belong to any aggregate and is not policed as part of any aggregate.')
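# --- Illustrative sketch (not part of the generated MIB) -------------------
# The trust rule described for qosUnmatchedPolicyDscpTrusted, modelled on
# hypothetical packet attributes (is_ip, is_tagged) rather than PIB objects:
def classify_unmatched(pkt_dscp, is_ip, is_tagged, policy_dscp, trusted):
    """Return the DSCP an unmatched packet is classified with."""
    if trusted and (is_ip or is_tagged):
        # Trusted: keep the DSCP/CoS already associated with the packet.
        return pkt_dscp
    # Untrusted, or a non-IP packet that arrived untagged: rewrite with
    # qosUnmatchedPolicyDscp. Policing still applies in both cases.
    return policy_dscp
# ---------------------------------------------------------------------------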
qosPolicer = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5))
qosPolicerTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1), )
if mibBuilder.loadTexts: qosPolicerTable.setStatus('current')
if mibBuilder.loadTexts: qosPolicerTable.setDescription('A class specifying policing parameters for both microflows and aggregate flows. This table is designed for policing according to a token bucket scheme where an average rate and burst size is specified.')
qosPolicerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosPolicerId"))
if mibBuilder.loadTexts: qosPolicerEntry.setStatus('current')
if mibBuilder.loadTexts: qosPolicerEntry.setDescription('An instance of this class specifies a set of policing parameters.')
qosPolicerId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosPolicerId.setStatus('current')
if mibBuilder.loadTexts: qosPolicerId.setDescription('An integer index to identify the instance of the policy class.')
qosPolicerRate = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1, 2), Unsigned64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosPolicerRate.setStatus('current')
if mibBuilder.loadTexts: qosPolicerRate.setDescription('The token rate. It is specified in units of bit/s. A rate of zero means that all packets will be out of profile. If the qosPolicerAction is set to drop then this effectively denies any service to packets policed by this policer.')
qosPolicerNormalBurst = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosPolicerNormalBurst.setStatus('current')
if mibBuilder.loadTexts: qosPolicerNormalBurst.setDescription('The normal size of a burst in terms of bits.')
qosPolicerExcessBurst = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosPolicerExcessBurst.setStatus('current')
if mibBuilder.loadTexts: qosPolicerExcessBurst.setDescription('The excess size of a burst in terms of bits.')
qosPolicerAction = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("drop", 0), ("mark", 1), ("shape", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosPolicerAction.setStatus('current')
if mibBuilder.loadTexts: qosPolicerAction.setDescription('An indication of how to handle out of profile packets. When the shape action is chosen then traffic is shaped to the rate specified by qosPolicerRate.')
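# --- Illustrative sketch (not part of the generated MIB) -------------------
# A token bucket matching the attributes above: qosPolicerRate in bit/s,
# qosPolicerNormalBurst/qosPolicerExcessBurst in bits. Out-of-profile
# packets are then dropped, marked down, or shaped per qosPolicerAction.
# This is a simplified model, not a device implementation.
import time

class TokenBucketPolicer(object):
    def __init__(self, rate_bps, normal_burst_bits, excess_burst_bits):
        self.rate = rate_bps
        self.depth = normal_burst_bits + excess_burst_bits
        self.tokens = float(self.depth)
        self.last = time.time()

    def conforms(self, packet_bits):
        """True if the packet is in profile, False if out of profile."""
        if self.rate == 0:
            return False   # rate of zero: all packets are out of profile
        now = time.time()
        self.tokens = min(self.depth,
                          self.tokens + self.rate * (now - self.last))
        self.last = now
        if self.tokens >= packet_bits:
            self.tokens -= packet_bits
            return True
        return False
# ---------------------------------------------------------------------------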
qosAggregateTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 2), )
if mibBuilder.loadTexts: qosAggregateTable.setStatus('current')
if mibBuilder.loadTexts: qosAggregateTable.setDescription('Instances of this class identify aggregate flows and the policer to apply to each.')
qosAggregateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 2, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosAggregateId"))
if mibBuilder.loadTexts: qosAggregateEntry.setStatus('current')
if mibBuilder.loadTexts: qosAggregateEntry.setDescription('An instance of this class specifies the policer to apply to an aggregate flow.')
qosAggregateId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 2, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosAggregateId.setStatus('current')
if mibBuilder.loadTexts: qosAggregateId.setDescription('An integer index to identify the instance of the policy class.')
qosAggregatePolicerId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 5, 2, 1, 2), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosAggregatePolicerId.setStatus('current')
if mibBuilder.loadTexts: qosAggregatePolicerId.setDescription('An index identifying the instance of policer to apply to the aggregate. It must correspond to the integer index of an instance of class qosPolicerTable.')
qosMacQos = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6))
qosMacClassificationTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1), )
if mibBuilder.loadTexts: qosMacClassificationTable.setStatus('current')
if mibBuilder.loadTexts: qosMacClassificationTable.setDescription('A class of MAC/Vlan tuples and their associated CoS values.')
qosMacClassificationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosMacClassificationId"))
if mibBuilder.loadTexts: qosMacClassificationEntry.setStatus('current')
if mibBuilder.loadTexts: qosMacClassificationEntry.setDescription('An instance of this class specifies the mapping of a VLAN and a MAC address to a CoS value.')
qosMacClassificationId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosMacClassificationId.setStatus('current')
if mibBuilder.loadTexts: qosMacClassificationId.setDescription('An integer index to identify the instance of the policy class.')
qosDstMacVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4095))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDstMacVlan.setStatus('current')
if mibBuilder.loadTexts: qosDstMacVlan.setDescription('The VLAN of the destination MAC address of the L2 frame.')
qosDstMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDstMacAddress.setStatus('current')
if mibBuilder.loadTexts: qosDstMacAddress.setDescription('The destination MAC address of the L2 frame.')
qosDstMacCos = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 6, 1, 1, 4), QosLayer2Cos()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosDstMacCos.setStatus('current')
if mibBuilder.loadTexts: qosDstMacCos.setDescription('The CoS to assign to a packet with the associated MAC/VLAN tuple. Note that this CoS is overridden by the policies to classify the frame at layer 3 if there are any.')
qosIpQos = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7))
qosIpAceTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1), )
if mibBuilder.loadTexts: qosIpAceTable.setStatus('current')
if mibBuilder.loadTexts: qosIpAceTable.setDescription('ACE definitions.')
qosIpAceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIpAceId"))
if mibBuilder.loadTexts: qosIpAceEntry.setStatus('current')
if mibBuilder.loadTexts: qosIpAceEntry.setDescription('An instance of this class specifies an ACE.')
qosIpAceId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIpAceId.setStatus('current')
if mibBuilder.loadTexts: qosIpAceId.setDescription('An integer index to identify the instance of the policy class.')
qosIpAceDstAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDstAddr.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDstAddr.setDescription("The IP address to match against the packet's destination IP address.")
qosIpAceDstAddrMask = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDstAddrMask.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDstAddrMask.setDescription('A mask for the matching of the destination IP address.')
qosIpAceSrcAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceSrcAddr.setStatus('current')
if mibBuilder.loadTexts: qosIpAceSrcAddr.setDescription("The IP address to match against the packet's source IP address.")
qosIpAceSrcAddrMask = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceSrcAddrMask.setStatus('current')
if mibBuilder.loadTexts: qosIpAceSrcAddrMask.setDescription('A mask for the matching of the source IP address.')
qosIpAceDscpMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 6), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDscpMin.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDscpMin.setDescription('The minimum value that the DSCP in the packet can have and match this ACE.')
qosIpAceDscpMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 7), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDscpMax.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDscpMax.setDescription('The maximum value that the DSCP in the packet can have and match this ACE.')
qosIpAceProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceProtocol.setStatus('current')
if mibBuilder.loadTexts: qosIpAceProtocol.setDescription("The IP protocol to match against the packet's protocol. A value of zero means match all.")
qosIpAceDstL4PortMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDstL4PortMin.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDstL4PortMin.setDescription("The minimum value that the packet's layer 4 dest port number can have and match this ACE.")
qosIpAceDstL4PortMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceDstL4PortMax.setStatus('current')
if mibBuilder.loadTexts: qosIpAceDstL4PortMax.setDescription("The maximum value that the packet's layer 4 dest port number can have and match this ACE.")
qosIpAceSrcL4PortMin = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceSrcL4PortMin.setStatus('current')
if mibBuilder.loadTexts: qosIpAceSrcL4PortMin.setDescription("The minimum value that the packet's layer 4 source port number can have and match this ACE.")
qosIpAceSrcL4PortMax = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceSrcL4PortMax.setStatus('current')
if mibBuilder.loadTexts: qosIpAceSrcL4PortMax.setDescription("The maximum value that the packet's layer 4 source port number can have and match this ACE.")
qosIpAcePermit = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 1, 1, 13), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAcePermit.setStatus('current')
if mibBuilder.loadTexts: qosIpAcePermit.setDescription('If the packet matches this ACE and the value of this attribute is true, then the matching process terminates and the QoS associated with this ACE (indirectly through the ACL) is applied to the packet. If the value of this attribute is false, then no more ACEs in this ACL are compared to this packet and matching continues with the first ACE of the next ACL.')
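# --- Illustrative sketch (not part of the generated MIB) -------------------
# Matching a single ACE against a packet, following the qosIpAce* column
# semantics above. 'ace' is a hypothetical dict whose keys mirror the column
# names; addresses are dotted-quad strings.
import socket
import struct

def _masked_match(addr, ace_addr, ace_mask):
    def _ip(a):
        return struct.unpack('!I', socket.inet_aton(a))[0]
    return (_ip(addr) & _ip(ace_mask)) == (_ip(ace_addr) & _ip(ace_mask))

def ace_matches(ace, src, dst, proto, sport, dport, dscp):
    return (_masked_match(dst, ace['dstAddr'], ace['dstAddrMask'])
            and _masked_match(src, ace['srcAddr'], ace['srcAddrMask'])
            and ace['dscpMin'] <= dscp <= ace['dscpMax']
            and ace['protocol'] in (0, proto)   # zero means match all
            and ace['dstL4PortMin'] <= dport <= ace['dstL4PortMax']
            and ace['srcL4PortMin'] <= sport <= ace['srcL4PortMax'])
# ---------------------------------------------------------------------------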
qosIpAclDefinitionTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2), )
if mibBuilder.loadTexts: qosIpAclDefinitionTable.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDefinitionTable.setDescription('A class that defines a set of ACLs each being an ordered list of ACEs.')
qosIpAclDefinitionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIpAclDefinitionId"))
if mibBuilder.loadTexts: qosIpAclDefinitionEntry.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDefinitionEntry.setDescription('An instance of this class specifies an ACE in an ACL and its order with respect to other ACEs in the same ACL.')
qosIpAclDefinitionId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIpAclDefinitionId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDefinitionId.setDescription('An integer index to identify the instance of the policy class.')
qosIpAclId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2, 1, 2), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclId.setDescription('An index for this ACL. There will be one instance of policy class qosIpAclDefinition with this integer index for each ACE in the ACL per role combination.')
qosIpAceOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAceOrder.setStatus('current')
if mibBuilder.loadTexts: qosIpAceOrder.setDescription('An integer that determines the position of this ACE in the ACL. An ACE with a given order is positioned in the access control list before one with a higher order.')
qosIpAclDefAceId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 2, 1, 4), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclDefAceId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDefAceId.setDescription('This attribute specifies the ACE in the qosIpAceTable that is in the ACL specified by qosIpAclId at the position specified by qosIpAceOrder.')
qosIpAclActionTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3), )
if mibBuilder.loadTexts: qosIpAclActionTable.setStatus('current')
if mibBuilder.loadTexts: qosIpAclActionTable.setDescription('A class that applies a set of ACLs to interfaces specifying, for each interface the order of the ACL with respect to other ACLs applied to the same interface and, for each ACL the action to take for a packet that matches a permit ACE in that ACL. Interfaces are specified abstractly in terms of interface role combinations.')
qosIpAclActionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIpAclActionId"))
if mibBuilder.loadTexts: qosIpAclActionEntry.setStatus('current')
if mibBuilder.loadTexts: qosIpAclActionEntry.setDescription('An instance of this class applies an ACL to traffic in a particular direction on an interface with a particular role combination, and specifies the action for packets which match the ACL.')
qosIpAclActionId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIpAclActionId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclActionId.setDescription('An integer index to identify the instance of the policy class.')
qosIpAclActAclId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 2), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclActAclId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclActAclId.setDescription('The ACL associated with this action.')
qosIpAclInterfaceRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 3), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclInterfaceRoles.setStatus('current')
if mibBuilder.loadTexts: qosIpAclInterfaceRoles.setDescription('The interfaces to which this ACL applies specified in terms of a set of roles.')
qosIpAclInterfaceDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("in", 0), ("out", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclInterfaceDirection.setStatus('current')
if mibBuilder.loadTexts: qosIpAclInterfaceDirection.setDescription('The direction of packet flow at the interface in question to which this ACL applies.')
qosIpAclOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclOrder.setStatus('current')
if mibBuilder.loadTexts: qosIpAclOrder.setDescription('An integer that determines the order of this ACL in the list of ACLs applied to interfaces of the specified role combination. An ACL with a given order is positioned in the list before one with a higher order.')
qosIpAclDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 6), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclDscp.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDscp.setDescription('The DSCP to classify the packet with in the event that the packet matches an ACE in this ACL and the ACE is a permit.')
qosIpAclDscpTrusted = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclDscpTrusted.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDscpTrusted.setDescription('If this attribute is true, then the Dscp associated with the packet is trusted, i.e., it is assumed to have already been set. In this case, the Dscp is not rewritten with qosIpAclDscp (qosIpAclDscp is ignored). The packet is still policed as part of its micro flow and its aggregate flow. When a trusted action is applied to an input interface, the Dscp associated with the packet is the one contained in the packet. When a trusted action is applied to an output interface, the Dscp associated with the packet is the one that is the result of the input classification and policing.')
qosIpAclMicroFlowPolicerId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 8), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclMicroFlowPolicerId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclMicroFlowPolicerId.setDescription('An index identifying the instance of policer to apply to the microflow. It must correspond to the integer index of an instance of class qosPolicerTable or be zero. If zero, the microflow is not policed.')
qosIpAclAggregateId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 7, 3, 1, 9), PolicyInstanceId()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIpAclAggregateId.setStatus('current')
if mibBuilder.loadTexts: qosIpAclAggregateId.setDescription('An index identifying the aggregate that the packet belongs to. It must correspond to the integer index of an instance of class qosAggregateTable or be zero. If zero, the microflow does not belong to any aggregate and is not policed as part of any aggregate.')
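# --- Illustrative sketch (not part of the generated MIB) -------------------
# The evaluation order implied by qosIpAceOrder, qosIpAclOrder and
# qosIpAcePermit: ACLs are walked in ascending order; within an ACL, a
# matching permit ACE terminates matching and the ACL's action applies,
# while a matching deny ACE skips to the next ACL. 'acls' is a hypothetical
# list of (acl_order, aces, action) tuples, where 'aces' is a list of
# (ace_order, ace, permit) tuples; ace_matches() is the sketch above.
def evaluate_acls(acls, **pkt):
    for _, aces, action in sorted(acls, key=lambda entry: entry[0]):
        for _, ace, permit in sorted(aces, key=lambda entry: entry[0]):
            if ace_matches(ace, **pkt):
                if permit:
                    return action   # permit ACE: apply this ACL's action
                break               # deny ACE: continue with the next ACL
    return None                     # no match: the unmatched policy applies
# ---------------------------------------------------------------------------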
qosIfParameters = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8))
qosIfSchedulingPreferencesTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1), )
if mibBuilder.loadTexts: qosIfSchedulingPreferencesTable.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingPreferencesTable.setDescription('This class specifies the scheduling preference an interface chooses if it supports multiple scheduling types. Higher values are preferred over lower values.')
qosIfSchedulingPreferenceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfSchedulingPreferenceId"))
if mibBuilder.loadTexts: qosIfSchedulingPreferenceEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingPreferenceEntry.setDescription('An instance of this class specifies a scheduling preference for a queue-type on an interface with a particular role combination.')
qosIfSchedulingPreferenceId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfSchedulingPreferenceId.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingPreferenceId.setDescription('An integer index to identify the instance of the policy class.')
qosIfSchedulingRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfSchedulingRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingRoles.setDescription('The combination of roles the interface must have for this policy instance to apply to that interface.')
qosIfSchedulingPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfSchedulingPreference.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingPreference.setDescription('The preference to use this scheduling discipline and queue type. A higher value means a higher preference. If two disciplines have the same preference the choice is a local decision.')
qosIfSchedulingDiscipline = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("weightedFairQueueing", 1), ("weightedRoundRobin", 2), ("customQueueing", 3), ("priorityQueueing", 4), ("classBasedWFQ", 5), ("fifo", 6), ("pqWrr", 7), ("pqCbwfq", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfSchedulingDiscipline.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingDiscipline.setDescription('An enumerated type for all the known scheduling disciplines.')
qosIfSchedulingQueueType = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 1, 1, 5), QosInterfaceQueueType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfSchedulingQueueType.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingQueueType.setDescription('The queue type of this preference.')
qosIfDropPreferenceTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2), )
if mibBuilder.loadTexts: qosIfDropPreferenceTable.setStatus('current')
if mibBuilder.loadTexts: qosIfDropPreferenceTable.setDescription('This class specifies the preference of the drop mechanism an interface chooses if it supports multiple drop mechanisms. Higher values are preferred over lower values.')
qosIfDropPreferenceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfDropPreferenceId"))
if mibBuilder.loadTexts: qosIfDropPreferenceEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfDropPreferenceEntry.setDescription('An instance of this class specifies a drop preference for a drop mechanism on an interface with a particular role combination.')
qosIfDropPreferenceId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfDropPreferenceId.setStatus('current')
if mibBuilder.loadTexts: qosIfDropPreferenceId.setDescription('An integer index to identify the instance of the policy class.')
qosIfDropRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfDropRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfDropRoles.setDescription('The combination of roles the interface must have for this policy instance to apply to that interface.')
qosIfDropPreference = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfDropPreference.setStatus('current')
if mibBuilder.loadTexts: qosIfDropPreference.setDescription('The preference to use this drop mechanism. A higher value means a higher preference. If two mechanisms have the same preference the choice is a local decision.')
qosIfDropDiscipline = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("qosIfDropWRED", 1), ("qosIfDropTailDrop", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfDropDiscipline.setStatus('current')
if mibBuilder.loadTexts: qosIfDropDiscipline.setDescription('An enumerated type for all the known drop mechanisms.')
qosIfDscpAssignmentTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3), )
if mibBuilder.loadTexts: qosIfDscpAssignmentTable.setStatus('current')
if mibBuilder.loadTexts: qosIfDscpAssignmentTable.setDescription('The assignment of each DSCP to a queue and threshold for each interface queue type.')
qosIfDscpAssignmentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfDscpAssignmentId"))
if mibBuilder.loadTexts: qosIfDscpAssignmentEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfDscpAssignmentEntry.setDescription('An instance of this class specifies the queue and threshold set for a packet with a particular DSCP on an interface of a particular type with a particular role combination.')
qosIfDscpAssignmentId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfDscpAssignmentId.setStatus('current')
if mibBuilder.loadTexts: qosIfDscpAssignmentId.setDescription('An integer index to identify the instance of the policy class.')
qosIfDscpRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfDscpRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfDscpRoles.setDescription('The role combination the interface must be configured with.')
qosIfQueueType = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 3), QosInterfaceQueueType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfQueueType.setStatus('current')
if mibBuilder.loadTexts: qosIfQueueType.setDescription('The interface queue type to which this row applies.')
qosIfDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 4), Dscp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfDscp.setStatus('current')
if mibBuilder.loadTexts: qosIfDscp.setDescription('The DSCP to which this row applies.')
qosIfQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfQueue.setStatus('current')
if mibBuilder.loadTexts: qosIfQueue.setDescription('The queue to which the DSCP applies for the given interface type.')
qosIfThresholdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfThresholdSet.setStatus('current')
if mibBuilder.loadTexts: qosIfThresholdSet.setDescription('The threshold set of the specified queue to which the DSCP applies for the given interface type.')
qosIfRedTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4), )
if mibBuilder.loadTexts: qosIfRedTable.setStatus('current')
if mibBuilder.loadTexts: qosIfRedTable.setDescription('A class of lower and upper values for each threshold set in a queue supporting WRED. If the size of the queue for a given threshold is below the lower value then packets assigned to that threshold are always accepted into the queue. If the size of the queue is above the upper value then packets are always dropped. If the size of the queue is between the lower and upper values then packets are randomly dropped.')
qosIfRedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfRedId"))
if mibBuilder.loadTexts: qosIfRedEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfRedEntry.setDescription('An instance of this class specifies threshold limits for a particular RED threshold of a given threshold set on an interface and with a particular role combination.')
qosIfRedId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfRedId.setStatus('current')
if mibBuilder.loadTexts: qosIfRedId.setDescription('An integer index to identify the instance of the policy class.')
qosIfRedRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfRedRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfRedRoles.setDescription('The role combination the interface must be configured with.')
qosIfRedNumThresholdSets = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 3), ThresholdSetRange()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfRedNumThresholdSets.setStatus('current')
if mibBuilder.loadTexts: qosIfRedNumThresholdSets.setDescription('The values in this entry apply only to queues with the number of thresholds specified by this attribute.')
qosIfRedThresholdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfRedThresholdSet.setStatus('current')
if mibBuilder.loadTexts: qosIfRedThresholdSet.setDescription('The threshold set to which the lower and upper values apply. It must be in the range 1 through qosIfRedNumThresholdSets. There must be exactly one PRI for each value in this range.')
qosIfRedThresholdSetLower = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 5), Percent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfRedThresholdSetLower.setStatus('current')
if mibBuilder.loadTexts: qosIfRedThresholdSetLower.setDescription('The threshold value below which no packets are dropped.')
qosIfRedThresholdSetUpper = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 4, 1, 6), Percent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfRedThresholdSetUpper.setStatus('current')
if mibBuilder.loadTexts: qosIfRedThresholdSetUpper.setDescription('The threshold value above which all packets are dropped.')
qosIfTailDropTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5), )
if mibBuilder.loadTexts: qosIfTailDropTable.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropTable.setDescription('A class for threshold sets in a queue supporting tail drop. If the size of the queue for a given threshold set is at or below the specified value then packets assigned to that threshold set are always accepted into the queue. If the size of the queue is above the specified value then packets are always dropped.')
qosIfTailDropEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfTailDropId"))
if mibBuilder.loadTexts: qosIfTailDropEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropEntry.setDescription('An instance of this class specifies the queue depth for a particular tail-drop threshold set on an interface with a particular role combination.')
qosIfTailDropId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfTailDropId.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropId.setDescription('An integer index to identify the instance of the policy class.')
qosIfTailDropRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfTailDropRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropRoles.setDescription('The role combination the interface must be configured with.')
qosIfTailDropNumThresholdSets = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1, 3), ThresholdSetRange()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfTailDropNumThresholdSets.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropNumThresholdSets.setDescription('The value in this entry applies only to queues with the number of thresholds specified by this attribute.')
qosIfTailDropThresholdSet = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfTailDropThresholdSet.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropThresholdSet.setDescription('The threshold set to which the threshold value applies.')
qosIfTailDropThresholdSetValue = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 5, 1, 5), Percent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfTailDropThresholdSetValue.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropThresholdSetValue.setDescription('The threshold value above which packets are dropped.')
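# --- Illustrative sketch (not part of the generated MIB) -------------------
# The two drop mechanisms described by qosIfRedTable and qosIfTailDropTable.
# fill_percent is the current queue depth as a percentage of queue size;
# lower/upper/threshold are the Percent values of the matching threshold
# set. The linear drop-probability ramp between the WRED bounds is an
# assumption; the PIB only says packets are "randomly dropped" there.
import random

def wred_accepts(fill_percent, lower, upper):
    if fill_percent < lower:
        return True    # below the lower value: always accepted
    if fill_percent > upper or upper <= lower:
        return False   # above the upper value: always dropped
    drop_p = (fill_percent - lower) / float(upper - lower)
    return random.random() >= drop_p

def tail_drop_accepts(fill_percent, threshold):
    # At or below the threshold value: accept; above it: drop.
    return fill_percent <= threshold
# ---------------------------------------------------------------------------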
qosIfWeightsTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6), )
if mibBuilder.loadTexts: qosIfWeightsTable.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsTable.setDescription('A class of scheduling weights for each queue of an interface that supports weighted round robin scheduling or a mix of priority queueing and weighted round robin. For a queue with N priority queues, the N highest queue numbers are the priority queues with the highest queue number having the highest priority. WRR is applied to the non-priority queues.')
qosIfWeightsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1), ).setIndexNames((0, "BDCOM-QOS-PIB-MIB", "qosIfWeightsId"))
if mibBuilder.loadTexts: qosIfWeightsEntry.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsEntry.setDescription('An instance of this class specifies the scheduling weight for a particular queue of an interface with a particular number of queues and with a particular role combination.')
qosIfWeightsId = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 1), PolicyInstanceId())
if mibBuilder.loadTexts: qosIfWeightsId.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsId.setDescription('An integer index to identify the instance of the policy class.')
qosIfWeightsRoles = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 2), RoleCombination()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfWeightsRoles.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsRoles.setDescription('The role combination the interface must be configured with.')
qosIfWeightsNumQueues = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 3), QueueRange()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfWeightsNumQueues.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsNumQueues.setDescription('The value of the weight in this instance applies only to interfaces with the number of queues specified by this attribute.')
qosIfWeightsQueue = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfWeightsQueue.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsQueue.setDescription('The queue to which the weight applies.')
qosIfWeightsDrainSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfWeightsDrainSize.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsDrainSize.setDescription('The maximum number of bytes that may be drained from the queue in one cycle. The percentage of the bandwidth allocated to this queue can be calculated from this attribute and the sum of the drain sizes of all the non-priority queues of the interface.')
qosIfWeightsQueueSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 8, 6, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qosIfWeightsQueueSize.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsQueueSize.setDescription('The size of the queue in bytes. Some devices set queue size in terms of packets. These devices must calculate the queue size in packets by assuming an average packet size suitable for the particular interface. Some devices have a fixed size buffer to be shared among all queues. These devices must allocate a fraction of the total buffer space to this queue calculated as the ratio of the queue size to the sum of the queue sizes for the interface.')
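# --- Illustrative sketch (not part of the generated MIB) -------------------
# Deriving each non-priority queue's bandwidth fraction from
# qosIfWeightsDrainSize, as the description above suggests. 'drain_sizes' is
# a hypothetical mapping of queue number -> drain size in bytes for the
# non-priority queues of one interface/role combination.
def wrr_bandwidth_shares(drain_sizes):
    total = float(sum(drain_sizes.values()))
    return dict((q, size / total) for q, size in drain_sizes.items())

# e.g. wrr_bandwidth_shares({1: 1500, 2: 4500}) -> {1: 0.25, 2: 0.75}
# ---------------------------------------------------------------------------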
qosPIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 1))
qosPIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2))
qosPIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 1, 1)).setObjects(("BDCOM-QOS-PIB-MIB", "qosDevicePibIncarnationTableGroup"), ("BDCOM-QOS-PIB-MIB", "qosDeviceAttributeTableGroup"), ("BDCOM-QOS-PIB-MIB", "qosInterfaceTypeTableGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosPIBCompliance = qosPIBCompliance.setStatus('current')
if mibBuilder.loadTexts: qosPIBCompliance.setDescription('The compliance statement for the QOS Policy Derived MIB.')
qosDevicePibIncarnationTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 1)).setObjects(("BDCOM-QOS-PIB-MIB", "qosDevicePdpName"), ("BDCOM-QOS-PIB-MIB", "qosDevicePibIncarnation"), ("BDCOM-QOS-PIB-MIB", "qosDevicePibTtl"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosDevicePibIncarnationTableGroup = qosDevicePibIncarnationTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosDevicePibIncarnationTableGroup.setDescription('')
qosDeviceAttributeTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 2)).setObjects(("BDCOM-QOS-PIB-MIB", "qosDevicePepDomain"), ("BDCOM-QOS-PIB-MIB", "qosDevicePrimaryPdp"), ("BDCOM-QOS-PIB-MIB", "qosDeviceSecondaryPdp"), ("BDCOM-QOS-PIB-MIB", "qosDeviceMaxMessageSize"), ("BDCOM-QOS-PIB-MIB", "qosDeviceCapabilities"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosDeviceAttributeTableGroup = qosDeviceAttributeTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosDeviceAttributeTableGroup.setDescription('')
qosInterfaceTypeTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 3)).setObjects(("BDCOM-QOS-PIB-MIB", "qosInterfaceQueueType"), ("BDCOM-QOS-PIB-MIB", "qosInterfaceTypeRoles"), ("BDCOM-QOS-PIB-MIB", "qosInterfaceTypeCapabilities"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosInterfaceTypeTableGroup = qosInterfaceTypeTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosInterfaceTypeTableGroup.setDescription('')
qosDiffServMappingTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 4)).setObjects(("BDCOM-QOS-PIB-MIB", "qosMarkedDscp"), ("BDCOM-QOS-PIB-MIB", "qosL2Cos"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosDiffServMappingTableGroup = qosDiffServMappingTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosDiffServMappingTableGroup.setDescription('')
qosCosToDscpTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 5)).setObjects(("BDCOM-QOS-PIB-MIB", "qosCosToDscpDscp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosCosToDscpTableGroup = qosCosToDscpTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosCosToDscpTableGroup.setDescription('')
qosUnmatchedPolicyTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 6)).setObjects(("BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyRole"), ("BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyDirection"), ("BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyDscp"), ("BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyDscpTrusted"), ("BDCOM-QOS-PIB-MIB", "qosUnmatchPolMicroFlowPolicerId"), ("BDCOM-QOS-PIB-MIB", "qosUnmatchedPolicyAggregateId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosUnmatchedPolicyTableGroup = qosUnmatchedPolicyTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosUnmatchedPolicyTableGroup.setDescription('')
qosPolicerTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 7)).setObjects(("BDCOM-QOS-PIB-MIB", "qosPolicerRate"), ("BDCOM-QOS-PIB-MIB", "qosPolicerNormalBurst"), ("BDCOM-QOS-PIB-MIB", "qosPolicerExcessBurst"), ("BDCOM-QOS-PIB-MIB", "qosPolicerAction"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosPolicerTableGroup = qosPolicerTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosPolicerTableGroup.setDescription('')
qosAggregateTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 8)).setObjects(("BDCOM-QOS-PIB-MIB", "qosAggregatePolicerId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosAggregateTableGroup = qosAggregateTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosAggregateTableGroup.setDescription('')
qosMacClassificationTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 9)).setObjects(("BDCOM-QOS-PIB-MIB", "qosDstMacVlan"), ("BDCOM-QOS-PIB-MIB", "qosDstMacAddress"), ("BDCOM-QOS-PIB-MIB", "qosDstMacCos"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosMacClassificationTableGroup = qosMacClassificationTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosMacClassificationTableGroup.setDescription('')
qosIpAceTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 10)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIpAceDstAddr"), ("BDCOM-QOS-PIB-MIB", "qosIpAceDstAddrMask"), ("BDCOM-QOS-PIB-MIB", "qosIpAceSrcAddr"), ("BDCOM-QOS-PIB-MIB", "qosIpAceSrcAddrMask"), ("BDCOM-QOS-PIB-MIB", "qosIpAceDscpMin"), ("BDCOM-QOS-PIB-MIB", "qosIpAceDscpMax"), ("BDCOM-QOS-PIB-MIB", "qosIpAceProtocol"), ("BDCOM-QOS-PIB-MIB", "qosIpAceDstL4PortMin"), ("BDCOM-QOS-PIB-MIB", "qosIpAceDstL4PortMax"), ("BDCOM-QOS-PIB-MIB", "qosIpAceSrcL4PortMin"), ("BDCOM-QOS-PIB-MIB", "qosIpAceSrcL4PortMax"), ("BDCOM-QOS-PIB-MIB", "qosIpAcePermit"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIpAceTableGroup = qosIpAceTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIpAceTableGroup.setDescription('')
qosIpAclDefinitionTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 11)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIpAclId"), ("BDCOM-QOS-PIB-MIB", "qosIpAceOrder"), ("BDCOM-QOS-PIB-MIB", "qosIpAclDefAceId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIpAclDefinitionTableGroup = qosIpAclDefinitionTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIpAclDefinitionTableGroup.setDescription('')
qosIpAclActionTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 12)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIpAclActAclId"), ("BDCOM-QOS-PIB-MIB", "qosIpAclInterfaceRoles"), ("BDCOM-QOS-PIB-MIB", "qosIpAclInterfaceDirection"), ("BDCOM-QOS-PIB-MIB", "qosIpAclOrder"), ("BDCOM-QOS-PIB-MIB", "qosIpAclDscp"), ("BDCOM-QOS-PIB-MIB", "qosIpAclDscpTrusted"), ("BDCOM-QOS-PIB-MIB", "qosIpAclMicroFlowPolicerId"), ("BDCOM-QOS-PIB-MIB", "qosIpAclAggregateId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIpAclActionTableGroup = qosIpAclActionTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIpAclActionTableGroup.setDescription('')
qosIfSchedulingPreferencesTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 13)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfSchedulingRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfSchedulingPreference"), ("BDCOM-QOS-PIB-MIB", "qosIfSchedulingDiscipline"), ("BDCOM-QOS-PIB-MIB", "qosIfSchedulingQueueType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfSchedulingPreferencesTableGroup = qosIfSchedulingPreferencesTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfSchedulingPreferencesTableGroup.setDescription('')
qosIfDropPreferenceTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 14)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfDropRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfDropPreference"), ("BDCOM-QOS-PIB-MIB", "qosIfDropDiscipline"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfDropPreferenceTableGroup = qosIfDropPreferenceTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfDropPreferenceTableGroup.setDescription('')
qosIfDscpAssignmentTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 15)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfDscpRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfQueueType"), ("BDCOM-QOS-PIB-MIB", "qosIfDscp"), ("BDCOM-QOS-PIB-MIB", "qosIfQueue"), ("BDCOM-QOS-PIB-MIB", "qosIfThresholdSet"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfDscpAssignmentTableGroup = qosIfDscpAssignmentTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfDscpAssignmentTableGroup.setDescription('')
qosIfRedTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 16)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfRedRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfRedNumThresholdSets"), ("BDCOM-QOS-PIB-MIB", "qosIfRedThresholdSet"), ("BDCOM-QOS-PIB-MIB", "qosIfRedThresholdSetLower"), ("BDCOM-QOS-PIB-MIB", "qosIfRedThresholdSetUpper"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfRedTableGroup = qosIfRedTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfRedTableGroup.setDescription('')
qosIfTailDropTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 17)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfTailDropRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfTailDropNumThresholdSets"), ("BDCOM-QOS-PIB-MIB", "qosIfTailDropThresholdSet"), ("BDCOM-QOS-PIB-MIB", "qosIfTailDropThresholdSetValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfTailDropTableGroup = qosIfTailDropTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfTailDropTableGroup.setDescription('')
qosIfWeightsTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 3320, 18, 2, 1, 1, 2, 18)).setObjects(("BDCOM-QOS-PIB-MIB", "qosIfWeightsRoles"), ("BDCOM-QOS-PIB-MIB", "qosIfWeightsNumQueues"), ("BDCOM-QOS-PIB-MIB", "qosIfWeightsQueue"), ("BDCOM-QOS-PIB-MIB", "qosIfWeightsDrainSize"), ("BDCOM-QOS-PIB-MIB", "qosIfWeightsQueueSize"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
qosIfWeightsTableGroup = qosIfWeightsTableGroup.setStatus('current')
if mibBuilder.loadTexts: qosIfWeightsTableGroup.setDescription('')
mibBuilder.exportSymbols("BDCOM-QOS-PIB-MIB", qosIpAceTable=qosIpAceTable, qosIfSchedulingPreferencesTableGroup=qosIfSchedulingPreferencesTableGroup, qosAggregateTableGroup=qosAggregateTableGroup, qosIpAceTableGroup=qosIpAceTableGroup, qosInterfaceTypeEntry=qosInterfaceTypeEntry, qosIfRedNumThresholdSets=qosIfRedNumThresholdSets, qosIpAclDefAceId=qosIpAclDefAceId, qosIfRedThresholdSet=qosIfRedThresholdSet, qosUnmatchPolMicroFlowPolicerId=qosUnmatchPolMicroFlowPolicerId, qosDevicePdpName=qosDevicePdpName, qosMacClassificationId=qosMacClassificationId, qosDeviceAttributeTableGroup=qosDeviceAttributeTableGroup, qosIfDscpAssignmentTableGroup=qosIfDscpAssignmentTableGroup, qosIpAceSrcAddrMask=qosIpAceSrcAddrMask, qosIfDropRoles=qosIfDropRoles, qosIpAceProtocol=qosIpAceProtocol, qosDiffServMappingTableGroup=qosDiffServMappingTableGroup, qosIpAclDscpTrusted=qosIpAclDscpTrusted, qosUnmatchedPolicyTable=qosUnmatchedPolicyTable, qosIfDropPreferenceEntry=qosIfDropPreferenceEntry, qosIpAceDstAddr=qosIpAceDstAddr, qosPIBConformance=qosPIBConformance, qosIpAclDefinitionId=qosIpAclDefinitionId, qosIfTailDropTable=qosIfTailDropTable, qosIpAceSrcL4PortMax=qosIpAceSrcL4PortMax, qosUnmatchedPolicyAggregateId=qosUnmatchedPolicyAggregateId, qosPolicer=qosPolicer, qosIfSchedulingPreferencesTable=qosIfSchedulingPreferencesTable, qosMacQos=qosMacQos, qosIfTailDropNumThresholdSets=qosIfTailDropNumThresholdSets, qosIpAclDefinitionTableGroup=qosIpAclDefinitionTableGroup, Unsigned64=Unsigned64, qosIfParameters=qosIfParameters, qosIfTailDropId=qosIfTailDropId, qosMarkedDscp=qosMarkedDscp, qosDiffServMappingEntry=qosDiffServMappingEntry, qosUnmatchedPolicyDscp=qosUnmatchedPolicyDscp, qosAggregatePolicerId=qosAggregatePolicerId, qosIfTailDropEntry=qosIfTailDropEntry, qosIfDscpAssignmentEntry=qosIfDscpAssignmentEntry, qosIfRedThresholdSetUpper=qosIfRedThresholdSetUpper, qosDevicePibIncarnationEntry=qosDevicePibIncarnationEntry, qosIfWeightsTable=qosIfWeightsTable, qosPolicerTable=qosPolicerTable, qosPolicerRate=qosPolicerRate, qosPolicerNormalBurst=qosPolicerNormalBurst, RoleCombination=RoleCombination, qosIpQos=qosIpQos, qosIpAceSrcL4PortMin=qosIpAceSrcL4PortMin, qosIfTailDropThresholdSetValue=qosIfTailDropThresholdSetValue, qosIfWeightsId=qosIfWeightsId, qosInterfaceTypeTable=qosInterfaceTypeTable, qosPIBCompliances=qosPIBCompliances, qosIpAceDstAddrMask=qosIpAceDstAddrMask, qosAggregateTable=qosAggregateTable, qosIfDscpRoles=qosIfDscpRoles, qosIpAclActAclId=qosIpAclActAclId, qosDeviceAttributeTable=qosDeviceAttributeTable, qosDscp=qosDscp, qosInterfaceQueueType=qosInterfaceQueueType, qosIfRedTable=qosIfRedTable, qosIfWeightsTableGroup=qosIfWeightsTableGroup, qosDeviceConfig=qosDeviceConfig, qosIfSchedulingRoles=qosIfSchedulingRoles, qosAggregateId=qosAggregateId, qosIpAcePermit=qosIpAcePermit, qosIpAclId=qosIpAclId, QosLayer2Cos=QosLayer2Cos, qosDiffServMappingTable=qosDiffServMappingTable, qosIfDropPreferenceTable=qosIfDropPreferenceTable, qosIpAclDefinitionEntry=qosIpAclDefinitionEntry, qosIfWeightsDrainSize=qosIfWeightsDrainSize, qosCosToDscpTable=qosCosToDscpTable, qosDeviceIncarnationId=qosDeviceIncarnationId, qosMacClassificationTableGroup=qosMacClassificationTableGroup, qosIpAceOrder=qosIpAceOrder, qosDstMacVlan=qosDstMacVlan, qosDeviceSecondaryPdp=qosDeviceSecondaryPdp, qosIpAceEntry=qosIpAceEntry, qosIfTailDropRoles=qosIfTailDropRoles, qosCosToDscpEntry=qosCosToDscpEntry, qosIpAclInterfaceDirection=qosIpAclInterfaceDirection, qosIpAceDstL4PortMax=qosIpAceDstL4PortMax, qosIfQueue=qosIfQueue, 
QueueRange=QueueRange, qosIpAceId=qosIpAceId, qosDstMacAddress=qosDstMacAddress, bdcomQosPIBMIB=bdcomQosPIBMIB, QosInterfaceTypeCapabilities=QosInterfaceTypeCapabilities, qosDevicePepDomain=qosDevicePepDomain, qosPolicerEntry=qosPolicerEntry, qosDeviceAttributeId=qosDeviceAttributeId, qosMacClassificationTable=qosMacClassificationTable, qosInterfaceTypeTableGroup=qosInterfaceTypeTableGroup, qosIfWeightsEntry=qosIfWeightsEntry, qosDevicePrimaryPdp=qosDevicePrimaryPdp, qosIfWeightsRoles=qosIfWeightsRoles, qosIpAceSrcAddr=qosIpAceSrcAddr, qosDeviceMaxMessageSize=qosDeviceMaxMessageSize, qosIpAclActionTableGroup=qosIpAclActionTableGroup, qosIfSchedulingDiscipline=qosIfSchedulingDiscipline, qosIfDropDiscipline=qosIfDropDiscipline, Dscp=Dscp, qosPIBCompliance=qosPIBCompliance, qosCosToDscpCos=qosCosToDscpCos, qosIpAclMicroFlowPolicerId=qosIpAclMicroFlowPolicerId, qosIpAclActionId=qosIpAclActionId, qosIfRedId=qosIfRedId, qosDomainConfig=qosDomainConfig, qosIfDropPreferenceTableGroup=qosIfDropPreferenceTableGroup, qosIfSchedulingQueueType=qosIfSchedulingQueueType, qosIfRedTableGroup=qosIfRedTableGroup, qosPolicerTableGroup=qosPolicerTableGroup, qosIfDropPreference=qosIfDropPreference, ThresholdSetRange=ThresholdSetRange, qosIpAclDefinitionTable=qosIpAclDefinitionTable, qosIfSchedulingPreferenceId=qosIfSchedulingPreferenceId, qosUnmatchedPolicy=qosUnmatchedPolicy, qosUnmatchedPolicyEntry=qosUnmatchedPolicyEntry, qosIfQueueType=qosIfQueueType, qosAggregateEntry=qosAggregateEntry, qosIpAclDscp=qosIpAclDscp, qosUnmatchedPolicyRole=qosUnmatchedPolicyRole, PolicyInstanceId=PolicyInstanceId, qosIfDscpAssignmentTable=qosIfDscpAssignmentTable, qosDevicePibIncarnation=qosDevicePibIncarnation, qosIfDscp=qosIfDscp, qosDeviceAttributeEntry=qosDeviceAttributeEntry, qosInterfaceTypeId=qosInterfaceTypeId, qosIfRedRoles=qosIfRedRoles, qosUnmatchedPolicyDirection=qosUnmatchedPolicyDirection, qosDevicePibIncarnationTable=qosDevicePibIncarnationTable, qosInterfaceTypeRoles=qosInterfaceTypeRoles, qosPolicerId=qosPolicerId, qosPIBGroups=qosPIBGroups, qosMacClassificationEntry=qosMacClassificationEntry, qosL2Cos=qosL2Cos, qosIfWeightsNumQueues=qosIfWeightsNumQueues, qosIfTailDropTableGroup=qosIfTailDropTableGroup, QosInterfaceQueueType=QosInterfaceQueueType, PYSNMP_MODULE_ID=bdcomQosPIBMIB, qosIfTailDropThresholdSet=qosIfTailDropThresholdSet, qosIpAceDstL4PortMin=qosIpAceDstL4PortMin, qosIpAclAggregateId=qosIpAclAggregateId, qosIfDscpAssignmentId=qosIfDscpAssignmentId, qosDevicePibTtl=qosDevicePibTtl, qosDeviceCapabilities=qosDeviceCapabilities, qosPolicerExcessBurst=qosPolicerExcessBurst, qosDstMacCos=qosDstMacCos, qosIpAclActionTable=qosIpAclActionTable, qosIpAclActionEntry=qosIpAclActionEntry, qosDevicePibIncarnationTableGroup=qosDevicePibIncarnationTableGroup, qosCosToDscpTableGroup=qosCosToDscpTableGroup, qosUnmatchedPolicyDscpTrusted=qosUnmatchedPolicyDscpTrusted, qosIpAceDscpMax=qosIpAceDscpMax, Percent=Percent, qosIfSchedulingPreferenceEntry=qosIfSchedulingPreferenceEntry, qosIfRedEntry=qosIfRedEntry, qosInterfaceTypeCapabilities=qosInterfaceTypeCapabilities, qosIfWeightsQueueSize=qosIfWeightsQueueSize, qosPolicerAction=qosPolicerAction, qosIpAceDscpMin=qosIpAceDscpMin, qosIfSchedulingPreference=qosIfSchedulingPreference, qosUnmatchedPolicyId=qosUnmatchedPolicyId, qosIfDropPreferenceId=qosIfDropPreferenceId, qosUnmatchedPolicyTableGroup=qosUnmatchedPolicyTableGroup, qosIfRedThresholdSetLower=qosIfRedThresholdSetLower, qosIpAclInterfaceRoles=qosIpAclInterfaceRoles, qosIpAclOrder=qosIpAclOrder, 
qosIfWeightsQueue=qosIfWeightsQueue, qosCosToDscpDscp=qosCosToDscpDscp, qosIfThresholdSet=qosIfThresholdSet)
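if __name__ == '__main__':
    # A minimal loading sketch (illustrative, not part of the generated
    # module); the MIB source directory below is an assumption.
    from pysnmp.smi import builder
    demoBuilder = builder.MibBuilder()
    demoBuilder.addMibSources(builder.DirMibSource('.'))  # directory holding this file
    demoBuilder.loadModules('BDCOM-QOS-PIB-MIB')
    print('Loaded %d symbols from BDCOM-QOS-PIB-MIB'
          % len(demoBuilder.mibSymbols['BDCOM-QOS-PIB-MIB']))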
|
# coding: utf-8
# /*##########################################################################
# MIT License
#
# Copyright (c) 2018 DAXS developers.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
"""This module provides base classes to implement models for
spectroscopy data.
"""
from __future__ import absolute_import, division
__authors__ = ['Marius Retegan', 'Mauro Rovezzi']
__license__ = 'MIT'
import os
import silx.io
from silx.gui import qt
from .items import ExperimentItem, GroupItem, FileItem, ScanItem
from sloth.utils.logging import getLogger
logger = getLogger('sloth.gui.daxs.view')
class HorizontalHeaderView(qt.QHeaderView):
def __init__(self, parent=None):
super(HorizontalHeaderView, self).__init__(qt.Qt.Horizontal, parent)
# Some properties
self.setStretchLastSection(True)
# self.setSectionsMovable(True)
# Context menu
self.setContextMenuPolicy(qt.Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.showContextMenu)
def showContextMenu(self, position):
menu = qt.QMenu('Horizontal Header View Menu', self)
section = self.logicalIndexAt(position)
action = qt.QAction('Add', self, triggered=self.append)
menu.addAction(action)
action = qt.QAction('Remove', self,
triggered=lambda: self.remove(section))
menu.addAction(action)
menu.exec_(self.mapToGlobal(position))
def append(self):
pass
def remove(self, section):
model = self.model()
if not model.header[section].removable:
logger.info('The selected column cannot be removed')
return
model.setHeaderData(section, orientation=qt.Qt.Horizontal, value=None)
view = self.parent()
view.setItemsDelegates()
class TreeView(qt.QTreeView):
def __init__(self, parent=None):
super(TreeView, self).__init__(parent)
# Header
headerView = HorizontalHeaderView()
self.setHeader(headerView)
# Context menu
self.setContextMenuPolicy(qt.Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.showContextMenu)
# Selection mode
self.setSelectionMode(qt.QAbstractItemView.ExtendedSelection)
def setModel(self, model):
super(TreeView, self).setModel(model)
self.setItemsDelegates()
def setItemsDelegates(self):
if self.model() is None:
return
header = self.model().header
for i, _ in enumerate(header):
delegate = header.delegate(i)
if delegate is not None:
self.setItemDelegateForColumn(i, delegate(parent=self))
def showContextMenu(self, position):
menu = qt.QMenu('Tree View Menu', self)
action = qt.QAction(
'Add Experiment', self, triggered=self.addExperiment)
menu.addAction(action)
# Get the index under the cursor.
index = self.indexAt(position)
item = self.model().itemFromIndex(index)
        if isinstance(item, (ExperimentItem, GroupItem)):
action = qt.QAction('Add Group', self, triggered=self.addGroup)
menu.addAction(action)
action = qt.QAction('Load Files', self, triggered=self.loadFiles)
menu.addAction(action)
# If there are selected indexes, they can be removed or checked.
if self.selectedIndexes():
menu.addSeparator()
action = qt.QAction(
'Toggle Selected', self, triggered=self.toggleSelected)
menu.addAction(action)
action = qt.QAction(
'Remove Selected', self, triggered=self.removeSelected)
menu.addAction(action)
if isinstance(item, ScanItem) and index.column() > 0:
menu.addSeparator()
action = qt.QAction(
'Copy Value to Selected', self,
triggered=lambda: self.copyValueToSelected(index))
menu.addAction(action)
action = qt.QAction(
'Copy Value to Toggled', self,
triggered=lambda: self.copyValueToToggled(index))
menu.addAction(action)
menu.exec_(self.mapToGlobal(position))
def loadFiles(self):
paths, _ = qt.QFileDialog.getOpenFileNames(
self, 'Select Files to Load', os.getcwd(),
'Data Files (*.spec *.hdf5 *.h5);; All Files (*)')
if not paths:
return
        selectedRows = self.selectionModel().selectedRows()
        if not selectedRows:
            logger.info('Select a parent item before loading files')
            return
        parent = selectedRows.pop()
        parentItem = self.model().itemFromIndex(parent)
for path in paths:
self.addFile(path, parentItem)
def addExperiment(self, name=None):
rootItem = self.model().rootItem
row = rootItem.childCount()
if name is None or not name:
name = 'Experiment{}'.format(row)
item = ExperimentItem(name=name, parentItem=rootItem)
self.model().appendRow(item)
return item
def addGroup(self, name=None, parentItem=None):
"""Add a generic GroupItem at a given parentItem"""
        # Add the group under the currently selected item.
if parentItem is None:
parent = self.selectionModel().selectedRows().pop()
parentItem = self.model().itemFromIndex(parent)
row = parentItem.childCount()
if name is None or not name:
name = 'Group{}'.format(row)
item = GroupItem(name, parentItem)
self.model().appendRow(item)
def addFile(self, path=None, parentItem=None):
if path is None:
return
# Add the file to the last added experiment item.
if parentItem is None:
parentItem = self.model().rootItem.lastChild()
try:
data = silx.io.open(path)
except OSError as e:
logger.warning(e)
return
# Create a tree item for the file and add it to the experiment item.
name, _ = os.path.splitext(os.path.basename(path))
item = FileItem(name, parentItem)
self.model().appendRow(item)
# Create a tree item for each scan. The parent item is now the
# previous file item.
# TODO: Make this more "intelligent" by using the command to
# set better defaults for x, signal, etc.
parentItem = item
for scan in data:
item = ScanItem(name=scan, parentItem=parentItem, data=data[scan])
self.model().appendRow(item)
def selectedItems(self):
indexes = self.selectionModel().selectedRows()
items = [self.model().itemFromIndex(index) for index in indexes]
return items
def scanItems(self):
for index in self.model().visitModel():
item = self.model().itemFromIndex(index)
if isinstance(item, ScanItem):
yield item
def toggleSelected(self):
for item in self.selectedItems():
index = self.model().indexFromItem(item)
try:
if item.isChecked:
self.model().setData(
index, qt.Qt.Unchecked, qt.Qt.CheckStateRole)
else:
self.model().setData(
index, qt.Qt.Checked, qt.Qt.CheckStateRole)
except AttributeError:
pass
def removeSelected(self):
items = self.selectedItems()
parentItems = dict()
for item in items:
parentItem = item.parentItem
remove = True
while parentItem is not self.model().rootItem:
if parentItem in items:
remove = False
break
parentItem = parentItem.parentItem
            # If an ancestor is selected for removal, skip the item.
if not remove:
continue
# Get the parent item for the current item.
parentItem = item.parentItem
if parentItem not in parentItems:
parentItems[parentItem] = list()
# Create a list with the positions of the children that are
# going to be removed.
parentItems[parentItem].append(item.childPosition())
# Remove the rows from the parent.
for parentItem in parentItems:
rows = parentItems[parentItem]
parent = self.model().indexFromItem(parentItem)
for row in reversed(sorted(rows)):
self.model().removeRow(row, parent)
def copyValueToToggled(self, indexAt):
"""Copy the value under the cursor to the toggled items."""
indexes = self.model().visitModel(columns=True)
for index in indexes:
item = self.model().itemFromIndex(index)
if not item.isChecked or index == indexAt:
continue
elif index.column() == indexAt.column():
value = self.model().data(indexAt)
self.model().setData(index, value)
def copyValueToSelected(self, indexAt):
"""Copy the value under the cursor to the selected indexes."""
indexes = self.selectionModel().selectedIndexes()
for index in indexes:
if index == indexAt:
continue
elif index.column() == indexAt.column():
value = self.model().data(indexAt)
self.model().setData(index, value)
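# A minimal usage sketch (illustrative only; it assumes a compatible model
# class, here called TreeModel, ships alongside these views -- the import
# path below is hypothetical):
# from sloth.gui.daxs.models import TreeModel
# app = qt.QApplication([])
# view = TreeView()
# view.setModel(TreeModel())
# view.addExperiment('Demo')
# view.show()
# app.exec_()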
|
from setuptools import setup
with open('README.rst', 'r') as file:
long_desc = file.read()
version = __import__('django_mediamosa').get_version()
setup(
name='django-mediamosa',
version=version,
author='UGent Portaal Team',
author_email='portaal-tech@ugent.be',
packages=['django_mediamosa', 'tests'],
scripts=[],
url='https://github.com/UGentPortaal/django-mediamosa',
license='BSD',
description='Django integration support for the mediamosa api.',
long_description=long_desc,
    install_requires=[
        'mediamosa>=0.0.2',
    ],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities'
],
)
|
from .logic_adapter import LogicAdapter
from .closest_match import ClosestMatchAdapter
from .closest_meaning import ClosestMeaningAdapter
from .time_adapter import TimeLogicAdapter
from .multi_adapter import MultiLogicAdapter
from .no_knowledge_adapter import NoKnowledgeAdapter
from .mathematical_evaluation import MathematicalEvaluation
from .approximate_sentence_match import ApproximateSentenceMatchAdapter
from .sentiment_adapter import SentimentAdapter
|
import sys
# Add your fancy code here
print(f"Running with argument {sys.argv[1]}")
|
# -*- coding: utf-8 -*-
'''
##########################
Acme::MetaSyntactic::screw
##########################
****
NAME
****
Acme::MetaSyntactic::screw - The screw drives theme
***********
DESCRIPTION
***********
This theme lists different screw drive types.
See `https://en.wikipedia.org/wiki/Screw_drive <https://en.wikipedia.org/wiki/Screw_drive>`_.
***********
CONTRIBUTOR
***********
Abigail
*******
CHANGES
*******
- \*
2012-09-17 - v1.000
Expanded the list thanks to Wikipedia,
and published in Acme-MetaSyntactic-Themes version 1.019.
- \*
2006-05-13
Submitted by Abigail.
********
SEE ALSO
********
`Acme::MetaSyntactic <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic&mode=module>`_, `Acme::MetaSyntactic::List <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic%3a%3aList&mode=module>`_.
'''
name = 'screw_drives'
DATA = '''\
# names
square
hex
pentagon
thumbscrew
slot
cross cross_recess
Phillips
Frearson Reed_and_Prince
French_recess BNAE_NFL22_070
JIS_B_1012
Mortorq
Pozidriv
Supadriv
Robertson Scrulox
hex_socket Allen
hexalobular_socket Torx star_drive
TTAP
Phillips_square Quadrex Deck_Mate
breakaway_head
Bristol
clutch clutch_type_A clutch_type_G
claw
double_hex
line ALR2 ALR3 ALR4 ALR5 ALR6 ALH2 ALH3 ALH4 ALH5 ALH6 ALR3T
one_way
pentalobe
polydrive
spanner snake_eyes
spline
Torq_set
TA
TP3
tri_wing triangular_slotted tri_groove Opsit
Triple_square XZN
protruding_obstacle\
'''
from metasyntactic.base import parse_data
from random import choice, shuffle
from six import iteritems
data = parse_data(DATA)
def default():
try:
if 'default' in data:
return data['default'][0]
except (KeyError, IndexError):
pass
return 'en'
def all():
acc = set()
for category, names in iteritems(data['names']):
if names:
acc |= names
return acc
def names(category=None):
if not category:
category = default()
if category == ':all':
return list(all())
category = category.replace('/', ' ')
return list(data['names'][category])
def random(n=1, category=None):
got = names(category)
if got:
shuffle(got)
if n == 1:
return choice(got)
return got[:n]
def categories():
return set(data['names'])
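if __name__ == '__main__':
    # Illustrative self-check (not part of the original theme module):
    print(len(all()))         # number of screw-drive names in the theme
    print(random(3, ':all'))  # three names drawn at random from all categories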
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future.builtins import *
import time
import contextlib
import numpy as np
import audiolazy
from scipy import signal
class Measurement(object):
"""Abstract base class defining the heat capacity measurement interface.
:param currentsource: An object implementing the :class:`CurrentSource`
interface.
:param powermeter: An object implementing the :class:`Powermeter` interface.
:param thermometer: An object implementing the :class:`Thermometer`
interface.
"""
def __init__(self, currentsource, powermeter, thermometer):
self.currentsource = currentsource
self.powermeter = powermeter
self.thermometer = thermometer
def start(self):
"""Starts the heat capacity measurement."""
raise NotImplementedError()
def measure(self):
"""Measures the time, heater power and platform temperature."""
timestamp = time.time()
current = self.currentsource.current
heater_voltage = self.powermeter.voltage
temperature = self.thermometer.temperature
return timestamp, current * heater_voltage, temperature
class CurrentSource(object):
"""Abstract base class defining the current source interface."""
@property
def current(self):
raise NotImplementedError()
@current.setter
def current(self, value):
raise NotImplementedError()
class Powermeter(object):
"""Abstract base class defining the powermeter interface."""
@property
def voltage(self):
raise NotImplementedError()
class Thermometer(object):
"""Abstract base class defining the thermometer interface."""
@property
def temperature(self):
raise NotImplementedError()
class PulseMeasurement(Measurement):
"""A heat capacity measurement using a predefined pulse sequence.
:param currentsource: An object implementing the :class:`CurrentSource`
interface.
:param powermeter: An object implementing the :class:`Powermeter` interface.
:param thermometer: An object implementing the :class:`Thermometer`
interface.
:param pulse: A sequence of current values.
:param sampling_time: The sampling time.
"""
def __init__(self, currentsource, powermeter, thermometer, pulse, sampling_time):
super(PulseMeasurement, self).__init__(currentsource, powermeter, thermometer)
self.pulse = pulse
self.sampling_time = sampling_time
def start(self):
"""Starts the heat capacity measurement."""
data = []
for current in self.pulse:
with sampling(self.sampling_time):
self.currentsource.current = current
                data.append(self.measure())
timestamp, power, temperature = zip(*data)
return timestamp, power, temperature
class AdaptiveStep(Measurement):
def __init__(self, currentsource, powermeter, thermometer, duration, max_current, min_current=0., window=None, sampling=0.1):
super(AdaptiveStep, self).__init__(currentsource, powermeter, thermometer)
self.duration = duration
self.max_current = max_current
self.min_current = min_current
self.sampling = sampling
self.deriv_threshold = 1
        if window is None:
            # savgol_coeffs needs an odd integer window length; default to
            # roughly a fifth of the expected number of samples.
            window = int(duration / sampling / 5)
            self.window = window + 1 if window % 2 == 0 else window
        else:
            self.window = window
self.order = 2
def start(self, verbose=False):
data = []
derivative = []
input = audiolazy.ControlStream(0.)
        deriv_filt = savitzky_golay(self.window, self.order, deriv=1, sampling=self.sampling)(input)
# measure steady state
start = time.time()
self.currentsource.current = self.min_current
while time.time() - start < self.duration:
with sampling(self.sampling):
timestamp, power, temperature = self.measure()
data.append((timestamp, power, temperature))
# Update derivative filter
input.value = temperature / power if power else temperature
derivative.append(deriv_filt.take())
#measure response to heat pulse
start = time.time()
self.currentsource.current = self.max_current
while (time.time() - start < self.duration) or (np.abs(derivative[-1]) > self.deriv_threshold):
with sampling(self.sampling):
timestamp, power, temperature = self.measure()
data.append((timestamp, power, temperature))
# Update derivative filter
input.value = temperature / power if power else temperature
derivative.append(deriv_filt.take())
#measure decay for the same time
duration = time.time() - start
start = time.time()
self.currentsource.current = self.min_current
while time.time() - start < duration:
with sampling(self.sampling):
timestamp, power, temperature = self.measure()
data.append((timestamp, power, temperature))
# Update derivative filter
input.value = temperature / power if power else temperature
derivative.append(deriv_filt.take())
timestamp, power, temperature = zip(*data)
if verbose:
return timestamp, power, temperature, derivative
return timestamp, power, temperature
def savitzky_golay(window, order, deriv=1, sampling=1.):
return audiolazy.ZFilter(numerator=signal.savgol_coeffs(window, order, deriv=deriv, delta=sampling, use='conv').tolist())
@contextlib.contextmanager
def sampling(step, sleep_ratio=0.01):
assert step > 0
start = time.time()
yield
while (time.time() - start) < step:
time.sleep(step * sleep_ratio)
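if __name__ == '__main__':
    # A minimal wiring sketch (illustrative only): constant-valued stand-ins
    # replace real instrument drivers so PulseMeasurement can run end to end.
    class FakeSource(CurrentSource):
        _value = 0.
        @property
        def current(self):
            return self._value
        @current.setter
        def current(self, value):
            self._value = value
    class FakeMeter(Powermeter):
        voltage = 1.0  # volts, fixed for the sketch
    class FakeThermometer(Thermometer):
        temperature = 300.0  # kelvin, fixed for the sketch
    pulse = [0., 1e-3, 1e-3, 0.]  # amperes: off, on, on, off
    measurement = PulseMeasurement(FakeSource(), FakeMeter(), FakeThermometer(),
                                   pulse, sampling_time=0.01)
    print(measurement.start())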
|
"""
IP Types
"""
import logging
from ipaddress import ip_address
from socket import AF_INET, AF_INET6
from vpp_papi import VppEnum
from vpp_object import VppObject
try:
text_type = unicode
except NameError:
text_type = str
_log = logging.getLogger(__name__)
class DpoProto:
DPO_PROTO_IP4 = 0
DPO_PROTO_IP6 = 1
DPO_PROTO_MPLS = 2
DPO_PROTO_ETHERNET = 3
DPO_PROTO_BIER = 4
DPO_PROTO_NSH = 5
INVALID_INDEX = 0xffffffff
def get_dpo_proto(addr):
if ip_address(addr).version == 6:
return DpoProto.DPO_PROTO_IP6
else:
return DpoProto.DPO_PROTO_IP4
class VppIpAddressUnion():
def __init__(self, addr):
self.addr = addr
self.ip_addr = ip_address(text_type(self.addr))
def encode(self):
if self.version == 6:
return {'ip6': self.ip_addr}
else:
return {'ip4': self.ip_addr}
@property
def version(self):
return self.ip_addr.version
@property
def address(self):
return self.addr
@property
def length(self):
return self.ip_addr.max_prefixlen
@property
def bytes(self):
return self.ip_addr.packed
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.ip_addr == other.ip_addr
elif hasattr(other, "ip4") and hasattr(other, "ip6"):
# vl_api_address_union_t
if 4 == self.version:
return self.ip_addr == other.ip4
else:
return self.ip_addr == other.ip6
else:
raise Exception("Comparing VppIpAddressUnions:%s"
" with incomparable type: %s",
self, other)
def __ne__(self, other):
return not (self == other)
def __str__(self):
return str(self.ip_addr)
class VppIpMPrefix():
def __init__(self, saddr, gaddr, glen):
self.saddr = saddr
self.gaddr = gaddr
self.glen = glen
if ip_address(self.saddr).version != \
ip_address(self.gaddr).version:
raise ValueError('Source and group addresses must be of the '
'same address family.')
def encode(self):
return {
'af': ip_address(self.gaddr).vapi_af,
'grp_address': {
ip_address(self.gaddr).vapi_af_name: self.gaddr
},
'src_address': {
ip_address(self.saddr).vapi_af_name: self.saddr
},
'grp_address_length': self.glen,
}
@property
def length(self):
return self.glen
@property
def version(self):
return ip_address(self.gaddr).version
def __str__(self):
return "(%s,%s)/%d" % (self.saddr, self.gaddr, self.glen)
def __eq__(self, other):
if isinstance(other, self.__class__):
            return (self.glen == other.glen and
                    self.gaddr == other.gaddr and
                    self.saddr == other.saddr)
elif (hasattr(other, "grp_address_length") and
hasattr(other, "grp_address") and
hasattr(other, "src_address")):
# vl_api_mprefix_t
if 4 == self.version:
return (self.glen == other.grp_address_length and
self.gaddr == str(other.grp_address.ip4) and
self.saddr == str(other.src_address.ip4))
else:
return (self.glen == other.grp_address_length and
self.gaddr == str(other.grp_address.ip6) and
self.saddr == str(other.src_address.ip6))
return NotImplemented
class VppIpPuntPolicer(VppObject):
def __init__(self, test, policer_index, is_ip6=False):
self._test = test
self._policer_index = policer_index
self._is_ip6 = is_ip6
def add_vpp_config(self):
self._test.vapi.ip_punt_police(policer_index=self._policer_index,
is_ip6=self._is_ip6, is_add=True)
def remove_vpp_config(self):
self._test.vapi.ip_punt_police(policer_index=self._policer_index,
is_ip6=self._is_ip6, is_add=False)
    def query_vpp_config(self):
        raise NotImplementedError()
class VppIpPuntRedirect(VppObject):
def __init__(self, test, rx_index, tx_index, nh_addr):
self._test = test
self._rx_index = rx_index
self._tx_index = tx_index
self._nh_addr = ip_address(nh_addr)
def encode(self):
return {"rx_sw_if_index": self._rx_index,
"tx_sw_if_index": self._tx_index, "nh": self._nh_addr}
def add_vpp_config(self):
self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=True)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=False)
def get_vpp_config(self):
        is_ipv6 = self._nh_addr.version == 6
return self._test.vapi.ip_punt_redirect_dump(
sw_if_index=self._rx_index, is_ipv6=is_ipv6)
def query_vpp_config(self):
if self.get_vpp_config():
return True
return False
class VppIpPathMtu(VppObject):
def __init__(self, test, nh, pmtu, table_id=0):
self._test = test
self.nh = nh
self.pmtu = pmtu
self.table_id = table_id
def add_vpp_config(self):
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': self.pmtu})
self._test.registry.register(self, self._test.logger)
return self
def modify(self, pmtu):
self.pmtu = pmtu
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': self.pmtu})
return self
def remove_vpp_config(self):
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': 0})
def query_vpp_config(self):
ds = list(self._test.vapi.vpp.details_iter(
self._test.vapi.ip_path_mtu_get))
for d in ds:
if self.nh == str(d.pmtu.nh) \
and self.table_id == d.pmtu.table_id \
and self.pmtu == d.pmtu.path_mtu:
return True
return False
def object_id(self):
return ("ip-path-mtu-%d-%s-%d" % (self.table_id,
self.nh,
self.pmtu))
def __str__(self):
return self.object_id()
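if __name__ == '__main__':
    # Illustrative only (assumes the vpp_papi/vpp_object test dependencies
    # are importable): the address helpers can be exercised standalone.
    u = VppIpAddressUnion('10.0.0.1')
    print(u.version, u.length, u.encode())
    p = VppIpMPrefix('10.0.0.1', '232.1.1.1', 32)
    print(p)  # -> (10.0.0.1,232.1.1.1)/32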
|
import tensorflow as tf
import numpy as np
'''Demonstrates how to feed a custom input pipeline to TensorFlow models.
'''
def input_pipe_fn(session):
dataset = tf.contrib.data.Dataset.from_tensor_slices(
tf.reshape(np.arange(200), [-1, 2])
)
dataset = dataset.batch(4)
iterator = dataset.make_initializable_iterator()
next_element = iterator.get_next()
session.run(iterator.initializer)
return next_element
def training_fn(batch):
return batch + 1
def evaluation_fn(batch):
return batch - 1
if __name__ == '__main__':
with tf.Session() as session:
session.run(tf.global_variables_initializer())
_batch = input_pipe_fn(session)
train_op = training_fn(_batch)
evaluation_op = evaluation_fn(_batch)
        _batch_output, _train_output, _evaluation_output = session.run([_batch, train_op, evaluation_op])
        print(_batch_output)
        print('+++' * 40)
        print(_train_output)
        print('+++' * 40)
        print(_evaluation_output)
        print('---' * 40)
        _batch_output, _train_output = session.run([_batch, train_op])
        print(_batch_output)
        print('+++' * 40)
        print(_train_output)
        print('+++' * 40)
        _batch_output, _evaluation_output = session.run([_batch, evaluation_op])
        print(_batch_output)
        print('+++' * 40)
        print(_evaluation_output)
|