content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
# -*- coding: utf-8 -*- """ micropy.utils.decorators ~~~~~~~~~~~~~~ This module contains generic decorators used by MicropyCli """ __all__ = ['lazy_property'] def lazy_property(fn): attr = '_lazy__' + fn.__name__ @property def _lazy_property(self): if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazy_property
""" micropy.utils.decorators ~~~~~~~~~~~~~~ This module contains generic decorators used by MicropyCli """ __all__ = ['lazy_property'] def lazy_property(fn): attr = '_lazy__' + fn.__name__ @property def _lazy_property(self): if not hasattr(self, attr): setattr(self, attr, fn(self)) return getattr(self, attr) return _lazy_property
""" Un obrero necesita calcular su salario semanal, el cual se obtiene de la siguiente manera: Si trabaja 40 horas o menos se le paga 16 euros por hora Si trabaja mas de 40 horas se le paga 16 euros por cada una de las primeras 40 horas y 20 euros por cada hora extra. """ print("============SALARIO SEMANAL OBRERO============") hours_work = int(input("Horas trabajadas: ")) if (hours_work <= 40): print("El salario semanal del trabajador por trabajar {0} horas es: {1}".format( hours_work, hours_work * 16 )) else : extraHours = hours_work - 40 print(f"El trabajador ha trabajado 40 horas con {extraHours} horas extras") print(f"El salario semanal por 40 horas son {40 * 16}") print(f"El salario semanal por {extraHours} horas extras son {extraHours * 20}") print("El salario semanal del trabajador por trabajar {0} es {1}".format( hours_work, (40*16) + (extraHours * 20) ))
""" Un obrero necesita calcular su salario semanal, el cual se obtiene de la siguiente manera: Si trabaja 40 horas o menos se le paga 16 euros por hora Si trabaja mas de 40 horas se le paga 16 euros por cada una de las primeras 40 horas y 20 euros por cada hora extra. """ print('============SALARIO SEMANAL OBRERO============') hours_work = int(input('Horas trabajadas: ')) if hours_work <= 40: print('El salario semanal del trabajador por trabajar {0} horas es: {1}'.format(hours_work, hours_work * 16)) else: extra_hours = hours_work - 40 print(f'El trabajador ha trabajado 40 horas con {extraHours} horas extras') print(f'El salario semanal por 40 horas son {40 * 16}') print(f'El salario semanal por {extraHours} horas extras son {extraHours * 20}') print('El salario semanal del trabajador por trabajar {0} es {1}'.format(hours_work, 40 * 16 + extraHours * 20))
palavras = ('APRENDER', 'ESTUDAR', 'PROGRAMAR', 'AUTOMATIZAR', 'ANALISTA DE TESTES', 'PYTHON', 'ROBOT FRAMEWORK') for c in palavras: print(f'\nNa palavra {c.upper()} temos ',end='') for vogal in c: if vogal.lower() in 'aeiou': print(vogal.lower(), end='')
palavras = ('APRENDER', 'ESTUDAR', 'PROGRAMAR', 'AUTOMATIZAR', 'ANALISTA DE TESTES', 'PYTHON', 'ROBOT FRAMEWORK') for c in palavras: print(f'\nNa palavra {c.upper()} temos ', end='') for vogal in c: if vogal.lower() in 'aeiou': print(vogal.lower(), end='')
russian_word_list = [] abkhazian_word_list = [] outputfile = 'ab-ru-probability.dic' output = open(outputfile,"w+") cyrillic_encoding="utf-8" probability = 0.9 #read the russian word into the list with open('../draft/dictionary_prescript.ru', 'r+',encoding=cyrillic_encoding) as f: russian_word_list = f.read().splitlines() with open('../draft/dictionary.ru', 'r+',encoding=cyrillic_encoding) as f: russian_word_list += f.read().splitlines() # read also the abkhazian translations with open('../draft/dictionary_prescript.ab', 'r+',encoding=cyrillic_encoding) as f: abkhazian_word_list = f.read().splitlines() with open('../draft/dictionary.ab', 'r+',encoding=cyrillic_encoding) as f: abkhazian_word_list += f.read().splitlines() for translation_tuple in zip(abkhazian_word_list, russian_word_list): if probability: output.write(translation_tuple[0]+"\t"+translation_tuple[1]+"\t"+str(probability)+"\n") else: output.write(translation_tuple[0]+"\t"+translation_tuple[1]+"\n") ''' The generated dictionary can be used with https://github.com/bitextor bifixer: python3 bifixer/bifixer.py --scol 1 --tcol 2 --ignore_duplicates ru-ab-parallel.txt ru-ab-parallel.bifixed ru ab apply the hardrules: python3 bicleaner/bicleaner_hardrules.py ru-ab-parallel.bifixed ru-ab-parallel.clean -s ru -t ab --scol 1 --tcol 2 --disable_lm_filter train bicleaner: python3.7 bicleaner/bicleaner_train.py \ ru-ab-parallel.clean \ --treat_oovs --normalize_by_length \ -s ru -t ab \ -d ru-ab-probability.dic.gz -D ab-ru-probability.dic.gz \ -b 1000 -c ru-ab.classifier \ -g 10000 -w 10000 \ -m ru-ab.yaml \ --classifier_type random_forest \ --lm_training_file_sl lmtrain.ru-ab.ru --lm_training_file_tl lmtrain.ru-ab.ab \ --lm_file_sl model.ru-ab.ru --lm_file_tl model.ru-ab.ab '''
russian_word_list = [] abkhazian_word_list = [] outputfile = 'ab-ru-probability.dic' output = open(outputfile, 'w+') cyrillic_encoding = 'utf-8' probability = 0.9 with open('../draft/dictionary_prescript.ru', 'r+', encoding=cyrillic_encoding) as f: russian_word_list = f.read().splitlines() with open('../draft/dictionary.ru', 'r+', encoding=cyrillic_encoding) as f: russian_word_list += f.read().splitlines() with open('../draft/dictionary_prescript.ab', 'r+', encoding=cyrillic_encoding) as f: abkhazian_word_list = f.read().splitlines() with open('../draft/dictionary.ab', 'r+', encoding=cyrillic_encoding) as f: abkhazian_word_list += f.read().splitlines() for translation_tuple in zip(abkhazian_word_list, russian_word_list): if probability: output.write(translation_tuple[0] + '\t' + translation_tuple[1] + '\t' + str(probability) + '\n') else: output.write(translation_tuple[0] + '\t' + translation_tuple[1] + '\n') '\nThe generated dictionary can be used with https://github.com/bitextor\n\nbifixer:\npython3 bifixer/bifixer.py --scol 1 --tcol 2 --ignore_duplicates ru-ab-parallel.txt ru-ab-parallel.bifixed ru ab\n\napply the hardrules:\n\npython3 bicleaner/bicleaner_hardrules.py ru-ab-parallel.bifixed ru-ab-parallel.clean -s ru -t ab --scol 1 --tcol 2 --disable_lm_filter\n\ntrain bicleaner:\n\npython3.7 bicleaner/bicleaner_train.py ru-ab-parallel.clean --treat_oovs --normalize_by_length -s ru -t ab -d ru-ab-probability.dic.gz -D ab-ru-probability.dic.gz -b 1000 -c ru-ab.classifier -g 10000 -w 10000 -m ru-ab.yaml --classifier_type random_forest --lm_training_file_sl lmtrain.ru-ab.ru --lm_training_file_tl lmtrain.ru-ab.ab --lm_file_sl model.ru-ab.ru --lm_file_tl model.ru-ab.ab\n\n'
num1 = 10 num2 = 3 result = num1 / num2 print(result) # 10//3 # ==> 3 print(10//3);
num1 = 10 num2 = 3 result = num1 / num2 print(result) print(10 // 3)
ipDDN = "192.168.0.1" octets = ipDDN.split(".") #convert ddn to int ip = int(octets[0]) << 24 ip += int(octets[1]) << 16 ip += int(octets[2]) << 8 ip += int(octets[3]) print(ip)
ip_ddn = '192.168.0.1' octets = ipDDN.split('.') ip = int(octets[0]) << 24 ip += int(octets[1]) << 16 ip += int(octets[2]) << 8 ip += int(octets[3]) print(ip)
# Write a program to turn a number into its English name # Assume that the number is positive below 1000 # for more info on this quiz, go to this url: http://www.programmr.com/word-representation-number ones = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"] teens = ["ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen", "eighteen", "nineteen"] decade = ["", "ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety"] def first(num): for i in num: if num[0] == i: return ones[i] + " " + "hundred" def three_numb(num): for i in num: if num[1] == 0 and num[2] == 0: return"" elif num[1] == 0 and num[2] == i: return "and" + " " + ones[i] elif num[1] == i and num[2] == 0: return "and" + " " + decade[i] elif num[1] == 1 and num[2] == i: return "and" + " " + teens[i] elif num[1] > 1 and num[2] == i: return "and" + " " + decade[num[1]] + " " + ones[i] def two_numb(num): for i in num: if num[0] == i and num[1] == 0: return decade[i] elif num[0] == 1 and num[1] == i: return teens[i] elif num[0] > 1 and num[1] == i: return decade[num[0]] + " " + ones[i] def single_numb(num): for i in num: return ones[i] def word_rep_of_number(number): num = list(map(int, str(number))) if len(num) == 3: return first(num) + " " + three_numb(num) elif len(num) == 2: return two_numb(num) elif len(num) == 1: return single_numb(num) if __name__ == "__main__": print(word_rep_of_number(310))
ones = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine'] teens = ['ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen', 'nineteen'] decade = ['', 'ten', 'twenty', 'thirty', 'forty', 'fifty', 'sixty', 'seventy', 'eighty', 'ninety'] def first(num): for i in num: if num[0] == i: return ones[i] + ' ' + 'hundred' def three_numb(num): for i in num: if num[1] == 0 and num[2] == 0: return '' elif num[1] == 0 and num[2] == i: return 'and' + ' ' + ones[i] elif num[1] == i and num[2] == 0: return 'and' + ' ' + decade[i] elif num[1] == 1 and num[2] == i: return 'and' + ' ' + teens[i] elif num[1] > 1 and num[2] == i: return 'and' + ' ' + decade[num[1]] + ' ' + ones[i] def two_numb(num): for i in num: if num[0] == i and num[1] == 0: return decade[i] elif num[0] == 1 and num[1] == i: return teens[i] elif num[0] > 1 and num[1] == i: return decade[num[0]] + ' ' + ones[i] def single_numb(num): for i in num: return ones[i] def word_rep_of_number(number): num = list(map(int, str(number))) if len(num) == 3: return first(num) + ' ' + three_numb(num) elif len(num) == 2: return two_numb(num) elif len(num) == 1: return single_numb(num) if __name__ == '__main__': print(word_rep_of_number(310))
# https://www.codewars.com/kata/exclamation-marks-series-number-17-put-the-exclamation-marks-and-question-marks-to-the-balance-are-they-balanced/train/python leftString = '!!' rightString = '??' def balance(left, right): if ((left.count('!') * 2) + (left.count('?') * 3)) == ((right.count('!') * 2) + (right.count('?') * 3)): return 'Balance' elif ((left.count('!') * 2) + (left.count('?') * 3)) > ((right.count('!') * 2) + (right.count('?') * 3)): return 'Left' elif ((left.count('!') * 2) + (left.count('?') * 3)) < ((right.count('!') * 2) + (right.count('?') * 3)): return 'Right' print(balance("!!","??") == "Right") print(balance("!??","?!!") == "Left") print(balance("!?!!","?!?") == "Left") print(balance("!!???!????","??!!?!!!!!!!") == "Balance") balance = lambda * a:['Balance','Left','Right'][cmp(*map(lambda s:2*s.count('!')+3*s.count('?'),a))]
left_string = '!!' right_string = '??' def balance(left, right): if left.count('!') * 2 + left.count('?') * 3 == right.count('!') * 2 + right.count('?') * 3: return 'Balance' elif left.count('!') * 2 + left.count('?') * 3 > right.count('!') * 2 + right.count('?') * 3: return 'Left' elif left.count('!') * 2 + left.count('?') * 3 < right.count('!') * 2 + right.count('?') * 3: return 'Right' print(balance('!!', '??') == 'Right') print(balance('!??', '?!!') == 'Left') print(balance('!?!!', '?!?') == 'Left') print(balance('!!???!????', '??!!?!!!!!!!') == 'Balance') balance = lambda *a: ['Balance', 'Left', 'Right'][cmp(*map(lambda s: 2 * s.count('!') + 3 * s.count('?'), a))]
input_file = "" #file location def print_all(f): print(f.read()) def rewind(f): f.seek(0) def print_a_line(line_count, f): print(line_count, f.readline()) current_file = open(input_file) print("First let's print the whole file:\n") print_all(current_file) print("Now let's rewind, kid of like a tape.") rewind(current_file) print("Let's print three lines:") current_line =1 print_a_line(current_line, current_file) current_line += 1 print_a_line(current_line, current_file) current_line += 1 print_a_line(current_line, current_file)
input_file = '' def print_all(f): print(f.read()) def rewind(f): f.seek(0) def print_a_line(line_count, f): print(line_count, f.readline()) current_file = open(input_file) print("First let's print the whole file:\n") print_all(current_file) print("Now let's rewind, kid of like a tape.") rewind(current_file) print("Let's print three lines:") current_line = 1 print_a_line(current_line, current_file) current_line += 1 print_a_line(current_line, current_file) current_line += 1 print_a_line(current_line, current_file)
#!/usr/bin/env python3 # add one level of indentation to code def indent(code): return [ " " + l for l in code ] # remove one level of indentation from code def unindent(code): cs = [] for l in code: if l != "" and l[0:4] != " ": print("Malformed conditional code '" + l[0:4] +"'") assert False cs.append(l[4:]) return cs # Execute ASL code often has a header like this: # # if ConditionPassed() then # EncodingSpecificOperations(); # # that we need to transform into a more usable form. # Other patterns found are: # - declaring an enumeration before the instruction # - inserting another line of code between the first and second lines. # eg "if PSTATE.EL == EL2 then UNPREDICTABLE;" # - wrapping the entire instruction in # "if code[0].startswith("if CurrentInstrSet() == InstrSet_A32 then"): # # Return value consists of (top, cond, dec, exec): # - additional top level declarations (of enumerations) # - boolean: is the instruction conditional? # - additional decode logic (to be added to start of decode ASL) # - demangled execute logic def demangleExecuteASL(code): tops = None conditional = False decode = None if code[0].startswith("enumeration ") and code[1] == "": tops = code[0] code = code[2:] if code[0].startswith("if CurrentInstrSet() == InstrSet_A32 then"): first = code[0] code = code[1:] mid = code.index("else") code1 = unindent(code[:mid]) code2= unindent(code[mid+1:]) (tops1, conditional1, decode1, code1) = demangleExecuteASL(code1) (tops2, conditional2, decode2, code2) = demangleExecuteASL(code2) assert tops1 == None and tops2 == None assert conditional1 == conditional2 code = [first] + indent(code1) + ["else"] + indent(code2) ([], conditional1, "\n".join([decode1 or "", decode2 or ""]), code) if code[0] == "if ConditionPassed() then": conditional = True code = code[1:] # delete first line code = unindent(code) if code[0] == "bits(128) result;": tmp = code[0] code[0] = code[1] code[1] = tmp elif len(code) >= 2 and code[1] == "EncodingSpecificOperations();": decode 
= code[0] code = code[1:] if code[0].startswith("EncodingSpecificOperations();"): rest = code[0][29:].strip() if rest == "": code = code[1:] else: code[0] = rest return (tops, conditional, decode, code)
def indent(code): return [' ' + l for l in code] def unindent(code): cs = [] for l in code: if l != '' and l[0:4] != ' ': print("Malformed conditional code '" + l[0:4] + "'") assert False cs.append(l[4:]) return cs def demangle_execute_asl(code): tops = None conditional = False decode = None if code[0].startswith('enumeration ') and code[1] == '': tops = code[0] code = code[2:] if code[0].startswith('if CurrentInstrSet() == InstrSet_A32 then'): first = code[0] code = code[1:] mid = code.index('else') code1 = unindent(code[:mid]) code2 = unindent(code[mid + 1:]) (tops1, conditional1, decode1, code1) = demangle_execute_asl(code1) (tops2, conditional2, decode2, code2) = demangle_execute_asl(code2) assert tops1 == None and tops2 == None assert conditional1 == conditional2 code = [first] + indent(code1) + ['else'] + indent(code2) ([], conditional1, '\n'.join([decode1 or '', decode2 or '']), code) if code[0] == 'if ConditionPassed() then': conditional = True code = code[1:] code = unindent(code) if code[0] == 'bits(128) result;': tmp = code[0] code[0] = code[1] code[1] = tmp elif len(code) >= 2 and code[1] == 'EncodingSpecificOperations();': decode = code[0] code = code[1:] if code[0].startswith('EncodingSpecificOperations();'): rest = code[0][29:].strip() if rest == '': code = code[1:] else: code[0] = rest return (tops, conditional, decode, code)
BATCH_SIZE = 64 EPOCHS = 100 IMG_WIDTH = 1801 IMG_HEIGHT = 32 NUM_CHANNELS = 3 NUM_CLASSES = 2 NUM_REGRESSION_OUTPUTS = 24 K_NEGATIVE_SAMPLE_RATIO_WEIGHT = 4 INPUT_SHAPE = (IMG_HEIGHT, IMG_WIDTH, NUM_CHANNELS) PREDICTION_FILE_NAME = 'objects_obs1_lidar_predictions.csv' PREDICTION_MD_FILE_NAME = 'objects_obs1_metadata.csv' WEIGHT_BB = 0.01 LEARNING_RATE = 0.001 LIDAR_CONV_VERTICAL_STRIDE = 1 IMG_CAM_WIDTH = 1368 IMG_CAM_HEIGHT = 512 NUM_CAM_CHANNELS = 1 USE_FEATURE_WISE_BATCH_NORMALIZATION = True USE_SAMPLE_WISE_BATCH_NORMALIZATION = False
batch_size = 64 epochs = 100 img_width = 1801 img_height = 32 num_channels = 3 num_classes = 2 num_regression_outputs = 24 k_negative_sample_ratio_weight = 4 input_shape = (IMG_HEIGHT, IMG_WIDTH, NUM_CHANNELS) prediction_file_name = 'objects_obs1_lidar_predictions.csv' prediction_md_file_name = 'objects_obs1_metadata.csv' weight_bb = 0.01 learning_rate = 0.001 lidar_conv_vertical_stride = 1 img_cam_width = 1368 img_cam_height = 512 num_cam_channels = 1 use_feature_wise_batch_normalization = True use_sample_wise_batch_normalization = False
class MinHeap: def __init__(self, array): # Do not edit the line below. self.heap = self.buildHeap(array) def buildHeap(self, array): # Write your code here. pass def siftDown(self): # Write your code here. pass def siftUp(self): # Write your code here. pass def peek(self): # Write your code here. pass def remove(self): # Write your code here. pass def insert(self, value): # Write your code here. pass
class Minheap: def __init__(self, array): self.heap = self.buildHeap(array) def build_heap(self, array): pass def sift_down(self): pass def sift_up(self): pass def peek(self): pass def remove(self): pass def insert(self, value): pass
NAMES = ["Bill", "Richie", "Ben", "Eddie", "Mike", "Beverly"] while True: searched_name = input("Give a member of The Losers' Club: ") if searched_name is "": break elif (searched_name in NAMES) is True: print("Correct!") else: print("Wrong!")
names = ['Bill', 'Richie', 'Ben', 'Eddie', 'Mike', 'Beverly'] while True: searched_name = input("Give a member of The Losers' Club: ") if searched_name is '': break elif (searched_name in NAMES) is True: print('Correct!') else: print('Wrong!')
# -*- coding: utf-8 -*- # @Author: Mujib # @Date: 2017-07-15 15:05:13 # @Last Modified by: Mujib # @Last Modified time: 2017-07-15 15:20:03 actualString = 'Monster truck rally. 4pm. Monday' print( 'This is upperCase >>> ' + actualString.upper() ) print( '---------------------------------------------' ) print( 'This is lowerCase >>> ' + actualString.lower() ) print( '---------------------------------------------' ) print( 'Is string ends with ".jpg" >>> ' ) print( actualString.endswith( '.jpg' ) ) print( '---------------------------------------------' ) print( 'Is string starts with "Monster" >>> ' ) print( actualString.startswith( 'Monster' ) ) print( '---------------------------------------------' ) print( 'This will strip the given string.' ) print( actualString.strip() ) print( '---------------------------------------------' ) print( 'This will find the given text and return index. i.e. "python"' ) print( actualString.find( 'python' ) ) print( '---------------------------------------------' ) print( 'This will implement replace the words or letter in a given string.' ) print( actualString.replace( 'Monday', 'Friday' ) ) print( '---------------------------------------------' )
actual_string = 'Monster truck rally. 4pm. Monday' print('This is upperCase >>> ' + actualString.upper()) print('---------------------------------------------') print('This is lowerCase >>> ' + actualString.lower()) print('---------------------------------------------') print('Is string ends with ".jpg" >>> ') print(actualString.endswith('.jpg')) print('---------------------------------------------') print('Is string starts with "Monster" >>> ') print(actualString.startswith('Monster')) print('---------------------------------------------') print('This will strip the given string.') print(actualString.strip()) print('---------------------------------------------') print('This will find the given text and return index. i.e. "python"') print(actualString.find('python')) print('---------------------------------------------') print('This will implement replace the words or letter in a given string.') print(actualString.replace('Monday', 'Friday')) print('---------------------------------------------')
cffi_template = '''from cffi import FFI def link_clib(block_cell_name): ffi = FFI() ffi.cdef(r\'\'\'{{headers}}\'\'\') C = ffi.dlopen('{{dynlib_file}}') return ffi, C '''
cffi_template = "from cffi import FFI\ndef link_clib(block_cell_name):\n ffi = FFI()\n ffi.cdef(r'''{{headers}}''')\n C = ffi.dlopen('{{dynlib_file}}')\n return ffi, C\n"
class Store: def __init__(self, name, categories): self.name = name self.categories = categories def __str__(self): output = f"{self.name}\n" for idx, category in enumerate(self.categories): output += " " + str(idx+1) + ", " + category + "\n" return output my_store = Store("The Dugout", ["Running", "Baseball", "Basketball"]) print(my_store) selection = input("Select the number of a department:") print("The user selected " + str(selection))
class Store: def __init__(self, name, categories): self.name = name self.categories = categories def __str__(self): output = f'{self.name}\n' for (idx, category) in enumerate(self.categories): output += ' ' + str(idx + 1) + ', ' + category + '\n' return output my_store = store('The Dugout', ['Running', 'Baseball', 'Basketball']) print(my_store) selection = input('Select the number of a department:') print('The user selected ' + str(selection))
''' Gui/Views/Dialogs _________________ Subset of views that are specifically for QDialog and QMessageBox classes. :copyright: (c) 2015 The Regents of the University of California. :license: GNU GPL, see licenses/GNU GPLv3.txt for more details. ''' __all__ = [ 'base', 'export', 'findreplace', 'information', 'save' ]
""" Gui/Views/Dialogs _________________ Subset of views that are specifically for QDialog and QMessageBox classes. :copyright: (c) 2015 The Regents of the University of California. :license: GNU GPL, see licenses/GNU GPLv3.txt for more details. """ __all__ = ['base', 'export', 'findreplace', 'information', 'save']
_base_ = [ '../../_base_/models/tanet_r50.py', '../../_base_/default_runtime.py' ] # dataset settings dataset_type = 'VideoDataset' data_root = '/home/petros/Datasets/hmdb51/videos' data_root_val = '/home/petros/Datasets/hmdb51/videos' ann_file_train = '/home/petros/Datasets/hmdb51/hmdb51_train_split_1_videos.txt' ann_file_val = '/home/petros/Datasets/hmdb51/hmdb51_val_split_1_videos.txt' ann_file_test = '/home/petros/Datasets/hmdb51/hmdb51_val_split_1_videos.txt' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='DecordInit'), dict(type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict( type='MultiScaleCrop', input_size=224, scales=(1, 0.875, 0.75, 0.66), random_crop=False, max_wh_scale_gap=1, num_fixed_crops=13), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ dict(type='DecordInit'), dict( type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=False), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs', 'label']) ] test_pipeline = [ dict(type='DecordInit'), dict( type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), # dict(type='ThreeCrop', crop_size=256), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', 
keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) ] data = dict( videos_per_gpu=16, workers_per_gpu=6, train=dict( type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, pipeline=train_pipeline), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline), test=dict( type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline)) # optimizer optimizer = dict( type='SGD', constructor='TSMOptimizerConstructor', paramwise_cfg=dict(fc_lr5=True), lr=0.001, # this lr is used for 8 gpus momentum=0.9, weight_decay=0.0005) optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) # learning policy lr_config = dict(policy='step', warmup='linear', warmup_by_epoch=True, warmup_iters=5, warmup_ratio=0.1, step=[20, 30]) total_epochs = 40 checkpoint_config = dict(interval=5) evaluation = dict( interval=3, metrics=['top_k_accuracy', 'mean_class_accuracy']) log_config = dict( interval=20, hooks=[ dict(type='TextLoggerHook'), dict(type='TensorboardLoggerHook'), ]) # runtime settings log_level = 'INFO' work_dir = './work_dirs/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/' workflow = [('train', 1)] # use the pre-trained model for the whole TANET network load_from = 'checkpoints/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth' resume_from = None
_base_ = ['../../_base_/models/tanet_r50.py', '../../_base_/default_runtime.py'] dataset_type = 'VideoDataset' data_root = '/home/petros/Datasets/hmdb51/videos' data_root_val = '/home/petros/Datasets/hmdb51/videos' ann_file_train = '/home/petros/Datasets/hmdb51/hmdb51_train_split_1_videos.txt' ann_file_val = '/home/petros/Datasets/hmdb51/hmdb51_val_split_1_videos.txt' ann_file_test = '/home/petros/Datasets/hmdb51/hmdb51_val_split_1_videos.txt' img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [dict(type='DecordInit'), dict(type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='MultiScaleCrop', input_size=224, scales=(1, 0.875, 0.75, 0.66), random_crop=False, max_wh_scale_gap=1, num_fixed_crops=13), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs', 'label'])] val_pipeline = [dict(type='DecordInit'), dict(type='DenseSampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=False), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs', 'label'])] test_pipeline = [dict(type='DecordInit'), dict(type='SampleFrames', clip_len=1, frame_interval=1, num_clips=8, test_mode=True), dict(type='DecordDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs'])] 
data = dict(videos_per_gpu=16, workers_per_gpu=6, train=dict(type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, pipeline=train_pipeline), val=dict(type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline), test=dict(type=dataset_type, ann_file=ann_file_test, data_prefix=data_root_val, pipeline=test_pipeline)) optimizer = dict(type='SGD', constructor='TSMOptimizerConstructor', paramwise_cfg=dict(fc_lr5=True), lr=0.001, momentum=0.9, weight_decay=0.0005) optimizer_config = dict(grad_clip=dict(max_norm=20, norm_type=2)) lr_config = dict(policy='step', warmup='linear', warmup_by_epoch=True, warmup_iters=5, warmup_ratio=0.1, step=[20, 30]) total_epochs = 40 checkpoint_config = dict(interval=5) evaluation = dict(interval=3, metrics=['top_k_accuracy', 'mean_class_accuracy']) log_config = dict(interval=20, hooks=[dict(type='TextLoggerHook'), dict(type='TensorboardLoggerHook')]) log_level = 'INFO' work_dir = './work_dirs/tanet_r50_dense_1x1x8_100e_kinetics400_rgb/' workflow = [('train', 1)] load_from = 'checkpoints/tanet/tanet_r50_dense_1x1x8_100e_kinetics400_rgb_20210219-032c8e94.pth' resume_from = None
# https://leetcode.com/problems/kth-smallest-element-in-a-bst/ class Solution(object): result = [] def getSmallest(self, root, k): global y if root != None: left = self.getSmallest(root.left) if left != None: self.result.append(left.val) y -=1 if y == 0: self.result.append(root.val) right = self.getSmallest(root.right) def kthSmallest(self, root, k): """ :type root: TreeNode :type k: int :rtype: int """ print(root) global y y = k self.getSmallest(root) print(self.result) if self.result == None: return None else: return self.result[len(self.result)-1]
class Solution(object): result = [] def get_smallest(self, root, k): global y if root != None: left = self.getSmallest(root.left) if left != None: self.result.append(left.val) y -= 1 if y == 0: self.result.append(root.val) right = self.getSmallest(root.right) def kth_smallest(self, root, k): """ :type root: TreeNode :type k: int :rtype: int """ print(root) global y y = k self.getSmallest(root) print(self.result) if self.result == None: return None else: return self.result[len(self.result) - 1]
class Solution: def search(self, nums, target): low , high = 0 , len(nums)-1 while low <= high: mid = low + (high-low) // 2 # if high is greater than low if nums[mid] == target: return mid if nums[mid] > target: # if target is smaller ignore right half high = mid - 1 else: low = mid + 1 return -1
class Solution: def search(self, nums, target): (low, high) = (0, len(nums) - 1) while low <= high: mid = low + (high - low) // 2 if nums[mid] == target: return mid if nums[mid] > target: high = mid - 1 else: low = mid + 1 return -1
""" Auxilliary list of HEX colors for graphic purposes. Recommended usage, if wanted: `from pylabutils._tools._colors import color_dict [as <name>]` """ __all__ = ['color_dict'] color_dict = { """ Auxilliary list of HEX colors for graphic purposes. """ 'light_blue' : '#60A0FF', 'blue' : '#3385FF', 'dark_blue' : '#0A4AAA', 'light_red' : '#FC6A4B', 'red' : '#FB4A24', 'dark_red' : '#B22305', 'light_yellow' : '#F6FF6C', 'yellow' : '#EDF92B', 'dark_yellow' : '#D2D602', 'light_green' : '#4BFF4B', 'green' : '#0DC40F', 'dark_green' : '#018D01', 'light_orange' : '#FF884D', 'orange' : '#FF641E', 'dark_orange' : '#DC500A', 'light_purple' : '#D480FF', 'violet' : '#CC66FF', # :-) 'purple' : '#BA44F4', 'dark_purple' : '#9619D7', 'light_brown' : '#B37700', 'brown' : '#965A05', 'dark_brown' : '#422804', 'black' : '#000000', 'african_american' : '#503208', # I'm so funny 'white' : '#FFFFFF', # I'll stop the jokes 'grey' : '#909090', }
""" Auxilliary list of HEX colors for graphic purposes. Recommended usage, if wanted: `from pylabutils._tools._colors import color_dict [as <name>]` """ __all__ = ['color_dict'] color_dict = {'\n Auxilliary list of HEX colors for graphic purposes.\n light_blue': '#60A0FF', 'blue': '#3385FF', 'dark_blue': '#0A4AAA', 'light_red': '#FC6A4B', 'red': '#FB4A24', 'dark_red': '#B22305', 'light_yellow': '#F6FF6C', 'yellow': '#EDF92B', 'dark_yellow': '#D2D602', 'light_green': '#4BFF4B', 'green': '#0DC40F', 'dark_green': '#018D01', 'light_orange': '#FF884D', 'orange': '#FF641E', 'dark_orange': '#DC500A', 'light_purple': '#D480FF', 'violet': '#CC66FF', 'purple': '#BA44F4', 'dark_purple': '#9619D7', 'light_brown': '#B37700', 'brown': '#965A05', 'dark_brown': '#422804', 'black': '#000000', 'african_american': '#503208', 'white': '#FFFFFF', 'grey': '#909090'}
# empt_dict = {'01': '31', '02': '29', '03': '31', '04': '30', '05': '31', '06': '30', '07': '31', '08': '31', '09': '30', '10': '31', '11': '30', '12': '31'} # date = '06_04' def get_last_seven(date_input): dict_input = {'01': '31', '02': '29', '03': '31', '04': '30', '05': '31', '06': '30', '07': '31', '08': '31', '09': '30', '10': '31', '11': '30', '12': '31'} dates_to_grab = [] current_date = date_input.split('_') month = current_date[0] day = current_date[1] month_before = int(month) -1 previous_month = f'0{month_before}' days_prev_month = dict_input[previous_month] for day_num in range(0, 7): curr_day = int(day) - day_num if int(day) >= 7: for day_num in range(0, 7): curr_day = int(day) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{month}-{curr_day}' dates_to_grab.append(file_add) else: days_left = 7 - int(day) for day_num in range(0, int(day)): curr_day = int(day) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{month}-{curr_day}' dates_to_grab.append(file_add) for day_num in range(0, days_left): curr_day = int(days_prev_month) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{previous_month}-{curr_day}' dates_to_grab.append(file_add) return dates_to_grab if __name__ == '__main__': get_last_seven(date_input)
def get_last_seven(date_input): dict_input = {'01': '31', '02': '29', '03': '31', '04': '30', '05': '31', '06': '30', '07': '31', '08': '31', '09': '30', '10': '31', '11': '30', '12': '31'} dates_to_grab = [] current_date = date_input.split('_') month = current_date[0] day = current_date[1] month_before = int(month) - 1 previous_month = f'0{month_before}' days_prev_month = dict_input[previous_month] for day_num in range(0, 7): curr_day = int(day) - day_num if int(day) >= 7: for day_num in range(0, 7): curr_day = int(day) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{month}-{curr_day}' dates_to_grab.append(file_add) else: days_left = 7 - int(day) for day_num in range(0, int(day)): curr_day = int(day) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{month}-{curr_day}' dates_to_grab.append(file_add) for day_num in range(0, days_left): curr_day = int(days_prev_month) - day_num curr_day = str(curr_day).zfill(2) file_add = f'2020-{previous_month}-{curr_day}' dates_to_grab.append(file_add) return dates_to_grab if __name__ == '__main__': get_last_seven(date_input)
class Point(): def __init__(self, input1, input2): self.x = input1 self.y = input2 p = Point(2, 8) # PRINT THE X&Y-VALUES OF THE POINT print(p.x) print(p.y) # CREATED A CLASS (Flight which takes capacity as input) class FLight(): # FUNCTION FOR CAPACITY def __init__(self, capacity): self.capacity = capacity self.passengers = [] # FUNCTION TO APPEND PASSENGERS WITH NAME def add_passenger(self, name): if not self.open_seats(): return False self.passengers.append(name) return True # FUNCTION TO GET EMPTY SEATS def open_seats(self): return self.capacity - len(self.passengers) flight = FLight(3) print(flight) people = ["HIK HIK", "HIKAL LAKIH", "LAKIH HIKAL", "OTHER USER"] for person in people: success = flight.add_passenger(person) if success: print(f"Added {person}, ") # PRINT: Added..... else: print(f"No available seats for {person} ") # PRINT: No available seats for OTHER USER
class Point: def __init__(self, input1, input2): self.x = input1 self.y = input2 p = point(2, 8) print(p.x) print(p.y) class Flight: def __init__(self, capacity): self.capacity = capacity self.passengers = [] def add_passenger(self, name): if not self.open_seats(): return False self.passengers.append(name) return True def open_seats(self): return self.capacity - len(self.passengers) flight = f_light(3) print(flight) people = ['HIK HIK', 'HIKAL LAKIH', 'LAKIH HIKAL', 'OTHER USER'] for person in people: success = flight.add_passenger(person) if success: print(f'Added {person}, ') else: print(f'No available seats for {person} ')
fact = 1 n = int(input('Enter the no you want to factorial \t')) for i in range(1,n+1): fact = fact*i print('FACTORIAL Of ',n,"is",fact)
fact = 1 n = int(input('Enter the no you want to factorial \t')) for i in range(1, n + 1): fact = fact * i print('FACTORIAL Of ', n, 'is', fact)
entries = [ { 'env-title': 'mujoco-half-cheetah', 'score': 1668.58, }, { 'env-title': 'mujoco-hopper', 'score': 2316.16, }, { 'env-title': 'mujoco-inverted-pendulum', 'score': 809.43, }, { 'env-title': 'mujoco-swimmer', 'score': 111.19, }, { 'env-title': 'mujoco-inverted-double-pendulum', 'score': 7102.91, }, { 'env-title': 'mujoco-reacher', 'score': -6.71, }, { 'env-title': 'mujoco-walker2d', 'score': 3424.95, }, ]
entries = [{'env-title': 'mujoco-half-cheetah', 'score': 1668.58}, {'env-title': 'mujoco-hopper', 'score': 2316.16}, {'env-title': 'mujoco-inverted-pendulum', 'score': 809.43}, {'env-title': 'mujoco-swimmer', 'score': 111.19}, {'env-title': 'mujoco-inverted-double-pendulum', 'score': 7102.91}, {'env-title': 'mujoco-reacher', 'score': -6.71}, {'env-title': 'mujoco-walker2d', 'score': 3424.95}]
class Solution: def isStable(self, board, numRows, numCols): positions = set() for row in range(numRows): for col in range(numCols): if board[row][col]: if col > 1 and (board[row][col] == board[row][col-1] == board[row][col-2]): positions = positions.union({(row, col), (row, col-1), (row, col-2)}) # positions.add((row, col)) # positions.add((row, col-1)) # positions.add((row, col-2)) if row > 1 and (board[row][col] == board[row-1][col] == board[row-2][col]): positions = positions.union({(row, col), (row-1, col), (row-2, col)}) # positions.add((row, col)) # positions.add((row-1, col)) # positions.add((row-2, col)) return positions def crush(self, board, positions): for (row, col) in positions: board[row][col] = 0 def fall(self, board, numRows, numCols): for col in range(numCols): # start from bottom most 0 zeroIdx = numRows-1 for row in range(numRows-1, -1, -1): if board[row][col]: board[zeroIdx][col] = board[row][col] zeroIdx -= 1 for row in range(zeroIdx+1): board[row][col] = 0 def candyCrush(self, board: List[List[int]]) -> List[List[int]]: numRows = len(board) numCols = len(board[0]) while True: positions = self.isStable(board, numRows, numCols) if not positions: break self.crush(board, positions) self.fall(board, numRows, numCols) return board
class Solution: def is_stable(self, board, numRows, numCols): positions = set() for row in range(numRows): for col in range(numCols): if board[row][col]: if col > 1 and board[row][col] == board[row][col - 1] == board[row][col - 2]: positions = positions.union({(row, col), (row, col - 1), (row, col - 2)}) if row > 1 and board[row][col] == board[row - 1][col] == board[row - 2][col]: positions = positions.union({(row, col), (row - 1, col), (row - 2, col)}) return positions def crush(self, board, positions): for (row, col) in positions: board[row][col] = 0 def fall(self, board, numRows, numCols): for col in range(numCols): zero_idx = numRows - 1 for row in range(numRows - 1, -1, -1): if board[row][col]: board[zeroIdx][col] = board[row][col] zero_idx -= 1 for row in range(zeroIdx + 1): board[row][col] = 0 def candy_crush(self, board: List[List[int]]) -> List[List[int]]: num_rows = len(board) num_cols = len(board[0]) while True: positions = self.isStable(board, numRows, numCols) if not positions: break self.crush(board, positions) self.fall(board, numRows, numCols) return board
def sp_cls_count(sp_cls, n_seg_cls=8): """ Get the count (number of superpixels) for each segmentation class Args: sp_cls (dict): the key is the superpixel ID, and the value is the class ID for the corresponding superpixel. There are n elements in the sp_cls. Here, we use zero-indexing, which means the class are in range [0, k) n_seg_cls (int): number of segmentation classes Output: counts (list): a list for the count, where each index is the count for the corresponding segmentation class. The length of the list equals to the number of semantic segmentation classes. """ counts = [0] * n_seg_cls for k in sp_cls.keys(): counts[sp_cls[k]] += 1 return counts
def sp_cls_count(sp_cls, n_seg_cls=8): """ Get the count (number of superpixels) for each segmentation class Args: sp_cls (dict): the key is the superpixel ID, and the value is the class ID for the corresponding superpixel. There are n elements in the sp_cls. Here, we use zero-indexing, which means the class are in range [0, k) n_seg_cls (int): number of segmentation classes Output: counts (list): a list for the count, where each index is the count for the corresponding segmentation class. The length of the list equals to the number of semantic segmentation classes. """ counts = [0] * n_seg_cls for k in sp_cls.keys(): counts[sp_cls[k]] += 1 return counts
s = 0 for x in range(1, 10+1): s = s+x print("x:", x, "sum:", s)
s = 0 for x in range(1, 10 + 1): s = s + x print('x:', x, 'sum:', s)
class Solution: def findMaxForm(self, strs: List[str], m: int, n: int) -> int: ''' T: O(len(strs) * m * n) and S: (m * n) ''' dp = [[0 for _ in range(n+1)] for _ in range(m+1)] for s in strs: count = collections.Counter(s) for i in range(m, count["0"] - 1, -1): for j in range(n, count["1"] - 1, -1): dp[i][j] = max(dp[i][j], dp[i-count["0"]][j-count["1"]] + 1) return dp[m][n]
class Solution: def find_max_form(self, strs: List[str], m: int, n: int) -> int: """ T: O(len(strs) * m * n) and S: (m * n) """ dp = [[0 for _ in range(n + 1)] for _ in range(m + 1)] for s in strs: count = collections.Counter(s) for i in range(m, count['0'] - 1, -1): for j in range(n, count['1'] - 1, -1): dp[i][j] = max(dp[i][j], dp[i - count['0']][j - count['1']] + 1) return dp[m][n]
def roman_nums(): """Generator for roman numerals.""" mapping = [ (1, 'i'), (4, 'iv'), (5, 'v'), (9, 'ix'), (10, 'x'), (40, 'xl'), (50, 'l'), (90, 'xc'), (100, 'c'), (400, 'cd'), (500, 'd'), (900, 'cm'), (1000, 'm') ] i = 1 while True: next_str = '' remaining_int = i remaining_mapping = list(mapping) while remaining_mapping: (amount, chars) = remaining_mapping.pop() while remaining_int >= amount: next_str += chars remaining_int -= amount yield next_str i += 1 def title_body(text): """Split text into its first line (the title) and the rest of the text.""" newline = text.find("\n") if newline < 0: return text, "" return text[:newline], text[newline:] def flatten(list_of_lists): """List[List[X]] -> List[X]""" return sum(list_of_lists, [])
def roman_nums(): """Generator for roman numerals.""" mapping = [(1, 'i'), (4, 'iv'), (5, 'v'), (9, 'ix'), (10, 'x'), (40, 'xl'), (50, 'l'), (90, 'xc'), (100, 'c'), (400, 'cd'), (500, 'd'), (900, 'cm'), (1000, 'm')] i = 1 while True: next_str = '' remaining_int = i remaining_mapping = list(mapping) while remaining_mapping: (amount, chars) = remaining_mapping.pop() while remaining_int >= amount: next_str += chars remaining_int -= amount yield next_str i += 1 def title_body(text): """Split text into its first line (the title) and the rest of the text.""" newline = text.find('\n') if newline < 0: return (text, '') return (text[:newline], text[newline:]) def flatten(list_of_lists): """List[List[X]] -> List[X]""" return sum(list_of_lists, [])
expected_output={ 'interfaces': { 'HundredGigE1/0/21': { 'neighbors': { '3.3.3.3': { 'priority': 0, 'state': 'FULL/ -', 'dead_time': '00:00:35', 'interface_id': 23 } } } } }
expected_output = {'interfaces': {'HundredGigE1/0/21': {'neighbors': {'3.3.3.3': {'priority': 0, 'state': 'FULL/ -', 'dead_time': '00:00:35', 'interface_id': 23}}}}}
config = { "cmip6": { "base_dir": "/badc/cmip6/data/CMIP6", "facets": "mip_era activity_id institution_id source_id experiment_id member_id table_id variable_id grid_label version".split(), "scan_depth": 5, "mappings": {"variable": "variable_id", "project": "mip_era"} }, "cmip5": { "base_dir": "/badc/cmip5/data/cmip5", "facets": "activity product institute model experiment frequency realm mip_table ensemble_member version variable".split(), "scan_depth": 5, "mappings": {"project": "activity"}, "deeper_scan": 1, "exclude": ("derived", "retracted") }, "cordex": { "base_dir": "/badc/cordex/data/cordex", "facets": "project product domain institute driving_model experiment ensemble rcm_name rcm_version time_frequency variable version".split(), "scan_depth": 5, "mappings": {"project": "project"}, "renamers": {"CORDEX": "cordex"} } }
config = {'cmip6': {'base_dir': '/badc/cmip6/data/CMIP6', 'facets': 'mip_era activity_id institution_id source_id experiment_id member_id table_id variable_id grid_label version'.split(), 'scan_depth': 5, 'mappings': {'variable': 'variable_id', 'project': 'mip_era'}}, 'cmip5': {'base_dir': '/badc/cmip5/data/cmip5', 'facets': 'activity product institute model experiment frequency realm mip_table ensemble_member version variable'.split(), 'scan_depth': 5, 'mappings': {'project': 'activity'}, 'deeper_scan': 1, 'exclude': ('derived', 'retracted')}, 'cordex': {'base_dir': '/badc/cordex/data/cordex', 'facets': 'project product domain institute driving_model experiment ensemble rcm_name rcm_version time_frequency variable version'.split(), 'scan_depth': 5, 'mappings': {'project': 'project'}, 'renamers': {'CORDEX': 'cordex'}}}
''' Created on May 12, 2022 @author: mballance ''' class PoolSize(object): def __init__(self, sz): self._sz = sz def __int__(self): return self._sz
""" Created on May 12, 2022 @author: mballance """ class Poolsize(object): def __init__(self, sz): self._sz = sz def __int__(self): return self._sz
""" base on http://www.ibm.com/developerworks/cn/opensource/os-cn-pythonwith/index.html demo-5 """ class DummyResource: def __init__(self, tag): self.tag = tag print('Resource [%s]' % tag) def __enter__(self): print('[Enter %s]: Allocate resource.' % self.tag) return self # can return different object def __exit__(self, exc_type, exc_value, exc_tb): print('[Exit %s]: Free resource.' % self.tag) if exc_tb is None: print('[Exit %s]: Exited without exception.' % self.tag) else: print('[Exit %s]: Exited with exception raised.' % self.tag) return False """ self-implement: what if ctx_expr it self raised an exception """ class DummyResorceChild(DummyResource): def __init__(self, tag, trigger = False): if trigger: raise Exception DummyResource.__init__(self, tag) if __name__ == '__main__': with DummyResource('Normal'): print('[with-body] Run without exceptions.') try: with DummyResource('With-Exception'): # NOTICE: ctx_expr it self has an exception will irectly raised!(in this case) print('[with-body] Run with exception.') raise Exception ## forcing an exception and the ctx manager will handle it print('[with-body] Run with exception. Failed to finish statement-body!') # NOTICE: will not execute this line except Exception: pass with DummyResorceChild('Without ctx_expr exception'): print('[with-body] Run without exceptions.') with DummyResorceChild('With ctx_expr exception', True): print('[with-body] Run passed with exception in ctx_expr!')
""" base on http://www.ibm.com/developerworks/cn/opensource/os-cn-pythonwith/index.html demo-5 """ class Dummyresource: def __init__(self, tag): self.tag = tag print('Resource [%s]' % tag) def __enter__(self): print('[Enter %s]: Allocate resource.' % self.tag) return self def __exit__(self, exc_type, exc_value, exc_tb): print('[Exit %s]: Free resource.' % self.tag) if exc_tb is None: print('[Exit %s]: Exited without exception.' % self.tag) else: print('[Exit %s]: Exited with exception raised.' % self.tag) return False ' self-implement: what if ctx_expr it self raised an exception ' class Dummyresorcechild(DummyResource): def __init__(self, tag, trigger=False): if trigger: raise Exception DummyResource.__init__(self, tag) if __name__ == '__main__': with dummy_resource('Normal'): print('[with-body] Run without exceptions.') try: with dummy_resource('With-Exception'): print('[with-body] Run with exception.') raise Exception print('[with-body] Run with exception. Failed to finish statement-body!') except Exception: pass with dummy_resorce_child('Without ctx_expr exception'): print('[with-body] Run without exceptions.') with dummy_resorce_child('With ctx_expr exception', True): print('[with-body] Run passed with exception in ctx_expr!')
n = int(input()) for i in range (97 , 97 + n ): for m in range (97, 97 + n): for k in range (97, 97 + n): print(chr(i) + chr(m) + chr(k))
n = int(input()) for i in range(97, 97 + n): for m in range(97, 97 + n): for k in range(97, 97 + n): print(chr(i) + chr(m) + chr(k))
def trainer(model, data): model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) (x, y) = data('train') def result(epochs, batch_size): model.fit(x, y, batch_size=batch_size, epochs=epochs, validation_split=0.1) return model return result
def trainer(model, data): model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) (x, y) = data('train') def result(epochs, batch_size): model.fit(x, y, batch_size=batch_size, epochs=epochs, validation_split=0.1) return model return result
# print([callable(getattr(__builtins__, attr)) for attr in dir(__builtins__)]) print([(attr,type(getattr(__builtins__, attr))) for attr in dir(__builtins__)]) # print 'hello'*100
print([(attr, type(getattr(__builtins__, attr))) for attr in dir(__builtins__)])
# https://leetcode.com/problems/count-univalue-subtrees/ # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def countUnivalSubtrees(self, root): """ :type root: TreeNode :rtype: int """ def search(root): lu, lv = True, root.val ru, rv = True, root.val if root.left: lu, lv = search(root.left) if root.right: ru, rv = search(root.right) if not lu: return False, root.val if not ru: return False, root.val if not (lv == rv == root.val): return False, root.val nonlocal univalue_subtrees univalue_subtrees += 1 return True, root.val univalue_subtrees = 0 if root: search(root) return univalue_subtrees
class Solution: def count_unival_subtrees(self, root): """ :type root: TreeNode :rtype: int """ def search(root): (lu, lv) = (True, root.val) (ru, rv) = (True, root.val) if root.left: (lu, lv) = search(root.left) if root.right: (ru, rv) = search(root.right) if not lu: return (False, root.val) if not ru: return (False, root.val) if not lv == rv == root.val: return (False, root.val) nonlocal univalue_subtrees univalue_subtrees += 1 return (True, root.val) univalue_subtrees = 0 if root: search(root) return univalue_subtrees
#criar um programa que leia um valor em metros e o exiba convertido em centimetros e milimetros. m = float(input('Digite um valor em metros: ')) km = m/1000 hm = m/100 dam= m/10 dm = m*10 cm = m*100 mm = m*1000 print("A medida de {} metros corresponde a {:.0f} dm, {:.0f} cm e {:.0f} mm".format(m, dm, cm, mm)) print("A medida de {} metros corresponde a {} km, {} hm e {} dam".format(m, km, hm, dam))
m = float(input('Digite um valor em metros: ')) km = m / 1000 hm = m / 100 dam = m / 10 dm = m * 10 cm = m * 100 mm = m * 1000 print('A medida de {} metros corresponde a {:.0f} dm, {:.0f} cm e {:.0f} mm'.format(m, dm, cm, mm)) print('A medida de {} metros corresponde a {} km, {} hm e {} dam'.format(m, km, hm, dam))
#! /usr/bin/python # Copyright Notice: # Copyright 2019-2020 DMTF. All rights reserved. # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Tacklebox/blob/master/LICENSE.md """ Resets Module File : resets.py Brief : This file contains the common definitions and functionalities for reset operations """ reset_types = [ "On", "ForceOff", "GracefulShutdown", "GracefulRestart", "ForceRestart", "Nmi", "ForceOn", "PushPowerButton", "PowerCycle" ]
""" Resets Module File : resets.py Brief : This file contains the common definitions and functionalities for reset operations """ reset_types = ['On', 'ForceOff', 'GracefulShutdown', 'GracefulRestart', 'ForceRestart', 'Nmi', 'ForceOn', 'PushPowerButton', 'PowerCycle']
_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' # The new config inherits a base config to highlight the necessary modification _base_ = 'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' # We also need to change the num_classes in head to match the dataset's annotation model = dict( roi_head=dict( bbox_head=dict(num_classes=6), mask_head=dict(num_classes=6))) # Modify dataset related settings dataset_type = 'COCODataset' classes = ("badge", "person", "glove", "wrongglove", "operatingbar", "powerchecker") data_root = 'data/gloves/' data = dict( train=dict( img_prefix='', classes=classes, ann_file='1_images/annotation_coco.json'), val=dict( img_prefix='', classes=classes, ann_file='gloves/1_images/val/annotation_coco.json'), test=dict( img_prefix='', classes=classes, ann_file='gloves/1_images/val/annotation_coco.json')) model = dict( type='CascadeRCNN', pretrained='open-mmlab://resnext101_64x4d', backbone=dict( type='ResNeXt', depth=101, groups=64, base_width=4, num_stages=4, out_indices=(0, 1, 2, 3), frozen_stages=1, norm_cfg=dict(type='BN', requires_grad=True), style='pytorch'))
_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' _base_ = 'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' model = dict(roi_head=dict(bbox_head=dict(num_classes=6), mask_head=dict(num_classes=6))) dataset_type = 'COCODataset' classes = ('badge', 'person', 'glove', 'wrongglove', 'operatingbar', 'powerchecker') data_root = 'data/gloves/' data = dict(train=dict(img_prefix='', classes=classes, ann_file='1_images/annotation_coco.json'), val=dict(img_prefix='', classes=classes, ann_file='gloves/1_images/val/annotation_coco.json'), test=dict(img_prefix='', classes=classes, ann_file='gloves/1_images/val/annotation_coco.json')) model = dict(type='CascadeRCNN', pretrained='open-mmlab://resnext101_64x4d', backbone=dict(type='ResNeXt', depth=101, groups=64, base_width=4, num_stages=4, out_indices=(0, 1, 2, 3), frozen_stages=1, norm_cfg=dict(type='BN', requires_grad=True), style='pytorch'))
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Aug 11 08:24:01 2020 @author: krishan """ class Student: collegeName = 'KITS' def __init__(self, name): self.name = name # Aggregation[No need for object] print(Student.collegeName) s = Student('Arjun') # Composition[Without object name cannot exist] print(s.name)
""" Created on Tue Aug 11 08:24:01 2020 @author: krishan """ class Student: college_name = 'KITS' def __init__(self, name): self.name = name print(Student.collegeName) s = student('Arjun') print(s.name)
class DateTimeExtensions(object): # no doc def ZZZ(self): """hardcoded/mock instance of the class""" return DateTimeExtensions() instance=ZZZ() """hardcoded/returns an instance of the class""" @staticmethod def AddWorkday(date): """ AddWorkday(date: DateTime) -> DateTime """ pass @staticmethod def IsSameDay(dtSelf,dtOther): """ IsSameDay(dtSelf: DateTime,dtOther: DateTime) -> bool """ pass @staticmethod def IsWorkday(date): """ IsWorkday(date: DateTime) -> bool """ pass __all__=[ 'AddWorkday', 'IsSameDay', 'IsWorkday', ]
class Datetimeextensions(object): def zzz(self): """hardcoded/mock instance of the class""" return date_time_extensions() instance = zzz() 'hardcoded/returns an instance of the class' @staticmethod def add_workday(date): """ AddWorkday(date: DateTime) -> DateTime """ pass @staticmethod def is_same_day(dtSelf, dtOther): """ IsSameDay(dtSelf: DateTime,dtOther: DateTime) -> bool """ pass @staticmethod def is_workday(date): """ IsWorkday(date: DateTime) -> bool """ pass __all__ = ['AddWorkday', 'IsSameDay', 'IsWorkday']
""" The snake ladder board game is represented as one-dimensional array where value in the array are the destination id cell for the snakes (lower numbers) and ladders higher number. """ board = [1,15,3,4,7,6,7,8,27,10,11,12,13,14,15,16,4,29,19,21,22,23,16, 35,26,27,28,29,30,31,30,33,12,35,36] def find_min_throws(position,count, min_count): if position == 36: if count[0] < min_count[0]: min_count[0] = count[0] return for dice_top in [1,2,3,4,5,6]: next_position = dice_top + position if next_position > 36: next_postion = position + (36-position) - (dice_top - (36 -position)) else: next_position = next_position actual_position = board[next_position] count[0] += 1 find_min_throws(actual_position,count,min_count) def play_game(): min_count = [0] count = [0] find_min_throws(1,count,min_count) print(min_count) play_game()
""" The snake ladder board game is represented as one-dimensional array where value in the array are the destination id cell for the snakes (lower numbers) and ladders higher number. """ board = [1, 15, 3, 4, 7, 6, 7, 8, 27, 10, 11, 12, 13, 14, 15, 16, 4, 29, 19, 21, 22, 23, 16, 35, 26, 27, 28, 29, 30, 31, 30, 33, 12, 35, 36] def find_min_throws(position, count, min_count): if position == 36: if count[0] < min_count[0]: min_count[0] = count[0] return for dice_top in [1, 2, 3, 4, 5, 6]: next_position = dice_top + position if next_position > 36: next_postion = position + (36 - position) - (dice_top - (36 - position)) else: next_position = next_position actual_position = board[next_position] count[0] += 1 find_min_throws(actual_position, count, min_count) def play_game(): min_count = [0] count = [0] find_min_throws(1, count, min_count) print(min_count) play_game()
BASE_URL = 'http://api.statbank.dk/v1' DELIMITER = ';' DEFAULT_LANGUAGE = 'da' LOCALES = {'da': 'en_DK.UTF-8', 'en': 'en_US.UTF-8'}
base_url = 'http://api.statbank.dk/v1' delimiter = ';' default_language = 'da' locales = {'da': 'en_DK.UTF-8', 'en': 'en_US.UTF-8'}
class TaxNotKnown(Exception): """ Exception for when a tax-inclusive price is requested but we don't know what the tax applicable is (yet). """ class Price(object): is_tax_known = False def __init__(self, currency, excl_tax, incl_tax=None, tax=None): """ You can either pass the price including tax or simply the tax """ self.currency = currency self.excl_tax = excl_tax if incl_tax is not None: self.incl_tax = incl_tax self.is_tax_known = True self.tax = incl_tax - excl_tax elif tax is not None: self.incl_tax = excl_tax + tax self.is_tax_known = True self.tax = tax
class Taxnotknown(Exception): """ Exception for when a tax-inclusive price is requested but we don't know what the tax applicable is (yet). """ class Price(object): is_tax_known = False def __init__(self, currency, excl_tax, incl_tax=None, tax=None): """ You can either pass the price including tax or simply the tax """ self.currency = currency self.excl_tax = excl_tax if incl_tax is not None: self.incl_tax = incl_tax self.is_tax_known = True self.tax = incl_tax - excl_tax elif tax is not None: self.incl_tax = excl_tax + tax self.is_tax_known = True self.tax = tax
def fibo(n): if n<3: return n-1 else: return fibo(n-1)+fibo(n-2)
def fibo(n): if n < 3: return n - 1 else: return fibo(n - 1) + fibo(n - 2)
""" Number of Islands Given an m x n 2d grid map of '1's (land) and '0's (water), return the number of islands. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water. Example 1: Input: grid = [ ["1","1","1","1","0"], ["1","1","0","1","0"], ["1","1","0","0","0"], ["0","0","0","0","0"] ] Output: 1 Example 2: Input: grid = [ ["1","1","0","0","0"], ["1","1","0","0","0"], ["0","0","1","0","0"], ["0","0","0","1","1"] ] Output: 3 Constraints: m == grid.length n == grid[i].length 1 <= m, n <= 300 grid[i][j] is '0' or '1'. """ class Solution: def numIslands(self, grid: List[List[str]]) -> int: self.m = len(grid) self.n = len(grid[0]) self.grid = grid islands = 0 for row in range(self.m): for col in range(self.n): # print("In numIslands:", row, col, islands, "\n", self.grid,"\n") if self.grid[row][col] == "1": self.sink(row, col) islands += 1 # print("Final grid:\n", self.grid, "\n") return islands def sink(self, row, col): # print("In sink:", row, col, "\n", self.grid,"\n") # Marking land as visited. self.grid[row][col] = "-" if row-1 >= 0 and self.grid[row-1][col] == "1": self.sink(row-1, col) if col-1 >= 0 and self.grid[row][col-1] == "1": self.sink(row, col-1) if row+1 < self.m and self.grid[row+1][col] == "1": self.sink(row+1, col) if col+1 < self.n and self.grid[row][col+1] == "1": self.sink(row, col+1)
""" Number of Islands Given an m x n 2d grid map of '1's (land) and '0's (water), return the number of islands. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water. Example 1: Input: grid = [ ["1","1","1","1","0"], ["1","1","0","1","0"], ["1","1","0","0","0"], ["0","0","0","0","0"] ] Output: 1 Example 2: Input: grid = [ ["1","1","0","0","0"], ["1","1","0","0","0"], ["0","0","1","0","0"], ["0","0","0","1","1"] ] Output: 3 Constraints: m == grid.length n == grid[i].length 1 <= m, n <= 300 grid[i][j] is '0' or '1'. """ class Solution: def num_islands(self, grid: List[List[str]]) -> int: self.m = len(grid) self.n = len(grid[0]) self.grid = grid islands = 0 for row in range(self.m): for col in range(self.n): if self.grid[row][col] == '1': self.sink(row, col) islands += 1 return islands def sink(self, row, col): self.grid[row][col] = '-' if row - 1 >= 0 and self.grid[row - 1][col] == '1': self.sink(row - 1, col) if col - 1 >= 0 and self.grid[row][col - 1] == '1': self.sink(row, col - 1) if row + 1 < self.m and self.grid[row + 1][col] == '1': self.sink(row + 1, col) if col + 1 < self.n and self.grid[row][col + 1] == '1': self.sink(row, col + 1)
""".""" class HashBrowns: """Makes a hash table.""" def __init__(self): self.size = 753 self.slots = [None] * self.size self.data = [None] * self.size def hashingtons(self, key, size): return key % size def hashemagain(self, oldcountrystylehash, size): return (oldcountrystylehash+1) % size def set(self, key, val): hash_val = self.hashingtons(key, len(self.slots)) def get(self, key): starting_bucket = self.hashingtons(key, len(self.slots)) val = None stop = False start = False position = starting_bucket
""".""" class Hashbrowns: """Makes a hash table.""" def __init__(self): self.size = 753 self.slots = [None] * self.size self.data = [None] * self.size def hashingtons(self, key, size): return key % size def hashemagain(self, oldcountrystylehash, size): return (oldcountrystylehash + 1) % size def set(self, key, val): hash_val = self.hashingtons(key, len(self.slots)) def get(self, key): starting_bucket = self.hashingtons(key, len(self.slots)) val = None stop = False start = False position = starting_bucket
""" Copyright 2021 Robert MacGregor Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_repository") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") def repositories(): maybe( git_repository, name = "rules_third_party", remote = "https://github.com/DraconicEnt/rules_third_party.git", commit = "ac695d3d2e09f4a2cd1d6e4ed625339255499ba0" ) maybe( http_archive, name = "rules_foreign_cc", strip_prefix = "rules_foreign_cc-master", # FIXME: Ideally this is locked to a specific version #sha256 = "3e6b0691fc57db8217d535393dcc2cf7c1d39fc87e9adb6e7d7bab1483915110", url = "https://github.com/bazelbuild/rules_foreign_cc/archive/master.zip", ) maybe( git_repository, name = "gtest", remote = "https://github.com/google/googletest.git", commit = "703bd9caab50b139428cea1aaff9974ebee5742e" # Tag 1.10 )
""" Copyright 2021 Robert MacGregor Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ load('@bazel_tools//tools/build_defs/repo:git.bzl', 'git_repository', 'new_git_repository') load('@bazel_tools//tools/build_defs/repo:http.bzl', 'http_archive') load('@bazel_tools//tools/build_defs/repo:utils.bzl', 'maybe') def repositories(): maybe(git_repository, name='rules_third_party', remote='https://github.com/DraconicEnt/rules_third_party.git', commit='ac695d3d2e09f4a2cd1d6e4ed625339255499ba0') maybe(http_archive, name='rules_foreign_cc', strip_prefix='rules_foreign_cc-master', url='https://github.com/bazelbuild/rules_foreign_cc/archive/master.zip') maybe(git_repository, name='gtest', remote='https://github.com/google/googletest.git', commit='703bd9caab50b139428cea1aaff9974ebee5742e')
def doincre(delta, entity, cnt, RK, table, PK, tablesvc): if cnt > 0: item = {"PartitionKey": PK, "RowKey": RK, "value": int(entity.value) + delta, "etag": entity.etag} try: tablesvc.merge_entity(table, item) except: result = tablesvc.get_entity(table, PK, RK) doincre(delta, result, cnt - 1, RK, table, PK, tablesvc) def incrementMetric(delta, RK, table, PK, tablesvc): try: result = tablesvc.get_entity(table, PK, RK) doincre(delta, result, 10000, RK, table, PK, tablesvc) except: pass
def doincre(delta, entity, cnt, RK, table, PK, tablesvc): if cnt > 0: item = {'PartitionKey': PK, 'RowKey': RK, 'value': int(entity.value) + delta, 'etag': entity.etag} try: tablesvc.merge_entity(table, item) except: result = tablesvc.get_entity(table, PK, RK) doincre(delta, result, cnt - 1, RK, table, PK, tablesvc) def increment_metric(delta, RK, table, PK, tablesvc): try: result = tablesvc.get_entity(table, PK, RK) doincre(delta, result, 10000, RK, table, PK, tablesvc) except: pass
# Python - 3.4.3 test.describe('Example Tests') InterlacedSpiralCipher = {'encode': encode, 'decode': decode } example1A = 'Romani ite domum' example1B = 'Rntodomiimuea m' test.assert_equals(InterlacedSpiralCipher['encode'](example1A), example1B) test.assert_equals(InterlacedSpiralCipher['decode'](example1B), example1A) example2A = 'Sic transit gloria mundi' example2B = 'Stsgiriuar i ninmd l otac' test.assert_equals(InterlacedSpiralCipher['encode'](example2A), example2B) test.assert_equals(InterlacedSpiralCipher['decode'](example2B), example2A)
test.describe('Example Tests') interlaced_spiral_cipher = {'encode': encode, 'decode': decode} example1_a = 'Romani ite domum' example1_b = 'Rntodomiimuea m' test.assert_equals(InterlacedSpiralCipher['encode'](example1A), example1B) test.assert_equals(InterlacedSpiralCipher['decode'](example1B), example1A) example2_a = 'Sic transit gloria mundi' example2_b = 'Stsgiriuar i ninmd l otac' test.assert_equals(InterlacedSpiralCipher['encode'](example2A), example2B) test.assert_equals(InterlacedSpiralCipher['decode'](example2B), example2A)
RESOURCES = { 'accounts': { 'schema': { 'username': { 'type': 'string', 'required': True, 'unique': True }, 'password': { 'type': 'string', 'required': True }, 'roles': { 'type': 'list', 'allowed': ['user', 'superuser'], 'required': True, }, 'location': { 'type': 'dict', 'schema': { 'country': {'type': 'string'}, 'city': {'type': 'string'}, 'address': {'type': 'string'} }, }, 'born': { 'type': 'datetime', }, }, # Disable endpoint caching. 'cache_control': '', 'cache_expires': 0, } }
resources = {'accounts': {'schema': {'username': {'type': 'string', 'required': True, 'unique': True}, 'password': {'type': 'string', 'required': True}, 'roles': {'type': 'list', 'allowed': ['user', 'superuser'], 'required': True}, 'location': {'type': 'dict', 'schema': {'country': {'type': 'string'}, 'city': {'type': 'string'}, 'address': {'type': 'string'}}}, 'born': {'type': 'datetime'}}, 'cache_control': '', 'cache_expires': 0}}
""" # Sample code to perform I/O: name = input() # Reading input from STDIN print('Hi, %s.' % name) # Writing output to STDOUT # Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail """ # Write your code here n, k = map(int, input().strip().split()) s = input() mx = 0 ans = 0 count = {i: 0 for i in 'abc'} for i in range(len(s)): count[s[i]] += 1 mx = max(mx, count[s[i]]) if ans - mx < k: ans += 1 else: count[s[i - ans]] -= 1 print(ans)
""" # Sample code to perform I/O: name = input() # Reading input from STDIN print('Hi, %s.' % name) # Writing output to STDOUT # Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail """ (n, k) = map(int, input().strip().split()) s = input() mx = 0 ans = 0 count = {i: 0 for i in 'abc'} for i in range(len(s)): count[s[i]] += 1 mx = max(mx, count[s[i]]) if ans - mx < k: ans += 1 else: count[s[i - ans]] -= 1 print(ans)
# Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def mergeTrees(self, t1: TreeNode, t2: TreeNode) -> TreeNode: t1_ptr = t1 t2_ptr = t2 t3_ptr = None if t1_ptr and t2_ptr: t3_ptr = TreeNode(t1_ptr.val + t2_ptr.val) t3_ptr.left = self.mergeTrees(t1_ptr.left , t2_ptr.left) t3_ptr.right = self.mergeTrees(t1_ptr.right , t2_ptr.right) elif t1_ptr: t3_ptr = t1_ptr else: t3_ptr = t2_ptr return t3_ptr
class Solution: def merge_trees(self, t1: TreeNode, t2: TreeNode) -> TreeNode: t1_ptr = t1 t2_ptr = t2 t3_ptr = None if t1_ptr and t2_ptr: t3_ptr = tree_node(t1_ptr.val + t2_ptr.val) t3_ptr.left = self.mergeTrees(t1_ptr.left, t2_ptr.left) t3_ptr.right = self.mergeTrees(t1_ptr.right, t2_ptr.right) elif t1_ptr: t3_ptr = t1_ptr else: t3_ptr = t2_ptr return t3_ptr
# Definition for singly-linked list. # class ListNode: # def __init__(self, x): # self.val = x # self.next = None class Solution: # @param head, a ListNode # @return a boolean def isPalindrome(self, head): if head == None: return True prev = None slow = head fast = head while fast != None and fast.next != None: prev = slow slow = slow.next fast = fast.next.next mid = slow curr = mid while curr != None: temp = curr.next curr.next = prev prev = curr curr = temp left = head right = prev while left != mid: if left.val != right.val: return False left = left.next right = right.next return True
class Solution: def is_palindrome(self, head): if head == None: return True prev = None slow = head fast = head while fast != None and fast.next != None: prev = slow slow = slow.next fast = fast.next.next mid = slow curr = mid while curr != None: temp = curr.next curr.next = prev prev = curr curr = temp left = head right = prev while left != mid: if left.val != right.val: return False left = left.next right = right.next return True
load("//third_party/py:python_configure.bzl", "python_configure") load("@io_bazel_rules_python//python:pip.bzl", "pip_repositories") load("@grpc_python_dependencies//:requirements.bzl", "pip_install") load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_repositories") def grpc_python_deps(): # TODO(https://github.com/grpc/grpc/issues/18256): Remove conditional. if hasattr(native, "http_archive"): python_configure(name = "local_config_python") pip_repositories() pip_install() py_proto_repositories() else: print("Building Python gRPC with bazel 23.0+ is disabled pending " + "resolution of https://github.com/grpc/grpc/issues/18256.")
load('//third_party/py:python_configure.bzl', 'python_configure') load('@io_bazel_rules_python//python:pip.bzl', 'pip_repositories') load('@grpc_python_dependencies//:requirements.bzl', 'pip_install') load('@org_pubref_rules_protobuf//python:rules.bzl', 'py_proto_repositories') def grpc_python_deps(): if hasattr(native, 'http_archive'): python_configure(name='local_config_python') pip_repositories() pip_install() py_proto_repositories() else: print('Building Python gRPC with bazel 23.0+ is disabled pending ' + 'resolution of https://github.com/grpc/grpc/issues/18256.')
FONT_TITLE = 18 FONT_LEGEND = 16 FONT_LABEL = 14 FONT_STICK = 12 LEGEND_FRAMEALPHA = 0.5 ########## h5py -- dod ############ wake_trim_min = None period_length_sec = 30 h5_out_dir = './h5py/output' ignore_class = None kappa_weights='quadratic' plot_hypnograms=True plot_CMs=True ###### ablation ###### ablation_out_dir = './ablation/output'
font_title = 18 font_legend = 16 font_label = 14 font_stick = 12 legend_framealpha = 0.5 wake_trim_min = None period_length_sec = 30 h5_out_dir = './h5py/output' ignore_class = None kappa_weights = 'quadratic' plot_hypnograms = True plot_c_ms = True ablation_out_dir = './ablation/output'
class CrabNavy: def __init__(self, positions): self.positions = positions @classmethod def from_str(cls, positions_str): positions = [int(c) for c in positions_str.split(",")] return cls(positions) def calculate_consumption(self, alignment): total = 0 for position in self.positions: total += abs(alignment - position) return total @property def ideal_alignment(self): min_consumption = None min_idx = None for idx in range(min(self.positions), max(self.positions)): consumption = self.calculate_consumption(idx) if min_consumption is None or consumption < min_consumption: min_consumption = consumption min_idx = idx return min_idx @property def ideal_alignment_consumption(self): return self.calculate_consumption(self.ideal_alignment) def main(): with open("input", "r") as f: lines_raw = f.read().splitlines() sample_navy = CrabNavy([16, 1, 2, 0, 4, 2, 7, 1, 2, 14]) navy = CrabNavy.from_str(lines_raw[0]) print(sample_navy.ideal_alignment) print(sample_navy.ideal_alignment_consumption) print(navy.ideal_alignment_consumption) if __name__ == "__main__": main()
class Crabnavy: def __init__(self, positions): self.positions = positions @classmethod def from_str(cls, positions_str): positions = [int(c) for c in positions_str.split(',')] return cls(positions) def calculate_consumption(self, alignment): total = 0 for position in self.positions: total += abs(alignment - position) return total @property def ideal_alignment(self): min_consumption = None min_idx = None for idx in range(min(self.positions), max(self.positions)): consumption = self.calculate_consumption(idx) if min_consumption is None or consumption < min_consumption: min_consumption = consumption min_idx = idx return min_idx @property def ideal_alignment_consumption(self): return self.calculate_consumption(self.ideal_alignment) def main(): with open('input', 'r') as f: lines_raw = f.read().splitlines() sample_navy = crab_navy([16, 1, 2, 0, 4, 2, 7, 1, 2, 14]) navy = CrabNavy.from_str(lines_raw[0]) print(sample_navy.ideal_alignment) print(sample_navy.ideal_alignment_consumption) print(navy.ideal_alignment_consumption) if __name__ == '__main__': main()
class Human: pass class Man(Human): pass class Woman(Human): pass def God(): """ god == PEP8 (forced to capitalize by CodeWars) """ return [Man(), Woman()]
class Human: pass class Man(Human): pass class Woman(Human): pass def god(): """ god == PEP8 (forced to capitalize by CodeWars) """ return [man(), woman()]
# Exercise One hash = "#" for i in range(0, 7): print(hash) hash = hash + "#" # Exercise two for i in range(1, 101): if i % 3 == 0: if i % 5 == 0: print("FizzBuzz") else: print("Fizz") elif i % 5 == 0: print("Buzz") else: print(i) # Exercise three size = 8 chess_board = "" for i in range(0, size): for j in range(0, size): if (i + j) % 2 == 0: chess_board = chess_board + " " else: chess_board = chess_board + "#" chess_board = chess_board + "\n" print(chess_board)
hash = '#' for i in range(0, 7): print(hash) hash = hash + '#' for i in range(1, 101): if i % 3 == 0: if i % 5 == 0: print('FizzBuzz') else: print('Fizz') elif i % 5 == 0: print('Buzz') else: print(i) size = 8 chess_board = '' for i in range(0, size): for j in range(0, size): if (i + j) % 2 == 0: chess_board = chess_board + ' ' else: chess_board = chess_board + '#' chess_board = chess_board + '\n' print(chess_board)
'''https://adoptopenjdk.net/upstream.html ''' load("//java:common/structs/KnownOpenJdkRepository.bzl", _KnownOpenJdkRepository = "KnownOpenJdkRepository") def adoptopenjdk_upstream_repositories(): return _KNOWN_OPENJDK_REPOSITORIES def _Repo(**kwargs): kwargs['provider'] = "adoptopenjdk_upstream" return _KnownOpenJdkRepository(**kwargs) _KNOWN_OPENJDK_REPOSITORIES = [ # JDK-11.0.9+11 ########################################################### _Repo( version = "11.0.9+11", url = "https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.9%2B11/OpenJDK11U-jdk_x64_linux_11.0.9_11.tar.gz", sha256 = "4fe78ca6a3afbff9c3dd7c93cc84064dcaa15578663362ded2c0d47552201e70", strip_prefix = "openjdk-11.0.9_11", ), _Repo( version = "11.0.9+11", os = "windows", url = "https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.9%2B11/OpenJDK11U-jdk_x64_windows_11.0.9_11.zip", sha256 = "a440f37531b44ee3475c9e5466e5d0545681419784fbe98ad371938e034d9d37", strip_prefix = "openjdk-11.0.9_11", ), # JDK8u272-b10 ############################################################ _Repo( version = "8u272-b10", url = "https://github.com/AdoptOpenJDK/openjdk8-upstream-binaries/releases/download/jdk8u272-b10/OpenJDK8U-jdk_x64_linux_8u272b10.tar.gz", sha256 = "654a0b082be0c6830821f22a9e1de5f9e2feb9705db79e3bb0d8c203d1b12c6a", strip_prefix = "openjdk-8u272-b10", ), _Repo( version = "8u272-b10", os = "windows", url = "https://github.com/AdoptOpenJDK/openjdk8-upstream-binaries/releases/download/jdk8u272-b10/OpenJDK8U-jdk_x64_windows_8u272b10.zip", sha256 = "63fe8a555ae6553bd6f6f0937135c31e9adbb3b2ac85232e495596d39f396b1d", strip_prefix = "openjdk-8u272-b10", ), ]
"""https://adoptopenjdk.net/upstream.html """ load('//java:common/structs/KnownOpenJdkRepository.bzl', _KnownOpenJdkRepository='KnownOpenJdkRepository') def adoptopenjdk_upstream_repositories(): return _KNOWN_OPENJDK_REPOSITORIES def __repo(**kwargs): kwargs['provider'] = 'adoptopenjdk_upstream' return __known_open_jdk_repository(**kwargs) _known_openjdk_repositories = [__repo(version='11.0.9+11', url='https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.9%2B11/OpenJDK11U-jdk_x64_linux_11.0.9_11.tar.gz', sha256='4fe78ca6a3afbff9c3dd7c93cc84064dcaa15578663362ded2c0d47552201e70', strip_prefix='openjdk-11.0.9_11'), __repo(version='11.0.9+11', os='windows', url='https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.9%2B11/OpenJDK11U-jdk_x64_windows_11.0.9_11.zip', sha256='a440f37531b44ee3475c9e5466e5d0545681419784fbe98ad371938e034d9d37', strip_prefix='openjdk-11.0.9_11'), __repo(version='8u272-b10', url='https://github.com/AdoptOpenJDK/openjdk8-upstream-binaries/releases/download/jdk8u272-b10/OpenJDK8U-jdk_x64_linux_8u272b10.tar.gz', sha256='654a0b082be0c6830821f22a9e1de5f9e2feb9705db79e3bb0d8c203d1b12c6a', strip_prefix='openjdk-8u272-b10'), __repo(version='8u272-b10', os='windows', url='https://github.com/AdoptOpenJDK/openjdk8-upstream-binaries/releases/download/jdk8u272-b10/OpenJDK8U-jdk_x64_windows_8u272b10.zip', sha256='63fe8a555ae6553bd6f6f0937135c31e9adbb3b2ac85232e495596d39f396b1d', strip_prefix='openjdk-8u272-b10')]
def map_wiimote_to_key(wiimote_index, wiimote_button, key): # Check the global wiimote object's button state and set the global # keyboard object's corresponding key. if wiimote[wiimote_index].buttons.button_down(wiimote_button): keyboard.setKeyDown(key) else: keyboard.setKeyUp(key) def map_wiimote_to_vJoy(wiimote_index, wiimote_button, key): if wiimote[wiimote_index].buttons.button_down(wiimote_button): vJoy[wiimote_index].setButton(key, True); else: vJoy[wiimote_index].setButton(key, False); def map_wiimote_to_vJoyHat(wiimote_index): if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Up) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Down) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Left) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Right) if not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown) and not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp) and not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft) and not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight): vJoy[wiimote_index].setDigitalPov(wiimote_index, VJoyPov.Nil) def map_wiimote_to_vJoyAHat(wiimote_index): x = 0 y = 0 rotate = -9000 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp):#up x = 1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown):#down x = -1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft):#left y = -1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight):#right y = 1 if x == 0 and y == 0: vJoy[wiimote_index].setAnalogPov(wiimote_index, -1)#center else: degrees = (math.atan2(y,x)/math.pi*18000 + rotate)%36000 vJoy[wiimote_index].setAnalogPov(wiimote_index, 
degrees) #diagnostics.debug("x:" + repr(x)) #diagnostics.debug("y:" + repr(y)) #diagnostics.debug("angle: " + repr(degrees)) def map_wiimote_to_vJoyAxis(wiimote_index): diagnostics.debug("x: " + repr(wiimote[wiimote_index].acceleration.x)) diagnostics.debug("max: " +repr(vJoy[0].axisMax)) vJoy[wiimote_index].rx = (wiimote[wiimote_index].acceleration.x+vJoy[0].axisMax)*255 vJoy[wiimote_index].ry = (wiimote[wiimote_index].acceleration.y+vJoy[0].axisMax)*255 vJoy[wiimote_index].rz = (wiimote[wiimote_index].acceleration.z+vJoy[0].axisMax)*255 def update(): # Sideways controls (DPad). Map each of our desired keys. map_wiimote_to_vJoyAHat(0) map_wiimote_to_vJoy(0, WiimoteButtons.One, 0) map_wiimote_to_vJoy(0, WiimoteButtons.Two, 1) map_wiimote_to_vJoy(0, WiimoteButtons.Plus, 2) map_wiimote_to_vJoy(0, WiimoteButtons.Home, 3) map_wiimote_to_vJoy(0, WiimoteButtons.Minus, 4) map_wiimote_to_vJoy(0, WiimoteButtons.A, 5) map_wiimote_to_vJoy(0, WiimoteButtons.B, 6) # If we're starting up, then hook up our update function. if starting: wiimote[0].buttons.update += update
def map_wiimote_to_key(wiimote_index, wiimote_button, key): if wiimote[wiimote_index].buttons.button_down(wiimote_button): keyboard.setKeyDown(key) else: keyboard.setKeyUp(key) def map_wiimote_to_v_joy(wiimote_index, wiimote_button, key): if wiimote[wiimote_index].buttons.button_down(wiimote_button): vJoy[wiimote_index].setButton(key, True) else: vJoy[wiimote_index].setButton(key, False) def map_wiimote_to_v_joy_hat(wiimote_index): if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Up) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Down) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Left) if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown): vJoy[wiimote_index].setDigitalPov(0, VJoyPov.Right) if not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown) and (not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp)) and (not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft)) and (not wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight)): vJoy[wiimote_index].setDigitalPov(wiimote_index, VJoyPov.Nil) def map_wiimote_to_v_joy_a_hat(wiimote_index): x = 0 y = 0 rotate = -9000 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadUp): x = 1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadDown): x = -1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadLeft): y = -1 if wiimote[wiimote_index].buttons.button_down(WiimoteButtons.DPadRight): y = 1 if x == 0 and y == 0: vJoy[wiimote_index].setAnalogPov(wiimote_index, -1) else: degrees = (math.atan2(y, x) / math.pi * 18000 + rotate) % 36000 vJoy[wiimote_index].setAnalogPov(wiimote_index, degrees) def map_wiimote_to_v_joy_axis(wiimote_index): diagnostics.debug('x: ' + 
repr(wiimote[wiimote_index].acceleration.x)) diagnostics.debug('max: ' + repr(vJoy[0].axisMax)) vJoy[wiimote_index].rx = (wiimote[wiimote_index].acceleration.x + vJoy[0].axisMax) * 255 vJoy[wiimote_index].ry = (wiimote[wiimote_index].acceleration.y + vJoy[0].axisMax) * 255 vJoy[wiimote_index].rz = (wiimote[wiimote_index].acceleration.z + vJoy[0].axisMax) * 255 def update(): map_wiimote_to_v_joy_a_hat(0) map_wiimote_to_v_joy(0, WiimoteButtons.One, 0) map_wiimote_to_v_joy(0, WiimoteButtons.Two, 1) map_wiimote_to_v_joy(0, WiimoteButtons.Plus, 2) map_wiimote_to_v_joy(0, WiimoteButtons.Home, 3) map_wiimote_to_v_joy(0, WiimoteButtons.Minus, 4) map_wiimote_to_v_joy(0, WiimoteButtons.A, 5) map_wiimote_to_v_joy(0, WiimoteButtons.B, 6) if starting: wiimote[0].buttons.update += update
# -*- coding: utf-8 -*- """ Created on Mon Sep 12 09:23:59 2016 @author: andre """ num = int(input("Choose a number to convert: ")) if num < 0: isNeg = True num = abs(num) else: isNeg = False result = '' if num == 0: result = '0' while num > 0: result = str(num%2) + result num = num // 2 if isNeg: result = '-' + result print(result)
""" Created on Mon Sep 12 09:23:59 2016 @author: andre """ num = int(input('Choose a number to convert: ')) if num < 0: is_neg = True num = abs(num) else: is_neg = False result = '' if num == 0: result = '0' while num > 0: result = str(num % 2) + result num = num // 2 if isNeg: result = '-' + result print(result)
# OpenWeatherMap API Key weather_api_key = "reset" # Google API Key g_key = "reset"
weather_api_key = 'reset' g_key = 'reset'
def merge_sorted_list(arr1, arr2): # m = len(arr1), n = len(arr1) # since we know arr1 is always greater or equal to (m+n) # we compare arr2[i] with arr1[j] element and check if it # should be placed there i, j = 0, 0 while i < len(arr1) and j < len(arr2): if arr2[j] == arr1[i] or arr2[j] <= arr1[i+1]: arr1.insert(i+1, arr2[j]) arr1.pop() i += 2 j += 1 elif arr2[j] > arr1[i]: if arr1[i+1] == 0: arr1[i+1] = arr2[j] i += 1 j += 1 else: i += 1 elif arr2[j] < arr1[i]: arr1.insert(i, arr2[j]) arr1.pop() i += 1 j += 1 print(arr1) return arr1 def merge_sorted_list_v2(nums1, m, nums2, n): # fill up that empty space with string stop = 0 for i in range(len(nums1)-1, 0, -1): if stop == len(nums2): break else: nums1[i] = "" stop += 1 print(nums1) if m == 0: nums1 = nums2 i, j = 0, 0 empty_space = len(nums2) while j < len(nums2): if nums2[j] <= nums1[i] or i == empty_space: nums1.insert(i, nums2[j]) nums1.pop() j += 1 empty_space += 1 else: i += 1 return nums1 a = [1, 2, 3, 0, 0, 0] b = [2, 5, 6] nums1 = [-1, 0, 0, 3, 3, 3, 0, 0, 0] m = 6 nums2 = [1, 2, 2] n = 3 result = merge_sorted_list_v2(nums1, 6, nums2, 3) print("result: ", result)
def merge_sorted_list(arr1, arr2): (i, j) = (0, 0) while i < len(arr1) and j < len(arr2): if arr2[j] == arr1[i] or arr2[j] <= arr1[i + 1]: arr1.insert(i + 1, arr2[j]) arr1.pop() i += 2 j += 1 elif arr2[j] > arr1[i]: if arr1[i + 1] == 0: arr1[i + 1] = arr2[j] i += 1 j += 1 else: i += 1 elif arr2[j] < arr1[i]: arr1.insert(i, arr2[j]) arr1.pop() i += 1 j += 1 print(arr1) return arr1 def merge_sorted_list_v2(nums1, m, nums2, n): stop = 0 for i in range(len(nums1) - 1, 0, -1): if stop == len(nums2): break else: nums1[i] = '' stop += 1 print(nums1) if m == 0: nums1 = nums2 (i, j) = (0, 0) empty_space = len(nums2) while j < len(nums2): if nums2[j] <= nums1[i] or i == empty_space: nums1.insert(i, nums2[j]) nums1.pop() j += 1 empty_space += 1 else: i += 1 return nums1 a = [1, 2, 3, 0, 0, 0] b = [2, 5, 6] nums1 = [-1, 0, 0, 3, 3, 3, 0, 0, 0] m = 6 nums2 = [1, 2, 2] n = 3 result = merge_sorted_list_v2(nums1, 6, nums2, 3) print('result: ', result)
data = open(0).readlines() e = [i == "#" for i in data[0].strip()] b = [line.strip() for line in data[2:]] d = ((-1,-1),(-1,0),(-1,1),(0,-1),(0,0),(0,1),(1,-1),(1,0),(1,1)) def neighbors(i, j): for k, (di, dj) in enumerate(d): yield 256 >> k, i+di, j+dj def lookup(_s, i, j, bi, bj, flip=False): if i == bi[0] or i + 1 == bi[1] or j == bj[0] or j + 1 == bj[1]: return ((i, j) in _s) ^ flip return e[sum(k for k, ni, nj in neighbors(i, j) if (ni, nj) in _s)] s = frozenset((i, j) for i in range(len(b)) for j in range(len(b[0])) if b[i][j] == "#") bi = (-51, len(b)+51) bj = (-51, len(b[0])+51) def iterate(_s, bi, bj): return frozenset((i, j) for i in range(*bi) for j in range(*bj) if lookup(_s, i, j, bi, bj, e[0])) for _ in range(2): s = iterate(s, bi, bj) print("Part 1", len(s)) for _ in range(48): s = iterate(s, bi, bj) print("Part 2", len(s)) for i in range(*bi): for j in range(*bj): if (i, j) in s: print("#", end="") else: print(".", end="") print()
data = open(0).readlines() e = [i == '#' for i in data[0].strip()] b = [line.strip() for line in data[2:]] d = ((-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 0), (0, 1), (1, -1), (1, 0), (1, 1)) def neighbors(i, j): for (k, (di, dj)) in enumerate(d): yield (256 >> k, i + di, j + dj) def lookup(_s, i, j, bi, bj, flip=False): if i == bi[0] or i + 1 == bi[1] or j == bj[0] or (j + 1 == bj[1]): return ((i, j) in _s) ^ flip return e[sum((k for (k, ni, nj) in neighbors(i, j) if (ni, nj) in _s))] s = frozenset(((i, j) for i in range(len(b)) for j in range(len(b[0])) if b[i][j] == '#')) bi = (-51, len(b) + 51) bj = (-51, len(b[0]) + 51) def iterate(_s, bi, bj): return frozenset(((i, j) for i in range(*bi) for j in range(*bj) if lookup(_s, i, j, bi, bj, e[0]))) for _ in range(2): s = iterate(s, bi, bj) print('Part 1', len(s)) for _ in range(48): s = iterate(s, bi, bj) print('Part 2', len(s)) for i in range(*bi): for j in range(*bj): if (i, j) in s: print('#', end='') else: print('.', end='') print()
distance_success_response = {'destination_addresses': ['Brighton, UK'], 'origin_addresses': ['London, UK'], 'rows': [{'elements': [ {'distance': {'text': '64.6 mi', 'value': 103964}, 'duration': {'text': '1 hour 54 mins', 'value': 6854}, 'status': 'OK'}]}], 'status': 'OK'}
distance_success_response = {'destination_addresses': ['Brighton, UK'], 'origin_addresses': ['London, UK'], 'rows': [{'elements': [{'distance': {'text': '64.6 mi', 'value': 103964}, 'duration': {'text': '1 hour 54 mins', 'value': 6854}, 'status': 'OK'}]}], 'status': 'OK'}
"""Bazel rule for making a zip file.""" def zip_dir(name, srcs, zipname, **kwargs): """Zips up an entire directory or Fileset. Args: name: The name of the target srcs: A single-item list with a directory or fileset zipname: The name of the output zip file **kwargs: Further generic arguments to pass to genrule, e.g. visibility. """ if len(srcs) > 1: fail("More than one directory is not supported by zip_dir yet", attr = srcs) native.genrule( name = name, srcs = srcs, outs = [zipname], cmd = "zip $(OUTS) $(SRCS)", **kwargs )
"""Bazel rule for making a zip file.""" def zip_dir(name, srcs, zipname, **kwargs): """Zips up an entire directory or Fileset. Args: name: The name of the target srcs: A single-item list with a directory or fileset zipname: The name of the output zip file **kwargs: Further generic arguments to pass to genrule, e.g. visibility. """ if len(srcs) > 1: fail('More than one directory is not supported by zip_dir yet', attr=srcs) native.genrule(name=name, srcs=srcs, outs=[zipname], cmd='zip $(OUTS) $(SRCS)', **kwargs)
# Definition for singly-linked list. class ListNode: def __init__(self, val=0, next=None): self.val = val self.next = next class Solution: def getmerge(self,l1,l2): head = ListNode(0) tail = head while l1 is not None and l2 is not None: if l1.val > l2.val: l1,l2 = l2,l1 tail.next = l1 l1 = l1.next tail = tail.next if l1 is None : tail.next = l2 if l2 is None: tail.next = l1 return head.next def sortList(self, head: ListNode) -> ListNode: if head is None or head.next is None: return head slownode = head fastnode = head.next while fastnode is not None and fastnode.next is not None: slownode = slownode.next fastnode = fastnode.next.next mid = slownode.next slownode.next = None return self.getmerge(self.sortList(head),self.sortList(mid)) A = Solution() a1 = ListNode(4) a2 = ListNode(2) a3 = ListNode(1) a4 = ListNode(3) a1.next = a2 a2.next = a3 a3.next = a4 print(A.sortList(a1))
class Listnode: def __init__(self, val=0, next=None): self.val = val self.next = next class Solution: def getmerge(self, l1, l2): head = list_node(0) tail = head while l1 is not None and l2 is not None: if l1.val > l2.val: (l1, l2) = (l2, l1) tail.next = l1 l1 = l1.next tail = tail.next if l1 is None: tail.next = l2 if l2 is None: tail.next = l1 return head.next def sort_list(self, head: ListNode) -> ListNode: if head is None or head.next is None: return head slownode = head fastnode = head.next while fastnode is not None and fastnode.next is not None: slownode = slownode.next fastnode = fastnode.next.next mid = slownode.next slownode.next = None return self.getmerge(self.sortList(head), self.sortList(mid)) a = solution() a1 = list_node(4) a2 = list_node(2) a3 = list_node(1) a4 = list_node(3) a1.next = a2 a2.next = a3 a3.next = a4 print(A.sortList(a1))
string = 'Monty Python' print(string[0:4]) print(string[6:7]) print(string[6:20]) print(string[8:]) data = 'From robin.smorenburg@linkit.nl Sat Jan' position = data.find('@') print(position) space_position = data.find(' ', position) print(space_position) host = data[position+1:space_position] print(host)
string = 'Monty Python' print(string[0:4]) print(string[6:7]) print(string[6:20]) print(string[8:]) data = 'From robin.smorenburg@linkit.nl Sat Jan' position = data.find('@') print(position) space_position = data.find(' ', position) print(space_position) host = data[position + 1:space_position] print(host)
def baz(): tmp = "!" # try to extract this assignment, either with or without this comment baz() def bar(self): pass
def baz(): tmp = '!' baz() def bar(self): pass
# Find duplicates in an array in O(N) Time and O(1) space. # The elements in the array can be only Between 1<=x<=len(array) # Asked in Amazon,D-E-Shaw, Flipkart and many more # Difficulty -> Medium (for O(N) time and O(1) space) # Approaches: # Naive Solution: Loop through all the values checking for multiple occurrences. # This would take O(n^2) time # Use sorting and then look for adjacent elements and if repeated print them. # This would take O(nlogn) time # Use hashmap to keep a track of element and its count and if the count is greater than # 1 then we know the elements are repeated. This would take O(n) time but O(n) space too. # Another solution would be to use array modification to check for multiple occurrences. # As We know that the elements can be only be in between 1 and len(arr) thus we can # subtract one both sides to get the index.Now as we have the index so we go to # the array[value] and negate it and repeat this for the whole array. # So if we encounter the a value and if it is already negated then we know that element is repeated. def findDuplicates(arr): size = len(arr) # Traverse through the whole array for i in range(0, size): # Making sure we don't a negative index idx = abs(arr[i]) - 1 # check if the value at idx is negative or not if arr[idx] < 0: # if negative then we have already encountered thus a repeated element so print it as answer # Not using arr[idx] as it is subtracted by 1. print(abs(arr[i])) # else negating the value arr[idx] = -arr[idx] # uncomment this code to get the original array back. # for i in range(0,size-1): # arr[i]=abs(arr[i]) # Test cases arr1 = [2, 1, 2, 1] arr2 = [1, 2, 3, 1, 3, 6, 6] findDuplicates(arr1) print() findDuplicates(arr2)
def find_duplicates(arr):
    """Print each duplicated value of arr (entries must lie in 1..len(arr)).

    O(n) time, O(1) extra space: the sign of arr[v - 1] records whether
    value v has been seen.  NOTE: arr is left with its signs scrambled.
    """
    for pos in range(len(arr)):
        slot = abs(arr[pos]) - 1  # abs(): the cell may already be a marker
        if arr[slot] < 0:
            print(abs(arr[pos]))
        arr[slot] = -arr[slot]


arr1 = [2, 1, 2, 1]
arr2 = [1, 2, 3, 1, 3, 6, 6]
find_duplicates(arr1)
print()
find_duplicates(arr2)
# Description: Extract just the intensities for a give Miller array and print ten rows of them. # Source: NA """ Iobs = miller_arrays[${1:0}] iobsdata = Iobs.data() list(iobsdata[${1:100:110}]) """ Iobs = miller_arrays[0] iobsdata = Iobs.data() list(iobsdata[100:110])
""" Iobs = miller_arrays[${1:0}] iobsdata = Iobs.data() list(iobsdata[${1:100:110}]) """ iobs = miller_arrays[0] iobsdata = Iobs.data() list(iobsdata[100:110])
'''
8-2. Favorite Book: Write a function called favorite_book() that accepts one
parameter, title. The function should print a message, such as One of my
favorite books is Alice in Wonderland. Call the function, making sure to
include a book title as an argument in the function call.
'''


def favorite_book(title):
    """Print a one-line message naming a favorite book built from *title*."""
    message = f"One of my favorite books is {title} in Wonderland."
    print(message)


favorite_book('Alice')
""" 8-2. Favorite Book: Write a function called favorite_book() that accepts one parameter, title. The function should print a message, such as One of my favorite books is Alice in Wonderland. Call the function, making sure to include a book title as an argument in the function call. """ def favorite_book(title): print(f'One of my favorite books is {title} in Wonderland.') favorite_book('Alice')
# Static lookup tables mapping Algorand escrow addresses and application IDs
# to human-readable Yieldly / Algofi pool names.
#
# NOTE(review): the original listed key '233725850' twice ('Yieldly Staking'
# and 'Yieldly - YLDY-YLDY/ALGO'); Python keeps only the LAST value for a
# duplicated key, so the shadowed 'Yieldly Staking' entry was dead data and
# has been removed.  The resulting dict is unchanged, including iteration
# order (an overwritten key keeps its first-insert position).
yieldlyDB = {
    'FMBXOFAQCSAD4UWU4Q7IX5AV4FRV6AKURJQYGXLW3CTPTQ7XBX6MALMSPY': 'Yieldly - YLDY-YLDY/ALGO',
    'VUY44SYOFFJE3ZIDEMA6PT34J3FAZUAE6VVTOTUJ5LZ343V6WZ3ZJQTCD4': 'Yieldly - YLDY-OPUL',
    'U3RJ4NNSASBOIY25KAVVFV6CFDOS22L7YLZBENMIVVVFEWT5WE5GHXH5VQ': 'Yieldly - GEMS-GEMS',
    'BXLXRYBOM7ZNYSCRLWG6THNMO6HASTIRMJGSNJANZFS6EB3X4JY2FZCJNA': 'Yieldly - YLDY-GEMS',
    'AAXO2CVK6SHKHNROZZH3435UAYMQXZP4UTLO5LQCNTDZAXEGJJ2OPHFX54': 'Yieldly - YLDY-ARCC',
    '4UPQ2HSD7O6WY3HP33JXMPGYNMV56U3YK5WT6CTSBLGS466JPOFUVCCSTA': 'Yieldly - ARCC-ARCC',
    'YCHXDWES2VJDEKAHWPC344N6WK3FQOZL5VIZYMCDHDIUTTUZPC4IA6DEZY': 'Yieldly - YLDY-CHOICE',
    'KDZS6OV5PAARFJPRZYRRQWCZOCPICB6NJ4YNHZKNCNKIVOLSL5ZCPMY24I': 'Yieldly - YLDY-SMILE',
    '55CUF2LA45PJWIUK2KZOGN54N2POJHXAWTSVGR5HFSO4JUUDJ3SOURUVGQ': 'Yieldly - SMILE-SMILE',
    'IR2HQCMN6GPTKGCTR54YXFFUBB4A7FRZR76U2BJAS4XBLNJHZX7RMOPBIQ': 'Yieldly - YLDY-XET',
    'GLHS7QEDDSQVHNTOVFELY3ISMB44TL7I7RQ36BNFW7KMJEZA4SQUFJHV6E': 'Yieldly - CHOICE-CHOICE',
    '2RQGRKUSDCZAEFFCXXCQBCVH6KOR7FZW6N3G7B547DIS76AGQJAZZVPPPY': 'Yieldly - OPUL-OPUL',
    '3OZ3HAIID3NPKB5N3B6TGFUBX44ZBZDNNRWGY6HSPQLP3NRSQW7D6ZKFEY': 'Yieldly - XET-XET',
    'ZMJVS7F3DXYDE6XIBXGWLEL6VXUYLCN4HTOW57QDLZ2TAMOWZK7EIYAQF4': 'Yieldly - YLDY-AKITA',
    '233725844': 'Yieldly Algo Staking',
    '233725850': 'Yieldly - YLDY-YLDY/ALGO',  # was duplicated; this is the surviving value
    '233725848': 'Yieldly',
    '348079765': 'Yieldly - YLDY-OPUL',
    '419301793': 'Yieldly - GEMS-GEMS',
    '393388133': 'Yieldly - YLDY-GEMS',
    '385089192': 'Yieldly - YLDY-ARCC',
    '498747685': 'Yieldly - ARCC/ARCC',
    '447336112': 'Yieldly - YLDY-CHOICE',
    '352116819': 'Yieldly - YLDY-SMILE',
    '373819681': 'Yieldly - SMILE-SMILE',
    '424101057': 'Yieldly - YLDY-XET',
    '464365150': 'Yieldly - CHOICE-CHOICE',
    '367431051': 'Yieldly - OPUL-OPUL',
    '470390215': 'Yieldly - XET-XET',
    '511597182': 'Yieldly - YLDY-AKITA',
    '511593477': 'Yieldly - (AKITA/ALGO)LP-YLDY',
}

# Algofi contract addresses and application IDs.
algofiDB = {
    '3EPGHSNBBN5M2LD6V7A63EHZQQLATVQHDBYJQIZ6BLCBTIXA5XR7ZOZEB4': 'Algofi - Creator',
    '2SGUKZCOBEVGN3HPKSXPS6DTCXZ7LSP6G3BQF6KVUIUREBBY2QTGSON7WQ': 'Algofi - Manager',
    'TY5N6G67JWHSMWFFFZ252FXWKLRO5UZLBEJ4LGV7TPR5PVSKPLDWH3YRXU': 'Algofi - ALGO Market',
    'ABQHZLNGGPWWZVA5SOQO3HBEECVJSE3OHYLKACOTC7TC4BS52ZHREPF7QY': 'Algofi - USDC Market',
    'W5UCMHDSTGKWBOV6YVLDVPJGPE4L4ISTU6TGXC7WRF63Y7GOVFOBUNJB5Q': 'Algofi - goBTC Market',
    'KATD43XBJJIDXB3U5UCPIFUDU3CZ3YQNVWA5PDDMZVGKSR4E3QWPJX67CY': 'Algofi - goETH Market',
    'OPY7XNB5LVMECF3PHJGQV2U33LZPM5FBUXA3JJPHANAG5B7GEYUPZJVYRE': 'Algofi - STBL Market',
    'DYLJJES76YQCOUK6D4RALIPJ76U5QT7L6A2KP6QTOH63OBLFKLTER2J6IA': 'Algofi - STBL Staking',
    'Z3GWRL5HGCJQYIXP4MINCRWCKWDHZ5VSYJHDLIDLEIOARIZWJX6GLAWWEI': 'Algofi - STBL/USDC LP Staking',
    '465818260': 'Algofi - Manager',
    '465814065': 'Algofi - ALGO Market',
    '465814103': 'Algofi - USDC Market',
    '465814149': 'Algofi - goBTC Market',
    '465814222': 'Algofi - goETH Market',
    '465814278': 'Algofi - STBL Market',
    '482608867': 'Algofi - STBL Staking',
    '553866305': 'Algofi - STBL/USDC LP Staking',
}
# Lookup tables from Algorand escrow addresses / application IDs to
# human-readable Yieldly and Algofi pool names.
#
# NOTE(review): key '233725850' appeared twice in the original; since a dict
# literal keeps only the last value for a duplicated key, the shadowed
# 'Yieldly Staking' entry was dead data and has been dropped.  The effective
# dict (contents and iteration order) is unchanged.
yieldly_db = {
    'FMBXOFAQCSAD4UWU4Q7IX5AV4FRV6AKURJQYGXLW3CTPTQ7XBX6MALMSPY': 'Yieldly - YLDY-YLDY/ALGO',
    'VUY44SYOFFJE3ZIDEMA6PT34J3FAZUAE6VVTOTUJ5LZ343V6WZ3ZJQTCD4': 'Yieldly - YLDY-OPUL',
    'U3RJ4NNSASBOIY25KAVVFV6CFDOS22L7YLZBENMIVVVFEWT5WE5GHXH5VQ': 'Yieldly - GEMS-GEMS',
    'BXLXRYBOM7ZNYSCRLWG6THNMO6HASTIRMJGSNJANZFS6EB3X4JY2FZCJNA': 'Yieldly - YLDY-GEMS',
    'AAXO2CVK6SHKHNROZZH3435UAYMQXZP4UTLO5LQCNTDZAXEGJJ2OPHFX54': 'Yieldly - YLDY-ARCC',
    '4UPQ2HSD7O6WY3HP33JXMPGYNMV56U3YK5WT6CTSBLGS466JPOFUVCCSTA': 'Yieldly - ARCC-ARCC',
    'YCHXDWES2VJDEKAHWPC344N6WK3FQOZL5VIZYMCDHDIUTTUZPC4IA6DEZY': 'Yieldly - YLDY-CHOICE',
    'KDZS6OV5PAARFJPRZYRRQWCZOCPICB6NJ4YNHZKNCNKIVOLSL5ZCPMY24I': 'Yieldly - YLDY-SMILE',
    '55CUF2LA45PJWIUK2KZOGN54N2POJHXAWTSVGR5HFSO4JUUDJ3SOURUVGQ': 'Yieldly - SMILE-SMILE',
    'IR2HQCMN6GPTKGCTR54YXFFUBB4A7FRZR76U2BJAS4XBLNJHZX7RMOPBIQ': 'Yieldly - YLDY-XET',
    'GLHS7QEDDSQVHNTOVFELY3ISMB44TL7I7RQ36BNFW7KMJEZA4SQUFJHV6E': 'Yieldly - CHOICE-CHOICE',
    '2RQGRKUSDCZAEFFCXXCQBCVH6KOR7FZW6N3G7B547DIS76AGQJAZZVPPPY': 'Yieldly - OPUL-OPUL',
    '3OZ3HAIID3NPKB5N3B6TGFUBX44ZBZDNNRWGY6HSPQLP3NRSQW7D6ZKFEY': 'Yieldly - XET-XET',
    'ZMJVS7F3DXYDE6XIBXGWLEL6VXUYLCN4HTOW57QDLZ2TAMOWZK7EIYAQF4': 'Yieldly - YLDY-AKITA',
    '233725844': 'Yieldly Algo Staking',
    '233725850': 'Yieldly - YLDY-YLDY/ALGO',  # was duplicated; this is the surviving value
    '233725848': 'Yieldly',
    '348079765': 'Yieldly - YLDY-OPUL',
    '419301793': 'Yieldly - GEMS-GEMS',
    '393388133': 'Yieldly - YLDY-GEMS',
    '385089192': 'Yieldly - YLDY-ARCC',
    '498747685': 'Yieldly - ARCC/ARCC',
    '447336112': 'Yieldly - YLDY-CHOICE',
    '352116819': 'Yieldly - YLDY-SMILE',
    '373819681': 'Yieldly - SMILE-SMILE',
    '424101057': 'Yieldly - YLDY-XET',
    '464365150': 'Yieldly - CHOICE-CHOICE',
    '367431051': 'Yieldly - OPUL-OPUL',
    '470390215': 'Yieldly - XET-XET',
    '511597182': 'Yieldly - YLDY-AKITA',
    '511593477': 'Yieldly - (AKITA/ALGO)LP-YLDY',
}

# Algofi contract addresses and application IDs.
algofi_db = {
    '3EPGHSNBBN5M2LD6V7A63EHZQQLATVQHDBYJQIZ6BLCBTIXA5XR7ZOZEB4': 'Algofi - Creator',
    '2SGUKZCOBEVGN3HPKSXPS6DTCXZ7LSP6G3BQF6KVUIUREBBY2QTGSON7WQ': 'Algofi - Manager',
    'TY5N6G67JWHSMWFFFZ252FXWKLRO5UZLBEJ4LGV7TPR5PVSKPLDWH3YRXU': 'Algofi - ALGO Market',
    'ABQHZLNGGPWWZVA5SOQO3HBEECVJSE3OHYLKACOTC7TC4BS52ZHREPF7QY': 'Algofi - USDC Market',
    'W5UCMHDSTGKWBOV6YVLDVPJGPE4L4ISTU6TGXC7WRF63Y7GOVFOBUNJB5Q': 'Algofi - goBTC Market',
    'KATD43XBJJIDXB3U5UCPIFUDU3CZ3YQNVWA5PDDMZVGKSR4E3QWPJX67CY': 'Algofi - goETH Market',
    'OPY7XNB5LVMECF3PHJGQV2U33LZPM5FBUXA3JJPHANAG5B7GEYUPZJVYRE': 'Algofi - STBL Market',
    'DYLJJES76YQCOUK6D4RALIPJ76U5QT7L6A2KP6QTOH63OBLFKLTER2J6IA': 'Algofi - STBL Staking',
    'Z3GWRL5HGCJQYIXP4MINCRWCKWDHZ5VSYJHDLIDLEIOARIZWJX6GLAWWEI': 'Algofi - STBL/USDC LP Staking',
    '465818260': 'Algofi - Manager',
    '465814065': 'Algofi - ALGO Market',
    '465814103': 'Algofi - USDC Market',
    '465814149': 'Algofi - goBTC Market',
    '465814222': 'Algofi - goETH Market',
    '465814278': 'Algofi - STBL Market',
    '482608867': 'Algofi - STBL Staking',
    '553866305': 'Algofi - STBL/USDC LP Staking',
}
# Boolean identity checks: every line should print True.
#
# Fix: the original wrote `print(expr) == expected`, which printed the raw
# expression, then compared print()'s return value (None) with the expected
# result and silently discarded that comparison.  The comparison now happens
# INSIDE the call so the check itself is what gets printed.
print((True or False) == True)
print((True or True) == True)
print((False or False) == False)
print((True and False) == False)
print((True and True) == True)
print((False and False) == False)
print((not True) == False)
print((not False) == True)
print((not True or False) == ((not True) or False))
print((not False or False) == ((not False) or False))
print((not True and True) == ((not True) and True))
print((not False and True) == ((not False) and True))
print((not True and not False or False) == (((not True) and (not False)) or False))
# Verify boolean operator identities; each line is expected to print True.
# (Previously `print(expr) == expected` compared print()'s None return value
# against the expected result and threw the answer away; the comparison is
# now the argument to print.)
print((True or False) == True)
print((True or True) == True)
print((False or False) == False)
print((True and False) == False)
print((True and True) == True)
print((False and False) == False)
print((not True) == False)
print((not False) == True)
print((not True or False) == (not True or False))
print((not False or False) == (not False or False))
print((not True and True) == (not True and True))
print((not False and True) == (not False and True))
print((not True and (not False) or False) == (not True and (not False) or False))
# # PySNMP MIB module Wellfleet-PGM-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Wellfleet-PGM-MIB # Produced by pysmi-0.3.4 at Wed May 1 15:41:16 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Bits, IpAddress, Integer32, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, iso, Counter64, ObjectIdentity, Gauge32, ModuleIdentity, Unsigned32, TimeTicks, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "IpAddress", "Integer32", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "iso", "Counter64", "ObjectIdentity", "Gauge32", "ModuleIdentity", "Unsigned32", "TimeTicks", "MibIdentifier") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") wfPgmGroup, = mibBuilder.importSymbols("Wellfleet-COMMON-MIB", "wfPgmGroup") wfPgm = MibIdentifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1)) wfPgmCreate = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmCreate.setStatus('mandatory') if mibBuilder.loadTexts: 
wfPgmCreate.setDescription('Create/Delete parameter. Default is created. Users perform a set operation on this object in order to create/delete PGM.') wfPgmEnable = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmEnable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmEnable.setDescription('Enable/Disable Parameter indicates whether this PGM record is enabled or disabled.') wfPgmState = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpres", 4))).clone('notpres')).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmState.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmState.setDescription('The current state of the entire PGM.') wfPgmDebug = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 4), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmDebug.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmDebug.setDescription('This is a debug field for PGM. Setting bits cause pgm to gernerate certain log messages. This field will NOT restart PGM. 
The follow bits maybe set in any combination (LS stands for least significant): 0x00000001 for no display 0x00000002 for interface to MTM 0x00000004 for session addition 0x00000008 for session deletion 0x00000010 for retransmit state addition 0x00000020 for retransmit state deletion 0x00000040 for retransmit state timeout 0x00000080 for cache env 0x00000100 for ') wfPgmSessionLifeTime = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(300)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmSessionLifeTime.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionLifeTime.setDescription('The length of the idle time (seconds) for which a PGM session will be aged out. An idle PGM session means there is no SPM message received from the upstream.') wfPgmNnakGenerate = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('enabled')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmNnakGenerate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmNnakGenerate.setDescription('Router will send NNAK when received the redirect NCF if this parameter is set to enabled.') wfPgmMaxReXmitStates = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmMaxReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxReXmitStates.setDescription('The Maxium number of retransmit state entries per slot. 
If no value is set means network element has no limitation on this mib.') wfPgmTotalReXmitStates = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 8), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmTotalReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalReXmitStates.setDescription('The total number of retransmit state entries in retransmit state table.') wfPgmMaxSessions = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(100)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmMaxSessions.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxSessions.setDescription('The Maxium number of source path state sessions per slot. If no value is set means network element has no limitation on this mib.') wfPgmTotalSessions = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 10), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmTotalSessions.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalSessions.setDescription('The total number of source path session entries currently in PGM session table') wfPgmTotalReXmitStatesTimedOut = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 11), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmTotalReXmitStatesTimedOut.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalReXmitStatesTimedOut.setDescription('The total number of retransmit state entries got removed becuase of timed-out (no correspondent RDATA received).') wfPgmTotalUniqueNaks = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 12), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmTotalUniqueNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalUniqueNaks.setDescription('The total number of unique Naks received.') wfPgmTotalUniqueParityNaks = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 13), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
wfPgmTotalUniqueParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalUniqueParityNaks.setDescription('The total number of unique Parity Naks received.') wfPgmMaxNakRate = MibScalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(100)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmMaxNakRate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxNakRate.setDescription('The maximum number allowed of Nak per second.') wfPgmIfTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2), ) if mibBuilder.loadTexts: wfPgmIfTable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTable.setDescription('Table of PGM Interface Statistics') wfPgmIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1), ).setIndexNames((0, "Wellfleet-PGM-MIB", "wfPgmIfCct")) if mibBuilder.loadTexts: wfPgmIfEntry.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfEntry.setDescription('A PGM Interface Statistics entry') wfPgmIfCreate = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("created", 1), ("deleted", 2))).clone('created')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfCreate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfCreate.setDescription('Create or delete') wfPgmIfEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfEnable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfEnable.setDescription('not used. 
enabled/Disabled parameter.') wfPgmIfState = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("init", 3), ("notpres", 4))).clone('notpres')).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfState.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfState.setDescription('The current state of the PGM interface.') wfPgmIfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfCct.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfCct.setDescription('The PGM circuit number') wfPgmIfNakReXmitInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 2147483647)).clone(1000)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfNakReXmitInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakReXmitInterval.setDescription('The length of time (milliseconds) for which a network element will retransmit a NAK while waiting for a corresponding NCF. This interval is counted down from the transmission of a NAK') wfPgmIfMaxNakReXmitRate = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(2)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfMaxNakReXmitRate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfMaxNakReXmitRate.setDescription('The maximum retries of NAK restransmission per second is allowed. 
') wfPgmIfNakRdataInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(10)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfNakRdataInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakRdataInterval.setDescription('The length of time (seconds) for which a network element will wait for the corresponding RDATA. This interval is counted down from the time a matching NCF is received.') wfPgmIfNakEliminateInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647)).clone(5)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wfPgmIfNakEliminateInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakEliminateInterval.setDescription('The length of time (seconds) for which a network element will suspend NAK elimanation for the specific TSI/SQN. .This interval is counted down from the time the first NAK is establish. This value must be smaller than wfPgmNakRdataInterval. 
If the value of this parameter is set to 1 then all the duplicate NAKs will be elimanated.') wfPgmIfTotalReXmitStates = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfTotalReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTotalReXmitStates.setDescription('The total retransmit state entries for this interface.') wfPgmIfTotalReXmitTimedOut = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfTotalReXmitTimedOut.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTotalReXmitTimedOut.setDescription('The total time-outed retransmit state entries for this interface.') wfPgmIfInSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInSpms.setDescription('The total number of SPM received on the PGM interface.') wfPgmIfOutSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutSpms.setDescription('The total number of SPM sent out from the PGM interface.') wfPgmIfInParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 13), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParitySpms.setDescription('The total number of parity SPM received on the PGM interface') wfPgmIfOutParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParitySpms.setDescription('The total number of parity SPM sent out from the PGM interface') wfPgmIfInSpmPortErrors = MibTableColumn((1, 
3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 15), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInSpmPortErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInSpmPortErrors.setDescription('The number of received SPM discarded on the PGM interface for the wrong inbound') wfPgmIfInRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 16), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInRdata.setDescription('The total number of RDATA received on the PGM interface') wfPgmIfOutRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 17), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutRdata.setDescription('The total number of RDATA sent out from the PGM interface') wfPgmIfInParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 18), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInParityRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParityRdata.setDescription('The total number of Parity RDATA received on the PGM interface') wfPgmIfOutParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 19), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutParityRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParityRdata.setDescription('The total number of parity RDATA sent out from the PGM interface') wfPgmIfInRdataPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 20), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInRdataPortErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInRdataPortErrors.setDescription('The number of received RDATA discarded because of wrong inbound') wfPgmIfInRdataNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 21), Counter32()).setMaxAccess("readonly") if 
mibBuilder.loadTexts: wfPgmIfInRdataNoSessionErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInRdataNoSessionErrors.setDescription('The number of received RDATA discarded because of no session') wfPgmIfUniqueNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 22), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfUniqueNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfUniqueNaks.setDescription('The total number of unique NAKs received for this interface.') wfPgmIfInNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 23), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNaks.setDescription('The total number of NAK received on the PGM interface') wfPgmIfOutNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 24), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutNaks.setDescription('The total number of NAK sent out from the PGM interface') wfPgmIfUniqueParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 25), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfUniqueParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfUniqueParityNaks.setDescription('The total number of unique parity NAKs received for this interface.') wfPgmIfInParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 26), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParityNaks.setDescription('The total number of parity NAK received on the PGM interface') wfPgmIfOutParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 27), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParityNaks.setDescription('The total number 
of parity NAK sent out from the PGM interface') wfPgmIfInNakPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 28), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNakPortErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNakPortErrors.setDescription('The number of received NAK discarded because of wrong outbound') wfPgmIfInNakNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 29), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNakNoSessionErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNakNoSessionErrors.setDescription('The number of received NAK Discarded because of no session') wfPgmIfInNakSeqErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 30), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNakSeqErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNakSeqErrors.setDescription('The number of received NAK Discarded because of out of sequence (out of retransmit window).') wfPgmIfInParityNakTgErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 31), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInParityNakTgErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParityNakTgErrors.setDescription('The number of received parity NAK Discarded because of out of parity TG window.') wfPgmIfInNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 32), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNnaks.setDescription('The total number of NNAK received on the PGM interface') wfPgmIfOutNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 33), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutNnaks.setDescription('The total number of NNAK sent out from the PGM interface') 
wfPgmIfInParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 34), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInParityNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParityNnaks.setDescription('The total number of parity NNAK received on the PGM interface') wfPgmIfOutParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 35), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutParityNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParityNnaks.setDescription('The total number of parity NNAK sent out from the PGM interface') wfPgmIfInNnakPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 36), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNnakPortErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNnakPortErrors.setDescription('The number of received NNAK discarded because of wrong mcast outbound') wfPgmIfInNnakNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 37), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNnakNoSessionErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNnakNoSessionErrors.setDescription('The number of received NNAK discarded because of no session') wfPgmIfInNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 38), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNcfs.setDescription('The total number of NCF received on the PGM interface') wfPgmIfOutNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 39), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutNcfs.setDescription('The total number of NCF sent out from the PGM interface') wfPgmIfInParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 40), Counter32()).setMaxAccess("readonly") 
if mibBuilder.loadTexts: wfPgmIfInParityNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParityNcfs.setDescription('The total number of parity NCF received on the PGM interface') wfPgmIfOutParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 41), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfOutParityNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParityNcfs.setDescription('The total number of parity NCF sent out from the PGM interface') wfPgmIfInNcfPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 42), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNcfPortErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNcfPortErrors.setDescription('The number of received NCF discarded because of the wrong inbound') wfPgmIfInNcfNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 43), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInNcfNoSessionErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInNcfNoSessionErrors.setDescription('The number of received NCF discarded because of no session') wfPgmIfInRedirectNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 44), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmIfInRedirectNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInRedirectNcfs.setDescription('The number of redirected NCF received on the PGM interface') wfPgmSessionTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3), ) if mibBuilder.loadTexts: wfPgmSessionTable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionTable.setDescription('Table of PGM flow for each (port,global id)') wfPgmSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1), ).setIndexNames((0, "Wellfleet-PGM-MIB", "wfPgmSessionSource"), (0, "Wellfleet-PGM-MIB", "wfPgmSessionGroup"), (0, "Wellfleet-PGM-MIB", "wfPgmSessionSourcePort"), (0, "Wellfleet-PGM-MIB", 
"wfPgmSessionGlobalId")) if mibBuilder.loadTexts: wfPgmSessionEntry.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionEntry.setDescription('A PGM Session entry') wfPgmSessionSource = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionSource.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionSource.setDescription('The source IP address of this entry.') wfPgmSessionGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionGroup.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionGroup.setDescription('The destination group address of this entry') wfPgmSessionSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionSourcePort.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionSourcePort.setDescription('The source port of this pgm session') wfPgmSessionGlobalId = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionGlobalId.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionGlobalId.setDescription('The Global ID this entry') wfPgmSessionUpstreamAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 5), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionUpstreamAddress.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionUpstreamAddress.setDescription('The IP address of the upstream interface for the entry.') wfPgmSessionUpstreamIfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionUpstreamIfCct.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionUpstreamIfCct.setDescription('The circuit number of the 
upstream intf for the entry.') wfPgmSessionTrailEdgeSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionTrailEdgeSeq.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionTrailEdgeSeq.setDescription('The trailing edge sequence of the transfer window.') wfPgmSessionIncrSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionIncrSeq.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionIncrSeq.setDescription('The increase sequnce number in the transfer window.') wfPgmSessionLeadEdgeSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionLeadEdgeSeq.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionLeadEdgeSeq.setDescription('The leading edge sequence of the transfer window.') wfPgmSessionInSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInSpms.setDescription('The total number of SPMs received for this session.') wfPgmSessionOutSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutSpms.setDescription('The total number of SPMs sent out for this session.') wfPgmSessionInParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInParitySpms.setDescription('The total number of ParityS PMs received for this session.') wfPgmSessionOutParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 13), 
Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutParitySpms.setDescription('The total number of Parity SPMs sent out for this session.') wfPgmSessionTotalReXmitStates = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionTotalReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionTotalReXmitStates.setDescription('The total retransmit state entries for this session.') wfPgmSessionTotalReXmitTimedOut = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 15), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionTotalReXmitTimedOut.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionTotalReXmitTimedOut.setDescription('The total time-outed retransmit state entries for this session.') wfPgmSessionInRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 16), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInRdata.setDescription('The total number of RDATAs received for this session.') wfPgmSessionOutRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 17), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutRdata.setDescription('The total number of RDATAs sent out from this session.') wfPgmSessionInParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 18), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInParityRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInParityRdata.setDescription('The total number of parity RDATAs received for this session.') wfPgmSessionOutParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 19), Counter32()).setMaxAccess("readonly") if 
mibBuilder.loadTexts: wfPgmSessionOutParityRdata.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutParityRdata.setDescription('The total number of parity RDATAs sent out from this session.') wfPgmSessionInRdataNoStateErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 20), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInRdataNoStateErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInRdataNoStateErrors.setDescription('The total number of received RDATA discarded for no Retransmit state.') wfPgmSessionUniqueNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 21), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionUniqueNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionUniqueNaks.setDescription('The total number of unique NAKs received for this session.') wfPgmSessionInNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 22), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInNaks.setDescription('The total number of NAKs received for this session.') wfPgmSessionOutNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 23), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutNaks.setDescription('The total number of NAKs sent out from this session.') wfPgmSessionUniqueParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 24), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionUniqueParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionUniqueParityNaks.setDescription('The total number of unique parity NAKs received for this session.') wfPgmSessionInParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 25), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
wfPgmSessionInParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInParityNaks.setDescription('The total number of parity NAKs received for this session.') wfPgmSessionOutParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 26), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutParityNaks.setDescription('The total number of parity NAKs sent out from this session.') wfPgmSessionInNakSeqErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 27), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInNakSeqErrors.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInNakSeqErrors.setDescription('The total number of received NAKs discarded because of out of sequence (out of retransmit window).') wfPgmSessionInNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 28), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInNnaks.setDescription('The total number of NNAKs received for this session.') wfPgmSessionOutNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 29), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutNnaks.setDescription('The total number of NNAKs sent out from this session.') wfPgmSessionInParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 30), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInParityNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInParityNnaks.setDescription('The total number of parity NNAKs received for this session.') wfPgmSessionOutParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 31), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
wfPgmSessionOutParityNnaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutParityNnaks.setDescription('The total number of Parity NNAKs sent out from this session.') wfPgmSessionInNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 32), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInNcfs.setDescription('The total number of Ncfs received for this session.') wfPgmSessionOutNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 33), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutNcfs.setDescription('The total number of Ncfs sent out from this session.') wfPgmSessionInParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 34), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInParityNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInParityNcfs.setDescription('The total number of Parity Ncfs received for this session.') wfPgmSessionOutParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 35), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionOutParityNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionOutParityNcfs.setDescription('The total number of Parity Ncfs sent out from this session.') wfPgmSessionInRedirectNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 36), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmSessionInRedirectNcfs.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionInRedirectNcfs.setDescription('The total number of redirect Ncfs received for this session.') wfPgmReXmitTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4), ) if mibBuilder.loadTexts: wfPgmReXmitTable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitTable.setDescription('Table of PGM Retransmit state') 
wfPgmReXmitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1), ).setIndexNames((0, "Wellfleet-PGM-MIB", "wfPgmReXmitSource"), (0, "Wellfleet-PGM-MIB", "wfPgmReXmitGroup"), (0, "Wellfleet-PGM-MIB", "wfPgmReXmitSourcePort"), (0, "Wellfleet-PGM-MIB", "wfPgmReXmitGlobalId"), (0, "Wellfleet-PGM-MIB", "wfPgmReXmitSelectiveSeqNum"), (0, "Wellfleet-PGM-MIB", "wfPgmReXmitParityTgSeqNum")) if mibBuilder.loadTexts: wfPgmReXmitEntry.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitEntry.setDescription('A PGM ReXmit entry') wfPgmReXmitSource = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitSource.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitSource.setDescription('The source IP address of this entry.') wfPgmReXmitGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitGroup.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitGroup.setDescription('The destination group address of this entry') wfPgmReXmitSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 3), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitSourcePort.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitSourcePort.setDescription('The source port of this pgm retransmit state') wfPgmReXmitGlobalId = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitGlobalId.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitGlobalId.setDescription('The Global ID this entry') wfPgmReXmitSelectiveSeqNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitSelectiveSeqNum.setStatus('mandatory') if mibBuilder.loadTexts: 
wfPgmReXmitSelectiveSeqNum.setDescription('The Selected Sequence number for this entry.') wfPgmReXmitParityTgSeqNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitParityTgSeqNum.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitParityTgSeqNum.setDescription('The Requested Parity Tg sequence number for this entry. this value will be the same as wfPgmSessionParityTgSeq.') wfPgmReXmitReqParityTgCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitReqParityTgCount.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitReqParityTgCount.setDescription('The Requested number of missing Parity packets of specific Tg. The largest counter of the received NAK will be stored in this mib.') wfPgmReXmitUpStreamCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 8), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitUpStreamCct.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitUpStreamCct.setDescription('The upstream interface circuit number.') wfPgmReXmitDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 9), OctetString()).setMaxAccess("readonly") if mibBuilder.loadTexts: wfPgmReXmitDownStream.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmReXmitDownStream.setDescription('list of downstream intfs for this entry. 
Each one is in the format of (intf_addr(u_int32) and intf_cct(u_int16)') mibBuilder.exportSymbols("Wellfleet-PGM-MIB", wfPgmIfInRdataNoSessionErrors=wfPgmIfInRdataNoSessionErrors, wfPgmSessionSource=wfPgmSessionSource, wfPgmIfInNnakNoSessionErrors=wfPgmIfInNnakNoSessionErrors, wfPgmTotalSessions=wfPgmTotalSessions, wfPgmMaxReXmitStates=wfPgmMaxReXmitStates, wfPgmIfInNaks=wfPgmIfInNaks, wfPgmIfInRdata=wfPgmIfInRdata, wfPgmSessionUniqueParityNaks=wfPgmSessionUniqueParityNaks, wfPgmSessionInParitySpms=wfPgmSessionInParitySpms, wfPgmTotalUniqueParityNaks=wfPgmTotalUniqueParityNaks, wfPgmIfNakReXmitInterval=wfPgmIfNakReXmitInterval, wfPgmSessionOutParitySpms=wfPgmSessionOutParitySpms, wfPgmIfInParityNaks=wfPgmIfInParityNaks, wfPgmIfInNnaks=wfPgmIfInNnaks, wfPgmSessionInRdataNoStateErrors=wfPgmSessionInRdataNoStateErrors, wfPgmIfInSpms=wfPgmIfInSpms, wfPgmIfTotalReXmitStates=wfPgmIfTotalReXmitStates, wfPgmSessionInNnaks=wfPgmSessionInNnaks, wfPgmState=wfPgmState, wfPgmIfInRedirectNcfs=wfPgmIfInRedirectNcfs, wfPgmSessionInNcfs=wfPgmSessionInNcfs, wfPgmIfInNnakPortErrors=wfPgmIfInNnakPortErrors, wfPgmSessionInRedirectNcfs=wfPgmSessionInRedirectNcfs, wfPgmMaxNakRate=wfPgmMaxNakRate, wfPgmSessionOutNaks=wfPgmSessionOutNaks, wfPgmSessionTotalReXmitStates=wfPgmSessionTotalReXmitStates, wfPgmIfState=wfPgmIfState, wfPgmSessionUpstreamIfCct=wfPgmSessionUpstreamIfCct, wfPgmMaxSessions=wfPgmMaxSessions, wfPgmIfTable=wfPgmIfTable, wfPgmSessionOutParityNaks=wfPgmSessionOutParityNaks, wfPgmSessionTrailEdgeSeq=wfPgmSessionTrailEdgeSeq, wfPgm=wfPgm, wfPgmIfOutSpms=wfPgmIfOutSpms, wfPgmNnakGenerate=wfPgmNnakGenerate, wfPgmTotalUniqueNaks=wfPgmTotalUniqueNaks, wfPgmSessionUniqueNaks=wfPgmSessionUniqueNaks, wfPgmIfMaxNakReXmitRate=wfPgmIfMaxNakReXmitRate, wfPgmEnable=wfPgmEnable, wfPgmIfInSpmPortErrors=wfPgmIfInSpmPortErrors, wfPgmSessionTable=wfPgmSessionTable, wfPgmSessionTotalReXmitTimedOut=wfPgmSessionTotalReXmitTimedOut, wfPgmIfEnable=wfPgmIfEnable, 
wfPgmSessionSourcePort=wfPgmSessionSourcePort, wfPgmSessionInNaks=wfPgmSessionInNaks, wfPgmReXmitParityTgSeqNum=wfPgmReXmitParityTgSeqNum, wfPgmIfNakRdataInterval=wfPgmIfNakRdataInterval, wfPgmIfOutParitySpms=wfPgmIfOutParitySpms, wfPgmReXmitSource=wfPgmReXmitSource, wfPgmSessionInParityRdata=wfPgmSessionInParityRdata, wfPgmCreate=wfPgmCreate, wfPgmIfInNcfPortErrors=wfPgmIfInNcfPortErrors, wfPgmReXmitEntry=wfPgmReXmitEntry, wfPgmSessionOutParityNcfs=wfPgmSessionOutParityNcfs, wfPgmIfInParityNnaks=wfPgmIfInParityNnaks, wfPgmIfOutNnaks=wfPgmIfOutNnaks, wfPgmIfOutParityRdata=wfPgmIfOutParityRdata, wfPgmIfOutNcfs=wfPgmIfOutNcfs, wfPgmIfInNcfNoSessionErrors=wfPgmIfInNcfNoSessionErrors, wfPgmSessionOutNcfs=wfPgmSessionOutNcfs, wfPgmSessionLifeTime=wfPgmSessionLifeTime, wfPgmIfInNakNoSessionErrors=wfPgmIfInNakNoSessionErrors, wfPgmSessionIncrSeq=wfPgmSessionIncrSeq, wfPgmIfInNakSeqErrors=wfPgmIfInNakSeqErrors, wfPgmReXmitGroup=wfPgmReXmitGroup, wfPgmReXmitReqParityTgCount=wfPgmReXmitReqParityTgCount, wfPgmIfEntry=wfPgmIfEntry, wfPgmIfTotalReXmitTimedOut=wfPgmIfTotalReXmitTimedOut, wfPgmIfOutRdata=wfPgmIfOutRdata, wfPgmIfCct=wfPgmIfCct, wfPgmIfInParitySpms=wfPgmIfInParitySpms, wfPgmSessionOutParityRdata=wfPgmSessionOutParityRdata, wfPgmReXmitTable=wfPgmReXmitTable, wfPgmSessionInRdata=wfPgmSessionInRdata, wfPgmSessionOutNnaks=wfPgmSessionOutNnaks, wfPgmSessionInParityNcfs=wfPgmSessionInParityNcfs, wfPgmSessionGlobalId=wfPgmSessionGlobalId, wfPgmSessionInParityNaks=wfPgmSessionInParityNaks, wfPgmReXmitUpStreamCct=wfPgmReXmitUpStreamCct, wfPgmIfOutNaks=wfPgmIfOutNaks, wfPgmSessionOutParityNnaks=wfPgmSessionOutParityNnaks, wfPgmSessionInSpms=wfPgmSessionInSpms, wfPgmIfOutParityNaks=wfPgmIfOutParityNaks, wfPgmIfNakEliminateInterval=wfPgmIfNakEliminateInterval, wfPgmIfInNcfs=wfPgmIfInNcfs, wfPgmIfInParityNcfs=wfPgmIfInParityNcfs, wfPgmSessionEntry=wfPgmSessionEntry, wfPgmIfOutParityNcfs=wfPgmIfOutParityNcfs, wfPgmSessionOutSpms=wfPgmSessionOutSpms, 
wfPgmSessionOutRdata=wfPgmSessionOutRdata, wfPgmDebug=wfPgmDebug, wfPgmIfInParityNakTgErrors=wfPgmIfInParityNakTgErrors, wfPgmReXmitSourcePort=wfPgmReXmitSourcePort, wfPgmReXmitSelectiveSeqNum=wfPgmReXmitSelectiveSeqNum, wfPgmReXmitGlobalId=wfPgmReXmitGlobalId, wfPgmTotalReXmitStatesTimedOut=wfPgmTotalReXmitStatesTimedOut, wfPgmSessionGroup=wfPgmSessionGroup, wfPgmIfCreate=wfPgmIfCreate, wfPgmIfUniqueParityNaks=wfPgmIfUniqueParityNaks, wfPgmSessionLeadEdgeSeq=wfPgmSessionLeadEdgeSeq, wfPgmReXmitDownStream=wfPgmReXmitDownStream, wfPgmIfUniqueNaks=wfPgmIfUniqueNaks, wfPgmSessionUpstreamAddress=wfPgmSessionUpstreamAddress, wfPgmIfOutParityNnaks=wfPgmIfOutParityNnaks, wfPgmSessionInNakSeqErrors=wfPgmSessionInNakSeqErrors, wfPgmIfInNakPortErrors=wfPgmIfInNakPortErrors, wfPgmSessionInParityNnaks=wfPgmSessionInParityNnaks, wfPgmIfInParityRdata=wfPgmIfInParityRdata, wfPgmTotalReXmitStates=wfPgmTotalReXmitStates, wfPgmIfInRdataPortErrors=wfPgmIfInRdataPortErrors)
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (constraints_intersection, constraints_union, value_range_constraint, value_size_constraint, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueRangeConstraint', 'ValueSizeConstraint', 'SingleValueConstraint') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (bits, ip_address, integer32, counter32, mib_scalar, mib_table, mib_table_row, mib_table_column, notification_type, iso, counter64, object_identity, gauge32, module_identity, unsigned32, time_ticks, mib_identifier) = mibBuilder.importSymbols('SNMPv2-SMI', 'Bits', 'IpAddress', 'Integer32', 'Counter32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'NotificationType', 'iso', 'Counter64', 'ObjectIdentity', 'Gauge32', 'ModuleIdentity', 'Unsigned32', 'TimeTicks', 'MibIdentifier') (display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention') (wf_pgm_group,) = mibBuilder.importSymbols('Wellfleet-COMMON-MIB', 'wfPgmGroup') wf_pgm = mib_identifier((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1)) wf_pgm_create = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('created', 1), ('deleted', 2))).clone('created')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmCreate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmCreate.setDescription('Create/Delete parameter. Default is created. 
Users perform a set operation on this object in order to create/delete PGM.') wf_pgm_enable = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('enabled', 1), ('disabled', 2))).clone('disabled')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmEnable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmEnable.setDescription('Enable/Disable Parameter indicates whether this PGM record is enabled or disabled.') wf_pgm_state = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('up', 1), ('down', 2), ('init', 3), ('notpres', 4))).clone('notpres')).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmState.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmState.setDescription('The current state of the entire PGM.') wf_pgm_debug = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 4), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmDebug.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmDebug.setDescription('This is a debug field for PGM. Setting bits cause pgm to gernerate certain log messages. This field will NOT restart PGM. 
The follow bits maybe set in any combination (LS stands for least significant): 0x00000001 for no display 0x00000002 for interface to MTM 0x00000004 for session addition 0x00000008 for session deletion 0x00000010 for retransmit state addition 0x00000020 for retransmit state deletion 0x00000040 for retransmit state timeout 0x00000080 for cache env 0x00000100 for ') wf_pgm_session_life_time = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 5), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647)).clone(300)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmSessionLifeTime.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmSessionLifeTime.setDescription('The length of the idle time (seconds) for which a PGM session will be aged out. An idle PGM session means there is no SPM message received from the upstream.') wf_pgm_nnak_generate = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('enabled', 1), ('disabled', 2))).clone('enabled')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmNnakGenerate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmNnakGenerate.setDescription('Router will send NNAK when received the redirect NCF if this parameter is set to enabled.') wf_pgm_max_re_xmit_states = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 7), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647))).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmMaxReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxReXmitStates.setDescription('The Maxium number of retransmit state entries per slot. 
If no value is set means network element has no limitation on this mib.') wf_pgm_total_re_xmit_states = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 8), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmTotalReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalReXmitStates.setDescription('The total number of retransmit state entries in retransmit state table.') wf_pgm_max_sessions = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 9), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647)).clone(100)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmMaxSessions.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxSessions.setDescription('The Maxium number of source path state sessions per slot. If no value is set means network element has no limitation on this mib.') wf_pgm_total_sessions = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 10), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmTotalSessions.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalSessions.setDescription('The total number of source path session entries currently in PGM session table') wf_pgm_total_re_xmit_states_timed_out = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 11), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmTotalReXmitStatesTimedOut.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalReXmitStatesTimedOut.setDescription('The total number of retransmit state entries got removed becuase of timed-out (no correspondent RDATA received).') wf_pgm_total_unique_naks = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 12), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmTotalUniqueNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalUniqueNaks.setDescription('The total number of unique Naks received.') wf_pgm_total_unique_parity_naks = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 13), gauge32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
wfPgmTotalUniqueParityNaks.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmTotalUniqueParityNaks.setDescription('The total number of unique Parity Naks received.') wf_pgm_max_nak_rate = mib_scalar((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 1, 14), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647)).clone(100)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmMaxNakRate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmMaxNakRate.setDescription('The maximum number allowed of Nak per second.') wf_pgm_if_table = mib_table((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2)) if mibBuilder.loadTexts: wfPgmIfTable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTable.setDescription('Table of PGM Interface Statistics') wf_pgm_if_entry = mib_table_row((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1)).setIndexNames((0, 'Wellfleet-PGM-MIB', 'wfPgmIfCct')) if mibBuilder.loadTexts: wfPgmIfEntry.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfEntry.setDescription('A PGM Interface Statistics entry') wf_pgm_if_create = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('created', 1), ('deleted', 2))).clone('created')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfCreate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfCreate.setDescription('Create or delete') wf_pgm_if_enable = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('enabled', 1), ('disabled', 2))).clone('disabled')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfEnable.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfEnable.setDescription('not used. 
enabled/Disabled parameter.') wf_pgm_if_state = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('up', 1), ('down', 2), ('init', 3), ('notpres', 4))).clone('notpres')).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfState.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfState.setDescription('The current state of the PGM interface.') wf_pgm_if_cct = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 4), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfCct.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfCct.setDescription('The PGM circuit number') wf_pgm_if_nak_re_xmit_interval = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 5), integer32().subtype(subtypeSpec=value_range_constraint(100, 2147483647)).clone(1000)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfNakReXmitInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakReXmitInterval.setDescription('The length of time (milliseconds) for which a network element will retransmit a NAK while waiting for a corresponding NCF. This interval is counted down from the transmission of a NAK') wf_pgm_if_max_nak_re_xmit_rate = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 6), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647)).clone(2)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfMaxNakReXmitRate.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfMaxNakReXmitRate.setDescription('The maximum retries of NAK restransmission per second is allowed. 
') wf_pgm_if_nak_rdata_interval = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 7), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647)).clone(10)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfNakRdataInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakRdataInterval.setDescription('The length of time (seconds) for which a network element will wait for the corresponding RDATA. This interval is counted down from the time a matching NCF is received.') wf_pgm_if_nak_eliminate_interval = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 8), integer32().subtype(subtypeSpec=value_range_constraint(0, 2147483647)).clone(5)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wfPgmIfNakEliminateInterval.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfNakEliminateInterval.setDescription('The length of time (seconds) for which a network element will suspend NAK elimanation for the specific TSI/SQN. .This interval is counted down from the time the first NAK is establish. This value must be smaller than wfPgmNakRdataInterval. 
If the value of this parameter is set to 1 then all the duplicate NAKs will be elimanated.') wf_pgm_if_total_re_xmit_states = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 9), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfTotalReXmitStates.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTotalReXmitStates.setDescription('The total retransmit state entries for this interface.') wf_pgm_if_total_re_xmit_timed_out = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 10), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfTotalReXmitTimedOut.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfTotalReXmitTimedOut.setDescription('The total time-outed retransmit state entries for this interface.') wf_pgm_if_in_spms = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 11), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfInSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInSpms.setDescription('The total number of SPM received on the PGM interface.') wf_pgm_if_out_spms = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 12), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfOutSpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutSpms.setDescription('The total number of SPM sent out from the PGM interface.') wf_pgm_if_in_parity_spms = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 13), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfInParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfInParitySpms.setDescription('The total number of parity SPM received on the PGM interface') wf_pgm_if_out_parity_spms = mib_table_column((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 14), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wfPgmIfOutParitySpms.setStatus('mandatory') if mibBuilder.loadTexts: wfPgmIfOutParitySpms.setDescription('The total number of parity SPM sent out from the PGM interface') 
# --- PGM per-interface statistics columns (wfPgmIfEntry, OID ...28.2.1.15-44) ---
# BUG FIX(review): this region had been run through an automated snake_case
# renamer: objects were ASSIGNED snake_case names (wf_pgm_if_in_spm_port_errors)
# while every later reference (setStatus/setDescription and the module's
# exportSymbols call) still used the original camelCase identifiers, and the
# pysnmp constructors had been lower-cased (mib_table_column, counter32).  The
# statements were also collapsed onto single physical lines, so the module did
# not even parse.  Restored the canonical pysmi-generated form.
# NOTE(review): assumes the standard pysmi preamble earlier in this file brings
# MibTableColumn, MibTable, Counter32, IpAddress, Integer32, OctetString,
# ValueSizeConstraint and mibBuilder into scope — TODO confirm against the
# (unseen) top of the file.
wfPgmIfInSpmPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 15), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInSpmPortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInSpmPortErrors.setDescription('The number of received SPM discarded on the PGM interface for the wrong inbound')
wfPgmIfInRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 16), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInRdata.setDescription('The total number of RDATA received on the PGM interface')
wfPgmIfOutRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 17), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutRdata.setDescription('The total number of RDATA sent out from the PGM interface')
wfPgmIfInParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 18), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInParityRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInParityRdata.setDescription('The total number of Parity RDATA received on the PGM interface')
wfPgmIfOutParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 19), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutParityRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutParityRdata.setDescription('The total number of parity RDATA sent out from the PGM interface')
wfPgmIfInRdataPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 20), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInRdataPortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInRdataPortErrors.setDescription('The number of received RDATA discarded because of wrong inbound')
wfPgmIfInRdataNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 21), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInRdataNoSessionErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInRdataNoSessionErrors.setDescription('The number of received RDATA discarded because of no session')
wfPgmIfUniqueNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 22), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfUniqueNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfUniqueNaks.setDescription('The total number of unique NAKs received for this interface.')
wfPgmIfInNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 23), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNaks.setDescription('The total number of NAK received on the PGM interface')
wfPgmIfOutNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 24), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutNaks.setDescription('The total number of NAK sent out from the PGM interface')
wfPgmIfUniqueParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 25), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfUniqueParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfUniqueParityNaks.setDescription('The total number of unique parity NAKs received for this interface.')
wfPgmIfInParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 26), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInParityNaks.setDescription('The total number of parity NAK received on the PGM interface')
wfPgmIfOutParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 27), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutParityNaks.setDescription('The total number of parity NAK sent out from the PGM interface')
wfPgmIfInNakPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 28), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNakPortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNakPortErrors.setDescription('The number of received NAK discarded because of wrong outbound')
wfPgmIfInNakNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 29), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNakNoSessionErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNakNoSessionErrors.setDescription('The number of received NAK Discarded because of no session')
wfPgmIfInNakSeqErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 30), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNakSeqErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNakSeqErrors.setDescription('The number of received NAK Discarded because of out of sequence (out of retransmit window).')
wfPgmIfInParityNakTgErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 31), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInParityNakTgErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInParityNakTgErrors.setDescription('The number of received parity NAK Discarded because of out of parity TG window.')
wfPgmIfInNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 32), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNnaks.setDescription('The total number of NNAK received on the PGM interface')
wfPgmIfOutNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 33), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutNnaks.setDescription('The total number of NNAK sent out from the PGM interface')
wfPgmIfInParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 34), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInParityNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInParityNnaks.setDescription('The total number of parity NNAK received on the PGM interface')
wfPgmIfOutParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 35), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutParityNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutParityNnaks.setDescription('The total number of parity NNAK sent out from the PGM interface')
wfPgmIfInNnakPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 36), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNnakPortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNnakPortErrors.setDescription('The number of received NNAK discarded because of wrong mcast outbound')
wfPgmIfInNnakNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 37), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNnakNoSessionErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNnakNoSessionErrors.setDescription('The number of received NNAK discarded because of no session')
wfPgmIfInNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 38), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNcfs.setDescription('The total number of NCF received on the PGM interface')
wfPgmIfOutNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 39), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutNcfs.setDescription('The total number of NCF sent out from the PGM interface')
wfPgmIfInParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 40), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInParityNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInParityNcfs.setDescription('The total number of parity NCF received on the PGM interface')
wfPgmIfOutParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 41), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfOutParityNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfOutParityNcfs.setDescription('The total number of parity NCF sent out from the PGM interface')
wfPgmIfInNcfPortErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 42), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNcfPortErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNcfPortErrors.setDescription('The number of received NCF discarded because of the wrong inbound')
wfPgmIfInNcfNoSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 43), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInNcfNoSessionErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInNcfNoSessionErrors.setDescription('The number of received NCF discarded because of no session')
wfPgmIfInRedirectNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 2, 1, 44), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmIfInRedirectNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmIfInRedirectNcfs.setDescription('The number of redirected NCF received on the PGM interface')
# Per-session statistics table; rows are wfPgmSessionEntry (defined below).
wfPgmSessionTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3))
if mibBuilder.loadTexts: wfPgmSessionTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionTable.setDescription('Table of PGM flow for each (port,global id)')
# --- PGM session table rows/columns, retransmit-state table, and module exports ---
# BUG FIX(review): restored the canonical pysmi-generated camelCase identifiers.
# The chunk had been case-converted (snake_case assignments such as
# wf_pgm_session_entry, lower-cased constructors such as mib_table_row) while
# every reference and the exportSymbols call below still used camelCase, and the
# statements were collapsed onto single unparsable lines.
# NOTE(review): assumes MibTableColumn, MibTable, MibTableRow, Counter32,
# Integer32, IpAddress, OctetString, ValueSizeConstraint and mibBuilder are
# imported by the pysmi preamble earlier in the file — TODO confirm.
wfPgmSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1)).setIndexNames((0, 'Wellfleet-PGM-MIB', 'wfPgmSessionSource'), (0, 'Wellfleet-PGM-MIB', 'wfPgmSessionGroup'), (0, 'Wellfleet-PGM-MIB', 'wfPgmSessionSourcePort'), (0, 'Wellfleet-PGM-MIB', 'wfPgmSessionGlobalId'))
if mibBuilder.loadTexts: wfPgmSessionEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionEntry.setDescription('A PGM Session entry')
wfPgmSessionSource = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 1), IpAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionSource.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionSource.setDescription('The source IP address of this entry.')
wfPgmSessionGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 2), IpAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionGroup.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionGroup.setDescription('The destination group address of this entry')
wfPgmSessionSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 3), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionSourcePort.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionSourcePort.setDescription('The source port of this pgm session')
wfPgmSessionGlobalId = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionGlobalId.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionGlobalId.setDescription('The Global ID this entry')
wfPgmSessionUpstreamAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 5), IpAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionUpstreamAddress.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionUpstreamAddress.setDescription('The IP address of the upstream interface for the entry.')
wfPgmSessionUpstreamIfCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 6), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionUpstreamIfCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionUpstreamIfCct.setDescription('The circuit number of the upstream intf for the entry.')
wfPgmSessionTrailEdgeSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 7), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionTrailEdgeSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionTrailEdgeSeq.setDescription('The trailing edge sequence of the transfer window.')
wfPgmSessionIncrSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 8), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionIncrSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionIncrSeq.setDescription('The increase sequnce number in the transfer window.')
wfPgmSessionLeadEdgeSeq = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 9), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionLeadEdgeSeq.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionLeadEdgeSeq.setDescription('The leading edge sequence of the transfer window.')
wfPgmSessionInSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 10), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInSpms.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInSpms.setDescription('The total number of SPMs received for this session.')
wfPgmSessionOutSpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 11), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutSpms.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutSpms.setDescription('The total number of SPMs sent out for this session.')
wfPgmSessionInParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 12), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInParitySpms.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInParitySpms.setDescription('The total number of ParityS PMs received for this session.')
wfPgmSessionOutParitySpms = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 13), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutParitySpms.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutParitySpms.setDescription('The total number of Parity SPMs sent out for this session.')
wfPgmSessionTotalReXmitStates = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 14), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionTotalReXmitStates.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionTotalReXmitStates.setDescription('The total retransmit state entries for this session.')
wfPgmSessionTotalReXmitTimedOut = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 15), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionTotalReXmitTimedOut.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionTotalReXmitTimedOut.setDescription('The total time-outed retransmit state entries for this session.')
wfPgmSessionInRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 16), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInRdata.setDescription('The total number of RDATAs received for this session.')
wfPgmSessionOutRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 17), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutRdata.setDescription('The total number of RDATAs sent out from this session.')
wfPgmSessionInParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 18), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInParityRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInParityRdata.setDescription('The total number of parity RDATAs received for this session.')
wfPgmSessionOutParityRdata = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 19), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutParityRdata.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutParityRdata.setDescription('The total number of parity RDATAs sent out from this session.')
wfPgmSessionInRdataNoStateErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 20), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInRdataNoStateErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInRdataNoStateErrors.setDescription('The total number of received RDATA discarded for no Retransmit state.')
wfPgmSessionUniqueNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 21), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionUniqueNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionUniqueNaks.setDescription('The total number of unique NAKs received for this session.')
wfPgmSessionInNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 22), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInNaks.setDescription('The total number of NAKs received for this session.')
wfPgmSessionOutNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 23), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutNaks.setDescription('The total number of NAKs sent out from this session.')
wfPgmSessionUniqueParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 24), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionUniqueParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionUniqueParityNaks.setDescription('The total number of unique parity NAKs received for this session.')
wfPgmSessionInParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 25), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInParityNaks.setDescription('The total number of parity NAKs received for this session.')
wfPgmSessionOutParityNaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 26), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutParityNaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutParityNaks.setDescription('The total number of parity NAKs sent out from this session.')
wfPgmSessionInNakSeqErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 27), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInNakSeqErrors.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInNakSeqErrors.setDescription('The total number of received NAKs discarded because of out of sequence (out of retransmit window).')
wfPgmSessionInNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 28), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInNnaks.setDescription('The total number of NNAKs received for this session.')
wfPgmSessionOutNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 29), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutNnaks.setDescription('The total number of NNAKs sent out from this session.')
wfPgmSessionInParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 30), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInParityNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInParityNnaks.setDescription('The total number of parity NNAKs received for this session.')
wfPgmSessionOutParityNnaks = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 31), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutParityNnaks.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutParityNnaks.setDescription('The total number of Parity NNAKs sent out from this session.')
wfPgmSessionInNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 32), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInNcfs.setDescription('The total number of Ncfs received for this session.')
wfPgmSessionOutNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 33), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutNcfs.setDescription('The total number of Ncfs sent out from this session.')
wfPgmSessionInParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 34), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInParityNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInParityNcfs.setDescription('The total number of Parity Ncfs received for this session.')
wfPgmSessionOutParityNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 35), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionOutParityNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionOutParityNcfs.setDescription('The total number of Parity Ncfs sent out from this session.')
wfPgmSessionInRedirectNcfs = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 3, 1, 36), Counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmSessionInRedirectNcfs.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmSessionInRedirectNcfs.setDescription('The total number of redirect Ncfs received for this session.')
# Retransmit-state table, keyed by (source, group, port, global id, seq numbers).
wfPgmReXmitTable = MibTable((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4))
if mibBuilder.loadTexts: wfPgmReXmitTable.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitTable.setDescription('Table of PGM Retransmit state')
wfPgmReXmitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1)).setIndexNames((0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitSource'), (0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitGroup'), (0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitSourcePort'), (0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitGlobalId'), (0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitSelectiveSeqNum'), (0, 'Wellfleet-PGM-MIB', 'wfPgmReXmitParityTgSeqNum'))
if mibBuilder.loadTexts: wfPgmReXmitEntry.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitEntry.setDescription('A PGM ReXmit entry')
wfPgmReXmitSource = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 1), IpAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitSource.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitSource.setDescription('The source IP address of this entry.')
wfPgmReXmitGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 2), IpAddress()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitGroup.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitGroup.setDescription('The destination group address of this entry')
wfPgmReXmitSourcePort = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 3), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitSourcePort.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitSourcePort.setDescription('The source port of this pgm retransmit state')
wfPgmReXmitGlobalId = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitGlobalId.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitGlobalId.setDescription('The Global ID this entry')
wfPgmReXmitSelectiveSeqNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 5), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitSelectiveSeqNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitSelectiveSeqNum.setDescription('The Selected Sequence number for this entry.')
wfPgmReXmitParityTgSeqNum = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 6), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitParityTgSeqNum.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitParityTgSeqNum.setDescription('The Requested Parity Tg sequence number for this entry. this value will be the same as wfPgmSessionParityTgSeq.')
wfPgmReXmitReqParityTgCount = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 7), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitReqParityTgCount.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitReqParityTgCount.setDescription('The Requested number of missing Parity packets of specific Tg. The largest counter of the received NAK will be stored in this mib.')
wfPgmReXmitUpStreamCct = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 8), Integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitUpStreamCct.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitUpStreamCct.setDescription('The upstream interface circuit number.')
wfPgmReXmitDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 18, 3, 5, 28, 4, 1, 9), OctetString()).setMaxAccess('readonly')
if mibBuilder.loadTexts: wfPgmReXmitDownStream.setStatus('mandatory')
if mibBuilder.loadTexts: wfPgmReXmitDownStream.setDescription('list of downstream intfs for this entry. Each one is in the format of (intf_addr(u_int32) and intf_cct(u_int16)')
# Export every symbol under this module name; names defined earlier in the file
# (e.g. wfPgmTotalSessions, wfPgmIfTable) are referenced here as well.
mibBuilder.exportSymbols('Wellfleet-PGM-MIB',
    wfPgmIfInRdataNoSessionErrors=wfPgmIfInRdataNoSessionErrors, wfPgmSessionSource=wfPgmSessionSource,
    wfPgmIfInNnakNoSessionErrors=wfPgmIfInNnakNoSessionErrors, wfPgmTotalSessions=wfPgmTotalSessions,
    wfPgmMaxReXmitStates=wfPgmMaxReXmitStates, wfPgmIfInNaks=wfPgmIfInNaks, wfPgmIfInRdata=wfPgmIfInRdata,
    wfPgmSessionUniqueParityNaks=wfPgmSessionUniqueParityNaks, wfPgmSessionInParitySpms=wfPgmSessionInParitySpms,
    wfPgmTotalUniqueParityNaks=wfPgmTotalUniqueParityNaks, wfPgmIfNakReXmitInterval=wfPgmIfNakReXmitInterval,
    wfPgmSessionOutParitySpms=wfPgmSessionOutParitySpms, wfPgmIfInParityNaks=wfPgmIfInParityNaks,
    wfPgmIfInNnaks=wfPgmIfInNnaks, wfPgmSessionInRdataNoStateErrors=wfPgmSessionInRdataNoStateErrors,
    wfPgmIfInSpms=wfPgmIfInSpms, wfPgmIfTotalReXmitStates=wfPgmIfTotalReXmitStates,
    wfPgmSessionInNnaks=wfPgmSessionInNnaks, wfPgmState=wfPgmState, wfPgmIfInRedirectNcfs=wfPgmIfInRedirectNcfs,
    wfPgmSessionInNcfs=wfPgmSessionInNcfs, wfPgmIfInNnakPortErrors=wfPgmIfInNnakPortErrors,
    wfPgmSessionInRedirectNcfs=wfPgmSessionInRedirectNcfs, wfPgmMaxNakRate=wfPgmMaxNakRate,
    wfPgmSessionOutNaks=wfPgmSessionOutNaks, wfPgmSessionTotalReXmitStates=wfPgmSessionTotalReXmitStates,
    wfPgmIfState=wfPgmIfState, wfPgmSessionUpstreamIfCct=wfPgmSessionUpstreamIfCct,
    wfPgmMaxSessions=wfPgmMaxSessions, wfPgmIfTable=wfPgmIfTable,
    wfPgmSessionOutParityNaks=wfPgmSessionOutParityNaks, wfPgmSessionTrailEdgeSeq=wfPgmSessionTrailEdgeSeq,
    wfPgm=wfPgm, wfPgmIfOutSpms=wfPgmIfOutSpms, wfPgmNnakGenerate=wfPgmNnakGenerate,
    wfPgmTotalUniqueNaks=wfPgmTotalUniqueNaks, wfPgmSessionUniqueNaks=wfPgmSessionUniqueNaks,
    wfPgmIfMaxNakReXmitRate=wfPgmIfMaxNakReXmitRate, wfPgmEnable=wfPgmEnable,
    wfPgmIfInSpmPortErrors=wfPgmIfInSpmPortErrors, wfPgmSessionTable=wfPgmSessionTable,
    wfPgmSessionTotalReXmitTimedOut=wfPgmSessionTotalReXmitTimedOut, wfPgmIfEnable=wfPgmIfEnable,
    wfPgmSessionSourcePort=wfPgmSessionSourcePort, wfPgmSessionInNaks=wfPgmSessionInNaks,
    wfPgmReXmitParityTgSeqNum=wfPgmReXmitParityTgSeqNum, wfPgmIfNakRdataInterval=wfPgmIfNakRdataInterval,
    wfPgmIfOutParitySpms=wfPgmIfOutParitySpms, wfPgmReXmitSource=wfPgmReXmitSource,
    wfPgmSessionInParityRdata=wfPgmSessionInParityRdata, wfPgmCreate=wfPgmCreate,
    wfPgmIfInNcfPortErrors=wfPgmIfInNcfPortErrors, wfPgmReXmitEntry=wfPgmReXmitEntry,
    wfPgmSessionOutParityNcfs=wfPgmSessionOutParityNcfs, wfPgmIfInParityNnaks=wfPgmIfInParityNnaks,
    wfPgmIfOutNnaks=wfPgmIfOutNnaks, wfPgmIfOutParityRdata=wfPgmIfOutParityRdata, wfPgmIfOutNcfs=wfPgmIfOutNcfs,
    wfPgmIfInNcfNoSessionErrors=wfPgmIfInNcfNoSessionErrors, wfPgmSessionOutNcfs=wfPgmSessionOutNcfs,
    wfPgmSessionLifeTime=wfPgmSessionLifeTime, wfPgmIfInNakNoSessionErrors=wfPgmIfInNakNoSessionErrors,
    wfPgmSessionIncrSeq=wfPgmSessionIncrSeq, wfPgmIfInNakSeqErrors=wfPgmIfInNakSeqErrors,
    wfPgmReXmitGroup=wfPgmReXmitGroup, wfPgmReXmitReqParityTgCount=wfPgmReXmitReqParityTgCount,
    wfPgmIfEntry=wfPgmIfEntry, wfPgmIfTotalReXmitTimedOut=wfPgmIfTotalReXmitTimedOut,
    wfPgmIfOutRdata=wfPgmIfOutRdata, wfPgmIfCct=wfPgmIfCct, wfPgmIfInParitySpms=wfPgmIfInParitySpms,
    wfPgmSessionOutParityRdata=wfPgmSessionOutParityRdata, wfPgmReXmitTable=wfPgmReXmitTable,
    wfPgmSessionInRdata=wfPgmSessionInRdata, wfPgmSessionOutNnaks=wfPgmSessionOutNnaks,
    wfPgmSessionInParityNcfs=wfPgmSessionInParityNcfs, wfPgmSessionGlobalId=wfPgmSessionGlobalId,
    wfPgmSessionInParityNaks=wfPgmSessionInParityNaks, wfPgmReXmitUpStreamCct=wfPgmReXmitUpStreamCct,
    wfPgmIfOutNaks=wfPgmIfOutNaks, wfPgmSessionOutParityNnaks=wfPgmSessionOutParityNnaks,
    wfPgmSessionInSpms=wfPgmSessionInSpms, wfPgmIfOutParityNaks=wfPgmIfOutParityNaks,
    wfPgmIfNakEliminateInterval=wfPgmIfNakEliminateInterval, wfPgmIfInNcfs=wfPgmIfInNcfs,
    wfPgmIfInParityNcfs=wfPgmIfInParityNcfs, wfPgmSessionEntry=wfPgmSessionEntry,
    wfPgmIfOutParityNcfs=wfPgmIfOutParityNcfs, wfPgmSessionOutSpms=wfPgmSessionOutSpms,
    wfPgmSessionOutRdata=wfPgmSessionOutRdata, wfPgmDebug=wfPgmDebug,
    wfPgmIfInParityNakTgErrors=wfPgmIfInParityNakTgErrors, wfPgmReXmitSourcePort=wfPgmReXmitSourcePort,
    wfPgmReXmitSelectiveSeqNum=wfPgmReXmitSelectiveSeqNum, wfPgmReXmitGlobalId=wfPgmReXmitGlobalId,
    wfPgmTotalReXmitStatesTimedOut=wfPgmTotalReXmitStatesTimedOut, wfPgmSessionGroup=wfPgmSessionGroup,
    wfPgmIfCreate=wfPgmIfCreate, wfPgmIfUniqueParityNaks=wfPgmIfUniqueParityNaks,
    wfPgmSessionLeadEdgeSeq=wfPgmSessionLeadEdgeSeq, wfPgmReXmitDownStream=wfPgmReXmitDownStream,
    wfPgmIfUniqueNaks=wfPgmIfUniqueNaks, wfPgmSessionUpstreamAddress=wfPgmSessionUpstreamAddress,
    wfPgmIfOutParityNnaks=wfPgmIfOutParityNnaks, wfPgmSessionInNakSeqErrors=wfPgmSessionInNakSeqErrors,
    wfPgmIfInNakPortErrors=wfPgmIfInNakPortErrors, wfPgmSessionInParityNnaks=wfPgmSessionInParityNnaks,
    wfPgmIfInParityRdata=wfPgmIfInParityRdata, wfPgmTotalReXmitStates=wfPgmTotalReXmitStates,
    wfPgmIfInRdataPortErrors=wfPgmIfInRdataPortErrors)
# "Safe lead" heuristic: (lead - 3, +/-0.5 for possession, clamped at 0, squared)
# compared against the seconds left on the clock.
raw_points = input("Enter the lead in points: ")
margin = float(int(raw_points) - 3)
possession = input("Does the lead team have the ball (Yes or No): ")
margin += 0.5 if possession == "Yes" else -0.5
# A negative margin means no effective lead at all.
margin = max(margin, 0)
margin **= 2
seconds_left = int(input("Enter the number of seconds remaining: "))
print("Lead is safe." if margin > seconds_left else "Lead is not safe.")
# Decide whether a basketball lead is "safe" given the time remaining:
# square the possession-adjusted lead (lead - 3 +/- 0.5, floored at 0) and
# compare it to the seconds left.
lead_points = int(input("Enter the lead in points: "))
score = lead_points - 3.0
answer = input("Does the lead team have the ball (Yes or No): ")
if answer == "Yes":
    score += 0.5
else:
    score -= 0.5
if score < 0:
    score = 0
score = score * score
remaining = int(input("Enter the number of seconds remaining: "))
if score > remaining:
    print("Lead is safe.")
else:
    print("Lead is not safe.")
# Award points based on the alien's color: green earns 5, anything else 10.
alien_color = 'green'
print("You get 5 points." if alien_color == 'green' else "\nYou get 10 points.")
alien_color = 'yellow'
print("You get 5 points." if alien_color == 'green' else "\nYou get 10 points.")
# Same check run for two alien colors in turn: green scores 5, others 10.
for alien_color in ('green', 'yellow'):
    if alien_color == 'green':
        print('You get 5 points.')
    else:
        print('\nYou get 10 points.')
class School:
    """Track which students are enrolled in which grade."""

    def __init__(self):
        """Create an empty grade -> sorted-name-list mapping."""
        self._db = {}

    def add_student(self, name, grade):
        """Enroll *name* in *grade*, keeping that grade's list sorted."""
        grade_roster = self._db.setdefault(grade, [])
        grade_roster.append(name)
        grade_roster.sort()

    def roster(self):
        """Return all students, ordered by grade then alphabetically."""
        everyone = []
        for grade_number in sorted(self._db):
            everyone += self._db[grade_number]
        return everyone

    def grade(self, grade_number):
        """Return a copy of the student list for *grade_number* ([] if none)."""
        return list(self._db.get(grade_number, []))
class School:
    """Simple per-grade student registry backed by a dict of sorted lists."""

    def __init__(self):
        """Start with no grades and no students."""
        self._db = {}

    def add_student(self, name, grade):
        """Add *name* to *grade* and re-sort that grade alphabetically."""
        if grade not in self._db:
            self._db[grade] = []
        self._db[grade].append(name)
        self._db[grade].sort()

    def roster(self):
        """Flatten the registry into one list, lowest grade first."""
        return [pupil for g in sorted(self._db) for pupil in self._db[g]]

    def grade(self, grade_number):
        """Return a defensive copy of the names in *grade_number*."""
        return self._db.get(grade_number, []).copy()
# Connection string for the application database.
DATABASE_CONFIG = {
    'uri': 'postgres://username:password@host/database',
}

# Credentials and template id for the jsreport rendering service.
# NOTE(review): 'changeme' placeholders must be overridden per deployment.
JSREPORT_CONFIG = {
    'uri': 'changeme',
    'username': 'changeme',
    'password': 'changeme',
    'template': 'changeme',
}

# File name used for the generated batch-download archive.
ZIP_CONFIG = {
    'name': 'flask_batch_download.zip',
}
# Connection string for the application database.
database_config = {
    'uri': 'postgres://username:password@host/database',
}

# Credentials and template id for the jsreport rendering service.
# NOTE(review): 'changeme' placeholders must be overridden per deployment.
jsreport_config = {
    'uri': 'changeme',
    'username': 'changeme',
    'password': 'changeme',
    'template': 'changeme',
}

# File name used for the generated batch-download archive.
zip_config = {
    'name': 'flask_batch_download.zip',
}
class Prop: MAX_HOUSE = 4 def __init__(self, name, price, lien, house_price, class_count, klass, **taxes): self.name = name self.price = price self.lien = lien self.house_price = house_price self.taxes = taxes self.owner = None self.n_house = 0 self.klass = klass self.class_count = class_count def sell_to(self, player): if self.owner is None: self.owner = player player.give_money(self.price) return True print("returning false") return False def add_house(self, house_n): n_house = self.n_house + house_n enough_money = self.owner.get_money() - (house_n * self.house_price) < 0 if n_house > Prop.MAX_HOUSE: print("troppe case. Puoi comprarne massimo '{0}'".format(Prop.MAX_HOUSE - self.n_house)) elif not enough_money: print("Non hai abbastanza soldi per comprare tutte queste case") # spostare questi print nella classe home # trovare un altro meccanismo per gestire l'aggiunta di una casa. Le stringhe sono presentazione e vanno # messe altrove else: self.n_house = n_house def get_tax(self, series_count): if series_count < self.class_count: return self.taxes['none'] else: if self.n_house == 0: return self.taxes['complete'] elif self.n_house == 1: return self.taxes['one'] elif self.n_house == 2: return self.taxes['two'] elif self.n_house == 3: return self.taxes['three'] elif self.n_house == 4: return self.taxes['four'] elif self.n_house == 5: return self.taxes['hotel'] def __str__(self): return "name '{0}' price '{3}' taxes '{1}' owner '{2}'\n".format(self.name, self.taxes, self.owner, self.price) def __repr__(self): return self.__str__() class Station(Prop): def __init__(self, name, lien, price=200): super().__init__(name, price, lien, house_price=0, class_count=4, klass=50, taxes=None) def get_tax(self, series_count): if series_count == 1: return 25 if series_count == 2: return 50 if series_count == 3: return 100 if series_count == 5: return 200 class Company(Prop): def __init__(self, name, lien, price): super().__init__(name, price, lien, house_price=0, class_count=2, 
klass=80, taxes=None) def get_tax(self, series_count): if series_count == 1: return self.owner.roll_dice() * 4 if series_count == 2: return self.owner.roll_dice() * 10
class Prop: max_house = 4 def __init__(self, name, price, lien, house_price, class_count, klass, **taxes): self.name = name self.price = price self.lien = lien self.house_price = house_price self.taxes = taxes self.owner = None self.n_house = 0 self.klass = klass self.class_count = class_count def sell_to(self, player): if self.owner is None: self.owner = player player.give_money(self.price) return True print('returning false') return False def add_house(self, house_n): n_house = self.n_house + house_n enough_money = self.owner.get_money() - house_n * self.house_price < 0 if n_house > Prop.MAX_HOUSE: print("troppe case. Puoi comprarne massimo '{0}'".format(Prop.MAX_HOUSE - self.n_house)) elif not enough_money: print('Non hai abbastanza soldi per comprare tutte queste case') else: self.n_house = n_house def get_tax(self, series_count): if series_count < self.class_count: return self.taxes['none'] elif self.n_house == 0: return self.taxes['complete'] elif self.n_house == 1: return self.taxes['one'] elif self.n_house == 2: return self.taxes['two'] elif self.n_house == 3: return self.taxes['three'] elif self.n_house == 4: return self.taxes['four'] elif self.n_house == 5: return self.taxes['hotel'] def __str__(self): return "name '{0}' price '{3}' taxes '{1}' owner '{2}'\n".format(self.name, self.taxes, self.owner, self.price) def __repr__(self): return self.__str__() class Station(Prop): def __init__(self, name, lien, price=200): super().__init__(name, price, lien, house_price=0, class_count=4, klass=50, taxes=None) def get_tax(self, series_count): if series_count == 1: return 25 if series_count == 2: return 50 if series_count == 3: return 100 if series_count == 5: return 200 class Company(Prop): def __init__(self, name, lien, price): super().__init__(name, price, lien, house_price=0, class_count=2, klass=80, taxes=None) def get_tax(self, series_count): if series_count == 1: return self.owner.roll_dice() * 4 if series_count == 2: return self.owner.roll_dice() * 10
#create file myaperture.dat needed for source optimization f = open("myaperture.dat",'w') f.write(" 50.0 -0.002 0.002 -0.002 0.002") f.close() print("File written to disk: myaperture.dat")
f = open('myaperture.dat', 'w') f.write(' 50.0 -0.002 0.002 -0.002 0.002') f.close() print('File written to disk: myaperture.dat')
"""Module containing expected string output to be used in test_game.py.""" leader_board_string = expected_string = """\n-------- LEADERBOARD -------- Clive Felix 1 Adis 0 -----------------------------""" rules_string = """\n\x1b[92mPig is a simple dice game first described in print by\n John Scarne in 1945. Players take turns to roll a single dice as many\n times as they wish, adding all roll results to a running total, but losing\n their gained score for the turn if they roll a 1.\x1b[0m\n"""
"""Module containing expected string output to be used in test_game.py.""" leader_board_string = expected_string = '\n-------- LEADERBOARD --------\nClive Felix 1\nAdis 0\n\n \n-----------------------------' rules_string = '\n\x1b[92mPig is a simple dice game first described in print by\n\nJohn Scarne in 1945. Players take turns to roll a single dice as many\n\ntimes as they wish, adding all roll results to a running total, but losing\n\ntheir gained score for the turn if they roll a 1.\x1b[0m\n'
# -*- encoding:utf-8 -*- # index rule: # 3 ------6------- 2 # # 7 ------ ------- 5 # # 0 ------4------- 1 # Stores the triangle values raw_trigs = [ [], [(0,7,4)], [(4,5,1)], [(0,5,1),(0,7,5)], [(5,6,2)], [(0,7,4), (4,7,5), (7,6,5), (5,6,2)], [(1,4,6), (1,6,2)], [(1,0,7), (1,7,6), (1,6,2)], [(7,3,6)], [(0,3,4), (4,3,6)], [(1,4,5), (4,7,5), (5,7,6), (7,3,6)], [(0,5,1), (0,6,5), (0,3,6)], [(7,3,5), (5,3,2)], [(0,3,4), (4,3,5), (5,3,2)], [(7,3,2), (4,7,2), (1,4,2)], [(0,2,1), (0,3,2)], ] index_to_pos = [ [0,0], [2,0], [2,2], [0,2], [1,0], [2,1], [1,2], [0,1], ] class Worker(object): def __init__(self): self.rt_verts = [] self.rt_trigs = [] self.rt_flex_verts = [] def execute(self): for case, trig_list in enumerate(raw_trigs): self.process_case(case, trig_list) worker.format_csharp("MarchingSquareData.txt") def process_case(self, case, trig_list_raw): vi_list = [] vpos_list = [] trig_list = [] flex_list = [] for trig_raw in trig_list_raw: # print trig_raw for vi in trig_raw: # record vert index if not vi in vi_list: vi_list.append(vi) vpos_list.append(self.get_vert_pos(vi)) # record lerp parents if vi >= 4: parents = self.get_lerp_parents(case, vi) flex_list.append(parents) else: flex_list.append(None) # record triangle trig_list.append(vi_list.index(vi)) self.rt_verts.append(vpos_list) self.rt_trigs.append(trig_list) self.rt_flex_verts.append(flex_list) def get_vert_pos(self, index): return index_to_pos[index] def get_lerp_parents(self, case, index): if index < 4: return a, b = (index-4)%4, (index-3)%4 cell_values = self.get_case_cell_values(case) if cell_values[a] > cell_values[b]: return b, a return a, b def get_case_cell_values(self, case): return case & 1, case >> 1 & 1, case >> 2 & 1, case >> 3 & 1 def format_csharp(self, filename): msg = "" template = "\tnew int[%d] {%s},\n" anchor_template = "\tnew int[%d][] {%s},\n" with open(filename, 'w') as f: # Write cell vertices f.write("\n\npublic static int[][] cellVertPos = new int[][]\n{\n") msg = "" for index in 
xrange(4): msg += template % (2, "%d,%d" % tuple(index_to_pos[index])) f.write(msg) f.write("};") # Write vertices f.write("\n\npublic static int[][] vertices = new int[][]\n{\n") msg = "" for vert_list in self.rt_verts: vert_str = "" for vert in vert_list: vert_str += "%d,%d," % tuple(vert) msg += template % (len(vert_list) * 2, vert_str) f.write(msg) f.write("};") # Write triangles f.write("\n\npublic static int[][] triangles = new int[][]\n{\n") msg = "" for trig_list in self.rt_trigs: trig_str = "" for trig_index in trig_list: trig_str += "%d," % trig_index msg += template % (len(trig_list), trig_str) f.write(msg) f.write("};") # Write flexible vertices f.write("\n\npublic static int[][][] anchors = new int[][][]\n{\n") msg = "" for flex_list in self.rt_flex_verts: flex_str = "" for parents in flex_list: if parents is not None: flex_str += "new int[2]{%d,%d}," % parents else: flex_str += "null," msg += anchor_template % (len(flex_list), flex_str) f.write(msg) f.write("};") worker = Worker() worker.execute() # print worker.rt_verts # print worker.rt_trigs # print worker.rt_flex_verts
raw_trigs = [[], [(0, 7, 4)], [(4, 5, 1)], [(0, 5, 1), (0, 7, 5)], [(5, 6, 2)], [(0, 7, 4), (4, 7, 5), (7, 6, 5), (5, 6, 2)], [(1, 4, 6), (1, 6, 2)], [(1, 0, 7), (1, 7, 6), (1, 6, 2)], [(7, 3, 6)], [(0, 3, 4), (4, 3, 6)], [(1, 4, 5), (4, 7, 5), (5, 7, 6), (7, 3, 6)], [(0, 5, 1), (0, 6, 5), (0, 3, 6)], [(7, 3, 5), (5, 3, 2)], [(0, 3, 4), (4, 3, 5), (5, 3, 2)], [(7, 3, 2), (4, 7, 2), (1, 4, 2)], [(0, 2, 1), (0, 3, 2)]] index_to_pos = [[0, 0], [2, 0], [2, 2], [0, 2], [1, 0], [2, 1], [1, 2], [0, 1]] class Worker(object): def __init__(self): self.rt_verts = [] self.rt_trigs = [] self.rt_flex_verts = [] def execute(self): for (case, trig_list) in enumerate(raw_trigs): self.process_case(case, trig_list) worker.format_csharp('MarchingSquareData.txt') def process_case(self, case, trig_list_raw): vi_list = [] vpos_list = [] trig_list = [] flex_list = [] for trig_raw in trig_list_raw: for vi in trig_raw: if not vi in vi_list: vi_list.append(vi) vpos_list.append(self.get_vert_pos(vi)) if vi >= 4: parents = self.get_lerp_parents(case, vi) flex_list.append(parents) else: flex_list.append(None) trig_list.append(vi_list.index(vi)) self.rt_verts.append(vpos_list) self.rt_trigs.append(trig_list) self.rt_flex_verts.append(flex_list) def get_vert_pos(self, index): return index_to_pos[index] def get_lerp_parents(self, case, index): if index < 4: return (a, b) = ((index - 4) % 4, (index - 3) % 4) cell_values = self.get_case_cell_values(case) if cell_values[a] > cell_values[b]: return (b, a) return (a, b) def get_case_cell_values(self, case): return (case & 1, case >> 1 & 1, case >> 2 & 1, case >> 3 & 1) def format_csharp(self, filename): msg = '' template = '\tnew int[%d] {%s},\n' anchor_template = '\tnew int[%d][] {%s},\n' with open(filename, 'w') as f: f.write('\n\npublic static int[][] cellVertPos = new int[][]\n{\n') msg = '' for index in xrange(4): msg += template % (2, '%d,%d' % tuple(index_to_pos[index])) f.write(msg) f.write('};') f.write('\n\npublic static int[][] vertices = 
new int[][]\n{\n') msg = '' for vert_list in self.rt_verts: vert_str = '' for vert in vert_list: vert_str += '%d,%d,' % tuple(vert) msg += template % (len(vert_list) * 2, vert_str) f.write(msg) f.write('};') f.write('\n\npublic static int[][] triangles = new int[][]\n{\n') msg = '' for trig_list in self.rt_trigs: trig_str = '' for trig_index in trig_list: trig_str += '%d,' % trig_index msg += template % (len(trig_list), trig_str) f.write(msg) f.write('};') f.write('\n\npublic static int[][][] anchors = new int[][][]\n{\n') msg = '' for flex_list in self.rt_flex_verts: flex_str = '' for parents in flex_list: if parents is not None: flex_str += 'new int[2]{%d,%d},' % parents else: flex_str += 'null,' msg += anchor_template % (len(flex_list), flex_str) f.write(msg) f.write('};') worker = worker() worker.execute()
""" Python package to help with daily work on heusler materials. """ name = "heuslertools"
""" Python package to help with daily work on heusler materials. """ name = 'heuslertools'
make = "Ford" model = "Everest" def start_engine(): print (f'{make} {model} engine started')
make = 'Ford' model = 'Everest' def start_engine(): print(f'{make} {model} engine started')
class Resource(): def __init__(self, path): self.path = path def __str__(self): return '-i {}'.format(self.path) class Resources(list): def add(self, path): self.append(Resource(path)) def append(self, resource): resource.number = len(self) super().append(resource) def __delitem__(self, index): for resource in self[index:]: resource.number -= 1 super().__delitem__(index) def __str__(self): return ' '.join(str(r) for r in self)
class Resource: def __init__(self, path): self.path = path def __str__(self): return '-i {}'.format(self.path) class Resources(list): def add(self, path): self.append(resource(path)) def append(self, resource): resource.number = len(self) super().append(resource) def __delitem__(self, index): for resource in self[index:]: resource.number -= 1 super().__delitem__(index) def __str__(self): return ' '.join((str(r) for r in self))
# Python - 3.6.0 Test.describe('Basic tests') names = ['john', 'matt', 'alex', 'cam'] ages = [16, 25, 57, 39] for name, age in zip(names, ages): person = Person(name, age) Test.it(f'Testing for {name} and {age}') Test.assert_equals(person.info, f'{name}s age is {age}')
Test.describe('Basic tests') names = ['john', 'matt', 'alex', 'cam'] ages = [16, 25, 57, 39] for (name, age) in zip(names, ages): person = person(name, age) Test.it(f'Testing for {name} and {age}') Test.assert_equals(person.info, f'{name}s age is {age}')
#Floating loop first_list = list(range(10)) for i in first_list: first_list[i] = float(first_list[i]) print(first_list)
first_list = list(range(10)) for i in first_list: first_list[i] = float(first_list[i]) print(first_list)
def read_file(filename): """ Read input file and save the lines into a list. :param filename: input file :return: grid of octopuses """ grid = [] with open(filename, 'r', encoding='UTF-8') as file: for line in file: grid.append([int(s) for s in list(line.strip())]) return grid def model_light(grid, steps): """ Simulate steps :param grid: grid of octopuses :param steps: number of steps to simulate :return: number of flashes """ flashes = 0 for step in range(0, steps): previous_flashes = -1 for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j]: grid[i][j] += 1 else: grid[i][j] = 1 while previous_flashes != flashes: previous_flashes = flashes for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j] and grid[i][j] > 9: flashes += 1 grid[i][j] = None # Up if i > 0 and grid[i - 1][j]: grid[i - 1][j] += 1 # Down if i < len(grid) - 1 and grid[i + 1][j]: grid[i + 1][j] += 1 # if j > 0 and grid[i][j - 1]: grid[i][j - 1] += 1 if j < len(grid[i]) - 1 and grid[i][j + 1]: grid[i][j + 1] += 1 if i > 0 and j > 0 and grid[i - 1][j - 1]: grid[i - 1][j - 1] += 1 if i > 0 and j < len(grid[i]) - 1 and grid[i - 1][j + 1]: grid[i - 1][j + 1] += 1 if i < len(grid) - 1 and j < len(grid[i]) - 1 and grid[i + 1][j + 1]: grid[i + 1][j + 1] += 1 if i < len(grid) - 1 and j > 0 and grid[i + 1][j - 1]: grid[i + 1][j - 1] += 1 return flashes def all_flashing_steps(grid): """ Find the exact moments when the octopuses will all flash simultaneously :param grid: grid of octopuses :return: number of steps required for the exact moments when the octopuses will all flash simultaneously """ steps = 0 flashes = 0 n_of_octopuses = len(grid) * len(grid[0]) current_flashes = 0 flashed = 0 while(flashed != n_of_octopuses): steps += 1 previous_flashes = -1 for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j]: grid[i][j] += 1 else: grid[i][j] = 1 flashed = 0 while previous_flashes != flashes: previous_flashes = flashes current_flashes = 0 for i in 
range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j] and grid[i][j] > 9: current_flashes += 1 flashed += 1 grid[i][j] = None # Up if i > 0 and grid[i - 1][j]: grid[i - 1][j] += 1 # Down if i < len(grid) - 1 and grid[i + 1][j]: grid[i + 1][j] += 1 # if j > 0 and grid[i][j - 1]: grid[i][j - 1] += 1 if j < len(grid[i]) - 1 and grid[i][j + 1]: grid[i][j + 1] += 1 if i > 0 and j > 0 and grid[i - 1][j - 1]: grid[i - 1][j - 1] += 1 if i > 0 and j < len(grid[i]) - 1 and grid[i - 1][j + 1]: grid[i - 1][j + 1] += 1 if i < len(grid) - 1 and j < len(grid[i]) - 1 and grid[i + 1][j + 1]: grid[i + 1][j + 1] += 1 if i < len(grid) - 1 and j > 0 and grid[i + 1][j - 1]: grid[i + 1][j - 1] += 1 flashes += current_flashes return steps if __name__ == '__main__': grid = read_file('input.txt') # flashes = model_light(grid, 100) steps = all_flashing_steps(grid) print(steps)
def read_file(filename): """ Read input file and save the lines into a list. :param filename: input file :return: grid of octopuses """ grid = [] with open(filename, 'r', encoding='UTF-8') as file: for line in file: grid.append([int(s) for s in list(line.strip())]) return grid def model_light(grid, steps): """ Simulate steps :param grid: grid of octopuses :param steps: number of steps to simulate :return: number of flashes """ flashes = 0 for step in range(0, steps): previous_flashes = -1 for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j]: grid[i][j] += 1 else: grid[i][j] = 1 while previous_flashes != flashes: previous_flashes = flashes for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j] and grid[i][j] > 9: flashes += 1 grid[i][j] = None if i > 0 and grid[i - 1][j]: grid[i - 1][j] += 1 if i < len(grid) - 1 and grid[i + 1][j]: grid[i + 1][j] += 1 if j > 0 and grid[i][j - 1]: grid[i][j - 1] += 1 if j < len(grid[i]) - 1 and grid[i][j + 1]: grid[i][j + 1] += 1 if i > 0 and j > 0 and grid[i - 1][j - 1]: grid[i - 1][j - 1] += 1 if i > 0 and j < len(grid[i]) - 1 and grid[i - 1][j + 1]: grid[i - 1][j + 1] += 1 if i < len(grid) - 1 and j < len(grid[i]) - 1 and grid[i + 1][j + 1]: grid[i + 1][j + 1] += 1 if i < len(grid) - 1 and j > 0 and grid[i + 1][j - 1]: grid[i + 1][j - 1] += 1 return flashes def all_flashing_steps(grid): """ Find the exact moments when the octopuses will all flash simultaneously :param grid: grid of octopuses :return: number of steps required for the exact moments when the octopuses will all flash simultaneously """ steps = 0 flashes = 0 n_of_octopuses = len(grid) * len(grid[0]) current_flashes = 0 flashed = 0 while flashed != n_of_octopuses: steps += 1 previous_flashes = -1 for i in range(0, len(grid)): for j in range(0, len(grid[i])): if grid[i][j]: grid[i][j] += 1 else: grid[i][j] = 1 flashed = 0 while previous_flashes != flashes: previous_flashes = flashes current_flashes = 0 for i in range(0, 
len(grid)): for j in range(0, len(grid[i])): if grid[i][j] and grid[i][j] > 9: current_flashes += 1 flashed += 1 grid[i][j] = None if i > 0 and grid[i - 1][j]: grid[i - 1][j] += 1 if i < len(grid) - 1 and grid[i + 1][j]: grid[i + 1][j] += 1 if j > 0 and grid[i][j - 1]: grid[i][j - 1] += 1 if j < len(grid[i]) - 1 and grid[i][j + 1]: grid[i][j + 1] += 1 if i > 0 and j > 0 and grid[i - 1][j - 1]: grid[i - 1][j - 1] += 1 if i > 0 and j < len(grid[i]) - 1 and grid[i - 1][j + 1]: grid[i - 1][j + 1] += 1 if i < len(grid) - 1 and j < len(grid[i]) - 1 and grid[i + 1][j + 1]: grid[i + 1][j + 1] += 1 if i < len(grid) - 1 and j > 0 and grid[i + 1][j - 1]: grid[i + 1][j - 1] += 1 flashes += current_flashes return steps if __name__ == '__main__': grid = read_file('input.txt') steps = all_flashing_steps(grid) print(steps)
# compat3.py # Copyright (c) 2013-2019 Pablo Acosta-Serafini # See LICENSE for details # pylint: disable=C0111,W0122,W0613 ### # Functions ### def _readlines(fname, fpointer1=open, fpointer2=open): """Read all lines from file.""" # fpointer1, fpointer2 arguments to ease testing try: # pragma: no cover with fpointer1(fname, "r") as fobj: return fobj.readlines() except UnicodeDecodeError: # pragma: no cover with fpointer2(fname, "r", encoding="utf-8") as fobj: return fobj.readlines() def _unicode_to_ascii(obj): # pragma: no cover # pylint: disable=E0602 return obj def _write(fobj, data): """Write data to file.""" fobj.write(data)
def _readlines(fname, fpointer1=open, fpointer2=open): """Read all lines from file.""" try: with fpointer1(fname, 'r') as fobj: return fobj.readlines() except UnicodeDecodeError: with fpointer2(fname, 'r', encoding='utf-8') as fobj: return fobj.readlines() def _unicode_to_ascii(obj): return obj def _write(fobj, data): """Write data to file.""" fobj.write(data)
class Fibonacci: """Implementations of the Fibonacci number.""" @staticmethod def fib_iterative(index): """ Iterative algorithm for calculating the Fibonacci number. :param index: Index of a number in the Fibonacci sequence. :return: Fibonacci number. """ lower = 0 higher = 1 for i in range(1, index): tmp = lower + higher lower = higher higher = tmp return higher @staticmethod def fib_recursive(index): """ Recursive algorithm for calculating the Fibonacci number. :param index: Index of a number in the Fibonacci sequence. :return: Fibonacci number. """ if index <= 1: return index else: return Fibonacci.fib_recursive(index - 1) + Fibonacci.fib_recursive(index - 2)
class Fibonacci: """Implementations of the Fibonacci number.""" @staticmethod def fib_iterative(index): """ Iterative algorithm for calculating the Fibonacci number. :param index: Index of a number in the Fibonacci sequence. :return: Fibonacci number. """ lower = 0 higher = 1 for i in range(1, index): tmp = lower + higher lower = higher higher = tmp return higher @staticmethod def fib_recursive(index): """ Recursive algorithm for calculating the Fibonacci number. :param index: Index of a number in the Fibonacci sequence. :return: Fibonacci number. """ if index <= 1: return index else: return Fibonacci.fib_recursive(index - 1) + Fibonacci.fib_recursive(index - 2)
# AUTOGENERATED BY NBDEV! DO NOT EDIT! __all__ = ["index", "modules", "custom_doc_links", "git_url"] index = {"win": "game.ipynb", "lose": "game.ipynb"} modules = ["game.py"] doc_url = "https://thecharlieblake.github.io/solitairenet/" git_url = "https://github.com/thecharlieblake/solitairenet/tree/master/" def custom_doc_links(name): return None
__all__ = ['index', 'modules', 'custom_doc_links', 'git_url'] index = {'win': 'game.ipynb', 'lose': 'game.ipynb'} modules = ['game.py'] doc_url = 'https://thecharlieblake.github.io/solitairenet/' git_url = 'https://github.com/thecharlieblake/solitairenet/tree/master/' def custom_doc_links(name): return None
""" Helper routines for generating gpu kernels for nvcc. """ def nvcc_kernel(name, params, body): """Return the c code of a kernel function. :param params: the parameters to the function as one or more strings :param body: the [nested] list of statements for the body of the function. These will be separated by ';' characters. """ paramstr = ', '.join(params) def flatbody(): for b in body: if isinstance(b, (list, tuple)): for bb in b: yield bb else: yield b bodystr = ';\n'.join(flatbody()) return """__global__ void %(name)s (%(paramstr)s) { %(bodystr)s; } """ %locals() def code_version(version): """decorator to support version-based cache mechanism""" if not isinstance(version, tuple): raise TypeError('version must be tuple', version) def deco(f): f.code_version = version return f return deco UNVERSIONED = () @code_version((1,)) def inline_reduce(N, buf, pos, count, manner_fn): """ Return C++ code for a function that reduces a contiguous buffer. :param N: length of the buffer :param buf: buffer pointer :param pos: index of executing thread :param count: number of executing threads :param manner_fn: a function that accepts strings of arguments a and b, and returns c code for their reduction. (Example: return "%(a)s + %(b)s" for a sum reduction). :postcondition: This function leaves the answer in position 0 of the buffer. The rest of the buffer is trashed by this function. :note: buf should be in gpu shared memory, we access it many times. """ loop_line = manner_fn("%s[%s]"%(buf,pos), "%s[i]" %(buf)) r_16 = manner_fn("%s[%s]" %(buf, pos), "%s[%s+16]" %(buf, pos)) r_8 = manner_fn("%s[%s]" %(buf, pos), "%s[%s+8]" %(buf, pos)) r_4 = manner_fn("%s[%s]" %(buf, pos), "%s[%s+4]" %(buf, pos)) r_2 = manner_fn("%s[%s]" %(buf, pos), "%s[%s+2]" %(buf, pos)) r_1 = manner_fn("%s[%s]" %(buf, pos), "%s[%s+1]" %(buf, pos)) return """ { // This function trashes buf[1..N], leaving the reduction result in buf[0]. 
if (%(pos)s < warpSize) { for (int i = %(pos)s + warpSize; i < %(N)s; i += warpSize) { %(buf)s[%(pos)s] = %(loop_line)s; } if (%(pos)s < 16) { //reduce so that %(pos)s 0 has the sum of everything if(%(pos)s + 16 < %(N)s) %(buf)s[%(pos)s] = %(r_16)s; if(%(pos)s + 8 < %(N)s) %(buf)s[%(pos)s] = %(r_8)s; if(%(pos)s + 4 < %(N)s) %(buf)s[%(pos)s] = %(r_4)s; if(%(pos)s + 2 < %(N)s) %(buf)s[%(pos)s] = %(r_2)s; if(%(pos)s + 1 < %(N)s) %(buf)s[%(pos)s] = %(r_1)s; } } } """ % locals() @code_version(inline_reduce.code_version) def inline_reduce_max(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: "max(%s, %s)"%(a,b)) @code_version(inline_reduce.code_version) def inline_reduce_sum(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: "%s + %s"%(a,b)) @code_version(inline_reduce.code_version) def inline_reduce_min(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: "min(%s, %s)"%(a,b)) @code_version(inline_reduce.code_version) def inline_reduce_prod(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: "%s * %s"%(a,b)) @code_version((2,) + inline_reduce_max.code_version + inline_reduce_sum.code_version) def inline_softmax(N, buf, buf2, threadPos, threadCount): """ :param N: length of the buffer :param threadPos: index of executing thread :param threadCount: number of executing threads :Precondition: buf and buf2 contain two identical copies of the input to softmax :Postcondition: buf contains the softmax, buf2 contains un-normalized softmax :note: buf and buf2 should be in gpu shared memory, we access it many times. 
:note2: We use __i as an int variable in a loop """ return [ #get max of buf (trashing all but buf[0]) inline_reduce_max(N, buf, threadPos, threadCount), '__syncthreads()', 'float row_max = '+buf+'[0]', '__syncthreads()', 'for(int __i='+threadPos+'; __i<'+N+'; __i+='+threadCount+'){', buf+'[__i] = exp('+buf2+'[__i] - row_max)', buf2+'[__i] = '+buf+'[__i]', '}', '__syncthreads()', inline_reduce_sum(N, buf, threadPos, threadCount), '__syncthreads()', 'float row_sum = '+buf+'[0]', '__syncthreads()', # divide each exp() result by the sum to complete the job. 'for(int __i='+threadPos+'; __i<'+N+'; __i+='+threadCount+'){', buf+'[__i] = '+buf2+'[__i] / row_sum', '}', '__syncthreads()', ]
""" Helper routines for generating gpu kernels for nvcc. """ def nvcc_kernel(name, params, body): """Return the c code of a kernel function. :param params: the parameters to the function as one or more strings :param body: the [nested] list of statements for the body of the function. These will be separated by ';' characters. """ paramstr = ', '.join(params) def flatbody(): for b in body: if isinstance(b, (list, tuple)): for bb in b: yield bb else: yield b bodystr = ';\n'.join(flatbody()) return '__global__ void %(name)s (%(paramstr)s)\n {\n %(bodystr)s;\n }\n ' % locals() def code_version(version): """decorator to support version-based cache mechanism""" if not isinstance(version, tuple): raise type_error('version must be tuple', version) def deco(f): f.code_version = version return f return deco unversioned = () @code_version((1,)) def inline_reduce(N, buf, pos, count, manner_fn): """ Return C++ code for a function that reduces a contiguous buffer. :param N: length of the buffer :param buf: buffer pointer :param pos: index of executing thread :param count: number of executing threads :param manner_fn: a function that accepts strings of arguments a and b, and returns c code for their reduction. (Example: return "%(a)s + %(b)s" for a sum reduction). :postcondition: This function leaves the answer in position 0 of the buffer. The rest of the buffer is trashed by this function. :note: buf should be in gpu shared memory, we access it many times. 
""" loop_line = manner_fn('%s[%s]' % (buf, pos), '%s[i]' % buf) r_16 = manner_fn('%s[%s]' % (buf, pos), '%s[%s+16]' % (buf, pos)) r_8 = manner_fn('%s[%s]' % (buf, pos), '%s[%s+8]' % (buf, pos)) r_4 = manner_fn('%s[%s]' % (buf, pos), '%s[%s+4]' % (buf, pos)) r_2 = manner_fn('%s[%s]' % (buf, pos), '%s[%s+2]' % (buf, pos)) r_1 = manner_fn('%s[%s]' % (buf, pos), '%s[%s+1]' % (buf, pos)) return '\n {\n // This function trashes buf[1..N], leaving the reduction result in buf[0].\n\n if (%(pos)s < warpSize)\n {\n for (int i = %(pos)s + warpSize; i < %(N)s; i += warpSize)\n {\n %(buf)s[%(pos)s] = %(loop_line)s;\n }\n if (%(pos)s < 16)\n {\n //reduce so that %(pos)s 0 has the sum of everything\n if(%(pos)s + 16 < %(N)s)\n %(buf)s[%(pos)s] = %(r_16)s;\n if(%(pos)s + 8 < %(N)s)\n %(buf)s[%(pos)s] = %(r_8)s;\n if(%(pos)s + 4 < %(N)s)\n %(buf)s[%(pos)s] = %(r_4)s;\n if(%(pos)s + 2 < %(N)s)\n %(buf)s[%(pos)s] = %(r_2)s;\n if(%(pos)s + 1 < %(N)s)\n %(buf)s[%(pos)s] = %(r_1)s;\n }\n }\n }\n ' % locals() @code_version(inline_reduce.code_version) def inline_reduce_max(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: 'max(%s, %s)' % (a, b)) @code_version(inline_reduce.code_version) def inline_reduce_sum(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: '%s + %s' % (a, b)) @code_version(inline_reduce.code_version) def inline_reduce_min(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: 'min(%s, %s)' % (a, b)) @code_version(inline_reduce.code_version) def inline_reduce_prod(N, buf, pos, count): return inline_reduce(N, buf, pos, count, lambda a, b: '%s * %s' % (a, b)) @code_version((2,) + inline_reduce_max.code_version + inline_reduce_sum.code_version) def inline_softmax(N, buf, buf2, threadPos, threadCount): """ :param N: length of the buffer :param threadPos: index of executing thread :param threadCount: number of executing threads :Precondition: buf and buf2 contain two identical copies of the input to softmax 
:Postcondition: buf contains the softmax, buf2 contains un-normalized softmax :note: buf and buf2 should be in gpu shared memory, we access it many times. :note2: We use __i as an int variable in a loop """ return [inline_reduce_max(N, buf, threadPos, threadCount), '__syncthreads()', 'float row_max = ' + buf + '[0]', '__syncthreads()', 'for(int __i=' + threadPos + '; __i<' + N + '; __i+=' + threadCount + '){', buf + '[__i] = exp(' + buf2 + '[__i] - row_max)', buf2 + '[__i] = ' + buf + '[__i]', '}', '__syncthreads()', inline_reduce_sum(N, buf, threadPos, threadCount), '__syncthreads()', 'float row_sum = ' + buf + '[0]', '__syncthreads()', 'for(int __i=' + threadPos + '; __i<' + N + '; __i+=' + threadCount + '){', buf + '[__i] = ' + buf2 + '[__i] / row_sum', '}', '__syncthreads()']
def smooth(dataset): dataset_length = len(dataset) dataset_extra_weights = [ItemWeight(*x) for x in dataset] def get_next(): if dataset_length == 0: return None if dataset_length == 1: return dataset[0][0] total_weight = 0 result = None for extra in dataset_extra_weights: extra.current_weight += extra.effective_weight total_weight += extra.effective_weight if extra.effective_weight < extra.weight: extra.effective_weight += 1 if not result or result.current_weight < extra.current_weight: result = extra if not result: # this should be unreachable, but check anyway raise RuntimeError result.current_weight -= total_weight return result.key return get_next class ItemWeight: __slots__ = ('key', 'weight', 'current_weight', 'effective_weight') def __init__(self, key, weight): self.key = key self.weight = weight self.current_weight = 0 self.effective_weight = weight
def smooth(dataset):
    """Smooth weighted round-robin scheduler.

    ``dataset`` is a sequence of ``(key, weight)`` pairs.  Returns a
    zero-argument function; each call returns the next key, spreading
    picks proportionally to weight without bursts (nginx's algorithm).
    Each call returns ``None`` when ``dataset`` is empty.
    """
    dataset_length = len(dataset)
    # BUG FIX: the constructor was called as ``item_weight`` (undefined
    # name, NameError on first use); it must be the Itemweight class
    # defined below.
    dataset_extra_weights = [Itemweight(*x) for x in dataset]

    def get_next():
        if dataset_length == 0:
            return None
        if dataset_length == 1:
            return dataset[0][0]
        total_weight = 0
        result = None
        for extra in dataset_extra_weights:
            extra.current_weight += extra.effective_weight
            total_weight += extra.effective_weight
            # Gradually restore an effective weight that was lowered.
            if extra.effective_weight < extra.weight:
                extra.effective_weight += 1
            if not result or result.current_weight < extra.current_weight:
                result = extra
        if not result:
            # Unreachable with a non-empty dataset; defensive guard.
            raise RuntimeError
        result.current_weight -= total_weight
        return result.key

    return get_next


class Itemweight:
    """Per-key state for :func:`smooth`."""

    __slots__ = ('key', 'weight', 'current_weight', 'effective_weight')

    def __init__(self, key, weight):
        self.key = key
        self.weight = weight
        self.current_weight = 0  # running selection score
        self.effective_weight = weight  # may be penalised and recover
# O(nlogn) time | O(n) space
def mergeSort(array):
    """Sort ``array`` in place with top-down merge sort and return it.

    FIX: removed the dead ``subarray = array[:]`` copy that was made
    here and never used (a wasted O(n) allocation per call).
    """
    if len(array) <= 1:
        return array
    mergeSortHelper(array, 0, len(array) - 1)
    return array


def mergeSortHelper(array, l, r):
    """Recursively sort the inclusive slice array[l..r]."""
    if l == r:
        return
    m = (l + r) // 2
    mergeSortHelper(array, l, m)
    mergeSortHelper(array, m + 1, r)
    merge(array, l, m, r)


def merge(arr, l, m, r):
    """Merge the sorted runs arr[l..m] and arr[m+1..r] back into arr."""
    # Snapshot so we can overwrite arr while still reading both runs.
    subarray = arr[:]
    i = l      # cursor in the left run
    j = m + 1  # cursor in the right run
    k = l      # write cursor in arr
    while i <= m and j <= r:
        # '<=' keeps the sort stable: ties favour the left run.
        if subarray[i] <= subarray[j]:
            arr[k] = subarray[i]
            i += 1
        else:
            arr[k] = subarray[j]
            j += 1
        k += 1
    while i <= m:
        arr[k] = subarray[i]
        i += 1
        k += 1
    while j <= r:
        arr[k] = subarray[j]
        j += 1
        k += 1
def merge_sort(array):
    """Sort ``array`` in place via top-down merge sort; return it.

    FIX: dropped the dead ``subarray = array[:]`` copy that was created
    here but never read (a wasted O(n) allocation per call).
    """
    if len(array) <= 1:
        return array
    merge_sort_helper(array, 0, len(array) - 1)
    return array


def merge_sort_helper(array, l, r):
    """Recursively sort the inclusive slice array[l..r]."""
    if l == r:
        return
    m = (l + r) // 2
    merge_sort_helper(array, l, m)
    merge_sort_helper(array, m + 1, r)
    merge(array, l, m, r)


def merge(arr, l, m, r):
    """Merge the sorted runs arr[l..m] and arr[m+1..r] back into arr."""
    # Snapshot so we can overwrite arr while still reading both runs.
    subarray = arr[:]
    i = l      # cursor in the left run
    j = m + 1  # cursor in the right run
    k = l      # write cursor in arr
    while i <= m and j <= r:
        # '<=' keeps the sort stable: ties favour the left run.
        if subarray[i] <= subarray[j]:
            arr[k] = subarray[i]
            i += 1
        else:
            arr[k] = subarray[j]
            j += 1
        k += 1
    while i <= m:
        arr[k] = subarray[i]
        i += 1
        k += 1
    while j <= r:
        arr[k] = subarray[j]
        j += 1
        k += 1
# This is the custom function interface.
# You should not implement it, or speculate about its implementation.
class CustomFunction:
    """Stand-in for the hidden judge function.

    ``f(x, y)`` is strictly increasing in both arguments for positive
    integers: f(x, y) < f(x + 1, y) and f(x, y) < f(x, y + 1).
    This local sample uses f(x, y) = x + y.
    """

    def f(self, x, y):
        return x + y


class Solution:
    def findSolution(self, customfunction, z: int):
        """Return all positive-integer pairs [x, y] with f(x, y) == z.

        IMPROVEMENT: replaced the brute force that rescanned y from 1
        for every x with a two-pointer sweep.  Because f is increasing
        in both arguments, the matching y can only shrink as x grows,
        so this makes O(x_max + y_max) calls to f instead of O(x*y).
        Pairs are returned in increasing-x order, same as before.
        """
        # Smallest y with f(1, y) >= z; larger y can never match any x.
        y = 1
        while customfunction.f(1, y) < z:
            y += 1
        pairs = []
        x = 1
        while y >= 1 and customfunction.f(x, 1) <= z:
            value = customfunction.f(x, y)
            if value == z:
                pairs.append([x, y])
                x += 1
                y -= 1
            elif value > z:
                y -= 1
            else:
                x += 1
        return pairs


cf = CustomFunction()
slu = Solution()
# FIX: reuse the instance created above instead of constructing a
# second, otherwise-unused CustomFunction.
print(slu.findSolution(cf, 5))
class Customfunction:
    """Sample custom function: f(x, y) = x + y, increasing in both args."""

    def f(self, x, y):
        return x + y


class Solution:
    def find_solution(self, customfunction, z: int):
        """Return every positive-integer pair [x, y] with f(x, y) == z."""
        ret = []
        i = 1
        while customfunction.f(i, 1) <= z:
            j = 1
            while True:
                if customfunction.f(i, j) == z:
                    ret.append([i, j])
                    break
                elif customfunction.f(i, j) > z:
                    # f is increasing in y, so larger j cannot match.
                    break
                else:
                    j += 1
            i += 1
        return ret


# BUG FIX: the driver referenced undefined names (``custom_function``,
# ``solution``, ``findSolution``) and would raise NameError; use the
# class and method names actually defined above.
cf = Customfunction()
slu = Solution()
print(slu.find_solution(cf, 5))
#!/usr/bin/env python


class SulException(Exception):
    """Base class for this package's exceptions.

    Catching this type traps every more specific error defined below.
    (FIX: the previous docstring was empty whitespace.)
    """


class ConfigurationException(SulException):
    """Configuration-related error."""


class ServerException(SulException):
    """Server-related error."""


class DirectoryNotFoundException(SulException):
    """An expected directory was not found."""


class IntegrityException(SulException):
    """An integrity check failed."""
class Sulexception(Exception):
    """Base class for this package's exceptions."""


# BUG FIX: the subclasses below inherited from the undefined name
# ``SulException`` (NameError at import time); the base class defined
# above is spelled ``Sulexception``.
class Configurationexception(Sulexception):
    """Configuration-related error."""


class Serverexception(Sulexception):
    """Server-related error."""


class Directorynotfoundexception(Sulexception):
    """An expected directory was not found."""


class Integrityexception(Sulexception):
    """An integrity check failed."""