content
stringlengths 7
1.05M
| fixed_cases
stringlengths 1
1.28M
|
|---|---|
def Values_Sum_Greater(Test_Dict):
return sum(list(Test_Dict.keys())) < sum(list(Test_Dict.values()))
Test_Dict = {5: 3, 1: 3, 10: 4, 7: 3, 8: 1, 9: 5}
print(Values_Sum_Greater(Test_Dict))
|
def values__sum__greater(Test_Dict):
return sum(list(Test_Dict.keys())) < sum(list(Test_Dict.values()))
test__dict = {5: 3, 1: 3, 10: 4, 7: 3, 8: 1, 9: 5}
print(values__sum__greater(Test_Dict))
|
class Concept:
ID = None
descriptions = None
definition = None
def __init__(self, ID=None, descriptions=None, definition = None):
self.ID = ID
self.descriptions = [] if descriptions is None else descriptions
self.definition = None
class Description:
ID = None
concept_ID = None
term = None
def __init__(self, ID=None, concept_ID=None, term=None):
self.ID = ID
self.concept_ID = concept_ID
self.term = term
class Definition:
ID = None
concept_ID = None
text = None
def __init__(self, ID=None, concept_ID=None, text=None):
self.ID = ID
self.concept_ID = concept_ID
self.text = text
|
class Concept:
id = None
descriptions = None
definition = None
def __init__(self, ID=None, descriptions=None, definition=None):
self.ID = ID
self.descriptions = [] if descriptions is None else descriptions
self.definition = None
class Description:
id = None
concept_id = None
term = None
def __init__(self, ID=None, concept_ID=None, term=None):
self.ID = ID
self.concept_ID = concept_ID
self.term = term
class Definition:
id = None
concept_id = None
text = None
def __init__(self, ID=None, concept_ID=None, text=None):
self.ID = ID
self.concept_ID = concept_ID
self.text = text
|
expected_output = {
'pvst': {
'a': {
'pvst_id': 'a',
'vlans': {
2: {
'vlan_id': 2,
'designated_root_priority': 32768,
'designated_root_address': '0021.1bff.d973',
'designated_root_max_age': 20,
'designated_root_forward_delay': 15,
'bridge_priority': 32768,
'sys_id_ext': 0,
'bridge_address': '8cb6.4fff.6588',
'bridge_max_age': 20,
'bridge_forward_delay': 15,
'bridge_transmit_hold_count': 6,
'interface': {
'GigabitEthernet0/7/0/0': {
'name': 'GigabitEthernet0/7/0/0',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 1,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 1,
},
'GigabitEthernet0/7/0/1': {
'name': 'GigabitEthernet0/7/0/1',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 2,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 2,
},
'GigabitEthernet0/7/0/10': {
'name': 'GigabitEthernet0/7/0/10',
'cost': 20000,
'role': 'ROOT',
'port_priority': 128,
'port_num': 3,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 3,
},
'GigabitEthernet0/7/0/11': {
'name': 'GigabitEthernet0/7/0/11',
'cost': 20000,
'role': 'ALT',
'port_priority': 128,
'port_num': 4,
'port_state': 'BLK',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 4,
},
},
},
3: {
'vlan_id': 3,
'designated_root_priority': 32768,
'designated_root_address': '0021.1bff.d973',
'designated_root_max_age': 20,
'designated_root_forward_delay': 15,
'bridge_priority': 32768,
'sys_id_ext': 0,
'bridge_address': '8cb6.4fff.6588',
'bridge_max_age': 20,
'bridge_forward_delay': 15,
'bridge_transmit_hold_count': 6,
'interface': {
'GigabitEthernet0/7/0/0': {
'name': 'GigabitEthernet0/7/0/0',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 1,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 1,
},
'GigabitEthernet0/7/0/1': {
'name': 'GigabitEthernet0/7/0/1',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 2,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 2,
},
'GigabitEthernet0/7/0/10': {
'name': 'GigabitEthernet0/7/0/10',
'cost': 20000,
'role': 'ROOT',
'port_priority': 128,
'port_num': 3,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 3,
},
'GigabitEthernet0/7/0/11': {
'name': 'GigabitEthernet0/7/0/11',
'cost': 20000,
'role': 'ALT',
'port_priority': 128,
'port_num': 4,
'port_state': 'BLK',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 4,
},
},
},
4: {
'vlan_id': 4,
'designated_root_priority': 32768,
'designated_root_address': '0021.1bff.d973',
'designated_root_max_age': 20,
'designated_root_forward_delay': 15,
'bridge_priority': 32768,
'sys_id_ext': 0,
'bridge_address': '8cb6.4fff.6588',
'bridge_max_age': 20,
'bridge_forward_delay': 15,
'bridge_transmit_hold_count': 6,
'interface': {
'GigabitEthernet0/7/0/0': {
'name': 'GigabitEthernet0/7/0/0',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 1,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 1,
},
'GigabitEthernet0/7/0/1': {
'name': 'GigabitEthernet0/7/0/1',
'cost': 20000,
'role': 'DSGN',
'port_priority': 128,
'port_num': 2,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '8cb6.4fff.6588',
'designated_port_priority': 128,
'designated_port_num': 2,
},
'GigabitEthernet0/7/0/10': {
'name': 'GigabitEthernet0/7/0/10',
'cost': 20000,
'role': 'ROOT',
'port_priority': 128,
'port_num': 3,
'port_state': 'FWD',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 3,
},
'GigabitEthernet0/7/0/11': {
'name': 'GigabitEthernet0/7/0/11',
'cost': 20000,
'role': 'ALT',
'port_priority': 128,
'port_num': 4,
'port_state': 'BLK',
'designated_bridge_priority': 32768,
'designated_bridge_address': '0021.1bff.d973',
'designated_port_priority': 128,
'designated_port_num': 4,
},
},
},
},
},
},
}
|
expected_output = {'pvst': {'a': {'pvst_id': 'a', 'vlans': {2: {'vlan_id': 2, 'designated_root_priority': 32768, 'designated_root_address': '0021.1bff.d973', 'designated_root_max_age': 20, 'designated_root_forward_delay': 15, 'bridge_priority': 32768, 'sys_id_ext': 0, 'bridge_address': '8cb6.4fff.6588', 'bridge_max_age': 20, 'bridge_forward_delay': 15, 'bridge_transmit_hold_count': 6, 'interface': {'GigabitEthernet0/7/0/0': {'name': 'GigabitEthernet0/7/0/0', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 1, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 1}, 'GigabitEthernet0/7/0/1': {'name': 'GigabitEthernet0/7/0/1', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 2, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 2}, 'GigabitEthernet0/7/0/10': {'name': 'GigabitEthernet0/7/0/10', 'cost': 20000, 'role': 'ROOT', 'port_priority': 128, 'port_num': 3, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 3}, 'GigabitEthernet0/7/0/11': {'name': 'GigabitEthernet0/7/0/11', 'cost': 20000, 'role': 'ALT', 'port_priority': 128, 'port_num': 4, 'port_state': 'BLK', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 4}}}, 3: {'vlan_id': 3, 'designated_root_priority': 32768, 'designated_root_address': '0021.1bff.d973', 'designated_root_max_age': 20, 'designated_root_forward_delay': 15, 'bridge_priority': 32768, 'sys_id_ext': 0, 'bridge_address': '8cb6.4fff.6588', 'bridge_max_age': 20, 'bridge_forward_delay': 15, 'bridge_transmit_hold_count': 6, 'interface': {'GigabitEthernet0/7/0/0': {'name': 'GigabitEthernet0/7/0/0', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 1, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 1}, 'GigabitEthernet0/7/0/1': {'name': 'GigabitEthernet0/7/0/1', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 2, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 2}, 'GigabitEthernet0/7/0/10': {'name': 'GigabitEthernet0/7/0/10', 'cost': 20000, 'role': 'ROOT', 'port_priority': 128, 'port_num': 3, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 3}, 'GigabitEthernet0/7/0/11': {'name': 'GigabitEthernet0/7/0/11', 'cost': 20000, 'role': 'ALT', 'port_priority': 128, 'port_num': 4, 'port_state': 'BLK', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 4}}}, 4: {'vlan_id': 4, 'designated_root_priority': 32768, 'designated_root_address': '0021.1bff.d973', 'designated_root_max_age': 20, 'designated_root_forward_delay': 15, 'bridge_priority': 32768, 'sys_id_ext': 0, 'bridge_address': '8cb6.4fff.6588', 'bridge_max_age': 20, 'bridge_forward_delay': 15, 'bridge_transmit_hold_count': 6, 'interface': {'GigabitEthernet0/7/0/0': {'name': 'GigabitEthernet0/7/0/0', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 1, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 1}, 'GigabitEthernet0/7/0/1': {'name': 'GigabitEthernet0/7/0/1', 'cost': 20000, 'role': 'DSGN', 'port_priority': 128, 'port_num': 2, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '8cb6.4fff.6588', 'designated_port_priority': 128, 'designated_port_num': 2}, 'GigabitEthernet0/7/0/10': {'name': 'GigabitEthernet0/7/0/10', 'cost': 20000, 'role': 'ROOT', 'port_priority': 128, 'port_num': 3, 'port_state': 'FWD', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 3}, 'GigabitEthernet0/7/0/11': {'name': 'GigabitEthernet0/7/0/11', 'cost': 20000, 'role': 'ALT', 'port_priority': 128, 'port_num': 4, 'port_state': 'BLK', 'designated_bridge_priority': 32768, 'designated_bridge_address': '0021.1bff.d973', 'designated_port_priority': 128, 'designated_port_num': 4}}}}}}}
|
t1 = (1, 2, 3, 'a')
t2 = 4, 5, 6, 'b'
t3 = 1,
# print(t1[3])
# for v in t1:
# print(v)
# print(t1 + t2)
n1, n2, *n = t1
print(n1)
# Processo para mudar valor de uma tupla
t1 = list(t1)
t1[1] = 3000
t1 = tuple(t1)
print(t1)
|
t1 = (1, 2, 3, 'a')
t2 = (4, 5, 6, 'b')
t3 = (1,)
(n1, n2, *n) = t1
print(n1)
t1 = list(t1)
t1[1] = 3000
t1 = tuple(t1)
print(t1)
|
# Medium
# for loop with twoSum
class Solution:
def threeSum(self, nums: List[int]) -> List[List[int]]:
nums.sort()
result = []
for i in range(len(nums)):
if i>0 and nums[i] == nums[i-1]:
continue
tmp = self.twoSum(nums,i+1,0-nums[i])
for t in tmp:
merge = [nums[i]] + [*t]
result.append([*merge])
return result
def twoSum(self,nums,index,tar):
check = set([])
appear = set([])
result = []
for i in range(index,len(nums)):
if tar - nums[i] in check:
if (tar-nums[i],nums[i]) not in appear:
result.append( [tar-nums[i],nums[i]] )
appear.add((tar-nums[i],nums[i]))
check.add(nums[i])
#print('2sum',result)
return result
|
class Solution:
def three_sum(self, nums: List[int]) -> List[List[int]]:
nums.sort()
result = []
for i in range(len(nums)):
if i > 0 and nums[i] == nums[i - 1]:
continue
tmp = self.twoSum(nums, i + 1, 0 - nums[i])
for t in tmp:
merge = [nums[i]] + [*t]
result.append([*merge])
return result
def two_sum(self, nums, index, tar):
check = set([])
appear = set([])
result = []
for i in range(index, len(nums)):
if tar - nums[i] in check:
if (tar - nums[i], nums[i]) not in appear:
result.append([tar - nums[i], nums[i]])
appear.add((tar - nums[i], nums[i]))
check.add(nums[i])
return result
|
def minion_game(string):
string = string.lower()
scoreStuart = 0
scoreKevin = 0
vowels = 'aeiou'
for j, i in enumerate(string):
if i not in vowels:
scoreStuart += len(string) - j
if i in vowels:
scoreKevin += len(string) - j
if scoreStuart > scoreKevin:
print('Stuart', scoreStuart)
elif scoreKevin > scoreStuart:
print('Kevin', scoreKevin)
else:
print('Draw')
if __name__ == '__main__':
s = input()
minion_game(s)
|
def minion_game(string):
string = string.lower()
score_stuart = 0
score_kevin = 0
vowels = 'aeiou'
for (j, i) in enumerate(string):
if i not in vowels:
score_stuart += len(string) - j
if i in vowels:
score_kevin += len(string) - j
if scoreStuart > scoreKevin:
print('Stuart', scoreStuart)
elif scoreKevin > scoreStuart:
print('Kevin', scoreKevin)
else:
print('Draw')
if __name__ == '__main__':
s = input()
minion_game(s)
|
class CoachAthleteTable:
def __init__(self):
pass
TABLE_NAME = "Coach_Athlete"
ATHLETE_ID = "Athlete_Id"
COACH_ID = "Coach_Id"
CAN_ACCESS_TRAINING_LOG = "Can_Access_Training_Log"
CAN_ACCESS_TARGETS = "Can_Access_Targets"
IS_ACTIVE = "Is_Active"
START_DATE = "Start_Date"
INVITE_ID = "Invite_Id"
|
class Coachathletetable:
def __init__(self):
pass
table_name = 'Coach_Athlete'
athlete_id = 'Athlete_Id'
coach_id = 'Coach_Id'
can_access_training_log = 'Can_Access_Training_Log'
can_access_targets = 'Can_Access_Targets'
is_active = 'Is_Active'
start_date = 'Start_Date'
invite_id = 'Invite_Id'
|
T=int(input())
def is_anagram(str1, str2):
list_str1 = list(str1)
list_str1.sort()
list_str2 = list(str2)
#list_str2.sort()
return (list_str1 == list_str2)
for i in range(T):
opt=0
L=int(input())
A=str(input())
B=str(input())
for j in range(len(A)):
for k in range(len(A)):
for l in range(k):
if is_anagram(A[j:k],B[(abs(l))%3:(abs(l-k+1))%3]):
opt+=1
print("Case #"+str(i+1)+": "+str(opt))
|
t = int(input())
def is_anagram(str1, str2):
list_str1 = list(str1)
list_str1.sort()
list_str2 = list(str2)
return list_str1 == list_str2
for i in range(T):
opt = 0
l = int(input())
a = str(input())
b = str(input())
for j in range(len(A)):
for k in range(len(A)):
for l in range(k):
if is_anagram(A[j:k], B[abs(l) % 3:abs(l - k + 1) % 3]):
opt += 1
print('Case #' + str(i + 1) + ': ' + str(opt))
|
__title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '0.1.12'
__description__ = 'Manage tmux sessions thru JSON, YAML configs. Features Python API'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013 Tony Narlock'
|
__title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '0.1.12'
__description__ = 'Manage tmux sessions thru JSON, YAML configs. Features Python API'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013 Tony Narlock'
|
list=linkedlist()
list.head=node("Monday")
list1=node("Tuesday")
list2=node("Thursday")
list.head.next=list1
list1.next=list2
print("Before insertion:")
list.printing()
print('\n')
list.push_after(list1,"Wednesday")
print("After insertion:")
list.printing()
print('\n')
list.deletion(3)
print("After deleting 4th node")
list.printing()
|
list = linkedlist()
list.head = node('Monday')
list1 = node('Tuesday')
list2 = node('Thursday')
list.head.next = list1
list1.next = list2
print('Before insertion:')
list.printing()
print('\n')
list.push_after(list1, 'Wednesday')
print('After insertion:')
list.printing()
print('\n')
list.deletion(3)
print('After deleting 4th node')
list.printing()
|
# -*- coding: utf-8 -*-
DB_LOCATION = 'timeclock.db'
DEBUG = True
|
db_location = 'timeclock.db'
debug = True
|
#
# Copyright (c) 2020-present, Weibo, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author ZhongXiu Hao <nmred.hao@gmail.com>
"""Build rule for generating C or C++ sources with fbthrift thrift1.
"""
def _fbthrift_cc_library_impl(ctx):
output_path_attr = ctx.attr.name
if ctx.attr.output_path:
output_path_attr = ctx.attr.output_path
outputs = [
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.h" % (output_path_attr, ctx.attr.name, "constants")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.cpp" % (output_path_attr, ctx.attr.name, "constants")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.h" % (output_path_attr, ctx.attr.name, "data")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.cpp" % (output_path_attr, ctx.attr.name, "data")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.h" % (output_path_attr, ctx.attr.name, "types")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.cpp" % (output_path_attr, ctx.attr.name, "types")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.tcc" % (output_path_attr, ctx.attr.name, "types")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.h" % (output_path_attr, ctx.attr.name, "metadata")),
ctx.actions.declare_file("%s/gen-cpp2/%s_%s.h" % (output_path_attr, ctx.attr.name, "types_custom_protocol")),
]
service_outs = []
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s.h" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s.cpp" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%sAsyncClient.h" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%sAsyncClient.cpp" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s_custom_protocol.h" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s_processmap_binary.cpp" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s_processmap_compact.cpp" % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file("%s/gen-cpp2/%s.tcc" % (output_path_attr, f)) for f in ctx.attr.service_list]
output_path = outputs[0].dirname.replace("gen-cpp2", "")
# Argument list
args = ctx.actions.args()
args.add("--gen", "mstch_cpp2:%s" % ctx.attr.options)
args.add("-o", output_path)
args.add("-I", "`dirname " + ctx.file.src.dirname + "`")
args.add("-I", "`dirname " + ctx.build_file_path + "`")
args.add("-I", ".")
args.add("-v")
args.add(ctx.file.src.path)
output_list = outputs + service_outs
inputs = ctx.files.thrift_include_files + ctx.files.src
ctx.actions.run(
executable = ctx.executable._thrift1,
arguments = [args],
inputs = inputs,
outputs = output_list,
mnemonic = "fbthrift",
progress_message = "Generating %s from %s" %
(
output_path,
ctx.file.src.short_path,
),
)
return [DefaultInfo(files = depset(direct = outputs + service_outs))]
gen_fbthrift = rule(
implementation = _fbthrift_cc_library_impl,
doc = "Generate C/C++-language sources from a Yacc file using fbthrift.",
attrs = {
"src": attr.label(
mandatory = True,
allow_single_file = [".thrift"],
doc = "The .thrift source file for this rule",
),
"thrift_include_files": attr.label_list(
allow_files = [".thrift"],
doc = "The .thrift source file for this rule",
),
"options": attr.string(
doc = "A list of options to pass to thrift1. These are " +
"subject to $(location ...) expansion.",
),
"output_path": attr.string(
doc = "A list of options to pass to thrift1. These are " +
"subject to $(location ...) expansion.",
),
"service_list": attr.string_list(
doc = "A list of options to pass to thrift1. These are " +
"subject to $(location ...) expansion.",
),
"_thrift1": attr.label(
default = Label("@fbthrift//:thrift1"),
executable = True,
cfg = "host",
),
},
output_to_genfiles = True,
)
|
"""Build rule for generating C or C++ sources with fbthrift thrift1.
"""
def _fbthrift_cc_library_impl(ctx):
output_path_attr = ctx.attr.name
if ctx.attr.output_path:
output_path_attr = ctx.attr.output_path
outputs = [ctx.actions.declare_file('%s/gen-cpp2/%s_%s.h' % (output_path_attr, ctx.attr.name, 'constants')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.cpp' % (output_path_attr, ctx.attr.name, 'constants')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.h' % (output_path_attr, ctx.attr.name, 'data')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.cpp' % (output_path_attr, ctx.attr.name, 'data')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.h' % (output_path_attr, ctx.attr.name, 'types')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.cpp' % (output_path_attr, ctx.attr.name, 'types')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.tcc' % (output_path_attr, ctx.attr.name, 'types')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.h' % (output_path_attr, ctx.attr.name, 'metadata')), ctx.actions.declare_file('%s/gen-cpp2/%s_%s.h' % (output_path_attr, ctx.attr.name, 'types_custom_protocol'))]
service_outs = []
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s.h' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s.cpp' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%sAsyncClient.h' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%sAsyncClient.cpp' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s_custom_protocol.h' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s_processmap_binary.cpp' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s_processmap_compact.cpp' % (output_path_attr, f)) for f in ctx.attr.service_list]
service_outs += [ctx.actions.declare_file('%s/gen-cpp2/%s.tcc' % (output_path_attr, f)) for f in ctx.attr.service_list]
output_path = outputs[0].dirname.replace('gen-cpp2', '')
args = ctx.actions.args()
args.add('--gen', 'mstch_cpp2:%s' % ctx.attr.options)
args.add('-o', output_path)
args.add('-I', '`dirname ' + ctx.file.src.dirname + '`')
args.add('-I', '`dirname ' + ctx.build_file_path + '`')
args.add('-I', '.')
args.add('-v')
args.add(ctx.file.src.path)
output_list = outputs + service_outs
inputs = ctx.files.thrift_include_files + ctx.files.src
ctx.actions.run(executable=ctx.executable._thrift1, arguments=[args], inputs=inputs, outputs=output_list, mnemonic='fbthrift', progress_message='Generating %s from %s' % (output_path, ctx.file.src.short_path))
return [default_info(files=depset(direct=outputs + service_outs))]
gen_fbthrift = rule(implementation=_fbthrift_cc_library_impl, doc='Generate C/C++-language sources from a Yacc file using fbthrift.', attrs={'src': attr.label(mandatory=True, allow_single_file=['.thrift'], doc='The .thrift source file for this rule'), 'thrift_include_files': attr.label_list(allow_files=['.thrift'], doc='The .thrift source file for this rule'), 'options': attr.string(doc='A list of options to pass to thrift1. These are ' + 'subject to $(location ...) expansion.'), 'output_path': attr.string(doc='A list of options to pass to thrift1. These are ' + 'subject to $(location ...) expansion.'), 'service_list': attr.string_list(doc='A list of options to pass to thrift1. These are ' + 'subject to $(location ...) expansion.'), '_thrift1': attr.label(default=label('@fbthrift//:thrift1'), executable=True, cfg='host')}, output_to_genfiles=True)
|
"""
--- Day 23: Coprocessor Conflagration ---
You decide to head directly to the CPU and fix the printer from there. As you get close, you find an experimental coprocessor doing so much work that the local programs are afraid it will halt and catch fire. This would cause serious issues for the rest of the computer, so you head in and see what you can do.
The code it's running seems to be a variant of the kind you saw recently on that tablet. The general functionality seems very similar, but some of the instructions are different:
set X Y sets register X to the value of Y.
sub X Y decreases register X by the value of Y.
mul X Y sets register X to the result of multiplying the value contained in register X by the value of Y.
jnz X Y jumps with an offset of the value of Y, but only if the value of X is not zero. (An offset of 2 skips the next instruction, an offset of -1 jumps to the previous instruction, and so on.)
Only the instructions listed above are used. The eight registers here, named a through h, all start at 0.
The coprocessor is currently set to some kind of debug mode, which allows for testing, but prevents it from doing any meaningful work.
If you run the program (your puzzle input), how many times is the mul instruction invoked?
--- Part Two ---
The Turing machine, and soon the entire computer, springs back to life. A console glows dimly nearby, awaiting your command.
> reboot printer
Error: That command requires priority 50. You currently have priority 0.
You must deposit 50 stars to increase your priority to the required level.
The console flickers for a moment, and then prints another message:
Star accepted.
You must deposit 49 stars to increase your priority to the required level.
The garbage collector winks at you, then continues sweeping.
--- Part Two ---
Now, it's time to fix the problem.
The debug mode switch is wired directly to register a. You flip the switch, which makes register a now start at 1 when the program is executed.
Immediately, the coprocessor begins to overheat. Whoever wrote this program obviously didn't choose a very efficient implementation. You'll need to optimize the program if it has any hope of completing before Santa needs that printer working.
The coprocessor's ultimate goal is to determine the final value left in register h once the program completes. Technically, if it had that... it wouldn't even need to run the program.
After setting register a to 1, if the program were to run to completion, what value would be left in register h?
"""
def read():
with open('inputs/day23.txt') as fd:
return fd.readlines()
def parse(lines):
l = []
for line in lines:
inst, *params = line.strip().split()
l.append((inst, params))
return l
class Registers(dict):
def __missing__(self, key):
try:
v = int(key)
except ValueError:
v = 0
self[key] = 0
return v
class Coprocessor:
def __init__(self):
self.reg = Registers()
self.INSTRUCTIONS = {'set': self.set, 'sub': self.sub,
'mul': self.mul, 'jnz': self.jump}
self.last_played = None
self.mul_counter = 0
self.instructions = None
self.pos = 0
def set(self, reg, val):
self.reg[reg] = self.reg[val]
def sub(self, reg, val):
self.reg[reg] -= self.reg[val]
def mul(self, reg1, reg2):
self.mul_counter += 1
self.reg[reg1] *= self.reg[reg2]
def jump(self, reg, val):
if self.reg[reg] != 0:
self.pos += self.reg[val] -1
def operate(self, instructions):
self.instructions = instructions
self.pos = 0
while True:
inst, params = self.instructions[self.pos]
self.INSTRUCTIONS[inst](*params)
self.pos += 1
if self.pos < 0 or self.pos >= len(self.instructions):
break
print(self.reg)
def part1():
p = Coprocessor()
p.operate(parse(read()))
print(p.mul_counter)
def isprime(value):
for i in range(2, value):
if (value % i) == 0:
return False
return True
def count_primes(init, end, step):
count = 0
for i in range(init, end+1, step):
if isprime(i):
count += 1
return count
def part2():
"""
After optimizing the code it results that
"""
b = 106500
c = 123500
h = (c-b)/17 # each loop b increases 17 until it matches c
h += 1 # there is an extra loop when b == c ??
h -= count_primes(b, c, 17) # on primes, f is set to 0 and h not increased
print(int(h))
""" Code translated to Python
b = 106500
c = 123500
h = 0
g = 1 # added to enter first loop
while True:
f = 1
d = 2
while g != 0:
e = 2
while g != 0:
g = d
g *= e
g -= b
if g == 0:
f = 0
e += 1
g = e
g -= b
d += 1
g = d
g -= b
if f != 0:
h += 1
g = b
g -= c
if g == 0:
break
b += 17
"""
if __name__ == '__main__':
# part1()
part2()
|
"""
--- Day 23: Coprocessor Conflagration ---
You decide to head directly to the CPU and fix the printer from there. As you get close, you find an experimental coprocessor doing so much work that the local programs are afraid it will halt and catch fire. This would cause serious issues for the rest of the computer, so you head in and see what you can do.
The code it's running seems to be a variant of the kind you saw recently on that tablet. The general functionality seems very similar, but some of the instructions are different:
set X Y sets register X to the value of Y.
sub X Y decreases register X by the value of Y.
mul X Y sets register X to the result of multiplying the value contained in register X by the value of Y.
jnz X Y jumps with an offset of the value of Y, but only if the value of X is not zero. (An offset of 2 skips the next instruction, an offset of -1 jumps to the previous instruction, and so on.)
Only the instructions listed above are used. The eight registers here, named a through h, all start at 0.
The coprocessor is currently set to some kind of debug mode, which allows for testing, but prevents it from doing any meaningful work.
If you run the program (your puzzle input), how many times is the mul instruction invoked?
--- Part Two ---
The Turing machine, and soon the entire computer, springs back to life. A console glows dimly nearby, awaiting your command.
> reboot printer
Error: That command requires priority 50. You currently have priority 0.
You must deposit 50 stars to increase your priority to the required level.
The console flickers for a moment, and then prints another message:
Star accepted.
You must deposit 49 stars to increase your priority to the required level.
The garbage collector winks at you, then continues sweeping.
--- Part Two ---
Now, it's time to fix the problem.
The debug mode switch is wired directly to register a. You flip the switch, which makes register a now start at 1 when the program is executed.
Immediately, the coprocessor begins to overheat. Whoever wrote this program obviously didn't choose a very efficient implementation. You'll need to optimize the program if it has any hope of completing before Santa needs that printer working.
The coprocessor's ultimate goal is to determine the final value left in register h once the program completes. Technically, if it had that... it wouldn't even need to run the program.
After setting register a to 1, if the program were to run to completion, what value would be left in register h?
"""
def read():
with open('inputs/day23.txt') as fd:
return fd.readlines()
def parse(lines):
l = []
for line in lines:
(inst, *params) = line.strip().split()
l.append((inst, params))
return l
class Registers(dict):
def __missing__(self, key):
try:
v = int(key)
except ValueError:
v = 0
self[key] = 0
return v
class Coprocessor:
def __init__(self):
self.reg = registers()
self.INSTRUCTIONS = {'set': self.set, 'sub': self.sub, 'mul': self.mul, 'jnz': self.jump}
self.last_played = None
self.mul_counter = 0
self.instructions = None
self.pos = 0
def set(self, reg, val):
self.reg[reg] = self.reg[val]
def sub(self, reg, val):
self.reg[reg] -= self.reg[val]
def mul(self, reg1, reg2):
self.mul_counter += 1
self.reg[reg1] *= self.reg[reg2]
def jump(self, reg, val):
if self.reg[reg] != 0:
self.pos += self.reg[val] - 1
def operate(self, instructions):
self.instructions = instructions
self.pos = 0
while True:
(inst, params) = self.instructions[self.pos]
self.INSTRUCTIONS[inst](*params)
self.pos += 1
if self.pos < 0 or self.pos >= len(self.instructions):
break
print(self.reg)
def part1():
p = coprocessor()
p.operate(parse(read()))
print(p.mul_counter)
def isprime(value):
for i in range(2, value):
if value % i == 0:
return False
return True
def count_primes(init, end, step):
count = 0
for i in range(init, end + 1, step):
if isprime(i):
count += 1
return count
def part2():
"""
After optimizing the code it results that
"""
b = 106500
c = 123500
h = (c - b) / 17
h += 1
h -= count_primes(b, c, 17)
print(int(h))
' Code translated to Python\nb = 106500\nc = 123500\nh = 0\n\ng = 1 # added to enter first loop\n\nwhile True:\n f = 1\n d = 2\n while g != 0:\n e = 2\n while g != 0:\n g = d\n g *= e\n g -= b\n if g == 0:\n f = 0\n e += 1\n g = e\n g -= b\n d += 1\n g = d\n g -= b\n if f != 0:\n h += 1\n g = b\n g -= c\n if g == 0:\n break\n b += 17\n'
if __name__ == '__main__':
part2()
|
class Sources:
"""
Sources class to define source object
"""
def __init__(self, id, name, description, url):
self.id = id
self.name = name
self.description = description
self.url = url
class Articles:
"""
Articles class to define articles object
"""
def __init__(self, author, title, description, url, urlToImage):
self.author = author
self.title = title
self.description = description
self.url = url
self.urlToImage = urlToImage
self.publishedAt = publishedAt
|
class Sources:
"""
Sources class to define source object
"""
def __init__(self, id, name, description, url):
self.id = id
self.name = name
self.description = description
self.url = url
class Articles:
"""
Articles class to define articles object
"""
def __init__(self, author, title, description, url, urlToImage):
self.author = author
self.title = title
self.description = description
self.url = url
self.urlToImage = urlToImage
self.publishedAt = publishedAt
|
class Vehicle(object):
"""A generic vehicle class."""
def __init__(self, position):
self.position = position
def travel(self, destination):
print('Travelling...')
class RadioMixin(object):
def play_song_on_station(self, station):
print('Playing station...')
class Car(Vehicle, RadioMixin):
pass
class Boat(Vehicle):
pass
|
class Vehicle(object):
"""A generic vehicle class."""
def __init__(self, position):
self.position = position
def travel(self, destination):
print('Travelling...')
class Radiomixin(object):
def play_song_on_station(self, station):
print('Playing station...')
class Car(Vehicle, RadioMixin):
pass
class Boat(Vehicle):
pass
|
#!/usr/bin/env python3
n = int(input())
print(n * ((n - 1) % 2))
|
n = int(input())
print(n * ((n - 1) % 2))
|
result = True
another_result = result
print(id(result))
print(id(another_result))
# bool is immutable
result = False
print(id(result))
print(id(another_result))
|
result = True
another_result = result
print(id(result))
print(id(another_result))
result = False
print(id(result))
print(id(another_result))
|
region='' # GCP region e.g. us-central1 etc,
dbusername=''
dbpassword=''
rabbithost=''
rabbitusername=''
rabbitpassword=''
awskey='' # if you intend to import data from S3
awssecret='' # if you intend to import data from S3
mediabucket=''
secretkey=''
superuser=''
superpass=''
superemail=''
cloudfsprefix='gs'
cors_origin='' # to set CORS on the bucket Can be * or specific website e.g. http://example.website.com
redishost = "redis-master"
redispassword = "sadnnasndaslnk"
|
region = ''
dbusername = ''
dbpassword = ''
rabbithost = ''
rabbitusername = ''
rabbitpassword = ''
awskey = ''
awssecret = ''
mediabucket = ''
secretkey = ''
superuser = ''
superpass = ''
superemail = ''
cloudfsprefix = 'gs'
cors_origin = ''
redishost = 'redis-master'
redispassword = 'sadnnasndaslnk'
|
CheckNumber = float(input("enter your number"))
print(str(CheckNumber) +' setnumber')
while CheckNumber != 1:
numcheck = CheckNumber
if CheckNumber % 2 == 0:
CheckNumber = CheckNumber / 2
print(str(CheckNumber) +' output reduced')
else:
CheckNumber = CheckNumber * 3 + 1
print(str(CheckNumber) +' output increased')
#CheckNumber = 1
#for x in range(int(input("please let me know the number youd like to #start end with"))):
#CheckNumber = CheckNumber + x
|
check_number = float(input('enter your number'))
print(str(CheckNumber) + ' setnumber')
while CheckNumber != 1:
numcheck = CheckNumber
if CheckNumber % 2 == 0:
check_number = CheckNumber / 2
print(str(CheckNumber) + ' output reduced')
else:
check_number = CheckNumber * 3 + 1
print(str(CheckNumber) + ' output increased')
|
class Solution:
def asteroidCollision(self, asteroids: List[int]) -> List[int]:
'''
T: (n) and S: O(n)
'''
stack = []
for asteroid in asteroids:
# Case 1: Collision occurs
while stack and (asteroid < 0 and stack[-1] > 0):
if stack[-1] < -asteroid: # Case 1a: asteroid survives after collision
stack.pop()
continue
elif stack[-1] > -asteroid: # Case 1b: stack[-1] survives after collision
break
else: # Case 1c: None survives after collision
stack.pop()
break
else: # No collision, both survive
stack.append(asteroid)
return stack
|
class Solution:
def asteroid_collision(self, asteroids: List[int]) -> List[int]:
"""
T: (n) and S: O(n)
"""
stack = []
for asteroid in asteroids:
while stack and (asteroid < 0 and stack[-1] > 0):
if stack[-1] < -asteroid:
stack.pop()
continue
elif stack[-1] > -asteroid:
break
else:
stack.pop()
break
else:
stack.append(asteroid)
return stack
|
#
# PySNMP MIB module ENTERASYS-SERVICE-LEVEL-REPORTING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ENTERASYS-SERVICE-LEVEL-REPORTING-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:50:17 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
etsysModules, = mibBuilder.importSymbols("ENTERASYS-MIB-NAMES", "etsysModules")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, MibIdentifier, Unsigned32, Counter32, Gauge32, iso, ModuleIdentity, ObjectIdentity, Counter64, TimeTicks, NotificationType, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "MibIdentifier", "Unsigned32", "Counter32", "Gauge32", "iso", "ModuleIdentity", "ObjectIdentity", "Counter64", "TimeTicks", "NotificationType", "Bits")
TextualConvention, RowStatus, StorageType, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "StorageType", "DisplayString")
etsysServiceLevelReportingMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39))
etsysServiceLevelReportingMIB.setRevisions(('2003-11-06 15:15', '2003-10-24 19:02', '2003-10-22 23:32',))
if mibBuilder.loadTexts: etsysServiceLevelReportingMIB.setLastUpdated('200311061515Z')
if mibBuilder.loadTexts: etsysServiceLevelReportingMIB.setOrganization('Enterasys Networks Inc.')
class EtsysSrvcLvlOwnerString(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 32)
class TimeUnit(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))
namedValues = NamedValues(("year", 1), ("month", 2), ("week", 3), ("day", 4), ("hour", 5), ("second", 6), ("millisecond", 7), ("microsecond", 8), ("nanosecond", 9))
class EtsysSrvcLvlStandardMetrics(TextualConvention, Bits):
status = 'current'
namedValues = NamedValues(("reserved", 0), ("instantUnidirectionConnectivity", 1), ("instantBidirectionConnectivity", 2), ("intervalUnidirectionConnectivity", 3), ("intervalBidirectionConnectivity", 4), ("intervalTemporalConnectivity", 5), ("oneWayDelay", 6), ("oneWayDelayPoissonStream", 7), ("oneWayDelayPercentile", 8), ("oneWayDelayMedian", 9), ("oneWayDelayMinimum", 10), ("oneWayDelayInversePercentile", 11), ("oneWayPacketLoss", 12), ("oneWayPacketLossPoissonStream", 13), ("oneWayPacketLossAverage", 14), ("roundtripDelay", 15), ("roundtripDelayPoissonStream", 16), ("roundtripDelayPercentile", 17), ("roundtripDelayMedian", 18), ("roundtripDelayMinimum", 19), ("roundtripDelayInversePercentile", 20), ("oneWayLossDistanceStream", 21), ("oneWayLossPeriodStream", 22), ("oneWayLossNoticeableRate", 23), ("oneWayLossPeriodTotal", 24), ("oneWayLossPeriodLengths", 25), ("oneWayInterLossPeriodLengths", 26), ("oneWayIpdv", 27), ("oneWayIpdvPoissonStream", 28), ("oneWayIpdvPercentile", 29), ("oneWayIpdvInversePercentile", 30), ("oneWayIpdvJitter", 31), ("oneWayPeakToPeakIpdv", 32), ("oneWayDelayPeriodicStream", 33), ("roundtripDelayAverage", 34), ("roundtripPacketLoss", 35), ("roundtripPacketLossAverage", 36), ("roundtripIpdv", 37))
class GMTTimeStamp(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8)
fixedLength = 8
class TypeP(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 512)
class TypePaddress(TextualConvention, OctetString):
status = 'current'
displayHint = '255a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 512)
etsysSrvcLvlConfigObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1))
etsysSrvcLvlSystem = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1))
etsysSrvcLvlOwners = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2))
etsysSrvcLvlHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3))
etsysSrvcLvlMeasure = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4))
etsysSrvcLvlSystemTime = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 1), GMTTimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlSystemTime.setStatus('current')
etsysSrvcLvlSystemClockResolution = MibScalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 2), Integer32()).setUnits('picoseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlSystemClockResolution.setStatus('current')
etsysSrvcLvlMetricTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3), )
if mibBuilder.loadTexts: etsysSrvcLvlMetricTable.setStatus('current')
etsysSrvcLvlMetricEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1), ).setIndexNames((0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMetricIndex"))
if mibBuilder.loadTexts: etsysSrvcLvlMetricEntry.setStatus('current')
etsysSrvcLvlMetricIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37))).clone(namedValues=NamedValues(("instantUnidirectionConnectivity", 1), ("instantBidirectionConnectivity", 2), ("intervalUnidirectionConnectivity", 3), ("intervalBidirectionConnectivity", 4), ("intervalTemporalConnectivity", 5), ("oneWayDelay", 6), ("oneWayDelayPoissonStream", 7), ("oneWayDelayPercentile", 8), ("oneWayDelayMedian", 9), ("oneWayDelayMinimum", 10), ("oneWayDelayInversePercentile", 11), ("oneWayPacketLoss", 12), ("oneWayPacketLossPoissonStream", 13), ("oneWayPacketLossAverage", 14), ("roundtripDelay", 15), ("roundtripDelayPoissonStream", 16), ("roundtripDelayPercentile", 17), ("roundtripDelayMedian", 18), ("roundtripDelayMinimum", 19), ("roundtripDelayInversePercentile", 20), ("oneWayLossDistanceStream", 21), ("oneWayLossPeriodStream", 22), ("oneWayLossNoticeableRate", 23), ("oneWayLossPeriodTotal", 24), ("oneWayLossPeriodLengths", 25), ("oneWayInterLossPeriodLengths", 26), ("oneWayIpdv", 27), ("oneWayIpdvPoissonStream", 28), ("oneWayIpdvPercentile", 29), ("oneWayIpdvInversePercentile", 30), ("oneWayIpdvJitter", 31), ("oneWayPeakToPeakIpdv", 32), ("oneWayDelayPeriodicStream", 33), ("roundtripDelayAverage", 34), ("roundtripPacketLoss", 35), ("roundtripPacketLossAverage", 36), ("roundtripIpdv", 37))))
if mibBuilder.loadTexts: etsysSrvcLvlMetricIndex.setStatus('current')
etsysSrvcLvlMetricCapabilities = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("notImplemented", 0), ("implemented", 1))).clone('implemented')).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlMetricCapabilities.setStatus('current')
etsysSrvcLvlMetricType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("network", 0), ("aggregated", 1))).clone('aggregated')).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlMetricType.setStatus('current')
etsysSrvcLvlMetricUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("noUnit", 0), ("second", 1), ("millisecond", 2), ("microsecond", 3), ("nanosecond", 4), ("percentage", 5), ("packet", 6), ("byte", 7), ("kilobyte", 8), ("megabyte", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlMetricUnit.setStatus('current')
etsysSrvcLvlMetricDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 5), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlMetricDescription.setStatus('current')
etsysSrvcLvlOwnersTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1), )
if mibBuilder.loadTexts: etsysSrvcLvlOwnersTable.setStatus('current')
etsysSrvcLvlOwnersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1), ).setIndexNames((0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersIndex"))
if mibBuilder.loadTexts: etsysSrvcLvlOwnersEntry.setStatus('current')
etsysSrvcLvlOwnersIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlOwnersIndex.setStatus('current')
etsysSrvcLvlOwnersOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 2), EtsysSrvcLvlOwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersOwner.setStatus('current')
etsysSrvcLvlOwnersGrantedMetrics = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 3), EtsysSrvcLvlStandardMetrics()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersGrantedMetrics.setStatus('current')
etsysSrvcLvlOwnersQuota = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersQuota.setStatus('current')
etsysSrvcLvlOwnersIpAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 5), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersIpAddressType.setStatus('current')
etsysSrvcLvlOwnersIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 6), InetAddress().subtype(subtypeSpec=ValueSizeConstraint(1, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersIpAddress.setStatus('current')
etsysSrvcLvlOwnersEmail = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 7), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersEmail.setStatus('current')
etsysSrvcLvlOwnersSMS = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 8), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersSMS.setStatus('current')
etsysSrvcLvlOwnersStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlOwnersStatus.setStatus('current')
etsysSrvcLvlHistoryTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1), )
if mibBuilder.loadTexts: etsysSrvcLvlHistoryTable.setStatus('current')
etsysSrvcLvlHistoryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1), ).setIndexNames((0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryMeasureOwner"), (0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryMeasureIndex"), (0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryMetricIndex"), (0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryIndex"))
if mibBuilder.loadTexts: etsysSrvcLvlHistoryEntry.setStatus('current')
etsysSrvcLvlHistoryMeasureOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 1), EtsysSrvcLvlOwnerString())
if mibBuilder.loadTexts: etsysSrvcLvlHistoryMeasureOwner.setStatus('current')
etsysSrvcLvlHistoryMeasureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlHistoryMeasureIndex.setStatus('current')
etsysSrvcLvlHistoryMetricIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlHistoryMetricIndex.setStatus('current')
etsysSrvcLvlHistoryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlHistoryIndex.setStatus('current')
etsysSrvcLvlHistorySequence = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlHistorySequence.setStatus('current')
etsysSrvcLvlHistoryTimestamp = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 6), GMTTimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlHistoryTimestamp.setStatus('current')
etsysSrvcLvlHistoryValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlHistoryValue.setStatus('current')
etsysSrvcLvlNetMeasureTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1), )
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureTable.setStatus('current')
etsysSrvcLvlNetMeasureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1), ).setIndexNames((0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureOwner"), (0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureIndex"))
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureEntry.setStatus('current')
etsysSrvcLvlNetMeasureOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 1), EtsysSrvcLvlOwnerString())
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureOwner.setStatus('current')
etsysSrvcLvlNetMeasureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureIndex.setStatus('current')
etsysSrvcLvlNetMeasureName = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 3), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureName.setStatus('current')
etsysSrvcLvlNetMeasureMetrics = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 4), EtsysSrvcLvlStandardMetrics()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureMetrics.setStatus('current')
etsysSrvcLvlNetMeasureBeginTime = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 5), GMTTimeStamp()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureBeginTime.setStatus('current')
etsysSrvcLvlNetMeasureDurationUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 6), TimeUnit().clone('second')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDurationUnit.setStatus('current')
etsysSrvcLvlNetMeasureDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDuration.setStatus('current')
etsysSrvcLvlNetMeasureHistorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureHistorySize.setStatus('current')
etsysSrvcLvlNetMeasureFailureMgmtMode = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("auto", 1), ("manual", 2), ("discarded", 3))).clone('auto')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureFailureMgmtMode.setStatus('current')
etsysSrvcLvlNetMeasureResultsMgmt = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("wrap", 1), ("suspend", 2), ("delete", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureResultsMgmt.setStatus('current')
etsysSrvcLvlNetMeasureSrcTypeP = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 11), TypeP().clone('ip')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureSrcTypeP.setStatus('current')
etsysSrvcLvlNetMeasureSrc = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 12), TypePaddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureSrc.setStatus('current')
etsysSrvcLvlNetMeasureDstTypeP = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 13), TypeP().clone('ip')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDstTypeP.setStatus('current')
etsysSrvcLvlNetMeasureDst = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 14), TypePaddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDst.setStatus('current')
etsysSrvcLvlNetMeasureTxMode = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("other", 0), ("periodic", 1), ("poisson", 2), ("multiburst", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureTxMode.setStatus('current')
etsysSrvcLvlNetMeasureTxPacketRateUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 16), TimeUnit()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureTxPacketRateUnit.setStatus('current')
etsysSrvcLvlNetMeasureTxPacketRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureTxPacketRate.setStatus('current')
etsysSrvcLvlNetMeasureDevtnOrBurstSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDevtnOrBurstSize.setStatus('current')
etsysSrvcLvlNetMeasureMedOrIntBurstSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureMedOrIntBurstSize.setStatus('current')
etsysSrvcLvlNetMeasureLossTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 20), Integer32()).setUnits('Milliseconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureLossTimeout.setStatus('current')
etsysSrvcLvlNetMeasureL3PacketSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureL3PacketSize.setStatus('current')
etsysSrvcLvlNetMeasureDataPattern = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 22), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureDataPattern.setStatus('current')
etsysSrvcLvlNetMeasureMap = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 23), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureMap.setStatus('current')
etsysSrvcLvlNetMeasureSingletons = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureSingletons.setStatus('current')
etsysSrvcLvlNetMeasureOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("unknown", 0), ("running", 1), ("stopped", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlNetMeasureOperState.setStatus('current')
etsysSrvcLvlAggrMeasureTable = MibTable((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2), )
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureTable.setStatus('current')
etsysSrvcLvlAggrMeasureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1), ).setIndexNames((0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureOwner"), (0, "ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureIndex"))
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureEntry.setStatus('current')
etsysSrvcLvlAggrMeasureOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 1), EtsysSrvcLvlOwnerString())
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureOwner.setStatus('current')
etsysSrvcLvlAggrMeasureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)))
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureIndex.setStatus('current')
etsysSrvcLvlAggrMeasureName = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 3), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureName.setStatus('current')
etsysSrvcLvlAggrMeasureMetrics = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 4), EtsysSrvcLvlStandardMetrics()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureMetrics.setStatus('current')
etsysSrvcLvlAggrMeasureBeginTime = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 5), GMTTimeStamp()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureBeginTime.setStatus('current')
etsysSrvcLvlAggrMeasureAggrPeriodUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 6), TimeUnit().clone('second')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureAggrPeriodUnit.setStatus('current')
etsysSrvcLvlAggrMeasureAggrPeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 7), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureAggrPeriod.setStatus('current')
etsysSrvcLvlAggrMeasureDurationUnit = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 8), TimeUnit().clone('second')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureDurationUnit.setStatus('current')
etsysSrvcLvlAggrMeasureDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 9), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureDuration.setStatus('current')
etsysSrvcLvlAggrMeasureHistorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 10), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureHistorySize.setStatus('current')
etsysSrvcLvlAggrMeasureStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 11), StorageType().clone('volatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureStorageType.setStatus('current')
etsysSrvcLvlAggrMeasureResultsMgmt = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("wrap", 1), ("suspend", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureResultsMgmt.setStatus('current')
etsysSrvcLvlAggrMeasureHistoryOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 13), EtsysSrvcLvlOwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureHistoryOwner.setStatus('current')
etsysSrvcLvlAggrMeasureHistoryOwnerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureHistoryOwnerIndex.setStatus('current')
etsysSrvcLvlAggrMeasureHistoryMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 15), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureHistoryMetric.setStatus('current')
etsysSrvcLvlAggrMeasureAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("start", 0), ("stop", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureAdminState.setStatus('current')
etsysSrvcLvlAggrMeasureMap = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 17), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureMap.setStatus('current')
etsysSrvcLvlAggrMeasureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 18), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: etsysSrvcLvlAggrMeasureStatus.setStatus('current')
etsysSrvcLvlReportingConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2))
etsysSrvcLvlReportingGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1))
etsysSrvcLvlReportingCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 2))
etsysSrvcLvlSystemGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 1)).setObjects(("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlSystemTime"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlSystemClockResolution"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMetricCapabilities"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMetricType"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMetricUnit"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMetricDescription"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysSrvcLvlSystemGroup = etsysSrvcLvlSystemGroup.setStatus('current')
etsysSrvcLvlOwnersGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 2)).setObjects(("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersOwner"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersGrantedMetrics"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersQuota"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersIpAddressType"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersIpAddress"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersEmail"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersSMS"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysSrvcLvlOwnersGroup = etsysSrvcLvlOwnersGroup.setStatus('current')
etsysSrvcLvlHistoryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 3)).setObjects(("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistorySequence"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryTimestamp"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryValue"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysSrvcLvlHistoryGroup = etsysSrvcLvlHistoryGroup.setStatus('current')
etsysSrvcLvlMeasureGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 4)).setObjects(("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureName"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureMetrics"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureBeginTime"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDurationUnit"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDuration"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureHistorySize"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureFailureMgmtMode"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureResultsMgmt"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureSrcTypeP"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureSrc"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDstTypeP"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDst"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureTxMode"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureTxPacketRateUnit"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureTxPacketRate"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDevtnOrBurstSize"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureMedOrIntBurstSize"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureLossTimeout"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureL3PacketSize"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureDataPattern"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureMap"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureSingletons"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlNetMeasureOperState"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureName"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureMetrics"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureBeginTime"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureAggrPeriodUnit"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureAggrPeriod"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureDurationUnit"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureDuration"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureHistorySize"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureStorageType"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureResultsMgmt"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureHistoryOwner"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureHistoryOwnerIndex"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureHistoryMetric"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureAdminState"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureMap"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlAggrMeasureStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysSrvcLvlMeasureGroup = etsysSrvcLvlMeasureGroup.setStatus('current')
etsysSrvcLvlReportingCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 2, 1)).setObjects(("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlSystemGroup"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlOwnersGroup"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlHistoryGroup"), ("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", "etsysSrvcLvlMeasureGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsysSrvcLvlReportingCompliance = etsysSrvcLvlReportingCompliance.setStatus('current')
mibBuilder.exportSymbols("ENTERASYS-SERVICE-LEVEL-REPORTING-MIB", etsysSrvcLvlAggrMeasureHistoryMetric=etsysSrvcLvlAggrMeasureHistoryMetric, etsysSrvcLvlHistoryMeasureIndex=etsysSrvcLvlHistoryMeasureIndex, etsysSrvcLvlNetMeasureName=etsysSrvcLvlNetMeasureName, TimeUnit=TimeUnit, etsysSrvcLvlAggrMeasureStatus=etsysSrvcLvlAggrMeasureStatus, etsysSrvcLvlAggrMeasureMetrics=etsysSrvcLvlAggrMeasureMetrics, etsysSrvcLvlAggrMeasureDuration=etsysSrvcLvlAggrMeasureDuration, etsysServiceLevelReportingMIB=etsysServiceLevelReportingMIB, etsysSrvcLvlNetMeasureIndex=etsysSrvcLvlNetMeasureIndex, etsysSrvcLvlReportingGroups=etsysSrvcLvlReportingGroups, etsysSrvcLvlNetMeasureDuration=etsysSrvcLvlNetMeasureDuration, etsysSrvcLvlHistoryEntry=etsysSrvcLvlHistoryEntry, etsysSrvcLvlAggrMeasureIndex=etsysSrvcLvlAggrMeasureIndex, etsysSrvcLvlOwnersTable=etsysSrvcLvlOwnersTable, etsysSrvcLvlNetMeasureDurationUnit=etsysSrvcLvlNetMeasureDurationUnit, EtsysSrvcLvlOwnerString=EtsysSrvcLvlOwnerString, etsysSrvcLvlNetMeasureSrcTypeP=etsysSrvcLvlNetMeasureSrcTypeP, etsysSrvcLvlAggrMeasureBeginTime=etsysSrvcLvlAggrMeasureBeginTime, etsysSrvcLvlSystemClockResolution=etsysSrvcLvlSystemClockResolution, etsysSrvcLvlHistory=etsysSrvcLvlHistory, etsysSrvcLvlConfigObjects=etsysSrvcLvlConfigObjects, etsysSrvcLvlHistoryMetricIndex=etsysSrvcLvlHistoryMetricIndex, PYSNMP_MODULE_ID=etsysServiceLevelReportingMIB, TypePaddress=TypePaddress, etsysSrvcLvlNetMeasureHistorySize=etsysSrvcLvlNetMeasureHistorySize, etsysSrvcLvlReportingConformance=etsysSrvcLvlReportingConformance, etsysSrvcLvlNetMeasureFailureMgmtMode=etsysSrvcLvlNetMeasureFailureMgmtMode, etsysSrvcLvlAggrMeasureMap=etsysSrvcLvlAggrMeasureMap, etsysSrvcLvlNetMeasureMetrics=etsysSrvcLvlNetMeasureMetrics, etsysSrvcLvlNetMeasureOwner=etsysSrvcLvlNetMeasureOwner, etsysSrvcLvlAggrMeasureHistorySize=etsysSrvcLvlAggrMeasureHistorySize, etsysSrvcLvlNetMeasureDevtnOrBurstSize=etsysSrvcLvlNetMeasureDevtnOrBurstSize, etsysSrvcLvlNetMeasureEntry=etsysSrvcLvlNetMeasureEntry, etsysSrvcLvlNetMeasureTxPacketRate=etsysSrvcLvlNetMeasureTxPacketRate, etsysSrvcLvlAggrMeasureOwner=etsysSrvcLvlAggrMeasureOwner, etsysSrvcLvlHistoryTimestamp=etsysSrvcLvlHistoryTimestamp, etsysSrvcLvlOwnersEmail=etsysSrvcLvlOwnersEmail, etsysSrvcLvlAggrMeasureTable=etsysSrvcLvlAggrMeasureTable, etsysSrvcLvlOwnersGroup=etsysSrvcLvlOwnersGroup, etsysSrvcLvlOwnersSMS=etsysSrvcLvlOwnersSMS, etsysSrvcLvlNetMeasureTable=etsysSrvcLvlNetMeasureTable, EtsysSrvcLvlStandardMetrics=EtsysSrvcLvlStandardMetrics, etsysSrvcLvlMetricIndex=etsysSrvcLvlMetricIndex, etsysSrvcLvlOwnersStatus=etsysSrvcLvlOwnersStatus, etsysSrvcLvlHistorySequence=etsysSrvcLvlHistorySequence, etsysSrvcLvlHistoryGroup=etsysSrvcLvlHistoryGroup, etsysSrvcLvlAggrMeasureAggrPeriod=etsysSrvcLvlAggrMeasureAggrPeriod, etsysSrvcLvlNetMeasureTxPacketRateUnit=etsysSrvcLvlNetMeasureTxPacketRateUnit, etsysSrvcLvlOwnersOwner=etsysSrvcLvlOwnersOwner, etsysSrvcLvlAggrMeasureEntry=etsysSrvcLvlAggrMeasureEntry, etsysSrvcLvlNetMeasureL3PacketSize=etsysSrvcLvlNetMeasureL3PacketSize, etsysSrvcLvlNetMeasureSrc=etsysSrvcLvlNetMeasureSrc, etsysSrvcLvlHistoryIndex=etsysSrvcLvlHistoryIndex, etsysSrvcLvlReportingCompliance=etsysSrvcLvlReportingCompliance, etsysSrvcLvlMetricTable=etsysSrvcLvlMetricTable, etsysSrvcLvlOwnersIpAddressType=etsysSrvcLvlOwnersIpAddressType, etsysSrvcLvlOwnersGrantedMetrics=etsysSrvcLvlOwnersGrantedMetrics, etsysSrvcLvlMeasure=etsysSrvcLvlMeasure, etsysSrvcLvlNetMeasureMap=etsysSrvcLvlNetMeasureMap, etsysSrvcLvlNetMeasureMedOrIntBurstSize=etsysSrvcLvlNetMeasureMedOrIntBurstSize, etsysSrvcLvlAggrMeasureResultsMgmt=etsysSrvcLvlAggrMeasureResultsMgmt, etsysSrvcLvlAggrMeasureAggrPeriodUnit=etsysSrvcLvlAggrMeasureAggrPeriodUnit, etsysSrvcLvlOwnersEntry=etsysSrvcLvlOwnersEntry, etsysSrvcLvlHistoryValue=etsysSrvcLvlHistoryValue, etsysSrvcLvlAggrMeasureHistoryOwnerIndex=etsysSrvcLvlAggrMeasureHistoryOwnerIndex, etsysSrvcLvlNetMeasureDataPattern=etsysSrvcLvlNetMeasureDataPattern, etsysSrvcLvlNetMeasureTxMode=etsysSrvcLvlNetMeasureTxMode, etsysSrvcLvlMetricType=etsysSrvcLvlMetricType, etsysSrvcLvlReportingCompliances=etsysSrvcLvlReportingCompliances, etsysSrvcLvlOwnersQuota=etsysSrvcLvlOwnersQuota, etsysSrvcLvlAggrMeasureName=etsysSrvcLvlAggrMeasureName, etsysSrvcLvlMetricCapabilities=etsysSrvcLvlMetricCapabilities, etsysSrvcLvlNetMeasureLossTimeout=etsysSrvcLvlNetMeasureLossTimeout, GMTTimeStamp=GMTTimeStamp, etsysSrvcLvlMetricEntry=etsysSrvcLvlMetricEntry, etsysSrvcLvlOwnersIpAddress=etsysSrvcLvlOwnersIpAddress, etsysSrvcLvlOwners=etsysSrvcLvlOwners, etsysSrvcLvlMeasureGroup=etsysSrvcLvlMeasureGroup, etsysSrvcLvlAggrMeasureDurationUnit=etsysSrvcLvlAggrMeasureDurationUnit, etsysSrvcLvlMetricUnit=etsysSrvcLvlMetricUnit, etsysSrvcLvlNetMeasureSingletons=etsysSrvcLvlNetMeasureSingletons, etsysSrvcLvlNetMeasureDstTypeP=etsysSrvcLvlNetMeasureDstTypeP, etsysSrvcLvlHistoryMeasureOwner=etsysSrvcLvlHistoryMeasureOwner, etsysSrvcLvlSystemGroup=etsysSrvcLvlSystemGroup, etsysSrvcLvlSystem=etsysSrvcLvlSystem, etsysSrvcLvlHistoryTable=etsysSrvcLvlHistoryTable, etsysSrvcLvlNetMeasureBeginTime=etsysSrvcLvlNetMeasureBeginTime, etsysSrvcLvlSystemTime=etsysSrvcLvlSystemTime, etsysSrvcLvlOwnersIndex=etsysSrvcLvlOwnersIndex, etsysSrvcLvlMetricDescription=etsysSrvcLvlMetricDescription, etsysSrvcLvlNetMeasureOperState=etsysSrvcLvlNetMeasureOperState, etsysSrvcLvlAggrMeasureHistoryOwner=etsysSrvcLvlAggrMeasureHistoryOwner, etsysSrvcLvlNetMeasureResultsMgmt=etsysSrvcLvlNetMeasureResultsMgmt, etsysSrvcLvlAggrMeasureAdminState=etsysSrvcLvlAggrMeasureAdminState, TypeP=TypeP, etsysSrvcLvlAggrMeasureStorageType=etsysSrvcLvlAggrMeasureStorageType, etsysSrvcLvlNetMeasureDst=etsysSrvcLvlNetMeasureDst)
|
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_range_constraint, value_size_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion')
(etsys_modules,) = mibBuilder.importSymbols('ENTERASYS-MIB-NAMES', 'etsysModules')
(inet_address, inet_address_type) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetAddress', 'InetAddressType')
(snmp_admin_string,) = mibBuilder.importSymbols('SNMP-FRAMEWORK-MIB', 'SnmpAdminString')
(object_group, notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'ObjectGroup', 'NotificationGroup', 'ModuleCompliance')
(integer32, mib_scalar, mib_table, mib_table_row, mib_table_column, ip_address, mib_identifier, unsigned32, counter32, gauge32, iso, module_identity, object_identity, counter64, time_ticks, notification_type, bits) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'IpAddress', 'MibIdentifier', 'Unsigned32', 'Counter32', 'Gauge32', 'iso', 'ModuleIdentity', 'ObjectIdentity', 'Counter64', 'TimeTicks', 'NotificationType', 'Bits')
(textual_convention, row_status, storage_type, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'RowStatus', 'StorageType', 'DisplayString')
etsys_service_level_reporting_mib = module_identity((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39))
etsysServiceLevelReportingMIB.setRevisions(('2003-11-06 15:15', '2003-10-24 19:02', '2003-10-22 23:32'))
if mibBuilder.loadTexts:
etsysServiceLevelReportingMIB.setLastUpdated('200311061515Z')
if mibBuilder.loadTexts:
etsysServiceLevelReportingMIB.setOrganization('Enterasys Networks Inc.')
class Etsyssrvclvlownerstring(TextualConvention, OctetString):
status = 'current'
subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 32)
class Timeunit(TextualConvention, Integer32):
status = 'current'
subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9))
named_values = named_values(('year', 1), ('month', 2), ('week', 3), ('day', 4), ('hour', 5), ('second', 6), ('millisecond', 7), ('microsecond', 8), ('nanosecond', 9))
class Etsyssrvclvlstandardmetrics(TextualConvention, Bits):
status = 'current'
named_values = named_values(('reserved', 0), ('instantUnidirectionConnectivity', 1), ('instantBidirectionConnectivity', 2), ('intervalUnidirectionConnectivity', 3), ('intervalBidirectionConnectivity', 4), ('intervalTemporalConnectivity', 5), ('oneWayDelay', 6), ('oneWayDelayPoissonStream', 7), ('oneWayDelayPercentile', 8), ('oneWayDelayMedian', 9), ('oneWayDelayMinimum', 10), ('oneWayDelayInversePercentile', 11), ('oneWayPacketLoss', 12), ('oneWayPacketLossPoissonStream', 13), ('oneWayPacketLossAverage', 14), ('roundtripDelay', 15), ('roundtripDelayPoissonStream', 16), ('roundtripDelayPercentile', 17), ('roundtripDelayMedian', 18), ('roundtripDelayMinimum', 19), ('roundtripDelayInversePercentile', 20), ('oneWayLossDistanceStream', 21), ('oneWayLossPeriodStream', 22), ('oneWayLossNoticeableRate', 23), ('oneWayLossPeriodTotal', 24), ('oneWayLossPeriodLengths', 25), ('oneWayInterLossPeriodLengths', 26), ('oneWayIpdv', 27), ('oneWayIpdvPoissonStream', 28), ('oneWayIpdvPercentile', 29), ('oneWayIpdvInversePercentile', 30), ('oneWayIpdvJitter', 31), ('oneWayPeakToPeakIpdv', 32), ('oneWayDelayPeriodicStream', 33), ('roundtripDelayAverage', 34), ('roundtripPacketLoss', 35), ('roundtripPacketLossAverage', 36), ('roundtripIpdv', 37))
class Gmttimestamp(TextualConvention, OctetString):
status = 'current'
subtype_spec = OctetString.subtypeSpec + value_size_constraint(8, 8)
fixed_length = 8
class Typep(TextualConvention, OctetString):
status = 'current'
subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 512)
class Typepaddress(TextualConvention, OctetString):
status = 'current'
display_hint = '255a'
subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 512)
etsys_srvc_lvl_config_objects = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1))
etsys_srvc_lvl_system = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1))
etsys_srvc_lvl_owners = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2))
etsys_srvc_lvl_history = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3))
etsys_srvc_lvl_measure = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4))
etsys_srvc_lvl_system_time = mib_scalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 1), gmt_time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlSystemTime.setStatus('current')
etsys_srvc_lvl_system_clock_resolution = mib_scalar((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 2), integer32()).setUnits('picoseconds').setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlSystemClockResolution.setStatus('current')
etsys_srvc_lvl_metric_table = mib_table((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3))
if mibBuilder.loadTexts:
etsysSrvcLvlMetricTable.setStatus('current')
etsys_srvc_lvl_metric_entry = mib_table_row((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1)).setIndexNames((0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMetricIndex'))
if mibBuilder.loadTexts:
etsysSrvcLvlMetricEntry.setStatus('current')
etsys_srvc_lvl_metric_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37))).clone(namedValues=named_values(('instantUnidirectionConnectivity', 1), ('instantBidirectionConnectivity', 2), ('intervalUnidirectionConnectivity', 3), ('intervalBidirectionConnectivity', 4), ('intervalTemporalConnectivity', 5), ('oneWayDelay', 6), ('oneWayDelayPoissonStream', 7), ('oneWayDelayPercentile', 8), ('oneWayDelayMedian', 9), ('oneWayDelayMinimum', 10), ('oneWayDelayInversePercentile', 11), ('oneWayPacketLoss', 12), ('oneWayPacketLossPoissonStream', 13), ('oneWayPacketLossAverage', 14), ('roundtripDelay', 15), ('roundtripDelayPoissonStream', 16), ('roundtripDelayPercentile', 17), ('roundtripDelayMedian', 18), ('roundtripDelayMinimum', 19), ('roundtripDelayInversePercentile', 20), ('oneWayLossDistanceStream', 21), ('oneWayLossPeriodStream', 22), ('oneWayLossNoticeableRate', 23), ('oneWayLossPeriodTotal', 24), ('oneWayLossPeriodLengths', 25), ('oneWayInterLossPeriodLengths', 26), ('oneWayIpdv', 27), ('oneWayIpdvPoissonStream', 28), ('oneWayIpdvPercentile', 29), ('oneWayIpdvInversePercentile', 30), ('oneWayIpdvJitter', 31), ('oneWayPeakToPeakIpdv', 32), ('oneWayDelayPeriodicStream', 33), ('roundtripDelayAverage', 34), ('roundtripPacketLoss', 35), ('roundtripPacketLossAverage', 36), ('roundtripIpdv', 37))))
if mibBuilder.loadTexts:
etsysSrvcLvlMetricIndex.setStatus('current')
etsys_srvc_lvl_metric_capabilities = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('notImplemented', 0), ('implemented', 1))).clone('implemented')).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlMetricCapabilities.setStatus('current')
etsys_srvc_lvl_metric_type = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('network', 0), ('aggregated', 1))).clone('aggregated')).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlMetricType.setStatus('current')
etsys_srvc_lvl_metric_unit = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=named_values(('noUnit', 0), ('second', 1), ('millisecond', 2), ('microsecond', 3), ('nanosecond', 4), ('percentage', 5), ('packet', 6), ('byte', 7), ('kilobyte', 8), ('megabyte', 9)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlMetricUnit.setStatus('current')
etsys_srvc_lvl_metric_description = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 1, 3, 1, 5), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlMetricDescription.setStatus('current')
etsys_srvc_lvl_owners_table = mib_table((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1))
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersTable.setStatus('current')
etsys_srvc_lvl_owners_entry = mib_table_row((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1)).setIndexNames((0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersIndex'))
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersEntry.setStatus('current')
etsys_srvc_lvl_owners_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersIndex.setStatus('current')
etsys_srvc_lvl_owners_owner = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 2), etsys_srvc_lvl_owner_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersOwner.setStatus('current')
etsys_srvc_lvl_owners_granted_metrics = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 3), etsys_srvc_lvl_standard_metrics()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersGrantedMetrics.setStatus('current')
etsys_srvc_lvl_owners_quota = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 4), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersQuota.setStatus('current')
etsys_srvc_lvl_owners_ip_address_type = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 5), inet_address_type()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersIpAddressType.setStatus('current')
etsys_srvc_lvl_owners_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 6), inet_address().subtype(subtypeSpec=value_size_constraint(1, 128))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersIpAddress.setStatus('current')
etsys_srvc_lvl_owners_email = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 7), snmp_admin_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersEmail.setStatus('current')
etsys_srvc_lvl_owners_sms = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 8), snmp_admin_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersSMS.setStatus('current')
etsys_srvc_lvl_owners_status = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 2, 1, 1, 9), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlOwnersStatus.setStatus('current')
etsys_srvc_lvl_history_table = mib_table((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1))
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryTable.setStatus('current')
etsys_srvc_lvl_history_entry = mib_table_row((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1)).setIndexNames((0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryMeasureOwner'), (0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryMeasureIndex'), (0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryMetricIndex'), (0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryIndex'))
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryEntry.setStatus('current')
etsys_srvc_lvl_history_measure_owner = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 1), etsys_srvc_lvl_owner_string())
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryMeasureOwner.setStatus('current')
etsys_srvc_lvl_history_measure_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryMeasureIndex.setStatus('current')
etsys_srvc_lvl_history_metric_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 3), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryMetricIndex.setStatus('current')
etsys_srvc_lvl_history_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 4), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryIndex.setStatus('current')
etsys_srvc_lvl_history_sequence = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 5), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlHistorySequence.setStatus('current')
etsys_srvc_lvl_history_timestamp = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 6), gmt_time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryTimestamp.setStatus('current')
etsys_srvc_lvl_history_value = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 3, 1, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlHistoryValue.setStatus('current')
etsys_srvc_lvl_net_measure_table = mib_table((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1))
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureTable.setStatus('current')
etsys_srvc_lvl_net_measure_entry = mib_table_row((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1)).setIndexNames((0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureOwner'), (0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureIndex'))
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureEntry.setStatus('current')
etsys_srvc_lvl_net_measure_owner = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 1), etsys_srvc_lvl_owner_string())
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureOwner.setStatus('current')
etsys_srvc_lvl_net_measure_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureIndex.setStatus('current')
etsys_srvc_lvl_net_measure_name = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 3), snmp_admin_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureName.setStatus('current')
etsys_srvc_lvl_net_measure_metrics = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 4), etsys_srvc_lvl_standard_metrics()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureMetrics.setStatus('current')
etsys_srvc_lvl_net_measure_begin_time = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 5), gmt_time_stamp()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureBeginTime.setStatus('current')
etsys_srvc_lvl_net_measure_duration_unit = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 6), time_unit().clone('second')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDurationUnit.setStatus('current')
etsys_srvc_lvl_net_measure_duration = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 7), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDuration.setStatus('current')
etsys_srvc_lvl_net_measure_history_size = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 8), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureHistorySize.setStatus('current')
etsys_srvc_lvl_net_measure_failure_mgmt_mode = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('auto', 1), ('manual', 2), ('discarded', 3))).clone('auto')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureFailureMgmtMode.setStatus('current')
etsys_srvc_lvl_net_measure_results_mgmt = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 10), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('wrap', 1), ('suspend', 2), ('delete', 3)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureResultsMgmt.setStatus('current')
etsys_srvc_lvl_net_measure_src_type_p = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 11), type_p().clone('ip')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureSrcTypeP.setStatus('current')
etsys_srvc_lvl_net_measure_src = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 12), type_paddress()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureSrc.setStatus('current')
etsys_srvc_lvl_net_measure_dst_type_p = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 13), type_p().clone('ip')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDstTypeP.setStatus('current')
etsys_srvc_lvl_net_measure_dst = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 14), type_paddress()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDst.setStatus('current')
etsys_srvc_lvl_net_measure_tx_mode = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 15), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3))).clone(namedValues=named_values(('other', 0), ('periodic', 1), ('poisson', 2), ('multiburst', 3)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureTxMode.setStatus('current')
etsys_srvc_lvl_net_measure_tx_packet_rate_unit = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 16), time_unit()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureTxPacketRateUnit.setStatus('current')
etsys_srvc_lvl_net_measure_tx_packet_rate = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 17), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureTxPacketRate.setStatus('current')
etsys_srvc_lvl_net_measure_devtn_or_burst_size = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 18), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDevtnOrBurstSize.setStatus('current')
etsys_srvc_lvl_net_measure_med_or_int_burst_size = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 19), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureMedOrIntBurstSize.setStatus('current')
etsys_srvc_lvl_net_measure_loss_timeout = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 20), integer32()).setUnits('Milliseconds').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureLossTimeout.setStatus('current')
etsys_srvc_lvl_net_measure_l3_packet_size = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 21), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureL3PacketSize.setStatus('current')
etsys_srvc_lvl_net_measure_data_pattern = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 22), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureDataPattern.setStatus('current')
etsys_srvc_lvl_net_measure_map = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 23), snmp_admin_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureMap.setStatus('current')
etsys_srvc_lvl_net_measure_singletons = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 24), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureSingletons.setStatus('current')
etsys_srvc_lvl_net_measure_oper_state = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 1, 1, 25), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('unknown', 0), ('running', 1), ('stopped', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlNetMeasureOperState.setStatus('current')
etsys_srvc_lvl_aggr_measure_table = mib_table((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2))
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureTable.setStatus('current')
etsys_srvc_lvl_aggr_measure_entry = mib_table_row((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1)).setIndexNames((0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureOwner'), (0, 'ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureIndex'))
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureEntry.setStatus('current')
etsys_srvc_lvl_aggr_measure_owner = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 1), etsys_srvc_lvl_owner_string())
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureOwner.setStatus('current')
etsys_srvc_lvl_aggr_measure_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535)))
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureIndex.setStatus('current')
etsys_srvc_lvl_aggr_measure_name = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 3), snmp_admin_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureName.setStatus('current')
etsys_srvc_lvl_aggr_measure_metrics = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 4), etsys_srvc_lvl_standard_metrics()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureMetrics.setStatus('current')
etsys_srvc_lvl_aggr_measure_begin_time = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 5), gmt_time_stamp()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureBeginTime.setStatus('current')
etsys_srvc_lvl_aggr_measure_aggr_period_unit = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 6), time_unit().clone('second')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureAggrPeriodUnit.setStatus('current')
etsys_srvc_lvl_aggr_measure_aggr_period = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 7), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureAggrPeriod.setStatus('current')
etsys_srvc_lvl_aggr_measure_duration_unit = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 8), time_unit().clone('second')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureDurationUnit.setStatus('current')
etsys_srvc_lvl_aggr_measure_duration = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 9), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureDuration.setStatus('current')
etsys_srvc_lvl_aggr_measure_history_size = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 10), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureHistorySize.setStatus('current')
etsys_srvc_lvl_aggr_measure_storage_type = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 11), storage_type().clone('volatile')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureStorageType.setStatus('current')
etsys_srvc_lvl_aggr_measure_results_mgmt = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 12), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('wrap', 1), ('suspend', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureResultsMgmt.setStatus('current')
etsys_srvc_lvl_aggr_measure_history_owner = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 13), etsys_srvc_lvl_owner_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureHistoryOwner.setStatus('current')
etsys_srvc_lvl_aggr_measure_history_owner_index = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 14), integer32().subtype(subtypeSpec=value_range_constraint(1, 65535))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureHistoryOwnerIndex.setStatus('current')
etsys_srvc_lvl_aggr_measure_history_metric = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 15), integer32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureHistoryMetric.setStatus('current')
etsys_srvc_lvl_aggr_measure_admin_state = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 16), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('start', 0), ('stop', 1)))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureAdminState.setStatus('current')
etsys_srvc_lvl_aggr_measure_map = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 17), snmp_admin_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureMap.setStatus('current')
etsys_srvc_lvl_aggr_measure_status = mib_table_column((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 1, 4, 2, 1, 18), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
etsysSrvcLvlAggrMeasureStatus.setStatus('current')
etsys_srvc_lvl_reporting_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2))
etsys_srvc_lvl_reporting_groups = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1))
etsys_srvc_lvl_reporting_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 2))
etsys_srvc_lvl_system_group = object_group((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 1)).setObjects(('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlSystemTime'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlSystemClockResolution'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMetricCapabilities'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMetricType'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMetricUnit'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMetricDescription'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsys_srvc_lvl_system_group = etsysSrvcLvlSystemGroup.setStatus('current')
etsys_srvc_lvl_owners_group = object_group((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 2)).setObjects(('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersOwner'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersGrantedMetrics'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersQuota'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersIpAddressType'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersIpAddress'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersEmail'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersSMS'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsys_srvc_lvl_owners_group = etsysSrvcLvlOwnersGroup.setStatus('current')
etsys_srvc_lvl_history_group = object_group((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 3)).setObjects(('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistorySequence'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryTimestamp'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryValue'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsys_srvc_lvl_history_group = etsysSrvcLvlHistoryGroup.setStatus('current')
etsys_srvc_lvl_measure_group = object_group((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 1, 4)).setObjects(('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureName'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureMetrics'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureBeginTime'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDurationUnit'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDuration'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureHistorySize'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureFailureMgmtMode'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureResultsMgmt'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureSrcTypeP'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureSrc'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDstTypeP'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDst'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureTxMode'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureTxPacketRateUnit'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureTxPacketRate'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDevtnOrBurstSize'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureMedOrIntBurstSize'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureLossTimeout'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureL3PacketSize'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureDataPattern'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureMap'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureSingletons'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlNetMeasureOperState'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureName'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureMetrics'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureBeginTime'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureAggrPeriodUnit'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureAggrPeriod'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureDurationUnit'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureDuration'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureHistorySize'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureStorageType'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureResultsMgmt'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureHistoryOwner'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureHistoryOwnerIndex'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureHistoryMetric'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureAdminState'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureMap'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlAggrMeasureStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsys_srvc_lvl_measure_group = etsysSrvcLvlMeasureGroup.setStatus('current')
etsys_srvc_lvl_reporting_compliance = module_compliance((1, 3, 6, 1, 4, 1, 5624, 1, 2, 39, 2, 2, 1)).setObjects(('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlSystemGroup'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlOwnersGroup'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlHistoryGroup'), ('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', 'etsysSrvcLvlMeasureGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
etsys_srvc_lvl_reporting_compliance = etsysSrvcLvlReportingCompliance.setStatus('current')
mibBuilder.exportSymbols('ENTERASYS-SERVICE-LEVEL-REPORTING-MIB', etsysSrvcLvlAggrMeasureHistoryMetric=etsysSrvcLvlAggrMeasureHistoryMetric, etsysSrvcLvlHistoryMeasureIndex=etsysSrvcLvlHistoryMeasureIndex, etsysSrvcLvlNetMeasureName=etsysSrvcLvlNetMeasureName, TimeUnit=TimeUnit, etsysSrvcLvlAggrMeasureStatus=etsysSrvcLvlAggrMeasureStatus, etsysSrvcLvlAggrMeasureMetrics=etsysSrvcLvlAggrMeasureMetrics, etsysSrvcLvlAggrMeasureDuration=etsysSrvcLvlAggrMeasureDuration, etsysServiceLevelReportingMIB=etsysServiceLevelReportingMIB, etsysSrvcLvlNetMeasureIndex=etsysSrvcLvlNetMeasureIndex, etsysSrvcLvlReportingGroups=etsysSrvcLvlReportingGroups, etsysSrvcLvlNetMeasureDuration=etsysSrvcLvlNetMeasureDuration, etsysSrvcLvlHistoryEntry=etsysSrvcLvlHistoryEntry, etsysSrvcLvlAggrMeasureIndex=etsysSrvcLvlAggrMeasureIndex, etsysSrvcLvlOwnersTable=etsysSrvcLvlOwnersTable, etsysSrvcLvlNetMeasureDurationUnit=etsysSrvcLvlNetMeasureDurationUnit, EtsysSrvcLvlOwnerString=EtsysSrvcLvlOwnerString, etsysSrvcLvlNetMeasureSrcTypeP=etsysSrvcLvlNetMeasureSrcTypeP, etsysSrvcLvlAggrMeasureBeginTime=etsysSrvcLvlAggrMeasureBeginTime, etsysSrvcLvlSystemClockResolution=etsysSrvcLvlSystemClockResolution, etsysSrvcLvlHistory=etsysSrvcLvlHistory, etsysSrvcLvlConfigObjects=etsysSrvcLvlConfigObjects, etsysSrvcLvlHistoryMetricIndex=etsysSrvcLvlHistoryMetricIndex, PYSNMP_MODULE_ID=etsysServiceLevelReportingMIB, TypePaddress=TypePaddress, etsysSrvcLvlNetMeasureHistorySize=etsysSrvcLvlNetMeasureHistorySize, etsysSrvcLvlReportingConformance=etsysSrvcLvlReportingConformance, etsysSrvcLvlNetMeasureFailureMgmtMode=etsysSrvcLvlNetMeasureFailureMgmtMode, etsysSrvcLvlAggrMeasureMap=etsysSrvcLvlAggrMeasureMap, etsysSrvcLvlNetMeasureMetrics=etsysSrvcLvlNetMeasureMetrics, etsysSrvcLvlNetMeasureOwner=etsysSrvcLvlNetMeasureOwner, etsysSrvcLvlAggrMeasureHistorySize=etsysSrvcLvlAggrMeasureHistorySize, etsysSrvcLvlNetMeasureDevtnOrBurstSize=etsysSrvcLvlNetMeasureDevtnOrBurstSize, etsysSrvcLvlNetMeasureEntry=etsysSrvcLvlNetMeasureEntry, etsysSrvcLvlNetMeasureTxPacketRate=etsysSrvcLvlNetMeasureTxPacketRate, etsysSrvcLvlAggrMeasureOwner=etsysSrvcLvlAggrMeasureOwner, etsysSrvcLvlHistoryTimestamp=etsysSrvcLvlHistoryTimestamp, etsysSrvcLvlOwnersEmail=etsysSrvcLvlOwnersEmail, etsysSrvcLvlAggrMeasureTable=etsysSrvcLvlAggrMeasureTable, etsysSrvcLvlOwnersGroup=etsysSrvcLvlOwnersGroup, etsysSrvcLvlOwnersSMS=etsysSrvcLvlOwnersSMS, etsysSrvcLvlNetMeasureTable=etsysSrvcLvlNetMeasureTable, EtsysSrvcLvlStandardMetrics=EtsysSrvcLvlStandardMetrics, etsysSrvcLvlMetricIndex=etsysSrvcLvlMetricIndex, etsysSrvcLvlOwnersStatus=etsysSrvcLvlOwnersStatus, etsysSrvcLvlHistorySequence=etsysSrvcLvlHistorySequence, etsysSrvcLvlHistoryGroup=etsysSrvcLvlHistoryGroup, etsysSrvcLvlAggrMeasureAggrPeriod=etsysSrvcLvlAggrMeasureAggrPeriod, etsysSrvcLvlNetMeasureTxPacketRateUnit=etsysSrvcLvlNetMeasureTxPacketRateUnit, etsysSrvcLvlOwnersOwner=etsysSrvcLvlOwnersOwner, etsysSrvcLvlAggrMeasureEntry=etsysSrvcLvlAggrMeasureEntry, etsysSrvcLvlNetMeasureL3PacketSize=etsysSrvcLvlNetMeasureL3PacketSize, etsysSrvcLvlNetMeasureSrc=etsysSrvcLvlNetMeasureSrc, etsysSrvcLvlHistoryIndex=etsysSrvcLvlHistoryIndex, etsysSrvcLvlReportingCompliance=etsysSrvcLvlReportingCompliance, etsysSrvcLvlMetricTable=etsysSrvcLvlMetricTable, etsysSrvcLvlOwnersIpAddressType=etsysSrvcLvlOwnersIpAddressType, etsysSrvcLvlOwnersGrantedMetrics=etsysSrvcLvlOwnersGrantedMetrics, etsysSrvcLvlMeasure=etsysSrvcLvlMeasure, etsysSrvcLvlNetMeasureMap=etsysSrvcLvlNetMeasureMap, etsysSrvcLvlNetMeasureMedOrIntBurstSize=etsysSrvcLvlNetMeasureMedOrIntBurstSize, etsysSrvcLvlAggrMeasureResultsMgmt=etsysSrvcLvlAggrMeasureResultsMgmt, etsysSrvcLvlAggrMeasureAggrPeriodUnit=etsysSrvcLvlAggrMeasureAggrPeriodUnit, etsysSrvcLvlOwnersEntry=etsysSrvcLvlOwnersEntry, etsysSrvcLvlHistoryValue=etsysSrvcLvlHistoryValue, etsysSrvcLvlAggrMeasureHistoryOwnerIndex=etsysSrvcLvlAggrMeasureHistoryOwnerIndex, etsysSrvcLvlNetMeasureDataPattern=etsysSrvcLvlNetMeasureDataPattern, etsysSrvcLvlNetMeasureTxMode=etsysSrvcLvlNetMeasureTxMode, etsysSrvcLvlMetricType=etsysSrvcLvlMetricType, etsysSrvcLvlReportingCompliances=etsysSrvcLvlReportingCompliances, etsysSrvcLvlOwnersQuota=etsysSrvcLvlOwnersQuota, etsysSrvcLvlAggrMeasureName=etsysSrvcLvlAggrMeasureName, etsysSrvcLvlMetricCapabilities=etsysSrvcLvlMetricCapabilities, etsysSrvcLvlNetMeasureLossTimeout=etsysSrvcLvlNetMeasureLossTimeout, GMTTimeStamp=GMTTimeStamp, etsysSrvcLvlMetricEntry=etsysSrvcLvlMetricEntry, etsysSrvcLvlOwnersIpAddress=etsysSrvcLvlOwnersIpAddress, etsysSrvcLvlOwners=etsysSrvcLvlOwners, etsysSrvcLvlMeasureGroup=etsysSrvcLvlMeasureGroup, etsysSrvcLvlAggrMeasureDurationUnit=etsysSrvcLvlAggrMeasureDurationUnit, etsysSrvcLvlMetricUnit=etsysSrvcLvlMetricUnit, etsysSrvcLvlNetMeasureSingletons=etsysSrvcLvlNetMeasureSingletons, etsysSrvcLvlNetMeasureDstTypeP=etsysSrvcLvlNetMeasureDstTypeP, etsysSrvcLvlHistoryMeasureOwner=etsysSrvcLvlHistoryMeasureOwner, etsysSrvcLvlSystemGroup=etsysSrvcLvlSystemGroup, etsysSrvcLvlSystem=etsysSrvcLvlSystem, etsysSrvcLvlHistoryTable=etsysSrvcLvlHistoryTable, etsysSrvcLvlNetMeasureBeginTime=etsysSrvcLvlNetMeasureBeginTime, etsysSrvcLvlSystemTime=etsysSrvcLvlSystemTime, etsysSrvcLvlOwnersIndex=etsysSrvcLvlOwnersIndex, etsysSrvcLvlMetricDescription=etsysSrvcLvlMetricDescription, etsysSrvcLvlNetMeasureOperState=etsysSrvcLvlNetMeasureOperState, etsysSrvcLvlAggrMeasureHistoryOwner=etsysSrvcLvlAggrMeasureHistoryOwner, etsysSrvcLvlNetMeasureResultsMgmt=etsysSrvcLvlNetMeasureResultsMgmt, etsysSrvcLvlAggrMeasureAdminState=etsysSrvcLvlAggrMeasureAdminState, TypeP=TypeP, etsysSrvcLvlAggrMeasureStorageType=etsysSrvcLvlAggrMeasureStorageType, etsysSrvcLvlNetMeasureDst=etsysSrvcLvlNetMeasureDst)
|
#
# @lc app=leetcode id=4 lang=python3
#
# [4] Median of Two Sorted Arrays
#
# https://leetcode.com/problems/median-of-two-sorted-arrays/description/
#
# algorithms
# Hard (30.86%)
# Likes: 9316
# Dislikes: 1441
# Total Accepted: 872.2K
# Total Submissions: 2.8M
# Testcase Example: '[1,3]\n[2]'
#
# Given two sorted arrays nums1 and nums2 of size m and n respectively, return
# the median of the two sorted arrays.
#
#
# Example 1:
#
#
# Input: nums1 = [1,3], nums2 = [2]
# Output: 2.00000
# Explanation: merged array = [1,2,3] and median is 2.
#
#
# Example 2:
#
#
# Input: nums1 = [1,2], nums2 = [3,4]
# Output: 2.50000
# Explanation: merged array = [1,2,3,4] and median is (2 + 3) / 2 = 2.5.
#
#
# Example 3:
#
#
# Input: nums1 = [0,0], nums2 = [0,0]
# Output: 0.00000
#
#
# Example 4:
#
#
# Input: nums1 = [], nums2 = [1]
# Output: 1.00000
#
#
# Example 5:
#
#
# Input: nums1 = [2], nums2 = []
# Output: 2.00000
#
#
#
# Constraints:
#
#
# nums1.length == m
# nums2.length == n
# 0 <= m <= 1000
# 0 <= n <= 1000
# 1 <= m + n <= 2000
# -10^6 <= nums1[i], nums2[i] <= 10^6
#
#
#
# Follow up: The overall run time complexity should be O(log (m+n)).
#
# @lc code=start
class Solution_QuickSelect:
def findMedianSortedArrays(self, nums1: List[int], nums2: List[int]) -> float:
nums = nums1[:] + nums2[:]
length = len(nums)
if length % 2 == 0:
return (self._quick_select(nums, 0, length - 1, length // 2 + 1) + self._quick_select(nums, 0, length - 1, (length - 1) // 2 + 1)) / 2
else:
return self._quick_select(nums, 0, length - 1, length // 2 + 1)
def _quick_select(self, nums, start, end, k):
left, right = start, end
pivot = nums[(left + right) // 2]
while left <= right:
while left <= right and nums[left] > pivot:
left += 1
while left <= right and nums[right] < pivot:
right -= 1
if left <= right:
nums[left], nums[right] = nums[right], nums[left]
left += 1
right -= 1
if start + k - 1 <= right:
return self._quick_select(nums, start, right, k)
if start + k - 1 >= left:
return self._quick_select(nums, left, end, k - (left - start))
return nums[right + 1]
class Solution:
def findMedianSortedArrays(self, nums1: List[int], nums2: List[int]) -> float:
if len(nums1) > len(nums2):
return self.findMedianSortedArrays(nums2, nums1)
l1, l2 = len(nums1), len(nums2)
left, right = 0, l1
while left <= right:
position_x = (left + right) // 2
position_y = (l1 + l2 + 1) // 2 - position_x
max_left_x = nums1[position_x - 1] if position_x != 0 else float('-inf')
max_left_y = nums2[position_y - 1] if position_y != 0 else float('-inf')
min_right_x = nums1[position_x] if position_x < l1 else float('inf')
min_right_y = nums2[position_y] if position_y < l2 else float('inf')
if (max_left_x <= min_right_y and max_left_y <= min_right_x):
# we found the partition
if (l1 + l2) % 2 == 0:
return (max(max_left_x, max_left_y) + min(min_right_x, min_right_y)) / 2
else:
return max(max_left_x, max_left_y)
elif max_left_x > min_right_y:
# we should move left
right = position_x - 1
else:
left = position_x + 1
return 0
# @lc code=end
|
class Solution_Quickselect:
def find_median_sorted_arrays(self, nums1: List[int], nums2: List[int]) -> float:
nums = nums1[:] + nums2[:]
length = len(nums)
if length % 2 == 0:
return (self._quick_select(nums, 0, length - 1, length // 2 + 1) + self._quick_select(nums, 0, length - 1, (length - 1) // 2 + 1)) / 2
else:
return self._quick_select(nums, 0, length - 1, length // 2 + 1)
def _quick_select(self, nums, start, end, k):
(left, right) = (start, end)
pivot = nums[(left + right) // 2]
while left <= right:
while left <= right and nums[left] > pivot:
left += 1
while left <= right and nums[right] < pivot:
right -= 1
if left <= right:
(nums[left], nums[right]) = (nums[right], nums[left])
left += 1
right -= 1
if start + k - 1 <= right:
return self._quick_select(nums, start, right, k)
if start + k - 1 >= left:
return self._quick_select(nums, left, end, k - (left - start))
return nums[right + 1]
class Solution:
def find_median_sorted_arrays(self, nums1: List[int], nums2: List[int]) -> float:
if len(nums1) > len(nums2):
return self.findMedianSortedArrays(nums2, nums1)
(l1, l2) = (len(nums1), len(nums2))
(left, right) = (0, l1)
while left <= right:
position_x = (left + right) // 2
position_y = (l1 + l2 + 1) // 2 - position_x
max_left_x = nums1[position_x - 1] if position_x != 0 else float('-inf')
max_left_y = nums2[position_y - 1] if position_y != 0 else float('-inf')
min_right_x = nums1[position_x] if position_x < l1 else float('inf')
min_right_y = nums2[position_y] if position_y < l2 else float('inf')
if max_left_x <= min_right_y and max_left_y <= min_right_x:
if (l1 + l2) % 2 == 0:
return (max(max_left_x, max_left_y) + min(min_right_x, min_right_y)) / 2
else:
return max(max_left_x, max_left_y)
elif max_left_x > min_right_y:
right = position_x - 1
else:
left = position_x + 1
return 0
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def recoverFromPreorder(self, S: str) -> TreeNode:
def dfs(parent, s, lev):
print(parent.val, s, lev)
if not s: return
i = lev
l = 0
while i < len(s) and s[i].isdigit():
l = l * 10 + int(s[i])
i += 1
parent.left = TreeNode(l)
j = lev
f = '-' * lev
for ind in range(i, len(s)):
if s[ind:].startswith(f) and not s[ind:].startswith(f + '-') and s[ind -1] != '-':
rr = ind
j = ind + lev
r = 0
while j < len(s) and s[j].isdigit():
r = r * 10 + int(s[j])
j += 1
parent.right = TreeNode(r)
dfs(parent.left, s[i:rr], lev + 1)
dfs(parent.right, s[j:], lev + 1)
return
dfs(parent.left, s[i:], lev + 1)
i = num = 0
while i < len(S) and S[i].isdigit():
num = num * 10 + int(S[i])
i += 1
root = TreeNode(num)
dfs(root, S[i:], 1)
return root
|
class Solution:
def recover_from_preorder(self, S: str) -> TreeNode:
def dfs(parent, s, lev):
print(parent.val, s, lev)
if not s:
return
i = lev
l = 0
while i < len(s) and s[i].isdigit():
l = l * 10 + int(s[i])
i += 1
parent.left = tree_node(l)
j = lev
f = '-' * lev
for ind in range(i, len(s)):
if s[ind:].startswith(f) and (not s[ind:].startswith(f + '-')) and (s[ind - 1] != '-'):
rr = ind
j = ind + lev
r = 0
while j < len(s) and s[j].isdigit():
r = r * 10 + int(s[j])
j += 1
parent.right = tree_node(r)
dfs(parent.left, s[i:rr], lev + 1)
dfs(parent.right, s[j:], lev + 1)
return
dfs(parent.left, s[i:], lev + 1)
i = num = 0
while i < len(S) and S[i].isdigit():
num = num * 10 + int(S[i])
i += 1
root = tree_node(num)
dfs(root, S[i:], 1)
return root
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
n=int(input())
country_name=set(input() for i in range(n))
print(len(country_name))
|
n = int(input())
country_name = set((input() for i in range(n)))
print(len(country_name))
|
OP_CODES = {
"inp": 0,
"cla": 1,
"add": 2,
"tac": 3,
"sft": 4,
"out": 5,
"sto": 6,
"sub": 7,
"jmp": 8,
"hlt": 9,
"mul": 10,
"div": 11,
"noop": 12
}
class InstructionError(Exception):
pass
class Assembler(object):
def __init__(self, inputfile):
self.contents = [line.rstrip('\n') for line in inputfile.readlines()]
self.generated_records = ["002", "800"]
self.data_p = 4
self.code_p = 10
# key:value => label:addr
self.symbol_table = {}
def first_pass(self):
""" Collect all symbols in the first pass. """
code_p = self.code_p
data_p = self.data_p
for line in self.contents:
tks = [tk.lower() for tk in line.split()]
#: pass space or tab
if not tks:
continue
#: label
if tks[0] not in OP_CODES and len(tks) >= 3:
label_name = tks[0]
if tks[1] == 'data':
self.symbol_table[label_name] = data_p
else:
self.symbol_table[label_name] = code_p
tks.remove(tks[0])
if len(tks) >= 2 and tks[0] in OP_CODES:
code_p += 1
if len(tks) >= 2 and tks[0] == 'data':
data_p += 1
def second_pass(self):
for line in self.contents:
tks = [tk.lower() for tk in line.split()]
#: pass space or tab
if not tks:
continue
#: label
if tks[0] not in OP_CODES and len(tks) >= 3:
tks.remove(tks[0])
#: data
if len(tks) >= 2 and tks[0] == 'data':
self.generated_records.append(self.pad(self.data_p))
self.generated_records.append(tks[1])
self.data_p += 1
continue
#: instruction
if len(tks) >= 2 and tks[0] in OP_CODES:
operation = tks[0]
address = tks[1]
op = str(OP_CODES[operation])
if address in self.symbol_table:
address = self.pad(self.symbol_table[address], length=2)
code = op + address
self.generated_records.append(self.pad(self.code_p))
self.generated_records.append(code)
self.code_p += 1
continue
raise InstructionError("Instruction error: %s" % (tks,))
self.generated_records.append("002")
self.generated_records.append("810")
def assemble(self):
self.first_pass()
self.second_pass()
@staticmethod
def pad(data, length=3):
"""
Pads either an integer or a number in string format with zeros.
"""
padding = '0' * length
data = '%s%s' % (padding, abs(data))
return data[-length:]
|
op_codes = {'inp': 0, 'cla': 1, 'add': 2, 'tac': 3, 'sft': 4, 'out': 5, 'sto': 6, 'sub': 7, 'jmp': 8, 'hlt': 9, 'mul': 10, 'div': 11, 'noop': 12}
class Instructionerror(Exception):
pass
class Assembler(object):
def __init__(self, inputfile):
self.contents = [line.rstrip('\n') for line in inputfile.readlines()]
self.generated_records = ['002', '800']
self.data_p = 4
self.code_p = 10
self.symbol_table = {}
def first_pass(self):
""" Collect all symbols in the first pass. """
code_p = self.code_p
data_p = self.data_p
for line in self.contents:
tks = [tk.lower() for tk in line.split()]
if not tks:
continue
if tks[0] not in OP_CODES and len(tks) >= 3:
label_name = tks[0]
if tks[1] == 'data':
self.symbol_table[label_name] = data_p
else:
self.symbol_table[label_name] = code_p
tks.remove(tks[0])
if len(tks) >= 2 and tks[0] in OP_CODES:
code_p += 1
if len(tks) >= 2 and tks[0] == 'data':
data_p += 1
def second_pass(self):
for line in self.contents:
tks = [tk.lower() for tk in line.split()]
if not tks:
continue
if tks[0] not in OP_CODES and len(tks) >= 3:
tks.remove(tks[0])
if len(tks) >= 2 and tks[0] == 'data':
self.generated_records.append(self.pad(self.data_p))
self.generated_records.append(tks[1])
self.data_p += 1
continue
if len(tks) >= 2 and tks[0] in OP_CODES:
operation = tks[0]
address = tks[1]
op = str(OP_CODES[operation])
if address in self.symbol_table:
address = self.pad(self.symbol_table[address], length=2)
code = op + address
self.generated_records.append(self.pad(self.code_p))
self.generated_records.append(code)
self.code_p += 1
continue
raise instruction_error('Instruction error: %s' % (tks,))
self.generated_records.append('002')
self.generated_records.append('810')
def assemble(self):
self.first_pass()
self.second_pass()
@staticmethod
def pad(data, length=3):
"""
Pads either an integer or a number in string format with zeros.
"""
padding = '0' * length
data = '%s%s' % (padding, abs(data))
return data[-length:]
|
PIPELINE = {
#'PIPELINE_ENABLED': True,
'STYLESHEETS': {
'global': {
'source_filenames': (
'foundation-sites/dist/foundation.min.css',
'pikaday/css/pikaday.css',
'jt.timepicker/jquery.timepicker.css',
'foundation-icon-fonts/foundation-icons.css',
'routes/css/routes.css',
),
'output_filename': 'css/global.css',
'extra_context': {
'media': 'screen,projection',
},
},
},
'JAVASCRIPT': {
'global': {
'source_filenames': (
'jquery/dist/jquery.min.js',
'common/js/csrf.js',
'foundation-sites/dist/foundation.min.js',
'pikaday/pikaday.js',
'pikaday/plugins/pikaday.jquery.js',
'jt.timepicker/jquery.timepicker.min.js',
'moment/min/moment.min.js',
'moment-timezone/builds/moment-timezone-with-data.min.js',
'Chart.js/dist/Chart.min.js',
'vue/dist/vue.min.js',
'routes/js/routes.js',
),
'output_filename': 'js/global.js',
}
}
}
|
pipeline = {'STYLESHEETS': {'global': {'source_filenames': ('foundation-sites/dist/foundation.min.css', 'pikaday/css/pikaday.css', 'jt.timepicker/jquery.timepicker.css', 'foundation-icon-fonts/foundation-icons.css', 'routes/css/routes.css'), 'output_filename': 'css/global.css', 'extra_context': {'media': 'screen,projection'}}}, 'JAVASCRIPT': {'global': {'source_filenames': ('jquery/dist/jquery.min.js', 'common/js/csrf.js', 'foundation-sites/dist/foundation.min.js', 'pikaday/pikaday.js', 'pikaday/plugins/pikaday.jquery.js', 'jt.timepicker/jquery.timepicker.min.js', 'moment/min/moment.min.js', 'moment-timezone/builds/moment-timezone-with-data.min.js', 'Chart.js/dist/Chart.min.js', 'vue/dist/vue.min.js', 'routes/js/routes.js'), 'output_filename': 'js/global.js'}}}
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
class ResourceBase(object):
def __init__(self, **kwargs):
self._location = kwargs.get('location')
self._tags = kwargs.get('tags')
@property
def location(self):
"""
Gets the location of the resource.
"""
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def tags(self):
"""
Gets the tags attached to the resource.
"""
return self._tags
@tags.setter
def tags(self, value):
self._tags = value
class ResourceBaseExtended(ResourceBase):
def __init__(self, **kwargs):
super(ResourceBaseExtended, self).__init__(**kwargs)
self._id = kwargs.get('id')
self._name = kwargs.get('name')
self._type = kwargs.get('type')
@property
def id(self):
"""
Gets the ID of the resource.
"""
return self._id
@id.setter
def id(self, value):
self._id = value
@property
def name(self):
"""
Gets the name of the resource.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def type(self):
"""
Gets the type of the resource.
"""
return self._type
@type.setter
def type(self, value):
self._type = value
|
class Resourcebase(object):
def __init__(self, **kwargs):
self._location = kwargs.get('location')
self._tags = kwargs.get('tags')
@property
def location(self):
"""
Gets the location of the resource.
"""
return self._location
@location.setter
def location(self, value):
self._location = value
@property
def tags(self):
"""
Gets the tags attached to the resource.
"""
return self._tags
@tags.setter
def tags(self, value):
self._tags = value
class Resourcebaseextended(ResourceBase):
def __init__(self, **kwargs):
super(ResourceBaseExtended, self).__init__(**kwargs)
self._id = kwargs.get('id')
self._name = kwargs.get('name')
self._type = kwargs.get('type')
@property
def id(self):
"""
Gets the ID of the resource.
"""
return self._id
@id.setter
def id(self, value):
self._id = value
@property
def name(self):
"""
Gets the name of the resource.
"""
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def type(self):
"""
Gets the type of the resource.
"""
return self._type
@type.setter
def type(self, value):
self._type = value
|
#
# @lc app=leetcode id=430 lang=python3
#
# [430] Flatten a Multilevel Doubly Linked List
#
# @lc code=start
"""
# Definition for a Node.
class Node:
def __init__(self, val, prev, next, child):
self.val = val
self.prev = prev
self.next = next
self.child = child
"""
class Solution:
def flatten(self, head: 'Node') -> 'Node':
if not head:
return
save_ls = []
pointer = head
def traveller(head):
if head.child:
if head.next:
save_ls.append(head.next)
head.next = head.child
head.child = None
head.next.prev = head
head = head.next
elif head.next:
head = head.next
elif save_ls:
newnxt = save_ls.pop()
head.next = newnxt
newnxt.prev = head
head = newnxt
else:
return
traveller(head)
traveller(pointer)
return head
# @lc code=end
|
"""
# Definition for a Node.
class Node:
def __init__(self, val, prev, next, child):
self.val = val
self.prev = prev
self.next = next
self.child = child
"""
class Solution:
def flatten(self, head: 'Node') -> 'Node':
if not head:
return
save_ls = []
pointer = head
def traveller(head):
if head.child:
if head.next:
save_ls.append(head.next)
head.next = head.child
head.child = None
head.next.prev = head
head = head.next
elif head.next:
head = head.next
elif save_ls:
newnxt = save_ls.pop()
head.next = newnxt
newnxt.prev = head
head = newnxt
else:
return
traveller(head)
traveller(pointer)
return head
|
TITLE = "Jump game"
WIDTH = 480
HEIGHT = 600
FPS = 30
WHITE = (255, 255, 255)
BLACK = (0,0,0)
RED = (240, 55, 66)
|
title = 'Jump game'
width = 480
height = 600
fps = 30
white = (255, 255, 255)
black = (0, 0, 0)
red = (240, 55, 66)
|
class Tool:
def __init__(self, name, make):
self.name = name
self.make = make
|
class Tool:
def __init__(self, name, make):
self.name = name
self.make = make
|
colors = {
"bg0": " #fbf1c7",
"bg1": " #ebdbb2",
"bg2": " #d5c4a1",
"bg3": " #bdae93",
"bg4": " #a89984",
"gry": " #928374",
"fg4": " #7c6f64",
"fg3": " #665c54",
"fg2": " #504945",
"fg1": " #3c3836",
"fg0": " #282828",
"red": " #cc241d",
"red2": " #9d0006",
"orange": " #d65d0e",
"orange2": " #af3a03",
"yellow": " #d79921",
"yellow2": " #b57614",
"green": " #98971a",
"green2": " #79740e",
"aqua": " #689d6a",
"aqua2": " #427b58",
"blue": " #458588",
"blue2": " #076678",
"purple": " #b16286",
"purple2": " #8f3f71",
}
html_theme = "furo"
html_theme_options = {
"light_css_variables": {
"font-stack": "Fira Sans, sans-serif",
"font-stack--monospace": "Fira Code, monospace",
"color-brand-primary": colors["purple2"],
"color-brand-content": colors["blue2"],
},
"dark_css_variables": {
"color-brand-primary": colors["purple"],
"color-brand-content": colors["blue"],
"color-background-primary": colors["fg1"],
"color-background-secondary": colors["fg0"],
"color-foreground-primary": colors["bg0"],
"color-foreground-secondary": colors["bg1"],
"color-highlighted-background": colors["yellow"],
"color-highlight-on-target": colors["fg2"],
},
}
panels_css_variables = {
"tabs-color-label-active": colors["purple2"],
"tabs-color-label-inactive": colors["purple"],
"tabs-color-overline": colors['purple'],
"tabs-color-underline": colors["purple2"],
"tabs-size-label": "1rem",
}
highlight_language = "python3"
pygments_style = "gruvbox-light"
pygments_dark_style = "gruvbox-dark"
|
colors = {'bg0': ' #fbf1c7', 'bg1': ' #ebdbb2', 'bg2': ' #d5c4a1', 'bg3': ' #bdae93', 'bg4': ' #a89984', 'gry': ' #928374', 'fg4': ' #7c6f64', 'fg3': ' #665c54', 'fg2': ' #504945', 'fg1': ' #3c3836', 'fg0': ' #282828', 'red': ' #cc241d', 'red2': ' #9d0006', 'orange': ' #d65d0e', 'orange2': ' #af3a03', 'yellow': ' #d79921', 'yellow2': ' #b57614', 'green': ' #98971a', 'green2': ' #79740e', 'aqua': ' #689d6a', 'aqua2': ' #427b58', 'blue': ' #458588', 'blue2': ' #076678', 'purple': ' #b16286', 'purple2': ' #8f3f71'}
html_theme = 'furo'
html_theme_options = {'light_css_variables': {'font-stack': 'Fira Sans, sans-serif', 'font-stack--monospace': 'Fira Code, monospace', 'color-brand-primary': colors['purple2'], 'color-brand-content': colors['blue2']}, 'dark_css_variables': {'color-brand-primary': colors['purple'], 'color-brand-content': colors['blue'], 'color-background-primary': colors['fg1'], 'color-background-secondary': colors['fg0'], 'color-foreground-primary': colors['bg0'], 'color-foreground-secondary': colors['bg1'], 'color-highlighted-background': colors['yellow'], 'color-highlight-on-target': colors['fg2']}}
panels_css_variables = {'tabs-color-label-active': colors['purple2'], 'tabs-color-label-inactive': colors['purple'], 'tabs-color-overline': colors['purple'], 'tabs-color-underline': colors['purple2'], 'tabs-size-label': '1rem'}
highlight_language = 'python3'
pygments_style = 'gruvbox-light'
pygments_dark_style = 'gruvbox-dark'
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
result = ListNode(0)
result_tail = result
carry = 0
while l1 or l2 or carry:
val1 = (l1.val if l1 else 0)
val2 = (l2.val if l2 else 0)
carry, out = divmod(val1+val2 + carry, 10)
result_tail.next = ListNode(out)
result_tail = result_tail.next
l1 = (l1.next if l1 else None)
l2 = (l2.next if l2 else None)
return result.next
|
class Solution:
def add_two_numbers(self, l1: ListNode, l2: ListNode) -> ListNode:
result = list_node(0)
result_tail = result
carry = 0
while l1 or l2 or carry:
val1 = l1.val if l1 else 0
val2 = l2.val if l2 else 0
(carry, out) = divmod(val1 + val2 + carry, 10)
result_tail.next = list_node(out)
result_tail = result_tail.next
l1 = l1.next if l1 else None
l2 = l2.next if l2 else None
return result.next
|
class User:
"""
Class that generates new users login system
"""
def __init__(self,fullname, email, username, password):
self.fullname = fullname
self.email = email
self.username = username
self.password = password
user_list = []
def save_user(self):
"""
method that saves user object to user_list
"""
User.user_list.append(self)
@classmethod
def user_exists(cls, username):
"""
Method that checks user existense in the user list.
Args:
username: user to search if the username exists
Returns Boolean: True or false accordingly
"""
for user in cls.user_list:
if user.username == username:
return True
else:
return False
@classmethod
def find_by_username(cls,username):
for user in cls.user_list:
if user.username == username:
return user
else:
return 0
|
class User:
"""
Class that generates new users login system
"""
def __init__(self, fullname, email, username, password):
self.fullname = fullname
self.email = email
self.username = username
self.password = password
user_list = []
def save_user(self):
"""
method that saves user object to user_list
"""
User.user_list.append(self)
@classmethod
def user_exists(cls, username):
"""
Method that checks user existense in the user list.
Args:
username: user to search if the username exists
Returns Boolean: True or false accordingly
"""
for user in cls.user_list:
if user.username == username:
return True
else:
return False
@classmethod
def find_by_username(cls, username):
for user in cls.user_list:
if user.username == username:
return user
else:
return 0
|
budget = float(input())
season = str(input())
region = str()
final_budget = 0
accommodation = str()
if budget <= 100:
region = "Bulgaria"
if season == "summer":
accommodation = "Camp"
final_budget = budget * 0.7
else:
accommodation = "Hotel"
final_budget = budget * 0.30
elif 100 < budget <= 1000:
region = "Balkans"
if season == "summer":
accommodation = "Camp"
final_budget = budget * 0.6
else:
accommodation = "Hotel"
final_budget = budget * 0.20
else:
region = "Europe"
accommodation = "Hotel"
if season == "summer":
final_budget = budget * 0.1
else:
final_budget = budget * 0.1
print(f"Somewhere in {region}")
print(f"{accommodation} - {budget - final_budget:.2f}")
|
budget = float(input())
season = str(input())
region = str()
final_budget = 0
accommodation = str()
if budget <= 100:
region = 'Bulgaria'
if season == 'summer':
accommodation = 'Camp'
final_budget = budget * 0.7
else:
accommodation = 'Hotel'
final_budget = budget * 0.3
elif 100 < budget <= 1000:
region = 'Balkans'
if season == 'summer':
accommodation = 'Camp'
final_budget = budget * 0.6
else:
accommodation = 'Hotel'
final_budget = budget * 0.2
else:
region = 'Europe'
accommodation = 'Hotel'
if season == 'summer':
final_budget = budget * 0.1
else:
final_budget = budget * 0.1
print(f'Somewhere in {region}')
print(f'{accommodation} - {budget - final_budget:.2f}')
|
class RequestConnectionError(Exception):
pass
class ReferralError(Exception):
pass
class DataRegistryCaseUpdateError(Exception):
pass
|
class Requestconnectionerror(Exception):
pass
class Referralerror(Exception):
pass
class Dataregistrycaseupdateerror(Exception):
pass
|
class ATTR:
CAND_CSV = "cand_csv"
CANDIDATE = "candidate"
COLOR = "color"
CONFIRMED = "confirmed"
CSV_ENDING = ".csv"
EMAIL = "Email"
FIRST_NAME = "First Name"
LAST_NAME = "Last Name"
NEXT = "next"
NUM_BITBYTES = "num_bitbytes"
NUM_CONFIRMED = "num_confirmed"
NUM_PENDING = "num_pending"
NUM_REJECTED = "num_rejected"
POST = "POST"
STATUS = "status"
TITLE = "title"
UNCONFIRMED = "unconfirmed"
UTF8 = "utf-8"
UTF8SIG = "utf-8-sig"
DOMAINS = [
"@berkeley.edu",
"@hkn.eecs.berkeley.edu",
]
class CandidateDTO:
def __init__(self, candidate_attributes: dict):
self.email = candidate_attributes.get(ATTR.EMAIL, None)
self.first_name = candidate_attributes.get(ATTR.FIRST_NAME, None)
self.last_name = candidate_attributes.get(ATTR.LAST_NAME, None)
self.username = self.email
if self.email is not None:
for d in DOMAINS:
if self.email.endswith(d):
self.username = self.email.replace(d, "")
break
self.validate()
def validate(self):
assert self.email, "Candidate email must not be empty"
assert any(
self.email.endswith(d) for d in DOMAINS
), "Candidate email must be an @berkeley.edu or @hkn.eecs.berkeley.edu email"
assert self.first_name, "Candidate first name must not be empty"
assert self.last_name, "Candidate last name must not be empty"
assert self.username, "Candidate username must not be empty"
DEFAULT_RANDOM_PASSWORD_LENGTH = 20
# Default hard-coded event types for candidate semester
# NOTE: these strings are also hard-coded in candidate/index.html
class EVENT_NAMES:
MANDATORY = "Mandatory"
BITBYTE = "bitbyte"
HANGOUT = "officer_hangout"
CHALLENGE = "officer_challenge"
EITHER = "either"
INTERACTIVITIES = "interactivities"
REQUIREMENT_TITLES_TEMPLATE = "{} ({} required, {} remaining)"
|
class Attr:
cand_csv = 'cand_csv'
candidate = 'candidate'
color = 'color'
confirmed = 'confirmed'
csv_ending = '.csv'
email = 'Email'
first_name = 'First Name'
last_name = 'Last Name'
next = 'next'
num_bitbytes = 'num_bitbytes'
num_confirmed = 'num_confirmed'
num_pending = 'num_pending'
num_rejected = 'num_rejected'
post = 'POST'
status = 'status'
title = 'title'
unconfirmed = 'unconfirmed'
utf8 = 'utf-8'
utf8_sig = 'utf-8-sig'
domains = ['@berkeley.edu', '@hkn.eecs.berkeley.edu']
class Candidatedto:
def __init__(self, candidate_attributes: dict):
self.email = candidate_attributes.get(ATTR.EMAIL, None)
self.first_name = candidate_attributes.get(ATTR.FIRST_NAME, None)
self.last_name = candidate_attributes.get(ATTR.LAST_NAME, None)
self.username = self.email
if self.email is not None:
for d in DOMAINS:
if self.email.endswith(d):
self.username = self.email.replace(d, '')
break
self.validate()
def validate(self):
assert self.email, 'Candidate email must not be empty'
assert any((self.email.endswith(d) for d in DOMAINS)), 'Candidate email must be an @berkeley.edu or @hkn.eecs.berkeley.edu email'
assert self.first_name, 'Candidate first name must not be empty'
assert self.last_name, 'Candidate last name must not be empty'
assert self.username, 'Candidate username must not be empty'
default_random_password_length = 20
class Event_Names:
mandatory = 'Mandatory'
bitbyte = 'bitbyte'
hangout = 'officer_hangout'
challenge = 'officer_challenge'
either = 'either'
interactivities = 'interactivities'
requirement_titles_template = '{} ({} required, {} remaining)'
|
class PartOfSpeech:
NOUN = 'noun'
VERB = 'verb'
ADJECTIVE = 'adjective'
ADVERB = 'adverb'
pos2con = {
'n': [
'NN', 'NNS', 'NNP', 'NNPS', # from WordNet
'NP' # from PPDB
],
'v': [
'VB', 'VBD', 'VBG', 'VBN', 'VBZ', # from WordNet
'VBP' # from PPDB
],
'a': ['JJ', 'JJR', 'JJS', 'IN'],
's': ['JJ', 'JJR', 'JJS', 'IN'], # Adjective Satellite
'r': ['RB', 'RBR', 'RBS'], # Adverb
}
con2pos = {}
poses = []
for key, values in pos2con.items():
poses.extend(values)
for value in values:
if value not in con2pos:
con2pos[value] = []
con2pos[value].append(key)
@staticmethod
def pos2constituent(pos):
if pos in PartOfSpeech.pos2con:
return PartOfSpeech.pos2con[pos]
return []
@staticmethod
def constituent2pos(con):
if con in PartOfSpeech.con2pos:
return PartOfSpeech.con2pos[con]
return []
@staticmethod
def get_pos():
return PartOfSpeech.poses
|
class Partofspeech:
noun = 'noun'
verb = 'verb'
adjective = 'adjective'
adverb = 'adverb'
pos2con = {'n': ['NN', 'NNS', 'NNP', 'NNPS', 'NP'], 'v': ['VB', 'VBD', 'VBG', 'VBN', 'VBZ', 'VBP'], 'a': ['JJ', 'JJR', 'JJS', 'IN'], 's': ['JJ', 'JJR', 'JJS', 'IN'], 'r': ['RB', 'RBR', 'RBS']}
con2pos = {}
poses = []
for (key, values) in pos2con.items():
poses.extend(values)
for value in values:
if value not in con2pos:
con2pos[value] = []
con2pos[value].append(key)
@staticmethod
def pos2constituent(pos):
if pos in PartOfSpeech.pos2con:
return PartOfSpeech.pos2con[pos]
return []
@staticmethod
def constituent2pos(con):
if con in PartOfSpeech.con2pos:
return PartOfSpeech.con2pos[con]
return []
@staticmethod
def get_pos():
return PartOfSpeech.poses
|
def superTuple(name, attributes):
"""Creates a Super Tuple class."""
dct = {}
#Create __new__.
nargs = len(attributes)
def _new_(cls, *args):
if len(args) != nargs:
raise TypeError("%s takes %d arguments (%d given)." % (cls.__name__,
nargs,
len(args)))
return tuple.__new__(cls, args)
dct["__new__"] = staticmethod(_new_)
#Create __repr__.
def _repr_(self):
contents = [repr(elem) for elem in self]
return "%s<%s>" % (self.__class__.__name__,
", ".join(contents))
dct["__repr__"] = _repr_
#Create attribute properties.
def getter(i):
return lambda self: self.__getitem__(i)
for index, attribute in enumerate(attributes):
dct[attribute] = property(getter(index))
#Set slots.
dct["__slots__"] = []
#Return class.
return type(name, (tuple,), dct)
|
def super_tuple(name, attributes):
"""Creates a Super Tuple class."""
dct = {}
nargs = len(attributes)
def _new_(cls, *args):
if len(args) != nargs:
raise type_error('%s takes %d arguments (%d given).' % (cls.__name__, nargs, len(args)))
return tuple.__new__(cls, args)
dct['__new__'] = staticmethod(_new_)
def _repr_(self):
contents = [repr(elem) for elem in self]
return '%s<%s>' % (self.__class__.__name__, ', '.join(contents))
dct['__repr__'] = _repr_
def getter(i):
return lambda self: self.__getitem__(i)
for (index, attribute) in enumerate(attributes):
dct[attribute] = property(getter(index))
dct['__slots__'] = []
return type(name, (tuple,), dct)
|
my_first_name = str(input("My first name is "))
neighbor_first_name = str(input("My neighbor's first name is "))
my_coding = int(input("How many months have I been coding? "))
neighbor_coding = int(input("How many months has my neighbor been coding? "))
print("I am " + my_first_name + " and my neighbor is " + neighbor_first_name)
print(str(my_first_name))
print(str(neighbor_first_name))
print(str(my_coding))
print(str(neighbor_coding))
|
my_first_name = str(input('My first name is '))
neighbor_first_name = str(input("My neighbor's first name is "))
my_coding = int(input('How many months have I been coding? '))
neighbor_coding = int(input('How many months has my neighbor been coding? '))
print('I am ' + my_first_name + ' and my neighbor is ' + neighbor_first_name)
print(str(my_first_name))
print(str(neighbor_first_name))
print(str(my_coding))
print(str(neighbor_coding))
|
class MethodPropertyNotFoundError(Exception):
"""Exception to raise when a class is does not have an expected method or property."""
pass
class PipelineNotFoundError(Exception):
"""An exception raised when a particular pipeline is not found in automl search results"""
pass
class ObjectiveNotFoundError(Exception):
"""Exception to raise when specified objective does not exist."""
pass
class MissingComponentError(Exception):
"""An exception raised when a component is not found in all_components()"""
pass
class ComponentNotYetFittedError(Exception):
"""An exception to be raised when predict/predict_proba/transform is called on a component without fitting first."""
pass
class PipelineNotYetFittedError(Exception):
"""An exception to be raised when predict/predict_proba/transform is called on a pipeline without fitting first."""
pass
class AutoMLSearchException(Exception):
"""Exception raised when all pipelines in an automl batch return a score of NaN for the primary objective."""
pass
class EnsembleMissingPipelinesError(Exception):
"""An exception raised when an ensemble is missing `estimators` (list) as a parameter."""
pass
class PipelineScoreError(Exception):
"""An exception raised when a pipeline errors while scoring any objective in a list of objectives.
Arguments:
exceptions (dict): A dictionary mapping an objective name (str) to a tuple of the form (exception, traceback).
All of the objectives that errored will be stored here.
scored_successfully (dict): A dictionary mapping an objective name (str) to a score value. All of the objectives
that did not error will be stored here.
"""
def __init__(self, exceptions, scored_successfully):
self.exceptions = exceptions
self.scored_successfully = scored_successfully
# Format the traceback message
exception_list = []
for objective, (exception, tb) in exceptions.items():
exception_list.append(
f"{objective} encountered {str(exception.__class__.__name__)} with message ({str(exception)}):\n"
)
exception_list.extend(tb)
message = "\n".join(exception_list)
self.message = message
super().__init__(message)
class DataCheckInitError(Exception):
"""Exception raised when a data check can't initialize with the parameters given."""
class NullsInColumnWarning(UserWarning):
"""Warning thrown when there are null values in the column of interest"""
class ObjectiveCreationError(Exception):
"""Exception when get_objective tries to instantiate an objective and required args are not provided."""
class NoPositiveLabelException(Exception):
"""Exception when a particular classification label for the 'positive' class cannot be found in the column index or unique values"""
|
class Methodpropertynotfounderror(Exception):
"""Exception to raise when a class is does not have an expected method or property."""
pass
class Pipelinenotfounderror(Exception):
"""An exception raised when a particular pipeline is not found in automl search results"""
pass
class Objectivenotfounderror(Exception):
"""Exception to raise when specified objective does not exist."""
pass
class Missingcomponenterror(Exception):
"""An exception raised when a component is not found in all_components()"""
pass
class Componentnotyetfittederror(Exception):
"""An exception to be raised when predict/predict_proba/transform is called on a component without fitting first."""
pass
class Pipelinenotyetfittederror(Exception):
"""An exception to be raised when predict/predict_proba/transform is called on a pipeline without fitting first."""
pass
class Automlsearchexception(Exception):
"""Exception raised when all pipelines in an automl batch return a score of NaN for the primary objective."""
pass
class Ensemblemissingpipelineserror(Exception):
"""An exception raised when an ensemble is missing `estimators` (list) as a parameter."""
pass
class Pipelinescoreerror(Exception):
"""An exception raised when a pipeline errors while scoring any objective in a list of objectives.
Arguments:
exceptions (dict): A dictionary mapping an objective name (str) to a tuple of the form (exception, traceback).
All of the objectives that errored will be stored here.
scored_successfully (dict): A dictionary mapping an objective name (str) to a score value. All of the objectives
that did not error will be stored here.
"""
def __init__(self, exceptions, scored_successfully):
self.exceptions = exceptions
self.scored_successfully = scored_successfully
exception_list = []
for (objective, (exception, tb)) in exceptions.items():
exception_list.append(f'{objective} encountered {str(exception.__class__.__name__)} with message ({str(exception)}):\n')
exception_list.extend(tb)
message = '\n'.join(exception_list)
self.message = message
super().__init__(message)
class Datacheckiniterror(Exception):
"""Exception raised when a data check can't initialize with the parameters given."""
class Nullsincolumnwarning(UserWarning):
"""Warning thrown when there are null values in the column of interest"""
class Objectivecreationerror(Exception):
"""Exception when get_objective tries to instantiate an objective and required args are not provided."""
class Nopositivelabelexception(Exception):
"""Exception when a particular classification label for the 'positive' class cannot be found in the column index or unique values"""
|
data_nascimento = 9
mes_nascimento = 10
ano_nascimento = 1985
print(data_nascimento, mes_nascimento, ano_nascimento, sep="/", end=".\n")
contador = 1
while(contador <= 10):
print(contador)
contador = contador + 2
if(contador == 5):
contador = contador + 2
|
data_nascimento = 9
mes_nascimento = 10
ano_nascimento = 1985
print(data_nascimento, mes_nascimento, ano_nascimento, sep='/', end='.\n')
contador = 1
while contador <= 10:
print(contador)
contador = contador + 2
if contador == 5:
contador = contador + 2
|
def even_spread(M, N):
"""Return a list of target sizes for an even spread.
Output sizes are either M//N or M//N+1
Args:
M: number of elements
N: number of partitons
Returns:
target_sizes : [int]
len(target_sizes) == N
sum(target_sizes) == M
"""
if N == 0:
assert M == 0
return []
tgt = [ M//N ]*N
for i in range(M%N):
tgt[i] += 1
return tgt
def cumsum(blks):
csum = [0]
for i in range(len(blks)):
csum.append(csum[i]+blks[i])
return csum
class Cxn:
def __init__(self, src, dst, s0,s1, d0,d1):
self.src = src
self.dst = dst
self.s0, self.s1 = s0, s1
self.d0, self.d1 = d0, d1
def __str__(self):
return f"src[{self.src}][{self.s0}:{self.s1}] ~> dst[{self.dst}][{self.d0}:{self.d1}]"
def __repr__(self):
return f"Cxn({self.src},{self.dst},{self.s0},{self.s1},{self.d0},{self.d1})"
def segments(src, dst):
"""List out corresponding segments of `src` and `dst`.
Note:
src[0] == 0
dst[0] == 0
src[-1] == dst[-1]
Args:
src: [int] ascending sequence of starting offsets
dst: [int] ascending sequence of starting offsets
Returns:
[Cxn]
"""
assert src[0] == 0 and dst[0] == 0
assert src[-1] == dst[-1], f"Input and output sizes ({src[-1]} and {dst[-1]}) don't match."
ans = []
idx = 0 # current global index
i, j = 1,1 # next blk of src, dst to check
while i < len(src) and j < len(dst):
end = min(src[i], dst[j])
if end-idx > 0:
ans.append( Cxn(i-1,j-1,
idx-src[i-1],end-src[i-1],
idx-dst[j-1],end-dst[j-1])
)
if end == src[i]:
i += 1
if end == dst[j]:
j += 1
idx = end
return ans
def segments_e(blks, N):
# Compute segments for mapping N even groups
# (see segments and even_spread)
#
oblk = even_spread(sum(i for i in blks), N)
return segments(cumsum(blks), cumsum(oblk))
if __name__=="__main__":
M = 200
a = even_spread(M, 6)
b = even_spread(M, 9)
print(a)
print(b)
ans = segments(cumsum(a), cumsum(b))
for g in ans:
print(g)
print()
ans = segments(cumsum(b), cumsum(a))
for g in ans:
print(g)
|
def even_spread(M, N):
"""Return a list of target sizes for an even spread.
Output sizes are either M//N or M//N+1
Args:
M: number of elements
N: number of partitons
Returns:
target_sizes : [int]
len(target_sizes) == N
sum(target_sizes) == M
"""
if N == 0:
assert M == 0
return []
tgt = [M // N] * N
for i in range(M % N):
tgt[i] += 1
return tgt
def cumsum(blks):
csum = [0]
for i in range(len(blks)):
csum.append(csum[i] + blks[i])
return csum
class Cxn:
def __init__(self, src, dst, s0, s1, d0, d1):
self.src = src
self.dst = dst
(self.s0, self.s1) = (s0, s1)
(self.d0, self.d1) = (d0, d1)
def __str__(self):
return f'src[{self.src}][{self.s0}:{self.s1}] ~> dst[{self.dst}][{self.d0}:{self.d1}]'
def __repr__(self):
return f'Cxn({self.src},{self.dst},{self.s0},{self.s1},{self.d0},{self.d1})'
def segments(src, dst):
"""List out corresponding segments of `src` and `dst`.
Note:
src[0] == 0
dst[0] == 0
src[-1] == dst[-1]
Args:
src: [int] ascending sequence of starting offsets
dst: [int] ascending sequence of starting offsets
Returns:
[Cxn]
"""
assert src[0] == 0 and dst[0] == 0
assert src[-1] == dst[-1], f"Input and output sizes ({src[-1]} and {dst[-1]}) don't match."
ans = []
idx = 0
(i, j) = (1, 1)
while i < len(src) and j < len(dst):
end = min(src[i], dst[j])
if end - idx > 0:
ans.append(cxn(i - 1, j - 1, idx - src[i - 1], end - src[i - 1], idx - dst[j - 1], end - dst[j - 1]))
if end == src[i]:
i += 1
if end == dst[j]:
j += 1
idx = end
return ans
def segments_e(blks, N):
oblk = even_spread(sum((i for i in blks)), N)
return segments(cumsum(blks), cumsum(oblk))
if __name__ == '__main__':
m = 200
a = even_spread(M, 6)
b = even_spread(M, 9)
print(a)
print(b)
ans = segments(cumsum(a), cumsum(b))
for g in ans:
print(g)
print()
ans = segments(cumsum(b), cumsum(a))
for g in ans:
print(g)
|
BASE_URL = 'https://caseinfo.arcourts.gov/cconnect/PROD/public/'
### Search Results ###
PERSON_SUFFIX = 'ck_public_qry_cpty.cp_personcase_srch_details?backto=P&'
JUDGEMENT_SUFFIX = 'ck_public_qry_judg.cp_judgment_srch_rslt?'
CASE_SUFFIX = 'ck_public_qry_doct.cp_dktrpt_docket_report?backto=D&'
DATE_SUFFIX = 'ck_public_qry_doct.cp_dktrpt_new_case_report?backto=C&'
DOCKET_SUFFIX = 'ck_public_qry_doct.cp_dktrpt_new_case_report?backto=F&'
SEARCH_TYPE_CONVERTER = {
'name': PERSON_SUFFIX,
'judgement': JUDGEMENT_SUFFIX,
'case': CASE_SUFFIX,
'date': DATE_SUFFIX,
'docket': DOCKET_SUFFIX
}
### Details for Known ID's ###
CASE_ID = 'ck_public_qry_doct.cp_dktrpt_docket_report?case_id='
### Case Page Navigation ###
HEADINGS = [
'Report Selection Criteria',
'Case Description',
'Case Event Schedule',
'Case Parties',
'Violations',
'Sentence',
'Milestone Tracks',
'Docket Entries'
]
CASE_DETAIL_HANDLER = {
'Report Selection Criteria': '_parse_report_or_desc',
'Case Description': '_parse_report_or_desc',
'Case Event Schedule': '_parse_events',
'Case Parties': '_parse_parties_or_docket',
'Violations': '_parse_violations',
'Sentence': '_parse_sentence',
'Milestone Tracks': '_skip_parse',
'Docket Entries': '_parse_parties_or_docket'
}
NON_TABLE_DATA = [
'Violations',
'Sentence',
'Milestone Tracks'
]
UNAVAILABLE_STATEMENTS = {
'Case Event Schedule': 'No case events were found.',
'Sentence': 'No Sentence Info Found.',
'Milestone Tracks': 'No Milestone Tracks found.'
}
### New Web Layout ###
CASE_ID_SEARCH = 'https://caseinfonew.arcourts.gov/pls/apexpcc/f?p=313:15:206076974987427::NO:::'
|
base_url = 'https://caseinfo.arcourts.gov/cconnect/PROD/public/'
person_suffix = 'ck_public_qry_cpty.cp_personcase_srch_details?backto=P&'
judgement_suffix = 'ck_public_qry_judg.cp_judgment_srch_rslt?'
case_suffix = 'ck_public_qry_doct.cp_dktrpt_docket_report?backto=D&'
date_suffix = 'ck_public_qry_doct.cp_dktrpt_new_case_report?backto=C&'
docket_suffix = 'ck_public_qry_doct.cp_dktrpt_new_case_report?backto=F&'
search_type_converter = {'name': PERSON_SUFFIX, 'judgement': JUDGEMENT_SUFFIX, 'case': CASE_SUFFIX, 'date': DATE_SUFFIX, 'docket': DOCKET_SUFFIX}
case_id = 'ck_public_qry_doct.cp_dktrpt_docket_report?case_id='
headings = ['Report Selection Criteria', 'Case Description', 'Case Event Schedule', 'Case Parties', 'Violations', 'Sentence', 'Milestone Tracks', 'Docket Entries']
case_detail_handler = {'Report Selection Criteria': '_parse_report_or_desc', 'Case Description': '_parse_report_or_desc', 'Case Event Schedule': '_parse_events', 'Case Parties': '_parse_parties_or_docket', 'Violations': '_parse_violations', 'Sentence': '_parse_sentence', 'Milestone Tracks': '_skip_parse', 'Docket Entries': '_parse_parties_or_docket'}
non_table_data = ['Violations', 'Sentence', 'Milestone Tracks']
unavailable_statements = {'Case Event Schedule': 'No case events were found.', 'Sentence': 'No Sentence Info Found.', 'Milestone Tracks': 'No Milestone Tracks found.'}
case_id_search = 'https://caseinfonew.arcourts.gov/pls/apexpcc/f?p=313:15:206076974987427::NO:::'
|
value = 2 ** 1000
value_string = str(value)
value_sum = 0
for _ in value_string:
value_sum += int(_)
print(value_sum)
|
value = 2 ** 1000
value_string = str(value)
value_sum = 0
for _ in value_string:
value_sum += int(_)
print(value_sum)
|
# parsetab.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'leftplusminusnonassocadvdisadvadv dice disadv div minus newline number plus space star tabcommand : roll_list\n | mod_list\n |\n roll_list : roll\n | roll roll_list\n roll : number dice mod_list\n | number dice\n | dice mod_list\n | dice\n | number\n | number mod_list\n mod_list : mod\n | mod mod_list\n mod : plus number\n | minus number\n | star number\n | div number\n | adv\n | disadv\n '
_lr_action_items = {'$end':([0,1,2,3,4,5,6,7,12,13,14,15,16,17,18,19,20,21,22,23,],[-3,0,-1,-2,-4,-12,-10,-9,-18,-19,-5,-13,-7,-11,-8,-14,-15,-16,-17,-6,]),'number':([0,4,5,6,7,8,9,10,11,12,13,15,16,17,18,19,20,21,22,23,],[6,6,-12,-10,-9,19,20,21,22,-18,-19,-13,-7,-11,-8,-14,-15,-16,-17,-6,]),'dice':([0,4,5,6,7,12,13,15,16,17,18,19,20,21,22,23,],[7,7,-12,16,-9,-18,-19,-13,-7,-11,-8,-14,-15,-16,-17,-6,]),'plus':([0,5,6,7,12,13,16,19,20,21,22,],[8,8,8,8,-18,-19,8,-14,-15,-16,-17,]),'minus':([0,5,6,7,12,13,16,19,20,21,22,],[9,9,9,9,-18,-19,9,-14,-15,-16,-17,]),'star':([0,5,6,7,12,13,16,19,20,21,22,],[10,10,10,10,-18,-19,10,-14,-15,-16,-17,]),'div':([0,5,6,7,12,13,16,19,20,21,22,],[11,11,11,11,-18,-19,11,-14,-15,-16,-17,]),'adv':([0,5,6,7,12,13,16,19,20,21,22,],[12,12,12,12,-18,-19,12,-14,-15,-16,-17,]),'disadv':([0,5,6,7,12,13,16,19,20,21,22,],[13,13,13,13,-18,-19,13,-14,-15,-16,-17,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'command':([0,],[1,]),'roll_list':([0,4,],[2,14,]),'mod_list':([0,5,6,7,16,],[3,15,17,18,23,]),'roll':([0,4,],[4,4,]),'mod':([0,5,6,7,16,],[5,5,5,5,5,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
for _x, _y in zip(_v[0], _v[1]):
if not _x in _lr_goto: _lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> command","S'",1,None,None,None),
('command -> roll_list','command',1,'p_command','parser.py',28),
('command -> mod_list','command',1,'p_command','parser.py',29),
('command -> <empty>','command',0,'p_command','parser.py',30),
('roll_list -> roll','roll_list',1,'p_roll_list','parser.py',41),
('roll_list -> roll roll_list','roll_list',2,'p_roll_list','parser.py',42),
('roll -> number dice mod_list','roll',3,'p_roll','parser.py',50),
('roll -> number dice','roll',2,'p_roll','parser.py',51),
('roll -> dice mod_list','roll',2,'p_roll','parser.py',52),
('roll -> dice','roll',1,'p_roll','parser.py',53),
('roll -> number','roll',1,'p_roll','parser.py',54),
('roll -> number mod_list','roll',2,'p_roll','parser.py',55),
('mod_list -> mod','mod_list',1,'p_mod_list','parser.py',71),
('mod_list -> mod mod_list','mod_list',2,'p_mod_list','parser.py',72),
('mod -> plus number','mod',2,'p_mod','parser.py',80),
('mod -> minus number','mod',2,'p_mod','parser.py',81),
('mod -> star number','mod',2,'p_mod','parser.py',82),
('mod -> div number','mod',2,'p_mod','parser.py',83),
('mod -> adv','mod',1,'p_mod','parser.py',84),
('mod -> disadv','mod',1,'p_mod','parser.py',85),
]
|
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'leftplusminusnonassocadvdisadvadv dice disadv div minus newline number plus space star tabcommand : roll_list\n | mod_list\n |\n roll_list : roll\n | roll roll_list\n roll : number dice mod_list\n | number dice\n | dice mod_list\n | dice\n | number\n | number mod_list\n mod_list : mod\n | mod mod_list\n mod : plus number\n | minus number\n | star number\n | div number\n | adv\n | disadv\n '
_lr_action_items = {'$end': ([0, 1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23], [-3, 0, -1, -2, -4, -12, -10, -9, -18, -19, -5, -13, -7, -11, -8, -14, -15, -16, -17, -6]), 'number': ([0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23], [6, 6, -12, -10, -9, 19, 20, 21, 22, -18, -19, -13, -7, -11, -8, -14, -15, -16, -17, -6]), 'dice': ([0, 4, 5, 6, 7, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23], [7, 7, -12, 16, -9, -18, -19, -13, -7, -11, -8, -14, -15, -16, -17, -6]), 'plus': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [8, 8, 8, 8, -18, -19, 8, -14, -15, -16, -17]), 'minus': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [9, 9, 9, 9, -18, -19, 9, -14, -15, -16, -17]), 'star': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [10, 10, 10, 10, -18, -19, 10, -14, -15, -16, -17]), 'div': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [11, 11, 11, 11, -18, -19, 11, -14, -15, -16, -17]), 'adv': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [12, 12, 12, 12, -18, -19, 12, -14, -15, -16, -17]), 'disadv': ([0, 5, 6, 7, 12, 13, 16, 19, 20, 21, 22], [13, 13, 13, 13, -18, -19, 13, -14, -15, -16, -17])}
_lr_action = {}
for (_k, _v) in _lr_action_items.items():
for (_x, _y) in zip(_v[0], _v[1]):
if not _x in _lr_action:
_lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'command': ([0], [1]), 'roll_list': ([0, 4], [2, 14]), 'mod_list': ([0, 5, 6, 7, 16], [3, 15, 17, 18, 23]), 'roll': ([0, 4], [4, 4]), 'mod': ([0, 5, 6, 7, 16], [5, 5, 5, 5, 5])}
_lr_goto = {}
for (_k, _v) in _lr_goto_items.items():
for (_x, _y) in zip(_v[0], _v[1]):
if not _x in _lr_goto:
_lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [("S' -> command", "S'", 1, None, None, None), ('command -> roll_list', 'command', 1, 'p_command', 'parser.py', 28), ('command -> mod_list', 'command', 1, 'p_command', 'parser.py', 29), ('command -> <empty>', 'command', 0, 'p_command', 'parser.py', 30), ('roll_list -> roll', 'roll_list', 1, 'p_roll_list', 'parser.py', 41), ('roll_list -> roll roll_list', 'roll_list', 2, 'p_roll_list', 'parser.py', 42), ('roll -> number dice mod_list', 'roll', 3, 'p_roll', 'parser.py', 50), ('roll -> number dice', 'roll', 2, 'p_roll', 'parser.py', 51), ('roll -> dice mod_list', 'roll', 2, 'p_roll', 'parser.py', 52), ('roll -> dice', 'roll', 1, 'p_roll', 'parser.py', 53), ('roll -> number', 'roll', 1, 'p_roll', 'parser.py', 54), ('roll -> number mod_list', 'roll', 2, 'p_roll', 'parser.py', 55), ('mod_list -> mod', 'mod_list', 1, 'p_mod_list', 'parser.py', 71), ('mod_list -> mod mod_list', 'mod_list', 2, 'p_mod_list', 'parser.py', 72), ('mod -> plus number', 'mod', 2, 'p_mod', 'parser.py', 80), ('mod -> minus number', 'mod', 2, 'p_mod', 'parser.py', 81), ('mod -> star number', 'mod', 2, 'p_mod', 'parser.py', 82), ('mod -> div number', 'mod', 2, 'p_mod', 'parser.py', 83), ('mod -> adv', 'mod', 1, 'p_mod', 'parser.py', 84), ('mod -> disadv', 'mod', 1, 'p_mod', 'parser.py', 85)]
|
output = 'Char\tisdigit\tisdecimal\tisnumeric'
html_output = '''<table border="1">
<thead>
<tr>
<th>Char</th>
<th>isdigit</th>
<th>isdecimal</th>
<th>isnumeric</th>
</tr>
</thead>
<tbody>'''
for i in range(1, 1114111):
if chr(i).isdigit() or chr(i).isdecimal() or chr(i).isnumeric():
output += f'\n{chr(i)}\t{chr(i).isdigit()}'
output += f'\t{chr(i).isdecimal()}\t{chr(i).isnumeric()}'
if not (chr(i).isdigit() and chr(i).isdecimal() and chr(i).isnumeric()):
# one is False
color = 'red'
else:
color = 'black'
html_output += f'''<tr style="color:{color}">
<td>{chr(i)}</td>
<td>{chr(i).isdigit()}</td>
<td>{chr(i).isdecimal()}</td>
<td>{chr(i).isnumeric()}</td>
</tr>'''
html_output += '</tbody></table>'
with open('numbers.txt', 'w', encoding="utf-8") as f:
f.write(output)
print('Look in the strings/Demos directory for a new file.')
with open('numbers.html', 'w', encoding="utf-8") as f:
f.write(html_output)
print('''Look in the strings/Demos directory for new \
numbers.txt and numbers.html files.''')
|
output = 'Char\tisdigit\tisdecimal\tisnumeric'
html_output = '<table border="1">\n<thead>\n <tr>\n <th>Char</th>\n <th>isdigit</th>\n <th>isdecimal</th>\n <th>isnumeric</th>\n </tr>\n</thead>\n<tbody>'
for i in range(1, 1114111):
if chr(i).isdigit() or chr(i).isdecimal() or chr(i).isnumeric():
output += f'\n{chr(i)}\t{chr(i).isdigit()}'
output += f'\t{chr(i).isdecimal()}\t{chr(i).isnumeric()}'
if not (chr(i).isdigit() and chr(i).isdecimal() and chr(i).isnumeric()):
color = 'red'
else:
color = 'black'
html_output += f'<tr style="color:{color}">\n <td>{chr(i)}</td> \n <td>{chr(i).isdigit()}</td> \n <td>{chr(i).isdecimal()}</td> \n <td>{chr(i).isnumeric()}</td> \n</tr>'
html_output += '</tbody></table>'
with open('numbers.txt', 'w', encoding='utf-8') as f:
f.write(output)
print('Look in the strings/Demos directory for a new file.')
with open('numbers.html', 'w', encoding='utf-8') as f:
f.write(html_output)
print('Look in the strings/Demos directory for new numbers.txt and numbers.html files.')
|
# question can be found at leetcode.com/problems/sqrtx/
# The original implementation does a Binary search
class Solution:
def mySqrt(self, x: int) -> int:
if x == 0 or x == 1:
return x
i, result = 1, 1
while result <= x:
i += 1
result = pow(i, 2)
return i - 1
# or literally return x ** 0.5 :lel:
|
class Solution:
def my_sqrt(self, x: int) -> int:
if x == 0 or x == 1:
return x
(i, result) = (1, 1)
while result <= x:
i += 1
result = pow(i, 2)
return i - 1
|
'''
Problem 13
@author: mat.000
'''
numbers = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690"""
number_list = numbers.split()
def sum_of_list(number_list):
sum_of_list = 0
for element in number_list:
sum_of_list += int(element)
return sum_of_list
def first_ten_digits_of_number(number):
string = str(number)
return int(string[:10])
sum_of_list = sum_of_list(number_list)
print("Sum: " + str(sum_of_list))
print("First ten digits: " + str(first_ten_digits_of_number(sum_of_list)))
|
"""
Problem 13
@author: mat.000
"""
numbers = '37107287533902102798797998220837590246510135740250\n46376937677490009712648124896970078050417018260538\n74324986199524741059474233309513058123726617309629\n91942213363574161572522430563301811072406154908250\n23067588207539346171171980310421047513778063246676\n89261670696623633820136378418383684178734361726757\n28112879812849979408065481931592621691275889832738\n44274228917432520321923589422876796487670272189318\n47451445736001306439091167216856844588711603153276\n70386486105843025439939619828917593665686757934951\n62176457141856560629502157223196586755079324193331\n64906352462741904929101432445813822663347944758178\n92575867718337217661963751590579239728245598838407\n58203565325359399008402633568948830189458628227828\n80181199384826282014278194139940567587151170094390\n35398664372827112653829987240784473053190104293586\n86515506006295864861532075273371959191420517255829\n71693888707715466499115593487603532921714970056938\n54370070576826684624621495650076471787294438377604\n53282654108756828443191190634694037855217779295145\n36123272525000296071075082563815656710885258350721\n45876576172410976447339110607218265236877223636045\n17423706905851860660448207621209813287860733969412\n81142660418086830619328460811191061556940512689692\n51934325451728388641918047049293215058642563049483\n62467221648435076201727918039944693004732956340691\n15732444386908125794514089057706229429197107928209\n55037687525678773091862540744969844508330393682126\n18336384825330154686196124348767681297534375946515\n80386287592878490201521685554828717201219257766954\n78182833757993103614740356856449095527097864797581\n16726320100436897842553539920931837441497806860984\n48403098129077791799088218795327364475675590848030\n87086987551392711854517078544161852424320693150332\n59959406895756536782107074926966537676326235447210\n69793950679652694742597709739166693763042633987085\n41052684708299085211399427365734116182760315001271\n65378607361501080857009149939512557028198746004375\n35829035317434717326932123578154982629742552737307\n94953759765105305946966067683156574377167401875275\n88902802571733229619176668713819931811048770190271\n25267680276078003013678680992525463401061632866526\n36270218540497705585629946580636237993140746255962\n24074486908231174977792365466257246923322810917141\n91430288197103288597806669760892938638285025333403\n34413065578016127815921815005561868836468420090470\n23053081172816430487623791969842487255036638784583\n11487696932154902810424020138335124462181441773470\n63783299490636259666498587618221225225512486764533\n67720186971698544312419572409913959008952310058822\n95548255300263520781532296796249481641953868218774\n76085327132285723110424803456124867697064507995236\n37774242535411291684276865538926205024910326572967\n23701913275725675285653248258265463092207058596522\n29798860272258331913126375147341994889534765745501\n18495701454879288984856827726077713721403798879715\n38298203783031473527721580348144513491373226651381\n34829543829199918180278916522431027392251122869539\n40957953066405232632538044100059654939159879593635\n29746152185502371307642255121183693803580388584903\n41698116222072977186158236678424689157993532961922\n62467957194401269043877107275048102390895523597457\n23189706772547915061505504953922979530901129967519\n86188088225875314529584099251203829009407770775672\n11306739708304724483816533873502340845647058077308\n82959174767140363198008187129011875491310547126581\n97623331044818386269515456334926366572897563400500\n42846280183517070527831839425882145521227251250327\n55121603546981200581762165212827652751691296897789\n32238195734329339946437501907836945765883352399886\n75506164965184775180738168837861091527357929701337\n62177842752192623401942399639168044983993173312731\n32924185707147349566916674687634660915035914677504\n99518671430235219628894890102423325116913619626622\n73267460800591547471830798392868535206946944540724\n76841822524674417161514036427982273348055556214818\n97142617910342598647204516893989422179826088076852\n87783646182799346313767754307809363333018982642090\n10848802521674670883215120185883543223812876952786\n71329612474782464538636993009049310363619763878039\n62184073572399794223406235393808339651327408011116\n66627891981488087797941876876144230030984490851411\n60661826293682836764744779239180335110989069790714\n85786944089552990653640447425576083659976645795096\n66024396409905389607120198219976047599490197230297\n64913982680032973156037120041377903785566085089252\n16730939319872750275468906903707539413042652315011\n94809377245048795150954100921645863754710598436791\n78639167021187492431995700641917969777599028300699\n15368713711936614952811305876380278410754449733078\n40789923115535562561142322423255033685442488917353\n44889911501440648020369068063960672322193204149535\n41503128880339536053299340368006977710650566631954\n81234880673210146739058568557934581403627822703280\n82616570773948327592232845941706525094512325230608\n22918802058777319719839450180888072429661980811197\n77158542502016545090413245809786882778948721859617\n72107838435069186155435662884062257473692284509516\n20849603980134001723930671666823555245252804609722\n53503534226472524250874054075591789781264330331690'
number_list = numbers.split()
def sum_of_list(number_list):
sum_of_list = 0
for element in number_list:
sum_of_list += int(element)
return sum_of_list
def first_ten_digits_of_number(number):
string = str(number)
return int(string[:10])
sum_of_list = sum_of_list(number_list)
print('Sum: ' + str(sum_of_list))
print('First ten digits: ' + str(first_ten_digits_of_number(sum_of_list)))
|
"""ct.py: Constant time(ish) functions"""
# WARNING: Pure Python is not amenable to the implementation of truly
# constant time cryptography. For more information, please see the
# "Security Notice" section in python/README.md.
def select(subject, result_if_one, result_if_zero):
# type: (int, int, int) -> int
"""Perform a constant time(-ish) branch operation"""
return (~(subject - 1) & result_if_one) | ((subject - 1) & result_if_zero)
|
"""ct.py: Constant time(ish) functions"""
def select(subject, result_if_one, result_if_zero):
"""Perform a constant time(-ish) branch operation"""
return ~(subject - 1) & result_if_one | subject - 1 & result_if_zero
|
# indices for weight, magnitude, and phase lists
M, P, W = 0, 1, 2
# Set max for model weighting. Minimum is 1.0
MAX_MODEL_WEIGHT = 100.0
# Rich text control for red font
RICH_TEXT_RED = "<FONT COLOR='red'>"
# Color definitions
# (see http://www.w3schools.com/tags/ref_colorpicker.asp )
M_COLOR = "#6f93ff" # magnitude data
P_COLOR = "#ffcc00" # phase data
DM_COLOR = "#3366ff" # drawn magnitude
DP_COLOR = "#b38f00" # drawn phase
DW_COLOR = "#33cc33" # drawn weight
MM_COLOR = "000000" # modeled magnitude lines
MP_COLOR = "000000" # modeled phase lines
# Line styles for modeled data
MM_STYLE = "-"
MP_STYLE = "--"
# Boolean for whether to allow negative param results
ALLOW_NEG = True
# File name for parameter save/restore
PARAM_FILE = "params.csv"
# Path and file name for preferred text editor for editing model scripts
# TODO: make this usable from os.startfile() -- not successful so far
EDITOR = "notepad++.exe"
EDIT_PATH = "C:\\Program Files\\Notepad++\\"
# List of fitting methods available to lmfit
# (Dogleg and Newton CG are not included since they require a Jacobian
# to be supplied)
METHODS = [
("Levenberg-Marquardt", "leastsq"),
("Nelder-Mead", "nelder"),
("L-BFGS-B", "lbfgsb"),
("Powell", "powell"),
("Conjugate Gradient", "cg"),
("COBYLA", "cobyla"),
("Truncated Newton", "tnc"),
("Sequential Linear Squares", "slsqp"),
("Differential Evolution", "differential_evolution")
]
# Define one or the other:
LIMITS = "lmfit"
# LIMITS = "zfit"
|
(m, p, w) = (0, 1, 2)
max_model_weight = 100.0
rich_text_red = "<FONT COLOR='red'>"
m_color = '#6f93ff'
p_color = '#ffcc00'
dm_color = '#3366ff'
dp_color = '#b38f00'
dw_color = '#33cc33'
mm_color = '000000'
mp_color = '000000'
mm_style = '-'
mp_style = '--'
allow_neg = True
param_file = 'params.csv'
editor = 'notepad++.exe'
edit_path = 'C:\\Program Files\\Notepad++\\'
methods = [('Levenberg-Marquardt', 'leastsq'), ('Nelder-Mead', 'nelder'), ('L-BFGS-B', 'lbfgsb'), ('Powell', 'powell'), ('Conjugate Gradient', 'cg'), ('COBYLA', 'cobyla'), ('Truncated Newton', 'tnc'), ('Sequential Linear Squares', 'slsqp'), ('Differential Evolution', 'differential_evolution')]
limits = 'lmfit'
|
# Lost Memories Found [Hayato] (57172)
recoveredMemory = 7081
mouriMotonari = 9130008
sm.setSpeakerID(mouriMotonari)
sm.sendNext("I've been watching you fight for Maple World, Hayato. "
"Your dedication is impressive.")
sm.sendSay("I, Mouri Motonari, hope that you will call me an ally. "
"The two of us have a great future together.")
sm.sendSay("Continue your quest, and I shall ensure we go down in history.")
sm.startQuest(parentID)
sm.completeQuest(parentID)
sm.startQuest(recoveredMemory)
sm.setQRValue(recoveredMemory, "1", False)
|
recovered_memory = 7081
mouri_motonari = 9130008
sm.setSpeakerID(mouriMotonari)
sm.sendNext("I've been watching you fight for Maple World, Hayato. Your dedication is impressive.")
sm.sendSay('I, Mouri Motonari, hope that you will call me an ally. The two of us have a great future together.')
sm.sendSay('Continue your quest, and I shall ensure we go down in history.')
sm.startQuest(parentID)
sm.completeQuest(parentID)
sm.startQuest(recoveredMemory)
sm.setQRValue(recoveredMemory, '1', False)
|
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : test_base_setting.py
# Abstract : Base recognition Model test setting
# Current Version: 1.0.0
# Date : 2021-06-11
##################################################################################################
"""
# encoding=utf-8
# recognition dictionary
character = "/data1/open-source/demo/text_recognition/__dictionary__/Scene_text_68.txt"
# dataset settings
dataset_type = 'DavarMultiDataset'
img_norm_cfg = dict(
mean=[127.5], std=[127.5])
ppld = {
'LMDB_Standard': 'LoadImageFromLMDB', # open-source LMDB data
# Davar dataset type
'LMDB_Davar': 'RCGLoadImageFromLMDB',
'File': 'RCGLoadImageFromFile',
'Loose': 'RCGLoadImageFromLoose',
'Tight': 'RCGLoadImageFromTight',
}
test_pipeline = [
dict(type='LoadImageFromLMDB',
character=character,
sensitive=False,
color_types=['gray'],
fil_ops=True),
dict(type='ResizeNormalize',
size=(100, 32),
interpolation=2,
mean=[127.5],
std=[127.5]),
dict(type='DavarDefaultFormatBundle'),
dict(type='DavarCollect',
keys=['img'],
meta_keys=[])]
testsets = [
{
'Name': 'IIIT5k',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IIIT5k_3000/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'SVT',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'SVT/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC03_860',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC03_860/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC03_867',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC03_867/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC13_857',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC13_857/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC13_1015',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC13_1015/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC15_1811',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC15_1811/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'IC15_2077',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'IC15_2077/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'SVTP',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'SVTP/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
{
'Name': 'CUTE80',
'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/',
'AnnFile': 'CUTE80/',
'Type': 'LMDB_Standard',
'PipeLine': test_pipeline,
},
]
# data setting
data = dict(
imgs_per_gpu=400, # 128
workers_per_gpu=2, # 2
sampler=dict(
type='BatchBalancedSampler',
mode=0,),
train=None,
test=dict(
type="DavarRCGDataset",
info=testsets,
batch_max_length=25,
used_ratio=1,
test_mode=True,
pipeline=test_pipeline)
)
# runtime setting
dist_params = dict(backend='nccl')
launcher = 'none'
|
"""
##################################################################################################
# Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved.
# Filename : test_base_setting.py
# Abstract : Base recognition Model test setting
# Current Version: 1.0.0
# Date : 2021-06-11
##################################################################################################
"""
character = '/data1/open-source/demo/text_recognition/__dictionary__/Scene_text_68.txt'
dataset_type = 'DavarMultiDataset'
img_norm_cfg = dict(mean=[127.5], std=[127.5])
ppld = {'LMDB_Standard': 'LoadImageFromLMDB', 'LMDB_Davar': 'RCGLoadImageFromLMDB', 'File': 'RCGLoadImageFromFile', 'Loose': 'RCGLoadImageFromLoose', 'Tight': 'RCGLoadImageFromTight'}
test_pipeline = [dict(type='LoadImageFromLMDB', character=character, sensitive=False, color_types=['gray'], fil_ops=True), dict(type='ResizeNormalize', size=(100, 32), interpolation=2, mean=[127.5], std=[127.5]), dict(type='DavarDefaultFormatBundle'), dict(type='DavarCollect', keys=['img'], meta_keys=[])]
testsets = [{'Name': 'IIIT5k', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IIIT5k_3000/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'SVT', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'SVT/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC03_860', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC03_860/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC03_867', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC03_867/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC13_857', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC13_857/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC13_1015', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC13_1015/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC15_1811', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC15_1811/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'IC15_2077', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'IC15_2077/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'SVTP', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'SVTP/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}, {'Name': 'CUTE80', 'FilePre': '/dataset/chengzhanzhan/TextRecognition/LMDB/BenchEn/evaluation/', 'AnnFile': 'CUTE80/', 'Type': 'LMDB_Standard', 'PipeLine': test_pipeline}]
data = dict(imgs_per_gpu=400, workers_per_gpu=2, sampler=dict(type='BatchBalancedSampler', mode=0), train=None, test=dict(type='DavarRCGDataset', info=testsets, batch_max_length=25, used_ratio=1, test_mode=True, pipeline=test_pipeline))
dist_params = dict(backend='nccl')
launcher = 'none'
|
def enc():
code=input('Write what you want to encrypt: ')
new_code_1=''
for i in range(0,len(code)-1,1):
new_code_1=new_code_1+code[i+1]
new_code_1=new_code_1+code[i]
print(new_code_1)
def dec():
code=input('Write what you want to deccrypt: ')
new_code=''
if len(code)>1:
new_code=new_code+code[1]
new_code=new_code+code[0]
for i in range(2,len(code)-1,2):
new_code=new_code+code[i]
else:
print(code)
print(new_code)
def main():
print('Helo, this is task 1. Its goal to encrypt or decrypt your letter')
choice=input('What do you want to do? ')
if choice=='encrypt':
enc()
elif choice=='decrypt':
dec()
else:
print('Error')
if __name__=='__main__':
main()
|
def enc():
code = input('Write what you want to encrypt: ')
new_code_1 = ''
for i in range(0, len(code) - 1, 1):
new_code_1 = new_code_1 + code[i + 1]
new_code_1 = new_code_1 + code[i]
print(new_code_1)
def dec():
code = input('Write what you want to deccrypt: ')
new_code = ''
if len(code) > 1:
new_code = new_code + code[1]
new_code = new_code + code[0]
for i in range(2, len(code) - 1, 2):
new_code = new_code + code[i]
else:
print(code)
print(new_code)
def main():
print('Helo, this is task 1. Its goal to encrypt or decrypt your letter')
choice = input('What do you want to do? ')
if choice == 'encrypt':
enc()
elif choice == 'decrypt':
dec()
else:
print('Error')
if __name__ == '__main__':
main()
|
class Node:
def __init__(self, key):
self.data = key
self.left = None
self.right = None
def getNodeCount(root : Node) -> int:
if root is None:
return 0
count = 0
count += 1
count += (getNodeCount(root.left) + getNodeCount(root.right))
return count
if __name__ == '__main__':
root = Node(2)
root.left = Node(7)
root.right = Node(5)
root.left.right = Node(6)
root.left.right.left = Node(1)
root.left.right.right = Node(11)
root.right.right = Node(9)
root.right.right.left = Node(4)
print(f"node count: {getNodeCount(root)}")
|
class Node:
def __init__(self, key):
self.data = key
self.left = None
self.right = None
def get_node_count(root: Node) -> int:
if root is None:
return 0
count = 0
count += 1
count += get_node_count(root.left) + get_node_count(root.right)
return count
if __name__ == '__main__':
root = node(2)
root.left = node(7)
root.right = node(5)
root.left.right = node(6)
root.left.right.left = node(1)
root.left.right.right = node(11)
root.right.right = node(9)
root.right.right.left = node(4)
print(f'node count: {get_node_count(root)}')
|
# Copyright 2021 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Follows convention set in objectivec_helpers.cc in the protobuf ObjC compiler.
"""
Contains generic helper utilities.
"""
_upper_segments_list = ["url", "http", "https"]
def strip_extension(str):
return str.rpartition(".")[0]
def capitalize(word):
if word in _upper_segments_list:
return word.upper()
else:
return word.capitalize()
def lower_underscore_to_upper_camel(str):
"""Converts from lower underscore case to upper camel case.
Args:
str: The snake case string to convert.
Returns:
The title case version of str.
"""
str = strip_extension(str)
camel_case_str = ""
word = ""
for c in str.elems(): # NB: assumes ASCII!
if c.isalpha():
word += c.lower()
else:
# Last word is finished.
if len(word):
camel_case_str += capitalize(word)
word = ""
if c.isdigit():
camel_case_str += c
# Otherwise, drop the character. See UnderscoresToCamelCase in:
# third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
if len(word):
camel_case_str += capitalize(word)
return camel_case_str
def file_to_upper_camel(src):
elements = src.rpartition("/")
upper_camel = lower_underscore_to_upper_camel(elements[-1])
return "".join(list(elements[:-1]) + [upper_camel])
def file_with_extension(src, ext):
elements = src.rpartition("/")
return "".join(list(elements[:-1]) + [elements[-1], "." + ext])
def to_upper_camel_with_extension(src, ext):
src = file_to_upper_camel(src)
return file_with_extension(src, ext)
|
"""
Contains generic helper utilities.
"""
_upper_segments_list = ['url', 'http', 'https']
def strip_extension(str):
return str.rpartition('.')[0]
def capitalize(word):
if word in _upper_segments_list:
return word.upper()
else:
return word.capitalize()
def lower_underscore_to_upper_camel(str):
"""Converts from lower underscore case to upper camel case.
Args:
str: The snake case string to convert.
Returns:
The title case version of str.
"""
str = strip_extension(str)
camel_case_str = ''
word = ''
for c in str.elems():
if c.isalpha():
word += c.lower()
else:
if len(word):
camel_case_str += capitalize(word)
word = ''
if c.isdigit():
camel_case_str += c
if len(word):
camel_case_str += capitalize(word)
return camel_case_str
def file_to_upper_camel(src):
elements = src.rpartition('/')
upper_camel = lower_underscore_to_upper_camel(elements[-1])
return ''.join(list(elements[:-1]) + [upper_camel])
def file_with_extension(src, ext):
elements = src.rpartition('/')
return ''.join(list(elements[:-1]) + [elements[-1], '.' + ext])
def to_upper_camel_with_extension(src, ext):
src = file_to_upper_camel(src)
return file_with_extension(src, ext)
|
"""Possible user action events"""
EVENT_BROWSE_FILES = "BrowseFiles"
EVENT_DELETE_SONG = "DeleteSong"
EVENT_PLAY_SONG = "PlaySong"
EVENT_SELECT_SONGS = "SelectSingleSong"
EVENT_CREATE_PLAYLIST = "CreatePlaylist"
EVENT_DELETE_PLAYLIST = "DeletePlaylist"
EVENT_SELECT_PLAYLIST = "SelectPlaylist"
EVENT_PLAY_PLAYLIST = "PlayPlaylist"
EVENT_INSERT_COIN = "InsertCoin"
EVENT_GET_CHANGE = "GetChange"
EVENT_SEARCH_STREAMING = "SearchStreaming"
EVENT_STOP = "Stop"
|
"""Possible user action events"""
event_browse_files = 'BrowseFiles'
event_delete_song = 'DeleteSong'
event_play_song = 'PlaySong'
event_select_songs = 'SelectSingleSong'
event_create_playlist = 'CreatePlaylist'
event_delete_playlist = 'DeletePlaylist'
event_select_playlist = 'SelectPlaylist'
event_play_playlist = 'PlayPlaylist'
event_insert_coin = 'InsertCoin'
event_get_change = 'GetChange'
event_search_streaming = 'SearchStreaming'
event_stop = 'Stop'
|
# import torch
# import torch.nn as nn
# import numpy as np
class NetworkFit(object):
def __init__(self, model, optimizer, soft_criterion):
self.model = model
self.optimizer = optimizer
self.soft_criterion = soft_criterion
def train(self, inputs, labels):
self.optimizer.zero_grad()
self.model.train()
outputs = self.model(inputs)
soft_loss = self.soft_criterion(outputs, labels)
loss = soft_loss
loss.backward()
self.optimizer.step()
def test(self, inputs, labels):
self.model.eval()
outputs = self.model(inputs)
soft_loss = self.soft_criterion(outputs, labels)
loss = soft_loss
_, predicted = outputs.max(1)
correct = (predicted == labels).sum().item()
return [loss.item()], [correct]
def get_model(self):
return self.model
|
class Networkfit(object):
def __init__(self, model, optimizer, soft_criterion):
self.model = model
self.optimizer = optimizer
self.soft_criterion = soft_criterion
def train(self, inputs, labels):
self.optimizer.zero_grad()
self.model.train()
outputs = self.model(inputs)
soft_loss = self.soft_criterion(outputs, labels)
loss = soft_loss
loss.backward()
self.optimizer.step()
def test(self, inputs, labels):
self.model.eval()
outputs = self.model(inputs)
soft_loss = self.soft_criterion(outputs, labels)
loss = soft_loss
(_, predicted) = outputs.max(1)
correct = (predicted == labels).sum().item()
return ([loss.item()], [correct])
def get_model(self):
return self.model
|
#!/usr/bin/python3
def no_c(my_string):
newStr = ""
for i in range(len(my_string)):
if my_string[i] != "c" and my_string[i] != "C":
newStr += my_string[i]
return (newStr)
|
def no_c(my_string):
new_str = ''
for i in range(len(my_string)):
if my_string[i] != 'c' and my_string[i] != 'C':
new_str += my_string[i]
return newStr
|
height = int(input())
for i in range(1, height + 1):
for j in range(1, height + 1):
if(i == 1 or i == height or j == 1 or j == height):
print(1,end=" ")
else:
print(0,end=" ")
print()
# Sample Input :- 5
# Output :-
# 1 1 1 1 1
# 1 0 0 0 1
# 1 0 0 0 1
# 1 0 0 0 1
# 1 1 1 1 1
|
height = int(input())
for i in range(1, height + 1):
for j in range(1, height + 1):
if i == 1 or i == height or j == 1 or (j == height):
print(1, end=' ')
else:
print(0, end=' ')
print()
|
class Trie(object):
def __init__(self):
self.root = TrieNode()
def add(self, word):
"""
Add `word` to trie
"""
current_node = self.root
for char in word:
current_node = current_node.children[char]
current_node.is_word = True
def exists(self, word):
"""
Check if word exists in trie
"""
current_node = self.root
for char in word:
if char not in current_node.children:
return False
current_node = current_node.children[char]
return current_node.is_word
|
class Trie(object):
def __init__(self):
self.root = trie_node()
def add(self, word):
"""
Add `word` to trie
"""
current_node = self.root
for char in word:
current_node = current_node.children[char]
current_node.is_word = True
def exists(self, word):
"""
Check if word exists in trie
"""
current_node = self.root
for char in word:
if char not in current_node.children:
return False
current_node = current_node.children[char]
return current_node.is_word
|
input = """
p(1) :- #count{X:q(X)}=1.
q(X) :- p(X).
"""
output = """
p(1) :- #count{X:q(X)}=1.
q(X) :- p(X).
"""
|
input = '\np(1) :- #count{X:q(X)}=1.\nq(X) :- p(X).\n'
output = '\np(1) :- #count{X:q(X)}=1.\nq(X) :- p(X).\n'
|
def _find_patterns(content, pos, patterns):
max = len(content)
for i in range(pos, max):
for p in enumerate(patterns):
if content.startswith(p[1], i):
return struct(
pos = i,
pattern = p[0]
)
return None
_find_ending_escapes = {
'(': ')',
'"': '"',
"'": "'",
'{': '}',
}
def _find_ending(content, pos, endch, escapes = _find_ending_escapes):
max = len(content)
ending_search_stack = [ endch ]
for i in range(pos, max):
ch = content[i]
if ch == ending_search_stack[0]:
ending_search_stack.pop(0)
if not ending_search_stack:
return i
continue
for start, end in escapes.items():
if ch == start:
ending_search_stack.insert(0, end)
break
return None
_whitespace_chars = [ ' ', '\t', '\n' ]
def _is_whitespace(content, pos, end_pos, ws = _whitespace_chars):
for i in range(pos, end_pos):
if not content[i] in ws:
return False
return True
parse = struct(
find_patterns = _find_patterns,
find_ending = _find_ending,
is_whitespace = _is_whitespace,
)
|
def _find_patterns(content, pos, patterns):
max = len(content)
for i in range(pos, max):
for p in enumerate(patterns):
if content.startswith(p[1], i):
return struct(pos=i, pattern=p[0])
return None
_find_ending_escapes = {'(': ')', '"': '"', "'": "'", '{': '}'}
def _find_ending(content, pos, endch, escapes=_find_ending_escapes):
max = len(content)
ending_search_stack = [endch]
for i in range(pos, max):
ch = content[i]
if ch == ending_search_stack[0]:
ending_search_stack.pop(0)
if not ending_search_stack:
return i
continue
for (start, end) in escapes.items():
if ch == start:
ending_search_stack.insert(0, end)
break
return None
_whitespace_chars = [' ', '\t', '\n']
def _is_whitespace(content, pos, end_pos, ws=_whitespace_chars):
for i in range(pos, end_pos):
if not content[i] in ws:
return False
return True
parse = struct(find_patterns=_find_patterns, find_ending=_find_ending, is_whitespace=_is_whitespace)
|
# Time: O(n)
# Space: O(n)
# 946
# Given two sequences pushed and popped with distinct values, return true if and only if this could have been the
# result of a sequence of push and pop operations on an initially empty stack.
# 0 <= pushed.length == popped.length <= 1000
# 0 <= pushed[i], popped[i] < 1000
# pushed is a permutation of popped.
# pushed and popped have distinct values.
# Solution: Greedy
# We have to push the items in order. Greedily pop values from top of stack if they are
# the next values to pop.
class Solution(object):
def validateStackSequences(self, pushed, popped):
"""
:type pushed: List[int]
:type popped: List[int]
:rtype: bool
"""
j = 0
stk = []
for v in pushed:
stk.append(v)
while stk and stk[-1] == popped[j]:
stk.pop()
j += 1
return j == len(popped) # ideally should be all popped
print(Solution().validateStackSequences([1,2,3,4,5], [4,5,3,2,1])) # True
print(Solution().validateStackSequences([1,2,3,4,5], [4,3,5,1,2])) # False
|
class Solution(object):
def validate_stack_sequences(self, pushed, popped):
"""
:type pushed: List[int]
:type popped: List[int]
:rtype: bool
"""
j = 0
stk = []
for v in pushed:
stk.append(v)
while stk and stk[-1] == popped[j]:
stk.pop()
j += 1
return j == len(popped)
print(solution().validateStackSequences([1, 2, 3, 4, 5], [4, 5, 3, 2, 1]))
print(solution().validateStackSequences([1, 2, 3, 4, 5], [4, 3, 5, 1, 2]))
|
def num_of_likes(names):
if len(names) == 0:
return 'no one likes this'
elif len(names) == 1:
return names[0] + ' likes this'
elif len(names) == 2:
return names[0] + ' and ' + names[1] + ' like this'
elif len(names) == 3:
return names[0] + ', ' + names[1] + ' and ' + names[2] + ' like this'
else:
return names[0] + ', ' + names[1] + ' and '+ str(len(names)-2) + ' others like this'
|
def num_of_likes(names):
if len(names) == 0:
return 'no one likes this'
elif len(names) == 1:
return names[0] + ' likes this'
elif len(names) == 2:
return names[0] + ' and ' + names[1] + ' like this'
elif len(names) == 3:
return names[0] + ', ' + names[1] + ' and ' + names[2] + ' like this'
else:
return names[0] + ', ' + names[1] + ' and ' + str(len(names) - 2) + ' others like this'
|
def singleton(theClass):
""" decorator for a class to make a singleton out of it """
classInstances = {}
def getInstance(*args, **kwargs):
""" creating or just return the one and only class instance.
The singleton depends on the parameters used in __init__ """
key = (theClass, args, str(kwargs))
if key not in classInstances:
classInstances[key] = theClass(*args, **kwargs)
return classInstances[key]
return getInstance
# Example
@singleton
class A:
""" test class """
def __init__(self, key=None, subkey=None):
self.key = key
self.subkey = subkey
def __repr__(self):
return "A(id=%d, %s,%s)" % (id(self), self.key, self.subkey)
def tests():
""" some basic tests """
testCases = [ (None, None), (10, 20), (30, None), (None, 30) ]
instances = set()
instance1 = None
instance2 = None
for key, subkey in testCases:
if key == None:
if subkey == None: instance1, instance2 = A(), A()
else: instance1, instance2 = A(subkey=subkey), A(subkey=subkey)
else:
if subkey == None: instance1, instance2 = A(key), A(key)
else: instance1, instance2 = A(key, subkey=subkey), A(key, subkey=subkey)
print("instance1: %-25s" % instance1, " instance2: %-25s" % instance2)
assert instance1 == instance2
assert instance1.key == key and instance1.subkey == subkey
instances.add(instance1)
assert len(instances) == len(testCases)
tests()
|
def singleton(theClass):
""" decorator for a class to make a singleton out of it """
class_instances = {}
def get_instance(*args, **kwargs):
""" creating or just return the one and only class instance.
The singleton depends on the parameters used in __init__ """
key = (theClass, args, str(kwargs))
if key not in classInstances:
classInstances[key] = the_class(*args, **kwargs)
return classInstances[key]
return getInstance
@singleton
class A:
""" test class """
def __init__(self, key=None, subkey=None):
self.key = key
self.subkey = subkey
def __repr__(self):
return 'A(id=%d, %s,%s)' % (id(self), self.key, self.subkey)
def tests():
""" some basic tests """
test_cases = [(None, None), (10, 20), (30, None), (None, 30)]
instances = set()
instance1 = None
instance2 = None
for (key, subkey) in testCases:
if key == None:
if subkey == None:
(instance1, instance2) = (a(), a())
else:
(instance1, instance2) = (a(subkey=subkey), a(subkey=subkey))
elif subkey == None:
(instance1, instance2) = (a(key), a(key))
else:
(instance1, instance2) = (a(key, subkey=subkey), a(key, subkey=subkey))
print('instance1: %-25s' % instance1, ' instance2: %-25s' % instance2)
assert instance1 == instance2
assert instance1.key == key and instance1.subkey == subkey
instances.add(instance1)
assert len(instances) == len(testCases)
tests()
|
"""
These are functions that create a sequence by adding the first number to the
second number and then adding the third number to the second, and so on.
"""
def fibonacci(n):
"""
This function assumed the first number is 0 and the second number is 1.
fibonacci(nth number in sequence you want returned)
"""
first = 0
second = 1
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
first, second = second, next_num
return next_num
# 0, 1, 1, 2, 3, 5, 8, 13, 21, 34 ...
def lucas(n):
"""
This function assumes the first number is 2 and the second number is 1.
lucas(nth number in sequence you want returned)
"""
first = 2
second = 1
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
first, second = second, next_num
return next_num
# 2, 1, 3, 4, 7, 11, 18, 29, 47, 76 ...
def sum_series(n, first=0, second=1):
"""
This function accepts up to three arguments. The first arg is the number
of the sequence you would like to return. The second and third args are
optional, where the second arg is the first number in the sequence and the
third arg is the second number in the sequence.
The second arg will default to 0 if not supplied.
The third arg will default to 1 if not supplied.
sum_series(nth number in sequence you want returned,
[first num in sequence], [second num in sequence])
"""
first = first
second = second
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
first, second = second, next_num
return next_num
# 4, 9, 13, 22, 35, 57, 92, 149, 241, 390
|
"""
These are functions that create a sequence by adding the first number to the
second number and then adding the third number to the second, and so on.
"""
def fibonacci(n):
"""
This function assumed the first number is 0 and the second number is 1.
fibonacci(nth number in sequence you want returned)
"""
first = 0
second = 1
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
(first, second) = (second, next_num)
return next_num
def lucas(n):
"""
This function assumes the first number is 2 and the second number is 1.
lucas(nth number in sequence you want returned)
"""
first = 2
second = 1
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
(first, second) = (second, next_num)
return next_num
def sum_series(n, first=0, second=1):
"""
This function accepts up to three arguments. The first arg is the number
of the sequence you would like to return. The second and third args are
optional, where the second arg is the first number in the sequence and the
third arg is the second number in the sequence.
The second arg will default to 0 if not supplied.
The third arg will default to 1 if not supplied.
sum_series(nth number in sequence you want returned,
[first num in sequence], [second num in sequence])
"""
first = first
second = second
if n == 1:
return first
elif n == 2:
return second
else:
for i in range(n - 2):
next_num = first + second
(first, second) = (second, next_num)
return next_num
|
def is_none_us_symbol(symbol: str) -> bool:
return symbol.endswith(".HK") or symbol.endswith(".SZ") or symbol.endswith(".SH")
def is_us_symbol(symbol: str) -> bool:
return not is_none_us_symbol(symbol)
|
def is_none_us_symbol(symbol: str) -> bool:
return symbol.endswith('.HK') or symbol.endswith('.SZ') or symbol.endswith('.SH')
def is_us_symbol(symbol: str) -> bool:
return not is_none_us_symbol(symbol)
|
# table definition
table = {
'table_name' : 'adm_functions',
'module_id' : 'adm',
'short_descr' : 'Functions',
'long_descr' : 'Functional breakdwon of the organisation',
'sub_types' : None,
'sub_trans' : None,
'sequence' : ['seq', ['parent_id'], None],
'tree_params' : [None, ['function_id', 'descr', 'parent_id', 'seq'],
['function_type', [['root', 'Root']], None]],
'roll_params' : None,
'indexes' : None,
'ledger_col' : None,
'defn_company' : None,
'data_company' : None,
'read_only' : False,
}
# column definitions
cols = []
cols.append ({
'col_name' : 'row_id',
'data_type' : 'AUTO',
'short_descr': 'Row id',
'long_descr' : 'Row id',
'col_head' : 'Row',
'key_field' : 'Y',
'data_source': 'gen',
'condition' : None,
'allow_null' : False,
'allow_amend': False,
'max_len' : 0,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'created_id',
'data_type' : 'INT',
'short_descr': 'Created id',
'long_descr' : 'Created row id',
'col_head' : 'Created',
'key_field' : 'N',
'data_source': 'gen',
'condition' : None,
'allow_null' : False,
'allow_amend': False,
'max_len' : 0,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : '0',
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'deleted_id',
'data_type' : 'INT',
'short_descr': 'Deleted id',
'long_descr' : 'Deleted row id',
'col_head' : 'Deleted',
'key_field' : 'N',
'data_source': 'gen',
'condition' : None,
'allow_null' : False,
'allow_amend': False,
'max_len' : 0,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : '0',
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'function_id',
'data_type' : 'TEXT',
'short_descr': 'Function id',
'long_descr' : 'Function id',
'col_head' : 'Fcn',
'key_field' : 'A',
'data_source': 'input',
'condition' : None,
'allow_null' : False,
'allow_amend': False,
'max_len' : 15,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'descr',
'data_type' : 'TEXT',
'short_descr': 'Description',
'long_descr' : 'Function description',
'col_head' : 'Description',
'key_field' : 'N',
'data_source': 'input',
'condition' : None,
'allow_null' : False,
'allow_amend': True,
'max_len' : 30,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'function_type',
'data_type' : 'TEXT',
'short_descr': 'Type of function code',
'long_descr' : (
"Type of function code.\n"
"If fixed levels are defined for this table (see tree_params),\n"
" user must specify a 'type column', and define a 'type code' for each level.\n"
"This is the type column where the type codes are stored.\n"
"At run-time, tree_params is inspected, and if fixed levels are detected,\n"
" 'choices' for this column is populated with valid codes which is then\n"
" used for validation."
),
'col_head' : 'Type',
'key_field' : 'N',
'data_source': 'input',
'condition' : None,
'allow_null' : False,
'allow_amend': False,
'max_len' : 10,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
cols.append ({
'col_name' : 'parent_id',
'data_type' : 'INT',
'short_descr': 'Parent id',
'long_descr' : 'Parent id',
'col_head' : 'Parent',
'key_field' : 'N',
'data_source': 'input',
'condition' : None,
'allow_null' : True,
'allow_amend': True,
'max_len' : 0,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : ['adm_functions', 'row_id', 'parent', 'function_id', False, None],
'choices' : None,
})
cols.append ({
'col_name' : 'seq',
'data_type' : 'INT',
'short_descr': 'Sequence',
'long_descr' : 'Sequence',
'col_head' : 'Seq',
'key_field' : 'N',
'data_source': 'seq',
'condition' : None,
'allow_null' : False,
'allow_amend': True,
'max_len' : 0,
'db_scale' : 0,
'scale_ptr' : None,
'dflt_val' : None,
'dflt_rule' : None,
'col_checks' : None,
'fkey' : None,
'choices' : None,
})
# virtual column definitions
virt = []
virt.append ({
'col_name' : 'children',
'data_type' : 'INT',
'short_descr': 'Children',
'long_descr' : 'Number of children',
'col_head' : '',
'sql' : "SELECT count(*) FROM {company}.adm_functions b "
"WHERE b.parent_id = a.row_id AND b.deleted_id = 0",
})
virt.append ({
'col_name' : 'is_leaf',
'data_type' : 'BOOL',
'short_descr': 'Is leaf node?',
'long_descr' : 'Is this node a leaf node? Over-ridden at run-time in db.objects if levels added.',
'col_head' : '',
'sql' : '$True',
})
# cursor definitions
cursors = []
cursors.append({
'cursor_name': 'functions',
'title': 'Maintain functions',
'columns': [
['function_id', 100, False, False],
['descr', 260, True, True],
],
'filter': [['WHERE', '', 'function_type', '!=', "'root'", '']],
'sequence': [['function_id', False]],
})
# actions
actions = []
actions.append([
'after_commit', '<pyfunc name="db.cache.param_updated"/>'
])
|
table = {'table_name': 'adm_functions', 'module_id': 'adm', 'short_descr': 'Functions', 'long_descr': 'Functional breakdwon of the organisation', 'sub_types': None, 'sub_trans': None, 'sequence': ['seq', ['parent_id'], None], 'tree_params': [None, ['function_id', 'descr', 'parent_id', 'seq'], ['function_type', [['root', 'Root']], None]], 'roll_params': None, 'indexes': None, 'ledger_col': None, 'defn_company': None, 'data_company': None, 'read_only': False}
cols = []
cols.append({'col_name': 'row_id', 'data_type': 'AUTO', 'short_descr': 'Row id', 'long_descr': 'Row id', 'col_head': 'Row', 'key_field': 'Y', 'data_source': 'gen', 'condition': None, 'allow_null': False, 'allow_amend': False, 'max_len': 0, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'created_id', 'data_type': 'INT', 'short_descr': 'Created id', 'long_descr': 'Created row id', 'col_head': 'Created', 'key_field': 'N', 'data_source': 'gen', 'condition': None, 'allow_null': False, 'allow_amend': False, 'max_len': 0, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': '0', 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'deleted_id', 'data_type': 'INT', 'short_descr': 'Deleted id', 'long_descr': 'Deleted row id', 'col_head': 'Deleted', 'key_field': 'N', 'data_source': 'gen', 'condition': None, 'allow_null': False, 'allow_amend': False, 'max_len': 0, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': '0', 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'function_id', 'data_type': 'TEXT', 'short_descr': 'Function id', 'long_descr': 'Function id', 'col_head': 'Fcn', 'key_field': 'A', 'data_source': 'input', 'condition': None, 'allow_null': False, 'allow_amend': False, 'max_len': 15, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'descr', 'data_type': 'TEXT', 'short_descr': 'Description', 'long_descr': 'Function description', 'col_head': 'Description', 'key_field': 'N', 'data_source': 'input', 'condition': None, 'allow_null': False, 'allow_amend': True, 'max_len': 30, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'function_type', 'data_type': 'TEXT', 'short_descr': 'Type of function code', 'long_descr': "Type of function code.\nIf fixed levels are defined for this table (see tree_params),\n user must specify a 'type column', and define a 'type code' for each level.\nThis is the type column where the type codes are stored.\nAt run-time, tree_params is inspected, and if fixed levels are detected,\n 'choices' for this column is populated with valid codes which is then\n used for validation.", 'col_head': 'Type', 'key_field': 'N', 'data_source': 'input', 'condition': None, 'allow_null': False, 'allow_amend': False, 'max_len': 10, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
cols.append({'col_name': 'parent_id', 'data_type': 'INT', 'short_descr': 'Parent id', 'long_descr': 'Parent id', 'col_head': 'Parent', 'key_field': 'N', 'data_source': 'input', 'condition': None, 'allow_null': True, 'allow_amend': True, 'max_len': 0, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': ['adm_functions', 'row_id', 'parent', 'function_id', False, None], 'choices': None})
cols.append({'col_name': 'seq', 'data_type': 'INT', 'short_descr': 'Sequence', 'long_descr': 'Sequence', 'col_head': 'Seq', 'key_field': 'N', 'data_source': 'seq', 'condition': None, 'allow_null': False, 'allow_amend': True, 'max_len': 0, 'db_scale': 0, 'scale_ptr': None, 'dflt_val': None, 'dflt_rule': None, 'col_checks': None, 'fkey': None, 'choices': None})
virt = []
virt.append({'col_name': 'children', 'data_type': 'INT', 'short_descr': 'Children', 'long_descr': 'Number of children', 'col_head': '', 'sql': 'SELECT count(*) FROM {company}.adm_functions b WHERE b.parent_id = a.row_id AND b.deleted_id = 0'})
virt.append({'col_name': 'is_leaf', 'data_type': 'BOOL', 'short_descr': 'Is leaf node?', 'long_descr': 'Is this node a leaf node? Over-ridden at run-time in db.objects if levels added.', 'col_head': '', 'sql': '$True'})
cursors = []
cursors.append({'cursor_name': 'functions', 'title': 'Maintain functions', 'columns': [['function_id', 100, False, False], ['descr', 260, True, True]], 'filter': [['WHERE', '', 'function_type', '!=', "'root'", '']], 'sequence': [['function_id', False]]})
actions = []
actions.append(['after_commit', '<pyfunc name="db.cache.param_updated"/>'])
|
# Commutable Islands
#
# There are n islands and there are many bridges connecting them. Each bridge has
# some cost attached to it.
#
# We need to find bridges with minimal cost such that all islands are connected.
#
# It is guaranteed that input data will contain at least one possible scenario in which
# all islands are connected with each other.
#
# Example :
# Input
#
# Number of islands ( n ) = 4
# 1 2 1
# 2 3 4
# 1 4 3
# 4 3 2
# 1 3 10
#
# In this example, we have number of islands(n) = 4 . Each row then represents a bridge
# configuration. In each row first two numbers represent the islands number which are connected
# by this bridge and the third integer is the cost associated with this bridge.
#
# In the above example, we can select bridges 1(connecting islands 1 and 2 with cost 1),
# 3(connecting islands 1 and 4 with cost 3), 4(connecting islands 4 and 3 with cost 2). Thus we
# will have all islands connected with the minimum possible cost(1+3+2 = 6).
# In any other case, cost incurred will be more.
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
class Solution:
class Edges(list):
def __lt__(self, other):
for i in [2, 0, 1]:
if self[i] == other[i]:
continue
return self[i] < other[i]
class DisjoinSet:
def __init__(self, i):
self.parent = i
self.lvl = 0
def __repr__(self):
return "{}<{}>".format(self.parent, self.lvl)
@staticmethod
def findSet(x, S):
if S[x].parent == x:
return x
S[x].parent = Solution.findSet(S[x].parent, S)
return S[x].parent
@staticmethod
def unionSet(a, b, S):
set_a = Solution.findSet(a, S)
set_b = Solution.findSet(b, S)
if S[set_a].lvl < S[set_b].lvl:
S[set_a].parent = set_b
elif S[set_a].lvl > S[set_b].lvl:
S[set_b].parent = set_a
else:
S[set_b].parent = set_a
S[set_a].lvl += 1
# @param A : integer
# @param B : list of list of integers
# @return an integer
def solve(self, A, B):
B.sort(key=Solution.Edges)
S = [None] + [Solution.DisjoinSet(i + 1) for i in range(A)]
components, weigth = A - 1, 0
for edge in B:
if components == 0:
break
start = Solution.findSet(edge[0], S)
end = Solution.findSet(edge[1], S)
if start == end:
continue
Solution.unionSet(start, end, S)
components -= 1
weigth += edge[2]
return weigth
|
class Solution:
class Edges(list):
def __lt__(self, other):
for i in [2, 0, 1]:
if self[i] == other[i]:
continue
return self[i] < other[i]
class Disjoinset:
def __init__(self, i):
self.parent = i
self.lvl = 0
def __repr__(self):
return '{}<{}>'.format(self.parent, self.lvl)
@staticmethod
def find_set(x, S):
if S[x].parent == x:
return x
S[x].parent = Solution.findSet(S[x].parent, S)
return S[x].parent
@staticmethod
def union_set(a, b, S):
set_a = Solution.findSet(a, S)
set_b = Solution.findSet(b, S)
if S[set_a].lvl < S[set_b].lvl:
S[set_a].parent = set_b
elif S[set_a].lvl > S[set_b].lvl:
S[set_b].parent = set_a
else:
S[set_b].parent = set_a
S[set_a].lvl += 1
def solve(self, A, B):
B.sort(key=Solution.Edges)
s = [None] + [Solution.DisjoinSet(i + 1) for i in range(A)]
(components, weigth) = (A - 1, 0)
for edge in B:
if components == 0:
break
start = Solution.findSet(edge[0], S)
end = Solution.findSet(edge[1], S)
if start == end:
continue
Solution.unionSet(start, end, S)
components -= 1
weigth += edge[2]
return weigth
|
x=[2,25,34,56,72,34,54]
val=int(input("Enter the value you want to get searched : "))
for i in x:
if i==val:
print(x.index(i))
break
elif x.index(i)==(len(x)-1) and i!=val:
print("The Val u want to search is not there in the list")
|
x = [2, 25, 34, 56, 72, 34, 54]
val = int(input('Enter the value you want to get searched : '))
for i in x:
if i == val:
print(x.index(i))
break
elif x.index(i) == len(x) - 1 and i != val:
print('The Val u want to search is not there in the list')
|
# subsequence: a sequence that can be derived from another sequence by deleting one or more elements,
# without changing the order
# unlike substrings, subsequences are not required to occupy consecutive positions within the parent sequence
# so ACE is a valid subsequence of ABCDE
# find the length of the longest common subsequence that is common to two given strings
# naive recursive implementation:
# compare the characters at index 0
# if they are the same, add one to the length of the LCS, then compare the characters at index 1
# otherwise, the LCS is composed of characters from either s1[:] and s2[1:], or s1[1:] and s2[:]
# so find out which of those has the longest
def longest_common_subsequence(s1, s2, i1=0, i2=0):
# if at end of either string, no further additions to subsequence possible
if i1 >= len(s1) or i2 >= len(s2):
return 0
if s1[i1] == s2[i2]:
return 1 + longest_common_subsequence(s1, s2, i1 + 1, i2 + 1)
branch_1 = longest_common_subsequence(s1, s2, i1, i2 + 1)
branch_2 = longest_common_subsequence(s1, s2, i1 + 1, i2)
return max(branch_1, branch_2)
s1 = "elephant"
s2 = "eretpat"
print(longest_common_subsequence(s1, s2))
|
def longest_common_subsequence(s1, s2, i1=0, i2=0):
if i1 >= len(s1) or i2 >= len(s2):
return 0
if s1[i1] == s2[i2]:
return 1 + longest_common_subsequence(s1, s2, i1 + 1, i2 + 1)
branch_1 = longest_common_subsequence(s1, s2, i1, i2 + 1)
branch_2 = longest_common_subsequence(s1, s2, i1 + 1, i2)
return max(branch_1, branch_2)
s1 = 'elephant'
s2 = 'eretpat'
print(longest_common_subsequence(s1, s2))
|
"""Constants for the Purple Air integration."""
AQI_BREAKPOINTS = {
'pm2_5': [
{ 'pm_low': 500.5, 'pm_high': 999.9, 'aqi_low': 501, 'aqi_high': 999 },
{ 'pm_low': 350.5, 'pm_high': 500.4, 'aqi_low': 401, 'aqi_high': 500 },
{ 'pm_low': 250.5, 'pm_high': 350.4, 'aqi_low': 301, 'aqi_high': 400 },
{ 'pm_low': 150.5, 'pm_high': 250.4, 'aqi_low': 201, 'aqi_high': 300 },
{ 'pm_low': 55.5, 'pm_high': 150.4, 'aqi_low': 151, 'aqi_high': 200 },
{ 'pm_low': 35.5, 'pm_high': 55.4, 'aqi_low': 101, 'aqi_high': 150 },
{ 'pm_low': 12.1, 'pm_high': 35.4, 'aqi_low': 51, 'aqi_high': 100 },
{ 'pm_low': 0, 'pm_high': 12.0, 'aqi_low': 0, 'aqi_high': 50 },
],
}
DISPATCHER_PURPLE_AIR = 'dispatcher_purple_air'
DOMAIN = "purpleair"
JSON_PROPERTIES = ['pm1_0_atm', 'pm2_5_atm', 'pm10_0_atm', 'humidity', 'temp_f', 'pressure']
SCAN_INTERVAL = 300
PUBLIC_URL = "https://www.purpleair.com/json?show={nodes}"
PRIVATE_URL = "https://www.purpleair.com/json?show={nodes}&key={key}"
|
"""Constants for the Purple Air integration."""
aqi_breakpoints = {'pm2_5': [{'pm_low': 500.5, 'pm_high': 999.9, 'aqi_low': 501, 'aqi_high': 999}, {'pm_low': 350.5, 'pm_high': 500.4, 'aqi_low': 401, 'aqi_high': 500}, {'pm_low': 250.5, 'pm_high': 350.4, 'aqi_low': 301, 'aqi_high': 400}, {'pm_low': 150.5, 'pm_high': 250.4, 'aqi_low': 201, 'aqi_high': 300}, {'pm_low': 55.5, 'pm_high': 150.4, 'aqi_low': 151, 'aqi_high': 200}, {'pm_low': 35.5, 'pm_high': 55.4, 'aqi_low': 101, 'aqi_high': 150}, {'pm_low': 12.1, 'pm_high': 35.4, 'aqi_low': 51, 'aqi_high': 100}, {'pm_low': 0, 'pm_high': 12.0, 'aqi_low': 0, 'aqi_high': 50}]}
dispatcher_purple_air = 'dispatcher_purple_air'
domain = 'purpleair'
json_properties = ['pm1_0_atm', 'pm2_5_atm', 'pm10_0_atm', 'humidity', 'temp_f', 'pressure']
scan_interval = 300
public_url = 'https://www.purpleair.com/json?show={nodes}'
private_url = 'https://www.purpleair.com/json?show={nodes}&key={key}'
|
# Input: arr[] = {1, 20, 2, 10}
# Output: 72
def single_rotation(arr,l):
temp=arr[0]
for i in range(l-1):
arr[i]=arr[i+1]
arr[l-1]=temp
def sum_calculate(arr,l):
sum=0
for i in range(l):
sum=sum+arr[i]*(i)
return sum
def max_finder(arr,l):
max=arr[0]
for i in range(l):
if max<arr[i]:
max=arr[i]
maximum=max
for i in range(l):
if max == arr[i]:
temp=i
index=temp+1
for j in range(index):
single_rotation(arr,len(arr))
arr=[10, 1, 2, 3, 4, 5, 6, 7, 8, 9]
max_finder(arr,len(arr))
result=sum_calculate(arr,len(arr))
print("Max sum is: "+ str(result))
#optimized approach
# '''Python program to find maximum value of Sum(i*arr[i])'''
# # returns max possible value of Sum(i*arr[i])
# def maxSum(arr):
# # stores sum of arr[i]
# arrSum = 0
# # stores sum of i*arr[i]
# currVal = 0
# n = len(arr)
# for i in range(0, n):
# arrSum = arrSum + arr[i]
# currVal = currVal + (i*arr[i])
# # initialize result
# maxVal = currVal
# # try all rotations one by one and find the maximum
# # rotation sum
# for j in range(1, n):
# currVal = currVal + arrSum-n*arr[n-j]
# if currVal > maxVal:
# maxVal = currVal
# # return result
# return maxVal
# # test maxsum(arr) function
# arr = [10, 1, 2, 3, 4, 5, 6, 7, 8, 9]
# print("Max sum is: ", maxSum(arr))
|
def single_rotation(arr, l):
temp = arr[0]
for i in range(l - 1):
arr[i] = arr[i + 1]
arr[l - 1] = temp
def sum_calculate(arr, l):
sum = 0
for i in range(l):
sum = sum + arr[i] * i
return sum
def max_finder(arr, l):
max = arr[0]
for i in range(l):
if max < arr[i]:
max = arr[i]
maximum = max
for i in range(l):
if max == arr[i]:
temp = i
index = temp + 1
for j in range(index):
single_rotation(arr, len(arr))
arr = [10, 1, 2, 3, 4, 5, 6, 7, 8, 9]
max_finder(arr, len(arr))
result = sum_calculate(arr, len(arr))
print('Max sum is: ' + str(result))
|
class UsageError(Exception):
"""Error in plugin usage."""
__module__ = "builtins"
|
class Usageerror(Exception):
"""Error in plugin usage."""
__module__ = 'builtins'
|
"""Constants for the Nuvo Multi-zone Amplifier Media Player component."""
DOMAIN = "nuvo_serial"
CONF_ZONES = "zones"
CONF_SOURCES = "sources"
ZONE = "zone"
SOURCE = "source"
CONF_SOURCE_1 = "source_1"
CONF_SOURCE_2 = "source_2"
CONF_SOURCE_3 = "source_3"
CONF_SOURCE_4 = "source_4"
CONF_SOURCE_5 = "source_5"
CONF_SOURCE_6 = "source_6"
CONF_VOLUME_STEP = "volume_step"
CONF_NOT_FIRST_RUN = "not_first_run"
CONTROL_EQ_BASS = "bass"
CONTROL_EQ_TREBLE = "treble"
CONTROL_EQ_BALANCE = "balance"
CONTROL_EQ_LOUDCMP = "loudcmp"
CONTROL_SOURCE_GAIN = "gain"
CONTROL_VOLUME = "volume"
CONTROL_VOLUME_MAX = "max_vol"
CONTROL_VOLUME_INI = "ini_vol"
CONTROL_VOLUME_PAGE = "page_vol"
CONTROL_VOLUME_PARTY = "party_vol"
CONTROL_VOLUME_RESET = "vol_rst"
GROUP_MEMBER = 1
GROUP_NON_MEMBER = 0
SERVICE_SNAPSHOT = "snapshot"
SERVICE_RESTORE = "restore"
SERVICE_PAGE_ON = "page_on"
SERVICE_PAGE_OFF = "page_off"
SERVICE_PARTY_ON = "party_on"
SERVICE_PARTY_OFF = "party_off"
SERVICE_ALL_OFF = "all_off"
SERVICE_SIMULATE_PLAY_PAUSE = "simulate_play_pause_button"
SERVICE_SIMULATE_PREV = "simulate_prev_button"
SERVICE_SIMULATE_NEXT = "simulate_next_button"
FIRST_RUN = "first_run"
NUVO_OBJECT = "nuvo_object"
UNDO_UPDATE_LISTENER = "update_update_listener"
DOMAIN_EVENT = "nuvo_serial_event"
EVENT_KEYPAD_PLAY_PAUSE = "keypad_play_pause"
EVENT_KEYPAD_PREV = "keypad_prev"
EVENT_KEYPAD_NEXT = "keypad_next"
KEYPAD_BUTTON_PLAYPAUSE = "PLAYPAUSE"
KEYPAD_BUTTON_PREV = "PREV"
KEYPAD_BUTTON_NEXT = "NEXT"
KEYPAD_BUTTON_TO_EVENT = {
KEYPAD_BUTTON_PLAYPAUSE: EVENT_KEYPAD_PLAY_PAUSE,
KEYPAD_BUTTON_PREV: EVENT_KEYPAD_PREV,
KEYPAD_BUTTON_NEXT: EVENT_KEYPAD_NEXT,
}
COMMAND_RESPONSE_TIMEOUT = 3
|
"""Constants for the Nuvo Multi-zone Amplifier Media Player component."""
domain = 'nuvo_serial'
conf_zones = 'zones'
conf_sources = 'sources'
zone = 'zone'
source = 'source'
conf_source_1 = 'source_1'
conf_source_2 = 'source_2'
conf_source_3 = 'source_3'
conf_source_4 = 'source_4'
conf_source_5 = 'source_5'
conf_source_6 = 'source_6'
conf_volume_step = 'volume_step'
conf_not_first_run = 'not_first_run'
control_eq_bass = 'bass'
control_eq_treble = 'treble'
control_eq_balance = 'balance'
control_eq_loudcmp = 'loudcmp'
control_source_gain = 'gain'
control_volume = 'volume'
control_volume_max = 'max_vol'
control_volume_ini = 'ini_vol'
control_volume_page = 'page_vol'
control_volume_party = 'party_vol'
control_volume_reset = 'vol_rst'
group_member = 1
group_non_member = 0
service_snapshot = 'snapshot'
service_restore = 'restore'
service_page_on = 'page_on'
service_page_off = 'page_off'
service_party_on = 'party_on'
service_party_off = 'party_off'
service_all_off = 'all_off'
service_simulate_play_pause = 'simulate_play_pause_button'
service_simulate_prev = 'simulate_prev_button'
service_simulate_next = 'simulate_next_button'
first_run = 'first_run'
nuvo_object = 'nuvo_object'
undo_update_listener = 'update_update_listener'
domain_event = 'nuvo_serial_event'
event_keypad_play_pause = 'keypad_play_pause'
event_keypad_prev = 'keypad_prev'
event_keypad_next = 'keypad_next'
keypad_button_playpause = 'PLAYPAUSE'
keypad_button_prev = 'PREV'
keypad_button_next = 'NEXT'
keypad_button_to_event = {KEYPAD_BUTTON_PLAYPAUSE: EVENT_KEYPAD_PLAY_PAUSE, KEYPAD_BUTTON_PREV: EVENT_KEYPAD_PREV, KEYPAD_BUTTON_NEXT: EVENT_KEYPAD_NEXT}
command_response_timeout = 3
|
# Script will correct .hoc file from neuromorpho.org.
# In order to make the correction, same data is needed from
# Neuron's import3D tool.
# CNS-GROUP, Tampere University
def fix_commas(IMPORT3D_FILE, _3DVIEWER_FILE):
"""
This will correct commas, change "user7" to "dendrite"
and will seek "OrginalDendrite" value. Returns False if
there was a problem opening the file.
:return orgdend = OrginalDendrite
"""
k = 0
a = 0
orgdend = "Error"
try:
with open(IMPORT3D_FILE, 'r+') as i3file:
lines_i = i3file.readlines()
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines_view = _3dfile.readlines()
_3dfile.seek(0)
for line_v in lines_view:
# fix commas
if "pt3dadd" in line_v:
k += 1
for line_i in lines_i:
if "pt3dadd" in line_i:
a = a + 1
if k == a and "]" not in line_i: # when line is normal
line_i = line_i.strip('\t')
line_i = " " + line_i
_3dfile.write(line_i)
a = 0
break
elif k == a and "]" in line_i: # line is unnormal
parts = line_i.split('\t')
line_i = " " + parts[1]
_3dfile.write(line_i)
a = 0
break
elif "create user7" in line_v:
b = line_v.split("[")
orgdend = str(b[1]).replace("]", "")
orgdend = orgdend.replace("}", "")
elif "] connect" in line_v:
list_ = line_v.split(" ")
if "," in list_[3]:
list_[3] = list_[3].replace(",", ".")
new_line = " ".join(list_)
new_line = new_line.replace("user7", "dendrite")
_3dfile.write(new_line)
else:
line_v = line_v.replace("user7", "dendrite")
_3dfile.write(line_v)
elif "user7" in line_v:
new_line = line_v.replace("user7", "dendrite")
_3dfile.write(new_line)
else:
_3dfile.write(line_v)
_3dfile.truncate()
return str(orgdend)
except:
print("There was a problem opening import3D file.")
return False
def delete_curly_braces(_3DVIEWER_FILE):
""" Deletes unneeded braces. """
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines_view = _3dfile.readlines()
_3dfile.seek(0)
for line_v in lines_view:
if "create" in line_v or "access" in line_v:
new_line = line_v.replace("{", "")
new_line = new_line.replace("}", "")
_3dfile.write(new_line)
else:
_3dfile.write(line_v)
_3dfile.truncate()
def insert(orgdend, _3DVIEWER_FILE):
""" Adds needed lines to 3Dviewer code. """
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines = _3dfile.readlines()
lines.insert(5, "OriginalDendrite=" + str(orgdend) + "\n")
lines.insert(6, "NumberDendrites=OriginalDendrite+2*(OriginalDendrite-1)" + "\n")
lines.insert(7, "SeedNumber=OriginalDendrite-1" + "\n")
lines.insert(8, "\n")
lines.insert(10, "create dendrite[NumberDendrites]" + "\n")
_3dfile.seek(0)
_3dfile.truncate()
_3dfile.writelines(lines)
print("Corrections was made to", _3DVIEWER_FILE, "file.")
def check_file(_3DVIEWER_FILE):
""" This checks if file has already been fixed with this program. """
try:
with open(_3DVIEWER_FILE, 'r') as _3dfile:
lines = _3dfile.readlines()
for line in lines:
if "OriginalDendrite=" in line:
print(_3DVIEWER_FILE, "is not orginal file from neuromorpho 3Dviewer.")
return False
return True
except:
print("There was a problem opening 3Dviewer file.")
return False
def main():
""" Correct file from 3D viewer. """
IMPORT3D_FILE = input("Import3D file: ")
_3DVIEWER_FILE = input("3Dviewer file: ")
if check_file(_3DVIEWER_FILE):
orgdend = fix_commas(IMPORT3D_FILE, _3DVIEWER_FILE)
if not orgdend:
return
else:
delete_curly_braces(_3DVIEWER_FILE)
insert(orgdend, _3DVIEWER_FILE)
main()
|
def fix_commas(IMPORT3D_FILE, _3DVIEWER_FILE):
"""
This will correct commas, change "user7" to "dendrite"
and will seek "OrginalDendrite" value. Returns False if
there was a problem opening the file.
:return orgdend = OrginalDendrite
"""
k = 0
a = 0
orgdend = 'Error'
try:
with open(IMPORT3D_FILE, 'r+') as i3file:
lines_i = i3file.readlines()
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines_view = _3dfile.readlines()
_3dfile.seek(0)
for line_v in lines_view:
if 'pt3dadd' in line_v:
k += 1
for line_i in lines_i:
if 'pt3dadd' in line_i:
a = a + 1
if k == a and ']' not in line_i:
line_i = line_i.strip('\t')
line_i = ' ' + line_i
_3dfile.write(line_i)
a = 0
break
elif k == a and ']' in line_i:
parts = line_i.split('\t')
line_i = ' ' + parts[1]
_3dfile.write(line_i)
a = 0
break
elif 'create user7' in line_v:
b = line_v.split('[')
orgdend = str(b[1]).replace(']', '')
orgdend = orgdend.replace('}', '')
elif '] connect' in line_v:
list_ = line_v.split(' ')
if ',' in list_[3]:
list_[3] = list_[3].replace(',', '.')
new_line = ' '.join(list_)
new_line = new_line.replace('user7', 'dendrite')
_3dfile.write(new_line)
else:
line_v = line_v.replace('user7', 'dendrite')
_3dfile.write(line_v)
elif 'user7' in line_v:
new_line = line_v.replace('user7', 'dendrite')
_3dfile.write(new_line)
else:
_3dfile.write(line_v)
_3dfile.truncate()
return str(orgdend)
except:
print('There was a problem opening import3D file.')
return False
def delete_curly_braces(_3DVIEWER_FILE):
""" Deletes unneeded braces. """
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines_view = _3dfile.readlines()
_3dfile.seek(0)
for line_v in lines_view:
if 'create' in line_v or 'access' in line_v:
new_line = line_v.replace('{', '')
new_line = new_line.replace('}', '')
_3dfile.write(new_line)
else:
_3dfile.write(line_v)
_3dfile.truncate()
def insert(orgdend, _3DVIEWER_FILE):
""" Adds needed lines to 3Dviewer code. """
with open(_3DVIEWER_FILE, 'r+') as _3dfile:
lines = _3dfile.readlines()
lines.insert(5, 'OriginalDendrite=' + str(orgdend) + '\n')
lines.insert(6, 'NumberDendrites=OriginalDendrite+2*(OriginalDendrite-1)' + '\n')
lines.insert(7, 'SeedNumber=OriginalDendrite-1' + '\n')
lines.insert(8, '\n')
lines.insert(10, 'create dendrite[NumberDendrites]' + '\n')
_3dfile.seek(0)
_3dfile.truncate()
_3dfile.writelines(lines)
print('Corrections was made to', _3DVIEWER_FILE, 'file.')
def check_file(_3DVIEWER_FILE):
""" This checks if file has already been fixed with this program. """
try:
with open(_3DVIEWER_FILE, 'r') as _3dfile:
lines = _3dfile.readlines()
for line in lines:
if 'OriginalDendrite=' in line:
print(_3DVIEWER_FILE, 'is not orginal file from neuromorpho 3Dviewer.')
return False
return True
except:
print('There was a problem opening 3Dviewer file.')
return False
def main():
""" Correct file from 3D viewer. """
import3_d_file = input('Import3D file: ')
_3_dviewer_file = input('3Dviewer file: ')
if check_file(_3DVIEWER_FILE):
orgdend = fix_commas(IMPORT3D_FILE, _3DVIEWER_FILE)
if not orgdend:
return
else:
delete_curly_braces(_3DVIEWER_FILE)
insert(orgdend, _3DVIEWER_FILE)
main()
|
# program checks if the string is palindrome or not.
def function(string):
if(string == string[:: - 1]):
print("This is a Palindrome String")
else:
print("This is Not a Palindrome String")
string = input("Please enter your own String : ")
function(string)
|
def function(string):
if string == string[::-1]:
print('This is a Palindrome String')
else:
print('This is Not a Palindrome String')
string = input('Please enter your own String : ')
function(string)
|
#!/usr/bin/env python
# coding: utf8
""" Pythonic Redis backed data structure """
__version__ = '1.0.0'
__authors__ = 'Felix Voituret <felix@voituret.fr>'
|
""" Pythonic Redis backed data structure """
__version__ = '1.0.0'
__authors__ = 'Felix Voituret <felix@voituret.fr>'
|
_base_ = './deit-small_pt-4xb256_in1k.py'
# model settings
model = dict(
backbone=dict(type='DistilledVisionTransformer', arch='deit-base'),
head=dict(type='DeiTClsHead', in_channels=768),
)
# data settings
data = dict(samples_per_gpu=64, workers_per_gpu=5)
|
_base_ = './deit-small_pt-4xb256_in1k.py'
model = dict(backbone=dict(type='DistilledVisionTransformer', arch='deit-base'), head=dict(type='DeiTClsHead', in_channels=768))
data = dict(samples_per_gpu=64, workers_per_gpu=5)
|
class Solution:
def findRestaurant(self, list1: List[str], list2: List[str]) -> List[str]:
h = {v: i for i, v in enumerate(list1)}
result = []
m = inf
for i, v in enumerate(list2):
if v in h:
r = h[v] + i
if r < m:
m = r
result = [v]
elif r == m:
result.append(v)
return result
|
class Solution:
def find_restaurant(self, list1: List[str], list2: List[str]) -> List[str]:
h = {v: i for (i, v) in enumerate(list1)}
result = []
m = inf
for (i, v) in enumerate(list2):
if v in h:
r = h[v] + i
if r < m:
m = r
result = [v]
elif r == m:
result.append(v)
return result
|
#
# This helps us not have to pass so many things (caches, resolvers,
# transcoders...) around by letting us set class properties on the IIIFRequest
# at startup. Here's a basic example of how this pattern works:
#
# >>> class MyMeta(type): # Note we subclass type, not object
# ... _something = None
# ... def _get_something(self):
# ... return self._something
# ... def _set_something(self, value):
# ... self._something = value
# ... something = property(_get_something, _set_something)
# ...
# >>> class MyFoo(metaclass=MyMeta):
# ... pass
# >>> print(MyFoo.something)
# None
# >>> MyFoo.something = 'bar'
# >>> MyFoo.something
# 'bar'
#
class MetaRequest(type):
_compliance = None
_info_cache = None
_extractors = None
_app_configs = None
_transcoders = None
_resolvers = None
def _get_compliance(self):
return self._compliance
def _set_compliance(self, compliance):
self._compliance = compliance
compliance = property(_get_compliance, _set_compliance)
def _get_info_cache(self):
return self._info_cache
def _set_info_cache(self, info_cache):
self._info_cache = info_cache
info_cache = property(_get_info_cache, _set_info_cache)
def _get_extractors(self):
return self._extractors
def _set_extractors(self, extractors):
self._extractors = extractors
extractors = property(_get_extractors, _set_extractors)
def _get_app_configs(self):
return self._app_configs
def _set_app_configs(self, app_configs):
self._app_configs = app_configs
app_configs = property(_get_app_configs, _set_app_configs)
def _get_transcoders(self):
return self._transcoders
def _set_transcoders(self, transcoders):
self._transcoders = transcoders
transcoders = property(_get_transcoders, _set_transcoders)
def _get_resolvers(self):
return self._resolvers
def _set_resolvers(self, resolvers):
self._resolvers = resolvers
resolvers = property(_get_resolvers, _set_resolvers)
|
class Metarequest(type):
_compliance = None
_info_cache = None
_extractors = None
_app_configs = None
_transcoders = None
_resolvers = None
def _get_compliance(self):
return self._compliance
def _set_compliance(self, compliance):
self._compliance = compliance
compliance = property(_get_compliance, _set_compliance)
def _get_info_cache(self):
return self._info_cache
def _set_info_cache(self, info_cache):
self._info_cache = info_cache
info_cache = property(_get_info_cache, _set_info_cache)
def _get_extractors(self):
return self._extractors
def _set_extractors(self, extractors):
self._extractors = extractors
extractors = property(_get_extractors, _set_extractors)
def _get_app_configs(self):
return self._app_configs
def _set_app_configs(self, app_configs):
self._app_configs = app_configs
app_configs = property(_get_app_configs, _set_app_configs)
def _get_transcoders(self):
return self._transcoders
def _set_transcoders(self, transcoders):
self._transcoders = transcoders
transcoders = property(_get_transcoders, _set_transcoders)
def _get_resolvers(self):
return self._resolvers
def _set_resolvers(self, resolvers):
self._resolvers = resolvers
resolvers = property(_get_resolvers, _set_resolvers)
|
def anderson_iteration(X, U, V, labels, p, logger):
def multi_update_V(V, U, X):
delta_V = 100
while delta_V > 1e-1:
new_V = update_V(V, U, X, epsilon)
delta_V = l21_norm(new_V - V)
V = new_V
return V
V_len = V.flatten().shape[0]
mAA = 0
V_old = V
U_old = U
iterations = t = 0
mmax = p.mmax or 4
AAstart = p.AAstart or 0
droptol = p.droptol or 1e10
gamma = p.gamma
epsilon = p.epsilon
max_iteration = p.max_iterations or 300
fold = gold = None
g = np.ndarray(shape=(V_len, 0))
Q = np.ndarray(shape=(V_len, 1))
R = np.ndarray(shape=(1, 1))
old_E = np.Infinity
VAUt = None
while True:
U_new = solve_U(X, V_old, gamma, epsilon)
delta_U, is_converged = U_converged(U_new, U_old)
new_E = E(U_new, V_old, X, gamma, epsilon)
if is_converged:
return U_new, V_old, t, metric(U_new, labels)
if t > max_iteration:
return U_new, V_old, t, metric(U_new, labels)
if new_E >= old_E:
mAA = 0
iterations = 0
V_old = VAUt
g = np.ndarray(shape=(V_len, 0))
Q = np.ndarray(shape=(V_len, 1))
R = np.ndarray(shape=(1, 1))
U_new = solve_U(X, V_old, gamma, epsilon)
old_E = E(U_new, V_old, X, gamma, epsilon)
# AA Start
# VAUt = gcur = update_V(V_old, U_new, X, epsilon)
VAUt = gcur = multi_update_V(V_old, U_new, X)
fcur = gcur - V_old
if iterations > AAstart:
delta_f = (fcur - fold).reshape((V_len, 1))
delta_g = (gcur - gold).reshape((V_len, 1))
if mAA < mmax:
g = np.hstack((g, delta_g))
else:
g = np.hstack((g[:, 1:mAA], delta_g))
mAA += 1
fold, gold = fcur, gcur
if mAA == 0:
V_new = gcur
else:
if mAA == 1:
delta_f_norm = l21_norm(delta_f)
Q[:, 0] = delta_f.flatten() / delta_f_norm
R[0, 0] = delta_f_norm
else:
if mAA > mmax:
Q, R = qr_delete(Q, R, 1)
mAA -= 1
R = np.resize(R, (mAA, mAA))
Q = np.resize(Q, (V_len, mAA))
for i in range(0, mAA - 1):
R[i, mAA - 1] = Q[:, i].T @ delta_f
delta_f = delta_f - (R[i, mAA - 1] * Q[:, i]).reshape((V_len, 1))
delta_f_norm = l21_norm(delta_f)
Q[:, mAA - 1] = delta_f.flatten() / delta_f_norm
R[mAA - 1, mAA - 1] = delta_f_norm
while np.linalg.cond(R) > droptol and mAA > 1:
Q, R = qr_delete(Q, R, 1)
mAA -= 1
Gamma = scipy.linalg.solve(R, Q.T @ fcur.reshape(V_len, 1))
V_new = gcur - (g @ Gamma).reshape(V.shape)
delta_V, _ = U_converged(V_new, V_old)
V_old = V_new
U_old = U_new
old_E = new_E
logger.log_middle(E(U_new, V_new, X, gamma, epsilon), metric(U_new, labels))
t += 1
iterations += 1
|
def anderson_iteration(X, U, V, labels, p, logger):
def multi_update_v(V, U, X):
delta_v = 100
while delta_V > 0.1:
new_v = update_v(V, U, X, epsilon)
delta_v = l21_norm(new_V - V)
v = new_V
return V
v_len = V.flatten().shape[0]
m_aa = 0
v_old = V
u_old = U
iterations = t = 0
mmax = p.mmax or 4
a_astart = p.AAstart or 0
droptol = p.droptol or 10000000000.0
gamma = p.gamma
epsilon = p.epsilon
max_iteration = p.max_iterations or 300
fold = gold = None
g = np.ndarray(shape=(V_len, 0))
q = np.ndarray(shape=(V_len, 1))
r = np.ndarray(shape=(1, 1))
old_e = np.Infinity
va_ut = None
while True:
u_new = solve_u(X, V_old, gamma, epsilon)
(delta_u, is_converged) = u_converged(U_new, U_old)
new_e = e(U_new, V_old, X, gamma, epsilon)
if is_converged:
return (U_new, V_old, t, metric(U_new, labels))
if t > max_iteration:
return (U_new, V_old, t, metric(U_new, labels))
if new_E >= old_E:
m_aa = 0
iterations = 0
v_old = VAUt
g = np.ndarray(shape=(V_len, 0))
q = np.ndarray(shape=(V_len, 1))
r = np.ndarray(shape=(1, 1))
u_new = solve_u(X, V_old, gamma, epsilon)
old_e = e(U_new, V_old, X, gamma, epsilon)
va_ut = gcur = multi_update_v(V_old, U_new, X)
fcur = gcur - V_old
if iterations > AAstart:
delta_f = (fcur - fold).reshape((V_len, 1))
delta_g = (gcur - gold).reshape((V_len, 1))
if mAA < mmax:
g = np.hstack((g, delta_g))
else:
g = np.hstack((g[:, 1:mAA], delta_g))
m_aa += 1
(fold, gold) = (fcur, gcur)
if mAA == 0:
v_new = gcur
else:
if mAA == 1:
delta_f_norm = l21_norm(delta_f)
Q[:, 0] = delta_f.flatten() / delta_f_norm
R[0, 0] = delta_f_norm
else:
if mAA > mmax:
(q, r) = qr_delete(Q, R, 1)
m_aa -= 1
r = np.resize(R, (mAA, mAA))
q = np.resize(Q, (V_len, mAA))
for i in range(0, mAA - 1):
R[i, mAA - 1] = Q[:, i].T @ delta_f
delta_f = delta_f - (R[i, mAA - 1] * Q[:, i]).reshape((V_len, 1))
delta_f_norm = l21_norm(delta_f)
Q[:, mAA - 1] = delta_f.flatten() / delta_f_norm
R[mAA - 1, mAA - 1] = delta_f_norm
while np.linalg.cond(R) > droptol and mAA > 1:
(q, r) = qr_delete(Q, R, 1)
m_aa -= 1
gamma = scipy.linalg.solve(R, Q.T @ fcur.reshape(V_len, 1))
v_new = gcur - (g @ Gamma).reshape(V.shape)
(delta_v, _) = u_converged(V_new, V_old)
v_old = V_new
u_old = U_new
old_e = new_E
logger.log_middle(e(U_new, V_new, X, gamma, epsilon), metric(U_new, labels))
t += 1
iterations += 1
|
"""Set metadata for chat-downloader"""
__title__ = 'chat-downloader'
__program__ = 'chat_downloader'
__summary__ = 'A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!'
__author__ = 'xenova'
__email__ = 'admin@xenova.com'
__copyright__ = '2020, 2021 xenova'
__url__ = 'https://github.com/xenova/chat-downloader'
__version__ = '0.1.5'
|
"""Set metadata for chat-downloader"""
__title__ = 'chat-downloader'
__program__ = 'chat_downloader'
__summary__ = 'A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!'
__author__ = 'xenova'
__email__ = 'admin@xenova.com'
__copyright__ = '2020, 2021 xenova'
__url__ = 'https://github.com/xenova/chat-downloader'
__version__ = '0.1.5'
|
def move(cc, order, value):
if order == "N":
cc[1] += value
if order == "S":
cc[1] -= value
if order == "E":
cc[0] += value
if order == "W":
cc[0] -= value
return cc
def stage1(inp):
direct = ["S", "W", "N", "E"]
facing = "E"
current_coord = [0, 0]
for order in inp:
value = int(order[1:])
if order[0] in ["S", "W", "N", "E"]:
current_coord = move(current_coord, order[0], value)
if order[0] == "R":
facing = direct[(direct.index(facing) + int(value / 90)) % 4]
if order[0] == "L":
nd = direct.index(facing) - int(value / 90)
if nd < 0:
nd = 4 - abs(nd)
facing = direct[nd]
if order[0] == "F":
current_coord = move(current_coord, facing, value)
return abs(current_coord[0]) + abs(current_coord[1])
def stage2(inp):
current_w_coord = [10, 1]
current_s_coord = [0, 0]
for order in inp:
value = int(order[1:])
if order[0] in ["S", "W", "N", "E"]:
current_w_coord = move(current_w_coord, order[0], value)
if order[0] == "R":
for _ in range(0, int(value / 90)):
cc = current_w_coord[1]
current_w_coord[1] = -current_w_coord[0]
current_w_coord[0] = cc
if order[0] == "L":
for _ in range(0, int(value / 90)):
cc = current_w_coord[1]
current_w_coord[1] = current_w_coord[0]
current_w_coord[0] = -cc
if order[0] == "F":
if current_w_coord[0] > 0:
current_s_coord = move(current_s_coord, "E", value * current_w_coord[0])
else:
current_s_coord = move(
current_s_coord, "W", value * abs(current_w_coord[0])
)
if current_w_coord[1] > 0:
current_s_coord = move(current_s_coord, "N", value * current_w_coord[1])
else:
current_s_coord = move(
current_s_coord, "S", value * abs(current_w_coord[1])
)
return abs(current_s_coord[0]) + abs(current_s_coord[1])
def solve(inp):
s1, s2 = (0, 0)
s1 = stage1(inp)
s2 = stage2(inp)
return s1, s2
|
def move(cc, order, value):
if order == 'N':
cc[1] += value
if order == 'S':
cc[1] -= value
if order == 'E':
cc[0] += value
if order == 'W':
cc[0] -= value
return cc
def stage1(inp):
direct = ['S', 'W', 'N', 'E']
facing = 'E'
current_coord = [0, 0]
for order in inp:
value = int(order[1:])
if order[0] in ['S', 'W', 'N', 'E']:
current_coord = move(current_coord, order[0], value)
if order[0] == 'R':
facing = direct[(direct.index(facing) + int(value / 90)) % 4]
if order[0] == 'L':
nd = direct.index(facing) - int(value / 90)
if nd < 0:
nd = 4 - abs(nd)
facing = direct[nd]
if order[0] == 'F':
current_coord = move(current_coord, facing, value)
return abs(current_coord[0]) + abs(current_coord[1])
def stage2(inp):
current_w_coord = [10, 1]
current_s_coord = [0, 0]
for order in inp:
value = int(order[1:])
if order[0] in ['S', 'W', 'N', 'E']:
current_w_coord = move(current_w_coord, order[0], value)
if order[0] == 'R':
for _ in range(0, int(value / 90)):
cc = current_w_coord[1]
current_w_coord[1] = -current_w_coord[0]
current_w_coord[0] = cc
if order[0] == 'L':
for _ in range(0, int(value / 90)):
cc = current_w_coord[1]
current_w_coord[1] = current_w_coord[0]
current_w_coord[0] = -cc
if order[0] == 'F':
if current_w_coord[0] > 0:
current_s_coord = move(current_s_coord, 'E', value * current_w_coord[0])
else:
current_s_coord = move(current_s_coord, 'W', value * abs(current_w_coord[0]))
if current_w_coord[1] > 0:
current_s_coord = move(current_s_coord, 'N', value * current_w_coord[1])
else:
current_s_coord = move(current_s_coord, 'S', value * abs(current_w_coord[1]))
return abs(current_s_coord[0]) + abs(current_s_coord[1])
def solve(inp):
(s1, s2) = (0, 0)
s1 = stage1(inp)
s2 = stage2(inp)
return (s1, s2)
|
A = int(input('Enter the value of A: '))
B = int(input('Enter the value of B: '))
#A = int(A)
#B = int(B)
C = A
A = B
B = C
print('Value of A', A, 'Value of B', B)
|
a = int(input('Enter the value of A: '))
b = int(input('Enter the value of B: '))
c = A
a = B
b = C
print('Value of A', A, 'Value of B', B)
|
# Input:
# 1
# 8
# 1 2 2 4 5 6 7 8
#
# Output:
# 2 1 4 2 6 5 8 7
def pairWiseSwap(head):
if head == None or head.next == None:
return head
prev = None
cur = head
count = 2
while count > 0 and cur != None:
temp = cur.next
cur.next = prev
prev = cur
cur = temp
count -= 1
head.next = pairWiseSwap(cur)
return prev
|
def pair_wise_swap(head):
if head == None or head.next == None:
return head
prev = None
cur = head
count = 2
while count > 0 and cur != None:
temp = cur.next
cur.next = prev
prev = cur
cur = temp
count -= 1
head.next = pair_wise_swap(cur)
return prev
|
#!/usr/bin/env python
#
# Test function for sct_dmri_concat_b0_and_dwi
#
# Copyright (c) 2019 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Julien Cohen-Adad
#
# About the license: see the file LICENSE.TXT
def init(param_test):
"""
Initialize class: param_test
"""
# initialization
default_args = ['-i dmri/dmri_T0000.nii.gz dmri/dmri.nii.gz -bvec dmri/bvecs.txt -bval dmri/bvals.txt '
'-order b0 dwi -o b0_dwi_concat.nii -obval bvals_concat.txt -obvec bvecs_concat.txt']
# assign default params
if not param_test.args:
param_test.args = default_args
return param_test
def test_integrity(param_test):
"""
Test integrity of function
"""
param_test.output += '\nNot implemented.'
return param_test
|
def init(param_test):
"""
Initialize class: param_test
"""
default_args = ['-i dmri/dmri_T0000.nii.gz dmri/dmri.nii.gz -bvec dmri/bvecs.txt -bval dmri/bvals.txt -order b0 dwi -o b0_dwi_concat.nii -obval bvals_concat.txt -obvec bvecs_concat.txt']
if not param_test.args:
param_test.args = default_args
return param_test
def test_integrity(param_test):
"""
Test integrity of function
"""
param_test.output += '\nNot implemented.'
return param_test
|
a = 5 > 3
b = 5 > 4
c = 5 > 5
d = 5 > 6
e = 5 > 7
|
a = 5 > 3
b = 5 > 4
c = 5 > 5
d = 5 > 6
e = 5 > 7
|
names = []
startingletter = ""
# Open file and getting all names from it
with open("./Input/Names/invited_names.txt") as file:
for line in file:
names.append(line.strip())
# Getting the text from the starting letter
with open("./Input/Letters/starting_letter.txt") as file:
startingletter = file.read()
# Looping through the list of names and writing the final version of the letter for each person
for name in names:
with open(f"./Output/ReadyToSend/letter_for_{name}", "w") as readytosend:
readytosend.write(startingletter.replace("[name]", name))
|
names = []
startingletter = ''
with open('./Input/Names/invited_names.txt') as file:
for line in file:
names.append(line.strip())
with open('./Input/Letters/starting_letter.txt') as file:
startingletter = file.read()
for name in names:
with open(f'./Output/ReadyToSend/letter_for_{name}', 'w') as readytosend:
readytosend.write(startingletter.replace('[name]', name))
|
def Counting_Sort(A, k=-1):
if(k==-1): k=max(A)
C = [0 for x in range(k+1)]
for x in A: C[x]+=1
for x in range(1, k+1):C[x]+=C[x-1]
B = [0 for x in range(len(A))]
for i in range(len(A)-1, -1, -1):
x = A[i]
B[C[x]-1] = x
C[x]-=1
return B
A = [13,20,18,20,12,15,7]
print(Counting_Sort(A))
|
def counting__sort(A, k=-1):
if k == -1:
k = max(A)
c = [0 for x in range(k + 1)]
for x in A:
C[x] += 1
for x in range(1, k + 1):
C[x] += C[x - 1]
b = [0 for x in range(len(A))]
for i in range(len(A) - 1, -1, -1):
x = A[i]
B[C[x] - 1] = x
C[x] -= 1
return B
a = [13, 20, 18, 20, 12, 15, 7]
print(counting__sort(A))
|
class Solution:
def minWindow(self, s: str, t: str) -> str:
target, window = defaultdict(int), defaultdict(int)
left, right, match = 0, 0, 0
d = float("inf")
for c in t:
target[c] += 1
while right < len(s):
c = s[right]
if c in target:
window[c] += 1
if window[c] == target[c]:
match += 1
right += 1
while (match == len(target)):
if right - left < d:
ans = s[left:right]
d = right - left
c = s[left]
left += 1
if c in target:
if window[c] == target[c]:
match -= 1
window[c] -= 1
return "" if d == float("inf") else ans
|
class Solution:
def min_window(self, s: str, t: str) -> str:
(target, window) = (defaultdict(int), defaultdict(int))
(left, right, match) = (0, 0, 0)
d = float('inf')
for c in t:
target[c] += 1
while right < len(s):
c = s[right]
if c in target:
window[c] += 1
if window[c] == target[c]:
match += 1
right += 1
while match == len(target):
if right - left < d:
ans = s[left:right]
d = right - left
c = s[left]
left += 1
if c in target:
if window[c] == target[c]:
match -= 1
window[c] -= 1
return '' if d == float('inf') else ans
|
# From: http://wiki.python.org/moin/SimplePrograms, with permission from the author, Steve Howell
BOARD_SIZE = 8
def under_attack(col, queens):
return col in queens or \
any(abs(col - x) == len(queens)-i for i,x in enumerate(queens))
def solve(n):
solutions = [[]]
for row in range(n):
solutions = [solution+[i+1]
for solution in solutions
for i in range(BOARD_SIZE)
if not under_attack(i+1, solution)]
return solutions
for answer in solve(BOARD_SIZE): print(list(enumerate(answer, start=1)))
|
board_size = 8
def under_attack(col, queens):
return col in queens or any((abs(col - x) == len(queens) - i for (i, x) in enumerate(queens)))
def solve(n):
solutions = [[]]
for row in range(n):
solutions = [solution + [i + 1] for solution in solutions for i in range(BOARD_SIZE) if not under_attack(i + 1, solution)]
return solutions
for answer in solve(BOARD_SIZE):
print(list(enumerate(answer, start=1)))
|
# -*- coding: utf-8 -*-
""" VITA Person Registry, Controllers
@author: nursix
@see: U{http://eden.sahanafoundation.org/wiki/BluePrintVITA}
"""
prefix = request.controller
resourcename = request.function
# -----------------------------------------------------------------------------
# Options Menu (available in all Functions' Views)
def shn_menu():
response.menu_options = [
[T("Home"), False, URL(r=request, f="index")],
[T("Search for a Person"), False, URL(r=request, f="person", args="search")],
[T("Persons"), False, URL(r=request, f="person"), [
[T("List"), False, URL(r=request, f="person")],
[T("Add"), False, URL(r=request, f="person", args="create")],
]],
[T("Groups"), False, URL(r=request, f="group"), [
[T("List"), False, URL(r=request, f="group")],
[T("Add"), False, URL(r=request, f="group", args="create")],
]]]
#De-activating until fixed:
#if s3_has_role(1):
#response.menu_options.append([T("De-duplicator"), False, URL(r=request, f="person_duplicates")])
menu_selected = []
if session.rcvars and "pr_group" in session.rcvars:
group = db.pr_group
query = (group.id == session.rcvars["pr_group"])
record = db(query).select(group.id, group.name, limitby=(0, 1)).first()
if record:
name = record.name
menu_selected.append(["%s: %s" % (T("Group"), name), False,
URL(r=request, f="group", args=[record.id])])
if session.rcvars and "pr_person" in session.rcvars:
person = db.pr_person
query = (person.id == session.rcvars["pr_person"])
record = db(query).select(person.id, limitby=(0, 1)).first()
if record:
name = shn_pr_person_represent(record.id)
menu_selected.append(["%s: %s" % (T("Person"), name), False,
URL(r=request, f="person", args=[record.id])])
if menu_selected:
menu_selected = [T("Open recent"), True, None, menu_selected]
response.menu_options.append(menu_selected)
shn_menu()
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
try:
module_name = deployment_settings.modules[prefix].name_nice
except:
module_name = T("Person Registry")
def prep(r):
if r.representation == "html":
if not r.id:
r.method = "search"
else:
redirect(URL(r=request, f="person", args=[r.id]))
return True
response.s3.prep = prep
def postp(r, output):
if isinstance(output, dict):
gender = []
for g_opt in pr_gender_opts:
count = db((db.pr_person.deleted == False) & \
(db.pr_person.gender == g_opt)).count()
gender.append([str(pr_gender_opts[g_opt]), int(count)])
age = []
for a_opt in pr_age_group_opts:
count = db((db.pr_person.deleted == False) & \
(db.pr_person.age_group == a_opt)).count()
age.append([str(pr_age_group_opts[a_opt]), int(count)])
total = int(db(db.pr_person.deleted == False).count())
output.update(module_name=module_name, gender=gender, age=age, total=total)
if r.representation in shn_interactive_view_formats:
if not r.component:
label = READ
else:
label = UPDATE
linkto = r.resource.crud._linkto(r)("[id]")
response.s3.actions = [
dict(label=str(label), _class="action-btn", url=str(linkto))
]
r.next = None
return output
response.s3.postp = postp
if auth.s3_logged_in():
add_btn = A(T("Add Person"),
_class="action-btn",
_href=URL(r=request, f="person", args="create"))
else:
add_btn = None
output = s3_rest_controller("pr", "person",
add_btn=add_btn)
response.view = "pr/index.html"
response.title = module_name
shn_menu()
return output
# -----------------------------------------------------------------------------
def person():
""" RESTful CRUD controller """
def prep(r):
if r.component_name == "config":
_config = db.gis_config
defaults = db(_config.id == 1).select(limitby=(0, 1)).first()
for key in defaults.keys():
if key not in ["id", "uuid", "mci", "update_record", "delete_record"]:
_config[key].default = defaults[key]
if r.representation == "popup":
# Hide "pe_label" and "missing" fields in person popups
r.table.pe_label.readable = False
r.table.pe_label.writable = False
r.table.missing.readable = False
r.table.missing.writable = False
return True
response.s3.prep = prep
s3xrc.model.configure(db.pr_group_membership,
list_fields=["id",
"group_id",
"group_head",
"description"])
table = db.pr_person
s3xrc.model.configure(table, listadd = False, insertable = True)
output = s3_rest_controller(prefix, resourcename,
main="first_name",
extra="last_name",
rheader=lambda r: shn_pr_rheader(r,
tabs = [(T("Basic Details"), None),
(T("Images"), "image"),
(T("Identity"), "identity"),
(T("Address"), "address"),
(T("Contact Data"), "pe_contact"),
(T("Memberships"), "group_membership"),
(T("Presence Log"), "presence"),
(T("Subscriptions"), "pe_subscription"),
(T("Map Settings"), "config")
]))
shn_menu()
return output
# -----------------------------------------------------------------------------
def group():
""" RESTful CRUD controller """
tablename = "%s_%s" % (prefix, resourcename)
table = db[tablename]
response.s3.filter = (db.pr_group.system == False) # do not show system groups
s3xrc.model.configure(db.pr_group_membership,
list_fields=["id",
"person_id",
"group_head",
"description"])
output = s3_rest_controller(prefix, resourcename,
rheader=lambda r: shn_pr_rheader(r,
tabs = [(T("Group Details"), None),
(T("Address"), "address"),
(T("Contact Data"), "pe_contact"),
(T("Members"), "group_membership")]))
shn_menu()
return output
# -----------------------------------------------------------------------------
def image():
""" RESTful CRUD controller """
return s3_rest_controller(prefix, resourcename)
# -----------------------------------------------------------------------------
def pe_contact():
""" RESTful CRUD controller """
table = db.pr_pe_contact
table.pe_id.label = T("Person/Group")
table.pe_id.readable = True
table.pe_id.writable = True
return s3_rest_controller(prefix, resourcename)
# -----------------------------------------------------------------------------
#def group_membership():
#""" RESTful CRUD controller """
#return s3_rest_controller(prefix, resourcename)
# -----------------------------------------------------------------------------
def pentity():
""" RESTful CRUD controller """
return s3_rest_controller(prefix, resourcename)
# -----------------------------------------------------------------------------
def download():
""" Download a file.
@todo: deprecate? (individual download handler probably not needed)
"""
return response.download(request, db)
# -----------------------------------------------------------------------------
def tooltip():
""" Ajax tooltips """
if "formfield" in request.vars:
response.view = "pr/ajaxtips/%s.html" % request.vars.formfield
return dict()
#------------------------------------------------------------------------------------------------------------------
def person_duplicates():
""" Handle De-duplication of People
@todo: permissions, audit, update super entity, PEP8, optimization?
@todo: check for component data!
@todo: user accounts, subscriptions?
"""
# Shortcut
persons = db.pr_person
table_header = THEAD(TR(TH(T("Person 1")),
TH(T("Person 2")),
TH(T("Match Percentage")),
TH(T("Resolve"))))
# Calculate max possible combinations of records
# To handle the AJAX requests by the dataTables jQuery plugin.
totalRecords = db(persons.id > 0).count()
item_list = []
if request.vars.iDisplayStart:
end = int(request.vars.iDisplayLength) + int(request.vars.iDisplayStart)
records = db((persons.id > 0) & \
(persons.deleted == False) & \
(persons.first_name != None)).select(persons.id, # Should this be persons.ALL?
persons.pe_label,
persons.missing,
persons.first_name,
persons.middle_name,
persons.last_name,
persons.preferred_name,
persons.local_name,
persons.age_group,
persons.gender,
persons.date_of_birth,
persons.nationality,
persons.country,
persons.religion,
persons.marital_status,
persons.occupation,
persons.tags,
persons.comments)
# Calculate the match percentage using Jaro wrinkler Algorithm
count = 1
i = 0
for onePerson in records: #[:len(records)/2]:
soundex1= soundex(onePerson.first_name)
array1 = []
array1.append(onePerson.pe_label)
array1.append(str(onePerson.missing))
array1.append(onePerson.first_name)
array1.append(onePerson.middle_name)
array1.append(onePerson.last_name)
array1.append(onePerson.preferred_name)
array1.append(onePerson.local_name)
array1.append(pr_age_group_opts.get(onePerson.age_group, T("None")))
array1.append(pr_gender_opts.get(onePerson.gender, T("None")))
array1.append(str(onePerson.date_of_birth))
array1.append(pr_nations.get(onePerson.nationality, T("None")))
array1.append(pr_nations.get(onePerson.country, T("None")))
array1.append(pr_religion_opts.get(onePerson.religion, T("None")))
array1.append(pr_marital_status_opts.get(onePerson.marital_status, T("None")))
array1.append(onePerson.occupation)
# Format tags into an array
if onePerson.tags != None:
tagname = []
for item in onePerson.tags:
tagname.append(pr_impact_tags.get(item, T("None")))
array1.append(tagname)
else:
array1.append(onePerson.tags)
array1.append(onePerson.comments)
i = i + 1
j = 0
for anotherPerson in records: #[len(records)/2:]:
soundex2 = soundex(anotherPerson.first_name)
if j >= i:
array2 =[]
array2.append(anotherPerson.pe_label)
array2.append(str(anotherPerson.missing))
array2.append(anotherPerson.first_name)
array2.append(anotherPerson.middle_name)
array2.append(anotherPerson.last_name)
array2.append(anotherPerson.preferred_name)
array2.append(anotherPerson.local_name)
array2.append(pr_age_group_opts.get(anotherPerson.age_group, T("None")))
array2.append(pr_gender_opts.get(anotherPerson.gender, T("None")))
array2.append(str(anotherPerson.date_of_birth))
array2.append(pr_nations.get(anotherPerson.nationality, T("None")))
array2.append(pr_nations.get(anotherPerson.country, T("None")))
array2.append(pr_religion_opts.get(anotherPerson.religion, T("None")))
array2.append(pr_marital_status_opts.get(anotherPerson.marital_status, T("None")))
array2.append(anotherPerson.occupation)
# Format tags into an array
if anotherPerson.tags != None:
tagname = []
for item in anotherPerson.tags:
tagname.append(pr_impact_tags.get(item, T("None")))
array2.append(tagname)
else:
array2.append(anotherPerson.tags)
array2.append(anotherPerson.comments)
if count > end and request.vars.max != "undefined":
count = int(request.vars.max)
break;
if onePerson.id == anotherPerson.id:
continue
else:
mpercent = jaro_winkler_distance_row(array1, array2)
# Pick all records with match percentage is >50 or whose soundex values of first name are equal
if int(mpercent) > 50 or (soundex1 == soundex2):
count = count + 1
item_list.append([onePerson.first_name,
anotherPerson.first_name,
mpercent,
"<a href=\"../pr/person_resolve?perID1=%i&perID2=%i\", class=\"action-btn\">Resolve</a>" % (onePerson.id, anotherPerson.id)
])
else:
continue
j = j + 1
item_list = item_list[int(request.vars.iDisplayStart):end]
# Convert data to JSON
result = []
result.append({
"sEcho" : request.vars.sEcho,
"iTotalRecords" : count,
"iTotalDisplayRecords" : count,
"aaData" : item_list
})
output = json.dumps(result)
# Remove unwanted brackets
output = output[1:]
output = output[:-1]
return output
else:
# Don't load records except via dataTables (saves duplicate loading & less confusing for user)
items = DIV((TABLE(table_header, TBODY(), _id="list", _class="display")))
return(dict(items=items))
#----------------------------------------------------------------------------------------------------------
def delete_person():
""" To delete references to the old record and replace it with the new one.
@todo: components??? cannot simply be re-linked!
@todo: user accounts?
@todo: super entity not updated!
"""
# @ToDo: Error gracefully if conditions not satisfied
old = request.vars.old
new = request.vars.new
# Find all tables which link to the pr_person table
tables = shn_table_links("pr_person")
for table in tables:
for count in range(len(tables[table])):
field = tables[str(db[table])][count]
query = db[table][field] == old
db(query).update(**{field:new})
# Remove the record
db(db.pr_person.id == old).update(deleted=True)
return "Other Record Deleted, Linked Records Updated Successfully"
#------------------------------------------------------------------------------------------------------------------
def person_resolve():
""" This opens a popup screen where the de-duplication process takes place.
@todo: components??? cannot simply re-link!
@todo: user accounts linked to these records?
@todo: update the super entity!
@todo: use S3Resources, implement this as a method handler
"""
# @ToDo: Error gracefully if conditions not satisfied
perID1 = request.vars.perID1
perID2 = request.vars.perID2
# Shortcut
persons = db.pr_person
count = 0
for field in persons:
id1 = str(count) + "Right" # Gives a unique number to each of the arrow keys
id2 = str(count) + "Left"
count = count + 1;
# Comment field filled with buttons
field.comment = DIV(TABLE(TR(TD(INPUT(_type="button", _id=id1, _class="rightArrows", _value="-->")),
TD(INPUT(_type="button", _id=id2, _class="leftArrows", _value="<--")))))
record = persons[perID1]
myUrl = URL(r=request, c="pr", f="person")
form1 = SQLFORM(persons, record, _id="form1", _action=("%s/%s" % (myUrl, perID1)))
# For the second record remove all the comments to save space.
for field in persons:
field.comment = None
record = persons[perID2]
form2 = SQLFORM(persons, record, _id="form2", _action=("%s/%s" % (myUrl, perID2)))
return dict(form1=form1, form2=form2, perID1=perID1, perID2=perID2)
# -----------------------------------------------------------------------------
|
""" VITA Person Registry, Controllers
@author: nursix
@see: U{http://eden.sahanafoundation.org/wiki/BluePrintVITA}
"""
prefix = request.controller
resourcename = request.function
def shn_menu():
response.menu_options = [[t('Home'), False, url(r=request, f='index')], [t('Search for a Person'), False, url(r=request, f='person', args='search')], [t('Persons'), False, url(r=request, f='person'), [[t('List'), False, url(r=request, f='person')], [t('Add'), False, url(r=request, f='person', args='create')]]], [t('Groups'), False, url(r=request, f='group'), [[t('List'), False, url(r=request, f='group')], [t('Add'), False, url(r=request, f='group', args='create')]]]]
menu_selected = []
if session.rcvars and 'pr_group' in session.rcvars:
group = db.pr_group
query = group.id == session.rcvars['pr_group']
record = db(query).select(group.id, group.name, limitby=(0, 1)).first()
if record:
name = record.name
menu_selected.append(['%s: %s' % (t('Group'), name), False, url(r=request, f='group', args=[record.id])])
if session.rcvars and 'pr_person' in session.rcvars:
person = db.pr_person
query = person.id == session.rcvars['pr_person']
record = db(query).select(person.id, limitby=(0, 1)).first()
if record:
name = shn_pr_person_represent(record.id)
menu_selected.append(['%s: %s' % (t('Person'), name), False, url(r=request, f='person', args=[record.id])])
if menu_selected:
menu_selected = [t('Open recent'), True, None, menu_selected]
response.menu_options.append(menu_selected)
shn_menu()
def index():
""" Module's Home Page """
try:
module_name = deployment_settings.modules[prefix].name_nice
except:
module_name = t('Person Registry')
def prep(r):
if r.representation == 'html':
if not r.id:
r.method = 'search'
else:
redirect(url(r=request, f='person', args=[r.id]))
return True
response.s3.prep = prep
def postp(r, output):
if isinstance(output, dict):
gender = []
for g_opt in pr_gender_opts:
count = db((db.pr_person.deleted == False) & (db.pr_person.gender == g_opt)).count()
gender.append([str(pr_gender_opts[g_opt]), int(count)])
age = []
for a_opt in pr_age_group_opts:
count = db((db.pr_person.deleted == False) & (db.pr_person.age_group == a_opt)).count()
age.append([str(pr_age_group_opts[a_opt]), int(count)])
total = int(db(db.pr_person.deleted == False).count())
output.update(module_name=module_name, gender=gender, age=age, total=total)
if r.representation in shn_interactive_view_formats:
if not r.component:
label = READ
else:
label = UPDATE
linkto = r.resource.crud._linkto(r)('[id]')
response.s3.actions = [dict(label=str(label), _class='action-btn', url=str(linkto))]
r.next = None
return output
response.s3.postp = postp
if auth.s3_logged_in():
add_btn = a(t('Add Person'), _class='action-btn', _href=url(r=request, f='person', args='create'))
else:
add_btn = None
output = s3_rest_controller('pr', 'person', add_btn=add_btn)
response.view = 'pr/index.html'
response.title = module_name
shn_menu()
return output
def person():
""" RESTful CRUD controller """
def prep(r):
if r.component_name == 'config':
_config = db.gis_config
defaults = db(_config.id == 1).select(limitby=(0, 1)).first()
for key in defaults.keys():
if key not in ['id', 'uuid', 'mci', 'update_record', 'delete_record']:
_config[key].default = defaults[key]
if r.representation == 'popup':
r.table.pe_label.readable = False
r.table.pe_label.writable = False
r.table.missing.readable = False
r.table.missing.writable = False
return True
response.s3.prep = prep
s3xrc.model.configure(db.pr_group_membership, list_fields=['id', 'group_id', 'group_head', 'description'])
table = db.pr_person
s3xrc.model.configure(table, listadd=False, insertable=True)
output = s3_rest_controller(prefix, resourcename, main='first_name', extra='last_name', rheader=lambda r: shn_pr_rheader(r, tabs=[(t('Basic Details'), None), (t('Images'), 'image'), (t('Identity'), 'identity'), (t('Address'), 'address'), (t('Contact Data'), 'pe_contact'), (t('Memberships'), 'group_membership'), (t('Presence Log'), 'presence'), (t('Subscriptions'), 'pe_subscription'), (t('Map Settings'), 'config')]))
shn_menu()
return output
def group():
""" RESTful CRUD controller """
tablename = '%s_%s' % (prefix, resourcename)
table = db[tablename]
response.s3.filter = db.pr_group.system == False
s3xrc.model.configure(db.pr_group_membership, list_fields=['id', 'person_id', 'group_head', 'description'])
output = s3_rest_controller(prefix, resourcename, rheader=lambda r: shn_pr_rheader(r, tabs=[(t('Group Details'), None), (t('Address'), 'address'), (t('Contact Data'), 'pe_contact'), (t('Members'), 'group_membership')]))
shn_menu()
return output
def image():
""" RESTful CRUD controller """
return s3_rest_controller(prefix, resourcename)
def pe_contact():
""" RESTful CRUD controller """
table = db.pr_pe_contact
table.pe_id.label = t('Person/Group')
table.pe_id.readable = True
table.pe_id.writable = True
return s3_rest_controller(prefix, resourcename)
def pentity():
""" RESTful CRUD controller """
return s3_rest_controller(prefix, resourcename)
def download():
""" Download a file.
@todo: deprecate? (individual download handler probably not needed)
"""
return response.download(request, db)
def tooltip():
""" Ajax tooltips """
if 'formfield' in request.vars:
response.view = 'pr/ajaxtips/%s.html' % request.vars.formfield
return dict()
def person_duplicates():
""" Handle De-duplication of People
@todo: permissions, audit, update super entity, PEP8, optimization?
@todo: check for component data!
@todo: user accounts, subscriptions?
"""
persons = db.pr_person
table_header = thead(tr(th(t('Person 1')), th(t('Person 2')), th(t('Match Percentage')), th(t('Resolve'))))
total_records = db(persons.id > 0).count()
item_list = []
if request.vars.iDisplayStart:
end = int(request.vars.iDisplayLength) + int(request.vars.iDisplayStart)
records = db((persons.id > 0) & (persons.deleted == False) & (persons.first_name != None)).select(persons.id, persons.pe_label, persons.missing, persons.first_name, persons.middle_name, persons.last_name, persons.preferred_name, persons.local_name, persons.age_group, persons.gender, persons.date_of_birth, persons.nationality, persons.country, persons.religion, persons.marital_status, persons.occupation, persons.tags, persons.comments)
count = 1
i = 0
for one_person in records:
soundex1 = soundex(onePerson.first_name)
array1 = []
array1.append(onePerson.pe_label)
array1.append(str(onePerson.missing))
array1.append(onePerson.first_name)
array1.append(onePerson.middle_name)
array1.append(onePerson.last_name)
array1.append(onePerson.preferred_name)
array1.append(onePerson.local_name)
array1.append(pr_age_group_opts.get(onePerson.age_group, t('None')))
array1.append(pr_gender_opts.get(onePerson.gender, t('None')))
array1.append(str(onePerson.date_of_birth))
array1.append(pr_nations.get(onePerson.nationality, t('None')))
array1.append(pr_nations.get(onePerson.country, t('None')))
array1.append(pr_religion_opts.get(onePerson.religion, t('None')))
array1.append(pr_marital_status_opts.get(onePerson.marital_status, t('None')))
array1.append(onePerson.occupation)
if onePerson.tags != None:
tagname = []
for item in onePerson.tags:
tagname.append(pr_impact_tags.get(item, t('None')))
array1.append(tagname)
else:
array1.append(onePerson.tags)
array1.append(onePerson.comments)
i = i + 1
j = 0
for another_person in records:
soundex2 = soundex(anotherPerson.first_name)
if j >= i:
array2 = []
array2.append(anotherPerson.pe_label)
array2.append(str(anotherPerson.missing))
array2.append(anotherPerson.first_name)
array2.append(anotherPerson.middle_name)
array2.append(anotherPerson.last_name)
array2.append(anotherPerson.preferred_name)
array2.append(anotherPerson.local_name)
array2.append(pr_age_group_opts.get(anotherPerson.age_group, t('None')))
array2.append(pr_gender_opts.get(anotherPerson.gender, t('None')))
array2.append(str(anotherPerson.date_of_birth))
array2.append(pr_nations.get(anotherPerson.nationality, t('None')))
array2.append(pr_nations.get(anotherPerson.country, t('None')))
array2.append(pr_religion_opts.get(anotherPerson.religion, t('None')))
array2.append(pr_marital_status_opts.get(anotherPerson.marital_status, t('None')))
array2.append(anotherPerson.occupation)
if anotherPerson.tags != None:
tagname = []
for item in anotherPerson.tags:
tagname.append(pr_impact_tags.get(item, t('None')))
array2.append(tagname)
else:
array2.append(anotherPerson.tags)
array2.append(anotherPerson.comments)
if count > end and request.vars.max != 'undefined':
count = int(request.vars.max)
break
if onePerson.id == anotherPerson.id:
continue
else:
mpercent = jaro_winkler_distance_row(array1, array2)
if int(mpercent) > 50 or soundex1 == soundex2:
count = count + 1
item_list.append([onePerson.first_name, anotherPerson.first_name, mpercent, '<a href="../pr/person_resolve?perID1=%i&perID2=%i", class="action-btn">Resolve</a>' % (onePerson.id, anotherPerson.id)])
else:
continue
j = j + 1
item_list = item_list[int(request.vars.iDisplayStart):end]
result = []
result.append({'sEcho': request.vars.sEcho, 'iTotalRecords': count, 'iTotalDisplayRecords': count, 'aaData': item_list})
output = json.dumps(result)
output = output[1:]
output = output[:-1]
return output
else:
items = div(table(table_header, tbody(), _id='list', _class='display'))
return dict(items=items)
def delete_person():
""" To delete references to the old record and replace it with the new one.
@todo: components??? cannot simply be re-linked!
@todo: user accounts?
@todo: super entity not updated!
"""
old = request.vars.old
new = request.vars.new
tables = shn_table_links('pr_person')
for table in tables:
for count in range(len(tables[table])):
field = tables[str(db[table])][count]
query = db[table][field] == old
db(query).update(**{field: new})
db(db.pr_person.id == old).update(deleted=True)
return 'Other Record Deleted, Linked Records Updated Successfully'
def person_resolve():
""" This opens a popup screen where the de-duplication process takes place.
@todo: components??? cannot simply re-link!
@todo: user accounts linked to these records?
@todo: update the super entity!
@todo: use S3Resources, implement this as a method handler
"""
per_id1 = request.vars.perID1
per_id2 = request.vars.perID2
persons = db.pr_person
count = 0
for field in persons:
id1 = str(count) + 'Right'
id2 = str(count) + 'Left'
count = count + 1
field.comment = div(table(tr(td(input(_type='button', _id=id1, _class='rightArrows', _value='-->')), td(input(_type='button', _id=id2, _class='leftArrows', _value='<--')))))
record = persons[perID1]
my_url = url(r=request, c='pr', f='person')
form1 = sqlform(persons, record, _id='form1', _action='%s/%s' % (myUrl, perID1))
for field in persons:
field.comment = None
record = persons[perID2]
form2 = sqlform(persons, record, _id='form2', _action='%s/%s' % (myUrl, perID2))
return dict(form1=form1, form2=form2, perID1=perID1, perID2=perID2)
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
INITIAL_DATA_TO_COMPLETE = [
'valid_0',
'valid_1',
'valid_10',
'valid_100',
'valid_101',
'valid_102',
'valid_103',
'valid_105',
'valid_106',
'valid_107',
'valid_108',
'valid_109',
'valid_11',
'valid_110',
'valid_111',
'valid_112',
'valid_115',
'valid_116',
'valid_117',
'valid_119',
'valid_12',
'valid_120',
'valid_121',
'valid_122',
'valid_123',
'valid_124',
'valid_125',
'valid_13',
'valid_130',
'valid_131',
'valid_133',
'valid_136',
'valid_138',
'valid_139',
'valid_14',
'valid_140',
'valid_141',
'valid_143',
'valid_144',
'valid_145',
'valid_146',
'valid_147',
'valid_148',
'valid_15',
'valid_152',
'valid_153',
'valid_154',
'valid_155',
'valid_156',
'valid_158',
'valid_160',
'valid_162',
'valid_163',
'valid_166',
'valid_169',
'valid_17',
'valid_171',
'valid_172',
'valid_174',
'valid_175',
'valid_176',
'valid_177',
'valid_178',
'valid_18',
'valid_181',
'valid_182',
'valid_184',
'valid_187',
'valid_19',
'valid_190',
'valid_191',
'valid_192',
'valid_193',
'valid_194',
'valid_196',
'valid_2',
'valid_20',
'valid_202',
'valid_203',
'valid_205',
'valid_206',
'valid_207',
'valid_208',
'valid_212',
'valid_214',
'valid_215',
'valid_216',
'valid_217',
'valid_219',
'valid_223',
'valid_225',
'valid_227',
'valid_228',
'valid_23',
'valid_230',
'valid_231',
'valid_232',
'valid_233',
'valid_234',
'valid_236',
]
COMMON_CONFIG = {
'task': 'msc:SessionBaseMsc',
'num_examples': -1,
'label_speaker_id': 'their',
'session_id': 4,
'datatype': 'valid',
}
MODEL_OPT = {
'BST90M': {
'previous_persona_type': 'none',
'num_previous_sessions_msg': 10,
'include_time_gap': False,
}
}
UI_OPT = {'BST90M': {'previous_persona_type': 'both', 'include_time_gap': False}}
|
initial_data_to_complete = ['valid_0', 'valid_1', 'valid_10', 'valid_100', 'valid_101', 'valid_102', 'valid_103', 'valid_105', 'valid_106', 'valid_107', 'valid_108', 'valid_109', 'valid_11', 'valid_110', 'valid_111', 'valid_112', 'valid_115', 'valid_116', 'valid_117', 'valid_119', 'valid_12', 'valid_120', 'valid_121', 'valid_122', 'valid_123', 'valid_124', 'valid_125', 'valid_13', 'valid_130', 'valid_131', 'valid_133', 'valid_136', 'valid_138', 'valid_139', 'valid_14', 'valid_140', 'valid_141', 'valid_143', 'valid_144', 'valid_145', 'valid_146', 'valid_147', 'valid_148', 'valid_15', 'valid_152', 'valid_153', 'valid_154', 'valid_155', 'valid_156', 'valid_158', 'valid_160', 'valid_162', 'valid_163', 'valid_166', 'valid_169', 'valid_17', 'valid_171', 'valid_172', 'valid_174', 'valid_175', 'valid_176', 'valid_177', 'valid_178', 'valid_18', 'valid_181', 'valid_182', 'valid_184', 'valid_187', 'valid_19', 'valid_190', 'valid_191', 'valid_192', 'valid_193', 'valid_194', 'valid_196', 'valid_2', 'valid_20', 'valid_202', 'valid_203', 'valid_205', 'valid_206', 'valid_207', 'valid_208', 'valid_212', 'valid_214', 'valid_215', 'valid_216', 'valid_217', 'valid_219', 'valid_223', 'valid_225', 'valid_227', 'valid_228', 'valid_23', 'valid_230', 'valid_231', 'valid_232', 'valid_233', 'valid_234', 'valid_236']
common_config = {'task': 'msc:SessionBaseMsc', 'num_examples': -1, 'label_speaker_id': 'their', 'session_id': 4, 'datatype': 'valid'}
model_opt = {'BST90M': {'previous_persona_type': 'none', 'num_previous_sessions_msg': 10, 'include_time_gap': False}}
ui_opt = {'BST90M': {'previous_persona_type': 'both', 'include_time_gap': False}}
|
def inicio():
print ("--PRINCIPAL--")
print("1. AGREGAR")
print("2. ELIMINAR")
print("3. VER")
opc = input ("------> ")
return opc
|
def inicio():
print('--PRINCIPAL--')
print('1. AGREGAR')
print('2. ELIMINAR')
print('3. VER')
opc = input('------> ')
return opc
|
n, m = map(int, input().strip().split())
matrix = [list(map(int, input().strip().split())) for _ in range(n)]
k = int(input().strip())
for lst in sorted(matrix, key=lambda l: l[k]):
print(*lst)
|
(n, m) = map(int, input().strip().split())
matrix = [list(map(int, input().strip().split())) for _ in range(n)]
k = int(input().strip())
for lst in sorted(matrix, key=lambda l: l[k]):
print(*lst)
|
Nsweeps = 100
size = 32
for beta in [0.1, 0.8, 1.6]:
g = Grid(size, beta)
m = g.do_sweeps(0, Nsweeps)
grid = g.cells
mag = g.magnetisation(grid)
e_plus = np.zeros((size, size))
e_minus = np.zeros((size, size))
for i in np.arange(size):
for j in np.arange(size):
e_plus[i,j], e_minus[i,j] = g.energy(i, j, beta, grid)
if not os.path.exists(filename):
filename = 'test_data_beta_%0.1f_2.pickle'%beta
f = open(filename, 'wb')
pickle.dump((grid, mag, e_plus, e_minus, beta), f)
f.close()
if not os.path.exists(filename):
filename = 'test_data_beta_%0.1f_grid_only_2.pickle'%beta
f = open(filename, 'wb')
pickle.dump((grid, beta), f)
f.close()
|
nsweeps = 100
size = 32
for beta in [0.1, 0.8, 1.6]:
g = grid(size, beta)
m = g.do_sweeps(0, Nsweeps)
grid = g.cells
mag = g.magnetisation(grid)
e_plus = np.zeros((size, size))
e_minus = np.zeros((size, size))
for i in np.arange(size):
for j in np.arange(size):
(e_plus[i, j], e_minus[i, j]) = g.energy(i, j, beta, grid)
if not os.path.exists(filename):
filename = 'test_data_beta_%0.1f_2.pickle' % beta
f = open(filename, 'wb')
pickle.dump((grid, mag, e_plus, e_minus, beta), f)
f.close()
if not os.path.exists(filename):
filename = 'test_data_beta_%0.1f_grid_only_2.pickle' % beta
f = open(filename, 'wb')
pickle.dump((grid, beta), f)
f.close()
|
class Listing:
def extrem(self, nvar1="", nvar2="", ninc="", **kwargs):
"""Lists the extreme values for variables.
APDL Command: EXTREM
Parameters
----------
nvar1, nvar2, ninc
List extremes for variables NVAR1 through NVAR2 in steps of NINC.
Variable range defaults to its maximum. NINC defaults to 1.
Notes
-----
Lists the extreme values (and the corresponding times) for stored and
calculated variables. Extremes for stored variables are automatically
listed as they are stored. Only the real part of a complex number is
used. Extreme values may also be assigned to parameters [*GET].
"""
command = f"EXTREM,{nvar1},{nvar2},{ninc}"
return self.run(command, **kwargs)
def lines(self, n="", **kwargs):
"""Specifies the length of a printed page.
APDL Command: LINES
Parameters
----------
n
Number of lines per page (defaults to 20). (Minimum allowed = 11).
Notes
-----
Specifies the length of a printed page (for use in reports, etc.).
"""
command = f"LINES,{n}"
return self.run(command, **kwargs)
def nprint(self, n="", **kwargs):
"""Defines which time points stored are to be listed.
APDL Command: NPRINT
Parameters
----------
n
List data associated with every N time (or frequency) point(s),
beginning with the first point stored (defaults to 1).
Notes
-----
Defines which time (or frequency) points within the range stored are to
be listed.
"""
command = f"NPRINT,{n}"
return self.run(command, **kwargs)
def prcplx(self, key="", **kwargs):
"""Defines the output form for complex variables.
APDL Command: PRCPLX
Parameters
----------
key
Output form key:
0 - Real and imaginary parts.
1 - Amplitude and phase angle. Stored real and imaginary data are converted to
amplitude and phase angle upon output. Data remain stored as
real and imaginary parts.
Notes
-----
Defines the output form for complex variables. Used only with harmonic
analyses (ANTYPE,HARMIC).
All results data are stored in the form of real and imaginary
components and converted to amplitude and/or phase angle as specified
via the PRCPLX command. The conversion is not valid for derived
results (such as principal stress/strain, equivalent stress/strain and
USUM).
"""
command = f"PRCPLX,{key}"
return self.run(command, **kwargs)
def prtime(self, tmin="", tmax="", **kwargs):
"""Defines the time range for which data are to be listed.
APDL Command: PRTIME
Parameters
----------
tmin
Minimum time (defaults to the first point stored).
tmax
Maximum time (defaults to the last point stored).
Notes
-----
Defines the time (or frequency) range (within the range stored) for
which data are to be listed.
"""
command = f"PRTIME,{tmin},{tmax}"
return self.run(command, **kwargs)
def prvar(self, nvar1="", nvar2="", nvar3="", nvar4="", nvar5="", nvar6="",
**kwargs):
"""Lists variables vs. time (or frequency).
APDL Command: PRVAR
Parameters
----------
nvar1, nvar2, nvar3, . . . , nvar6
Variables to be displayed, defined either by the reference number
or a unique thirty-two character name. If duplicate names are used
the command will print the data for the lowest-numbered variable
with that name.
Notes
-----
Lists variables vs. time (or frequency). Up to six variables may be
listed across the line. Time column output format can be changed using
the /FORMAT command arguments Ftype, NWIDTH, and DSIGNF.
"""
command = f"PRVAR,{nvar1},{nvar2},{nvar3},{nvar4},{nvar5},{nvar6}"
return self.run(command, **kwargs)
|
class Listing:
def extrem(self, nvar1='', nvar2='', ninc='', **kwargs):
"""Lists the extreme values for variables.
APDL Command: EXTREM
Parameters
----------
nvar1, nvar2, ninc
List extremes for variables NVAR1 through NVAR2 in steps of NINC.
Variable range defaults to its maximum. NINC defaults to 1.
Notes
-----
Lists the extreme values (and the corresponding times) for stored and
calculated variables. Extremes for stored variables are automatically
listed as they are stored. Only the real part of a complex number is
used. Extreme values may also be assigned to parameters [*GET].
"""
command = f'EXTREM,{nvar1},{nvar2},{ninc}'
return self.run(command, **kwargs)
def lines(self, n='', **kwargs):
"""Specifies the length of a printed page.
APDL Command: LINES
Parameters
----------
n
Number of lines per page (defaults to 20). (Minimum allowed = 11).
Notes
-----
Specifies the length of a printed page (for use in reports, etc.).
"""
command = f'LINES,{n}'
return self.run(command, **kwargs)
def nprint(self, n='', **kwargs):
"""Defines which time points stored are to be listed.
APDL Command: NPRINT
Parameters
----------
n
List data associated with every N time (or frequency) point(s),
beginning with the first point stored (defaults to 1).
Notes
-----
Defines which time (or frequency) points within the range stored are to
be listed.
"""
command = f'NPRINT,{n}'
return self.run(command, **kwargs)
def prcplx(self, key='', **kwargs):
"""Defines the output form for complex variables.
APDL Command: PRCPLX
Parameters
----------
key
Output form key:
0 - Real and imaginary parts.
1 - Amplitude and phase angle. Stored real and imaginary data are converted to
amplitude and phase angle upon output. Data remain stored as
real and imaginary parts.
Notes
-----
Defines the output form for complex variables. Used only with harmonic
analyses (ANTYPE,HARMIC).
All results data are stored in the form of real and imaginary
components and converted to amplitude and/or phase angle as specified
via the PRCPLX command. The conversion is not valid for derived
results (such as principal stress/strain, equivalent stress/strain and
USUM).
"""
command = f'PRCPLX,{key}'
return self.run(command, **kwargs)
def prtime(self, tmin='', tmax='', **kwargs):
"""Defines the time range for which data are to be listed.
APDL Command: PRTIME
Parameters
----------
tmin
Minimum time (defaults to the first point stored).
tmax
Maximum time (defaults to the last point stored).
Notes
-----
Defines the time (or frequency) range (within the range stored) for
which data are to be listed.
"""
command = f'PRTIME,{tmin},{tmax}'
return self.run(command, **kwargs)
def prvar(self, nvar1='', nvar2='', nvar3='', nvar4='', nvar5='', nvar6='', **kwargs):
"""Lists variables vs. time (or frequency).
APDL Command: PRVAR
Parameters
----------
nvar1, nvar2, nvar3, . . . , nvar6
Variables to be displayed, defined either by the reference number
or a unique thirty-two character name. If duplicate names are used
the command will print the data for the lowest-numbered variable
with that name.
Notes
-----
Lists variables vs. time (or frequency). Up to six variables may be
listed across the line. Time column output format can be changed using
the /FORMAT command arguments Ftype, NWIDTH, and DSIGNF.
"""
command = f'PRVAR,{nvar1},{nvar2},{nvar3},{nvar4},{nvar5},{nvar6}'
return self.run(command, **kwargs)
|
load("@rules_maven_third_party//:import_external.bzl", import_external = "import_external")
def dependencies():
import_external(
name = "org_apache_maven_resolver_maven_resolver_api",
artifact = "org.apache.maven.resolver:maven-resolver-api:1.4.0",
artifact_sha256 = "85aac254240e8bf387d737acf5fcd18f07163ae55a0223b107c7e2af1dfdc6e6",
srcjar_sha256 = "be7f42679a5485fbe30c475afa05c12dd9a2beb83bbcebbb3d2e79eb8aeff9c4",
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_connector_basic",
artifact = "org.apache.maven.resolver:maven-resolver-connector-basic:1.4.0",
artifact_sha256 = "4283db771d9265136615637bd22d02929cfd548c8d351f76ecb88a3006b5faf7",
srcjar_sha256 = "556163b53b1f98df263adf1d26b269cd45316a827f169e0ede514ca5fca0c5d1",
deps = [
"@org_apache_maven_resolver_maven_resolver_api",
"@org_apache_maven_resolver_maven_resolver_spi",
"@org_apache_maven_resolver_maven_resolver_util",
"@org_slf4j_slf4j_api",
],
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_impl",
artifact = "org.apache.maven.resolver:maven-resolver-impl:1.4.0",
artifact_sha256 = "004662079feeed66251480ad76fedbcabff96ee53db29c59f6aa564647c5bfe6",
srcjar_sha256 = "b544f134261f813b1a44ffcc97590236d3d6e2519722d55dea395a96fef18206",
deps = [
"@org_apache_maven_resolver_maven_resolver_api",
"@org_apache_maven_resolver_maven_resolver_spi",
"@org_apache_maven_resolver_maven_resolver_util",
"@org_slf4j_slf4j_api",
],
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_spi",
artifact = "org.apache.maven.resolver:maven-resolver-spi:1.4.0",
artifact_sha256 = "8a2985eb28135eae4c40db446081b1533c1813c251bb370756777697e0b7114e",
srcjar_sha256 = "89099a02006b6ce46096d89f021675bf000e96300bcdc0ff439a86d6e322c761",
deps = [
"@org_apache_maven_resolver_maven_resolver_api",
],
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_transport_file",
artifact = "org.apache.maven.resolver:maven-resolver-transport-file:1.4.0",
artifact_sha256 = "94eb9bcc073ac1591002b26a4cf558324b12d8f76b6d5628151d7f87733436f6",
srcjar_sha256 = "17abd750063fa74cbf754e803ba27ca0216b0bebc8e45e1872cd9ed5a1e5e719",
deps = [
"@org_apache_maven_resolver_maven_resolver_api",
"@org_apache_maven_resolver_maven_resolver_spi",
"@org_slf4j_slf4j_api",
],
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_transport_http",
artifact = "org.apache.maven.resolver:maven-resolver-transport-http:1.4.0",
artifact_sha256 = "8dddd83ec6244bde5ef63ae679a0ce5d7e8fc566369d7391c8814206e2a7114f",
srcjar_sha256 = "5af0150a1ab714b164763d1daca4b8fdd1ab6dd445ec3c57e7ec916ccbdf7e4e",
deps = [
"@org_apache_httpcomponents_httpclient",
"@org_apache_httpcomponents_httpcore",
"@org_apache_maven_resolver_maven_resolver_api",
"@org_apache_maven_resolver_maven_resolver_spi",
"@org_apache_maven_resolver_maven_resolver_util",
"@org_slf4j_jcl_over_slf4j",
"@org_slf4j_slf4j_api",
],
)
import_external(
name = "org_apache_maven_resolver_maven_resolver_util",
artifact = "org.apache.maven.resolver:maven-resolver-util:1.4.0",
artifact_sha256 = "e83b6c2de4b8b8d99d3c226f5e447f70df808834824336c360aa615fc4d7beac",
srcjar_sha256 = "74dd3696e2df175db39b944079f7b49941e39e57f98e469f942635a2ba1cae57",
deps = [
"@org_apache_maven_resolver_maven_resolver_api",
],
)
|
load('@rules_maven_third_party//:import_external.bzl', import_external='import_external')
def dependencies():
import_external(name='org_apache_maven_resolver_maven_resolver_api', artifact='org.apache.maven.resolver:maven-resolver-api:1.4.0', artifact_sha256='85aac254240e8bf387d737acf5fcd18f07163ae55a0223b107c7e2af1dfdc6e6', srcjar_sha256='be7f42679a5485fbe30c475afa05c12dd9a2beb83bbcebbb3d2e79eb8aeff9c4')
import_external(name='org_apache_maven_resolver_maven_resolver_connector_basic', artifact='org.apache.maven.resolver:maven-resolver-connector-basic:1.4.0', artifact_sha256='4283db771d9265136615637bd22d02929cfd548c8d351f76ecb88a3006b5faf7', srcjar_sha256='556163b53b1f98df263adf1d26b269cd45316a827f169e0ede514ca5fca0c5d1', deps=['@org_apache_maven_resolver_maven_resolver_api', '@org_apache_maven_resolver_maven_resolver_spi', '@org_apache_maven_resolver_maven_resolver_util', '@org_slf4j_slf4j_api'])
import_external(name='org_apache_maven_resolver_maven_resolver_impl', artifact='org.apache.maven.resolver:maven-resolver-impl:1.4.0', artifact_sha256='004662079feeed66251480ad76fedbcabff96ee53db29c59f6aa564647c5bfe6', srcjar_sha256='b544f134261f813b1a44ffcc97590236d3d6e2519722d55dea395a96fef18206', deps=['@org_apache_maven_resolver_maven_resolver_api', '@org_apache_maven_resolver_maven_resolver_spi', '@org_apache_maven_resolver_maven_resolver_util', '@org_slf4j_slf4j_api'])
import_external(name='org_apache_maven_resolver_maven_resolver_spi', artifact='org.apache.maven.resolver:maven-resolver-spi:1.4.0', artifact_sha256='8a2985eb28135eae4c40db446081b1533c1813c251bb370756777697e0b7114e', srcjar_sha256='89099a02006b6ce46096d89f021675bf000e96300bcdc0ff439a86d6e322c761', deps=['@org_apache_maven_resolver_maven_resolver_api'])
import_external(name='org_apache_maven_resolver_maven_resolver_transport_file', artifact='org.apache.maven.resolver:maven-resolver-transport-file:1.4.0', artifact_sha256='94eb9bcc073ac1591002b26a4cf558324b12d8f76b6d5628151d7f87733436f6', srcjar_sha256='17abd750063fa74cbf754e803ba27ca0216b0bebc8e45e1872cd9ed5a1e5e719', deps=['@org_apache_maven_resolver_maven_resolver_api', '@org_apache_maven_resolver_maven_resolver_spi', '@org_slf4j_slf4j_api'])
import_external(name='org_apache_maven_resolver_maven_resolver_transport_http', artifact='org.apache.maven.resolver:maven-resolver-transport-http:1.4.0', artifact_sha256='8dddd83ec6244bde5ef63ae679a0ce5d7e8fc566369d7391c8814206e2a7114f', srcjar_sha256='5af0150a1ab714b164763d1daca4b8fdd1ab6dd445ec3c57e7ec916ccbdf7e4e', deps=['@org_apache_httpcomponents_httpclient', '@org_apache_httpcomponents_httpcore', '@org_apache_maven_resolver_maven_resolver_api', '@org_apache_maven_resolver_maven_resolver_spi', '@org_apache_maven_resolver_maven_resolver_util', '@org_slf4j_jcl_over_slf4j', '@org_slf4j_slf4j_api'])
import_external(name='org_apache_maven_resolver_maven_resolver_util', artifact='org.apache.maven.resolver:maven-resolver-util:1.4.0', artifact_sha256='e83b6c2de4b8b8d99d3c226f5e447f70df808834824336c360aa615fc4d7beac', srcjar_sha256='74dd3696e2df175db39b944079f7b49941e39e57f98e469f942635a2ba1cae57', deps=['@org_apache_maven_resolver_maven_resolver_api'])
|
# Generic uwsgi_param headers
CONTENT_LENGTH = 'CONTENT_LENGTH'
CONTENT_TYPE = 'CONTENT_TYPE'
DOCUMENT_ROOT = 'DOCUMENT_ROOT'
QUERY_STRING = 'QUERY_STRING'
PATH_INFO = 'PATH_INFO'
REMOTE_ADDR = 'REMOTE_ADDR'
REMOTE_PORT = 'REMOTE_PORT'
REQUEST_METHOD = 'REQUEST_METHOD'
REQUEST_URI = 'REQUEST_URI'
SERVER_ADDR = 'SERVER_ADDR'
SERVER_NAME = 'SERVER_NAME'
SERVER_PORT = 'SERVER_PORT'
SERVER_PROTOCOL = 'SERVER_PROTOCOL'
# SSL uwsgi_param headers
CLIENT_SSL_CERT = 'CLIENT_SSL_CERT'
|
content_length = 'CONTENT_LENGTH'
content_type = 'CONTENT_TYPE'
document_root = 'DOCUMENT_ROOT'
query_string = 'QUERY_STRING'
path_info = 'PATH_INFO'
remote_addr = 'REMOTE_ADDR'
remote_port = 'REMOTE_PORT'
request_method = 'REQUEST_METHOD'
request_uri = 'REQUEST_URI'
server_addr = 'SERVER_ADDR'
server_name = 'SERVER_NAME'
server_port = 'SERVER_PORT'
server_protocol = 'SERVER_PROTOCOL'
client_ssl_cert = 'CLIENT_SSL_CERT'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.