content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
class Solution:
# my solution
def removeDuplicates(self, nums: List[int]) -> int:
i = 1
# while i < len(nums):
for j in range(len(nums)-1):
if nums[j] != nums[j+1]:
nums[i] = nums[j+1]
i += 1
return i | class Solution:
def remove_duplicates(self, nums: List[int]) -> int:
i = 1
for j in range(len(nums) - 1):
if nums[j] != nums[j + 1]:
nums[i] = nums[j + 1]
i += 1
return i |
load("@io_bazel_rules_docker//container:providers.bzl", "PushInfo")
def _run_deployment_impl(ctx):
    """Emits an executable deploy script for a pushed container image.

    Renders a shell-script template with the image coordinates, environment
    variables, resource settings and dependent deploy targets, then returns
    it as this rule's executable output together with its runfiles.
    """
    # Image coordinates come from the container_push target's PushInfo.
    registry = ctx.attr.image_push[PushInfo].registry
    repository = ctx.attr.image_push[PushInfo].repository
    image = "%s/%s" % (registry, repository)
    # One "<executable short path> <name>" line per dependent deployment.
    deploy_deps_string = "\n".join([
        label[DefaultInfo].files_to_run.executable.short_path + " " + name
        for label, name in ctx.attr.deps.items()
    ])
    # Environment rendered as comma-separated KEY=VALUE pairs.
    env_vars_string = ",".join(["%s=%s" % (k, v) for k, v in ctx.attr.env.items()])
    # Service names use dashes instead of underscores; the "deploy-" prefix
    # is stripped and the result capped at 30 characters (presumably a
    # provider-side name-length limit — confirm).
    service_name_prefix = ctx.label.name.replace("_", "-")
    if service_name_prefix.startswith("deploy-"):
        service_name_prefix = service_name_prefix[len("deploy-"):][0:30]
    # Substitute all computed values into the deploy template.
    ctx.actions.expand_template(
        template = ctx.file._deploy_template,
        output = ctx.outputs.deploy_script,
        substitutions = {
            "{DEPLOY_BIN}": ctx.attr._deploy[DefaultInfo].files_to_run.executable.short_path,
            "{DEPLOY_DEPS}": deploy_deps_string,
            "{PUSH_IMAGE}": ctx.attr.image_push[DefaultInfo].files_to_run.executable.short_path,
            "{IMAGE}": image,
            "{ENV_VARS}": env_vars_string,
            "{CONCURRENCY}": str(ctx.attr.concurrency),
            "{REGION}": ctx.attr.region,
            "{MEMORY}": ctx.attr.memory,
            "{PROJECT_ID}": ctx.attr.project_id,
            "{SERVICE_NAME_PREFIX}": service_name_prefix,
            "{GENERATE_UNIQUE_NAME}": str(ctx.attr.generate_unique_name),
        },
        is_executable = False,
    )
    # The script needs the image digest and push tooling at run time, plus
    # the deploy helper and every dependent deployment's runfiles.
    runfiles = ctx.runfiles(
        files = [
            ctx.attr.image_push[PushInfo].digest,
        ] + ctx.attr.image_push[DefaultInfo].files.to_list(),
    )
    runfiles = runfiles.merge(ctx.attr.image_push[DefaultInfo].default_runfiles)
    runfiles = runfiles.merge(ctx.attr._deploy[DefaultInfo].default_runfiles)
    for label in ctx.attr.deps:
        runfiles = runfiles.merge(label[DefaultInfo].default_runfiles)
    return [
        DefaultInfo(
            executable = ctx.outputs.deploy_script,
            runfiles = runfiles,
        ),
    ]
# Executable rule that generates a deploy script for a container service.
# Run via `bazel run` to push the image and deploy it.
run_deployment = rule(
    implementation = _run_deployment_impl,
    attrs = {
        # Environment variables passed to the deployed service (KEY: VALUE).
        "env": attr.string_dict(),
        # The container_push target providing the image to deploy.
        "image_push": attr.label(
            mandatory = True,
            providers = [PushInfo, DefaultInfo],
        ),
        # Maximum concurrent requests per instance.
        "concurrency": attr.int(
            default = 80,
        ),
        # Memory limit for the service.
        "memory": attr.string(
            default = "512Mi",
        ),
        # Region the service is deployed to.
        "region": attr.string(
            default = "europe-west1",
        ),
        # Cloud project that owns the service.
        "project_id": attr.string(
            mandatory = True,
        ),
        # Dependent deployments, mapped to their service names.
        "deps": attr.label_keyed_string_dict(),
        # Whether the deploy script should generate a unique service name.
        "generate_unique_name": attr.bool(
            default = True,
        ),
        # Shell template expanded into the deploy script (implicit dep).
        "_deploy_template": attr.label(
            default = Label("//run:deploy.sh"),
            allow_single_file = True,
        ),
        # Helper binary invoked by the generated script (implicit dep).
        "_deploy": attr.label(
            default = Label("//run/src/main/java/com/github/iljakroonen/rules/run:Deploy"),
        ),
    },
    outputs = {
        # The generated, runnable deploy script.
        "deploy_script": "%{name}.sh",
    },
    executable = True,
)
| load('@io_bazel_rules_docker//container:providers.bzl', 'PushInfo')
def _run_deployment_impl(ctx):
registry = ctx.attr.image_push[PushInfo].registry
repository = ctx.attr.image_push[PushInfo].repository
image = '%s/%s' % (registry, repository)
deploy_deps_string = '\n'.join([label[DefaultInfo].files_to_run.executable.short_path + ' ' + name for (label, name) in ctx.attr.deps.items()])
env_vars_string = ','.join(['%s=%s' % (k, v) for (k, v) in ctx.attr.env.items()])
service_name_prefix = ctx.label.name.replace('_', '-')
if service_name_prefix.startswith('deploy-'):
service_name_prefix = service_name_prefix[len('deploy-'):][0:30]
ctx.actions.expand_template(template=ctx.file._deploy_template, output=ctx.outputs.deploy_script, substitutions={'{DEPLOY_BIN}': ctx.attr._deploy[DefaultInfo].files_to_run.executable.short_path, '{DEPLOY_DEPS}': deploy_deps_string, '{PUSH_IMAGE}': ctx.attr.image_push[DefaultInfo].files_to_run.executable.short_path, '{IMAGE}': image, '{ENV_VARS}': env_vars_string, '{CONCURRENCY}': str(ctx.attr.concurrency), '{REGION}': ctx.attr.region, '{MEMORY}': ctx.attr.memory, '{PROJECT_ID}': ctx.attr.project_id, '{SERVICE_NAME_PREFIX}': service_name_prefix, '{GENERATE_UNIQUE_NAME}': str(ctx.attr.generate_unique_name)}, is_executable=False)
runfiles = ctx.runfiles(files=[ctx.attr.image_push[PushInfo].digest] + ctx.attr.image_push[DefaultInfo].files.to_list())
runfiles = runfiles.merge(ctx.attr.image_push[DefaultInfo].default_runfiles)
runfiles = runfiles.merge(ctx.attr._deploy[DefaultInfo].default_runfiles)
for label in ctx.attr.deps:
runfiles = runfiles.merge(label[DefaultInfo].default_runfiles)
return [default_info(executable=ctx.outputs.deploy_script, runfiles=runfiles)]
run_deployment = rule(implementation=_run_deployment_impl, attrs={'env': attr.string_dict(), 'image_push': attr.label(mandatory=True, providers=[PushInfo, DefaultInfo]), 'concurrency': attr.int(default=80), 'memory': attr.string(default='512Mi'), 'region': attr.string(default='europe-west1'), 'project_id': attr.string(mandatory=True), 'deps': attr.label_keyed_string_dict(), 'generate_unique_name': attr.bool(default=True), '_deploy_template': attr.label(default=label('//run:deploy.sh'), allow_single_file=True), '_deploy': attr.label(default=label('//run/src/main/java/com/github/iljakroonen/rules/run:Deploy'))}, outputs={'deploy_script': '%{name}.sh'}, executable=True) |
"""
SPIFlash.py
Contains code needed to emulate SPI flash memory.
Copyright (c) 2018, wchill
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the organization nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
class SPIFlash(object):
    """In-memory emulation of a SPI flash chip.

    The backing store is a list of byte values (0-255); a blank device is
    filled with 0xff, matching erased NOR flash.
    """

    def __init__(self, size=0x80000, filename=None, data=None):
        """Initialise from a file, from raw data, or as blank memory.

        :param size: capacity in bytes when neither filename nor data given
        :param filename: optional path of a file seeding the contents
        :param data: optional bytes/iterable of ints seeding the contents
        """
        if filename is not None:
            with open(filename, 'rb') as f:
                self._mem = list(f.read())
        elif data is not None:
            self._mem = list(data)
        else:
            self._mem = [0xff] * size

    def __repr__(self):
        return 'SPIFlash(data={})'.format(bytes(self._mem))

    def __str__(self):
        return 'SPI flash memory ({} bytes)'.format(len(self._mem))

    def __len__(self):
        return len(self._mem)

    def __setitem__(self, key, value):
        """Write an int, bytes, or list of ints starting at offset *key*."""
        if not isinstance(key, int):
            raise KeyError('Key must be an integer')
        if isinstance(value, int):
            self.write(key, [value])
        elif isinstance(value, bytes):
            self.write(key, list(value))
        elif isinstance(value, list):
            self.write(key, value)
        else:
            raise ValueError('Value must be an int, bytes or list')

    def __getitem__(self, key):
        """Read one byte (int key, returned as bytes) or a raw list slice."""
        if isinstance(key, int):
            return self.read(key, 1)
        elif isinstance(key, slice):
            return self._mem[key]
        raise KeyError('Key must be an int or slice')

    def __iter__(self):
        return iter(self._mem)

    def load(self, filename):
        """Overwrite the start of memory with the contents of *filename*."""
        with open(filename, 'rb') as f:
            self.write(0, list(f.read()))

    def save(self, filename):
        """Dump the entire memory image to *filename*."""
        with open(filename, 'wb') as f:
            f.write(self.read(0, len(self._mem)))

    def read(self, address, length):
        """Return *length* bytes starting at *address* as a bytes object.

        :raises ValueError: if the requested range is outside the memory.
        """
        if address < 0 or address >= len(self._mem):
            raise ValueError(
                'Invalid address {:04x} - valid range is 0 to {:04x}'
                .format(address, len(self._mem) - 1))
        end_addr = address + length
        if end_addr <= 0 or end_addr > len(self._mem):
            raise ValueError(
                'Invalid end address {:04x} - valid range is 1 to {:04x}'
                .format(end_addr, len(self._mem)))
        return bytes(self._mem[address:end_addr])

    def write(self, address, data):
        """Write the list of byte values *data* starting at *address*.

        :raises ValueError: if the target range is outside the memory.
        """
        if address < 0 or address >= len(self._mem):
            raise ValueError(
                'Invalid address {:04x} - valid range is 0 to {:04x}'
                .format(address, len(self._mem) - 1))
        end_addr = address + len(data)
        if end_addr <= 0 or end_addr > len(self._mem):
            raise ValueError(
                'Invalid end address {:04x} - valid range is 1 to {:04x}'
                .format(end_addr, len(self._mem)))
        self._mem = self._mem[:address] + data + self._mem[end_addr:]

    def erase(self, address):
        """Erase (set to 0xff) the 4 KiB sector starting at *address*.

        :raises ValueError: if the address is not 4 KiB aligned or the
            sector does not fit inside the memory.
        """
        if address & 4095:
            raise ValueError(
                'Invalid erase address {:04x} - value must be 4kb aligned'
                .format(address))
        # Bug fix: the original had no range check here, so an out-of-range
        # (but aligned) address raised IndexError mid-erase, or wrote via
        # negative indices — unlike read()/write(), which raise ValueError.
        if address < 0 or address + 4096 > len(self._mem):
            raise ValueError(
                'Invalid erase address {:04x} - valid range is 0 to {:04x}'
                .format(address, len(self._mem) - 1))
        for i in range(address, address + 4096):
            self._mem[i] = 0xff
| """
SPIFlash.py
Contains code needed to emulate SPI flash memory.
Copyright (c) 2018, wchill
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the organization nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
class Spiflash(object):
def __init__(self, size=524288, filename=None, data=None):
if filename is not None:
with open(filename, 'rb') as f:
self._mem = list(f.read())
elif data is not None:
self._mem = list(data)
else:
self._mem = [255] * size
def __repr__(self):
return 'SPIFlash(data={})'.format(bytes(self._mem))
def __str__(self):
return 'SPI flash memory ({} bytes)'.format(len(self._mem))
def __len__(self):
return len(self._mem)
def __setitem__(self, key, value):
if not isinstance(key, int):
raise key_error('Key must be an integer')
if isinstance(value, int):
self.write(key, [value])
elif isinstance(value, bytes):
self.write(key, list(value))
elif isinstance(value, list):
self.write(key, value)
else:
raise value_error('Value must be an int, bytes or list')
def __getitem__(self, key):
if isinstance(key, int):
return self.read(key, 1)
elif isinstance(key, slice):
return self._mem[key]
raise key_error('Key must be an int or slice')
def __iter__(self):
return iter(self._mem)
def load(self, filename):
with open(filename, 'rb') as f:
self.write(0, list(f.read()))
def save(self, filename):
with open(filename, 'wb') as f:
f.write(self.read(0, len(self._mem)))
def read(self, address, length):
if address < 0 or address >= len(self._mem):
raise value_error('Invalid address {:04x} - valid range is 0 to {:04x}'.format(address, len(self._mem) - 1))
end_addr = address + length
if end_addr <= 0 or end_addr > len(self._mem):
raise value_error('Invalid end address {:04x} - valid range is 1 to {:04x}'.format(end_addr, len(self._mem)))
return bytes(self._mem[address:end_addr])
def write(self, address, data):
if address < 0 or address >= len(self._mem):
raise value_error('Invalid address {:04x} - valid range is 0 to {:04x}'.format(address, len(self._mem) - 1))
end_addr = address + len(data)
if end_addr <= 0 or end_addr > len(self._mem):
raise value_error('Invalid end address {:04x} - valid range is 1 to {:04x}'.format(end_addr, len(self._mem)))
self._mem = self._mem[:address] + data + self._mem[end_addr:]
def erase(self, address):
if address & 4095:
raise value_error('Invalid erase address {:04x} - value must be 4kb aligned'.format(address))
for i in range(address, address + 4096):
self._mem[i] = 255 |
class Solution:
    """Trapping Rain Water via a monotonically decreasing index stack."""

    def trap(self, height: List[int]) -> int:
        """Return the total units of water trapped between the bars."""
        if not height:
            return 0
        stack = []  # indices of bars with non-increasing heights
        total = 0
        for right, bar in enumerate(height):
            # A taller bar closes every basin bounded by shorter bars.
            while stack and bar > height[stack[-1]]:
                bottom = stack.pop()
                if not stack:
                    break
                left = stack[-1]
                width = right - left - 1
                depth = min(bar, height[left]) - height[bottom]
                total += width * depth
            stack.append(right)
        return total
| class Solution:
def trap(self, height: List[int]) -> int:
if not height:
return 0
s = []
result = 0
for i in range(len(height)):
while s and height[i] > height[s[-1]]:
top = s.pop()
if not s:
break
distance = i - s[-1] - 1
bounded_height = min(height[i], height[s[-1]]) - height[top]
result += distance * bounded_height
s.append(i)
return result |
"""
Search in a rotated sorted array - Distinct
There is an integer array nums sorted in
ascending order (with distinct values).
Prior to being passed to your function, nums is possibly
rotated at an unknown pivot index k (1 <= k < nums.length)
such that the resulting array is [nums[k], nums[k+1], ...,
nums[n-1], nums[0], nums[1], ..., nums[k-1]] (0-indexed).
For example, [0,1,2,4,5,6,7] might be rotated at pivot
index 3 and become [4,5,6,7,0,1,2].
Given the array nums after the possible rotation and an
integer target, return the index of target if it is in nums,
or -1 if it is not in nums.
You must write an algorithm with O(log n) runtime complexity.
Example 1:
Input: nums = [4,5,6,7,0,1,2], target = 0
Output: 4
Example 2:
Input: nums = [4,5,6,7,0,1,2], target = 3
Output: -1
Example 3:
Input: nums = [1], target = 0
Output: -1
Constraints:
1 <= nums.length <= 5000
-104 <= nums[i] <= 104
All values of nums are unique.
nums is an ascending array that is possibly rotated.
-104 <= target <= 104
"""
# Binary search
"""
Time complexity : O(logN).
Space complexity : O(1).
"""
class Solution:
    """Search in a rotated, duplicate-free sorted array.

    Two-phase approach: binary-search for the rotation point, then
    binary-search the half that can contain the target.
    """

    def search(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        def _pivot_index(lo, hi):
            # Index of the smallest element (the rotation point).
            if nums[lo] < nums[hi]:
                return 0  # array is not rotated
            while lo <= hi:
                mid = (lo + hi) // 2
                if nums[mid] > nums[mid + 1]:
                    return mid + 1
                if nums[mid] < nums[lo]:
                    hi = mid - 1
                else:
                    lo = mid + 1

        def _bsearch(lo, hi):
            # Plain binary search over nums[lo:hi + 1].
            while lo <= hi:
                mid = (lo + hi) // 2
                if nums[mid] == target:
                    return mid
                if target < nums[mid]:
                    hi = mid - 1
                else:
                    lo = mid + 1
            return -1

        n = len(nums)
        if n == 1:
            return 0 if nums[0] == target else -1
        pivot = _pivot_index(0, n - 1)
        # The pivot element is the global minimum.
        if nums[pivot] == target:
            return pivot
        # Not rotated: search the whole array.
        if pivot == 0:
            return _bsearch(0, n - 1)
        # Values below nums[0] live in the right (rotated) part.
        if target < nums[0]:
            return _bsearch(pivot, n - 1)
        return _bsearch(0, pivot)
# One-pass Binary Search
"""
Time complexity : O(logN).
Space complexity : O(1).
"""
class Solution:
    """Search in a rotated sorted array with a single binary-search pass."""

    def search(self, nums: List[int], target: int) -> int:
        """Return the index of *target* in *nums*, or -1 if absent."""
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if nums[mid] == target:
                return mid
            if nums[mid] >= nums[lo]:
                # Left half nums[lo:mid] is sorted.
                if nums[lo] <= target < nums[mid]:
                    hi = mid - 1
                else:
                    lo = mid + 1
            # Otherwise the right half nums[mid:hi + 1] is sorted.
            elif nums[mid] < target <= nums[hi]:
                lo = mid + 1
            else:
                hi = mid - 1
        return -1
| """
Search in a rotated sorted array - Distinct
There is an integer array nums sorted in
ascending order (with distinct values).
Prior to being passed to your function, nums is possibly
rotated at an unknown pivot index k (1 <= k < nums.length)
such that the resulting array is [nums[k], nums[k+1], ...,
nums[n-1], nums[0], nums[1], ..., nums[k-1]] (0-indexed).
For example, [0,1,2,4,5,6,7] might be rotated at pivot
index 3 and become [4,5,6,7,0,1,2].
Given the array nums after the possible rotation and an
integer target, return the index of target if it is in nums,
or -1 if it is not in nums.
You must write an algorithm with O(log n) runtime complexity.
Example 1:
Input: nums = [4,5,6,7,0,1,2], target = 0
Output: 4
Example 2:
Input: nums = [4,5,6,7,0,1,2], target = 3
Output: -1
Example 3:
Input: nums = [1], target = 0
Output: -1
Constraints:
1 <= nums.length <= 5000
-104 <= nums[i] <= 104
All values of nums are unique.
nums is an ascending array that is possibly rotated.
-104 <= target <= 104
"""
'\nTime complexity : O(logN).\nSpace complexity : O(1).\n'
class Solution:
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
def find_rotate_index(left, right):
if nums[left] < nums[right]:
return 0
while left <= right:
pivot = (left + right) // 2
if nums[pivot] > nums[pivot + 1]:
return pivot + 1
elif nums[pivot] < nums[left]:
right = pivot - 1
else:
left = pivot + 1
def search(left, right):
"""
Binary search
"""
while left <= right:
pivot = (left + right) // 2
if nums[pivot] == target:
return pivot
elif target < nums[pivot]:
right = pivot - 1
else:
left = pivot + 1
return -1
n = len(nums)
if n == 1:
return 0 if nums[0] == target else -1
rotate_index = find_rotate_index(0, n - 1)
if nums[rotate_index] == target:
return rotate_index
if rotate_index == 0:
return search(0, n - 1)
if target < nums[0]:
return search(rotate_index, n - 1)
return search(0, rotate_index)
'\nTime complexity : O(logN).\nSpace complexity : O(1).\n'
class Solution:
def search(self, nums: List[int], target: int) -> int:
(start, end) = (0, len(nums) - 1)
while start <= end:
mid = start + (end - start) // 2
if nums[mid] == target:
return mid
elif nums[mid] >= nums[start]:
if target >= nums[start] and target < nums[mid]:
end = mid - 1
else:
start = mid + 1
elif target <= nums[end] and target > nums[mid]:
start = mid + 1
else:
end = mid - 1
return -1 |
# Smoke tests for fixed-width integer annotations.  i8/i16/i32/i64 are
# presumably LPython/compiler-provided width types declared elsewhere —
# confirm; the annotations are not evaluated by CPython at run time.
def test_i8():
    # Declare an 8-bit integer, assign a small literal, and print it.
    i: i8
    i = 5
    print(i)
def test_i16():
    # Same pattern with a 16-bit integer.
    i: i16
    i = 4
    print(i)
def test_i32():
    # Same pattern with a 32-bit integer.
    i: i32
    i = 3
    print(i)
def test_i64():
    # Same pattern with a 64-bit integer.
    i: i64
    i = 2
    print(i)
| def test_i8():
i: i8
i = 5
print(i)
def test_i16():
i: i16
i = 4
print(i)
def test_i32():
i: i32
i = 3
print(i)
def test_i64():
i: i64
i = 2
print(i) |
class MyQueue:
    """FIFO queue backed by a Python list (LeetCode 232 scaffold)."""

    def __init__(self):
        self.queue = []    # storage; index 0 is the queue front
        self.behind = -1   # push counter, offset by -1
        self.front = -1    # successful-pop counter, offset by -1

    def push(self, x: int) -> None:
        """Append *x* to the back of the queue."""
        self.queue.append(x)
        self.behind += 1

    def pop(self) -> int:
        """Remove and return the front element (IndexError if empty)."""
        value = self.queue.pop(0)
        self.front += 1
        return value

    def peek(self) -> int:
        """Return the front element without removing it."""
        return self.queue[0]

    def empty(self) -> bool:
        """True when every pushed element has been popped."""
        return self.behind == self.front
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty() | class Myqueue:
def __init__(self):
self.queue = []
self.behind = -1
self.front = -1
def push(self, x: int) -> None:
self.queue.append(x)
self.behind = self.behind + 1
def pop(self) -> int:
pop_result = self.queue.pop(0)
self.front = self.front + 1
return popResult
def peek(self) -> int:
peek_result = self.queue[0]
return peekResult
def empty(self) -> bool:
if self.behind == self.front:
return True
return False |
def age_assignment(*args, **kwargs):
    """Map each name in *args* to the keyword value keyed by its first letter.

    e.g. age_assignment('Alice', A=30) -> {'Alice': 30}.  Raises KeyError
    if a name's initial has no matching keyword argument.
    """
    return {name: kwargs[name[0]] for name in args}
| def age_assignment(*args, **kwargs):
result = {}
for n in args:
result[n] = kwargs[n[0]]
return result |
# Gross-pay calculator: prompt for hours worked and hourly rate.
hrs = input("Enter Hours:")
h = float(hrs)
rate = input("Enter rate: ")
r = float(rate)
# Hours beyond 40 are paid at time-and-a-half (overtime).
if h <= 40:
    pay = h * r
else:
    pay = r * 40 + (r * 1.5) * (h - 40)
print(pay)
| hrs = input('Enter Hours:')
h = float(hrs)
rate = input('Enter rate: ')
r = float(rate)
if h <= 40:
pay = h * r
else:
pay = r * 40 + r * 1.5 * (h - 40)
print(pay) |
# Demonstrates three ways of reading the same text file.
# 1) Line by line with readline() until EOF (readline returns '' at EOF).
f = open("/home/gangserver/dev/data/new.txt", 'r')
while True:
    line = f.readline()
    if not line: break
    print(line)
f.close()
print("*" * 30)
# 2) All lines at once with readlines(), then iterate the list.
f = open("/home/gangserver/dev/data/new.txt", 'r')
lines = f.readlines()
for line in lines:
    print(line)
f.close()
print("*" * 30)
# 3) The whole file as a single string with read().
f = open("/home/gangserver/dev/data/new.txt", 'r')
data = f.read()
print(data)
f.close()
| f = open('/home/gangserver/dev/data/new.txt', 'r')
while True:
line = f.readline()
if not line:
break
print(line)
f.close()
print('*' * 30)
f = open('/home/gangserver/dev/data/new.txt', 'r')
lines = f.readlines()
for line in lines:
print(line)
f.close()
print('*' * 30)
f = open('/home/gangserver/dev/data/new.txt', 'r')
data = f.read()
print(data)
f.close() |
s = input()
answer = []
for e in s:
if not e in ["a", "i", "u", "e", "o"]:
answer.append(e)
print("".join(answer)) | s = input()
answer = []
for e in s:
if not e in ['a', 'i', 'u', 'e', 'o']:
answer.append(e)
print(''.join(answer)) |
# Demonstrates inheritance and Python's name mangling of "private" attributes.
class Employee:
    def __init__(self):
        # Double underscore triggers name mangling: stored as
        # self._Employee__id, so subclasses cannot reach it as self.__id.
        self.__id=10
        # Single underscore is only a "protected" naming convention.
        self._name="ABC"
        self.salary=100
class Student(Employee):
    def Display(self):
        # print(self.__id) would fail here: mangled to self._Student__id,
        # which does not exist on the instance.
        print(self._name)
        print(self.salary)
ob=Student()
ob.Display()
# Both attributes remain reachable from outside the class; the leading
# underscore on _name is advisory only.
print(ob._name,ob.salary)
| class Employee:
def __init__(self):
self.__id = 10
self._name = 'ABC'
self.salary = 100
class Student(Employee):
def display(self):
print(self._name)
print(self.salary)
ob = student()
ob.Display()
print(ob._name, ob.salary) |
"""
Date: 06/06/2021
Name: Rio Weil
Title: errors.py
Description: Custom errors for project.
"""
"""
Base class for other exceptions
"""
class Error(Exception):
    """Base class for the other exceptions in this module."""
    pass
"""
Error raised when initializing board with zero space or bombs
"""
class ZeroException(Error):
    """Raised when initializing a board with zero space or bombs."""
    pass
"""
Error raised when more bombs than possible spaces on board
"""
class TooManyBombsException(Error):
pass | """
Date: 06/06/2021
Name: Rio Weil
Title: errors.py
Description: Custom errors for project.
"""
'\nBase class for other exceptions\n'
class Error(Exception):
pass
'\nError raised when initializing board with zero space or bombs\n'
class Zeroexception(Error):
pass
'\nError raised when more bombs than possible spaces on board\n'
class Toomanybombsexception(Error):
pass |
#Slicing Strings
"""
b = "hello, word!"
print(b[2:5])
"""
"""
b = "hello, word!"
print(b[2:5])
"""
"""
b = "hello, word!"
print(b[2:])
"""
b = "hello, word!"
print(b[-5:-2]) | """
b = "hello, word!"
print(b[2:5])
"""
'\nb = "hello, word!"\nprint(b[2:5])\n'
'\nb = "hello, word!"\nprint(b[2:])\n'
b = 'hello, word!'
print(b[-5:-2]) |
"""
A function which formats a duration, given as a
number of seconds, in a human-friendly way.
"""
# Created by Egor Kostan.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
def format_duration(seconds: int) -> str:
    """Format a duration in seconds as human-friendly English text.

    Components (years, days, hours, minutes, seconds) appear from most to
    least significant; zero-valued components are skipped; singular units
    have no trailing 's'.  All but the last pair of components are joined
    with ", " and the final pair with " and ".

    Bug fix: the original only inserted " and " between the minutes and
    seconds components, so inputs like one year plus one day rendered
    "1 year, 1 day" instead of "1 year and 1 day".

    :param seconds: non-negative duration in seconds
    :return: human-readable duration, or 'now' for zero
    """
    if seconds == 0:
        return 'now'
    # Decompose with divmod; a year is treated as exactly 365 days.
    minutes, secs = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    years, days = divmod(days, 365)
    parts = [
        '{} {}'.format(value, unit) if value == 1 else '{} {}s'.format(value, unit)
        for value, unit in (
            (years, 'year'),
            (days, 'day'),
            (hours, 'hour'),
            (minutes, 'minute'),
            (secs, 'second'),
        )
        if value > 0
    ]
    if len(parts) == 1:
        return parts[0]
    # Join every component but the last with ", ", the last with " and ".
    return '{} and {}'.format(', '.join(parts[:-1]), parts[-1])
def get_string(number: int, string: str) -> str:
    """Render *number* with unit *string*, pluralised when needed.

    Returns '' for zero or negative counts.
    :param number: component count
    :param string: singular unit name
    :return: e.g. '1 year', '3 days', or '' when number <= 0
    """
    if number <= 0:
        return ''
    suffix = '' if number == 1 else 's'
    return '{} {}{}'.format(number, string, suffix)
def calc_seconds(seconds: int) -> int:
    """Return the seconds component (remainder below one minute).

    :param seconds: total duration in seconds
    :return: seconds left over after whole minutes are removed
    """
    return seconds if seconds < 60 else seconds % 60
def calc_minutes(seconds: int) -> int:
    """Return the minutes component (remainder below one hour).

    :param seconds: total duration in seconds
    :return: minutes left over after whole hours are removed
    """
    minutes = seconds // 60
    return minutes if minutes < 60 else minutes % 60
def calc_hours(seconds: int) -> int:
    """Return the hours component (remainder below one day).

    :param seconds: total duration in seconds
    :return: hours left over after whole days are removed
    """
    hours = seconds // (60 * 60)
    return hours if hours < 24 else hours % 24
def calc_days(seconds: int) -> int:
    """Return the days component (remainder below one 365-day year).

    :param seconds: total duration in seconds
    :return: days left over after whole years are removed
    """
    days = seconds // (60 * 60 * 24)
    return days if days < 365 else days % 365
def calc_years(seconds: int) -> int:
    """Return the number of whole 365-day years in the duration.

    :param seconds: total duration in seconds
    :return: complete years contained in *seconds*
    """
    return seconds // (365 * 24 * 60 * 60)
| """
A function which formats a duration, given as a
number of seconds, in a human-friendly way.
"""
def format_duration(seconds: int) -> str:
"""
A function which formats a duration, given as a
number of seconds, in a human-friendly way.
The resulting expression is made of components like 4 seconds,
1 year, etc. In general, a positive integer and one of the
valid units of time, separated by a space. The unit of time
is used in plural if the integer is greater than 1.
The components are separated by a comma and a space (", ").
Except the last component, which is separated by " and ", just
like it would be written in English.
A more significant units of time will occur before than a least
significant one. Therefore, 1 second and 1 year is not correct,
but 1 year and 1 second is.
Different components have different unit of times. So there is
not repeated units like in 5 seconds and 1 second.
A component will not appear at all if its value happens to be zero.
Hence, 1 minute and 0 seconds is not valid, but it should be just 1 minute.
A unit of time must be used "as much as possible". It means that the
function should not return 61 seconds, but 1 minute and 1 second instead.
Formally, the duration specified by of a component must not be greater than
any valid more significant unit of time.
:param seconds:
:return:
"""
if seconds == 0:
return 'now'
result = ''
years = calc_years(seconds)
days = calc_days(seconds)
hours = calc_hours(seconds)
minutes = calc_minutes(seconds)
seconds = calc_seconds(seconds)
if years > 0:
result += '{}'.format(get_string(years, 'year'))
if days > 0:
if result != '':
result += ', {}'.format(get_string(days, 'day'))
else:
result += '{}'.format(get_string(days, 'day'))
if hours > 0:
if result != '':
result += ', {}'.format(get_string(hours, 'hour'))
else:
result += '{}'.format(get_string(hours, 'hour'))
if minutes > 0:
if result != '':
if seconds == 0:
result += ' and {}'.format(get_string(minutes, 'minute'))
else:
result += ', {}'.format(get_string(minutes, 'minute'))
else:
result += '{}'.format(get_string(minutes, 'minute'))
if seconds > 0:
if result != '':
result += ' and {}'.format(get_string(seconds, 'second'))
else:
result += '{}'.format(get_string(seconds, 'second'))
return result
def get_string(number: int, string: str) -> str:
"""
Concatenate string result
:param number:
:param string:
:return:
"""
result: str = ''
if number > 0:
if number == 1:
result = '{} {}'.format(number, string)
else:
result = '{} {}s'.format(number, string)
return result
def calc_seconds(seconds: int) -> int:
"""
Calculate seconds
:param seconds:
:return:
"""
if seconds < 60:
return seconds
return seconds % 60
def calc_minutes(seconds: int) -> int:
"""
calculate minutes
:param seconds:
:return:
"""
minutes = seconds // 60
if minutes < 60:
return minutes
return minutes % 60
def calc_hours(seconds: int) -> int:
"""
Calculate hours
:param seconds:
:return:
"""
hours = seconds // (60 * 60)
if hours < 24:
return hours
return hours % 24
def calc_days(seconds: int) -> int:
"""
Calculate days
:param seconds:
:return:
"""
days = seconds // (60 * 60 * 24)
if days < 365:
return days
return days % 365
def calc_years(seconds: int) -> int:
"""
Calculate years
:param seconds:
:return:
"""
return seconds // (60 * 60 * 24 * 365) |
def normalize(state):
    """Rescale a 19-element feature vector element-wise.

    Entries 0 and 4-6 are multiplied by fixed factors; the rest are
    standardised with fixed (offset, divisor) pairs.  The constants are
    presumably dataset statistics from the training pipeline — confirm.

    :param state: sequence of at least 19 numeric features
    :return: new list of 19 rescaled values
    """
    return [
        0.001 * state[0],
        (state[1] - 2478330.0752) / 188151.0787,
        (state[2] - 9696.347622) / 14234.47008,
        (state[3] - 2515.821571) / 2615.29795,
        0.01 * state[4],
        0.01 * state[5],
        0.01 * state[6],
        (state[7] - 5177.579363) / 16123.41258,
        (state[8] - 270.7733533) / 1248.568074,
        (state[9] - 35.71445465) / 269.3077132,
        (state[10] - 54.79755959) / 10.66533212,
        (state[11] - 52.87394922) / 11.70261057,
        (state[12] - 52.65588396) / 8.861974927,
        (state[13] - 33.72752522) / 120.876015,
        (state[14] - 25.8489596) / 122.6781467,
        (state[15] - 25.2647541) / 119.2325979,
        (state[16] - 29.77151467) / 21.96257966,
        (state[17] - 30.84369715) / 21.36197042,
        (state[18] - 27.72517914) / 20.37034246,
    ]
| def normalize(state):
temp_state = []
temp_state.append(0.001 * state[0])
temp_state.append((state[1] - 2478330.0752) / 188151.0787)
temp_state.append((state[2] - 9696.347622) / 14234.47008)
temp_state.append((state[3] - 2515.821571) / 2615.29795)
temp_state.append(0.01 * state[4])
temp_state.append(0.01 * state[5])
temp_state.append(0.01 * state[6])
temp_state.append((state[7] - 5177.579363) / 16123.41258)
temp_state.append((state[8] - 270.7733533) / 1248.568074)
temp_state.append((state[9] - 35.71445465) / 269.3077132)
temp_state.append((state[10] - 54.79755959) / 10.66533212)
temp_state.append((state[11] - 52.87394922) / 11.70261057)
temp_state.append((state[12] - 52.65588396) / 8.861974927)
temp_state.append((state[13] - 33.72752522) / 120.876015)
temp_state.append((state[14] - 25.8489596) / 122.6781467)
temp_state.append((state[15] - 25.2647541) / 119.2325979)
temp_state.append((state[16] - 29.77151467) / 21.96257966)
temp_state.append((state[17] - 30.84369715) / 21.36197042)
temp_state.append((state[18] - 27.72517914) / 20.37034246)
return temp_state |
#!/usr/bin/env python3
inp = "(((())))()((((((((())()(()))(()((((()(()(((()((()((()(()()()()()))(((()(()((((((((((())(()()((())()(((())))()(()(()((()(()))(()()()()((()((()(((()()(((((((()()())()((((()()(((((()(())()(())((())()()))()(((((((())(()())(()(((())(()))((())))(()((()())))()())((((())))(()(((((()(())(((()()((()((()((((((((((())(()())))))()))())()()((((()()()()()()((((((())())(((()())()((()()(((()()()))(((((()))(((()(()()()(()(()(((())()))(()(((()((())()(()())())))((()()()(()()(((()))(((()((((()(((((()()(()())((()())())(()((((((()(()()))((((()))))())((())()()((()(()))))((((((((()))(()()(((())())(())()((()()()()((()((()((()()(((())))(()((())()((((((((()((()(()()(((())())())))(())())))()((((()))))))())))()()))()())((()())()((()()()))(()()(((()(())((((())())((((((((()()()()())))()()()((((()()))))))()((((()(((()))(()()())))((()()(((()))()()())())(((())((()()(())()()()(((())))))()())((()))()))((())()()())()())()()(()))())))())()))(())((()(())))(()(())(()))))(()(())())(()(())(()(()))))((()())()))()((((()()))))())))()()())((())()((()()()))()(((()(()))))(())()()))(((()())))))))))(((())))()))())()))))()()(((())))))))()(()()(()))((()))))((())))((()((())))())))()()(()))())()(()((()())(()(()()())())(()()))()))))(()())()()))()()()()))(()(()(()))))))()(()))()))()()(()((())(()(())))()(((())(())())))))()(()(()))))()))(()()()(())()(()(())))()))))()()(((((())))))())()())())())()())()))))()))))))))())()()()()()()())))()))((())()))())))()((())()))))()))())))))))())()()()))()()(()((((()(((((((()(())((()())((()()))()))))(())))()()()(())((())()())))(())))(())))(((()()))()(())(((()(()))((())))())()))((((()))())()))))))))()(())())))(()))()(()()))())()()(())())))())()()(()())))()((()())(()(())(())))))))))))))(()))))()))))))()()())(()(((((()(()())))())()))(()))()))(()()))()())(()))())()(())((()()))))))())))())()(((())))(()(()))()()))()(()))))))((()())(()))))))()())))()()))))))))((((((((()()()(()))))))()())))())))()()((())()))((())(())))())())))()()()((()((()(())))())()(())))))))))()())))()()()()()()))()))(((
))())(()(()))))))(()()))()))(())))()))))))))))))(()))))))))()))))()))()())()))()()))))))()))))((()))))(()))())()(())))(()())((((()())))()))))(()))()(()()(())))))())))))()))))))())))())))))())))())())))())(()))))(())()(())))())()))((()()))))))())))((())))))))())))(())))))()()())))))())))))()))))))()))()()()(()(((()())())())(()))())))))((()(())(()))))))))(())))()()()())())(()))))()()()))()))())())())()(())))()(((()((((())))))))()))))))))))))))))))))((())()())(()))))()()))))))(()()(())())))())))((())))((())))))))))))))()))))()(()))))))())))))()))(()()())(()())))))))))()))))))(())))))()()))()())(((())))()))(()))))))))(())())))())))())())())()()))((())()(())()())()))()())(())(()))))()())))(()(((()))))))()(()())()()()))()))))))))()()()(())()())()(((((()))()())())(()))))()()()(())))())))()((()())))(()))())()(()())())(()))()()))((()()))((()()()()())))(())()))(()(())))((()()))))))))())))))))())()()))))))))))))))))(())()(())(())()())())()))()(()))))())())))))()())()(()))()()(())))(())())))))(()))))))))))))))())())(())(())))(((()))()))))())((())(()))())))))))())))))())))()))()))))))))))))())()))))()))))((()))(())))()(())))(())()))()))())))())))))))()(()())())))()()())))(())))))(()))))))))))))(()))()))()))())))(((()()()(())((()())))()())(((()))(())()))((()()()())))())(())(()))))()(((((())))(()))())())))))))((((()()()))())())()(()(()())))))))))()())())))(())))()())(((()(())())()()))())())))))))((()())((()()(()))(()(())))()))()))(()))(()))()()(()(((())((((()))()(()))((())()(()(()())()(()))()())))))(()))()))())()())))())))(())))((())(()())))))()))(())(()))()())()(()()((()(()))))))()(())(()())(())()))(((())()))(()()(()()()))))(()(())))()))))())))))())(()()()()()()(((())))(()()))()((())(((((()()())))(()))(()))()()))(((())())()(((()()()()))))(()))(())())))()())(()()())())))))))()))))((())))()())(()))(()(()))())))))())(())))))()()())())()))()()(())))(()))(())((((((())(()))(()))())()))(()()(())))()))(()()))()))()(())))(())))((()(()))(())()()())())))(((()()())(())()))))))()(((()(((((()(
)(((())(())))())()((()))))((()())()(())(((())))(((()((()(()(()))(()()))())(()))(())(())))()))))))((((()))()((((()(()))()))()()))))()(()(()))()(()((()(((()(()()(((()))))()(((()(()(()(((()(()())())()()(()(()())())(()((((())(()))()))(((((()()())(())()((()()())))()()(((()()))()((((((((()(())))())((()))))(())))(()))))((()((((()()(())(((((()))(((((((((((((()())))((((()(((()((())())()))((()))()(()()((()()()()(()()(()(()(((())()(()((((((()((()()((())()((((()((()()(()()())((()()()((()((())()(()(((()((())((((())(()))((()(()))(()())()((((((((()(((((((((((()))(()(((()(()()()((((())((())()())()))(())((())(()))(((()((()(())))(()))))((()()))))((((()(()(()())(()(())((((((((()((((()((()(((((()))())()(()))(()()((()(())(((((()(())()(((((()()))))))()(((())()(()()((((())()((())((()(((())(((()))((()()((((()(())))))((()((((()((()((()(((())((()))(((((((()(((()((((((((())()))((((())(((((()((((((((()(((()((()(((()()(((()((((((()()(()((((((((()()(()(()(())((((()())()))))(((()))((((())((((()())((()(())()((()((((((()((((((()(())))()())(((())())())()(())()(()())((()()((((())((((((())(()(((((()((((())()((((()(()(())(()())(((())()((())((((()))()((((((())(()(((()(((()((((((()(((()))(()()())())((()((()())()((((())(((()(()(((((((((())(())))()((()()()()(())((()))(((((((()(((((((((()(()))))(()((((((((()((((()((()()((((((()()(((((((()(()(())()(())((()()()((()(((((()())()(((((()())()()((()(()())(()()()(((()()(((((()((((((()()((()(()()()((((((((((((()((((((((()()(((()())))()(((()()(())())((((()((((()((((()()()(())(())((()(()(((((((((((((((()(())(())))))()()))((()(((()(())((()(((()(()()((((()()(((()(((()(((((()()((()(()(((()))((((((()((((((((()((()((())(((((()(((())(())())((()()))((((())()()((()(((()(((((()()(((()))(((()(()(((((((((((((()))((((((((()(((()))))())((((((((((((())((())((()())(((())((())(()((((((((((()(((())((()()(()((())(((((((((((()))((((((((((((()(()())((()((()((()(()(((()((((((((()()(()((()(()(((()))((()))(((((((((((((()(())((((((())(((()(())(()(()(()((()()))((((()((((()((((())))())((((()((
((()))((((((()((((((()((()(((())))((())(()))(()((()((((()((()(((()()))((((()()()(((((((())(((())(()))())((((()())(((()(((((((((((()(()(()((()(((((((((((((((()()((((()((((((((()(((()()((()((((()))(((()(())((((((()((((())()((((()((()))(())()(()(((()((())())((((((()(()(())())(((())(()(()())(((((()((()((())()())(())))(((()(())))))))(((()(((()))()((()(((()()((()())()()))())))(((()))(()(((()(((((((((()(()(((((()()(((()())()()))))()(((()))(((()(()(()(()(()))()(())()))(()(((())))(()))))))))))(())((()((())((()(())()(())((()()((((()()((()()))((())(((()((()(())(())))()(()(((((()((()))())()(((((()()(((()(()((((((())(()))(())()))((()(()()))(())())()))(((())))(()((()(((())(())())))((()()((((((((((((((()((()(()()(()(((()))())()()((()()()(())(()))(()())(((())((())()(())()()(()()(())))((()(((()))))(((()()(()()))())((()((())()))((((()()()())((())))(((()(())(((((()(((((()((()(()((((()()(((()()()(((()())(((()()((((())(()))(((()))(())())((()))(((()((()))(((()()((())((()(((((()((((()()())((()))()((((()((()(()()()("
def part1():
result = inp.count("(") - inp.count(")")
print("Final floor: {}".format(result))
def part2():
floor = 0
for i in range(len(inp)):
x = inp[i]
if x == "(":
floor += 1
elif x == ")":
floor -= 1
if floor < 0:
print("Reaches basement at position {}".format(i + 1))
return
print("Santa never reached the basement")
part1()
part2()
| inp = '(((())))()((((((((())()(()))(()((((()(()(((()((()((()(()()()()()))(((()(()((((((((((())(()()((())()(((())))()(()(()((()(()))(()()()()((()((()(((()()(((((((()()())()((((()()(((((()(())()(())((())()()))()(((((((())(()())(()(((())(()))((())))(()((()())))()())((((())))(()(((((()(())(((()()((()((()((((((((((())(()())))))()))())()()((((()()()()()()((((((())())(((()())()((()()(((()()()))(((((()))(((()(()()()(()(()(((())()))(()(((()((())()(()())())))((()()()(()()(((()))(((()((((()(((((()()(()())((()())())(()((((((()(()()))((((()))))())((())()()((()(()))))((((((((()))(()()(((())())(())()((()()()()((()((()((()()(((())))(()((())()((((((((()((()(()()(((())())())))(())())))()((((()))))))())))()()))()())((()())()((()()()))(()()(((()(())((((())())((((((((()()()()())))()()()((((()()))))))()((((()(((()))(()()())))((()()(((()))()()())())(((())((()()(())()()()(((())))))()())((()))()))((())()()())()())()()(()))())))())()))(())((()(())))(()(())(()))))(()(())())(()(())(()(()))))((()())()))()((((()()))))())))()()())((())()((()()()))()(((()(()))))(())()()))(((()())))))))))(((())))()))())()))))()()(((())))))))()(()()(()))((()))))((())))((()((())))())))()()(()))())()(()((()())(()(()()())())(()()))()))))(()())()()))()()()()))(()(()(()))))))()(()))()))()()(()((())(()(())))()(((())(())())))))()(()(()))))()))(()()()(())()(()(())))()))))()()(((((())))))())()())())())()())()))))()))))))))())()()()()()()())))()))((())()))())))()((())()))))()))())))))))())()()()))()()(()((((()(((((((()(())((()())((()()))()))))(())))()()()(())((())()())))(())))(())))(((()()))()(())(((()(()))((())))())()))((((()))())()))))))))()(())())))(()))()(()()))())()()(())())))())()()(()())))()((()())(()(())(())))))))))))))(()))))()))))))()()())(()(((((()(()())))())()))(()))()))(()()))()())(()))())()(())((()()))))))())))())()(((())))(()(()))()()))()(()))))))((()())(()))))))()())))()()))))))))((((((((()()()(()))))))()())))())))()()((())()))((())(())))())())))()()()((()((()(())))())()(())))))))))()())))()()()()()()))()))(
(())())(()(()))))))(()()))()))(())))()))))))))))))(()))))))))()))))()))()())()))()()))))))()))))((()))))(()))())()(())))(()())((((()())))()))))(()))()(()()(())))))())))))()))))))())))())))))())))())())))())(()))))(())()(())))())()))((()()))))))())))((())))))))())))(())))))()()())))))())))))()))))))()))()()()(()(((()())())())(()))())))))((()(())(()))))))))(())))()()()())())(()))))()()()))()))())())())()(())))()(((()((((())))))))()))))))))))))))))))))((())()())(()))))()()))))))(()()(())())))())))((())))((())))))))))))))()))))()(()))))))())))))()))(()()())(()())))))))))()))))))(())))))()()))()())(((())))()))(()))))))))(())())))())))())())())()()))((())()(())()())()))()())(())(()))))()())))(()(((()))))))()(()())()()()))()))))))))()()()(())()())()(((((()))()())())(()))))()()()(())))())))()((()())))(()))())()(()())())(()))()()))((()()))((()()()()())))(())()))(()(())))((()()))))))))())))))))())()()))))))))))))))))(())()(())(())()())())()))()(()))))())())))))()())()(()))()()(())))(())())))))(()))))))))))))))())())(())(())))(((()))()))))())((())(()))())))))))())))))())))()))()))))))))))))())()))))()))))((()))(())))()(())))(())()))()))())))())))))))()(()())())))()()())))(())))))(()))))))))))))(()))()))()))())))(((()()()(())((()())))()())(((()))(())()))((()()()())))())(())(()))))()(((((())))(()))())())))))))((((()()()))())())()(()(()())))))))))()())())))(())))()())(((()(())())()()))())())))))))((()())((()()(()))(()(())))()))()))(()))(()))()()(()(((())((((()))()(()))((())()(()(()())()(()))()())))))(()))()))())()())))())))(())))((())(()())))))()))(())(()))()())()(()()((()(()))))))()(())(()())(())()))(((())()))(()()(()()()))))(()(())))()))))())))))())(()()()()()()(((())))(()()))()((())(((((()()())))(()))(()))()()))(((())())()(((()()()()))))(()))(())())))()())(()()())())))))))()))))((())))()())(()))(()(()))())))))())(())))))()()())())()))()()(())))(()))(())((((((())(()))(()))())()))(()()(())))()))(()()))()))()(())))(())))((()(()))(())()()())())))(((()()())(())()))))))()(((()((((((
)()(((())(())))())()((()))))((()())()(())(((())))(((()((()(()(()))(()()))())(()))(())(())))()))))))((((()))()((((()(()))()))()()))))()(()(()))()(()((()(((()(()()(((()))))()(((()(()(()(((()(()())())()()(()(()())())(()((((())(()))()))(((((()()())(())()((()()())))()()(((()()))()((((((((()(())))())((()))))(())))(()))))((()((((()()(())(((((()))(((((((((((((()())))((((()(((()((())())()))((()))()(()()((()()()()(()()(()(()(((())()(()((((((()((()()((())()((((()((()()(()()())((()()()((()((())()(()(((()((())((((())(()))((()(()))(()())()((((((((()(((((((((((()))(()(((()(()()()((((())((())()())()))(())((())(()))(((()((()(())))(()))))((()()))))((((()(()(()())(()(())((((((((()((((()((()(((((()))())()(()))(()()((()(())(((((()(())()(((((()()))))))()(((())()(()()((((())()((())((()(((())(((()))((()()((((()(())))))((()((((()((()((()(((())((()))(((((((()(((()((((((((())()))((((())(((((()((((((((()(((()((()(((()()(((()((((((()()(()((((((((()()(()(()(())((((()())()))))(((()))((((())((((()())((()(())()((()((((((()((((((()(())))()())(((())())())()(())()(()())((()()((((())((((((())(()(((((()((((())()((((()(()(())(()())(((())()((())((((()))()((((((())(()(((()(((()((((((()(((()))(()()())())((()((()())()((((())(((()(()(((((((((())(())))()((()()()()(())((()))(((((((()(((((((((()(()))))(()((((((((()((((()((()()((((((()()(((((((()(()(())()(())((()()()((()(((((()())()(((((()())()()((()(()())(()()()(((()()(((((()((((((()()((()(()()()((((((((((((()((((((((()()(((()())))()(((()()(())())((((()((((()((((()()()(())(())((()(()(((((((((((((((()(())(())))))()()))((()(((()(())((()(((()(()()((((()()(((()(((()(((((()()((()(()(((()))((((((()((((((((()((()((())(((((()(((())(())())((()()))((((())()()((()(((()(((((()()(((()))(((()(()(((((((((((((()))((((((((()(((()))))())((((((((((((())((())((()())(((())((())(()((((((((((()(((())((()()(()((())(((((((((((()))((((((((((((()(()())((()((()((()(()(((()((((((((()()(()((()(()(((()))((()))(((((((((((((()(())((((((())(((()(())(()(()(()((()()))((((()((((()((((())))())((((()
((((()))((((((()((((((()((()(((())))((())(()))(()((()((((()((()(((()()))((((()()()(((((((())(((())(()))())((((()())(((()(((((((((((()(()(()((()(((((((((((((((()()((((()((((((((()(((()()((()((((()))(((()(())((((((()((((())()((((()((()))(())()(()(((()((())())((((((()(()(())())(((())(()(()())(((((()((()((())()())(())))(((()(())))))))(((()(((()))()((()(((()()((()())()()))())))(((()))(()(((()(((((((((()(()(((((()()(((()())()()))))()(((()))(((()(()(()(()(()))()(())()))(()(((())))(()))))))))))(())((()((())((()(())()(())((()()((((()()((()()))((())(((()((()(())(())))()(()(((((()((()))())()(((((()()(((()(()((((((())(()))(())()))((()(()()))(())())()))(((())))(()((()(((())(())())))((()()((((((((((((((()((()(()()(()(((()))())()()((()()()(())(()))(()())(((())((())()(())()()(()()(())))((()(((()))))(((()()(()()))())((()((())()))((((()()()())((())))(((()(())(((((()(((((()((()(()((((()()(((()()()(((()())(((()()((((())(()))(((()))(())())((()))(((()((()))(((()()((())((()(((((()((((()()())((()))()((((()((()(()()()('
def part1():
result = inp.count('(') - inp.count(')')
print('Final floor: {}'.format(result))
def part2():
floor = 0
for i in range(len(inp)):
x = inp[i]
if x == '(':
floor += 1
elif x == ')':
floor -= 1
if floor < 0:
print('Reaches basement at position {}'.format(i + 1))
return
print('Santa never reached the basement')
part1()
part2() |
# -*- coding: utf-8 -*-
"""
exceptions in xalpha packages
"""
class XalphaException(Exception):
pass
class FundTypeError(XalphaException):
"""
The code mismatches the fund type obj, fundinfo/mfundinfo
"""
pass
class FundNotExistError(XalphaException):
"""
There is no fund with given code
"""
pass
class TradeBehaviorError(XalphaException):
"""
Used for unreal trade attempt, such as selling before buying
"""
pass
class ParserFailure(XalphaException):
"""
Used for exception when parsing fund APIs
"""
pass
| """
exceptions in xalpha packages
"""
class Xalphaexception(Exception):
pass
class Fundtypeerror(XalphaException):
"""
The code mismatches the fund type obj, fundinfo/mfundinfo
"""
pass
class Fundnotexisterror(XalphaException):
"""
There is no fund with given code
"""
pass
class Tradebehaviorerror(XalphaException):
"""
Used for unreal trade attempt, such as selling before buying
"""
pass
class Parserfailure(XalphaException):
"""
Used for exception when parsing fund APIs
"""
pass |
INPUT_DIR = "/mnt/input"
OUTPUT_DIR = "/mnt/output"
CONFIG_FILE_NAME = 'config.yml'
CONFIG_SECTION = 'fc_survival_svm'
| input_dir = '/mnt/input'
output_dir = '/mnt/output'
config_file_name = 'config.yml'
config_section = 'fc_survival_svm' |
if __name__ == '__main__':
for _ in range(int(input())):
a = int(input())
A = set(map(int,input().split()))
b = int(input())
B = set(map(int,input().split()))
# if len(A-B) == 0:
# print(True)
# else:
# print(False)
# print(not bool(A.difference(B)))
# print( A.intersection(B) == A)
print(A.issubset(B))
# for _ in range(int(input())):
# x, a, z, b = input(), set(input().split()), input(), set(input().split())
# print(a.issubset(b))
# for i in range(int(input())):
# input()
# A = set(input().split())
# input()
# B = set(input().split())
# print(A <= B)
| if __name__ == '__main__':
for _ in range(int(input())):
a = int(input())
a = set(map(int, input().split()))
b = int(input())
b = set(map(int, input().split()))
print(A.issubset(B)) |
class Node:
def __init__(self, val):
self.val = val
self.next = None
class SinglyLinkedList:
def __init__(self):
self.head = None
def add(self, val):
curNode = self.head
while(curNode.next != None):
curNode = curNode.next
curNode.next = Node(val)
def printLinkedList(self):
curNode = self.head
while(curNode != None):
print(curNode.val)
curNode = curNode.next
list1 = SinglyLinkedList()
list1.head = Node(2)
print(list1.head.val)
| class Node:
def __init__(self, val):
self.val = val
self.next = None
class Singlylinkedlist:
def __init__(self):
self.head = None
def add(self, val):
cur_node = self.head
while curNode.next != None:
cur_node = curNode.next
curNode.next = node(val)
def print_linked_list(self):
cur_node = self.head
while curNode != None:
print(curNode.val)
cur_node = curNode.next
list1 = singly_linked_list()
list1.head = node(2)
print(list1.head.val) |
"""Rules for StringTemplate 4."""
def _impl(ctx):
if not ctx.attr.controller and not ctx.attr.data and not ctx.attr.json:
fail("Missing template data. Provide at least one of \"controller\", \"data\" or \"json\" attributes.")
output = ctx.actions.declare_file(ctx.attr.out)
deps = depset(transitive = [dep.files for dep in ctx.attr.deps])
inputs = ctx.files.src + ctx.files.imports + ctx.files.json
tool_inputs, input_manifests = ctx.resolve_tools(tools = ctx.attr.deps + [ctx.attr._tool] + ctx.attr._deps)
args = ctx.actions.args()
args.add("--adaptor")
args.add(ctx.attr.adaptor)
args.add("--controller")
args.add(ctx.attr.controller)
args.add("--failOnError")
args.add("true" if ctx.attr.failOnError else "false")
args.add("--method")
args.add(ctx.attr.method)
args.add("--encoding")
args.add(ctx.attr.encoding)
args.add("--input")
args.add(ctx.file.src.path)
args.add("--output")
args.add(output.path)
args.add("--raw")
args.add("true" if ctx.attr.raw else "false")
args.add("--startDelim")
args.add(ctx.attr.startDelim)
args.add("--stopDelim")
args.add(ctx.attr.stopDelim)
args.add("--verbose")
args.add("true" if ctx.attr.verbose else "false")
for f in ctx.files.json:
args.add("--json")
args.add(f.path)
for f in ctx.files.imports:
args.add("--import")
args.add(f.path)
# TODO: it would be nice if we could create a custom FilesToRunProvider and use that
# as the executable in order to promote the provided dependencies directly, but
# there is no API for that (see https://github.com/bazelbuild/bazel/issues/7187)
ctx.actions.run(
arguments = [args],
inputs = inputs,
outputs = [output],
mnemonic = "ST4",
executable = ctx.executable._tool,
input_manifests = input_manifests,
progress_message = "Processing ST4 template {}".format(ctx.file.src.path),
tools = tool_inputs,
env = {
"DATA": ctx.attr.data,
"DEPS": ",".join([f.path for f in deps.to_list()] + [f.path for f in ctx.files._deps]),
},
)
return [DefaultInfo(files = depset([output]))]
stringtemplate = rule(
implementation = _impl,
doc = """Runs [StringTemplate 4](https://www.stringtemplate.org/) on a set of grammars.
The template attributes must be provided by at least one of the [`controller`](#stringtemplate-controller),
[`data`](#stringtemplate-data) or [`json`](#stringtemplate-json) attributes.
""",
attrs = {
"adaptor": attr.string(
doc = """The fully-qualified Java class name of the
model adaptor factory to use.
The class must have a public no-args constructor and a
public no-args method named "adaptors" that
returns the mappings between attribute types and model
adaptors as
`java.util.Map<Class<?>, org.stringtemplate.v4.misc.ModelAdaptor>`
""",
),
"controller": attr.string(
doc = """The fully-qualified Java class name of the
controller to use for attribute injection.
The class must have a public no-args constructor and a
public method that returns the attributes as
`java.util.Map<String, Object>`. If no
method name is specified via the "method"
attribute, the method name "attributes" will be
assumed.
""",
),
"deps": attr.label_list(
default = [],
doc = """The dependencies to use. Either to just provide
the necessary dependencies for the controller. But can
also be used for overriding the default
dependencies for StringTemplate.
""",
),
"encoding": attr.string(doc = "The encoding to use for input and output.", default = "UTF-8"),
"failOnError": attr.bool(
default = True,
doc = """Sets whether processing should fail on all errors.
Enabled by default. When disabled, missing or inaccessible
properties don't cause failure.
""",
),
"imports": attr.label_list(
allow_files = True,
doc = """The templates imported by the template to process. Must
include all nested imports as well.
""",
),
"json": attr.label_list(
allow_files = True,
doc = """
The JSON data files to use for attribute injection.
The data is installed into the template after
the results of the controller, if any. If
there are name collisions then the value type
will be automatically converted into a list type by
StringTemplate.
""",
),
"method": attr.string(
doc = """The name of the controller method to invoke.
Can be a static or instance method, the type will
be automatically detected at invocation time. The return type
of the specified method must be of type
`java.util.Map<String, Object>`.
""",
),
"out": attr.string(doc = "The relative path of the resulting file.", mandatory = True),
"data": attr.string(
doc = """The data (in JSON format) to use for attribute injection.
This data is installed last into the template. If
there are name collisions then the value type
will be automatically converted into a list type by
StringTemplate.
""",
),
"raw": attr.bool(
doc = """Use raw template file format (without headers, similar to v3).
Requires StringTemplate 4.0.7 or later.
"""),
"src": attr.label(allow_single_file = True, doc = "The template to process.", mandatory = True),
"startDelim": attr.string(doc = "The character to use as start delimiter in templates.", default = "<"),
"stopDelim": attr.string(doc = "The character to use as stop delimiter in templates.", default = ">"),
"verbose": attr.bool(doc = """Enable verbose output for template construction."""),
"_deps": attr.label_list(
default = [
"@stringtemplate4//jar",
"@antlr3_runtime//jar",
"@javax_json//jar",
],
),
"_tool": attr.label(
executable = True,
cfg = "host",
default = Label("@rules_stringtemplate//src/main/java/org/stringtemplate/bazel"),
),
},
)
| """Rules for StringTemplate 4."""
def _impl(ctx):
if not ctx.attr.controller and (not ctx.attr.data) and (not ctx.attr.json):
fail('Missing template data. Provide at least one of "controller", "data" or "json" attributes.')
output = ctx.actions.declare_file(ctx.attr.out)
deps = depset(transitive=[dep.files for dep in ctx.attr.deps])
inputs = ctx.files.src + ctx.files.imports + ctx.files.json
(tool_inputs, input_manifests) = ctx.resolve_tools(tools=ctx.attr.deps + [ctx.attr._tool] + ctx.attr._deps)
args = ctx.actions.args()
args.add('--adaptor')
args.add(ctx.attr.adaptor)
args.add('--controller')
args.add(ctx.attr.controller)
args.add('--failOnError')
args.add('true' if ctx.attr.failOnError else 'false')
args.add('--method')
args.add(ctx.attr.method)
args.add('--encoding')
args.add(ctx.attr.encoding)
args.add('--input')
args.add(ctx.file.src.path)
args.add('--output')
args.add(output.path)
args.add('--raw')
args.add('true' if ctx.attr.raw else 'false')
args.add('--startDelim')
args.add(ctx.attr.startDelim)
args.add('--stopDelim')
args.add(ctx.attr.stopDelim)
args.add('--verbose')
args.add('true' if ctx.attr.verbose else 'false')
for f in ctx.files.json:
args.add('--json')
args.add(f.path)
for f in ctx.files.imports:
args.add('--import')
args.add(f.path)
ctx.actions.run(arguments=[args], inputs=inputs, outputs=[output], mnemonic='ST4', executable=ctx.executable._tool, input_manifests=input_manifests, progress_message='Processing ST4 template {}'.format(ctx.file.src.path), tools=tool_inputs, env={'DATA': ctx.attr.data, 'DEPS': ','.join([f.path for f in deps.to_list()] + [f.path for f in ctx.files._deps])})
return [default_info(files=depset([output]))]
stringtemplate = rule(implementation=_impl, doc='Runs [StringTemplate 4](https://www.stringtemplate.org/) on a set of grammars.\nThe template attributes must be provided by at least one of the [`controller`](#stringtemplate-controller),\n[`data`](#stringtemplate-data) or [`json`](#stringtemplate-json) attributes.\n', attrs={'adaptor': attr.string(doc='The fully-qualified Java class name of the\n model adaptor factory to use.\n The class must have a public no-args constructor and a\n public no-args method named "adaptors" that\n returns the mappings between attribute types and model\n adaptors as\n `java.util.Map<Class<?>, org.stringtemplate.v4.misc.ModelAdaptor>`\n '), 'controller': attr.string(doc='The fully-qualified Java class name of the\n controller to use for attribute injection.\n The class must have a public no-args constructor and a\n public method that returns the attributes as\n `java.util.Map<String, Object>`. If no\n method name is specified via the "method"\n attribute, the method name "attributes" will be\n assumed.\n '), 'deps': attr.label_list(default=[], doc='The dependencies to use. Either to just provide\n the necessary dependencies for the controller. But can\n also be used for overriding the default\n dependencies for StringTemplate.\n '), 'encoding': attr.string(doc='The encoding to use for input and output.', default='UTF-8'), 'failOnError': attr.bool(default=True, doc="Sets whether processing should fail on all errors.\n Enabled by default. When disabled, missing or inaccessible\n properties don't cause failure.\n "), 'imports': attr.label_list(allow_files=True, doc='The templates imported by the template to process. Must\n include all nested imports as well.\n '), 'json': attr.label_list(allow_files=True, doc='\n The JSON data files to use for attribute injection.\n The data is installed into the template after\n the results of the controller, if any. 
If\n there are name collisions then the value type\n will be automatically converted into a list type by\n StringTemplate.\n '), 'method': attr.string(doc='The name of the controller method to invoke.\n Can be a static or instance method, the type will\n be automatically detected at invocation time. The return type\n of the specified method must be of type\n `java.util.Map<String, Object>`.\n '), 'out': attr.string(doc='The relative path of the resulting file.', mandatory=True), 'data': attr.string(doc='The data (in JSON format) to use for attribute injection.\n This data is installed last into the template. If\n there are name collisions then the value type\n will be automatically converted into a list type by\n StringTemplate.\n '), 'raw': attr.bool(doc='Use raw template file format (without headers, similar to v3).\n Requires StringTemplate 4.0.7 or later.\n '), 'src': attr.label(allow_single_file=True, doc='The template to process.', mandatory=True), 'startDelim': attr.string(doc='The character to use as start delimiter in templates.', default='<'), 'stopDelim': attr.string(doc='The character to use as stop delimiter in templates.', default='>'), 'verbose': attr.bool(doc='Enable verbose output for template construction.'), '_deps': attr.label_list(default=['@stringtemplate4//jar', '@antlr3_runtime//jar', '@javax_json//jar']), '_tool': attr.label(executable=True, cfg='host', default=label('@rules_stringtemplate//src/main/java/org/stringtemplate/bazel'))}) |
# Reversing linked list.
class Node:
def __init__(self, cargo=None, next=None):
self.cargo = cargo
self.next = next
def __str__(self):
return str(self.cargo)
class SimpleLinkedList:
def __init__(self, init_values=[]):
self.length = 0
self.head = None
if len(init_values) > 0:
for value in init_values:
self.insert(value)
def is_empty(self):
return self.head == 0
def insert(self, cargo):
node = Node(cargo)
if self.head is None:
self.head = node
else:
curr = self.head
while curr.next:
curr = curr.next
curr.next = node
self.length += 1
def remove(self):
if self.head is None:
return None
else:
top = self.head
self.head = self.head.next
self.length -= 1
return top
def reverse(self):
previous = None
current = self.head
if self.head is None:
return None
while current.next:
next = current.next
current.next = previous
previous = current
current = next
current.next = previous
self.head = current
linked_list = SimpleLinkedList(init_values=[1, 2, 3, 4, 5])
linked_list.reverse()
| class Node:
def __init__(self, cargo=None, next=None):
self.cargo = cargo
self.next = next
def __str__(self):
return str(self.cargo)
class Simplelinkedlist:
def __init__(self, init_values=[]):
self.length = 0
self.head = None
if len(init_values) > 0:
for value in init_values:
self.insert(value)
def is_empty(self):
return self.head == 0
def insert(self, cargo):
node = node(cargo)
if self.head is None:
self.head = node
else:
curr = self.head
while curr.next:
curr = curr.next
curr.next = node
self.length += 1
def remove(self):
if self.head is None:
return None
else:
top = self.head
self.head = self.head.next
self.length -= 1
return top
def reverse(self):
previous = None
current = self.head
if self.head is None:
return None
while current.next:
next = current.next
current.next = previous
previous = current
current = next
current.next = previous
self.head = current
linked_list = simple_linked_list(init_values=[1, 2, 3, 4, 5])
linked_list.reverse() |
'''
Facebook error classes also see
http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
'''
class OpenFacebookException(Exception):
'''
BaseClass for all open facebook errors
'''
@classmethod
def codes_list(cls):
'''
Returns the codes as a list of instructions
'''
if hasattr(cls, 'codes'):
codes_list = [cls.codes]
if isinstance(cls.codes, list):
codes_list = cls.codes
return codes_list
@classmethod
def range(cls):
'''
Returns for how many codes this Exception, matches with the eventual
goal of matching an error to the most specific error class
'''
range = 0
codes_list = cls.codes_list()
for c in codes_list:
if isinstance(c, tuple):
start, stop = c
range += stop - start + 1
else:
range += 1
#make sure none specific exceptions are last in the order
if not range:
range = 1000
return range
class ParameterException(OpenFacebookException):
'''
100-200
'''
codes = (100, 199)
class UnknownException(OpenFacebookException):
'''
Raised when facebook themselves don't know what went wrong
'''
codes = 1
class OAuthException(OpenFacebookException):
pass
class PermissionException(OAuthException):
'''
200-300
'''
codes = [3, (200, 299)]
class UserPermissionException(PermissionException):
codes = (300, 399)
class FeedActionLimit(UserPermissionException):
'''
When you posted too many times from one user acount
'''
codes = 341
class DuplicateStatusMessage(OpenFacebookException):
codes = 506
class MissingParameter(OpenFacebookException):
pass
class AliasException(OpenFacebookException):
'''
When you send a request to a non existant url facebook gives this error
instead of a 404....
'''
codes = 803
| """
Facebook error classes also see
http://fbdevwiki.com/wiki/Error_codes#User_Permission_Errors
"""
class Openfacebookexception(Exception):
"""
BaseClass for all open facebook errors
"""
@classmethod
def codes_list(cls):
"""
Returns the codes as a list of instructions
"""
if hasattr(cls, 'codes'):
codes_list = [cls.codes]
if isinstance(cls.codes, list):
codes_list = cls.codes
return codes_list
@classmethod
def range(cls):
"""
Returns for how many codes this Exception, matches with the eventual
goal of matching an error to the most specific error class
"""
range = 0
codes_list = cls.codes_list()
for c in codes_list:
if isinstance(c, tuple):
(start, stop) = c
range += stop - start + 1
else:
range += 1
if not range:
range = 1000
return range
class Parameterexception(OpenFacebookException):
"""
100-200
"""
codes = (100, 199)
class Unknownexception(OpenFacebookException):
"""
Raised when facebook themselves don't know what went wrong
"""
codes = 1
class Oauthexception(OpenFacebookException):
pass
class Permissionexception(OAuthException):
"""
200-300
"""
codes = [3, (200, 299)]
class Userpermissionexception(PermissionException):
codes = (300, 399)
class Feedactionlimit(UserPermissionException):
"""
When you posted too many times from one user acount
"""
codes = 341
class Duplicatestatusmessage(OpenFacebookException):
codes = 506
class Missingparameter(OpenFacebookException):
pass
class Aliasexception(OpenFacebookException):
"""
When you send a request to a non existant url facebook gives this error
instead of a 404....
"""
codes = 803 |
def add_native_methods(clazz):
    """Attach the PC/SC native-method stubs to *clazz* as staticmethods.

    Every stub simply raises NotImplementedError until a real native binding
    is provided.
    """
    def SCardEstablishContext__int__(a0):
        raise NotImplementedError()

    def SCardListReaders__long__(a0):
        raise NotImplementedError()

    def SCardConnect__long__java_lang_String__int__int__(a0, a1, a2, a3):
        raise NotImplementedError()

    def SCardTransmit__long__int__byte____int__int__(a0, a1, a2, a3, a4):
        raise NotImplementedError()

    def SCardStatus__long__byte____(a0, a1):
        raise NotImplementedError()

    def SCardDisconnect__long__int__(a0, a1):
        raise NotImplementedError()

    def SCardGetStatusChange__long__long__int____java_lang_String____(a0, a1, a2, a3):
        raise NotImplementedError()

    def SCardBeginTransaction__long__(a0):
        raise NotImplementedError()

    def SCardEndTransaction__long__int__(a0, a1):
        raise NotImplementedError()

    def SCardControl__long__int__byte____(a0, a1, a2):
        raise NotImplementedError()

    # Publish every stub on the class under its own (mangled) name.
    for stub in (
        SCardEstablishContext__int__,
        SCardListReaders__long__,
        SCardConnect__long__java_lang_String__int__int__,
        SCardTransmit__long__int__byte____int__int__,
        SCardStatus__long__byte____,
        SCardDisconnect__long__int__,
        SCardGetStatusChange__long__long__int____java_lang_String____,
        SCardBeginTransaction__long__,
        SCardEndTransaction__long__int__,
        SCardControl__long__int__byte____,
    ):
        setattr(clazz, stub.__name__, staticmethod(stub))
def add_native_methods(clazz):
    """Attach the PC/SC native-method stubs to *clazz* as staticmethods.

    Fixed: the inner stubs had been renamed to snake_case while the
    `staticmethod(...)` assignments still referenced the original CamelCase
    names (NameError at call time), and `not_implemented_error` is not a
    defined name — the builtin is `NotImplementedError`.
    """
    def SCardEstablishContext__int__(a0):
        raise NotImplementedError()

    def SCardListReaders__long__(a0):
        raise NotImplementedError()

    def SCardConnect__long__java_lang_String__int__int__(a0, a1, a2, a3):
        raise NotImplementedError()

    def SCardTransmit__long__int__byte____int__int__(a0, a1, a2, a3, a4):
        raise NotImplementedError()

    def SCardStatus__long__byte____(a0, a1):
        raise NotImplementedError()

    def SCardDisconnect__long__int__(a0, a1):
        raise NotImplementedError()

    def SCardGetStatusChange__long__long__int____java_lang_String____(a0, a1, a2, a3):
        raise NotImplementedError()

    def SCardBeginTransaction__long__(a0):
        raise NotImplementedError()

    def SCardEndTransaction__long__int__(a0, a1):
        raise NotImplementedError()

    def SCardControl__long__int__byte____(a0, a1, a2):
        raise NotImplementedError()

    clazz.SCardEstablishContext__int__ = staticmethod(SCardEstablishContext__int__)
    clazz.SCardListReaders__long__ = staticmethod(SCardListReaders__long__)
    clazz.SCardConnect__long__java_lang_String__int__int__ = staticmethod(SCardConnect__long__java_lang_String__int__int__)
    clazz.SCardTransmit__long__int__byte____int__int__ = staticmethod(SCardTransmit__long__int__byte____int__int__)
    clazz.SCardStatus__long__byte____ = staticmethod(SCardStatus__long__byte____)
    clazz.SCardDisconnect__long__int__ = staticmethod(SCardDisconnect__long__int__)
    clazz.SCardGetStatusChange__long__long__int____java_lang_String____ = staticmethod(SCardGetStatusChange__long__long__int____java_lang_String____)
    clazz.SCardBeginTransaction__long__ = staticmethod(SCardBeginTransaction__long__)
    clazz.SCardEndTransaction__long__int__ = staticmethod(SCardEndTransaction__long__int__)
    clazz.SCardControl__long__int__byte____ = staticmethod(SCardControl__long__int__byte____)
def check_ticket(char, string, integer):
    """Score one 20-character ticket for winning symbol *char*.

    Prints the outcome and returns True for a winning ticket, else False.
    A ticket wins when the first completed run of *char* in each half is
    6-10 characters long; a ticket made entirely of *char* is a jackpot.
    """
    if integer == 20:
        # every character is the winning symbol
        print(f'ticket "{string}" - {integer/2:.0f}{char} Jackpot!')
        return True

    def first_run_length(part):
        # length of the first completed run of `char` inside `part`
        count = 0
        seen = False
        for current in part:
            if current == char:
                count += 1
                seen = True
            elif seen:
                break
        return count

    left_counter = first_run_length(string[:10])
    right_counter = first_run_length(string[10:])
    if left_counter in range(6, 11) and right_counter in range(6, 11):
        print(f'ticket "{string}" - {min(left_counter, right_counter)}{char}')
        return True
    return False
# Read the comma-separated ticket list and score each ticket.
# Fixed: removed the stray `| def check_ticket(...)` data-separator residue
# fused onto the last line, and collapsed the duplicated count/elif chain.
tickets = [el.strip() for el in input().split(", ")]
symbol = ''
for el in tickets:
    result = False
    if len(el) == 20:
        # count every candidate winning symbol once
        counts = {sym: el.count(sym) for sym in "@$#^"}
        digit = max(counts.values())
        # the first symbol (in fixed order) appearing 12+ times is checked
        for sym in "@$#^":
            if counts[sym] in range(12, 21):
                symbol = sym
                result = check_ticket(symbol, el, digit)
                break
        if not result:
            print(f'ticket "{el}" - no match')
    else:
        print("invalid ticket")
def check_ticket(char, string, integer):
    """Print and report whether a 20-character ticket wins on symbol *char*.

    Fixed: the `def` header had been fused (with a `|` separator) onto the
    previous line, leaving this body orphaned; it is restored here.
    """
    if integer == 20:
        # the whole ticket is one symbol: jackpot
        print(f'ticket "{string}" - {integer / 2:.0f}{char} Jackpot!')
        return True
    left_part = string[:10]
    right_part = string[10:]
    is_found = False
    left_counter = 0
    right_counter = 0
    # count the first completed run of `char` in each half
    for el in left_part:
        if el == char:
            left_counter += 1
            is_found = True
        elif is_found:
            break
    is_found = False
    for el in right_part:
        if el == char:
            right_counter += 1
            is_found = True
        elif is_found:
            break
    mina = min(left_counter, right_counter)
    if left_counter in range(6, 11) and right_counter in range(6, 11):
        print(f'ticket "{string}" - {mina}{char}')
        return True
    return False


tickets = [el.strip() for el in input().split(', ')]
symbol = ''
for el in tickets:
    result = False
    len_el = len(el)
    if len_el == 20:
        kliomba = el.count('@')
        dollar = el.count('$')
        ogradka = el.count('#')
        kyshta = el.count('^')
        digit = max(kliomba, dollar, ogradka, kyshta)
        if kliomba in range(12, 21):
            symbol = '@'
            result = check_ticket(symbol, el, digit)
        elif dollar in range(12, 21):
            symbol = '$'
            result = check_ticket(symbol, el, digit)
        elif ogradka in range(12, 21):
            symbol = '#'
            result = check_ticket(symbol, el, digit)
        elif kyshta in range(12, 21):
            symbol = '^'
            result = check_ticket(symbol, el, digit)
        if not result:
            print(f'ticket "{el}" - no match')
    else:
        print('invalid ticket')
""" We need to divide our data set
Get a pivot point
recursively sort each half of the array """
def particion(arr, low, high):
    """Lomuto partition around arr[high]; returns the pivot's final index."""
    pivot = arr[high]
    boundary = low - 1
    for cursor in range(low, high):
        if arr[cursor] <= pivot:
            boundary += 1
            arr[boundary], arr[cursor] = arr[cursor], arr[boundary]
    arr[boundary + 1], arr[high] = arr[high], arr[boundary + 1]
    return boundary + 1
def quickSort(arr, low, high):
    """Recursively quicksort arr[low..high] in place."""
    if low >= high:
        return
    split = particion(arr, low, high)
    quickSort(arr, low, split - 1)
    quickSort(arr, split + 1, high)
# Demo: sort a fixed sample array and print it one element per line.
arr = [1992, 1990, 10, 5, 6, 100, 0, 1, -10]
n = len(arr)
quickSort(arr, 0, n - 1)
print("Ordered array:")
for idx in range(n):
    print("%d" % arr[idx])
| """ We need to divide our data set
Get a pivot point
recursively sort each half of the array """
def particion(arr, low, high):
    """Lomuto partition: place arr[high] at its sorted index and return it."""
    pivot = arr[high]
    store = low - 1
    for scan in range(low, high):
        if arr[scan] <= pivot:
            store += 1
            arr[store], arr[scan] = arr[scan], arr[store]
    arr[store + 1], arr[high] = arr[high], arr[store + 1]
    return store + 1
def quick_sort(arr, low, high):
    """Sort arr[low..high] in place via recursive quicksort."""
    if low >= high:
        return
    pivot_index = particion(arr, low, high)
    quick_sort(arr, low, pivot_index - 1)
    quick_sort(arr, pivot_index + 1, high)
# Demo run: sort a fixed sample array, then print it element by element.
arr = [1992, 1990, 10, 5, 6, 100, 0, 1, -10]
n = len(arr)
quick_sort(arr, 0, n - 1)
print('Ordered array:')
for position in range(n):
    print('%d' % arr[position])
# Time: O(nlogn)
# Space: O(1)
# 1231 biweekly contest 11 10/19/2019
# You have one chocolate bar that consists of some chunks. Each chunk has its own sweetness given by the array sweetness.
#
# You want to share the chocolate with your K friends so you start cutting the chocolate bar into K+1 pieces using
# K cuts, each piece consists of some consecutive chunks.
#
# Being generous, you will eat the piece with the minimum total sweetness and give the other pieces to your friends.
# Find the maximum total sweetness of the piece you can get by cutting the chocolate bar optimally.
#
# Constraints:
# 0 <= K < sweetness.length <= 10^4
# 1 <= sweetness[i] <= 10^5
class Solution(object):
    def maximizeSweetness(self, sweetness, K):
        """
        Binary-search the largest minimum piece total achievable with K cuts.

        :type sweetness: List[int] (non-empty per the stated constraints)
        :type K: int
        :rtype: int
        """
        def check(K, x):
            # True if the bar can be cut into at least K+1 pieces,
            # each with total sweetness >= x
            curr, cuts = 0, 0
            for s in sweetness:
                curr += s
                if curr >= x:
                    curr = 0
                    cuts += 1
                    if cuts >= K + 1:
                        return True
            return False

        l, r = min(sweetness), sum(sweetness) // (K + 1)
        while l < r:
            m = (l + r + 1) // 2  # upper mid so the loop always shrinks
            if check(K, m):
                l = m
            else:
                r = m - 1
        return l

    # O(n^2) dynamic-programming reference implementation
    def maximizeSweetness_dp(self, sweetness, K):
        sz = len(sweetness)
        sums = [0]
        for s in sweetness:
            sums.append(sums[-1] + s)
        dp = [[[None] * (K + 1) for _ in range(sz)] for _ in range(sz)]

        def dfs(i, j, k):
            # best minimum piece total for chunks i..j using k cuts
            if k == 0:
                dp[i][j][k] = sums[j + 1] - sums[i]
            elif k == j - i:
                dp[i][j][k] = min(sweetness[i:j + 1])
            else:
                if dp[i][j][k] is None:  # fixed: was `== None`
                    dp[i][j][k] = float('-inf')
                    for m in range(i, j - k + 1):
                        dp[i][j][k] = max(dp[i][j][k],
                                          min(dfs(i, m, 0), dfs(m + 1, j, k - 1)))
            return dp[i][j][k]

        return dfs(0, sz - 1, K)
# Smoke tests; expected outputs in the trailing comments.
# Fixed: removed the stray `| class Solution(object):` data-separator residue
# fused onto the final line.
print(Solution().maximizeSweetness([1, 2, 3, 4, 5, 6, 7, 8, 9], 5))  # 6
print(Solution().maximizeSweetness([5, 6, 7, 8, 9, 1, 2, 3, 4], 8))  # 1
print(Solution().maximizeSweetness([1, 2, 2, 1, 2, 2, 1, 2, 2], 2))  # 5
print(Solution().maximizeSweetness([1, 2, 3, 4, 5, 6, 7, 8, 9], 0))  # 45
print(Solution().maximizeSweetness([1, 2, 2, 1, 2, 2, 1, 2, 2], 0))  # 15
class Solution(object):
    """Binary-search and DP solutions for the "divide chocolate" problem.

    Fixed: the class header had been fused (with a `|` separator) onto the
    previous line; the smoke tests below called `solution()` (undefined name)
    and the old method name `maximizeSweetness` after the rename.
    """

    def maximize_sweetness(self, sweetness, K):
        """
        Binary-search the largest minimum piece total achievable with K cuts.

        :type sweetness: List[int]
        :type K: int
        :rtype: int
        """
        def check(K, x):
            # True if the bar splits into at least K+1 pieces of total >= x
            (curr, cuts) = (0, 0)
            for s in sweetness:
                curr += s
                if curr >= x:
                    curr = 0
                    cuts += 1
                    if cuts >= K + 1:
                        return True
            return False

        (l, r) = (min(sweetness), sum(sweetness) // (K + 1))
        while l < r:
            m = (l + r + 1) // 2
            if check(K, m):
                l = m
            else:
                r = m - 1
        return l

    def maximize_sweetness_dp(self, sweetness, K):
        # O(n^2) dynamic-programming reference implementation
        sz = len(sweetness)
        sums = [0]
        for s in sweetness:
            sums.append(sums[-1] + s)
        dp = [[[None] * (K + 1) for _ in range(sz)] for _ in range(sz)]

        def dfs(i, j, k):
            if k == 0:
                dp[i][j][k] = sums[j + 1] - sums[i]
            elif k == j - i:
                dp[i][j][k] = min(sweetness[i:j + 1])
            elif dp[i][j][k] is None:  # fixed: was `== None`
                dp[i][j][k] = float('-inf')
                for m in range(i, j - k + 1):
                    dp[i][j][k] = max(dp[i][j][k], min(dfs(i, m, 0), dfs(m + 1, j, k - 1)))
            return dp[i][j][k]

        return dfs(0, sz - 1, K)


print(Solution().maximize_sweetness([1, 2, 3, 4, 5, 6, 7, 8, 9], 5))  # 6
print(Solution().maximize_sweetness([5, 6, 7, 8, 9, 1, 2, 3, 4], 8))  # 1
print(Solution().maximize_sweetness([1, 2, 2, 1, 2, 2, 1, 2, 2], 2))  # 5
print(Solution().maximize_sweetness([1, 2, 3, 4, 5, 6, 7, 8, 9], 0))  # 45
print(Solution().maximize_sweetness([1, 2, 2, 1, 2, 2, 1, 2, 2], 0))  # 15
class BaseModelConfig(object):
    # NOTE(review): the bare annotations below only declare names; except for
    # ``optuna_search`` they create no runtime attributes on the class, so
    # subclasses/instances are expected to supply the actual values.
    scheduler: dict
    concurrency_limiter: dict
    early_stopping: dict
    score_function: dict
    reporter: dict
    tune_best_trial: dict
    stopper: dict
    tune_run: dict
    # NOTE(review): class-level mutable default, shared by every subclass
    # unless overridden -- confirm this sharing is intentional.
    optuna_search: dict = {}
    # used for creating groups during train/val split in hp optimization
    essential_properties: list
    # default model setup
    model_setup: dict
    # transformer specs
    transformer_specs: list
class Basemodelconfig(object):
    """Hyper-parameter-tuning and model-setup configuration container.

    Fixed: removed the stray leading `|` (data-separator residue) from the
    class header and restored the explanatory field comments.
    """
    scheduler: dict
    concurrency_limiter: dict
    early_stopping: dict
    score_function: dict
    reporter: dict
    tune_best_trial: dict
    stopper: dict
    tune_run: dict
    optuna_search: dict = {}
    # used for creating groups during train/val split in hp optimization
    essential_properties: list
    # default model setup
    model_setup: dict
    # transformer specs
    transformer_specs: list
""" Generated Loss CLI parsers """
def binary_crossentropyConfig(argument_parser):
    """
    Set CLI arguments for the binary crossentropy loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the binary crossentropy loss.
Standalone usage:
>>> y_true = [[0, 1], [0, 0]]
>>> y_pred = [[0.6, 0.4], [0.4, 0.6]]
>>> loss = tf.keras.losses.binary_crossentropy(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> loss.numpy()
array([0.916 , 0.714], dtype=float32)
"""
    # (flag, extra keyword arguments); every argument is required
    for flag, extras in (
        ("--y_true", dict(help="Ground truth values. shape = `[batch_size, d0, .. dN]`.")),
        ("--y_pred", dict(help="The predicted values. shape = `[batch_size, d0, .. dN]`.")),
        (
            "--from_logits",
            dict(
                type=bool,
                help="""Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a
probability distribution.""",
                default=False,
            ),
        ),
        (
            "--label_smoothing",
            dict(
                type=int,
                help="Float in [0, 1]. If > `0` then smooth the labels.",
                default=0,
            ),
        ),
    ):
        argument_parser.add_argument(flag, required=True, **extras)
    return (
        argument_parser,
        "```K.mean(K.binary_crossentropy(y_true, y_pred, from_logits=from_logits), axis=-1)```",
    )
def categorical_crossentropyConfig(argument_parser):
    """
    Set CLI arguments for the categorical crossentropy loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the categorical crossentropy loss.
Standalone usage:
>>> y_true = [[0, 1, 0], [0, 0, 1]]
>>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
>>> loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> loss.numpy()
array([0.0513, 2.303], dtype=float32)
"""
    # (flag, extra keyword arguments); every argument is required
    for flag, extras in (
        ("--y_true", dict(help="Tensor of one-hot true targets.")),
        ("--y_pred", dict(help="Tensor of predicted targets.")),
        (
            "--from_logits",
            dict(
                type=bool,
                help="""Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a
probability distribution.""",
                default=False,
            ),
        ),
        (
            "--label_smoothing",
            dict(
                type=int,
                help="Float in [0, 1]. If > `0` then smooth the labels.",
                default=0,
            ),
        ),
    ):
        argument_parser.add_argument(flag, required=True, **extras)
    return (
        argument_parser,
        "```K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)```",
    )
def categorical_hingeConfig(argument_parser):
    """
    Set CLI arguments for the categorical hinge loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the categorical hinge loss between `y_true` and `y_pred`.
`loss = maximum(neg - pos + 1, 0)`
where `neg=maximum((1-y_true)*y_pred) and pos=sum(y_true*y_pred)`
Standalone usage:
>>> y_true = np.random.randint(0, 3, size=(2,))
>>> y_true = tf.keras.utils.to_categorical(y_true, num_classes=3)
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.categorical_hinge(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> pos = np.sum(y_true * y_pred, axis=-1)
>>> neg = np.amax((1. - y_true) * y_pred, axis=-1)
>>> assert np.array_equal(loss.numpy(), np.maximum(0., neg - pos + 1.))
"""
    for flag, helptext in (
        ("--y_true", "The ground truth values. `y_true` values are expected to be 0 or 1."),
        ("--y_pred", "The predicted values."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return argument_parser, "```math_ops.maximum(neg - pos + 1.0, zero)```"
def cosine_similarityConfig(argument_parser):
    """
    Set CLI arguments for the cosine similarity loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the cosine similarity between labels and predictions.
Note that it is a number between -1 and 1. When it is a negative number
between -1 and 0, 0 indicates orthogonality and values closer to -1
indicate greater similarity. The values closer to 1 indicate greater
dissimilarity. This makes it usable as a loss function in a setting
where you try to maximize the proximity between predictions and
targets. If either `y_true` or `y_pred` is a zero vector, cosine
similarity will be 0 regardless of the proximity between predictions
and targets.
`loss = -sum(l2_norm(y_true) * l2_norm(y_pred))`
Standalone usage:
>>> y_true = [[0., 1.], [1., 1.], [1., 1.]]
>>> y_pred = [[1., 0.], [1., 1.], [-1., -1.]]
>>> loss = tf.keras.losses.cosine_similarity(y_true, y_pred, axis=1)
>>> loss.numpy()
array([-0., -0.999, 0.999], dtype=float32)
"""
    for flag, helptext in (
        ("--y_true", "Tensor of true targets."),
        ("--y_pred", "Tensor of predicted targets."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    argument_parser.add_argument(
        "--axis",
        type=int,
        help="Axis along which to determine similarity.",
        required=True,
        default=-1,
    )
    return (argument_parser, "```(-math_ops.reduce_sum(y_true * y_pred, axis=axis))```")
def hingeConfig(argument_parser):
    """
    Set CLI arguments for the hinge loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the hinge loss between `y_true` and `y_pred`.
`loss = mean(maximum(1 - y_true * y_pred, 0), axis=-1)`
Standalone usage:
>>> y_true = np.random.choice([-1, 1], size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.hinge(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(),
... np.mean(np.maximum(1. - y_true * y_pred, 0.), axis=-1))
"""
    for flag, helptext in (
        (
            "--y_true",
            """The ground truth values. `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are
provided they will be converted to -1 or 1. shape = `[batch_size, d0, .. dN]`.""",
        ),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(math_ops.maximum(1.0 - y_true * y_pred, 0.0), axis=-1)```",
    )
def huberConfig(argument_parser):
    """
    Set CLI arguments for the Huber loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes Huber loss value.
For each value x in `error = y_true - y_pred`:
```
loss = 0.5 * x^2 if |x| <= d
loss = 0.5 * d^2 + d * (|x| - d) if |x| > d
```
where d is `delta`. See: https://en.wikipedia.org/wiki/Huber_loss
"""
    for flag, helptext in (
        ("--y_true", "tensor of true targets."),
        ("--y_pred", "tensor of predicted targets."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    argument_parser.add_argument(
        "--delta",
        type=float,
        help="A float, the point where the Huber loss function changes from a quadratic to linear.",
        required=True,
        default=1.0,
    )
    return (
        argument_parser,
        """```K.mean(array_ops.where_v2(abs_error <= delta, half * math_ops.pow(error, 2),
half * math_ops.pow(delta, 2) + delta * (abs_error - delta)), axis=-1)```""",
    )
def kldConfig(argument_parser):
    """
    Set CLI arguments for the Kullback-Leibler divergence loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.
`loss = y_true * log(y_true / y_pred)`
See: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)
>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)
>>> assert np.array_equal(
... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Tensor of true targets."),
        ("--y_pred", "Tensor of predicted targets."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```",
    )
def kl_divergenceConfig(argument_parser):
    """
    Set CLI arguments for the KL divergence loss (alias of ``kld``).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.
`loss = y_true * log(y_true / y_pred)`
See: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)
>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)
>>> assert np.array_equal(
... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Tensor of true targets."),
        ("--y_pred", "Tensor of predicted targets."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```",
    )
def kullback_leibler_divergenceConfig(argument_parser):
    """
    Set CLI arguments for the Kullback-Leibler divergence loss (long alias).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.
`loss = y_true * log(y_true / y_pred)`
See: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)
>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)
>>> assert np.array_equal(
... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Tensor of true targets."),
        ("--y_pred", "Tensor of predicted targets."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```",
    )
def logcoshConfig(argument_parser):
    """
    Set CLI arguments for the log-cosh loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Logarithm of the hyperbolic cosine of the prediction error.
`log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and
to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly
like the mean squared error, but will not be so strongly affected by the
occasional wildly incorrect prediction.
Standalone usage:
>>> y_true = np.random.random(size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.logcosh(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> x = y_pred - y_true
>>> assert np.allclose(
... loss.numpy(),
... np.mean(x + np.log(np.exp(-2. * x) + 1.) - math_ops.log(2.), axis=-1),
... atol=1e-5)
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return argument_parser, "```K.mean(_logcosh(y_pred - y_true), axis=-1)```"
def maeConfig(argument_parser):
    """
    Set CLI arguments for the mean absolute error loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean absolute error between labels and predictions.
`loss = mean(abs(y_true - y_pred), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_absolute_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(), np.mean(np.abs(y_true - y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (argument_parser, "```K.mean(math_ops.abs(y_pred - y_true), axis=-1)```")
def mapeConfig(argument_parser):
    """
    Set CLI arguments for the mean absolute percentage error loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean absolute percentage error between `y_true` and `y_pred`.
`loss = 100 * mean(abs((y_true - y_pred) / y_true), axis=-1)`
Standalone usage:
>>> y_true = np.random.random(size=(2, 3))
>>> y_true = np.maximum(y_true, 1e-7) # Prevent division by zero
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_absolute_percentage_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(),
... 100. * np.mean(np.abs((y_true - y_pred) / y_true), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return argument_parser, "```(100.0 * K.mean(diff, axis=-1))```"
def mean_absolute_errorConfig(argument_parser):
    """
    Set CLI arguments for the mean absolute error loss (long alias of ``mae``).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean absolute error between labels and predictions.
`loss = mean(abs(y_true - y_pred), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_absolute_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(), np.mean(np.abs(y_true - y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (argument_parser, "```K.mean(math_ops.abs(y_pred - y_true), axis=-1)```")
def mean_absolute_percentage_errorConfig(argument_parser):
    """
    Set CLI arguments for the mean absolute percentage error loss (long alias).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean absolute percentage error between `y_true` and `y_pred`.
`loss = 100 * mean(abs((y_true - y_pred) / y_true), axis=-1)`
Standalone usage:
>>> y_true = np.random.random(size=(2, 3))
>>> y_true = np.maximum(y_true, 1e-7) # Prevent division by zero
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_absolute_percentage_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(),
... 100. * np.mean(np.abs((y_true - y_pred) / y_true), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return argument_parser, "```(100.0 * K.mean(diff, axis=-1))```"
def mean_squared_errorConfig(argument_parser):
    """
    Set CLI arguments for the mean squared error loss (long alias of ``mse``).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean squared error between labels and predictions.
After computing the squared distance between the inputs, the mean value over
the last dimension is returned.
`loss = mean(square(y_true - y_pred), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_squared_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(), np.mean(np.square(y_true - y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(math_ops.squared_difference(y_pred, y_true), axis=-1)```",
    )
def mean_squared_logarithmic_errorConfig(argument_parser):
    """
    Set CLI arguments for the mean squared logarithmic error loss (long alias).

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean squared logarithmic error between `y_true` and `y_pred`.
`loss = mean(square(log(y_true + 1) - log(y_pred + 1)), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_squared_logarithmic_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_true = np.maximum(y_true, 1e-7)
>>> y_pred = np.maximum(y_pred, 1e-7)
>>> assert np.allclose(
... loss.numpy(),
... np.mean(
... np.square(np.log(y_true + 1.) - np.log(y_pred + 1.)), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(math_ops.squared_difference(first_log, second_log), axis=-1)```",
    )
def mseConfig(argument_parser):
    """
    Set CLI arguments for the mean squared error loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean squared error between labels and predictions.
After computing the squared distance between the inputs, the mean value over
the last dimension is returned.
`loss = mean(square(y_true - y_pred), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_squared_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(), np.mean(np.square(y_true - y_pred), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(math_ops.squared_difference(y_pred, y_true), axis=-1)```",
    )
def msleConfig(argument_parser):
    """
    Set CLI arguments for the mean squared logarithmic error loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the mean squared logarithmic error between `y_true` and `y_pred`.
`loss = mean(square(log(y_true + 1) - log(y_pred + 1)), axis=-1)`
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.mean_squared_logarithmic_error(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_true = np.maximum(y_true, 1e-7)
>>> y_pred = np.maximum(y_pred, 1e-7)
>>> assert np.allclose(
... loss.numpy(),
... np.mean(
... np.square(np.log(y_true + 1.) - np.log(y_pred + 1.)), axis=-1))
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(math_ops.squared_difference(first_log, second_log), axis=-1)```",
    )
def poissonConfig(argument_parser):
    """
    Set CLI arguments for the Poisson loss.

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser, and the expression this loss evaluates
    :rtype: ```Tuple[ArgumentParser, str]```
    """
    argument_parser.description = """Computes the Poisson loss between y_true and y_pred.
The Poisson loss is the mean of the elements of the `Tensor`
`y_pred - y_true * log(y_pred)`.
Standalone usage:
>>> y_true = np.random.randint(0, 2, size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.poisson(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> y_pred = y_pred + 1e-7
>>> assert np.allclose(
... loss.numpy(), np.mean(y_pred - y_true * np.log(y_pred), axis=-1),
... atol=1e-5)
"""
    for flag, helptext in (
        ("--y_true", "Ground truth values. shape = `[batch_size, d0, .. dN]`."),
        ("--y_pred", "The predicted values. shape = `[batch_size, d0, .. dN]`."),
    ):
        argument_parser.add_argument(flag, help=helptext, required=True)
    return (
        argument_parser,
        "```K.mean(y_pred - y_true * math_ops.log(y_pred + K.epsilon()), axis=-1)```",
    )
def ReductionConfig(argument_parser):
    """
    Set CLI arguments

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser
    :rtype: ```ArgumentParser```
    """
    argument_parser.description = """Types of loss reduction.
Contains the following values:
* `AUTO`: Indicates that the reduction option will be determined by the usage
context. For almost all cases this defaults to `SUM_OVER_BATCH_SIZE`. When
used with `tf.distribute.Strategy`, outside of built-in training loops such
as `tf.keras` `compile` and `fit`, we expect reduction value to be
`SUM` or `NONE`. Using `AUTO` in that case will raise an error.
* `NONE`: Weighted losses with one dimension reduced (axis=-1, or axis
specified by loss function). When this reduction type used with built-in
Keras training loops like `fit`/`evaluate`, the unreduced vector loss is
passed to the optimizer but the reported loss will be a scalar value.
* `SUM`: Scalar sum of weighted losses.
* `SUM_OVER_BATCH_SIZE`: Scalar `SUM` divided by number of elements in losses.
This reduction type is not supported when used with
`tf.distribute.Strategy` outside of built-in training loops like `tf.keras`
`compile`/`fit`.
You can implement 'SUM_OVER_BATCH_SIZE' using global batch size like:
```
with strategy.scope():
loss_obj = tf.keras.losses.CategoricalCrossentropy(
reduction=tf.keras.losses.Reduction.NONE)
....
loss = tf.reduce_sum(loss_obj(labels, predictions)) *
(1. / global_batch_size)
```
Please see the
[custom training guide](https://www.tensorflow.org/tutorials/distribute/custom_training) # pylint: disable=line-too-long
for more details on this."""
    # One CLI option per Reduction constant; the default mirrors the enum value.
    # Registration order (dict insertion order) matches the original call order.
    reductions = {
        "--AUTO": "auto",
        "--NONE": "none",
        "--SUM": "sum",
        "--SUM_OVER_BATCH_SIZE": "sum_over_batch_size",
    }
    for flag, fallback in reductions.items():
        argument_parser.add_argument(flag, required=True, default=fallback)
    return argument_parser
def sparse_categorical_crossentropyConfig(argument_parser):
    """
    Set CLI arguments

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser
    :rtype: ```ArgumentParser```
    """

    def _str_to_bool(value):
        # BUG FIX: the original used ``type=bool`` which treats any non-empty
        # string as True, so ``--from_logits False`` parsed as True.  Map the
        # accepted spellings explicitly; ValueError makes argparse report the
        # usual "invalid value" error.
        lowered = value.strip().lower()
        if lowered in ("true", "t", "yes", "y", "1"):
            return True
        if lowered in ("false", "f", "no", "n", "0"):
            return False
        raise ValueError("expected a boolean, got {!r}".format(value))

    argument_parser.description = """Computes the sparse categorical crossentropy loss.
Standalone usage:
>>> y_true = [1, 2]
>>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]
>>> loss = tf.keras.losses.sparse_categorical_crossentropy(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> loss.numpy()
array([0.0513, 2.303], dtype=float32)
"""
    argument_parser.add_argument("--y_true", help="Ground truth values.", required=True)
    argument_parser.add_argument(
        "--y_pred", help="The predicted values.", required=True
    )
    argument_parser.add_argument(
        "--from_logits",
        type=_str_to_bool,
        help="""Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a
probability distribution.""",
        required=True,
        default=False,
    )
    argument_parser.add_argument(
        "--axis",
        type=int,
        help="(Optional) Defaults to -1. The dimension along which the entropy is computed.",
        default=-1,
    )
    # Second tuple element is the (absent) default implementation expression.
    return argument_parser, "```None```"
def squared_hingeConfig(argument_parser):
    """
    Set CLI arguments

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser
    :rtype: ```ArgumentParser```
    """
    argument_parser.description = """Computes the squared hinge loss between `y_true` and `y_pred`.
`loss = mean(square(maximum(1 - y_true * y_pred, 0)), axis=-1)`
Standalone usage:
>>> y_true = np.random.choice([-1, 1], size=(2, 3))
>>> y_pred = np.random.random(size=(2, 3))
>>> loss = tf.keras.losses.squared_hinge(y_true, y_pred)
>>> assert loss.shape == (2,)
>>> assert np.array_equal(
... loss.numpy(),
... np.mean(np.square(np.maximum(1. - y_true * y_pred, 0.)), axis=-1))
"""
    # Help texts hoisted into locals to keep the registration calls short.
    y_true_help = """The ground truth values. `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are
provided we will convert them to -1 or 1. shape = `[batch_size, d0, .. dN]`."""
    y_pred_help = "The predicted values. shape = `[batch_size, d0, .. dN]`."
    argument_parser.add_argument("--y_true", help=y_true_help, required=True)
    argument_parser.add_argument("--y_pred", help=y_pred_help, required=True)
    # Second element is the loss expression used as the default implementation.
    return (
        argument_parser,
        "```K.mean(math_ops.square(math_ops.maximum(1.0 - y_true * y_pred, 0.0)), axis=-1)```",
    )
# Public API of this generated module: one ``*Config`` parser-builder per
# tf.keras loss function, plus ``ReductionConfig`` for the Reduction enum.
__all__ = [
    "binary_crossentropyConfig",
    "categorical_crossentropyConfig",
    "categorical_hingeConfig",
    "cosine_similarityConfig",
    "hingeConfig",
    "huberConfig",
    "kldConfig",
    "kl_divergenceConfig",
    "kullback_leibler_divergenceConfig",
    "logcoshConfig",
    "maeConfig",
    "mapeConfig",
    "mean_absolute_errorConfig",
    "mean_absolute_percentage_errorConfig",
    "mean_squared_errorConfig",
    "mean_squared_logarithmic_errorConfig",
    "mseConfig",
    "msleConfig",
    "poissonConfig",
    "ReductionConfig",
    "sparse_categorical_crossentropyConfig",
    "squared_hingeConfig",
]
""" Generated Loss CLI parsers """
def binary_crossentropy_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the binary crossentropy loss.\n\nStandalone usage:\n\n>>> y_true = [[0, 1], [0, 0]]\n>>> y_pred = [[0.6, 0.4], [0.4, 0.6]]\n>>> loss = tf.keras.losses.binary_crossentropy(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> loss.numpy()\narray([0.916 , 0.714], dtype=float32)\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--from_logits', type=bool, help='Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a\nprobability distribution.', required=True, default=False)
argument_parser.add_argument('--label_smoothing', type=int, help='Float in [0, 1]. If > `0` then smooth the labels.', required=True, default=0)
return (argument_parser, '```K.mean(K.binary_crossentropy(y_true, y_pred, from_logits=from_logits), axis=-1)```')
def categorical_crossentropy_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the categorical crossentropy loss.\n\nStandalone usage:\n\n>>> y_true = [[0, 1, 0], [0, 0, 1]]\n>>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]\n>>> loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> loss.numpy()\narray([0.0513, 2.303], dtype=float32)\n'
argument_parser.add_argument('--y_true', help='Tensor of one-hot true targets.', required=True)
argument_parser.add_argument('--y_pred', help='Tensor of predicted targets.', required=True)
argument_parser.add_argument('--from_logits', type=bool, help='Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a\nprobability distribution.', required=True, default=False)
argument_parser.add_argument('--label_smoothing', type=int, help='Float in [0, 1]. If > `0` then smooth the labels.', required=True, default=0)
return (argument_parser, '```K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)```')
def categorical_hinge_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the categorical hinge loss between `y_true` and `y_pred`.\n\n`loss = maximum(neg - pos + 1, 0)`\nwhere `neg=maximum((1-y_true)*y_pred) and pos=sum(y_true*y_pred)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 3, size=(2,))\n>>> y_true = tf.keras.utils.to_categorical(y_true, num_classes=3)\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.categorical_hinge(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> pos = np.sum(y_true * y_pred, axis=-1)\n>>> neg = np.amax((1. - y_true) * y_pred, axis=-1)\n>>> assert np.array_equal(loss.numpy(), np.maximum(0., neg - pos + 1.))\n'
argument_parser.add_argument('--y_true', help='The ground truth values. `y_true` values are expected to be 0 or 1.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values.', required=True)
return (argument_parser, '```math_ops.maximum(neg - pos + 1.0, zero)```')
def cosine_similarity_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the cosine similarity between labels and predictions.\n\nNote that it is a number between -1 and 1. When it is a negative number\nbetween -1 and 0, 0 indicates orthogonality and values closer to -1\nindicate greater similarity. The values closer to 1 indicate greater\ndissimilarity. This makes it usable as a loss function in a setting\nwhere you try to maximize the proximity between predictions and\ntargets. If either `y_true` or `y_pred` is a zero vector, cosine\nsimilarity will be 0 regardless of the proximity between predictions\nand targets.\n\n`loss = -sum(l2_norm(y_true) * l2_norm(y_pred))`\n\nStandalone usage:\n\n>>> y_true = [[0., 1.], [1., 1.], [1., 1.]]\n>>> y_pred = [[1., 0.], [1., 1.], [-1., -1.]]\n>>> loss = tf.keras.losses.cosine_similarity(y_true, y_pred, axis=1)\n>>> loss.numpy()\narray([-0., -0.999, 0.999], dtype=float32)\n'
argument_parser.add_argument('--y_true', help='Tensor of true targets.', required=True)
argument_parser.add_argument('--y_pred', help='Tensor of predicted targets.', required=True)
argument_parser.add_argument('--axis', type=int, help='Axis along which to determine similarity.', required=True, default=-1)
return (argument_parser, '```(-math_ops.reduce_sum(y_true * y_pred, axis=axis))```')
def hinge_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the hinge loss between `y_true` and `y_pred`.\n\n`loss = mean(maximum(1 - y_true * y_pred, 0), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.choice([-1, 1], size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.hinge(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(),\n... np.mean(np.maximum(1. - y_true * y_pred, 0.), axis=-1))\n'
argument_parser.add_argument('--y_true', help='The ground truth values. `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are\nprovided they will be converted to -1 or 1. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.maximum(1.0 - y_true * y_pred, 0.0), axis=-1)```')
def huber_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes Huber loss value.\n\nFor each value x in `error = y_true - y_pred`:\n\n```\nloss = 0.5 * x^2 if |x| <= d\nloss = 0.5 * d^2 + d * (|x| - d) if |x| > d\n```\nwhere d is `delta`. See: https://en.wikipedia.org/wiki/Huber_loss\n'
argument_parser.add_argument('--y_true', help='tensor of true targets.', required=True)
argument_parser.add_argument('--y_pred', help='tensor of predicted targets.', required=True)
argument_parser.add_argument('--delta', type=float, help='A float, the point where the Huber loss function changes from a quadratic to linear.', required=True, default=1.0)
return (argument_parser, '```K.mean(array_ops.where_v2(abs_error <= delta, half * math_ops.pow(error, 2),\n half * math_ops.pow(delta, 2) + delta * (abs_error - delta)), axis=-1)```')
def kld_config(argument_parser):
    """
    Set CLI arguments

    :param argument_parser: argument parser
    :type argument_parser: ```ArgumentParser```

    :returns: argument_parser
    :rtype: ```ArgumentParser```
    """
    # Description copied verbatim from the tf.keras KL-divergence docstring.
    argument_parser.description = 'Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.\n\n`loss = y_true * log(y_true / y_pred)`\n\nSee: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)\n>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)\n>>> assert np.array_equal(\n... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))\n'
    argument_parser.add_argument('--y_true', help='Tensor of true targets.', required=True)
    argument_parser.add_argument('--y_pred', help='Tensor of predicted targets.', required=True)
    # Second tuple element is the default implementation expression.
    return (argument_parser, '```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```')
def kl_divergence_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.\n\n`loss = y_true * log(y_true / y_pred)`\n\nSee: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)\n>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)\n>>> assert np.array_equal(\n... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Tensor of true targets.', required=True)
argument_parser.add_argument('--y_pred', help='Tensor of predicted targets.', required=True)
return (argument_parser, '```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```')
def kullback_leibler_divergence_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes Kullback-Leibler divergence loss between `y_true` and `y_pred`.\n\n`loss = y_true * log(y_true / y_pred)`\n\nSee: https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3)).astype(np.float64)\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.kullback_leibler_divergence(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_true = tf.keras.backend.clip(y_true, 1e-7, 1)\n>>> y_pred = tf.keras.backend.clip(y_pred, 1e-7, 1)\n>>> assert np.array_equal(\n... loss.numpy(), np.sum(y_true * np.log(y_true / y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Tensor of true targets.', required=True)
argument_parser.add_argument('--y_pred', help='Tensor of predicted targets.', required=True)
return (argument_parser, '```math_ops.reduce_sum(y_true * math_ops.log(y_true / y_pred), axis=-1)```')
def logcosh_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = "Logarithm of the hyperbolic cosine of the prediction error.\n\n`log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\nto `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\nlike the mean squared error, but will not be so strongly affected by the\noccasional wildly incorrect prediction.\n\nStandalone usage:\n\n>>> y_true = np.random.random(size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.logcosh(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> x = y_pred - y_true\n>>> assert np.allclose(\n... loss.numpy(),\n... np.mean(x + np.log(np.exp(-2. * x) + 1.) - math_ops.log(2.), axis=-1),\n... atol=1e-5)\n"
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(_logcosh(y_pred - y_true), axis=-1)```')
def mae_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean absolute error between labels and predictions.\n\n`loss = mean(abs(y_true - y_pred), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_absolute_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(), np.mean(np.abs(y_true - y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.abs(y_pred - y_true), axis=-1)```')
def mape_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean absolute percentage error between `y_true` and `y_pred`.\n\n`loss = 100 * mean(abs((y_true - y_pred) / y_true), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.random(size=(2, 3))\n>>> y_true = np.maximum(y_true, 1e-7) # Prevent division by zero\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_absolute_percentage_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(),\n... 100. * np.mean(np.abs((y_true - y_pred) / y_true), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```(100.0 * K.mean(diff, axis=-1))```')
def mean_absolute_error_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean absolute error between labels and predictions.\n\n`loss = mean(abs(y_true - y_pred), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_absolute_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(), np.mean(np.abs(y_true - y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.abs(y_pred - y_true), axis=-1)```')
def mean_absolute_percentage_error_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean absolute percentage error between `y_true` and `y_pred`.\n\n`loss = 100 * mean(abs((y_true - y_pred) / y_true), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.random(size=(2, 3))\n>>> y_true = np.maximum(y_true, 1e-7) # Prevent division by zero\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_absolute_percentage_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(),\n... 100. * np.mean(np.abs((y_true - y_pred) / y_true), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```(100.0 * K.mean(diff, axis=-1))```')
def mean_squared_error_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean squared error between labels and predictions.\n\nAfter computing the squared distance between the inputs, the mean value over\nthe last dimension is returned.\n\n`loss = mean(square(y_true - y_pred), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_squared_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(), np.mean(np.square(y_true - y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.squared_difference(y_pred, y_true), axis=-1)```')
def mean_squared_logarithmic_error_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean squared logarithmic error between `y_true` and `y_pred`.\n\n`loss = mean(square(log(y_true + 1) - log(y_pred + 1)), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_squared_logarithmic_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_true = np.maximum(y_true, 1e-7)\n>>> y_pred = np.maximum(y_pred, 1e-7)\n>>> assert np.allclose(\n... loss.numpy(),\n... np.mean(\n... np.square(np.log(y_true + 1.) - np.log(y_pred + 1.)), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.squared_difference(first_log, second_log), axis=-1)```')
def mse_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean squared error between labels and predictions.\n\nAfter computing the squared distance between the inputs, the mean value over\nthe last dimension is returned.\n\n`loss = mean(square(y_true - y_pred), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_squared_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(), np.mean(np.square(y_true - y_pred), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.squared_difference(y_pred, y_true), axis=-1)```')
def msle_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the mean squared logarithmic error between `y_true` and `y_pred`.\n\n`loss = mean(square(log(y_true + 1) - log(y_pred + 1)), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.mean_squared_logarithmic_error(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_true = np.maximum(y_true, 1e-7)\n>>> y_pred = np.maximum(y_pred, 1e-7)\n>>> assert np.allclose(\n... loss.numpy(),\n... np.mean(\n... np.square(np.log(y_true + 1.) - np.log(y_pred + 1.)), axis=-1))\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.squared_difference(first_log, second_log), axis=-1)```')
def poisson_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the Poisson loss between y_true and y_pred.\n\nThe Poisson loss is the mean of the elements of the `Tensor`\n`y_pred - y_true * log(y_pred)`.\n\nStandalone usage:\n\n>>> y_true = np.random.randint(0, 2, size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.poisson(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> y_pred = y_pred + 1e-7\n>>> assert np.allclose(\n... loss.numpy(), np.mean(y_pred - y_true * np.log(y_pred), axis=-1),\n... atol=1e-5)\n'
argument_parser.add_argument('--y_true', help='Ground truth values. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(y_pred - y_true * math_ops.log(y_pred + K.epsilon()), axis=-1)```')
def reduction_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = "Types of loss reduction.\n\nContains the following values:\n\n* `AUTO`: Indicates that the reduction option will be determined by the usage\n context. For almost all cases this defaults to `SUM_OVER_BATCH_SIZE`. When\n used with `tf.distribute.Strategy`, outside of built-in training loops such\n as `tf.keras` `compile` and `fit`, we expect reduction value to be\n `SUM` or `NONE`. Using `AUTO` in that case will raise an error.\n* `NONE`: Weighted losses with one dimension reduced (axis=-1, or axis\n specified by loss function). When this reduction type used with built-in\n Keras training loops like `fit`/`evaluate`, the unreduced vector loss is\n passed to the optimizer but the reported loss will be a scalar value.\n* `SUM`: Scalar sum of weighted losses.\n* `SUM_OVER_BATCH_SIZE`: Scalar `SUM` divided by number of elements in losses.\n This reduction type is not supported when used with\n `tf.distribute.Strategy` outside of built-in training loops like `tf.keras`\n `compile`/`fit`.\n\n You can implement 'SUM_OVER_BATCH_SIZE' using global batch size like:\n ```\n with strategy.scope():\n loss_obj = tf.keras.losses.CategoricalCrossentropy(\n reduction=tf.keras.losses.Reduction.NONE)\n ....\n loss = tf.reduce_sum(loss_obj(labels, predictions)) *\n (1. / global_batch_size)\n ```\n\nPlease see the\n[custom training guide](https://www.tensorflow.org/tutorials/distribute/custom_training) # pylint: disable=line-too-long\nfor more details on this."
argument_parser.add_argument('--AUTO', required=True, default='auto')
argument_parser.add_argument('--NONE', required=True, default='none')
argument_parser.add_argument('--SUM', required=True, default='sum')
argument_parser.add_argument('--SUM_OVER_BATCH_SIZE', required=True, default='sum_over_batch_size')
return argument_parser
def sparse_categorical_crossentropy_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the sparse categorical crossentropy loss.\n\nStandalone usage:\n\n>>> y_true = [1, 2]\n>>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]\n>>> loss = tf.keras.losses.sparse_categorical_crossentropy(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> loss.numpy()\narray([0.0513, 2.303], dtype=float32)\n'
argument_parser.add_argument('--y_true', help='Ground truth values.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values.', required=True)
argument_parser.add_argument('--from_logits', type=bool, help='Whether `y_pred` is expected to be a logits tensor. By default, we assume that `y_pred` encodes a\nprobability distribution.', required=True, default=False)
argument_parser.add_argument('--axis', type=int, help='(Optional) Defaults to -1. The dimension along which the entropy is computed.', default=-1)
return (argument_parser, '```None```')
def squared_hinge_config(argument_parser):
"""
Set CLI arguments
:param argument_parser: argument parser
:type argument_parser: ```ArgumentParser```
:returns: argument_parser
:rtype: ```ArgumentParser```
"""
argument_parser.description = 'Computes the squared hinge loss between `y_true` and `y_pred`.\n\n`loss = mean(square(maximum(1 - y_true * y_pred, 0)), axis=-1)`\n\nStandalone usage:\n\n>>> y_true = np.random.choice([-1, 1], size=(2, 3))\n>>> y_pred = np.random.random(size=(2, 3))\n>>> loss = tf.keras.losses.squared_hinge(y_true, y_pred)\n>>> assert loss.shape == (2,)\n>>> assert np.array_equal(\n... loss.numpy(),\n... np.mean(np.square(np.maximum(1. - y_true * y_pred, 0.)), axis=-1))\n'
argument_parser.add_argument('--y_true', help='The ground truth values. `y_true` values are expected to be -1 or 1. If binary (0 or 1) labels are\nprovided we will convert them to -1 or 1. shape = `[batch_size, d0, .. dN]`.', required=True)
argument_parser.add_argument('--y_pred', help='The predicted values. shape = `[batch_size, d0, .. dN]`.', required=True)
return (argument_parser, '```K.mean(math_ops.square(math_ops.maximum(1.0 - y_true * y_pred, 0.0)), axis=-1)```')
# BUG FIX: this module defines snake_case ``*_config`` functions, but the
# export list carried the camelCase spellings, so ``from module import *``
# would raise on every name.  The list now matches the actual definitions.
__all__ = [
    'binary_crossentropy_config',
    'categorical_crossentropy_config',
    'categorical_hinge_config',
    'cosine_similarity_config',
    'hinge_config',
    'huber_config',
    'kld_config',
    'kl_divergence_config',
    'kullback_leibler_divergence_config',
    'logcosh_config',
    'mae_config',
    'mape_config',
    'mean_absolute_error_config',
    'mean_absolute_percentage_error_config',
    'mean_squared_error_config',
    'mean_squared_logarithmic_error_config',
    'mse_config',
    'msle_config',
    'poisson_config',
    'reduction_config',
    'sparse_categorical_crossentropy_config',
    'squared_hinge_config',
]
name = input()
def foo(name):
    """Print a greeting addressed to the world and then to *name*."""
    greeting = "Hello, world! Hello, " + name
    print(greeting)
foo(name)
name = input()
def foo(name):
    """Print a greeting addressed to the world and then to *name*."""
    print('Hello, world! Hello, ' + name)
foo(name) |
"""
Problem 3:
What is the largest prime factor of the number 600851475143 ?
"""
def is_prime(n):
    """Return True when ``n`` is prime, using trial division up to n // 2.

    BUG FIX: the original loop tested ``n % 2 == 0`` on every iteration
    instead of ``n % i == 0``, so every odd composite (9, 15, 21, ...) was
    reported as prime.  A guard for n < 2 is also added; the ``Prime``
    iterator below only queries candidates >= 2, so its output for real
    callers is unchanged.
    """
    if n < 2:
        return False
    for i in range(2, n // 2 + 1):
        if n % i == 0:
            return False
    return True
class Prime:
    """Endless iterator over the integers accepted by ``is_prime``."""

    def __init__(self):
        # Start one below the first candidate, 2.
        self.curr = 1

    def __iter__(self):
        return self

    def __next__(self):
        candidate = self.curr + 1
        while not is_prime(candidate):
            candidate += 1
        self.curr = candidate
        return candidate
# Trial-divide num by successive primes; the last prime that divides it
# is the largest prime factor (Project Euler #3).
num = 600851475143
largest = 0
for i in Prime():
    if num % i == 0:
        largest = i
        # Strip out every power of this prime factor.
        while num % i == 0 and num >=2:
            num = num // i
    # All factors removed: the most recent divisor was the largest.
    if num == 1:
        break
print('answer:', largest)
| """
Problem 3:
What is the largest prime factor of the number 600851475143 ?
"""
def is_prime(n):
    """Return True if ``n`` is a prime number, False otherwise.

    Bug fix: the original loop tested ``n % 2 == 0`` on every iteration
    instead of ``n % i == 0``, so the loop variable was never used and
    every odd number (9, 15, 21, ...) was reported as prime.  Also rejects
    ``n < 2``, which are not prime by definition.
    """
    if n < 2:
        return False
    for i in range(2, n // 2 + 1):
        if n % i == 0:
            return False
    return True
class Prime:
    """Unbounded iterator yielding the prime numbers in ascending order.

    Uses the module-level ``is_prime`` predicate to filter candidates.
    """

    def __init__(self):
        # Last prime produced (1 before the first call to __next__).
        self.curr = 1

    def __iter__(self):
        return self

    def __next__(self):
        nxt = self.curr + 1
        while not is_prime(nxt):
            nxt += 1
        self.curr = nxt
        return nxt
# Trial-divide num by successive primes; the last prime that divides it
# is the largest prime factor (Project Euler #3).
num = 600851475143
largest = 0
for i in Prime():  # fixed: the iterator class is named Prime; prime() raised NameError
    if num % i == 0:
        largest = i
        # Strip out every power of this prime factor.
        while num % i == 0 and num >= 2:
            num = num // i
    if num == 1:
        break
print('answer:', largest) |
# Read an integer and classify it by trial division over odd candidates.
n = int(input())
flag = True  # stays True when no divisor of n is found
if n == 1 or n == 2:
    flag = True
# Even numbers are flagged immediately; odd ones get trial division.
if n % 2 == 0:
    flag = False
else:
    i = 3
    while i*i <= n:
        if n % i == 0:
            flag = False
            break
        i += 2
# NOTE(review): the labels look inverted for a primality test — n=2 falls
# into the even branch and prints "S", while odd primes print "N".  Confirm
# the intended meaning of "N"/"S" against the original problem statement.
if flag == True:
    print("N")
else:
print("S") | n = int(input())
# (continuation of the `n = int(input())` row on the line above)
flag = True  # stays True when no divisor of n is found
if n == 1 or n == 2:
    flag = True
# Even numbers are flagged immediately; odd ones get trial division.
if n % 2 == 0:
    flag = False
else:
    i = 3
    while i * i <= n:
        if n % i == 0:
            flag = False
            break
        i += 2
# NOTE(review): "N"/"S" look inverted for a primality test (n=2 prints 'S',
# n=7 prints 'N') — confirm the intended semantics.
if flag == True:
    print('N')
else:
    print('S')
def greet(val):
    """Return the greeting string 'Hello, <val>!'."""
    parts = ['Hello, ', val, '!']
    return ''.join(parts)
print(greet('pah'))
| def greet(val):
return 'Hello, ' + val + '!'
print(greet('pah')) |
# import libraries here ...
model = """
data {
int<lower=0> K; // Number of cluster
int<lower=0> N; // Number of observations
real y[N]; // observations
real<lower=0> alpha_shape;
real<lower=0> alpha_rate;
real<lower=0> sigma_shape;
real<lower=0> sigma_rate;
}
parameters {
real mu[K]; // cluster means
// real <lower=0,upper=1> v[K - 1]; // stickbreak components
vector<lower=0,upper=1>[K - 1] v; // stickbreak components
real<lower=0> sigma[K]; // error scale
real<lower=0> alpha; // hyper prior DP(alpha, base)
}
transformed parameters {
simplex[K] eta;
vector<lower=0,upper=1>[K - 1] cumprod_one_minus_v;
cumprod_one_minus_v = exp(cumulative_sum(log1m(v)));
eta[1] = v[1];
eta[2:(K-1)] = v[2:(K-1)] .* cumprod_one_minus_v[1:(K-2)];
eta[K] = cumprod_one_minus_v[K - 1];
}
model {
real ps[K];
// real alpha = 1;
alpha ~ gamma(alpha_shape, alpha_rate); // mean = a/b = shape/rate
sigma ~ gamma(sigma_shape, sigma_rate);
mu ~ normal(0, 3);
v ~ beta(1, alpha);
for(i in 1:N){
for(k in 1:K){
ps[k] = log(eta[k]) + normal_lpdf(y[i] | mu[k], sigma[k]);
}
target += log_sum_exp(ps);
}
}
generated quantities {
real ll;
real ps_[K];
ll = 0;
for(i in 1:N){
for(k in 1:K){
ps_[k] = log(eta[k]) + normal_lpdf(y[i] | mu[k], sigma[k]);
}
ll += log_sum_exp(ps_);
}
}
"""
# NOTE(review): this snippet assumes `pystan` has been imported and that
# `simdata` (a mapping with key 'y') was built earlier in the notebook —
# neither is defined in this file; confirm against the surrounding script.
# Compile the stan model.
sm = pystan.StanModel(model_code=model)
# NOTE: Read data y here ...
# Here, y (a vector of length 500) is noisy univariate draws from a
# mixture distribution with 4 components.
# Construct data dictionary (K = truncation level of the stick-breaking DP).
data = dict(y=simdata['y'], K=10, N=len(simdata['y']),
            alpha_shape=1, alpha_rate=10, sigma_shape=1, sigma_rate=10)
# Approximate posterior via ADVI
# - ADVI is sensitive to starting values. Should run several times and pick run
#   that has best fit (e.g. highest ELBO / log-likelihood).
# - Variational inference works better with more data. Inference is less accurate
#   with small datasets, due to the variational approximation.
fit = sm.vb(data=data, iter=1000, seed=1, algorithm='meanfield',
            adapt_iter=1000, verbose=False, grad_samples=1, elbo_samples=100,
            adapt_engaged=True, output_samples=1000)
### Settings for MCMC ###
burn = 500 # Number of burn-in iterations
nsamples = 500 # Number of samples to keep
niters = burn + nsamples # Number of MCMC (HMC / NUTS) iterations in total
# Sample from posterior via HMC
# NOTE: num_leapfrog = int_time / stepsize.
hmc_fit = sm.sampling(data=data, iter=niters, chains=1, warmup=burn,
                      thin=1, seed=1, algorithm='HMC',
                      control=dict(stepsize=0.01, int_time=1,
                                   adapt_engaged=False))
# Sample from posterior via NUTS (default algorithm, adaptive step size)
nuts_fit = sm.sampling(data=data, iter=niters, chains=1, warmup=burn, thin=1,
                       seed=1)
| model = '\ndata {\n int<lower=0> K; // Number of cluster\n int<lower=0> N; // Number of observations\n real y[N]; // observations\n real<lower=0> alpha_shape;\n real<lower=0> alpha_rate;\n real<lower=0> sigma_shape;\n real<lower=0> sigma_rate;\n}\n\nparameters {\n real mu[K]; // cluster means\n // real <lower=0,upper=1> v[K - 1]; // stickbreak components\n vector<lower=0,upper=1>[K - 1] v; // stickbreak components\n real<lower=0> sigma[K]; // error scale\n real<lower=0> alpha; // hyper prior DP(alpha, base)\n}\n\ntransformed parameters {\n simplex[K] eta;\n vector<lower=0,upper=1>[K - 1] cumprod_one_minus_v;\n\n cumprod_one_minus_v = exp(cumulative_sum(log1m(v)));\n eta[1] = v[1];\n eta[2:(K-1)] = v[2:(K-1)] .* cumprod_one_minus_v[1:(K-2)];\n eta[K] = cumprod_one_minus_v[K - 1];\n}\n\nmodel {\n real ps[K];\n // real alpha = 1;\n \n alpha ~ gamma(alpha_shape, alpha_rate); // mean = a/b = shape/rate \n sigma ~ gamma(sigma_shape, sigma_rate);\n mu ~ normal(0, 3);\n v ~ beta(1, alpha);\n\n for(i in 1:N){\n for(k in 1:K){\n ps[k] = log(eta[k]) + normal_lpdf(y[i] | mu[k], sigma[k]);\n }\n target += log_sum_exp(ps);\n }\n}\n\ngenerated quantities {\n real ll;\n real ps_[K];\n \n ll = 0;\n for(i in 1:N){\n for(k in 1:K){\n ps_[k] = log(eta[k]) + normal_lpdf(y[i] | mu[k], sigma[k]);\n }\n ll += log_sum_exp(ps_);\n } \n}\n'
sm = pystan.StanModel(model_code=model)
data = dict(y=simdata['y'], K=10, N=len(simdata['y']), alpha_shape=1, alpha_rate=10, sigma_shape=1, sigma_rate=10)
fit = sm.vb(data=data, iter=1000, seed=1, algorithm='meanfield', adapt_iter=1000, verbose=False, grad_samples=1, elbo_samples=100, adapt_engaged=True, output_samples=1000)
burn = 500
nsamples = 500
niters = burn + nsamples
hmc_fit = sm.sampling(data=data, iter=niters, chains=1, warmup=burn, thin=1, seed=1, algorithm='HMC', control=dict(stepsize=0.01, int_time=1, adapt_engaged=False))
nuts_fit = sm.sampling(data=data, iter=niters, chains=1, warmup=burn, thin=1, seed=1) |
class MyCircularQueue:
    """Fixed-capacity FIFO queue backed by a circular buffer.

    ``front``/``rear`` hold -1 sentinels while the queue is empty;
    otherwise they are indices into the backing list, advanced modulo
    ``size()`` (the capacity ``k``).
    """

    def __init__(self, k):
        """
        Initialize your data structure here. Set the size of the queue to be k.
        """
        # One spare slot is allocated so that size() == k.
        self.q = [None] * (k + 1)
        self.front = -1
        self.rear = -1

    def enQueue(self, value):
        """
        Insert an element into the circular queue. Return true if the operation is successful.
        """
        if self.isFull():
            return False
        if self.isEmpty():
            self.front = self.rear = 0
        else:
            self.rear = (self.rear + 1) % self.size()
        self.q[self.rear] = value
        return True

    def deQueue(self):
        """
        Delete an element from the circular queue. Return true if the operation is successful.
        """
        if self.isEmpty():
            return False
        if self.front == self.rear:
            # Removing the last element restores the empty sentinels.
            self.front = self.rear = -1
        else:
            self.front = (self.front + 1) % self.size()
        return True

    def Front(self):
        """
        Get the front item from the queue, or -1 if the queue is empty.
        """
        return -1 if self.isEmpty() else self.q[self.front]

    def Rear(self):
        """
        Get the last item from the queue, or -1 if the queue is empty.
        """
        return -1 if self.isEmpty() else self.q[self.rear]

    def isEmpty(self):
        """
        Checks whether the circular queue is empty or not.
        """
        return self.front == -1 and self.rear == -1

    def isFull(self):
        """
        Checks whether the circular queue is full or not.
        """
        return (self.rear + 1) % self.size() == self.front

    def size(self):
        # Capacity k; the backing list holds one unused spare slot.
        return len(self.q) - 1
| class Mycircularqueue:
    def __init__(self, k):
        """
        Initialize your data structure here. Set the size of the queue to be k.
        """
        # One spare slot is allocated so that size() == k.
        self.q = [None] * (k + 1)
        # NOTE(review): these instance attributes shadow the front()/rear()
        # methods defined below, making them uncallable on instances —
        # consider renaming the attributes (e.g. _front/_rear).
        (self.front, self.rear) = (-1, -1)
def en_queue(self, value):
"""
Insert an element into the circular queue. Return true if the operation is successful.
"""
if self.isFull():
return False
elif self.isEmpty():
(self.front, self.rear) = (0, 0)
else:
self.rear = (self.rear + 1) % self.size()
self.q[self.rear] = value
return True
def de_queue(self):
"""
Delete an element from the circular queue. Return true if the operation is successful.
"""
if self.isEmpty():
return False
if self.front == self.rear:
(self.front, self.rear) = (-1, -1)
else:
self.front = (self.front + 1) % self.size()
return True
def front(self):
"""
Get the front item from the queue.
"""
if self.isEmpty():
return -1
item = self.q[self.front]
return item
def rear(self):
"""
Get the last item from the queue.
"""
if self.isEmpty():
return -1
item = self.q[self.rear]
return item
def is_empty(self):
"""
Checks whether the circular queue is empty or not.
"""
return self.front == -1 and self.rear == -1
def is_full(self):
"""
Checks whether the circular queue is full or not.
"""
return (self.rear + 1) % self.size() == self.front
    def size(self):
        # Capacity of the queue: the backing list holds one unused spare slot.
        return len(self.q) - 1
class Events:
    """Namespace of string constants naming event triggers.

    Values mirror the attribute names; the semantics of each trigger are
    not visible from this snippet — confirm against the code that
    dispatches on them.
    """
    ALWAYS = "ALWAYS"
    ON_LOGIN = "ON_LOGIN"
    ON_ITEM_DONE = "ON_ITEM_DONE"
| class Events:
always = 'ALWAYS'
on_login = 'ON_LOGIN'
on_item_done = 'ON_ITEM_DONE' |
#
# PySNMP MIB module HM2-DNS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HM2-DNS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:18:37 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion")
HmActionValue, hm2ConfigurationMibs, HmEnabledStatus = mibBuilder.importSymbols("HM2-TC-MIB", "HmActionValue", "hm2ConfigurationMibs", "HmEnabledStatus")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
TimeTicks, Unsigned32, MibIdentifier, Counter32, ModuleIdentity, Bits, NotificationType, ObjectIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, iso, Gauge32, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Unsigned32", "MibIdentifier", "Counter32", "ModuleIdentity", "Bits", "NotificationType", "ObjectIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "iso", "Gauge32", "Integer32")
TextualConvention, DisplayString, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "RowStatus")
hm2DnsMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 248, 11, 90))
hm2DnsMib.setRevisions(('2011-06-17 00:00',))
if mibBuilder.loadTexts: hm2DnsMib.setLastUpdated('201106170000Z')
if mibBuilder.loadTexts: hm2DnsMib.setOrganization('Hirschmann Automation and Control GmbH')
hm2DnsMibNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 0))
hm2DnsMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1))
hm2DnsMibSNMPExtensionGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 3))
hm2DnsClientGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1))
hm2DnsCacheGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2))
hm2DnsCachingServerGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3))
hm2DnsClientAdminState = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 1), HmEnabledStatus().clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientAdminState.setStatus('current')
hm2DnsClientConfigSource = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("user", 1), ("mgmt-dhcp", 2), ("provider", 3))).clone('mgmt-dhcp')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientConfigSource.setStatus('current')
hm2DnsClientServerCfgTable = MibTable((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3), )
if mibBuilder.loadTexts: hm2DnsClientServerCfgTable.setStatus('current')
hm2DnsClientServerCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1), ).setIndexNames((0, "HM2-DNS-MIB", "hm2DnsClientServerIndex"))
if mibBuilder.loadTexts: hm2DnsClientServerCfgEntry.setStatus('current')
hm2DnsClientServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4)))
if mibBuilder.loadTexts: hm2DnsClientServerIndex.setStatus('current')
hm2DnsClientServerAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientServerAddressType.setStatus('current')
hm2DnsClientServerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 3), InetAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientServerAddress.setStatus('current')
hm2DnsClientServerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hm2DnsClientServerRowStatus.setStatus('current')
hm2DnsClientServerDiagTable = MibTable((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4), )
if mibBuilder.loadTexts: hm2DnsClientServerDiagTable.setStatus('current')
hm2DnsClientServerDiagEntry = MibTableRow((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1), ).setIndexNames((0, "HM2-DNS-MIB", "hm2DnsClientServerDiagIndex"))
if mibBuilder.loadTexts: hm2DnsClientServerDiagEntry.setStatus('current')
hm2DnsClientServerDiagIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4)))
if mibBuilder.loadTexts: hm2DnsClientServerDiagIndex.setStatus('current')
hm2DnsClientServerDiagAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hm2DnsClientServerDiagAddressType.setStatus('current')
hm2DnsClientServerDiagAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hm2DnsClientServerDiagAddress.setStatus('current')
hm2DnsClientGlobalGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5))
hm2DnsClientDefaultDomainName = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientDefaultDomainName.setStatus('current')
hm2DnsClientRequestTimeout = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3600)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientRequestTimeout.setStatus('current')
hm2DnsClientRequestRetransmits = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientRequestRetransmits.setStatus('current')
hm2DnsClientCacheAdminState = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 4), HmEnabledStatus().clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsClientCacheAdminState.setStatus('current')
hm2DnsClientStaticHostConfigTable = MibTable((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6), )
if mibBuilder.loadTexts: hm2DnsClientStaticHostConfigTable.setStatus('current')
hm2DnsClientStaticHostConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1), ).setIndexNames((0, "HM2-DNS-MIB", "hm2DnsClientStaticIndex"))
if mibBuilder.loadTexts: hm2DnsClientStaticHostConfigEntry.setStatus('current')
hm2DnsClientStaticIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64)))
if mibBuilder.loadTexts: hm2DnsClientStaticIndex.setStatus('current')
hm2DnsClientStaticHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 2), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hm2DnsClientStaticHostName.setStatus('current')
hm2DnsClientStaticHostAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 3), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hm2DnsClientStaticHostAddressType.setStatus('current')
hm2DnsClientStaticHostIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 4), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hm2DnsClientStaticHostIPAddress.setStatus('current')
hm2DnsClientStaticHostStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hm2DnsClientStaticHostStatus.setStatus('current')
hm2DnsCachingServerGlobalGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3, 1))
hm2DnsCachingServerAdminState = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3, 1, 1), HmEnabledStatus().clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsCachingServerAdminState.setStatus('current')
hm2DnsCacheAdminState = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2, 1), HmEnabledStatus().clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsCacheAdminState.setStatus('deprecated')
hm2DnsCacheFlushAction = MibScalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2, 2), HmActionValue().clone('noop')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hm2DnsCacheFlushAction.setStatus('deprecated')
hm2DnsCHHostNameAlreadyExistsSESError = ObjectIdentity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 1))
if mibBuilder.loadTexts: hm2DnsCHHostNameAlreadyExistsSESError.setStatus('current')
hm2DnsCHBadIpNotAcceptedSESError = ObjectIdentity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 2))
if mibBuilder.loadTexts: hm2DnsCHBadIpNotAcceptedSESError.setStatus('current')
hm2DnsCHBadRowCannotBeActivatedSESError = ObjectIdentity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 3))
if mibBuilder.loadTexts: hm2DnsCHBadRowCannotBeActivatedSESError.setStatus('current')
mibBuilder.exportSymbols("HM2-DNS-MIB", hm2DnsClientServerDiagTable=hm2DnsClientServerDiagTable, hm2DnsCacheFlushAction=hm2DnsCacheFlushAction, hm2DnsCacheAdminState=hm2DnsCacheAdminState, hm2DnsClientServerDiagEntry=hm2DnsClientServerDiagEntry, hm2DnsClientStaticHostAddressType=hm2DnsClientStaticHostAddressType, hm2DnsCacheGroup=hm2DnsCacheGroup, hm2DnsClientServerRowStatus=hm2DnsClientServerRowStatus, hm2DnsCachingServerGlobalGroup=hm2DnsCachingServerGlobalGroup, hm2DnsClientGlobalGroup=hm2DnsClientGlobalGroup, hm2DnsClientServerCfgEntry=hm2DnsClientServerCfgEntry, PYSNMP_MODULE_ID=hm2DnsMib, hm2DnsCHBadRowCannotBeActivatedSESError=hm2DnsCHBadRowCannotBeActivatedSESError, hm2DnsClientStaticHostIPAddress=hm2DnsClientStaticHostIPAddress, hm2DnsMibObjects=hm2DnsMibObjects, hm2DnsCHHostNameAlreadyExistsSESError=hm2DnsCHHostNameAlreadyExistsSESError, hm2DnsClientServerCfgTable=hm2DnsClientServerCfgTable, hm2DnsClientStaticHostStatus=hm2DnsClientStaticHostStatus, hm2DnsClientServerDiagAddress=hm2DnsClientServerDiagAddress, hm2DnsClientCacheAdminState=hm2DnsClientCacheAdminState, hm2DnsCachingServerGroup=hm2DnsCachingServerGroup, hm2DnsMibSNMPExtensionGroup=hm2DnsMibSNMPExtensionGroup, hm2DnsClientRequestTimeout=hm2DnsClientRequestTimeout, hm2DnsClientRequestRetransmits=hm2DnsClientRequestRetransmits, hm2DnsCachingServerAdminState=hm2DnsCachingServerAdminState, hm2DnsClientGroup=hm2DnsClientGroup, hm2DnsMib=hm2DnsMib, hm2DnsMibNotifications=hm2DnsMibNotifications, hm2DnsClientServerAddress=hm2DnsClientServerAddress, hm2DnsClientServerIndex=hm2DnsClientServerIndex, hm2DnsClientServerAddressType=hm2DnsClientServerAddressType, hm2DnsClientAdminState=hm2DnsClientAdminState, hm2DnsClientStaticHostName=hm2DnsClientStaticHostName, hm2DnsCHBadIpNotAcceptedSESError=hm2DnsCHBadIpNotAcceptedSESError, hm2DnsClientServerDiagAddressType=hm2DnsClientServerDiagAddressType, hm2DnsClientStaticHostConfigEntry=hm2DnsClientStaticHostConfigEntry, 
hm2DnsClientDefaultDomainName=hm2DnsClientDefaultDomainName, hm2DnsClientStaticIndex=hm2DnsClientStaticIndex, hm2DnsClientStaticHostConfigTable=hm2DnsClientStaticHostConfigTable, hm2DnsClientServerDiagIndex=hm2DnsClientServerDiagIndex, hm2DnsClientConfigSource=hm2DnsClientConfigSource)
| (integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, value_size_constraint, constraints_intersection, single_value_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ValueSizeConstraint', 'ConstraintsIntersection', 'SingleValueConstraint', 'ConstraintsUnion')
(hm_action_value, hm2_configuration_mibs, hm_enabled_status) = mibBuilder.importSymbols('HM2-TC-MIB', 'HmActionValue', 'hm2ConfigurationMibs', 'HmEnabledStatus')
(inet_address_type, inet_address) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetAddressType', 'InetAddress')
(snmp_admin_string,) = mibBuilder.importSymbols('SNMP-FRAMEWORK-MIB', 'SnmpAdminString')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(time_ticks, unsigned32, mib_identifier, counter32, module_identity, bits, notification_type, object_identity, ip_address, mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, iso, gauge32, integer32) = mibBuilder.importSymbols('SNMPv2-SMI', 'TimeTicks', 'Unsigned32', 'MibIdentifier', 'Counter32', 'ModuleIdentity', 'Bits', 'NotificationType', 'ObjectIdentity', 'IpAddress', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'iso', 'Gauge32', 'Integer32')
(textual_convention, display_string, row_status) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString', 'RowStatus')
hm2_dns_mib = module_identity((1, 3, 6, 1, 4, 1, 248, 11, 90))
hm2DnsMib.setRevisions(('2011-06-17 00:00',))
if mibBuilder.loadTexts:
hm2DnsMib.setLastUpdated('201106170000Z')
if mibBuilder.loadTexts:
hm2DnsMib.setOrganization('Hirschmann Automation and Control GmbH')
hm2_dns_mib_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 0))
hm2_dns_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1))
hm2_dns_mib_snmp_extension_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 3))
hm2_dns_client_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1))
hm2_dns_cache_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2))
hm2_dns_caching_server_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3))
hm2_dns_client_admin_state = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 1), hm_enabled_status().clone('disable')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientAdminState.setStatus('current')
hm2_dns_client_config_source = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3))).clone(namedValues=named_values(('user', 1), ('mgmt-dhcp', 2), ('provider', 3))).clone('mgmt-dhcp')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientConfigSource.setStatus('current')
hm2_dns_client_server_cfg_table = mib_table((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3))
if mibBuilder.loadTexts:
hm2DnsClientServerCfgTable.setStatus('current')
hm2_dns_client_server_cfg_entry = mib_table_row((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1)).setIndexNames((0, 'HM2-DNS-MIB', 'hm2DnsClientServerIndex'))
if mibBuilder.loadTexts:
hm2DnsClientServerCfgEntry.setStatus('current')
hm2_dns_client_server_index = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 4)))
if mibBuilder.loadTexts:
hm2DnsClientServerIndex.setStatus('current')
hm2_dns_client_server_address_type = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 2), inet_address_type().clone('ipv4')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientServerAddressType.setStatus('current')
hm2_dns_client_server_address = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 3), inet_address().clone(hexValue='00000000')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientServerAddress.setStatus('current')
hm2_dns_client_server_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 3, 1, 4), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
hm2DnsClientServerRowStatus.setStatus('current')
hm2_dns_client_server_diag_table = mib_table((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4))
if mibBuilder.loadTexts:
hm2DnsClientServerDiagTable.setStatus('current')
hm2_dns_client_server_diag_entry = mib_table_row((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1)).setIndexNames((0, 'HM2-DNS-MIB', 'hm2DnsClientServerDiagIndex'))
if mibBuilder.loadTexts:
hm2DnsClientServerDiagEntry.setStatus('current')
hm2_dns_client_server_diag_index = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 4)))
if mibBuilder.loadTexts:
hm2DnsClientServerDiagIndex.setStatus('current')
hm2_dns_client_server_diag_address_type = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 2), inet_address_type()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
hm2DnsClientServerDiagAddressType.setStatus('current')
hm2_dns_client_server_diag_address = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 4, 1, 3), inet_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
hm2DnsClientServerDiagAddress.setStatus('current')
hm2_dns_client_global_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5))
hm2_dns_client_default_domain_name = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 1), snmp_admin_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientDefaultDomainName.setStatus('current')
hm2_dns_client_request_timeout = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 2), integer32().subtype(subtypeSpec=value_range_constraint(0, 3600)).clone(3)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientRequestTimeout.setStatus('current')
hm2_dns_client_request_retransmits = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 3), integer32().subtype(subtypeSpec=value_range_constraint(0, 100)).clone(2)).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientRequestRetransmits.setStatus('current')
hm2_dns_client_cache_admin_state = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 5, 4), hm_enabled_status().clone('enable')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsClientCacheAdminState.setStatus('current')
hm2_dns_client_static_host_config_table = mib_table((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6))
if mibBuilder.loadTexts:
hm2DnsClientStaticHostConfigTable.setStatus('current')
hm2_dns_client_static_host_config_entry = mib_table_row((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1)).setIndexNames((0, 'HM2-DNS-MIB', 'hm2DnsClientStaticIndex'))
if mibBuilder.loadTexts:
hm2DnsClientStaticHostConfigEntry.setStatus('current')
hm2_dns_client_static_index = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 64)))
if mibBuilder.loadTexts:
hm2DnsClientStaticIndex.setStatus('current')
hm2_dns_client_static_host_name = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 2), snmp_admin_string().subtype(subtypeSpec=value_size_constraint(0, 255))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
hm2DnsClientStaticHostName.setStatus('current')
hm2_dns_client_static_host_address_type = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 3), inet_address_type()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
hm2DnsClientStaticHostAddressType.setStatus('current')
hm2_dns_client_static_host_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 4), inet_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
hm2DnsClientStaticHostIPAddress.setStatus('current')
hm2_dns_client_static_host_status = mib_table_column((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 1, 6, 1, 5), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
hm2DnsClientStaticHostStatus.setStatus('current')
hm2_dns_caching_server_global_group = mib_identifier((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3, 1))
hm2_dns_caching_server_admin_state = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 3, 1, 1), hm_enabled_status().clone('enable')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsCachingServerAdminState.setStatus('current')
hm2_dns_cache_admin_state = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2, 1), hm_enabled_status().clone('enable')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsCacheAdminState.setStatus('deprecated')
hm2_dns_cache_flush_action = mib_scalar((1, 3, 6, 1, 4, 1, 248, 11, 90, 1, 2, 2), hm_action_value().clone('noop')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
hm2DnsCacheFlushAction.setStatus('deprecated')
hm2_dns_ch_host_name_already_exists_ses_error = object_identity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 1))
if mibBuilder.loadTexts:
hm2DnsCHHostNameAlreadyExistsSESError.setStatus('current')
hm2_dns_ch_bad_ip_not_accepted_ses_error = object_identity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 2))
if mibBuilder.loadTexts:
hm2DnsCHBadIpNotAcceptedSESError.setStatus('current')
hm2_dns_ch_bad_row_cannot_be_activated_ses_error = object_identity((1, 3, 6, 1, 4, 1, 248, 11, 90, 3, 3))
if mibBuilder.loadTexts:
hm2DnsCHBadRowCannotBeActivatedSESError.setStatus('current')
mibBuilder.exportSymbols('HM2-DNS-MIB', hm2DnsClientServerDiagTable=hm2DnsClientServerDiagTable, hm2DnsCacheFlushAction=hm2DnsCacheFlushAction, hm2DnsCacheAdminState=hm2DnsCacheAdminState, hm2DnsClientServerDiagEntry=hm2DnsClientServerDiagEntry, hm2DnsClientStaticHostAddressType=hm2DnsClientStaticHostAddressType, hm2DnsCacheGroup=hm2DnsCacheGroup, hm2DnsClientServerRowStatus=hm2DnsClientServerRowStatus, hm2DnsCachingServerGlobalGroup=hm2DnsCachingServerGlobalGroup, hm2DnsClientGlobalGroup=hm2DnsClientGlobalGroup, hm2DnsClientServerCfgEntry=hm2DnsClientServerCfgEntry, PYSNMP_MODULE_ID=hm2DnsMib, hm2DnsCHBadRowCannotBeActivatedSESError=hm2DnsCHBadRowCannotBeActivatedSESError, hm2DnsClientStaticHostIPAddress=hm2DnsClientStaticHostIPAddress, hm2DnsMibObjects=hm2DnsMibObjects, hm2DnsCHHostNameAlreadyExistsSESError=hm2DnsCHHostNameAlreadyExistsSESError, hm2DnsClientServerCfgTable=hm2DnsClientServerCfgTable, hm2DnsClientStaticHostStatus=hm2DnsClientStaticHostStatus, hm2DnsClientServerDiagAddress=hm2DnsClientServerDiagAddress, hm2DnsClientCacheAdminState=hm2DnsClientCacheAdminState, hm2DnsCachingServerGroup=hm2DnsCachingServerGroup, hm2DnsMibSNMPExtensionGroup=hm2DnsMibSNMPExtensionGroup, hm2DnsClientRequestTimeout=hm2DnsClientRequestTimeout, hm2DnsClientRequestRetransmits=hm2DnsClientRequestRetransmits, hm2DnsCachingServerAdminState=hm2DnsCachingServerAdminState, hm2DnsClientGroup=hm2DnsClientGroup, hm2DnsMib=hm2DnsMib, hm2DnsMibNotifications=hm2DnsMibNotifications, hm2DnsClientServerAddress=hm2DnsClientServerAddress, hm2DnsClientServerIndex=hm2DnsClientServerIndex, hm2DnsClientServerAddressType=hm2DnsClientServerAddressType, hm2DnsClientAdminState=hm2DnsClientAdminState, hm2DnsClientStaticHostName=hm2DnsClientStaticHostName, hm2DnsCHBadIpNotAcceptedSESError=hm2DnsCHBadIpNotAcceptedSESError, hm2DnsClientServerDiagAddressType=hm2DnsClientServerDiagAddressType, hm2DnsClientStaticHostConfigEntry=hm2DnsClientStaticHostConfigEntry, 
hm2DnsClientDefaultDomainName=hm2DnsClientDefaultDomainName, hm2DnsClientStaticIndex=hm2DnsClientStaticIndex, hm2DnsClientStaticHostConfigTable=hm2DnsClientStaticHostConfigTable, hm2DnsClientServerDiagIndex=hm2DnsClientServerDiagIndex, hm2DnsClientConfigSource=hm2DnsClientConfigSource) |
"""Exceptions when there is a problem with a call to the LBRY daemons."""
def print_request(request):
    """Dump a prepared request (method, URL, headers, body) to stdout.

    :param request.PreparedRequest request: PreparedRequest object to be printed
    :return: Nothing
    """
    header_lines = '\n'.join('{}: {}'.format(name, value)
                             for name, value in request.headers.items())
    print("-----------START-----------")
    print(request.method + " " + request.url)
    print(header_lines)
    print()
    print(request.body)
class LBRYError(Exception):
    """Raised when a call to lbrynet fails.

    Carries the JSON response, the HTTP status code, and the
    PreparedRequest that triggered the failure.
    """

    def __init__(self, message, response, status_code, request):
        """
        :param str message: Message to Display
        :param dict response: JSON Response received from LBRY
        :param int status_code: HTTP Status code received from HTTP request
        :param request.PreparedRequest request: PreparedRequest object which raised the exception
        """
        super().__init__(message)
        self.request = request
        self.status_code = status_code
        self.response = response
| """Exceptions when there is a problem with a call to the LBRY daemons."""
def print_request(request):
"""Print a prepared request to give the user info as to what they're sending.
:param request.PreparedRequest request: PreparedRequest object to be printed
:return: Nothing
"""
start = '-----------START-----------'
req = request.method + ' ' + request.url
items = '\n'.join(('{}: {}'.format(k, v) for (k, v) in request.headers.items()))
body = request.body
print(start)
print(req)
print(items)
print()
print(body)
class Lbryerror(Exception):
def __init__(self, message, response, status_code, request):
"""Exception raised when there is a problem with a call to lbrynet.
:param str message: Message to Display
:param dict response: JSON Response received from LBRY
:param int status_code: HTTP Status code received from HTTP request
:param request.PreparedRequest request: PreparedRequest object which raised the exception
"""
super().__init__(message)
self.response = response
self.status_code = status_code
self.request = request |
class Node:
    """A single binary-tree node: a key plus parent/left/right links."""

    def __init__(self, key):
        self.key = key
        # All links start detached; Tree._add wires them up on insert.
        self.parent = None
        self.right = None
        self.left = None
class Tree:
    """Binary search tree with a REVERSED ordering convention: _add places
    keys GREATER than a node in its LEFT subtree and everything else in its
    RIGHT subtree; search/_search follow the same convention.

    NOTE(review): del_node has several suspect paths -- it never updates the
    re-attached child's .parent pointer, crashes when deleting the root
    (node.parent is None), reuses a stale self.find_node when the key is
    absent, and in the two-children branch appears to drop every descendant
    of the deleted node except the single node it re-attaches. Confirm the
    intended behavior before relying on deletion.
    """
    def __init__(self):
        self.root = None
        # NOTE(review): wraps None in a throwaway Node and is never read
        # again anywhere in this class -- presumably dead code.
        self.node = Node(self.root)
        # Cache of the last node located by search()/_search().
        self.find_node = None
    def add(self,key):
        # Public insert: seed the root, otherwise descend recursively.
        if (self.root == None):
            self.root = Node(key)
        else:
            self._add(key,self.root)
    def _add(self,key,node ):
        # Reversed BST insert: larger keys descend LEFT, others RIGHT.
        if key> node.key:
            if node.left != None:
                self._add(key,node.left)
            else:
                node.left = Node(key)
                node.left.parent = node
        else:
            if node.right != None:
                self._add(key,node.right)
            else:
                node.right = Node(key)
                node.right.parent = node
    def search (self,key):
        # Public lookup: prints the outcome and caches the hit in
        # self.find_node (via _search).
        if self.root == None:
            print ("This tree is empty, so there is any " + str(key)+ " !")
            return
        else:
            self._search(key,self.root)
    def _search(self,key,node):
        # Recursive lookup mirroring _add's reversed ordering.
        if node == None:
            print ("There is no " + str(key))
            return
        elif key> node.key:
            self._search(key,node.left)
        elif key< node.key:
            self._search(key,node.right)
        else:
            print ( "Here its --->" + str(node))
            self.find_node = node
            return node
    def del_node(self,key):
        # Delete the node holding `key`, located via search -> self.find_node.
        # NOTE(review): if `key` is absent, self.find_node is stale/None here.
        self.search(key)
        node = self.find_node
        if ( node.right == None and node.left == None):
            # Leaf: just unlink it from its parent.
            print (str (node.key) )
            print (node.parent.key)
            if (node.parent.key > node.key):
                node.parent.right = None
            else:
                node.parent.left = None
        elif ( node.right != None and node.left == None):
            # Only a right child: splice the child up to the parent.
            # NOTE(review): x.parent is left pointing at the deleted node.
            x=node.right
            y=node.parent
            print (str (node.key) )
            if (node.parent.key > node.key):
                node.parent.right = None
            else:
                node.parent.left = None
            if y.key > x.key:
                y.right = x
            else:
                y.left = x
        elif (node.right == None and node.left != None):
            # Only a left child: same splice as above, mirrored.
            x = node.left
            y = node.parent
            print (str (node.key) )
            if (node.parent.key > node.key):
                node.parent.right = None
            else:
                node.parent.left = None
            if y.key > x.key:
                y.right = x
            else:
                y.left = x
        else:
            # Two children: walk to the rightmost node of the left subtree,
            # detach the deleted node, and hang that single node from the
            # parent.
            # NOTE(review): the rest of the deleted node's subtree is never
            # re-attached -- this looks like it loses data; verify.
            x = node.left
            while x.right != None:
                print (x.key)
                x = x.right
            y=node.parent
            print (str (node.key))
            if (node.parent.key > node.key):
                node.parent.right = None
            else:
                node.parent.left = None
            if y.key > x.key:
                y.right = x
            else:
                y.left = x
    def print_tree(self):
        # In-order traversal entry point. Because of the reversed left/right
        # convention above, keys print in DESCENDING order.
        if(self.root != None):
            self._print_tree(self.root)
    def _print_tree(self,node):
        # Left subtree (larger keys) first, then the node, then right.
        if(node != None):
            self._print_tree(node.left)
            print (node.key)
            self._print_tree(node.right)
    def get_parent(self,key):
        # Return the key of the parent of the node holding `key`
        # (None for the root).
        # NOTE(review): relies on search() having found the key; a missing
        # key silently reuses the previous find_node.
        self.search(key)
        x = self.find_node.parent
        if x is not None:
            x = x.key
        return x
    def get_child(self,key):
        # Return (right child key, left child key) of the node holding
        # `key`; either entry is None when that child is missing.
        self.search(key)
        x,y = self.find_node.right,self.find_node.left
        if x is not None:
            x = x.key
        if y is not None:
            y = y.key
        return x,y
def main():
    """Exercise the Tree API: build a small tree, search, delete, re-search."""
    bst = Tree()
    for value in (5, 4, 8, 9, 3, 0, 7, 6, 2, 1):
        bst.add(value)
    bst.print_tree()
    bst.search(2)
    bst.print_tree()
    bst.del_node(8)
    bst.print_tree()
    bst.search(8)
    bst.print_tree()
if __name__ == "__main__":
    main()
| class Node:
def __init__(self, key):
self.key = key
self.parent = None
self.right = None
self.left = None
class Tree:
def __init__(self):
self.root = None
self.node = node(self.root)
self.find_node = None
def add(self, key):
if self.root == None:
self.root = node(key)
else:
self._add(key, self.root)
def _add(self, key, node):
if key > node.key:
if node.left != None:
self._add(key, node.left)
else:
node.left = node(key)
node.left.parent = node
elif node.right != None:
self._add(key, node.right)
else:
node.right = node(key)
node.right.parent = node
def search(self, key):
if self.root == None:
print('This tree is empty, so there is any ' + str(key) + ' !')
return
else:
self._search(key, self.root)
def _search(self, key, node):
if node == None:
print('There is no ' + str(key))
return
elif key > node.key:
self._search(key, node.left)
elif key < node.key:
self._search(key, node.right)
else:
print('Here its --->' + str(node))
self.find_node = node
return node
def del_node(self, key):
self.search(key)
node = self.find_node
if node.right == None and node.left == None:
print(str(node.key))
print(node.parent.key)
if node.parent.key > node.key:
node.parent.right = None
else:
node.parent.left = None
elif node.right != None and node.left == None:
x = node.right
y = node.parent
print(str(node.key))
if node.parent.key > node.key:
node.parent.right = None
else:
node.parent.left = None
if y.key > x.key:
y.right = x
else:
y.left = x
elif node.right == None and node.left != None:
x = node.left
y = node.parent
print(str(node.key))
if node.parent.key > node.key:
node.parent.right = None
else:
node.parent.left = None
if y.key > x.key:
y.right = x
else:
y.left = x
else:
x = node.left
while x.right != None:
print(x.key)
x = x.right
y = node.parent
print(str(node.key))
if node.parent.key > node.key:
node.parent.right = None
else:
node.parent.left = None
if y.key > x.key:
y.right = x
else:
y.left = x
def print_tree(self):
if self.root != None:
self._print_tree(self.root)
def _print_tree(self, node):
if node != None:
self._print_tree(node.left)
print(node.key)
self._print_tree(node.right)
def get_parent(self, key):
self.search(key)
x = self.find_node.parent
if x is not None:
x = x.key
return x
def get_child(self, key):
self.search(key)
(x, y) = (self.find_node.right, self.find_node.left)
if x is not None:
x = x.key
if y is not None:
y = y.key
return (x, y)
def main():
binary_tree = tree()
Binary_tree.add(5)
Binary_tree.add(4)
Binary_tree.add(8)
Binary_tree.add(9)
Binary_tree.add(3)
Binary_tree.add(0)
Binary_tree.add(7)
Binary_tree.add(6)
Binary_tree.add(2)
Binary_tree.add(1)
Binary_tree.print_tree()
Binary_tree.search(2)
Binary_tree.print_tree()
Binary_tree.del_node(8)
Binary_tree.print_tree()
Binary_tree.search(8)
Binary_tree.print_tree()
if __name__ == '__main__':
main() |
if __name__ == '__main__':
    # (filename, first utterance index, one-past-last index) per split.
    splits = (('train.csv', 1, 451), ('val.csv', 451, 476), ('test.csv', 476, 504))
    for filename, start, stop in splits:
        with open(filename, 'w') as f:
            # One "wav_path,txt_path" row per utterance of speaker p239.
            for i in range(start, stop):
                f.write(f'/workspace/data/VCTK-Corpus/wav48/p239/p239_{i:03d}.wav,/workspace/data/VCTK-Corpus/txt/p239/p239_{i:03d}.txt\n')
| if __name__ == '__main__':
with open('train.csv', 'w') as f:
for i in range(1, 451):
f.write(f'/workspace/data/VCTK-Corpus/wav48/p239/p239_{i:03d}.wav,/workspace/data/VCTK-Corpus/txt/p239/p239_{i:03d}.txt\n')
with open('val.csv', 'w') as f:
for i in range(451, 476):
f.write(f'/workspace/data/VCTK-Corpus/wav48/p239/p239_{i:03d}.wav,/workspace/data/VCTK-Corpus/txt/p239/p239_{i:03d}.txt\n')
with open('test.csv', 'w') as f:
for i in range(476, 504):
f.write(f'/workspace/data/VCTK-Corpus/wav48/p239/p239_{i:03d}.wav,/workspace/data/VCTK-Corpus/txt/p239/p239_{i:03d}.txt\n') |
# Print every 4-digit combination abcd (digits 1..9) where the two digit
# pairs have equal sums and that sum divides the input number evenly.
number = int(input())
digits = range(1, 9 + 1)
for a in digits:
    for b in digits:
        # a + b >= 2, so the modulo below can never divide by zero.
        if number % (a + b) != 0:
            continue
        pair_sum = a + b
        for c in digits:
            for d in digits:
                if c + d == pair_sum:
                    print('{0}{1}{2}{3}'.format(a, b, c, d), end=" ")
| number = int(input())
for a in range(1, 9 + 1):
for b in range(1, 9 + 1):
for c in range(1, 9 + 1):
for d in range(1, 9 + 1):
if a + b == c + d and number % (a + b) == 0:
print('{0}{1}{2}{3}'.format(a, b, c, d), end=' ') |
class DataProcessor(object):
    """Abstract base for dataset readers; subclasses supply each split."""

    def get_train_examples(self, data_dir):
        """Return training examples read from ``data_dir``."""
        raise NotImplementedError()

    def get_dev_examples(self, data_dir):
        """Return validation examples read from ``data_dir``."""
        raise NotImplementedError()

    def get_test_examples(self, data_dir, data_file_name):
        """Return test examples read from ``data_file_name`` under ``data_dir``."""
        raise NotImplementedError()

    def get_labels(self):
        """Return the set of target labels."""
        raise NotImplementedError()
| class Dataprocessor(object):
def get_train_examples(self, data_dir):
raise not_implemented_error()
def get_dev_examples(self, data_dir):
raise not_implemented_error()
def get_test_examples(self, data_dir, data_file_name):
raise not_implemented_error()
def get_labels(self):
raise not_implemented_error() |
"""Same name 2.0
This program has only one variable.
Attributes:
eggs (str): String denoting global variable.
"""
eggs = 'global'
def spam() -> None:
    """Spam

    Rebinds the module-level ``eggs`` variable to 'spam'.

    Returns:
        None.
    """
    global eggs
    eggs = 'spam'  # overwrite the global binding
def main():
    # Mutate the module-level `eggs` via spam(), then show the new value.
    spam()
    print(eggs)
if __name__ == '__main__':
    main()
| """Same name 2.0
This program has only one variable.
Attributes:
eggs (str): String denoting global variable.
"""
eggs = 'global'
def spam() -> None:
"""Spam
Reassigns global variable called eggs.
Returns:
None.
"""
global eggs
eggs = 'spam'
def main():
spam()
print(eggs)
if __name__ == '__main__':
main() |
"""
coding: utf-8
Created on 11/11/2020
@author: github.com/edrmonteiro
From: Hackerrank challenges
Language: Python
Title: Minimum Swaps 2
You are given an unordered array consisting of consecutive integers [1, 2, 3, ..., n] without any duplicates. You are allowed to swap any two elements. You need to find the minimum number of swaps required to sort the array in ascending order.
For example, given the array we perform the following steps:
i arr swap (indices)
0 [7, 1, 3, 2, 4, 5, 6] swap (0,3)
1 [2, 1, 3, 7, 4, 5, 6] swap (0,1)
2 [1, 2, 3, 7, 4, 5, 6] swap (3,4)
3 [1, 2, 3, 4, 7, 5, 6] swap (4,5)
4 [1, 2, 3, 4, 5, 7, 6] swap (5,6)
5 [1, 2, 3, 4, 5, 6, 7]
It took swaps to sort the array.
Function Description
Complete the function minimumSwaps in the editor below. It must return an integer representing the minimum number of swaps to sort the array.
minimumSwaps has the following parameter(s):
arr: an unordered array of integers
Input Format
The first line contains an integer, , the size of .
The second line contains space-separated integers .
Constraints
Output Format
Return the minimum number of swaps to sort the given array.
Sample Input 0
4
4 3 1 2
Sample Output 0
3
Explanation 0
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
Sample Input 1
5
2 3 4 1 5
Sample Output 1
3
Explanation 1
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
Sample Input 2
7
1 3 5 2 4 6 7
Sample Output 2
3
Explanation 2
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
"""
def minimumSwaps(arr):
    """Return the minimum number of swaps to sort ``arr`` ascending, in place.

    ``arr`` must be a permutation of 1..n (consecutive integers, no
    duplicates). Each iteration drops the value at position ``pos`` directly
    into its home slot (value v belongs at index v-1), the classic
    cycle-chasing strategy, which achieves the minimum swap count.

    :param list arr: permutation of 1..n; sorted in place as a side effect
    :return: minimum number of swaps performed
    :rtype: int
    """
    swaps = 0
    # range() handles the empty list; the old `while True` version raised
    # IndexError on [].
    for pos in range(len(arr)):
        while arr[pos] != pos + 1:
            home = arr[pos] - 1  # index where the current value belongs
            arr[pos], arr[home] = arr[home], arr[pos]
            swaps += 1
    return swaps
arr = [7, 1, 3, 2, 4, 5, 6]
print(minimumSwaps(arr))
stop = True
stop = True | """
coding: utf-8
Created on 11/11/2020
@author: github.com/edrmonteiro
From: Hackerrank challenges
Language: Python
Title: Minimum Swaps 2
You are given an unordered array consisting of consecutive integers [1, 2, 3, ..., n] without any duplicates. You are allowed to swap any two elements. You need to find the minimum number of swaps required to sort the array in ascending order.
For example, given the array we perform the following steps:
i arr swap (indices)
0 [7, 1, 3, 2, 4, 5, 6] swap (0,3)
1 [2, 1, 3, 7, 4, 5, 6] swap (0,1)
2 [1, 2, 3, 7, 4, 5, 6] swap (3,4)
3 [1, 2, 3, 4, 7, 5, 6] swap (4,5)
4 [1, 2, 3, 4, 5, 7, 6] swap (5,6)
5 [1, 2, 3, 4, 5, 6, 7]
It took swaps to sort the array.
Function Description
Complete the function minimumSwaps in the editor below. It must return an integer representing the minimum number of swaps to sort the array.
minimumSwaps has the following parameter(s):
arr: an unordered array of integers
Input Format
The first line contains an integer, , the size of .
The second line contains space-separated integers .
Constraints
Output Format
Return the minimum number of swaps to sort the given array.
Sample Input 0
4
4 3 1 2
Sample Output 0
3
Explanation 0
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
Sample Input 1
5
2 3 4 1 5
Sample Output 1
3
Explanation 1
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
Sample Input 2
7
1 3 5 2 4 6 7
Sample Output 2
3
Explanation 2
Given array
After swapping we get
After swapping we get
After swapping we get
So, we need a minimum of swaps to sort the array in ascending order.
"""
def minimum_swaps(arr):
swaps = 0
i = 0
while True:
if i != arr[i] - 1:
item = arr[i]
(arr[i], arr[item - 1]) = (arr[item - 1], arr[i])
swaps += 1
if i == arr[i] - 1:
i += 1
if i == len(arr):
break
return swaps
arr = [7, 1, 3, 2, 4, 5, 6]
print(minimum_swaps(arr))
stop = True |
def my_func(name):
    """Announce the given name on stdout and hand back a fixed string."""
    print("this function is {}".format(name))
    return "ha ha ha"
my_func("Carlos's")
the_input = int(input("Tell me something: "))
print(the_input + 2)
| def my_func(name):
print(f'this function is {name}')
return 'ha ha ha'
my_func("Carlos's")
the_input = int(input('Tell me something: '))
print(the_input + 2) |
def get_item_by_name(self, item_name):
    """Return the first entry of self.items whose item_name matches, else None."""
    return next((entry for entry in self.items if entry.item_name == item_name), None)
def get_item_list_by_name(self, item_name):
    """Return the first entry of self.item_lists whose item_name matches, else None."""
    return next((entry for entry in self.item_lists if entry.item_name == item_name), None)
| def get_item_by_name(self, item_name):
for item in self.items:
if item.item_name == item_name:
return item
return None
def get_item_list_by_name(self, item_name):
for item_list in self.item_lists:
if item_list.item_name == item_name:
return item_list
return None |
# -*- coding: utf-8 -*-
"""
Config parser of the shunt connector.
Copyright 2021 Christian Doppler Laboratory for Embedded Machine Learning
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'Bernhard Haas'
__copyright__ = 'Copyright 2021, Christian Doppler Laboratory for ' \
'Embedded Machine Learning'
__credits__ = ['']
__license__ = 'Apache 2.0'
__version__ = '1.0.0'
__maintainer__ = 'Bernhard Haas'
__email__ = 'bernhardhaas55@gmail.com'
__status__ = 'Release'
def _check_param(argument: type, option: str, options: list):
if not argument in options:
raise ValueError("Provided argument {} for option {}, but only {} are legal arguments!".format(argument, option, options))
return argument
def parse_config(self):
    """Populate per-section parameter dicts from ``self.config``.

    Reads the GENERAL, DATASET, MODEL, TRAINING_ORIGINAL_MODEL,
    TRAINING_SHUNT_MODEL, TRAINING_FINAL_MODEL, SHUNT, FINAL_MODEL and
    TEST_LATENCY sections when present; an absent section leaves the
    corresponding attribute unset.

    NOTE(review): the .get()/getboolean()/getint()/getfloat() accessors
    suggest self.config is a configparser.ConfigParser -- confirm with the
    caller that builds it.
    """
    config = self.config
    if 'GENERAL' in config.keys():
        # Flags controlling which pipeline stages run.
        self.general_params = {}
        self.general_params['task'] = config['GENERAL'].get('task')
        self.general_params['calc_knowledge_quotients'] = config['GENERAL'].getboolean('calc_knowledge_quotients')
        self.general_params['train_original_model'] = config['GENERAL'].getboolean('train_original_model')
        self.general_params['test_original_model'] = config['GENERAL'].getboolean('test_original_model')
        self.general_params['test_shunt_inserted_model'] = config['GENERAL'].getboolean('test_shunt_inserted_model')
        self.general_params['train_final_model'] = config['GENERAL'].getboolean('train_final_model')
        self.general_params['test_final_model'] = config['GENERAL'].getboolean('test_final_model')
        self.general_params['train_shunt_model'] = config['GENERAL'].getboolean('train_shunt_model')
        self.general_params['test_shunt_model'] = config['GENERAL'].getboolean('test_shunt_model')
        self.general_params['test_fine-tune_strategies'] = config['GENERAL'].getboolean('test_fine-tune_strategies')
        self.general_params['test_latency'] = config['GENERAL'].getboolean('test_latency')
    if 'DATASET' in config.keys():
        self.dataset_params = {}
        self.dataset_params['name'] = config['DATASET']['name']
        self.dataset_params['path'] = config['DATASET']['path']
        # 'input_size' is a comma-separated tuple of ints, e.g. "224,224,3".
        self.dataset_params['input_size'] = tuple(map(int, config['DATASET']['input_size'].split(',')))
        self.dataset_params['test_batchsize'] = config['DATASET'].getint('test_batchsize')
    if 'MODEL' in config.keys():
        self.model_params = {}
        self.model_params['type'] = config['MODEL']['type']
        self.model_params['depth_factor'] = config['MODEL'].getfloat('depth_factor')
        self.model_params['from_file'] = config['MODEL'].getboolean('from_file')
        self.model_params['filepath'] = config['MODEL']['filepath']
        # 'pretrained' is a string enum rather than a boolean, since it can
        # also name the pretraining dataset.
        self.model_params['pretrained'] = _check_param(config['MODEL'].get('pretrained'),
                                                       'MODEL: pretrained', ['True', 'False', 'imagenet', 'cityscapes'])
        self.model_params['weightspath'] = config['MODEL']['weightspath']
        self.model_params['number_change_stride_layes'] = config['MODEL'].getint('change_stride_layers')
        self.model_params['output_stride'] = config['MODEL'].getint('output_stride')
    if 'TRAINING_ORIGINAL_MODEL' in config.keys():
        self.train_original_params = {}
        # obligatory parameters
        self.train_original_params['learning_policy'] = _check_param(config['TRAINING_ORIGINAL_MODEL'].get('learning_policy'),
                                                                     'TRAINING_ORIGINAL_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
        self.train_original_params['batch_size'] = config['TRAINING_ORIGINAL_MODEL'].getint('batchsize')
        self.train_original_params['max_epochs'] = config['TRAINING_ORIGINAL_MODEL'].getint('max_epochs')
        self.train_original_params['base_learning_rate'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('base_learning_rate')
        # necessary parameters for given learning_strategy
        # ('epochs_param'/'learning_rate_param' are overloaded per policy)
        if self.train_original_params['learning_policy'] == 'two_cycles':
            self.train_original_params['epochs_first_cycle'] = config['TRAINING_ORIGINAL_MODEL'].getint('epochs_param')
            self.train_original_params['learning_rate_second_cycle'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
        if self.train_original_params['learning_policy'] == 'plateau':
            self.train_original_params['factor'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
            self.train_original_params['patience'] = config['TRAINING_ORIGINAL_MODEL'].getint('epochs_param')
        if self.train_original_params['learning_policy'] == 'poly':
            self.train_original_params['power'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
    if 'TRAINING_SHUNT_MODEL' in config.keys():
        self.train_shunt_params = {}
        self.train_shunt_params['learning_policy'] = _check_param(config['TRAINING_SHUNT_MODEL'].get('learning_policy'), 'TRAINING_SHUNT_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
        self.train_shunt_params['full_attention_transfer_factor'] = config['TRAINING_SHUNT_MODEL'].getfloat('full_attention_transfer_factor', 1.0)
        self.train_shunt_params['use_categorical_crossentropy'] = config['TRAINING_SHUNT_MODEL'].getboolean('use_categorical_crossentropy', False)
        self.train_shunt_params['batch_size'] = config['TRAINING_SHUNT_MODEL'].getint('batchsize')
        self.train_shunt_params['max_epochs'] = config['TRAINING_SHUNT_MODEL'].getint('max_epochs')
        self.train_shunt_params['base_learning_rate'] = config['TRAINING_SHUNT_MODEL'].getfloat('base_learning_rate')
        # necessary parameters for given learning_strategy
        if self.train_shunt_params['learning_policy'] == 'two_cycles':
            self.train_shunt_params['epochs_first_cycle'] = config['TRAINING_SHUNT_MODEL'].getint('epochs_param')
            self.train_shunt_params['learning_rate_second_cycle'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
        if self.train_shunt_params['learning_policy'] == 'plateau':
            self.train_shunt_params['factor'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
            self.train_shunt_params['patience'] = config['TRAINING_SHUNT_MODEL'].getint('epochs_param')
        if self.train_shunt_params['learning_policy'] == 'poly':
            self.train_shunt_params['power'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
    if 'TRAINING_FINAL_MODEL' in config.keys():
        self.train_final_params = {}
        # obligatory parameters
        self.train_final_params['learning_policy'] = _check_param(config['TRAINING_FINAL_MODEL'].get('learning_policy'), 'TRAINING_FINAL_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
        self.train_final_params['freezing'] = _check_param(config['TRAINING_FINAL_MODEL'].get('freezing'), 'TRAINING_FINAL_MODEL: freezing', ['nothing', 'freeze_before_shunt'])
        self.train_final_params['batch_size'] = config['TRAINING_FINAL_MODEL'].getint('batchsize')
        self.train_final_params['base_learning_rate'] = config['TRAINING_FINAL_MODEL'].getfloat('base_learning_rate')
        self.train_final_params['max_epochs'] = config['TRAINING_FINAL_MODEL'].getint('max_epochs')
        # necessary parameters for given learning_strategy
        if self.train_final_params['learning_policy'] == 'two_cycles':
            self.train_final_params['epochs_first_cycle'] = config['TRAINING_FINAL_MODEL'].getint('epochs_param')
            self.train_final_params['learning_rate_second_cycle'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
        if self.train_final_params['learning_policy'] == 'plateau':
            self.train_final_params['factor'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
            self.train_final_params['patience'] = config['TRAINING_FINAL_MODEL'].getint('epochs_param')
        if self.train_final_params['learning_policy'] == 'poly':
            self.train_final_params['power'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
    if 'SHUNT' in config.keys():
        self.shunt_params = {}
        self.shunt_params['arch'] = config['SHUNT'].getint('arch')
        self.shunt_params['locations'] = tuple(map(int, config['SHUNT']['locations'].split(',')))
        # NOTE(review): key is 'from file' (with a space) here, unlike
        # MODEL's 'from_file' -- confirm this matches the config template.
        self.shunt_params['from_file'] = config['SHUNT'].getboolean('from file')
        self.shunt_params['filepath'] = config['SHUNT']['filepath']
        self.shunt_params['pretrained'] = config['SHUNT'].getboolean('pretrained')
        self.shunt_params['weightspath'] = config['SHUNT']['weightspath']
    if 'FINAL_MODEL' in config.keys():
        self.final_model_params = {}
        self.final_model_params['test_after_shunt_insertion'] = config['FINAL_MODEL'].getboolean('test_after_shunt_insertion')
        self.final_model_params['pretrained'] = config['FINAL_MODEL'].getboolean('pretrained')
        self.final_model_params['weightspath'] = config['FINAL_MODEL']['weightspath']
    if 'TEST_LATENCY' in config.keys():
        self.test_latency_params = {}
        self.test_latency_params['iterations'] = config['TEST_LATENCY'].getint('iterations')
        self.test_latency_params['number_of_samples'] = config['TEST_LATENCY'].getint('number_of_samples')
        self.test_latency_params['batchsize'] = config['TEST_LATENCY'].getint('batchsize')
| """
Config parser of the shunt connector.
Copyright 2021 Christian Doppler Laboratory for Embedded Machine Learning
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'Bernhard Haas'
__copyright__ = 'Copyright 2021, Christian Doppler Laboratory for Embedded Machine Learning'
__credits__ = ['']
__license__ = 'Apache 2.0'
__version__ = '1.0.0'
__maintainer__ = 'Bernhard Haas'
__email__ = 'bernhardhaas55@gmail.com'
__status__ = 'Release'
def _check_param(argument: type, option: str, options: list):
if not argument in options:
raise value_error('Provided argument {} for option {}, but only {} are legal arguments!'.format(argument, option, options))
return argument
def parse_config(self):
config = self.config
if 'GENERAL' in config.keys():
self.general_params = {}
self.general_params['task'] = config['GENERAL'].get('task')
self.general_params['calc_knowledge_quotients'] = config['GENERAL'].getboolean('calc_knowledge_quotients')
self.general_params['train_original_model'] = config['GENERAL'].getboolean('train_original_model')
self.general_params['test_original_model'] = config['GENERAL'].getboolean('test_original_model')
self.general_params['test_shunt_inserted_model'] = config['GENERAL'].getboolean('test_shunt_inserted_model')
self.general_params['train_final_model'] = config['GENERAL'].getboolean('train_final_model')
self.general_params['test_final_model'] = config['GENERAL'].getboolean('test_final_model')
self.general_params['train_shunt_model'] = config['GENERAL'].getboolean('train_shunt_model')
self.general_params['test_shunt_model'] = config['GENERAL'].getboolean('test_shunt_model')
self.general_params['test_fine-tune_strategies'] = config['GENERAL'].getboolean('test_fine-tune_strategies')
self.general_params['test_latency'] = config['GENERAL'].getboolean('test_latency')
if 'DATASET' in config.keys():
self.dataset_params = {}
self.dataset_params['name'] = config['DATASET']['name']
self.dataset_params['path'] = config['DATASET']['path']
self.dataset_params['input_size'] = tuple(map(int, config['DATASET']['input_size'].split(',')))
self.dataset_params['test_batchsize'] = config['DATASET'].getint('test_batchsize')
if 'MODEL' in config.keys():
self.model_params = {}
self.model_params['type'] = config['MODEL']['type']
self.model_params['depth_factor'] = config['MODEL'].getfloat('depth_factor')
self.model_params['from_file'] = config['MODEL'].getboolean('from_file')
self.model_params['filepath'] = config['MODEL']['filepath']
self.model_params['pretrained'] = _check_param(config['MODEL'].get('pretrained'), 'MODEL: pretrained', ['True', 'False', 'imagenet', 'cityscapes'])
self.model_params['weightspath'] = config['MODEL']['weightspath']
self.model_params['number_change_stride_layes'] = config['MODEL'].getint('change_stride_layers')
self.model_params['output_stride'] = config['MODEL'].getint('output_stride')
if 'TRAINING_ORIGINAL_MODEL' in config.keys():
self.train_original_params = {}
self.train_original_params['learning_policy'] = _check_param(config['TRAINING_ORIGINAL_MODEL'].get('learning_policy'), 'TRAINING_ORIGINAL_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
self.train_original_params['batch_size'] = config['TRAINING_ORIGINAL_MODEL'].getint('batchsize')
self.train_original_params['max_epochs'] = config['TRAINING_ORIGINAL_MODEL'].getint('max_epochs')
self.train_original_params['base_learning_rate'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('base_learning_rate')
if self.train_original_params['learning_policy'] == 'two_cycles':
self.train_original_params['epochs_first_cycle'] = config['TRAINING_ORIGINAL_MODEL'].getint('epochs_param')
self.train_original_params['learning_rate_second_cycle'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
if self.train_original_params['learning_policy'] == 'plateau':
self.train_original_params['factor'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
self.train_original_params['patience'] = config['TRAINING_ORIGINAL_MODEL'].getint('epochs_param')
if self.train_original_params['learning_policy'] == 'poly':
self.train_original_params['power'] = config['TRAINING_ORIGINAL_MODEL'].getfloat('learning_rate_param')
if 'TRAINING_SHUNT_MODEL' in config.keys():
self.train_shunt_params = {}
self.train_shunt_params['learning_policy'] = _check_param(config['TRAINING_SHUNT_MODEL'].get('learning_policy'), 'TRAINING_SHUNT_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
self.train_shunt_params['full_attention_transfer_factor'] = config['TRAINING_SHUNT_MODEL'].getfloat('full_attention_transfer_factor', 1.0)
self.train_shunt_params['use_categorical_crossentropy'] = config['TRAINING_SHUNT_MODEL'].getboolean('use_categorical_crossentropy', False)
self.train_shunt_params['batch_size'] = config['TRAINING_SHUNT_MODEL'].getint('batchsize')
self.train_shunt_params['max_epochs'] = config['TRAINING_SHUNT_MODEL'].getint('max_epochs')
self.train_shunt_params['base_learning_rate'] = config['TRAINING_SHUNT_MODEL'].getfloat('base_learning_rate')
if self.train_shunt_params['learning_policy'] == 'two_cycles':
self.train_shunt_params['epochs_first_cycle'] = config['TRAINING_SHUNT_MODEL'].getint('epochs_param')
self.train_shunt_params['learning_rate_second_cycle'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
if self.train_shunt_params['learning_policy'] == 'plateau':
self.train_shunt_params['factor'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
self.train_shunt_params['patience'] = config['TRAINING_SHUNT_MODEL'].getint('epochs_param')
if self.train_shunt_params['learning_policy'] == 'poly':
self.train_shunt_params['power'] = config['TRAINING_SHUNT_MODEL'].getfloat('learning_rate_param')
if 'TRAINING_FINAL_MODEL' in config.keys():
self.train_final_params = {}
self.train_final_params['learning_policy'] = _check_param(config['TRAINING_FINAL_MODEL'].get('learning_policy'), 'TRAINING_FINAL_MODEL: learning_policy', ['two_cycles', 'plateau', 'poly'])
self.train_final_params['freezing'] = _check_param(config['TRAINING_FINAL_MODEL'].get('freezing'), 'TRAINING_FINAL_MODEL: freezing', ['nothing', 'freeze_before_shunt'])
self.train_final_params['batch_size'] = config['TRAINING_FINAL_MODEL'].getint('batchsize')
self.train_final_params['base_learning_rate'] = config['TRAINING_FINAL_MODEL'].getfloat('base_learning_rate')
self.train_final_params['max_epochs'] = config['TRAINING_FINAL_MODEL'].getint('max_epochs')
if self.train_final_params['learning_policy'] == 'two_cycles':
self.train_final_params['epochs_first_cycle'] = config['TRAINING_FINAL_MODEL'].getint('epochs_param')
self.train_final_params['learning_rate_second_cycle'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
if self.train_final_params['learning_policy'] == 'plateau':
self.train_final_params['factor'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
self.train_final_params['patience'] = config['TRAINING_FINAL_MODEL'].getint('epochs_param')
if self.train_final_params['learning_policy'] == 'poly':
self.train_final_params['power'] = config['TRAINING_FINAL_MODEL'].getfloat('learning_rate_param')
if 'SHUNT' in config.keys():
self.shunt_params = {}
self.shunt_params['arch'] = config['SHUNT'].getint('arch')
self.shunt_params['locations'] = tuple(map(int, config['SHUNT']['locations'].split(',')))
self.shunt_params['from_file'] = config['SHUNT'].getboolean('from file')
self.shunt_params['filepath'] = config['SHUNT']['filepath']
self.shunt_params['pretrained'] = config['SHUNT'].getboolean('pretrained')
self.shunt_params['weightspath'] = config['SHUNT']['weightspath']
if 'FINAL_MODEL' in config.keys():
self.final_model_params = {}
self.final_model_params['test_after_shunt_insertion'] = config['FINAL_MODEL'].getboolean('test_after_shunt_insertion')
self.final_model_params['pretrained'] = config['FINAL_MODEL'].getboolean('pretrained')
self.final_model_params['weightspath'] = config['FINAL_MODEL']['weightspath']
if 'TEST_LATENCY' in config.keys():
self.test_latency_params = {}
self.test_latency_params['iterations'] = config['TEST_LATENCY'].getint('iterations')
self.test_latency_params['number_of_samples'] = config['TEST_LATENCY'].getint('number_of_samples')
self.test_latency_params['batchsize'] = config['TEST_LATENCY'].getint('batchsize') |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 22 18:10:02 2022
@author: victor
"""
fav_numbers = {'eric': 17, 'ever': 4}
for name, number in fav_numbers.items():
print(name + ' loves ' + str(number)) | """
Created on Sat Jan 22 18:10:02 2022
@author: victor
"""
fav_numbers = {'eric': 17, 'ever': 4}
for (name, number) in fav_numbers.items():
print(name + ' loves ' + str(number)) |
# Registry of runnable example categories: each framework name maps to the
# (initially empty) list of cases registered for it.
EXAMPLES = {
    name: []
    for name in ("pyxel", "pygame", "cli", "matplotlib", "pyglet", "streamlit", "kivy")
}
class TestExamples:
    """Sanity checks over the example registry."""

    def test_all_examples_were_registered(self):
        """Placeholder: intended to verify EXAMPLES covers every shipped example."""
class BaseRunner:
    """Common interface for framework-specific example runners."""

    def run(self, file):
        """Run the given example file; the base implementation is a no-op."""
class TestPyxelExample:
    """Placeholder test suite for the pyxel example."""
| examples = {'pyxel': [], 'pygame': [], 'cli': [], 'matplotlib': [], 'pyglet': [], 'streamlit': [], 'kivy': []}
class Testexamples:
def test_all_examples_were_registered(self):
...
class Baserunner:
def run(self, file):
"""
Run file
"""
class Testpyxelexample:
... |
class fuzzy_linguistic_value:
    """A linguistic value: a membership (belonging) function and, optionally,
    its inverse function.

    The inverse is only needed by defuzzification strategies that map a
    membership degree back to a crisp value (e.g. the maximum method).
    """

    def __init__(self, function, inverse_function = None):
        self.function = function
        self.inverse_function = inverse_function
def defuzzy_function(function, maximum, x):
    """Membership of x under `function`, clipped at the activation ceiling `maximum`."""
    membership = function(x)
    return membership if membership < maximum else maximum
#A fuzzy variable: a crisp value, a domain of linguistic values, an
#aggregation of rule outputs, and several defuzzyfication methods.
class fuzzy_variable:
    #builder (initial value and the domain)
    def __init__(self, initial_value, domain):
        """`domain` maps a state name to a linguistic value exposing
        .function (membership) and, for some methods, .inverse_function."""
        self.domain = domain
        self.value = initial_value
        self.agregation = {}  # state name -> (membership function, activation ceiling)
    #change the value of the variable to "new_value"
    def change_value(self, new_value):
        self.value = new_value
    #get belonging value associated with the state "state"
    def get_value(self, state):
        return self.domain[state].function(self.value)
    #reset current agregation
    def clear_agregation(self):
        self.agregation = {}
    #add more outputs to the agregation; `pairs` is an iterable of (state, activation)
    def agregate(self, pairs):
        for p in pairs:
            #if this value was already part of the agregation we take the minimum of the 2 values
            if p[0] in self.agregation.keys():
                self.agregation[p[0]] = (self.domain[p[0]].function, min(p[1], self.agregation[p[0]][1]))
            else: #if not, we add it to the agregation
                self.agregation[p[0]] = (self.domain[p[0]].function, p[1])
    #defuzzyfication methods
    #centroid: discrete weighted average of x by the aggregated (clipped) membership,
    #sampled over [low, high) with step delta.  Relies on the module-level helper
    #defuzzy_function(function, maximum, x).
    def defuzzyfication_centroid(self, low = int(-1e5), high = int(1e5), delta = 1):
        a, b = 0, 0  # a: sum of x * membership, b: sum of membership
        for x in range(low, high, delta):
            #NOTE: `eval` shadows the builtin; kept unchanged here.
            eval = -1e15
            for k in self.agregation.keys():
                eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
            a += 1.0 * x * eval
            b += eval
        if b == 0:
            return 0.0
        else:
            return a / b
    #maximum: crisp value of the state with the highest activation, obtained
    #through that state's inverse membership function.
    def defuzzyfication_max(self):
        fuzzy, maximum = None, -1e15
        for k in self.agregation.keys():
            if self.agregation[k][1] > maximum:
                fuzzy, maximum = k, self.agregation[k][1]
        if fuzzy == None:
            raise RuntimeError('Agregation function not defined')
        return self.domain[fuzzy].inverse_function(maximum)
    #bisection
    def defuzzyfication_bisection(self, low = int(-1e5), high = int(1e5), delta = 1):
        first, last = 0, 0
        for x in range(low, high, delta):
            first = x
            eval = -1e15
            for k in self.agregation.keys():
                eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
            #NOTE(review): `first == 0` only holds when x == 0, so this break can
            #almost never fire; the guard was probably meant to find the first x
            #with positive membership -- confirm intent before changing.
            if eval > 0 and first == 0:
                break
        for x in range(high, low, -delta):
            last = x
            eval = -1e15
            for k in self.agregation.keys():
                eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
            if eval > 0 and last == 0:
                break
        #NOTE(review): `last` is computed but never used; a bisection result would
        #normally be (first + last) / 2.0 rather than (low + first) / 2.0 -- verify.
        return (low + first) / 2.0
| class Fuzzy_Linguistic_Value:
def __init__(self, function, inverse_function=None):
self.function = function
self.inverse_function = inverse_function
def defuzzy_function(function, maximum, x):
    return min(maximum, function(x))
class Fuzzy_Variable:
def __init__(self, initial_value, domain):
self.domain = domain
self.value = initial_value
self.agregation = {}
def change_value(self, new_value):
self.value = new_value
def get_value(self, state):
return self.domain[state].function(self.value)
def clear_agregation(self):
self.agregation = {}
def agregate(self, pairs):
for p in pairs:
if p[0] in self.agregation.keys():
self.agregation[p[0]] = (self.domain[p[0]].function, min(p[1], self.agregation[p[0]][1]))
else:
self.agregation[p[0]] = (self.domain[p[0]].function, p[1])
def defuzzyfication_centroid(self, low=int(-100000.0), high=int(100000.0), delta=1):
(a, b) = (0, 0)
for x in range(low, high, delta):
eval = -1000000000000000.0
for k in self.agregation.keys():
eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
a += 1.0 * x * eval
b += eval
if b == 0:
return 0.0
else:
return a / b
def defuzzyfication_max(self):
(fuzzy, maximum) = (None, -1000000000000000.0)
for k in self.agregation.keys():
if self.agregation[k][1] > maximum:
(fuzzy, maximum) = (k, self.agregation[k][1])
if fuzzy == None:
            raise RuntimeError('Agregation function not defined')
return self.domain[fuzzy].inverse_function(maximum)
def defuzzyfication_bisection(self, low=int(-100000.0), high=int(100000.0), delta=1):
(first, last) = (0, 0)
for x in range(low, high, delta):
first = x
eval = -1000000000000000.0
for k in self.agregation.keys():
eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
if eval > 0 and first == 0:
break
for x in range(high, low, -delta):
last = x
eval = -1000000000000000.0
for k in self.agregation.keys():
eval = max(eval, defuzzy_function(self.agregation[k][0], self.agregation[k][1], x))
if eval > 0 and last == 0:
break
return (low + first) / 2.0 |
# Minimum number of single-character changes needed to make s a palindrome:
# every mismatched symmetric pair costs exactly one change.
s = input()
ans = sum(1 for i in range(len(s) // 2) if s[i] != s[-1 - i])
print(ans)
| s = input()
ans = 0
for i in range(len(s) // 2):
if s[i] != s[len(s) - 1 - i]:
ans += 1
print(ans) |
# Basic `await` exercise, adapted from PEP 0492: a coroutine awaiting itself
# recursively, driven by hand with send() instead of an event loop.
async def abinary(n):
    """Print n at each node and return the node count of a depth-n binary tree."""
    print(n)
    if n <= 0:
        return 1
    left = await abinary(n - 1)
    right = await abinary(n - 1)
    return left + 1 + right

o = abinary(4)
try:
    while True:
        o.send(None)  # StopIteration signals completion
except StopIteration:
    print('finished')
| async def abinary(n):
print(n)
if n <= 0:
return 1
l = await abinary(n - 1)
r = await abinary(n - 1)
return l + 1 + r
o = abinary(4)
try:
while True:
o.send(None)
except StopIteration:
print('finished') |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Walk through the common ways of reading a text file (whole-file read,
# readline loop, direct iteration) followed by a few writing patterns.

print('[+] First Read')
with open('file.txt', 'r') as file:
    # Slurp the entire file in a single call.
    print(file.read())

print('\n[+] Second Read')
with open('file.txt', 'r') as file:
    # readline() returns '' at EOF; iter() with that sentinel stops there.
    for line in iter(file.readline, ''):
        print(line, end='')

print('\n\n[+] Third Read')
with open('file.txt', 'r') as file:
    # File objects iterate lazily, one line at a time.
    for line in file:
        print(line, end='')

print('\n\n[+] First Writing')
with open('file_output.txt', 'w') as file_output:
    data_to_write = 'Some text\n' * 100
    file_output.write(data_to_write)
    print('\t[|] Done writing file!')

data = {'ip': '127.0.0.1'}
print('\n\n[+] Second Writing')
with open('file_output_dict.txt', 'w') as file_output_dict:
    file_output_dict.write(str(data))
    print('\t[|] Done writing dict to a file!')

print('\n\n[+] Reading and Writing')
with open('file.txt', 'r') as file_input, open('file_output_2.txt', 'w') as file_output:
    file_output.write(file_input.read())
print('\t[|] Done copying files!') | print('[+] First Read')
with open('file.txt', 'r') as file:
print(file.read())
print('\n[+] Second Read')
with open('file.txt', 'r') as file:
while True:
line = file.readline()
print(line, end='')
if not line:
break
print('\n\n[+] Third Read')
with open('file.txt', 'r') as file:
for line in file:
print(line, end='')
print('\n\n[+] First Writing')
with open('file_output.txt', 'w') as file_output:
data_to_write = 'Some text\n' * 100
file_output.write(data_to_write)
print('\t[|] Done writing file!')
data = {'ip': '127.0.0.1'}
print('\n\n[+] Second Writing')
with open('file_output_dict.txt', 'w') as file_output_dict:
file_output_dict.write(str(data))
print('\t[|] Done writing dict to a file!')
print('\n\n[+] Reading and Writing')
with open('file.txt', 'r') as file_input, open('file_output_2.txt', 'w') as file_output:
file_output.write(file_input.read())
print('\t[|] Done copying files!') |
# Print a 13-row diamond of asterisks: the indentation shrinks while the star
# run widens up to the middle row, then the pattern narrows back down.
rows = 13
mid = rows // 2
for row in range(rows):
    if row <= mid:
        # upper half: odd-width star runs 1, 3, 5, ...
        print(' ' * (mid - row) + '*' * (2 * row + 1))
    else:
        # lower half: star run shrinks by 2 per row
        print(' ' * (row - mid) + '*' * (rows - 2 * (row - mid)))
| rows = 13
mid = int(rows / 2)
for row in range(rows):
if row <= mid:
for blank in range(mid - row):
print(' ', end='')
for star in range(2 * row + 1):
print('*', end='')
else:
for blank in range(row - mid):
print(' ', end='')
for star in range(rows - 2 * (row - mid)):
print('*', end='')
print('') |
#!/usr/bin/env python
#
# Cloudlet Infrastructure for Mobile Computing
#
# Author: Kiryong Ha <krha@cmu.edu>
# Zhuo Chen <zhuoc@cs.cmu.edu>
#
# Copyright (C) 2011-2013 Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Protocol_client(object):
    """JSON key names (wire constants) for messages exchanged with the client.

    These are protocol values -- presumably shared with the mobile side, so do
    not rename or change them without coordinating both ends (TODO confirm).
    """
    JSON_KEY_CONTROL_MESSAGE = "control"
    JSON_KEY_RESULT_MESSAGE = "result"
    JSON_KEY_FRAME_ID = "frame_id"
    JSON_KEY_ENGINE_ID = "engine_id"
    JSON_KEY_TOKEN_INJECT = "token_inject"
    JSON_KEY_TRAIN = "training"
    JSON_KEY_ADD_PERSON = "add_person"
    JSON_KEY_RM_PERSON = "remove_person"
    JSON_KEY_SET_WHITELIST = "set_whitelist"
class AppDataProtocol():
    """Message type identifiers used in the application data protocol."""
    TYPE_add_person = "add_person"
    TYPE_get_person = "get_person"
    TYPE_train = "train"
    TYPE_detect = "detect"
    TYPE_get_state = "get_state"
    TYPE_load_state = "load_state"
    TYPE_reset = "reset"
    TYPE_remove_person = "remove_person"
class Protocol_application(object):
    """JSON key/value constants describing the sensor type of a data stream."""
    JSON_KEY_SENSOR_TYPE = "sensor_type"
    # NOTE(review): "mjepg" looks like a typo for "mjpeg", but this is a wire
    # value -- do not change it without coordinating both protocol ends.
    JSON_VALUE_SENSOR_TYPE_JPEG = "mjepg"
    JSON_VALUE_SENSOR_TYPE_ACC = "acc"
    JSON_VALUE_SENSOR_TYPE_GPS = "gps"
    JSON_VALUE_SENSOR_TYPE_AUDIO = "audio"
class Protocol_measurement(object):
    """JSON keys for per-hop timestamps used in latency measurement."""
    JSON_KEY_CONTROL_RECV_FROM_MOBILE_TIME = "control_recv_from_mobile_time"
    JSON_KEY_APP_RECV_TIME = "app_recv_time"
    JSON_KEY_APP_SYMBOLIC_TIME = "app_symbolic_time"
    JSON_KEY_APP_SENT_TIME = "app_sent_time"
    JSON_KEY_UCOMM_RECV_TIME = "ucomm_recv_time"
    JSON_KEY_UCOMM_SENT_TIME = "ucomm_sent_time"
    JSON_KEY_CONTROL_SENT_TO_MOBILE_TIME = "control_sent_to_mobile_time"
class Protocol_result(object):
    """JSON keys for the payload fields of a result message."""
    JSON_KEY_STATUS = "status"
    JSON_KEY_IMAGE = "image"
    JSON_KEY_SPEECH = "speech"
    JSON_KEY_IMAGES_ANIMATION = "animation"
    JSON_KEY_VIDEO = "video"
| class Protocol_Client(object):
json_key_control_message = 'control'
json_key_result_message = 'result'
json_key_frame_id = 'frame_id'
json_key_engine_id = 'engine_id'
json_key_token_inject = 'token_inject'
json_key_train = 'training'
json_key_add_person = 'add_person'
json_key_rm_person = 'remove_person'
json_key_set_whitelist = 'set_whitelist'
class Appdataprotocol:
type_add_person = 'add_person'
type_get_person = 'get_person'
type_train = 'train'
type_detect = 'detect'
type_get_state = 'get_state'
type_load_state = 'load_state'
type_reset = 'reset'
type_remove_person = 'remove_person'
class Protocol_Application(object):
json_key_sensor_type = 'sensor_type'
json_value_sensor_type_jpeg = 'mjepg'
json_value_sensor_type_acc = 'acc'
json_value_sensor_type_gps = 'gps'
json_value_sensor_type_audio = 'audio'
class Protocol_Measurement(object):
json_key_control_recv_from_mobile_time = 'control_recv_from_mobile_time'
json_key_app_recv_time = 'app_recv_time'
json_key_app_symbolic_time = 'app_symbolic_time'
json_key_app_sent_time = 'app_sent_time'
json_key_ucomm_recv_time = 'ucomm_recv_time'
json_key_ucomm_sent_time = 'ucomm_sent_time'
json_key_control_sent_to_mobile_time = 'control_sent_to_mobile_time'
class Protocol_Result(object):
json_key_status = 'status'
json_key_image = 'image'
json_key_speech = 'speech'
json_key_images_animation = 'animation'
json_key_video = 'video' |
# -*- coding: utf-8 -*-
# Demonstrates Python's lexical scoping: a nested function resolves names
# through its local scope, the enclosing function, module globals, and the
# module-level __name__.
z = 'watevah'

def blah(a, k='b'):
    """Print values drawn from several scopes, then invoke a nested closure."""
    def blorp(b):
        # resolution order seen here: local b, enclosing a, global z, __name__
        print(b)
        print(a)
        print(z)
        print(__name__)
    def identity(x):
        return x
    print(identity("zoom!"))
    print(a, k)
    blorp("boom")

blah("floom")
| z = 'watevah'
def blah(a, k='b'):
def blorp(b):
print(b)
print(a)
print(z)
print(__name__)
l = lambda x: x
print(l('zoom!'))
print(a, k)
blorp('boom')
blah('floom') |
class Solution:
    """
    @param numbers: Give an array numbers of n integer
    @param target: An integer
    @return: return the sum of the three integers, the sum closest target.
    """
    # Classic 3Sum-closest: sort, then for each fixed largest element run a
    # two-pointer scan over the prefix.  O(n^2) time, O(1) extra space.
    # NOTE(review): uses sys.maxsize but no `import sys` is visible in this
    # chunk -- confirm the module imports it.
    def threeSumClosest(self, numbers, target):
        # fewer than three numbers cannot form a triple
        if len(numbers) < 3:
            return 0
        numbers.sort()  # in-place: mutates the caller's list
        closest = sys.maxsize  # smallest |sum - target| seen so far
        closestSum = 0
        for i in range(2, len(numbers)):
            start = 0
            end = i - 1
            while start < end:
                currSum = numbers[i] + numbers[start] + numbers[end]
                diff = currSum - target
                if abs(diff) < closest:
                    closest = abs(diff)
                    closestSum = currSum
                if diff > 0:
                    end -= 1
                elif diff < 0:
                    start += 1
                else:
                    # exact hit cannot be beaten
                    return currSum
return closestSum | class Solution:
"""
@param numbers: Give an array numbers of n integer
@param target: An integer
@return: return the sum of the three integers, the sum closest target.
"""
def three_sum_closest(self, numbers, target):
if len(numbers) < 3:
return 0
numbers.sort()
closest = sys.maxsize
closest_sum = 0
for i in range(2, len(numbers)):
start = 0
end = i - 1
while start < end:
curr_sum = numbers[i] + numbers[start] + numbers[end]
                diff = curr_sum - target
if abs(diff) < closest:
closest = abs(diff)
                    closest_sum = curr_sum
if diff > 0:
end -= 1
elif diff < 0:
start += 1
else:
                    return curr_sum
        return closest_sum |
# max(x, y) == (x + y + |x - y|) / 2; max of three folds through max of two.

def AlgebraicallyFindMaxTwo(x, y):
    """Maximum of two numbers computed purely algebraically (always a float)."""
    return (x + y + abs(x - y)) / 2

AlgebraicallyFindMaxTwo(3, 4)

def AlgebraicallyFindMaxThree(x, y, z):
    """Print the maximum of three numbers via two pairwise reductions."""
    print(AlgebraicallyFindMaxTwo(AlgebraicallyFindMaxTwo(x, y), z))

AlgebraicallyFindMaxThree(3, 4, 5)
| def algebraically_find_max_two(x, y):
return (x + y + abs(x - y)) / 2
algebraically_find_max_two(3, 4)
def algebraically_find_max_three(x, y, z):
print(algebraically_find_max_two(algebraically_find_max_two(x, y), z))
algebraically_find_max_three(3, 4, 5) |
# common values for app boards (presumably form-field limits and pagination
# settings -- confirm against the forms/views that consume them)
MESSAGE_FIELD_SIZE = 8000
BOARD_NAME_SIZE = 30
DESCRIPTION_SIZE = 100
TOPIC_SUBJECT_SIZE = 255
POSTS_PER_PAGE = 2
TOPICS_PER_PAGE = 10
HAS_MANY_PAGES_LIMIT = 3
POST_SUBJECT_SIZE = 255
EXCLUDE_USERS = ['default_user', 'moderator']
MAX_SYMBOLS_ALLOWED = 20
# common values for app newsfeed
DELAY_FACTOR = 35
MIN_CHARS = 350
BANNER_LENGTH = 150
HELP_ARROWS = 'Use left/ right arrow to toggle news items. '
HELP_BANNER = 'Press Banner to toggle banner on/ off. '
IMG_WIDTH_PX = 900
IMG_WIDTH_PERC = '100%'
RIGHT_ARROW = '\u25B6'  # BLACK RIGHT-POINTING TRIANGLE
LEFT_ARROW = '\u25C0'  # BLACK LEFT-POINTING TRIANGLE
# common values for app stock
BASE_CURRENCIES = [('EUR', 'EUR'), ('USD', 'USD')]
CARET_UP = '\u25B2' # up triangle (BLACK UP-POINTING TRIANGLE)
CARET_DOWN = '\u25BC' # down triangle (BLACK DOWN-POINTING TRIANGLE)
CARET_NO_CHANGE = '\u25AC' # rectangle (BLACK RECTANGLE)
| message_field_size = 8000
board_name_size = 30
description_size = 100
topic_subject_size = 255
posts_per_page = 2
topics_per_page = 10
has_many_pages_limit = 3
post_subject_size = 255
exclude_users = ['default_user', 'moderator']
max_symbols_allowed = 20
delay_factor = 35
min_chars = 350
banner_length = 150
help_arrows = 'Use left/ right arrow to toggle news items. '
help_banner = 'Press Banner to toggle banner on/ off. '
img_width_px = 900
img_width_perc = '100%'
right_arrow = '▶'
left_arrow = '◀'
base_currencies = [('EUR', 'EUR'), ('USD', 'USD')]
caret_up = '▲'
caret_down = '▼'
caret_no_change = '▬' |
#
# PySNMP MIB module ADTRAN-AOS-MEDIAGATEWAY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-AOS-MEDIAGATEWAY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:14:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
adGenAOSVoice, = mibBuilder.importSymbols("ADTRAN-AOS", "adGenAOSVoice")
adIdentity, adShared = mibBuilder.importSymbols("ADTRAN-MIB", "adIdentity", "adShared")
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, Counter64, iso, NotificationType, TimeTicks, ModuleIdentity, Bits, IpAddress, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Unsigned32, MibIdentifier, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Counter64", "iso", "NotificationType", "TimeTicks", "ModuleIdentity", "Bits", "IpAddress", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Unsigned32", "MibIdentifier", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
adGenAOSMediaGatewayMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 664, 6, 10000, 53, 5, 2))
adGenAOSMediaGatewayMIB.setRevisions(('2012-08-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: adGenAOSMediaGatewayMIB.setRevisionsDescriptions(('The following OIDs have been obsoleted and will no longer be supported. adGenAOSRtpSessionRxPacketsLost, adGenAOSRtpSessionRxFrameLateDiscards, adGenAOSRtpSessionRxFrameOverflows, adGenAOSRtpSessionTotalsRxPacketsLost, adGenAOSRtpSessionTotalsRxFrameLateDiscards, adGenAOSRtpSessionTotalsRxFrameOverflows, adGenAOSRtpChannelTotalRxPacketsLost, adGenAOSRtpChannelTotalRxMaxDepth, adGenAOSRtpChannelTotalRxFrameLateDiscards, adGenAOSRtpChannelTotalRxFrameOverflows',))
if mibBuilder.loadTexts: adGenAOSMediaGatewayMIB.setLastUpdated('200504190000Z')
if mibBuilder.loadTexts: adGenAOSMediaGatewayMIB.setOrganization('ADTRAN, Inc.')
if mibBuilder.loadTexts: adGenAOSMediaGatewayMIB.setContactInfo(' Technical Support Dept. Postal: ADTRAN, Inc. 901 Explorer Blvd. Huntsville, AL 35806 Tel: +1 800 923 8726 Fax: +1 256 963 6217 E-mail: support@adtran.com')
if mibBuilder.loadTexts: adGenAOSMediaGatewayMIB.setDescription('The MIB module for AdtranOS Media-gateway statistics.')
adGenAOSMediaGateway = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2))
adGenAOSMediaGatewayObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1))
adGenAOSMediaGatewayConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99))
adGenAOSMediaGatewayCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 1))
adGenAOSMediaGatewayMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2))
adGenAOSRtpSessionTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1), )
if mibBuilder.loadTexts: adGenAOSRtpSessionTable.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTable.setDescription('The AdtranOS RTP session table.')
adGenAOSRtpSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1), ).setIndexNames((0, "ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionChannelId"))
if mibBuilder.loadTexts: adGenAOSRtpSessionEntry.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionEntry.setDescription('An entry in the AdtranOS RTP session table.')
adGenAOSRtpSessionChannelId = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionChannelId.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionChannelId.setDescription('Identifier value for the media-gateway channel used by this RTP session.')
adGenAOSRtpSessionChannelIdName = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionChannelIdName.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionChannelIdName.setDescription('Identifier name of the media-gateway channel used by this RTP session.')
adGenAOSRtpSessionStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("unavailable", 0), ("available", 1), ("allocated", 2), ("active", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionStatus.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionStatus.setDescription('Current status of the RTP session.')
adGenAOSRtpSessionStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionStartTime.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionStartTime.setDescription('Start time of the current RTP session.')
adGenAOSRtpSessionDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionDuration.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionDuration.setDescription('Duration of the current RTP session.')
adGenAOSRtpSessionVocoder = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39))).clone(namedValues=NamedValues(("none", 0), ("g711ulaw", 1), ("gsm", 2), ("g723", 3), ("g711alaw", 4), ("g722", 5), ("g728", 6), ("g729a", 7), ("dynamic96", 8), ("dynamic97", 9), ("dynamic98", 10), ("dynamic99", 11), ("dynamic100", 12), ("dynamic101", 13), ("dynamic102", 14), ("dynamic103", 15), ("dynamic104", 16), ("dynamic105", 17), ("dynamic106", 18), ("dynamic107", 19), ("dynamic108", 20), ("dynamic109", 21), ("dynamic110", 22), ("dynamic111", 23), ("dynamic112", 24), ("dynamic113", 25), ("dynamic114", 26), ("dynamic115", 27), ("dynamic116", 28), ("dynamic117", 29), ("dynamic118", 30), ("dynamic119", 31), ("dynamic120", 32), ("dynamic121", 33), ("dynamic122", 34), ("dynamic123", 35), ("dynamic124", 36), ("dynamic125", 37), ("dynamic126", 38), ("dynamic127", 39)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionVocoder.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionVocoder.setDescription('Vocoder used in the current RTP session.')
adGenAOSRtpSessionVAD = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionVAD.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionVAD.setDescription('Current status of the voice activity detector.')
adGenAOSRtpSessionTdmPortDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTdmPortDescription.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTdmPortDescription.setDescription('Description of the timed-division-multiplex resource associated with this RTP session.')
adGenAOSRtpSessionLocalIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 9), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionLocalIPAddress.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionLocalIPAddress.setDescription('Local Internet Protocol address used in current RTP session.')
adGenAOSRtpSessionLocalUdpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionLocalUdpPort.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionLocalUdpPort.setDescription('Local UDP address used in current RTP session.')
adGenAOSRtpSessionSIPPortDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionSIPPortDescription.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionSIPPortDescription.setDescription('Description String of the SIP resource associated with this RTP session.')
adGenAOSRtpSessionRemoteIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 12), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRemoteIPAddress.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRemoteIPAddress.setDescription('Remote Internet Protocol address used in current RTP session.')
adGenAOSRtpSessionRemoteUdpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRemoteUdpPort.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRemoteUdpPort.setDescription('Remote UDP address used in current RTP session.')
adGenAOSRtpSessionTxFramesPerPacket = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTxFramesPerPacket.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTxFramesPerPacket.setDescription('Number of sample frames packed into a given RTP packet.')
adGenAOSRtpSessionEchoCancellerEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionEchoCancellerEnabled.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionEchoCancellerEnabled.setDescription('State (enable or disabled) of the echo-canceller.')
adGenAOSRtpSessionRxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 22), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPackets.setDescription('Number of packets received in the current RTP session.')
adGenAOSRtpSessionRxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 23), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxOctets.setDescription('Number of bytes received in the current RTP session.')
adGenAOSRtpSessionRxPacketsLost = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 24), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPacketsLost.setDescription('Number of packets lost in the current RTP session as determined by missing sequence numbers.')
adGenAOSRtpSessionRxPacketsUnknown = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 25), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxPacketsUnknown.setDescription('Number of received packets with unknown payload type.')
adGenAOSRtpSessionRxJitterBufferDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 26), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxJitterBufferDepth.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxJitterBufferDepth.setDescription('Current depth of jitter buffer in packets for this RTP session.')
adGenAOSRtpSessionRxMaxJitterBufferDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 27), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxMaxJitterBufferDepth.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxMaxJitterBufferDepth.setDescription('Maximum depth of jitter buffer in packets for this RTP session.')
adGenAOSRtpSessionRxFrameLateDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 30), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameLateDiscards.setDescription('Number of received frames that have been discarded by the jitter buffer for being late.')
adGenAOSRtpSessionRxFrameOverflows = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 31), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameOverflows.setDescription('Number of received frames that overflow jitter buffer.')
adGenAOSRtpSessionRxFrameOutOfOrders = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 33), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxFrameOutOfOrders.setDescription('Number of received frames that are out of order.')
adGenAOSRtpSessionRxSyncSource = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 34), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionRxSyncSource.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionRxSyncSource.setDescription('Sync source of the receiver.')
adGenAOSRtpSessionTxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 35), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTxPackets.setDescription('Number of packets transmitted in the current RTP session.')
adGenAOSRtpSessionTxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 36), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTxOctets.setDescription('Number of bytes transmitted in the current RTP session.')
adGenAOSRtpSessionTxSyncSource = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 37), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTxSyncSource.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTxSyncSource.setDescription('Sync source of the sender.')
# --- RTP session totals table (OID ...53.5.2.1.2) ---
# Aggregate counters accumulated over all RTP sessions (active + completed).
# Indexed by adGenAOSRtpSessionTotalsSessions.
adGenAOSRtpSessionTotalsTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2), )
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTable.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTable.setDescription('The AdtranOS RTP session totals table.')
adGenAOSRtpSessionTotalsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1), ).setIndexNames((0, "ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsSessions"))
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsEntry.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsEntry.setDescription('An entry in the AdtranOS RTP session totals table.')
adGenAOSRtpSessionTotalsSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsSessions.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsSessions.setDescription('The totals number RTP sessions that have occured including sessions still currently active.')
adGenAOSRtpSessionTotalsSessionDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsSessionDuration.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsSessionDuration.setDescription('Total duration for all RTP sessions.')
adGenAOSRtpSessionTotalsRxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPackets.setDescription('Number of packets recieved for all sessions.')
adGenAOSRtpSessionTotalsRxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxOctets.setDescription('Number of octets recieved for all sessions.')
# Column .5 is marked 'obsolete' in the source MIB (see 2012-08-22 revision
# note later in the file); it is kept for backward compatibility.
adGenAOSRtpSessionTotalsRxPacketsLost = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPacketsLost.setDescription('Number of recieve packets lost for all sessions.')
adGenAOSRtpSessionTotalsRxPacketsUnknown = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxPacketsUnknown.setDescription('Number of received packets with unknown payload type during all sessions.')
adGenAOSRtpSessionTotalsTxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTxPackets.setDescription('Number of packets transmited for all sessions.')
adGenAOSRtpSessionTotalsTxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTxOctets.setDescription('Number of octets transmited for all sessions.')
adGenAOSRtpSessionTotalsRxFrameLateDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameLateDiscards.setDescription('Number of frames received late and discarded by the jitter buffer.')
# NOTE: sub-identifier 10 is intentionally absent — column numbering here
# mirrors the source MIB, which skips it.
adGenAOSRtpSessionTotalsRxFrameOverflows = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameOverflows.setDescription('Number of received frames that overflow the jitter buffer.')
adGenAOSRtpSessionTotalsRxFrameOutOfOrders = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsRxFrameOutOfOrders.setDescription('Number of received frames that are declared out-of-order by the jitter buffer.')
# Column .13 is read-write: writing it clears the accumulated totals.
adGenAOSRtpSessionTotalsClearCounters = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsClearCounters.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsClearCounters.setDescription('Clear the accumulated totals for all RTP sessions.')
adGenAOSRtpSessionTotalsTimeSinceLastClearCounters = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 14), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTimeSinceLastClearCounters.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpSessionTotalsTimeSinceLastClearCounters.setDescription('Time elapsed since last clear counters for RTP session totals.')
# --- Media-gateway processor information table (OID ...53.5.2.1.3) ---
# One row per media-gateway (DSP) processor, indexed by its identifier.
adGenAOSMediaGatewayInfoTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3), )
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoTable.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoTable.setDescription('The AdtranOS media-gateway processor information table.')
adGenAOSMediaGatewayInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1), ).setIndexNames((0, "ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoIdentifier"))
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoEntry.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoEntry.setDescription('An entry in the AdtranOS RTP session table.')
adGenAOSMediaGatewayInfoIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoIdentifier.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoIdentifier.setDescription('The indentifier of the media-gateway processor.')
adGenAOSMediaGatewayInfoSoftwareVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoSoftwareVersion.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoSoftwareVersion.setDescription('The software version running on the media-gateway processor.')
adGenAOSMediaGatewayInfoUtilization = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUtilization.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUtilization.setDescription('Current processor utilization of the media-gateway processor.')
# NOTE(review): the source MIB re-uses the same description text for the
# maximum-utilization column (.4) as for the current utilization (.3).
adGenAOSMediaGatewayInfoUtilizationMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUtilizationMaximum.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUtilizationMaximum.setDescription('Current processor utilization of the media-gateway processor.')
adGenAOSMediaGatewayInfoFreePacketBuffers = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoFreePacketBuffers.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoFreePacketBuffers.setDescription('Current number of free packet buffers on the media-gateway processor.')
adGenAOSMediaGatewayInfoUptime = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUptime.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoUptime.setDescription('Current uptime of the media-gateway processor.')
# --- RTP channel totals table (OID ...53.5.2.1.4) ---
# Per-channel accumulated RTP counters, indexed by channel identifier.
adGenAOSRtpChannelTotalTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4), )
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTable.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTable.setDescription('The AdtranOS Media-gateway channel totals table.')
adGenAOSRtpChannelTotalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1), ).setIndexNames((0, "ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalId"))
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalEntry.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalEntry.setDescription('An entry in the AdtranOS RTP session table.')
adGenAOSRtpChannelTotalId = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalId.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalId.setDescription('Identifier value for the channel on the media-gateway processor.')
adGenAOSRtpChannelTotalIdName = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalIdName.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalIdName.setDescription('Identifier name of the channel on the media-gateway processor.')
adGenAOSRtpChannelTotalSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalSessions.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalSessions.setDescription('Number of RTP sessions that have transpired on a given media-gateway channel.')
adGenAOSRtpChannelTotalSessionDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalSessionDuration.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalSessionDuration.setDescription('Duration of all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPackets.setDescription('Number of received packets for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxOctets.setDescription('Number of received octets for all RTP sessions for a given media-gateway channel.')
# Columns .7, .11, .12 and .14 below are 'obsolete' per the source MIB's
# 2012-08-22 revision; they are retained for backward compatibility.
adGenAOSRtpChannelTotalRxPacketsLost = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPacketsLost.setDescription('Number of receive packets declared lost for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxPacketsUnknown = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxPacketsUnknown.setDescription('Number of receive packets declared unknown for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalTxPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTxPackets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTxPackets.setDescription('Number of transmitted packets for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalTxOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTxOctets.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalTxOctets.setDescription('The duration of all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxMaxDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxMaxDepth.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxMaxDepth.setDescription('Maximum depth fo jitter buffer in packets for this RTP session.')
adGenAOSRtpChannelTotalRxFrameLateDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameLateDiscards.setDescription('Number of late frames discarded for all RTP sessions for a given media-gateway channel.')
# NOTE: sub-identifier 13 is intentionally skipped, mirroring the source MIB.
adGenAOSRtpChannelTotalRxFrameOverflows = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameOverflows.setDescription('Number of received frames that overflow the jitter buffer for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxFrameOutOfOrders = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTotalRxFrameOutOfOrders.setDescription('Number of received frames that are declared out-of-order by the jitter buffer for all RTP sessions for a given media-gateway channel.')
# Column .16 is read-write: writing it clears the per-channel totals.
adGenAOSRtpChannelClearCounters = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: adGenAOSRtpChannelClearCounters.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelClearCounters.setDescription('Clear the accumulated channel totals for all RTP sessions.')
adGenAOSRtpChannelTimeSinceLastClearCounters = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 17), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adGenAOSRtpChannelTimeSinceLastClearCounters.setStatus('current')
if mibBuilder.loadTexts: adGenAOSRtpChannelTimeSinceLastClearCounters.setDescription('Time elapsed since last clear counters for RTP session totals.')
# --- Conformance section (OID ...53.5.2.99) ---
# MODULE-COMPLIANCE statement listing the object groups an SNMPv2 agent
# must implement for this MIB.  The version guards below call setStatus
# only on pysnmp >= 4.4.1, where ManagedMibObject.setStatus exists for
# compliance/group nodes.
adGenAOSMediaGatewayCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 1, 1)).setObjects(("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayRtpSessionGroup"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayRtpSessionTotalsGroup"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoGroup"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayRtpChannelTotalsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayCompliance = adGenAOSMediaGatewayCompliance.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayCompliance.setDescription('The compliance statement for SNMPv2 entities which implement the adGenAOSMediaGateway MIB.')
# OBJECT-GROUP: per-session RTP statistics columns.
adGenAOSMediaGatewayRtpSessionGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 1)).setObjects(("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionChannelId"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionChannelIdName"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionStatus"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionStartTime"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionDuration"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionVocoder"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionVAD"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTdmPortDescription"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionLocalIPAddress"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionLocalUdpPort"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionSIPPortDescription"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRemoteIPAddress"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRemoteUdpPort"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTxFramesPerPacket"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionEchoCancellerEnabled"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxPacketsLost"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxPacketsUnknown"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxJitterBufferDepth"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxMaxJitterBufferDepth"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxFrameLateDiscards"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxFrameOverflows"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxFrameOutOfOrders"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionRxSyncSource"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", 
"adGenAOSRtpSessionTxSyncSource"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpSessionGroup = adGenAOSMediaGatewayRtpSessionGroup.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayRtpSessionGroup.setDescription('The Media-Gateway Real-Time Protocol Session Group.')
# OBJECT-GROUP: aggregated session totals columns.
adGenAOSMediaGatewayRtpSessionTotalsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 2)).setObjects(("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsSessions"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsSessionDuration"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxPacketsLost"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxPacketsUnknown"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsTxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsTxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxFrameLateDiscards"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxFrameOverflows"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsRxFrameOutOfOrders"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsClearCounters"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpSessionTotalsTimeSinceLastClearCounters"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpSessionTotalsGroup = adGenAOSMediaGatewayRtpSessionTotalsGroup.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayRtpSessionTotalsGroup.setDescription('The Media-Gateway Real-Time Protocol Session Totals Group.')
# OBJECT-GROUP: media-gateway processor information columns.
adGenAOSMediaGatewayInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 3)).setObjects(("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoIdentifier"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoSoftwareVersion"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoUtilization"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoUtilizationMaximum"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoFreePacketBuffers"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSMediaGatewayInfoUptime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayInfoGroup = adGenAOSMediaGatewayInfoGroup.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayInfoGroup.setDescription('The Media-Gateway Information Group.')
# OBJECT-GROUP: per-channel accumulated totals columns.
adGenAOSMediaGatewayRtpChannelTotalsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 4)).setObjects(("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalId"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalIdName"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalSessions"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalSessionDuration"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxPacketsLost"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxPacketsUnknown"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalTxPackets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalTxOctets"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxMaxDepth"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxFrameLateDiscards"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxFrameOverflows"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTotalRxFrameOutOfOrders"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelClearCounters"), ("ADTRAN-AOS-MEDIAGATEWAY-MIB", "adGenAOSRtpChannelTimeSinceLastClearCounters"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpChannelTotalsGroup = adGenAOSMediaGatewayRtpChannelTotalsGroup.setStatus('current')
if mibBuilder.loadTexts: adGenAOSMediaGatewayRtpChannelTotalsGroup.setDescription('The Media-Gateway Real-Time Protocol Channel Totals Group.')
# Register every MIB object defined by this module with the builder so other
# MIB modules (and the SNMP engine) can resolve them by name.
# PYSNMP_MODULE_ID marks adGenAOSMediaGatewayMIB as this module's identity node.
mibBuilder.exportSymbols("ADTRAN-AOS-MEDIAGATEWAY-MIB", adGenAOSMediaGatewayConformance=adGenAOSMediaGatewayConformance, PYSNMP_MODULE_ID=adGenAOSMediaGatewayMIB, adGenAOSRtpSessionTxFramesPerPacket=adGenAOSRtpSessionTxFramesPerPacket, adGenAOSRtpChannelTotalSessionDuration=adGenAOSRtpChannelTotalSessionDuration, adGenAOSRtpSessionTotalsTxOctets=adGenAOSRtpSessionTotalsTxOctets, adGenAOSRtpSessionTotalsTimeSinceLastClearCounters=adGenAOSRtpSessionTotalsTimeSinceLastClearCounters, adGenAOSMediaGatewayCompliance=adGenAOSMediaGatewayCompliance, adGenAOSRtpSessionTotalsRxFrameLateDiscards=adGenAOSRtpSessionTotalsRxFrameLateDiscards, adGenAOSRtpSessionDuration=adGenAOSRtpSessionDuration, adGenAOSRtpSessionEchoCancellerEnabled=adGenAOSRtpSessionEchoCancellerEnabled, adGenAOSMediaGatewayMIB=adGenAOSMediaGatewayMIB, adGenAOSRtpSessionTotalsRxFrameOverflows=adGenAOSRtpSessionTotalsRxFrameOverflows, adGenAOSRtpSessionSIPPortDescription=adGenAOSRtpSessionSIPPortDescription, adGenAOSRtpSessionRxFrameOutOfOrders=adGenAOSRtpSessionRxFrameOutOfOrders, adGenAOSMediaGateway=adGenAOSMediaGateway, adGenAOSMediaGatewayInfoIdentifier=adGenAOSMediaGatewayInfoIdentifier, adGenAOSRtpSessionTotalsTxPackets=adGenAOSRtpSessionTotalsTxPackets, adGenAOSRtpSessionTotalsClearCounters=adGenAOSRtpSessionTotalsClearCounters, adGenAOSMediaGatewayObjects=adGenAOSMediaGatewayObjects, adGenAOSRtpSessionRxOctets=adGenAOSRtpSessionRxOctets, adGenAOSMediaGatewayInfoFreePacketBuffers=adGenAOSMediaGatewayInfoFreePacketBuffers, adGenAOSMediaGatewayInfoUtilization=adGenAOSMediaGatewayInfoUtilization, adGenAOSRtpSessionTotalsRxFrameOutOfOrders=adGenAOSRtpSessionTotalsRxFrameOutOfOrders, adGenAOSMediaGatewayRtpChannelTotalsGroup=adGenAOSMediaGatewayRtpChannelTotalsGroup, adGenAOSRtpSessionTotalsRxOctets=adGenAOSRtpSessionTotalsRxOctets, adGenAOSRtpChannelTotalRxFrameOverflows=adGenAOSRtpChannelTotalRxFrameOverflows, adGenAOSMediaGatewayMIBGroups=adGenAOSMediaGatewayMIBGroups, 
adGenAOSRtpSessionRemoteIPAddress=adGenAOSRtpSessionRemoteIPAddress, adGenAOSMediaGatewayInfoTable=adGenAOSMediaGatewayInfoTable, adGenAOSMediaGatewayInfoUtilizationMaximum=adGenAOSMediaGatewayInfoUtilizationMaximum, adGenAOSRtpChannelTotalId=adGenAOSRtpChannelTotalId, adGenAOSRtpChannelTotalRxPacketsLost=adGenAOSRtpChannelTotalRxPacketsLost, adGenAOSRtpSessionTotalsSessionDuration=adGenAOSRtpSessionTotalsSessionDuration, adGenAOSRtpSessionRxJitterBufferDepth=adGenAOSRtpSessionRxJitterBufferDepth, adGenAOSMediaGatewayInfoUptime=adGenAOSMediaGatewayInfoUptime, adGenAOSRtpChannelTotalSessions=adGenAOSRtpChannelTotalSessions, adGenAOSRtpSessionTxSyncSource=adGenAOSRtpSessionTxSyncSource, adGenAOSRtpSessionEntry=adGenAOSRtpSessionEntry, adGenAOSRtpChannelTotalRxPackets=adGenAOSRtpChannelTotalRxPackets, adGenAOSRtpSessionRxFrameLateDiscards=adGenAOSRtpSessionRxFrameLateDiscards, adGenAOSRtpSessionRxPackets=adGenAOSRtpSessionRxPackets, adGenAOSRtpSessionChannelIdName=adGenAOSRtpSessionChannelIdName, adGenAOSRtpSessionTotalsEntry=adGenAOSRtpSessionTotalsEntry, adGenAOSRtpSessionLocalUdpPort=adGenAOSRtpSessionLocalUdpPort, adGenAOSRtpChannelTotalTxPackets=adGenAOSRtpChannelTotalTxPackets, adGenAOSRtpSessionVAD=adGenAOSRtpSessionVAD, adGenAOSRtpChannelTotalIdName=adGenAOSRtpChannelTotalIdName, adGenAOSRtpSessionTxPackets=adGenAOSRtpSessionTxPackets, adGenAOSMediaGatewayRtpSessionTotalsGroup=adGenAOSMediaGatewayRtpSessionTotalsGroup, adGenAOSRtpSessionStatus=adGenAOSRtpSessionStatus, adGenAOSRtpChannelTotalEntry=adGenAOSRtpChannelTotalEntry, adGenAOSRtpSessionRxSyncSource=adGenAOSRtpSessionRxSyncSource, adGenAOSRtpSessionRxPacketsLost=adGenAOSRtpSessionRxPacketsLost, adGenAOSRtpSessionRxMaxJitterBufferDepth=adGenAOSRtpSessionRxMaxJitterBufferDepth, adGenAOSRtpChannelTotalRxMaxDepth=adGenAOSRtpChannelTotalRxMaxDepth, adGenAOSRtpSessionTotalsRxPacketsUnknown=adGenAOSRtpSessionTotalsRxPacketsUnknown, adGenAOSMediaGatewayInfoEntry=adGenAOSMediaGatewayInfoEntry, 
adGenAOSRtpSessionTable=adGenAOSRtpSessionTable, adGenAOSRtpChannelClearCounters=adGenAOSRtpChannelClearCounters, adGenAOSRtpChannelTotalRxFrameOutOfOrders=adGenAOSRtpChannelTotalRxFrameOutOfOrders, adGenAOSRtpSessionVocoder=adGenAOSRtpSessionVocoder, adGenAOSMediaGatewayCompliances=adGenAOSMediaGatewayCompliances, adGenAOSRtpSessionTotalsTable=adGenAOSRtpSessionTotalsTable, adGenAOSMediaGatewayInfoSoftwareVersion=adGenAOSMediaGatewayInfoSoftwareVersion, adGenAOSRtpSessionTotalsSessions=adGenAOSRtpSessionTotalsSessions, adGenAOSRtpChannelTotalRxPacketsUnknown=adGenAOSRtpChannelTotalRxPacketsUnknown, adGenAOSRtpSessionRemoteUdpPort=adGenAOSRtpSessionRemoteUdpPort, adGenAOSRtpChannelTotalRxFrameLateDiscards=adGenAOSRtpChannelTotalRxFrameLateDiscards, adGenAOSRtpSessionTotalsRxPackets=adGenAOSRtpSessionTotalsRxPackets, adGenAOSRtpSessionTdmPortDescription=adGenAOSRtpSessionTdmPortDescription, adGenAOSRtpSessionTxOctets=adGenAOSRtpSessionTxOctets, adGenAOSRtpSessionTotalsRxPacketsLost=adGenAOSRtpSessionTotalsRxPacketsLost, adGenAOSMediaGatewayRtpSessionGroup=adGenAOSMediaGatewayRtpSessionGroup, adGenAOSRtpSessionLocalIPAddress=adGenAOSRtpSessionLocalIPAddress, adGenAOSRtpChannelTotalRxOctets=adGenAOSRtpChannelTotalRxOctets, adGenAOSMediaGatewayInfoGroup=adGenAOSMediaGatewayInfoGroup, adGenAOSRtpSessionRxFrameOverflows=adGenAOSRtpSessionRxFrameOverflows, adGenAOSRtpSessionChannelId=adGenAOSRtpSessionChannelId, adGenAOSRtpSessionStartTime=adGenAOSRtpSessionStartTime, adGenAOSRtpSessionRxPacketsUnknown=adGenAOSRtpSessionRxPacketsUnknown, adGenAOSRtpChannelTotalTable=adGenAOSRtpChannelTotalTable, adGenAOSRtpChannelTotalTxOctets=adGenAOSRtpChannelTotalTxOctets, adGenAOSRtpChannelTimeSinceLastClearCounters=adGenAOSRtpChannelTimeSinceLastClearCounters)
# ---------------------------------------------------------------------------
# Regenerated (snake_case) rendering of the ADTRAN-AOS-MEDIAGATEWAY-MIB
# bindings.  BUG FIX: the generated code assigned snake_case names
# (e.g. ``ad_gen_aos_media_gateway_mib``) but the follow-up setStatus /
# setDescription / setRevisions calls still referenced the old camelCase
# identifiers (``adGenAOSMediaGatewayMIB`` etc.), which this section never
# defines — so those calls raised NameError or mutated the wrong objects.
# All references below now use the snake_case names actually assigned here.
# The camelCase *method* names (setStatus, setDescription, ...) are part of
# the pysnmp API and intentionally unchanged.
# ---------------------------------------------------------------------------
(ad_gen_aos_voice,) = mibBuilder.importSymbols('ADTRAN-AOS', 'adGenAOSVoice')
(ad_identity, ad_shared) = mibBuilder.importSymbols('ADTRAN-MIB', 'adIdentity', 'adShared')
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_range_constraint, constraints_intersection, constraints_union, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueSizeConstraint')
(object_group, module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ObjectGroup', 'ModuleCompliance', 'NotificationGroup')
(object_identity, counter64, iso, notification_type, time_ticks, module_identity, bits, ip_address, integer32, mib_scalar, mib_table, mib_table_row, mib_table_column, counter32, unsigned32, mib_identifier, gauge32) = mibBuilder.importSymbols('SNMPv2-SMI', 'ObjectIdentity', 'Counter64', 'iso', 'NotificationType', 'TimeTicks', 'ModuleIdentity', 'Bits', 'IpAddress', 'Integer32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter32', 'Unsigned32', 'MibIdentifier', 'Gauge32')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
# MODULE-IDENTITY node for this MIB module.
ad_gen_aos_media_gateway_mib = module_identity((1, 3, 6, 1, 4, 1, 664, 6, 10000, 53, 5, 2))
ad_gen_aos_media_gateway_mib.setRevisions(('2012-08-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts:
        ad_gen_aos_media_gateway_mib.setRevisionsDescriptions(('The following OIDs have been obsoleted and will no longer be supported. adGenAOSRtpSessionRxPacketsLost, adGenAOSRtpSessionRxFrameLateDiscards, adGenAOSRtpSessionRxFrameOverflows, adGenAOSRtpSessionTotalsRxPacketsLost, adGenAOSRtpSessionTotalsRxFrameLateDiscards, adGenAOSRtpSessionTotalsRxFrameOverflows, adGenAOSRtpChannelTotalRxPacketsLost, adGenAOSRtpChannelTotalRxMaxDepth, adGenAOSRtpChannelTotalRxFrameLateDiscards, adGenAOSRtpChannelTotalRxFrameOverflows',))
if mibBuilder.loadTexts:
    ad_gen_aos_media_gateway_mib.setLastUpdated('200504190000Z')
if mibBuilder.loadTexts:
    ad_gen_aos_media_gateway_mib.setOrganization('ADTRAN, Inc.')
if mibBuilder.loadTexts:
    ad_gen_aos_media_gateway_mib.setContactInfo(' Technical Support Dept. Postal: ADTRAN, Inc. 901 Explorer Blvd. Huntsville, AL 35806 Tel: +1 800 923 8726 Fax: +1 256 963 6217 E-mail: support@adtran.com')
if mibBuilder.loadTexts:
    ad_gen_aos_media_gateway_mib.setDescription('The MIB module for AdtranOS Media-gateway statistics.')
# Subtree identifiers: objects (...2.1) and conformance (...2.99).
ad_gen_aos_media_gateway = mib_identifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2))
ad_gen_aos_media_gateway_objects = mib_identifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1))
ad_gen_aos_media_gateway_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99))
ad_gen_aos_media_gateway_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 1))
ad_gen_aos_media_gateway_mib_groups = mib_identifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2))
# --- RTP session table (OID ...53.5.2.1.1), indexed by channel id ---
ad_gen_aos_rtp_session_table = mib_table((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1))
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_table.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_table.setDescription('The AdtranOS RTP session table.')
ad_gen_aos_rtp_session_entry = mib_table_row((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1)).setIndexNames((0, 'ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionChannelId'))
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_entry.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_entry.setDescription('An entry in the AdtranOS RTP session table.')
ad_gen_aos_rtp_session_channel_id = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_channel_id.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_channel_id.setDescription('Identifier value for the media-gateway channel used by this RTP session.')
ad_gen_aos_rtp_session_channel_id_name = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 2), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_channel_id_name.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_channel_id_name.setDescription('Identifier name of the media-gateway channel used by this RTP session.')
# Session state enumeration: unavailable(0) / available(1) / allocated(2) / active(3).
ad_gen_aos_rtp_session_status = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3))).clone(namedValues=named_values(('unavailable', 0), ('available', 1), ('allocated', 2), ('active', 3)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_status.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_status.setDescription('Current status of the RTP session.')
ad_gen_aos_rtp_session_start_time = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 4), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_start_time.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_start_time.setDescription('Start time of the current RTP session.')
ad_gen_aos_rtp_session_duration = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 5), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_duration.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_duration.setDescription('Duration of the current RTP session.')
# Vocoder enumeration: static RTP payload types 0-8 plus dynamic 96-127.
ad_gen_aos_rtp_session_vocoder = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39))).clone(namedValues=named_values(('none', 0), ('g711ulaw', 1), ('gsm', 2), ('g723', 3), ('g711alaw', 4), ('g722', 5), ('g728', 6), ('g729a', 7), ('dynamic96', 8), ('dynamic97', 9), ('dynamic98', 10), ('dynamic99', 11), ('dynamic100', 12), ('dynamic101', 13), ('dynamic102', 14), ('dynamic103', 15), ('dynamic104', 16), ('dynamic105', 17), ('dynamic106', 18), ('dynamic107', 19), ('dynamic108', 20), ('dynamic109', 21), ('dynamic110', 22), ('dynamic111', 23), ('dynamic112', 24), ('dynamic113', 25), ('dynamic114', 26), ('dynamic115', 27), ('dynamic116', 28), ('dynamic117', 29), ('dynamic118', 30), ('dynamic119', 31), ('dynamic120', 32), ('dynamic121', 33), ('dynamic122', 34), ('dynamic123', 35), ('dynamic124', 36), ('dynamic125', 37), ('dynamic126', 38), ('dynamic127', 39)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_vocoder.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_vocoder.setDescription('Vocoder used in the current RTP session.')
ad_gen_aos_rtp_session_vad = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 7), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('disabled', 0), ('enabled', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_vad.setStatus('current')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_vad.setDescription('Current status of the voice activity detector.')
ad_gen_aos_rtp_session_tdm_port_description = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 8), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    ad_gen_aos_rtp_session_tdm_port_description.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTdmPortDescription.setDescription('Description of the timed-division-multiplex resource associated with this RTP session.')
ad_gen_aos_rtp_session_local_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 9), ip_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionLocalIPAddress.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionLocalIPAddress.setDescription('Local Internet Protocol address used in current RTP session.')
ad_gen_aos_rtp_session_local_udp_port = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 10), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionLocalUdpPort.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionLocalUdpPort.setDescription('Local UDP address used in current RTP session.')
ad_gen_aos_rtp_session_sip_port_description = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 11), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionSIPPortDescription.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionSIPPortDescription.setDescription('Description String of the SIP resource associated with this RTP session.')
ad_gen_aos_rtp_session_remote_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 12), ip_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRemoteIPAddress.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRemoteIPAddress.setDescription('Remote Internet Protocol address used in current RTP session.')
ad_gen_aos_rtp_session_remote_udp_port = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 13), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRemoteUdpPort.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRemoteUdpPort.setDescription('Remote UDP address used in current RTP session.')
ad_gen_aos_rtp_session_tx_frames_per_packet = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 14), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxFramesPerPacket.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxFramesPerPacket.setDescription('Number of sample frames packed into a given RTP packet.')
ad_gen_aos_rtp_session_echo_canceller_enabled = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 15), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('disabled', 0), ('enabled', 1)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionEchoCancellerEnabled.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionEchoCancellerEnabled.setDescription('State (enable or disabled) of the echo-canceller.')
ad_gen_aos_rtp_session_rx_packets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 22), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPackets.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPackets.setDescription('Number of packets received in the current RTP session.')
ad_gen_aos_rtp_session_rx_octets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 23), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxOctets.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxOctets.setDescription('Number of bytes received in the current RTP session.')
ad_gen_aos_rtp_session_rx_packets_lost = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 24), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPacketsLost.setDescription('Number of packets lost in the current RTP session as determined by missing sequence numbers.')
ad_gen_aos_rtp_session_rx_packets_unknown = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 25), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxPacketsUnknown.setDescription('Number of received packets with unknown payload type.')
ad_gen_aos_rtp_session_rx_jitter_buffer_depth = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 26), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxJitterBufferDepth.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxJitterBufferDepth.setDescription('Current depth of jitter buffer in packets for this RTP session.')
ad_gen_aos_rtp_session_rx_max_jitter_buffer_depth = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 27), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxMaxJitterBufferDepth.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxMaxJitterBufferDepth.setDescription('Maximum depth of jitter buffer in packets for this RTP session.')
ad_gen_aos_rtp_session_rx_frame_late_discards = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 30), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameLateDiscards.setDescription('Number of received frames that have been discarded by the jitter buffer for being late.')
ad_gen_aos_rtp_session_rx_frame_overflows = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 31), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameOverflows.setDescription('Number of received frames that overflow jitter buffer.')
ad_gen_aos_rtp_session_rx_frame_out_of_orders = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 33), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxFrameOutOfOrders.setDescription('Number of received frames that are out of order.')
ad_gen_aos_rtp_session_rx_sync_source = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 34), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxSyncSource.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionRxSyncSource.setDescription('Sync source of the receiver.')
ad_gen_aos_rtp_session_tx_packets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 35), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxPackets.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxPackets.setDescription('Number of packets transmitted in the current RTP session.')
ad_gen_aos_rtp_session_tx_octets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 36), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxOctets.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxOctets.setDescription('Number of bytes transmitted in the current RTP session.')
ad_gen_aos_rtp_session_tx_sync_source = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 1, 1, 37), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxSyncSource.setStatus('current')
if mibBuilder.loadTexts:
adGenAOSRtpSessionTxSyncSource.setDescription('Sync source of the sender.')
# ADTRAN-AOS-MEDIAGATEWAY-MIB -- RTP session totals table
# (1.3.6.1.4.1.664.5.53.5.2.1.2).
# Fix: objects are bound to the camelCase names that the loadTexts calls and
# exportSymbols() reference; the previous snake_case bindings left those
# names undefined (NameError at import).
# Description typos ("occured", "recieved", "transmited") come from the
# source MIB and are preserved verbatim.
adGenAOSRtpSessionTotalsTable = mib_table((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2))
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTable.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTable.setDescription('The AdtranOS RTP session totals table.')
adGenAOSRtpSessionTotalsEntry = mib_table_row((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1)).setIndexNames((0, 'ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsSessions'))
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsEntry.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsEntry.setDescription('An entry in the AdtranOS RTP session totals table.')
adGenAOSRtpSessionTotalsSessions = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsSessions.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsSessions.setDescription('The totals number RTP sessions that have occured including sessions still currently active.')
adGenAOSRtpSessionTotalsSessionDuration = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 2), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsSessionDuration.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsSessionDuration.setDescription('Total duration for all RTP sessions.')
adGenAOSRtpSessionTotalsRxPackets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPackets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPackets.setDescription('Number of packets recieved for all sessions.')
adGenAOSRtpSessionTotalsRxOctets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 4), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxOctets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxOctets.setDescription('Number of octets recieved for all sessions.')
adGenAOSRtpSessionTotalsRxPacketsLost = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 5), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPacketsLost.setDescription('Number of recieve packets lost for all sessions.')
adGenAOSRtpSessionTotalsRxPacketsUnknown = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 6), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxPacketsUnknown.setDescription('Number of received packets with unknown payload type during all sessions.')
adGenAOSRtpSessionTotalsTxPackets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTxPackets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTxPackets.setDescription('Number of packets transmited for all sessions.')
adGenAOSRtpSessionTotalsTxOctets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 8), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTxOctets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTxOctets.setDescription('Number of octets transmited for all sessions.')
adGenAOSRtpSessionTotalsRxFrameLateDiscards = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 9), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameLateDiscards.setDescription('Number of frames received late and discarded by the jitter buffer.')
# Sub-identifier 10 is skipped in the source MIB.
adGenAOSRtpSessionTotalsRxFrameOverflows = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 11), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameOverflows.setDescription('Number of received frames that overflow the jitter buffer.')
adGenAOSRtpSessionTotalsRxFrameOutOfOrders = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 12), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsRxFrameOutOfOrders.setDescription('Number of received frames that are declared out-of-order by the jitter buffer.')
adGenAOSRtpSessionTotalsClearCounters = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 13), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsClearCounters.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsClearCounters.setDescription('Clear the accumulated totals for all RTP sessions.')
adGenAOSRtpSessionTotalsTimeSinceLastClearCounters = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 2, 1, 14), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTimeSinceLastClearCounters.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpSessionTotalsTimeSinceLastClearCounters.setDescription('Time elapsed since last clear counters for RTP session totals.')
# ADTRAN-AOS-MEDIAGATEWAY-MIB -- media-gateway processor information table
# (1.3.6.1.4.1.664.5.53.5.2.1.3).
# Fix: bind each object to the camelCase name used by the loadTexts calls
# and by exportSymbols(); the snake_case bindings left those names undefined.
adGenAOSMediaGatewayInfoTable = mib_table((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3))
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoTable.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoTable.setDescription('The AdtranOS media-gateway processor information table.')
adGenAOSMediaGatewayInfoEntry = mib_table_row((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1)).setIndexNames((0, 'ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoIdentifier'))
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoEntry.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoEntry.setDescription('An entry in the AdtranOS RTP session table.')
adGenAOSMediaGatewayInfoIdentifier = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoIdentifier.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoIdentifier.setDescription('The indentifier of the media-gateway processor.')
adGenAOSMediaGatewayInfoSoftwareVersion = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 2), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoSoftwareVersion.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoSoftwareVersion.setDescription('The software version running on the media-gateway processor.')
adGenAOSMediaGatewayInfoUtilization = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUtilization.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUtilization.setDescription('Current processor utilization of the media-gateway processor.')
adGenAOSMediaGatewayInfoUtilizationMaximum = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 4), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUtilizationMaximum.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUtilizationMaximum.setDescription('Current processor utilization of the media-gateway processor.')
adGenAOSMediaGatewayInfoFreePacketBuffers = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 5), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoFreePacketBuffers.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoFreePacketBuffers.setDescription('Current number of free packet buffers on the media-gateway processor.')
adGenAOSMediaGatewayInfoUptime = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 3, 1, 6), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUptime.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoUptime.setDescription('Current uptime of the media-gateway processor.')
# ADTRAN-AOS-MEDIAGATEWAY-MIB -- per-channel RTP totals table
# (1.3.6.1.4.1.664.5.53.5.2.1.4).
# Fix: bind each object to the camelCase name used by the loadTexts calls
# and by exportSymbols(); snake_case bindings left those names undefined.
adGenAOSRtpChannelTotalTable = mib_table((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4))
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTable.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTable.setDescription('The AdtranOS Media-gateway channel totals table.')
adGenAOSRtpChannelTotalEntry = mib_table_row((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1)).setIndexNames((0, 'ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalId'))
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalEntry.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalEntry.setDescription('An entry in the AdtranOS RTP session table.')
adGenAOSRtpChannelTotalId = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalId.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalId.setDescription('Identifier value for the channel on the media-gateway processor.')
adGenAOSRtpChannelTotalIdName = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 2), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalIdName.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalIdName.setDescription('Identifier name of the channel on the media-gateway processor.')
adGenAOSRtpChannelTotalSessions = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalSessions.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalSessions.setDescription('Number of RTP sessions that have transpired on a given media-gateway channel.')
adGenAOSRtpChannelTotalSessionDuration = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 4), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalSessionDuration.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalSessionDuration.setDescription('Duration of all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxPackets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 5), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPackets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPackets.setDescription('Number of received packets for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxOctets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 6), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxOctets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxOctets.setDescription('Number of received octets for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxPacketsLost = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPacketsLost.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPacketsLost.setDescription('Number of receive packets declared lost for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxPacketsUnknown = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 8), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPacketsUnknown.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxPacketsUnknown.setDescription('Number of receive packets declared unknown for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalTxPackets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 9), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTxPackets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTxPackets.setDescription('Number of transmitted packets for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalTxOctets = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 10), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTxOctets.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalTxOctets.setDescription('The duration of all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxMaxDepth = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 11), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxMaxDepth.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxMaxDepth.setDescription('Maximum depth fo jitter buffer in packets for this RTP session.')
adGenAOSRtpChannelTotalRxFrameLateDiscards = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 12), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameLateDiscards.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameLateDiscards.setDescription('Number of late frames discarded for all RTP sessions for a given media-gateway channel.')
# Sub-identifier 13 is skipped in the source MIB.
adGenAOSRtpChannelTotalRxFrameOverflows = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 14), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameOverflows.setStatus('obsolete')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameOverflows.setDescription('Number of received frames that overflow the jitter buffer for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelTotalRxFrameOutOfOrders = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 15), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameOutOfOrders.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTotalRxFrameOutOfOrders.setDescription('Number of received frames that are declared out-of-order by the jitter buffer for all RTP sessions for a given media-gateway channel.')
adGenAOSRtpChannelClearCounters = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 16), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelClearCounters.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelClearCounters.setDescription('Clear the accumulated channel totals for all RTP sessions.')
adGenAOSRtpChannelTimeSinceLastClearCounters = mib_table_column((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 1, 4, 1, 17), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTimeSinceLastClearCounters.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSRtpChannelTimeSinceLastClearCounters.setDescription('Time elapsed since last clear counters for RTP session totals.')
# ADTRAN-AOS-MEDIAGATEWAY-MIB -- conformance section: the module compliance
# statement and the four object groups it references.
# Fix: each object is now bound under the camelCase name referenced by the
# version-guarded setStatus() re-assignment, the loadTexts calls, and
# exportSymbols(); the previous snake_case assignment targets meant the
# camelCase names on the right-hand sides were never defined (NameError).
adGenAOSMediaGatewayCompliance = module_compliance((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 1, 1)).setObjects(('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayRtpSessionGroup'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayRtpSessionTotalsGroup'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoGroup'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayRtpChannelTotalsGroup'))
# Newer pysnmp releases (> 4.4.0) require the setStatus() result to be
# re-bound; older releases mutate in place.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayCompliance = adGenAOSMediaGatewayCompliance.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayCompliance.setDescription('The compliance statement for SNMPv2 entities which implement the adGenAOSMediaGateway MIB.')
adGenAOSMediaGatewayRtpSessionGroup = object_group((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 1)).setObjects(('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionChannelId'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionChannelIdName'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionStatus'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionStartTime'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionDuration'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionVocoder'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionVAD'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTdmPortDescription'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionLocalIPAddress'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionLocalUdpPort'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionSIPPortDescription'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRemoteIPAddress'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRemoteUdpPort'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTxFramesPerPacket'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionEchoCancellerEnabled'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxPacketsLost'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxPacketsUnknown'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxJitterBufferDepth'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxMaxJitterBufferDepth'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxFrameLateDiscards'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxFrameOverflows'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxFrameOutOfOrders'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionRxSyncSource'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTxSyncSource'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpSessionGroup = adGenAOSMediaGatewayRtpSessionGroup.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayRtpSessionGroup.setDescription('The Media-Gateway Real-Time Protocol Session Group.')
adGenAOSMediaGatewayRtpSessionTotalsGroup = object_group((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 2)).setObjects(('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsSessions'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsSessionDuration'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxPacketsLost'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxPacketsUnknown'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsTxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsTxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxFrameLateDiscards'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxFrameOverflows'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsRxFrameOutOfOrders'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsClearCounters'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpSessionTotalsTimeSinceLastClearCounters'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpSessionTotalsGroup = adGenAOSMediaGatewayRtpSessionTotalsGroup.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayRtpSessionTotalsGroup.setDescription('The Media-Gateway Real-Time Protocol Session Totals Group.')
adGenAOSMediaGatewayInfoGroup = object_group((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 3)).setObjects(('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoIdentifier'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoSoftwareVersion'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoUtilization'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoUtilizationMaximum'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoFreePacketBuffers'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSMediaGatewayInfoUptime'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayInfoGroup = adGenAOSMediaGatewayInfoGroup.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayInfoGroup.setDescription('The Media-Gateway Information Group.')
adGenAOSMediaGatewayRtpChannelTotalsGroup = object_group((1, 3, 6, 1, 4, 1, 664, 5, 53, 5, 2, 99, 2, 4)).setObjects(('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalId'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalIdName'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalSessions'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalSessionDuration'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxPacketsLost'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxPacketsUnknown'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalTxPackets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalTxOctets'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxMaxDepth'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxFrameLateDiscards'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxFrameOverflows'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTotalRxFrameOutOfOrders'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelClearCounters'), ('ADTRAN-AOS-MEDIAGATEWAY-MIB', 'adGenAOSRtpChannelTimeSinceLastClearCounters'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    adGenAOSMediaGatewayRtpChannelTotalsGroup = adGenAOSMediaGatewayRtpChannelTotalsGroup.setStatus('current')
if mibBuilder.loadTexts:
    adGenAOSMediaGatewayRtpChannelTotalsGroup.setDescription('The Media-Gateway Real-Time Protocol Channel Totals Group.')
# Export every MIB symbol defined by this module under its camelCase name so
# other MIB modules and pysnmp resolvers can import them; PYSNMP_MODULE_ID
# identifies the module's MODULE-IDENTITY object (adGenAOSMediaGatewayMIB).
mibBuilder.exportSymbols('ADTRAN-AOS-MEDIAGATEWAY-MIB', adGenAOSMediaGatewayConformance=adGenAOSMediaGatewayConformance, PYSNMP_MODULE_ID=adGenAOSMediaGatewayMIB, adGenAOSRtpSessionTxFramesPerPacket=adGenAOSRtpSessionTxFramesPerPacket, adGenAOSRtpChannelTotalSessionDuration=adGenAOSRtpChannelTotalSessionDuration, adGenAOSRtpSessionTotalsTxOctets=adGenAOSRtpSessionTotalsTxOctets, adGenAOSRtpSessionTotalsTimeSinceLastClearCounters=adGenAOSRtpSessionTotalsTimeSinceLastClearCounters, adGenAOSMediaGatewayCompliance=adGenAOSMediaGatewayCompliance, adGenAOSRtpSessionTotalsRxFrameLateDiscards=adGenAOSRtpSessionTotalsRxFrameLateDiscards, adGenAOSRtpSessionDuration=adGenAOSRtpSessionDuration, adGenAOSRtpSessionEchoCancellerEnabled=adGenAOSRtpSessionEchoCancellerEnabled, adGenAOSMediaGatewayMIB=adGenAOSMediaGatewayMIB, adGenAOSRtpSessionTotalsRxFrameOverflows=adGenAOSRtpSessionTotalsRxFrameOverflows, adGenAOSRtpSessionSIPPortDescription=adGenAOSRtpSessionSIPPortDescription, adGenAOSRtpSessionRxFrameOutOfOrders=adGenAOSRtpSessionRxFrameOutOfOrders, adGenAOSMediaGateway=adGenAOSMediaGateway, adGenAOSMediaGatewayInfoIdentifier=adGenAOSMediaGatewayInfoIdentifier, adGenAOSRtpSessionTotalsTxPackets=adGenAOSRtpSessionTotalsTxPackets, adGenAOSRtpSessionTotalsClearCounters=adGenAOSRtpSessionTotalsClearCounters, adGenAOSMediaGatewayObjects=adGenAOSMediaGatewayObjects, adGenAOSRtpSessionRxOctets=adGenAOSRtpSessionRxOctets, adGenAOSMediaGatewayInfoFreePacketBuffers=adGenAOSMediaGatewayInfoFreePacketBuffers, adGenAOSMediaGatewayInfoUtilization=adGenAOSMediaGatewayInfoUtilization, adGenAOSRtpSessionTotalsRxFrameOutOfOrders=adGenAOSRtpSessionTotalsRxFrameOutOfOrders, adGenAOSMediaGatewayRtpChannelTotalsGroup=adGenAOSMediaGatewayRtpChannelTotalsGroup, adGenAOSRtpSessionTotalsRxOctets=adGenAOSRtpSessionTotalsRxOctets, adGenAOSRtpChannelTotalRxFrameOverflows=adGenAOSRtpChannelTotalRxFrameOverflows, adGenAOSMediaGatewayMIBGroups=adGenAOSMediaGatewayMIBGroups, 
adGenAOSRtpSessionRemoteIPAddress=adGenAOSRtpSessionRemoteIPAddress, adGenAOSMediaGatewayInfoTable=adGenAOSMediaGatewayInfoTable, adGenAOSMediaGatewayInfoUtilizationMaximum=adGenAOSMediaGatewayInfoUtilizationMaximum, adGenAOSRtpChannelTotalId=adGenAOSRtpChannelTotalId, adGenAOSRtpChannelTotalRxPacketsLost=adGenAOSRtpChannelTotalRxPacketsLost, adGenAOSRtpSessionTotalsSessionDuration=adGenAOSRtpSessionTotalsSessionDuration, adGenAOSRtpSessionRxJitterBufferDepth=adGenAOSRtpSessionRxJitterBufferDepth, adGenAOSMediaGatewayInfoUptime=adGenAOSMediaGatewayInfoUptime, adGenAOSRtpChannelTotalSessions=adGenAOSRtpChannelTotalSessions, adGenAOSRtpSessionTxSyncSource=adGenAOSRtpSessionTxSyncSource, adGenAOSRtpSessionEntry=adGenAOSRtpSessionEntry, adGenAOSRtpChannelTotalRxPackets=adGenAOSRtpChannelTotalRxPackets, adGenAOSRtpSessionRxFrameLateDiscards=adGenAOSRtpSessionRxFrameLateDiscards, adGenAOSRtpSessionRxPackets=adGenAOSRtpSessionRxPackets, adGenAOSRtpSessionChannelIdName=adGenAOSRtpSessionChannelIdName, adGenAOSRtpSessionTotalsEntry=adGenAOSRtpSessionTotalsEntry, adGenAOSRtpSessionLocalUdpPort=adGenAOSRtpSessionLocalUdpPort, adGenAOSRtpChannelTotalTxPackets=adGenAOSRtpChannelTotalTxPackets, adGenAOSRtpSessionVAD=adGenAOSRtpSessionVAD, adGenAOSRtpChannelTotalIdName=adGenAOSRtpChannelTotalIdName, adGenAOSRtpSessionTxPackets=adGenAOSRtpSessionTxPackets, adGenAOSMediaGatewayRtpSessionTotalsGroup=adGenAOSMediaGatewayRtpSessionTotalsGroup, adGenAOSRtpSessionStatus=adGenAOSRtpSessionStatus, adGenAOSRtpChannelTotalEntry=adGenAOSRtpChannelTotalEntry, adGenAOSRtpSessionRxSyncSource=adGenAOSRtpSessionRxSyncSource, adGenAOSRtpSessionRxPacketsLost=adGenAOSRtpSessionRxPacketsLost, adGenAOSRtpSessionRxMaxJitterBufferDepth=adGenAOSRtpSessionRxMaxJitterBufferDepth, adGenAOSRtpChannelTotalRxMaxDepth=adGenAOSRtpChannelTotalRxMaxDepth, adGenAOSRtpSessionTotalsRxPacketsUnknown=adGenAOSRtpSessionTotalsRxPacketsUnknown, adGenAOSMediaGatewayInfoEntry=adGenAOSMediaGatewayInfoEntry, 
adGenAOSRtpSessionTable=adGenAOSRtpSessionTable, adGenAOSRtpChannelClearCounters=adGenAOSRtpChannelClearCounters, adGenAOSRtpChannelTotalRxFrameOutOfOrders=adGenAOSRtpChannelTotalRxFrameOutOfOrders, adGenAOSRtpSessionVocoder=adGenAOSRtpSessionVocoder, adGenAOSMediaGatewayCompliances=adGenAOSMediaGatewayCompliances, adGenAOSRtpSessionTotalsTable=adGenAOSRtpSessionTotalsTable, adGenAOSMediaGatewayInfoSoftwareVersion=adGenAOSMediaGatewayInfoSoftwareVersion, adGenAOSRtpSessionTotalsSessions=adGenAOSRtpSessionTotalsSessions, adGenAOSRtpChannelTotalRxPacketsUnknown=adGenAOSRtpChannelTotalRxPacketsUnknown, adGenAOSRtpSessionRemoteUdpPort=adGenAOSRtpSessionRemoteUdpPort, adGenAOSRtpChannelTotalRxFrameLateDiscards=adGenAOSRtpChannelTotalRxFrameLateDiscards, adGenAOSRtpSessionTotalsRxPackets=adGenAOSRtpSessionTotalsRxPackets, adGenAOSRtpSessionTdmPortDescription=adGenAOSRtpSessionTdmPortDescription, adGenAOSRtpSessionTxOctets=adGenAOSRtpSessionTxOctets, adGenAOSRtpSessionTotalsRxPacketsLost=adGenAOSRtpSessionTotalsRxPacketsLost, adGenAOSMediaGatewayRtpSessionGroup=adGenAOSMediaGatewayRtpSessionGroup, adGenAOSRtpSessionLocalIPAddress=adGenAOSRtpSessionLocalIPAddress, adGenAOSRtpChannelTotalRxOctets=adGenAOSRtpChannelTotalRxOctets, adGenAOSMediaGatewayInfoGroup=adGenAOSMediaGatewayInfoGroup, adGenAOSRtpSessionRxFrameOverflows=adGenAOSRtpSessionRxFrameOverflows, adGenAOSRtpSessionChannelId=adGenAOSRtpSessionChannelId, adGenAOSRtpSessionStartTime=adGenAOSRtpSessionStartTime, adGenAOSRtpSessionRxPacketsUnknown=adGenAOSRtpSessionRxPacketsUnknown, adGenAOSRtpChannelTotalTable=adGenAOSRtpChannelTotalTable, adGenAOSRtpChannelTotalTxOctets=adGenAOSRtpChannelTotalTxOctets, adGenAOSRtpChannelTimeSinceLastClearCounters=adGenAOSRtpChannelTimeSinceLastClearCounters)
# -*- coding: utf-8 -*-
def find_floor(string: str) -> int:
    """Return the net floor reached after following the instructions.

    Each '(' moves up one floor; every other character moves down one.
    """
    ups = string.count('(')
    return ups - (len(string) - ups)
def find_position(string: str) -> int:
    """Return the 1-based position of the character that first takes the
    floor counter to -1 ('(' = up one, anything else = down one).

    Preserves the original contract: if the basement is never reached,
    the last inspected position (== len(string)) is returned.  An empty
    input now returns 0 instead of raising NameError on an unbound loop
    variable, and the counter is updated directly instead of calling
    find_floor() once per character.
    """
    floor = 0
    position = 0
    for position, step in enumerate(string, start=1):
        floor += 1 if step == '(' else -1
        if floor == -1:
            break
    return position
if __name__ == '__main__':
floors = []
for string in [
'(())',
'()()',
'(((',
'(()(()(',
'))(((((',
'())',
'))(',
')))',
')())())'
]:
floors.append(find_floor(string))
print(floors == [0, 0, 3, 3, 3, -1, -1, -3, -3])
d = '()()(()()()(()()((()((()))((()((((()()((((()))()((((())(((((((()(((((((((()(((())(()()(()((()()(()(())(()((((()((()()()((((())((((((()(()(((()())(()((((()))())(())(()(()()))))))))((((((((((((()())()())())(())))(((()()()((((()(((()(()(()()(()(()()(()(((((((())(())(())())))((()())()((((()()((()))(((()()()())))(())))((((())(((()())(())(()))(()((((()())))())((()(())(((()((((()((()(())())))((()))()()(()(()))))((((((((()())((((()()((((()(()())(((((()(()())()))())(((()))()(()(()(()((((()(())(()))(((((()()(()()()(()(((())())(((()()(()()))(((()()(((())())(()(())())()()(())()()()((()(((()(())((()()((())()))((()()))((()()())((((()(()()(()(((()))()(()))))((()(((()()()))(()(((())()(()((()())(()(()()(()())(())()(((()(()())()((((()((()))))())()))((()()()()(())()())()()()((((()))))(()(((()()(((((((())()))()((((()((())()(()())(())()))(()(()())(((((((())))(((()))())))))()))())((())(()()((())()())()))))()((()()())(())((())((((()())())()()()(((()))())))()()))())(()()()(()((((((()()))())()))()(((()(((())((((()()()(()))())()()))))())()))())((())()())(((((())())((())())))(((())(((())(((((()(((((())(()(()())())(()(())(()))(()((((()))())()))))())))((()(()))))())))(((((())()))())()))))()))))(((()))()))))((()))((()((()(()(())()())))(()()()(())()))()((((())))))))(())(()((()()))(()))(()))(()((()))))))()()((((()()))()())()))))))()()()))(()((())(()))((()()()())()(((()((((())())))()((((()(()))))))())))()()())()))(()))))(()())()))))))((())))))))())()))()((())())))(()((()))()))(())))))(()))()())()()))((()(()))()()()()))))())()()))())(())()()))()))((()))))()()(()())))))()()()))((((()))()))))(()(())))(()())))((())())(()))()))))()())))()())()())))))))))()()))))())))((())((()))))())))(((()())))))))(()))()()))(()))()))))()())))))())((((()())))))))())))()()))))))))()))()))))()))))))(())))))))))())))))))))))))))())())((())))))))))()))((())))()))))))))())()(()))))))())))))()()()())()(()()()(()())(()))()()()(()())))())())))()))))())))))))()()()()())(())())()())()))))(()()()()()))))()))())())))((()())()(
))))()))()))))(()())))()))))))))(((()))()()))))))))))))))))))))(()))(()((()))())))())(()))(()(()(())))))()(()))()))()()))))))))))))()((()())(())())()(())))))())()())((()()))))(()()))))())()(())()))))))))))))))))))))()))(()(()())))))))()()((()))()))))))((())))()))))))))((()))())()()))())()()))((()))())))))))))))(()())()))(())((()(()()))(()())(())))()())(()(())()()))))()))()(()))))))(()))))))))))(()))())))))))))())))))())))(())))))()))))(())())))))))))()(()))))()())))())(()))()())))))))))))))())()()))))()))))))())))))()))))(())(()()()()((())()))())(()))((())()))())())(())(()()))))()))(())()()((())(())))(())))()))())))))))))()(((((())())))(())()))))(())))((()))()(((((((()))))()()))(())))))()(()))))(()()))()))())))))))(()())()))))))))())))(()))())()))(())()((())())()())())(()(()))))()))))))((()())(())()()(()())))()()))(())(())(()))())))()))(()))()()))((((()))))()))((()()()))))()))()))())))(()))()))))(())))()))())()(()))()())))())))))))())))())))()()))))))(()))())())))()))()()())())))))))))))))())))()))(()()))))())))())()(())))())))))))))))))))))()()())())))))()()()((()(()))()()(())()())()))()))))()()()))))))((()))))))))()(()(()((((((()()((()())))))))))))()))())))))((())())(()))())))())))))())()()())(())))())))()())())(())))))))()()(())))()))())))())())())()))))))))()))(()()()())())())))(())())))))))()()())()))))())))())()(())())))))))()())()))(()()(())())))()(()((()()((()()(((((())(()())()))(())()))(())))(())))))))()))()))((()))()))()))))))))()))))))))((()()())(()))(((()))(())))()))((())(((())))()())))())))))((())))))(())())((((((())())()(()))()(()((()())))((())()(()(()))))(())(()()())(())))())((()(((())())))(((()())())))())()(())())((((()()))))())((()))()()()()(())(((((((()()()((()))())(()())))(())())((((()()(()))))()((())))((())()))()(((()))())))()))((()(()))(())(()((((())((((()()(()()))(((())(()))))((((()(()))(())))))((()))(()))((()(((()(()))(()(()((()(())(()(()(()(()()((()))())(((())(()(()))))(()))()()))(())))(())()(((())(()))()((((()()))))())(()))))((())()((((()(((
()))())())(((()))()())((())(())())(())()(())()(()()((((((()()))))()()(((()()))))()())()(((()(()))(()(()())(()(()))))(((((()(((())())))))(((((()((()()((())())((((((()(())(()()((()()()()()()()(()()))()(((()))()))(((((((())(((()((()())()((((())(((()(())))()((()(()()()((())((()())()))()))())))())((((((()))(()(()()()))(()((()(()(()))()((()(((()()()((())(((((())()(()))())())((()(())))(()(()())(())((())())())(((()()()(())))))())(()))))))()))))))())((()()()))((()((((((()))(((()((((()()()(((()))())()(()()(((()((()()()()())()()))()()()(()(())((()))))(()))())))))))()(()()(((((())()(()(((((()((()(()()())(()((((((((()((((((())()((((()()()((()((()((((((()))((())))))))())()))((()(()))()(()()(()((())((()()((((((((((((()())(()()()))((((()((((((())(()))())(()()((()()))()(((((((()((()()((((((()(((())))((())))((((((((()()(((((((())(((((()())(((())((())()((((()(((((((()(()(((()((((((()(((()(((((((((((()()((()()(()))((()()(((()(((())))((((())()(()(((())()(()(((())(((((((((((()))())))((((((())((()()((((()())())((((()()))((())(((((()(()()(()()()((())(()((()()((((()(((((()((()(()((((()())((((((()(((((()()(()(()((((())))(())(())(())((((()(()()((((()((((()()((()((((((())))(((((()))))()))(()((((((((()(((())())(((())))(()(()((())(((()((()()(((((()((()()(((())()(()))(((((((())(()(((((()))((()((()((()))(())())((((()((((())()(()))(((()(((((((((((((((())(((((((((()))(((()(()()()()((((((()((())()((((((((()(())(((((((((((()(()((())()((()()(()(()()((((()()((())(()((()()(()()((((()(((((((())))((((())(())()(((()()((()()((((()((()(((()((())(((()()()((((()((((()()(()(()((((((((())(()(((((())(()())(((((((()())()(()((((()((())(()()())((((()()(((()((((())(())(()()(((((((((()()))()(((())(()(()((((((())(()()())(()))()()(((()(((()((())(()(((((((()(()(()((()(((((()(()((()(()((((((()((((()()((((()(((()((())(()(()((()()((((()()(())()(())(((())(()((((((((()())(((((((((()(())()((((())))()))()()(((((()()((((((())(()()(((()(()(((((((()(()(((((((())(())((((()((()(())))((((()()())(()))((()())((((()(((((()(()(())(()(()()
())(((((()(((((()((((()()((((((((()()))(()((((((())((((())()(()(((()()()(((()(()(())(())(((((()(())())((((())(())(()(((()(((((())((((())())((()(((((((()(((())(()(()))(((((((((()((()((()()(()((((())(((()((())((((())(()(((()(((()(()((((()(((())(()(((()(()()(()(()((()()(()())(())())((()(()(((()(((()(((()()(((((((((()(((((((((()()(((()(((()())((((()(()(((()()()((())((((((((((())(()(((()((((()())((((()((()))(((()()()(((((()(((((((())((()())(()((((())((((((((())(()((()((((((((((()()((()((()()))(((()())()())()(((()())()()(()(()(((((((())()))(())()))())()()((())()((()((((()((()((())(((((()((((((()(())))(()))())(((()))((()()(()(((()))((((())()(((()))))()(()(())()(((((())(()(()(())(())()((()()()((((()(())((()())(()(()))(()(()(()()(())()()(()((())()((()))))()))((()(()()()()((()())(()))())()(()(((((((((())())((()((()((((((())()((((())(((())((()(()()()((())(()((())(((()((((()()((()(()(((((())()))()((((((()))((())(((()()))(((())(())()))(((((((())(())())()(())(((((()))()((()))()(()()((()()()()()())(((((((' # noqa
pos = find_position(d)
print(pos)
| def find_floor(string: str) -> int:
floor = 0
for direction in string:
if direction == '(':
floor += 1
else:
floor -= 1
return floor
def find_position(string: str) -> int:
floor = 0
for (i, s) in enumerate(string):
floor = floor + find_floor(s)
if floor == -1:
break
return i + 1
if __name__ == '__main__':
floors = []
for string in ['(())', '()()', '(((', '(()(()(', '))(((((', '())', '))(', ')))', ')())())']:
floors.append(find_floor(string))
print(floors == [0, 0, 3, 3, 3, -1, -1, -3, -3])
d = '()()(()()()(()()((()((()))((()((((()()((((()))()((((())(((((((()(((((((((()(((())(()()(()((()()(()(())(()((((()((()()()((((())((((((()(()(((()())(()((((()))())(())(()(()()))))))))((((((((((((()())()())())(())))(((()()()((((()(((()(()(()()(()(()()(()(((((((())(())(())())))((()())()((((()()((()))(((()()()())))(())))((((())(((()())(())(()))(()((((()())))())((()(())(((()((((()((()(())())))((()))()()(()(()))))((((((((()())((((()()((((()(()())(((((()(()())()))())(((()))()(()(()(()((((()(())(()))(((((()()(()()()(()(((())())(((()()(()()))(((()()(((())())(()(())())()()(())()()()((()(((()(())((()()((())()))((()()))((()()())((((()(()()(()(((()))()(()))))((()(((()()()))(()(((())()(()((()())(()(()()(()())(())()(((()(()())()((((()((()))))())()))((()()()()(())()())()()()((((()))))(()(((()()(((((((())()))()((((()((())()(()())(())()))(()(()())(((((((())))(((()))())))))()))())((())(()()((())()())()))))()((()()())(())((())((((()())())()()()(((()))())))()()))())(()()()(()((((((()()))())()))()(((()(((())((((()()()(()))())()()))))())()))())((())()())(((((())())((())())))(((())(((())(((((()(((((())(()(()())())(()(())(()))(()((((()))())()))))())))((()(()))))())))(((((())()))())()))))()))))(((()))()))))((()))((()((()(()(())()())))(()()()(())()))()((((())))))))(())(()((()()))(()))(()))(()((()))))))()()((((()()))()())()))))))()()()))(()((())(()))((()()()())()(((()((((())())))()((((()(()))))))())))()()())()))(()))))(()())()))))))((())))))))())()))()((())())))(()((()))()))(())))))(()))()())()()))((()(()))()()()()))))())()()))())(())()()))()))((()))))()()(()())))))()()()))((((()))()))))(()(())))(()())))((())())(()))()))))()())))()())()())))))))))()()))))())))((())((()))))())))(((()())))))))(()))()()))(()))()))))()())))))())((((()())))))))())))()()))))))))()))()))))()))))))(())))))))))())))))))))))))))())())((())))))))))()))((())))()))))))))())()(()))))))())))))()()()())()(()()()(()())(()))()()()(()())))())())))()))))())))))))()()()()())(())())()())()))))(()()()()()))))()))())())))((()())()(
))))()))()))))(()())))()))))))))(((()))()()))))))))))))))))))))(()))(()((()))())))())(()))(()(()(())))))()(()))()))()()))))))))))))()((()())(())())()(())))))())()())((()()))))(()()))))())()(())()))))))))))))))))))))()))(()(()())))))))()()((()))()))))))((())))()))))))))((()))())()()))())()()))((()))())))))))))))(()())()))(())((()(()()))(()())(())))()())(()(())()()))))()))()(()))))))(()))))))))))(()))())))))))))())))))())))(())))))()))))(())())))))))))()(()))))()())))())(()))()())))))))))))))())()()))))()))))))())))))()))))(())(()()()()((())()))())(()))((())()))())())(())(()()))))()))(())()()((())(())))(())))()))())))))))))()(((((())())))(())()))))(())))((()))()(((((((()))))()()))(())))))()(()))))(()()))()))())))))))(()())()))))))))())))(()))())()))(())()((())())()())())(()(()))))()))))))((()())(())()()(()())))()()))(())(())(()))())))()))(()))()()))((((()))))()))((()()()))))()))()))())))(()))()))))(())))()))())()(()))()())))())))))))())))())))()()))))))(()))())())))()))()()())())))))))))))))())))()))(()()))))())))())()(())))())))))))))))))))))()()())())))))()()()((()(()))()()(())()())()))()))))()()()))))))((()))))))))()(()(()((((((()()((()())))))))))))()))())))))((())())(()))())))())))))())()()())(())))())))()())())(())))))))()()(())))()))())))())())())()))))))))()))(()()()())())())))(())())))))))()()())()))))())))())()(())())))))))()())()))(()()(())())))()(()((()()((()()(((((())(()())()))(())()))(())))(())))))))()))()))((()))()))()))))))))()))))))))((()()())(()))(((()))(())))()))((())(((())))()())))())))))((())))))(())())((((((())())()(()))()(()((()())))((())()(()(()))))(())(()()())(())))())((()(((())())))(((()())())))())()(())())((((()()))))())((()))()()()()(())(((((((()()()((()))())(()())))(())())((((()()(()))))()((())))((())()))()(((()))())))()))((()(()))(())(()((((())((((()()(()()))(((())(()))))((((()(()))(())))))((()))(()))((()(((()(()))(()(()((()(())(()(()(()(()()((()))())(((())(()(()))))(()))()()))(())))(())()(((())(()))()((((()()))))())(()))))((())()((((()(((
()))())())(((()))()())((())(())())(())()(())()(()()((((((()()))))()()(((()()))))()())()(((()(()))(()(()())(()(()))))(((((()(((())())))))(((((()((()()((())())((((((()(())(()()((()()()()()()()(()()))()(((()))()))(((((((())(((()((()())()((((())(((()(())))()((()(()()()((())((()())()))()))())))())((((((()))(()(()()()))(()((()(()(()))()((()(((()()()((())(((((())()(()))())())((()(())))(()(()())(())((())())())(((()()()(())))))())(()))))))()))))))())((()()()))((()((((((()))(((()((((()()()(((()))())()(()()(((()((()()()()())()()))()()()(()(())((()))))(()))())))))))()(()()(((((())()(()(((((()((()(()()())(()((((((((()((((((())()((((()()()((()((()((((((()))((())))))))())()))((()(()))()(()()(()((())((()()((((((((((((()())(()()()))((((()((((((())(()))())(()()((()()))()(((((((()((()()((((((()(((())))((())))((((((((()()(((((((())(((((()())(((())((())()((((()(((((((()(()(((()((((((()(((()(((((((((((()()((()()(()))((()()(((()(((())))((((())()(()(((())()(()(((())(((((((((((()))())))((((((())((()()((((()())())((((()()))((())(((((()(()()(()()()((())(()((()()((((()(((((()((()(()((((()())((((((()(((((()()(()(()((((())))(())(())(())((((()(()()((((()((((()()((()((((((())))(((((()))))()))(()((((((((()(((())())(((())))(()(()((())(((()((()()(((((()((()()(((())()(()))(((((((())(()(((((()))((()((()((()))(())())((((()((((())()(()))(((()(((((((((((((((())(((((((((()))(((()(()()()()((((((()((())()((((((((()(())(((((((((((()(()((())()((()()(()(()()((((()()((())(()((()()(()()((((()(((((((())))((((())(())()(((()()((()()((((()((()(((()((())(((()()()((((()((((()()(()(()((((((((())(()(((((())(()())(((((((()())()(()((((()((())(()()())((((()()(((()((((())(())(()()(((((((((()()))()(((())(()(()((((((())(()()())(()))()()(((()(((()((())(()(((((((()(()(()((()(((((()(()((()(()((((((()((((()()((((()(((()((())(()(()((()()((((()()(())()(())(((())(()((((((((()())(((((((((()(())()((((())))()))()()(((((()()((((((())(()()(((()(()(((((((()(()(((((((())(())((((()((()(())))((((()()())(()))((()())((((()(((((()(()(())(()(()()
())(((((()(((((()((((()()((((((((()()))(()((((((())((((())()(()(((()()()(((()(()(())(())(((((()(())())((((())(())(()(((()(((((())((((())())((()(((((((()(((())(()(()))(((((((((()((()((()()(()((((())(((()((())((((())(()(((()(((()(()((((()(((())(()(((()(()()(()(()((()()(()())(())())((()(()(((()(((()(((()()(((((((((()(((((((((()()(((()(((()())((((()(()(((()()()((())((((((((((())(()(((()((((()())((((()((()))(((()()()(((((()(((((((())((()())(()((((())((((((((())(()((()((((((((((()()((()((()()))(((()())()())()(((()())()()(()(()(((((((())()))(())()))())()()((())()((()((((()((()((())(((((()((((((()(())))(()))())(((()))((()()(()(((()))((((())()(((()))))()(()(())()(((((())(()(()(())(())()((()()()((((()(())((()())(()(()))(()(()(()()(())()()(()((())()((()))))()))((()(()()()()((()())(()))())()(()(((((((((())())((()((()((((((())()((((())(((())((()(()()()((())(()((())(((()((((()()((()(()(((((())()))()((((((()))((())(((()()))(((())(())()))(((((((())(())())()(())(((((()))()((()))()(()()((()()()()()())((((((('
pos = find_position(d)
print(pos) |
def get_input():
    """Parse input.txt into a list of (ingredients, allergens) pairs.

    Each line has the shape "ing1 ing2 ... (contains a1, a2, ...)".
    Both elements of every pair are sets of strings.
    """
    foods = []
    with open("input.txt") as handle:
        for raw in handle.read().splitlines():
            parts = raw.split("(")
            # Left of '(' are the ingredients; right of it, the literal
            # prefix "contains " (9 chars) and a trailing ')' are dropped.
            ingredients = set(parts[0].split())
            allergens = set(parts[1][9:-1].split(", "))
            foods.append((ingredients, allergens))
    return foods
def get_allergen_dict(food_list):
    """Map each allergen to the ingredients that could contain it.

    For every allergen the candidate set is the intersection of the
    ingredient sets of all foods listing that allergen.

    Fixes: the first assignment now stores a copy of the caller's set
    (the original aliased it, so a later caller-side mutation would
    corrupt the result), and the membership test uses the idiomatic
    ``not in``.
    """
    algn2ingrdnt = {}
    for ingredients, allergens in food_list:
        for allergen in allergens:
            if allergen not in algn2ingrdnt:
                # Defensive copy: never alias the input set.
                algn2ingrdnt[allergen] = set(ingredients)
            else:
                algn2ingrdnt[allergen] &= ingredients
    return algn2ingrdnt
| def get_input():
with open('input.txt') as f:
lines = f.read().splitlines()
food_list = []
for line in lines:
split = line.split('(')
ingredients = set(split[0].split())
allergens = set(split[1][9:-1].split(', '))
food_list.append((ingredients, allergens))
return food_list
def get_allergen_dict(food_list):
algn2ingrdnt = {}
for (ingredients, allergens) in food_list:
for a in allergens:
if not a in algn2ingrdnt.keys():
algn2ingrdnt[a] = ingredients
else:
algn2ingrdnt[a] = algn2ingrdnt[a].intersection(ingredients)
return algn2ingrdnt |
# Read an integer a and compute a! iteratively: b accumulates 1*2*...*a.
a = int(input())
b = 1
for i in range(1, a + 1):
    b *= i
print(b) | a = int(input())
b = 1
for i in range(1, a + 1):
b *= i
print(b) |
# Read the words from the data file (one hex word per line,
# trailing newline stripped).
with open('../dane/dane.txt') as f:
    data = []
    for line in f.readlines():
        data.append(line[:-1])
# Counter of even numbers.
even = 0
for word in data:
    # Lowercase the word, parse it as a base-16 integer,
    # then check whether it is divisible by 2.
    if int(word.lower(), base=16) % 2 == 0:
        even += 1
# Print the answer (message text kept in Polish: "count of even numbers").
answer = f'6 b) Ilosc parzystych liczb: {even}'
print(answer)
| with open('../dane/dane.txt') as f:
data = []
for line in f.readlines():
data.append(line[:-1])
even = 0
for word in data:
if int(word.lower(), base=16) % 2 == 0:
even += 1
answer = f'6 b) Ilosc parzystych liczb: {even}'
print(answer) |
"""Version file."""
VERSION_INFO = (0, 1, 0, "dev0")
__version__ = ".".join((str(version) for version in VERSION_INFO))
| """Version file."""
version_info = (0, 1, 0, 'dev0')
__version__ = '.'.join((str(version) for version in VERSION_INFO)) |
# Inclusive password search range (puzzle input).
range_start = 367479
range_stop = 893698
def isvalid(n):
    """True iff n has six digits that never decrease left-to-right and
    contains at least one pair of adjacent equal digits."""
    digits = str(n)
    if len(digits) != 6:
        return False
    neighbours = list(zip(digits, digits[1:]))
    if any(left > right for left, right in neighbours):
        return False
    return any(left == right for left, right in neighbours)
# Tally how many candidates in the inclusive range pass the check.
counter = sum(1 for candidate in range(range_start, range_stop + 1)
              if isvalid(candidate))
print(counter)
| range_start = 367479
range_stop = 893698
def isvalid(n):
ns = str(n)
if not len(ns) == 6:
return False
has_same = False
for pos in range(len(ns) - 1):
if ns[pos] > ns[pos + 1]:
return False
if ns[pos] == ns[pos + 1]:
has_same = True
return has_same
counter = 0
for i in range(range_start, range_stop + 1):
if isvalid(i):
counter += 1
print(counter) |
x = f"There are {10} types of people."
binary = 'binary'
do_not = "don't"
y = "Those who know {binary} adn those who {do_not}."
print(x)
print(y)
print('I said: {x}.')
print('I also said: {y}')
hilarious = False
joke_evaluation = f"Isn't that joke so funny?! {hilarious}"
print(joke_evaluation)
w = "This is the left side of ..."
e = "a string with a right side."
print(w + e) | x = f'There are {10} types of people.'
binary = 'binary'
do_not = "don't"
y = 'Those who know {binary} adn those who {do_not}.'
print(x)
print(y)
print('I said: {x}.')
print('I also said: {y}')
hilarious = False
joke_evaluation = f"Isn't that joke so funny?! {hilarious}"
print(joke_evaluation)
w = 'This is the left side of ...'
e = 'a string with a right side.'
print(w + e) |
def romanToInt(s):
    """Convert a Roman numeral string to its integer value.

    When a symbol is larger than its left neighbour the pair is
    subtractive (e.g. IV = 4): the left symbol was already added once,
    so it is subtracted twice.

    Fix: leftover debug prints removed (one of them read s[i - 1] even
    at i == 0, i.e. the last character, which was misleading output).
    """
    roman_val = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
    int_val = 0
    for i in range(len(s)):
        if i > 0 and roman_val[s[i]] > roman_val[s[i - 1]]:
            int_val += roman_val[s[i]] - 2 * roman_val[s[i - 1]]
        else:
            int_val += roman_val[s[i]]
    return int_val
print(romanToInt('MCMXCIV')) | def roman_to_int(s):
roman_val = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
int_val = 0
for i in range(len(s)):
print(s, i, s[i], roman_val[s[i]], roman_val[s[i - 1]])
if i > 0 and roman_val[s[i]] > roman_val[s[i - 1]]:
int_val += roman_val[s[i]] - 2 * roman_val[s[i - 1]]
print(roman_val[s[i]], 2 * roman_val[s[i - 1]])
else:
int_val += roman_val[s[i]]
print(roman_val[s[i]])
return int_val
print(roman_to_int('MCMXCIV')) |
class Loan:
    """Base class for all loans.

    Fixes to the original:
    - ``LoanCount += 1`` raised UnboundLocalError; the class attribute is
      now updated as ``Loan.LoanCount`` and used for the policy number.
    - The payment formula used ``^`` (bitwise XOR, a TypeError on floats)
      where exponentiation ``**`` was intended.
    - The methods were missing ``self`` and referenced unqualified names
      (``ApplyInterest()``, ``TermsPassed``, misspelled ``Balalnce``).
    - ``ApplyInterest`` now divides APR by 100 like the payment formula
      (APR is a percentage), instead of treating it as a fraction.
    - The period method is renamed ``AdvanceTerm``: the instance
      attribute ``self.Term`` (loan length) shadowed a method named
      ``Term``, making it uncallable on any instance.
    """

    # Class-wide counter used to issue sequential policy numbers.
    LoanCount = 0

    def __init__(self, m, y, v, r, score, term):
        """Create a loan of value v at APR r (percent) over `term` months,
        originated in month m of year y, for a borrower with the given
        credit score."""
        Loan.LoanCount += 1
        self.PolicyNumber = Loan.LoanCount
        self.OriginMonth = m
        self.OriginYear = y
        self.OriginValue = v
        self.PrincipalValue = v
        self.Balance = v
        self.APR = r
        self.CreditScore = score
        self.Term = term
        self.TermsPassed = 0
        # Standard annuity payment: P * i * (1+i)^n / ((1+i)^n - 1),
        # where i is the monthly rate and n the number of terms.
        monthly_rate = self.APR / 100 / 12
        growth = (1 + monthly_rate) ** self.Term
        self.ScheduledPayment = self.OriginValue * monthly_rate * growth / (growth - 1)
        self.MissedPayments = 0

    def TakePayment(self):
        """Apply one scheduled payment, never overdrawing the balance."""
        if self.ScheduledPayment > self.Balance:
            self.ScheduledPayment = self.Balance
        self.Balance -= self.ScheduledPayment

    def ApplyInterest(self):
        """Accrue one month of interest (APR is a percentage)."""
        self.Balance += self.Balance * (self.APR / 100 / 12)

    def AdvanceTerm(self):
        """Advance one period: accrue interest, take the payment, and
        count the elapsed term."""
        self.ApplyInterest()
        self.TakePayment()
        self.TermsPassed += 1
| class Loan:
"""Base class for all loans."""
loan_count = 0
def __init__(self, m, y, v, r, score, term):
loan_count += 1
self.PolicyNumber = LoanCount
self.OriginMonth = m
self.OriginYear = y
self.OriginValue = v
self.PrincipalValue = v
self.Balance = v
self.APR = r
self.CreditScore = score
self.Term = term
self.TermsPassed = 0
self.ScheduledPayment = self.OriginValue / (((1 + self.APR / 100 / 12 ^ self.Term) - 1) / (self.APR / 100 / 12 * (1 + self.APR / 100 / 12) ^ self.Term))
self.MissedPayments = 0
def take_payment():
if self.ScheduledPayment > self.Balance:
self.ScheduledPayment = self.Balance
self.Balance -= self.ScheduledPayment
def apply_interest():
self.Balance += self.Balalnce * (self.APR / 12)
def term():
apply_interest()
take_payment()
terms_passed += 1 |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def isSymmetric(self, root: 'TreeNode') -> bool:
        """Return True if the binary tree rooted at `root` is a mirror
        of itself.

        Fixes: the original helper implicitly returned None on
        mismatching subtrees although the annotation promises bool, and
        the method crashed on an empty tree (root is None).  The
        TreeNode annotation is quoted so it is not evaluated at
        definition time (the class is declared elsewhere).
        """
        def mirror(left, right):
            # Two empty subtrees mirror each other.
            if left is None and right is None:
                return True
            # Exactly one empty, or differing values: not a mirror.
            if left is None or right is None or left.val != right.val:
                return False
            # Outer pair and inner pair must both mirror.
            return mirror(left.right, right.left) and mirror(left.left, right.right)

        return root is None or mirror(root.left, root.right)
| class Solution:
def is_symmetric(self, root: TreeNode) -> bool:
def find(r, l):
if r == None and l == None:
return True
if r != None and l != None and (r.val == l.val):
return find(r.right, l.left) and find(r.left, l.right)
return find(root.right, root.left) |
def pattern_elevan(strings):
    """Print the input as space-separated characters, dropping one more
    leading character on each successive line:

    K A T H M A N D U
    A T H M A N D U
    ...
    U
    """
    # Non-alphabetic inputs (e.g. integers) are stringified first.
    text = strings if str(strings).isalpha() else str(strings)
    for start in range(len(text)):
        print(' '.join(text[start:]))
# Demo entry point: print the pattern for a sample word.
if __name__ == '__main__':
    try:
        pattern_elevan('KATHMANDU')
    except NameError:
        print('String or Integer was expected')
| def pattern_elevan(strings):
"""Pattern elevan
K A T H M A N D U
A T H M A N D U
T H M A N D U
H M A N D U
M A N D U
A N D U
N D U
D U
U
"""
if not str(strings).isalpha():
strings = str(strings)
for x in range(len(strings)):
get_string = ' '.join(strings[x:])
print(get_string)
if __name__ == '__main__':
try:
pattern_elevan('KATHMANDU')
except NameError:
print('String or Integer was expected') |
# Created by Egor Kostan.
# GitHub: https://github.com/ikostan
# LinkedIn: https://www.linkedin.com/in/egor-kostan/
class EpidemicTestData:
    """Container for one epidemic-model test case.

    Stores the model inputs (tm, n, s0, i0, b, a) and the expected
    result, each exposed through a read-only property.
    """

    def __init__(self, tm, n, s0, i0, b, a, expected):
        self.__tm = tm
        self.__n = n
        self.__s0 = s0
        self.__i0 = i0
        self.__b = b
        self.__a = a
        self.__expected = expected

    @property
    def tm(self):
        return self.__tm

    @property
    def n(self):
        return self.__n

    @property
    def s0(self):
        return self.__s0

    @property
    def i0(self):
        return self.__i0

    @property
    def b(self):
        return self.__b

    @property
    def a(self):
        return self.__a

    @property
    def expected(self):
        return self.__expected

    def __repr__(self):
        return (f'tm: {self.tm}, n: {self.n}, s0: {self.s0}, '
                f'i0: {self.i0}, b: {self.b}, a: {self.a}, '
                f'expected: {self.expected}')

    def __eq__(self, other):
        """Equal iff the other object is the same class with identical
        field values (default Equals behavior overridden)."""
        if self.__class__ != other.__class__:
            return False
        return ((self.tm, self.n, self.s0, self.i0, self.a, self.b, self.expected) ==
                (other.tm, other.n, other.s0, other.i0, other.a, other.b, other.expected))
| class Epidemictestdata:
def __init__(self, tm, n, s0, i0, b, a, expected):
self.__tm = tm
self.__n = n
self.__s0 = s0
self.__i0 = i0
self.__b = b
self.__a = a
self.__expected = expected
@property
def tm(self):
return self.__tm
@property
def n(self):
return self.__n
@property
def s0(self):
return self.__s0
@property
def i0(self):
return self.__i0
@property
def b(self):
return self.__b
@property
def a(self):
return self.__a
@property
def expected(self):
return self.__expected
def __repr__(self):
return 'tm: {}, n: {}, s0: {}, i0: {}, b: {}, a: {}, expected: {}'.format(self.tm, self.n, self.s0, self.i0, self.b, self.a, self.expected)
def __eq__(self, other):
"""
Object comparison
Override the default Equals behavior
:param other:
:return:
"""
return self.__class__ == other.__class__ and self.tm == other.tm and (self.n == other.n) and (self.s0 == other.s0) and (self.i0 == other.i0) and (self.a == other.a) and (self.b == other.b) and (self.expected == other.expected) |
class Solution:
    def solve(self, lst1, lst2):
        """Return the smallest absolute difference between any element of
        lst1 and any element of lst2 (both lists are sorted in place).

        Classic two-pointer sweep: advance the pointer behind the smaller
        value; an exact match short-circuits with the current best (0).
        Returns float('inf') if either list is empty.
        """
        lst1.sort()
        lst2.sort()
        best = float('inf')
        p = q = 0
        while p < len(lst1) and q < len(lst2):
            diff = lst1[p] - lst2[q]
            best = min(best, abs(diff))
            if diff < 0:
                p += 1
            elif diff > 0:
                q += 1
            else:
                return best
        return best
| class Solution:
def solve(self, lst1, lst2):
lst1.sort()
lst2.sort()
n = len(lst1) - 1
m = len(lst2) - 1
i = 0
j = 0
ans = float('inf')
while i <= n and j <= m:
ans = min(abs(lst1[i] - lst2[j]), ans)
if lst1[i] < lst2[j]:
i += 1
elif lst1[i] > lst2[j]:
j += 1
else:
return ans
return ans |
# Second-maximum: read n and the list, drop every occurrence of the
# largest value, then print the largest remaining one.
n = int(input())
arr = list(map(int, input().split()))
largest = max(arr)
arr = [value for value in arr if value != largest]
print(max(arr))
| n = int(input())
arr = list(map(int, input().split()))
b = max(arr)
while max(arr) == b:
arr.remove(max(arr))
print(max(arr)) |
class GraphQlVariable(object):
    """A GraphQL variable definition.

    Public attributes:
    object default_value - the variable's default value, if any.
    basestring name - the variable's name.
    GraphQlType variable_type - the variable's type.
    """

    def __init__(self, name, variable_type, default_value):
        self.default_value = default_value
        self.name = name
        self.variable_type = variable_type
| class Graphqlvariable(object):
"""A GraphQL variable definition.
Public attributes:
object default_value - A Python object indicating the default value
of the variable, if any.
basestring name - The name of the variable.
GraphQlType variable_type - The type of the variable.
"""
def __init__(self, name, variable_type, default_value):
self.name = name
self.variable_type = variable_type
self.default_value = default_value |
# https://docs.python.org/3/library/__main__.html
# Entry-point guard; intentionally a no-op placeholder.
if __name__ == "__main__":
    pass
| if __name__ == '__main__':
pass |
# Heron's formula: s is the semi-perimeter,
# area = sqrt(s * (s - a) * (s - b) * (s - c)).
print('Enter the three sides of the triangle')
a = float(input('Enter first side: '))
b = float(input('Enter second side: '))
c = float(input('Enter third side: '))
s = (a + b + c) / 2
area = (s*(s-a)*(s-b)*(s-c)) ** 0.5
print('Area of the triangle = %0.3f' %area) | print('Enter the three sides of the triangle')
a = float(input('Enter first side: '))
b = float(input('Enter second side: '))
c = float(input('Enter third side: '))
s = (a + b + c) / 2
area = (s * (s - a) * (s - b) * (s - c)) ** 0.5
print('Area of the triangle = %0.3f' % area) |
def transform_test_oa18_to_oa11_kinetics(oa18_test_stacks, oa11_kinetics_test_stacks):
    """Rewrite an OA-18 test-stack list using OA-11-kinetics class ids.

    Every input line is "<file> <start> <class>".  Classes present in
    the mapping are renumbered; lines with any other class are copied
    through unchanged.
    """
    remap = {2: 1, 3: 2, 4: 3,
             5: 4, 7: 5, 9: 6,
             11: 7, 13: 8, 15: 9,
             17: 10, 18: 11}
    out_lines = []
    with open(oa18_test_stacks) as src:
        for row in src:
            fname, starting, cls = row.split()
            old_cls = int(cls.strip())
            label = str(remap[old_cls]) if old_cls in remap else cls
            out_lines.append(fname + " " + starting + " " + label + "\n")
    with open(oa11_kinetics_test_stacks, "w") as dst:
        dst.writelines(out_lines)
transform_test_oa18_to_oa11_kinetics("/home/bassel/data/office-actions/office_actions_19/short_clips/labels/test_stack_list.txt",
"/home/bassel/data/oa_kinetics/lbls/oa18_test_stack_mapped_oa11_kinetics.txt") | def transform_test_oa18_to_oa11_kinetics(oa18_test_stacks, oa11_kinetics_test_stacks):
cls_to_new_class = {2: 1, 3: 2, 4: 3, 5: 4, 7: 5, 9: 6, 11: 7, 13: 8, 15: 9, 17: 10, 18: 11}
lines = []
with open(oa18_test_stacks) as fr:
for line in fr:
(fname, starting, cls) = line.split()
if int(cls.strip()) in cls_to_new_class:
cls = str(cls_to_new_class[int(cls.strip())])
lines.append(fname + ' ' + starting + ' ' + cls + '\n')
with open(oa11_kinetics_test_stacks, 'w') as fw:
fw.writelines(lines)
transform_test_oa18_to_oa11_kinetics('/home/bassel/data/office-actions/office_actions_19/short_clips/labels/test_stack_list.txt', '/home/bassel/data/oa_kinetics/lbls/oa18_test_stack_mapped_oa11_kinetics.txt') |
# Read n words (l is the per-word length from the input format, unused
# here), sort them lexicographically and print their concatenation.
# Fix: the original named its list `list`, shadowing the builtin, and
# wrapped input() in a redundant str().
n, l = map(int, input().split())
words = [input() for _ in range(n)]
words.sort()
print(''.join(words))
| (n, l) = map(int, input().split())
list = [str(input()) for i in range(n)]
list.sort()
s = ''.join(list)
print(s) |
def lcs(s1, s2):
    """Build longest-common-subsequence DP tables for s1 and s2.

    Returns [memo, sol] where memo[i][j] is the LCS length of s1[:i] and
    s2[:j], and sol[i][j] is a back-pointer: 'd' (diagonal, characters
    match), 'l' (value came from memo[i-1][j]) or 'u' (from memo[i][j-1]).
    The answer is memo[len(s1)][len(s2)].

    Fixes vs. the original: tables are sized (len(s1)+1) x (len(s2)+1) and
    indexed [i over s1][j over s2] (the original transposed the dimensions,
    risking IndexError), and the loops run 1..len inclusive so the zero
    row/column are pure base cases and the final cell is actually filled.
    """
    l1 = len(s1)
    l2 = len(s2)
    memo = [[0] * (l2 + 1) for _ in range(l1 + 1)]
    sol = [[0] * (l2 + 1) for _ in range(l1 + 1)]
    for i in range(1, l1 + 1):
        for j in range(1, l2 + 1):
            if s1[i - 1] == s2[j - 1]:
                memo[i][j] = memo[i - 1][j - 1] + 1
                sol[i][j] = 'd'
            elif memo[i - 1][j] >= memo[i][j - 1]:
                memo[i][j] = memo[i - 1][j]
                sol[i][j] = 'l'
            else:
                memo[i][j] = memo[i][j - 1]
                sol[i][j] = 'u'
    return [memo, sol]
def print_lcs(s1, b, i, j):
    """Print the longest common subsequence, one character per line.

    Walks the back-pointer table ``b`` (as produced by ``lcs``) from cell
    (i, j) back to the origin.  Fix vs. the original: on a diagonal move
    the matched character ``s1[i-1]`` is printed instead of the entire
    table row ``b[i]``; the unused local ``l1`` was removed.
    """
    if i == 0 or j == 0:
        return
    if b[i][j] == 'd':
        print_lcs(s1, b, i - 1, j - 1)
        print(s1[i - 1])
    elif b[i][j] == 'u':
        print_lcs(s1, b, i, j - 1)
    else:
        print_lcs(s1, b, i - 1, j)
# Demo: build the LCS tables for two sample strings and print the common
# subsequence via the recursive back-tracker.
s1 = "hey im good"
s2 = "howdy yall"
a = lcs(s1,s2)
# a[1] is the back-pointer table returned by lcs().
print_lcs(s1,a[1],len(s1),len(s2))
| def lcs(s1, s2):
l1 = len(s1)
l2 = len(s2)
memo = [[-1] * (l1 + 1) for x in range(l2 + 1)]
sol = [[None for x in range(l1 + 1)] for x in range(l2 + 1)]
for i in range(l1):
for j in range(l2):
if i == 0 or j == 0:
q = 0
memo[i][j] = 0
sol[i][j] = 0
elif s1[i - 1] == s2[j - 1]:
memo[i][j] = memo[i - 1][j - 1] + 1
sol[i][j] = 'd'
elif memo[i - 1][j] >= memo[i][j - 1]:
memo[i][j] = memo[i - 1][j]
sol[i][j] = 'l'
else:
memo[i][j] = memo[i][j - 1]
sol[i][j] = 'u'
a = []
a.append(memo)
a.append(sol)
return a
def print_lcs(s1, b, i, j):
l1 = len(s1)
if i == 0 or j == 0:
return
if b[i][j] == 'd':
print_lcs(s1, b, i - 1, j - 1)
print(b[i])
elif b[i][j] == 'u':
print_lcs(s1, b, i, j - 1)
else:
print_lcs(s1, b, i - 1, j)
s1 = 'hey im good'
s2 = 'howdy yall'
a = lcs(s1, s2)
print_lcs(s1, a[1], len(s1), len(s2)) |
class Tile:
    """A single map tile identified by its tile type."""

    def __init__(self, tile_type):
        # Fix: the original stored the value only under the misspelled
        # attribute 'tyle_type'.  Expose the correctly spelled 'tile_type'
        # and keep the old name so existing readers still work.
        self.tile_type = tile_type
        self.tyle_type = tile_type
| class Tile:
def __init__(self, tile_type):
self.tyle_type = tile_type |
# Copyright (c) 2021.
# The copyright lies with Timo Hirsch-Hoffmann, the further use is only permitted with reference to source
class ChampionMastery:
    """Plain data holder for one champion-mastery record of a summoner.

    The constructor stores every field verbatim; no validation or other
    behaviour is attached.  Field names appear to be snake_case versions of
    a champion-mastery API DTO — TODO confirm against the code that builds
    these objects.
    """
    champion_id: int
    champion_level: int
    champion_points: int
    last_play_time: int
    champion_points_since_last_level: int
    champion_points_until_next_level: int
    chest_granted: bool
    tokens_earned: int
    summoner_id: str
    def __init__(self, champion_id: int, champion_level: int, champion_points: int, last_play_time: int,
                 champion_points_since_last_level: int, champion_points_until_next_level: int, chest_granted: bool,
                 tokens_earned: int, summoner_id: str) -> None:
        """Store all fields verbatim."""
        self.champion_id = champion_id
        self.champion_level = champion_level
        self.champion_points = champion_points
        self.last_play_time = last_play_time
        self.champion_points_since_last_level = champion_points_since_last_level
        self.champion_points_until_next_level = champion_points_until_next_level
        self.chest_granted = chest_granted
        self.tokens_earned = tokens_earned
        self.summoner_id = summoner_id
| class Championmastery:
champion_id: int
champion_level: int
champion_points: int
last_play_time: int
champion_points_since_last_level: int
champion_points_until_next_level: int
chest_granted: bool
tokens_earned: int
summoner_id: str
def __init__(self, champion_id: int, champion_level: int, champion_points: int, last_play_time: int, champion_points_since_last_level: int, champion_points_until_next_level: int, chest_granted: bool, tokens_earned: int, summoner_id: str) -> None:
self.champion_id = champion_id
self.champion_level = champion_level
self.champion_points = champion_points
self.last_play_time = last_play_time
self.champion_points_since_last_level = champion_points_since_last_level
self.champion_points_until_next_level = champion_points_until_next_level
self.chest_granted = chest_granted
self.tokens_earned = tokens_earned
self.summoner_id = summoner_id |
def mode():
    """Return the most frequent element of the module-level list ``nums``.

    NOTE(review): unlike max/min/mean/median below, this takes no parameter
    and reads a global ``nums`` — confirm that is intended and align.
    On ties, the earliest element to reach the top count wins (strict >).
    """
    tally = {}
    M = nums[0]
    for x in nums:
        tally[x] = tally.get(x, 0) + 1
        if tally[x] > tally[M]:
            M = x
    return M
def max(nums):
    """Return the largest value in ``nums`` (assumes at least one element).

    Note: this deliberately shadows the builtin ``max`` in its module.
    """
    best = nums[0]
    for candidate in nums[1:]:
        if candidate > best:
            best = candidate
    return best
def min(nums):
    """Return the smallest value in ``nums`` (assumes at least one element).

    Note: this deliberately shadows the builtin ``min`` in its module.
    """
    best = nums[0]
    for candidate in nums[1:]:
        if candidate < best:
            best = candidate
    return best
def mean(nums):
    """Return the arithmetic mean of ``nums``, or None for an empty list."""
    if not nums:
        return None
    return sum(nums) / len(nums)
def median(nums):
    """Return the median of ``nums``: the middle element of the sorted
    values, or the mean of the two middle elements for an even count."""
    ordered = sorted(nums)
    half = len(ordered) // 2
    if len(ordered) % 2 == 0:
        return (ordered[half - 1] + ordered[half]) / 2
    return ordered[half]
| def mode():
tally = {}
m = nums[0]
for x in nums:
tally[x] = tally.get(x, 0) + 1
if tally[x] > tally[M]:
m = x
return M
def max(nums):
m = nums[0]
for x in nums:
if x > M:
m = x
return M
def min(nums):
m = nums[0]
for x in nums:
if x < M:
m = x
return M
def mean(nums):
if len(nums) == 0:
return None
m = 0
for x in nums:
m += x
m /= len(nums)
return M
def median(nums):
temp = sorted(nums)
if len(temp) % 2 == 0:
mid1 = int(len(temp) / 2) - 1
mid2 = int(len(temp) / 2)
return (temp[mid1] + temp[mid2]) / 2
else:
mid = int(len(temp) / 2)
return temp[mid] |
# Node/runtime configuration defaults.  These read like settings-module
# values consumed elsewhere in the project — TODO confirm against the
# settings loader.
NODE_SIGNING_KEY = NotImplemented  # sentinel: no usable default is shipped
SENTRY_DSN = None
SENTRY_EVENT_LEVEL = 'WARNING'
NODE_NETWORK_ADDRESSES: list[str] = []
NODE_PORT = 8555
APPEND_AUTO_DETECTED_NETWORK_ADDRESS = True
NODE_FEE = 4
IN_DOCKER = False
NODE_LIST_JSON_PATH = None
SYNC_BATCH_SIZE = 10
SCHEDULE_CAPACITY = 20
SUPPRESS_WARNINGS_TB = True
LOCK_DEFAULT_TIMEOUT_SECONDS = 1
USE_ON_COMMIT_HOOK = False
| node_signing_key = NotImplemented
sentry_dsn = None
sentry_event_level = 'WARNING'
node_network_addresses: list[str] = []
node_port = 8555
append_auto_detected_network_address = True
node_fee = 4
in_docker = False
node_list_json_path = None
sync_batch_size = 10
schedule_capacity = 20
suppress_warnings_tb = True
lock_default_timeout_seconds = 1
use_on_commit_hook = False |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
#Approach :
'''
Put Node2.next which is n3->n4->...nn into Node1.next
Now put entire Node1 which is n1-n3-n4... into Node2.next . with this step n1 and n2 are swapped.
In the first iteration save the pointer of new 2nd node which is node1. This pointer node keeps the link between Pari1..Pari2...Pair3....etc.
From the second iteration onwards, swap and link the first node from the pair to the second node from last iteration.
Move the pointer 2nd node of the pair.
'''
class Solution(object):
    def swapPairs(self, head: ListNode) -> ListNode:
        """Swap every two adjacent nodes of a singly linked list in place.

        Returns the new head (the original second node).  Lists of length
        0 or 1 are returned unchanged.
        """
        tempNode = head
        if tempNode is None: return None
        if tempNode.next is not None:
            # At least two nodes: the second node becomes the new head.
            newheadNode= tempNode.next
        else:
            return head
        pointerNode = ListNode()  # placeholder; rebound on the first iteration
        first=0
        while tempNode is not None and tempNode.next is not None:
            # Current pair: Node1 -> Node2; advance tempNode past the pair first.
            Node1 = tempNode
            Node2 = tempNode.next
            tempNode= tempNode.next.next
            # Reverse the pair: Node2 -> Node1 -> (rest of the list).
            Node1.next=Node2.next
            Node2.next=Node1
            if first==0:
                # Remember the tail of the first swapped pair so the next
                # pair can be linked onto it.
                pointerNode= Node1
                first=1
            else:
                # Link the previous pair's tail to this pair's new front.
                pointerNode.next=Node2
                pointerNode=Node1
        return newheadNode
| """
Put Node2.next which is n3->n4->...nn into Node1.next
Now put entire Node1 which is n1-n3-n4... into Node2.next . with this step n1 and n2 are swapped.
In the first iteration save the pointer of new 2nd node which is node1. This pointer node keeps the link between Pari1..Pari2...Pair3....etc.
From the second iteration onwards, swap and link the first node from the pair to the second node from last iteration.
Move the pointer 2nd node of the pair.
"""
class Solution(object):
def swap_pairs(self, head: ListNode) -> ListNode:
temp_node = head
if tempNode is None:
return None
if tempNode.next is not None:
newhead_node = tempNode.next
else:
return head
pointer_node = list_node()
first = 0
while tempNode is not None and tempNode.next is not None:
node1 = tempNode
node2 = tempNode.next
temp_node = tempNode.next.next
Node1.next = Node2.next
Node2.next = Node1
if first == 0:
pointer_node = Node1
first = 1
else:
pointerNode.next = Node2
pointer_node = Node1
return newheadNode |
for t in range(int(input())):
wordList=(" ".join(input())).split(" ")
numOfHy=int(input())
locOfHy=list(map(int,input().split()))
numOfHyEachLoc=[0 for i in range(len(wordList)+1)]
for i in locOfHy:
numOfHyEachLoc[i]+=1
print(f"#{t+1}",numOfHyEachLoc[0]*"-",end="")
for i in range(len(wordList)):
print(wordList[i]+numOfHyEachLoc[i+1]*"-",end="") | for t in range(int(input())):
word_list = ' '.join(input()).split(' ')
num_of_hy = int(input())
loc_of_hy = list(map(int, input().split()))
num_of_hy_each_loc = [0 for i in range(len(wordList) + 1)]
for i in locOfHy:
numOfHyEachLoc[i] += 1
print(f'#{t + 1}', numOfHyEachLoc[0] * '-', end='')
for i in range(len(wordList)):
print(wordList[i] + numOfHyEachLoc[i + 1] * '-', end='') |
"""
for i in range(size):
    c = buf[i]
    enc += table[(c // 10) * 16 + c % 10]
"""
# Recover the flag by brute-forcing each ciphertext byte against the
# substitution table embedded in the binary (encoder sketched above).
with open("babyrev.exe", "rb") as binary:
    binary.seek(0x8fb0)        # file offset of the 256-byte lookup table
    table = binary.read(0x100)
with open("enc.txt", "rb") as encrypted:
    cipher = encrypted.read()
recovered = []
for index, byte in enumerate(cipher):
    target = byte ^ (0x16 - (index % 2))  # undo the position-dependent XOR
    for candidate in range(0x100):
        if table[(candidate // 10) * 16 + (candidate % 10)] == target:
            recovered.append(chr(candidate))
            break
flag = "".join(recovered)
print(flag)
| """
for i in range(size):
c = buf[i]
enc += table[(c // 10) * 16 + c % 10]
"""
with open('babyrev.exe', 'rb') as f:
f.seek(36784)
table = f.read(256)
with open('enc.txt', 'rb') as f:
cipher = f.read()
flag = ''
for (i, c) in enumerate(cipher):
for x in range(256):
if table[x // 10 * 16 + x % 10] == c ^ 22 - i % 2:
flag += chr(x)
break
print(flag) |
# -*- coding: utf-8 -*-
class NotRecognizeCommandException(Exception):
    """Exception for a command that could not be recognized (no usages visible in this file)."""
    pass
class NotRecognizeProtocolException(Exception):
    """Exception for a protocol that could not be recognized (no usages visible in this file)."""
    pass
| class Notrecognizecommandexception(Exception):
pass
class Notrecognizeprotocolexception(Exception):
pass |
#
# @lc app=leetcode id=765 lang=python3
#
# [765] Couples Holding Hands
#
# @lc code=start
class Solution:
    def minSwapsCouples(self, row: List[int]) -> int:
        """Minimum swaps so each couple (2k, 2k+1) occupies one seat pair.

        Greedy: for every even seat i, look up where row[i]'s partner sits
        and, if it is not already at i+1, swap it into place.
        """
        d, swap={}, 0
        # d: person id -> current index of that id in row.
        for i, x in enumerate(row):
            d[x]=i
        for i in range(0, len(row), 2):
            # Partner of an even id 2k is 2k+1, and vice versa.
            partner=row[i]+1 if row[i]%2==0 else row[i]-1
            j=d[partner]
            if j-i!=1:
                row[i+1], row[j]=row[j], row[i+1]
                # Only the displaced element's index is refreshed; the stale
                # entry for the partner (now settled at i+1) is never read
                # again since pairs are finalized left to right.
                d[row[j]]=j
                swap+=1
        return swap
# @lc code=end
| class Solution:
def min_swaps_couples(self, row: List[int]) -> int:
(d, swap) = ({}, 0)
for (i, x) in enumerate(row):
d[x] = i
for i in range(0, len(row), 2):
partner = row[i] + 1 if row[i] % 2 == 0 else row[i] - 1
j = d[partner]
if j - i != 1:
(row[i + 1], row[j]) = (row[j], row[i + 1])
d[row[j]] = j
swap += 1
return swap |
# -*- coding: utf-8 -*-
'''package requirements are checked at runtime and installation time'''
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2020, NeXpy Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING, distributed with this software.
#-----------------------------------------------------------------------------
pkg_requirements = [
'numpy>=1.16',
'scipy',
'h5py',
'versioneer'
]
| """package requirements are checked at runtime and installation time"""
pkg_requirements = ['numpy>=1.16', 'scipy', 'h5py', 'versioneer'] |
# multiple.py
def fail(exception_cls):
    """Immediately raise an instance of *exception_cls* with message "Ooops"."""
    error = exception_cls("Ooops")
    raise error
# except clauses are tried in order: a broad `except Exception` placed
# first shadows the more specific ValueError handler below it.
try:
    fail(ValueError)
except Exception as exc:
    print("Can catch ValueError as Exception.")
    print(exc.__class__)
except ValueError as exc:
    print("Can catch ValueError as ValueError.")
    print(exc.__class__)
# Specific handler first: the conventional ordering — the ValueError
# branch runs and the broad handler acts only as a fallback.
try:
    fail(ValueError)
except ValueError as exc:
    print("Can catch ValueError as ValueError.")
    print(exc.__class__)
except Exception as exc:
    print("Can catch ValueError as Exception.")
    print(exc.__class__)
# A tuple lets a single except block catch any of several exception types.
try:
    fail(ValueError)
except (ValueError, Exception) as exc:
    print("Can catch multiple exceptions in a single except block")
    print(exc.__class__)
| def fail(exception_cls):
raise exception_cls('Ooops')
try:
fail(ValueError)
except Exception as exc:
print('Can catch ValueError as Exception.')
print(exc.__class__)
except ValueError as exc:
print('Can catch ValueError as ValueError.')
print(exc.__class__)
try:
fail(ValueError)
except ValueError as exc:
print('Can catch ValueError as ValueError.')
print(exc.__class__)
except Exception as exc:
print('Can catch ValueError as Exception.')
print(exc.__class__)
try:
fail(ValueError)
except (ValueError, Exception) as exc:
print('Can catch multiple exceptions in a single except block')
print(exc.__class__) |
#!/usr/bin/env python
"""
Fraunhofer IML
Department Automation and Embedded Systems
Tabsize : 4
Charset : UTF-8
"""
__author__ = "Dennis Luensch"
__maintainer__ = "Dennis Luensch"
__email__ = "dennis.luensch@iml.fraunhofer.de"
TP_EXCEPTION_PATH_ALREADY_EXISTS = "Path to vertex alreay exists!"
class TopologyException(Exception):
    """Exception carrying an arbitrary payload in ``value``.

    ``str()`` of the exception yields the ``repr`` of the payload's string
    form, so a string payload is shown with surrounding quotes.
    """

    def __init__(self, value):
        self.value = value

    def __str__(self):
        text = str(self.value)
        return repr(text)
| """
Fraunhofer IML
Department Automation and Embedded Systems
Tabsize : 4
Charset : UTF-8
"""
__author__ = 'Dennis Luensch'
__maintainer__ = 'Dennis Luensch'
__email__ = 'dennis.luensch@iml.fraunhofer.de'
tp_exception_path_already_exists = 'Path to vertex alreay exists!'
class Topologyexception(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(str(self.value)) |
# https://www.hackerrank.com/challenges/python-string-formatting/problem
def print_formatted(number):
    """Print 1..number in decimal, octal, upper-case hex and binary columns,
    each right-aligned to the width of ``number``'s binary representation."""
    width = number.bit_length()
    for value in range(1, number + 1):
        decimal = f"{value:{width}d}"
        octal = f"{value:{width}o}"
        hexadecimal = f"{value:{width}X}"
        binary = f"{value:{width}b}"
        print(decimal + " " + octal + " " + hexadecimal + " " + binary)
| def print_formatted(number):
for i in range(1, number + 1):
pad = number.bit_length()
print(f'{i:{pad}d} {i:{pad}o} {i:{pad}X} {i:{pad}b}') |
# Generic
# Building-block regex fragments composed into the EPC/GS1 URI patterns
# defined below.
# NOTE(review): these are non-raw strings containing escapes like "\d";
# Python currently passes them through, but raw strings (r"...") would be
# the safer spelling.
CPREF_COMPONENT = "([0-9A-Z-]|%2F|%23)+"  # component/part reference chars (%2F = '/', %23 = '#')
NUMERIC_COMPONENT = "(0|[1-9]\d*)"  # integer without leading zeros
GS3A3_CHAR = "((%[0-9a-fA-F])|([a-zA-Z0-9!'()*+,-.:;=_]))"  # one GS3A3-escaped character
GS3A3_COMPONENT = f"{GS3A3_CHAR}+"
PADDED_NUMERIC_COMPONENT = "\d+"  # digit run, leading zeros allowed
PADDED_NUMERIC_COMPONENT_OR_EMPTY = "\d*"
VERIFY_GS3A3_CHARS = "[a-zA-Z0-9!'()*+,-.:;=_\"%&/<>?]+"
GS1_ELEM_CHARS = "[a-zA-Z0-9!'()*+,-.:;=_\"%&/<>?]"
GS1_ELEM_CHARS_CPI = "[0-9A-Z\/\-\#]"
DIGIT = "\d"
FOUR_PADDED_NUMERIC_COMPONENTS = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}"
# EPC Pure Identity URIs
SGTIN_URI_BODY = (
f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}"
)
SGTIN_URI = f"urn:epc:id:sgtin:{SGTIN_URI_BODY}"
SSCC_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}"
SSCC_URI = f"urn:epc:id:sscc:{SSCC_URI_BODY}"
SGLN_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}"
SGLN_URI = f"urn:epc:id:sgln:{SGLN_URI_BODY}"
GRAI_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}"
GRAI_URI = f"urn:epc:id:grai:{GRAI_URI_BODY}"
GIAI_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}"
GIAI_URI = f"urn:epc:id:giai:{GIAI_URI_BODY}"
GSRN_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}"
GSRN_URI = f"urn:epc:id:gsrn:{GSRN_URI_BODY}"
GSRNP_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}"
GSRNP_URI = f"urn:epc:id:gsrnp:{GSRN_URI_BODY}"
GDTI_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}"
GDTI_URI = f"urn:epc:id:gdti:{GDTI_URI_BODY}"
CPI_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{CPREF_COMPONENT}\.{NUMERIC_COMPONENT}"
CPI_URI = f"urn:epc:id:cpi:{CPI_URI_BODY}"
SGCN_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{PADDED_NUMERIC_COMPONENT}"
SGCN_URI = f"urn:epc:id:sgcn:{SGCN_URI_BODY}"
GINC_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}"
GINC_URI = f"urn:epc:id:ginc:{GINC_URI_BODY}"
GSIN_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}"
GSIN_URI = f"urn:epc:id:gsin:{GSIN_URI_BODY}"
ITIP_URI_BODY = f"{FOUR_PADDED_NUMERIC_COMPONENTS}\.{GS3A3_COMPONENT}"
ITIP_URI = f"urn:epc:id:itip:{ITIP_URI_BODY}"
UPUI_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}"
UPUI_URI = f"urn:epc:id:upui:{UPUI_URI_BODY}"
PGLN_URI_BODY = f"{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}"
PGLN_URI = f"urn:epc:id:pgln:{PGLN_URI_BODY}"
GID_URI_BODY = f"{NUMERIC_COMPONENT}\.{NUMERIC_COMPONENT}\.{NUMERIC_COMPONENT}"
GID_URI = f"urn:epc:id:gid:{GID_URI_BODY}"
CAGE_CODE_OR_DODAAC = "([0-9A-HJ-NP-Z]){5,6}"
USDOD_URI_BODY = f"{CAGE_CODE_OR_DODAAC}\.{NUMERIC_COMPONENT}"
USDOD_URI = f"urn:epc:id:usdod:{USDOD_URI_BODY}"
ADI_CHAR = "([A-Z0-9-]|(%2F))"
ADI_URI_BODY = f"{CAGE_CODE_OR_DODAAC}\.{ADI_CHAR}*\.(%23)?{ADI_CHAR}+"
ADI_URI = f"urn:epc:id:adi:{ADI_URI_BODY}"
BIC_URI_BODY = "[A-HJ-NP-Z]{3}[JUZ][0-9]{7}"
BIC_URI = f"urn:epc:id:bic:{BIC_URI_BODY}"
IMOVN_URI_BODY = "[0-9]{7}"
IMOVN_URI = f"urn:epc:id:imovn:{IMOVN_URI_BODY}"
LGTIN_CLASS = f"urn:epc:class:lgtin:{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}"
EPC_URI = (
f"({SGTIN_URI}|{SSCC_URI}|{SGLN_URI}|{GRAI_URI}|{GIAI_URI}|{GSRN_URI}|{GSRNP_URI}|{GDTI_URI}"
f"|{CPI_URI}|{SGCN_URI}|{GINC_URI}|{GSIN_URI}|{ITIP_URI}|{UPUI_URI}|{PGLN_URI}|{GID_URI}"
f"|{USDOD_URI}|{ADI_URI}|{BIC_URI}|{IMOVN_URI}|{LGTIN_CLASS})"
)
# EPC IDPAT URIs
SGTIN_IDPAT_URI_BODY = f"sgtin:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
SSCC_IDPAT_URI_BODY = f"sscc:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
SGLN_GRAI_IDPAT_URI_BODY_MAIN = f"({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
SGLN_IDPAT_URI_BODY = f"sgln:{SGLN_GRAI_IDPAT_URI_BODY_MAIN}"
GRAI_IDPAT_URI_BODY = f"grai:{SGLN_GRAI_IDPAT_URI_BODY_MAIN}"
GIAI_IDPAT_URI_BODY = f"giai:({PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
GSRN_IDPAT_URI_BODY = f"gsrn:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
GSRNP_IDPAT_URI_BODY = f"gsrnp:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
GDTI_IDPAT_URI_BODY = f"gdti:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
CPI_IDPAT_URI_BODY = f"cpi:({PADDED_NUMERIC_COMPONENT}\.{CPREF_COMPONENT}\.{NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{CPREF_COMPONENT}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
SGCN_IDPAT_URI_BODY = f"sgcn:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
GINC_IDPAT_URI_BODY = f"ginc:({PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
GSIN_IDPAT_URI_BODY = f"gsin:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
ITIP_IDPAT_URI_BODY = f"itip:({FOUR_PADDED_NUMERIC_COMPONENTS}\.{GS3A3_COMPONENT}|{FOUR_PADDED_NUMERIC_COMPONENTS}\.\*|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.\*\.\*\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*\.\*\.\*|\*\.\*\*\.\*\.\*)"
UPUI_IDPAT_URI_BODY = f"upui:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT}\.\*|{PADDED_NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
PGLN_IDPAT_URI_BODY = f"pgln:({PADDED_NUMERIC_COMPONENT}\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}|{PADDED_NUMERIC_COMPONENT}\.\*|\*\.\*)"
GID_IDPAT_URI_BODY = f"gid:({NUMERIC_COMPONENT}\.{NUMERIC_COMPONENT}\.{NUMERIC_COMPONENT}|{NUMERIC_COMPONENT}\.{NUMERIC_COMPONENT}\.\*|{NUMERIC_COMPONENT}\.\*\.\*|\*\.\*\.\*)"
USDOD_IDPAT_URI_BODY = f"usdod:({CAGE_CODE_OR_DODAAC}\.{NUMERIC_COMPONENT}|{CAGE_CODE_OR_DODAAC}\.\*|\*\.\*)"
ADI_IDPAT_URI_BODY = f"adi:({CAGE_CODE_OR_DODAAC}\.{ADI_CHAR}*\.(%23)?{ADI_CHAR}+|{CAGE_CODE_OR_DODAAC}\.{ADI_CHAR}*\.\*|{CAGE_CODE_OR_DODAAC}\.\*\.\*|\*\.\*\.\*)"
IDPAT_BODY = (
f"({SGTIN_IDPAT_URI_BODY}|{SSCC_IDPAT_URI_BODY}|{SGLN_IDPAT_URI_BODY}|{GRAI_IDPAT_URI_BODY}"
f"|{GIAI_IDPAT_URI_BODY}|{GSRN_IDPAT_URI_BODY}|{GSRNP_IDPAT_URI_BODY}|{GDTI_IDPAT_URI_BODY}"
f"|{CPI_IDPAT_URI_BODY}|{SGCN_IDPAT_URI_BODY}|{GINC_IDPAT_URI_BODY}|{GSIN_IDPAT_URI_BODY}"
f"|{ITIP_IDPAT_URI_BODY}|{UPUI_IDPAT_URI_BODY}|{PGLN_IDPAT_URI_BODY}|{GID_IDPAT_URI_BODY}"
f"|{USDOD_IDPAT_URI_BODY}|{ADI_IDPAT_URI_BODY})"
)
IDPAT_URI = f"urn:epc:idpat:{IDPAT_BODY}"
# EPC Tag URIs
SGTIN_TAG_URI_BODY = f"(sgtin-96|sgtin-198):{NUMERIC_COMPONENT}\.{SGTIN_URI_BODY}"
SSCC_TAG_URI_BODY = f"sscc-96:{NUMERIC_COMPONENT}\.{SSCC_URI_BODY}"
SGLN_TAG_URI_BODY = f"(sgln-96|sgln-195):{NUMERIC_COMPONENT}\.{SGLN_URI_BODY}"
GRAI_TAG_URI_BODY = f"(grai-96|grai-170):{NUMERIC_COMPONENT}\.{GRAI_URI_BODY}"
GIAI_TAG_URI_BODY = f"(giai-96|giai-202):{NUMERIC_COMPONENT}\.{GIAI_URI_BODY}"
GSRN_TAG_URI_BODY = f"gsrn-96:{NUMERIC_COMPONENT}\.{GSRN_URI_BODY}"
GSRNP_TAG_URI_BODY = f"gsrnp-96:{NUMERIC_COMPONENT}\.{GSRNP_URI_BODY}"
GDTI_TAG_URI_BODY = f"(gdti-96|gdti-174):{NUMERIC_COMPONENT}\.{GDTI_URI_BODY}"
CPI_TAG_URI_BODY = f"(cpi-96|cpi-var):{NUMERIC_COMPONENT}\.{CPI_URI_BODY}"
SGCN_TAG_URI_BODY = f"sgcn-96:{NUMERIC_COMPONENT}\.{SGCN_URI_BODY}"
ITIP_TAG_URI_BODY = f"(itip-110|itip-212):{NUMERIC_COMPONENT}\.{ITIP_URI_BODY}"
GID_TAG_URI_BODY = f"gid-96:{GID_URI_BODY}"
USDOD_TAG_URI_BODY = f"usdod-96:{NUMERIC_COMPONENT}\.{USDOD_URI_BODY}"
ADI_TAG_URI_BODY = f"adi-var:{NUMERIC_COMPONENT}\.{ADI_URI_BODY}"
TAG_URI_BODY = (
f"({SGTIN_TAG_URI_BODY}|{SSCC_TAG_URI_BODY}|{SGLN_TAG_URI_BODY}|{GRAI_TAG_URI_BODY}"
f"|{GIAI_TAG_URI_BODY}|{GSRN_TAG_URI_BODY}|{GSRNP_TAG_URI_BODY}|{GDTI_TAG_URI_BODY}"
f"|{CPI_TAG_URI_BODY}|{SGCN_TAG_URI_BODY}|{ITIP_TAG_URI_BODY}|{GID_TAG_URI_BODY}"
f"|{USDOD_TAG_URI_BODY}|{ADI_TAG_URI_BODY})"
)
TAG_URI = f"urn:epc:tag:{TAG_URI_BODY}"
# GS1 element strings
SGTIN_GS1_ELEMENT_STRING = f"\(01\){DIGIT}{{14}}\(21\){GS1_ELEM_CHARS}{{1,20}}"
SSCC_GS1_ELEMENT_STRING = f"\(00\){DIGIT}{{18}}"
SGLN_GS1_ELEMENT_STRING = f"\(414\){DIGIT}{{13}}\(254\){GS1_ELEM_CHARS}{{1,20}}"
GRAI_GS1_ELEMENT_STRING = f"\(8003\)0{DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,16}}"
GIAI_GS1_ELEMENT_STRING = f"\(8004\){DIGIT}{{6,12}}{GS1_ELEM_CHARS}{{1,24}}"
GSRN_GS1_ELEMENT_STRING = f"\(8018\){DIGIT}{{18}}"
GSRNP_GS1_ELEMENT_STRING = f"\(8017\){DIGIT}{{18}}"
GDTI_GS1_ELEMENT_STRING = f"\(253\){DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,17}}"
CPI_GS1_ELEMENT_STRING = (
f"\(8010\){DIGIT}{{6,12}}{GS1_ELEM_CHARS_CPI}{{,24}}\(8011\){DIGIT}{{1,12}}"
)
SGCN_GS1_ELEMENT_STRING = f"\(255\){DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,12}}"
GINC_GS1_ELEMENT_STRING = f"\(401\){DIGIT}{{6,12}}{GS1_ELEM_CHARS}{{1,24}}"
GSIN_GS1_ELEMENT_STRING = f"\(402\){DIGIT}{{17}}"
ITIP_GS1_ELEMENT_STRING = f"\(8006\){DIGIT}{{18}}\(21\){GS1_ELEM_CHARS}{{1,20}}"
UPUI_GS1_ELEMENT_STRING = f"\(01\){DIGIT}{{14}}\(235\){GS1_ELEM_CHARS}{{1,28}}"
PGLN_GS1_ELEMENT_STRING = f"\(417\){DIGIT}{{13}}"
LGTIN_GS1_ELEMENT_STRING = f"\(01\){DIGIT}{{14}}\(10\){GS1_ELEM_CHARS}{{1,20}}"
GS1_ELEMENT_STRING = (
f"({SGTIN_GS1_ELEMENT_STRING}|{SSCC_GS1_ELEMENT_STRING}|{SGLN_GS1_ELEMENT_STRING}"
f"|{GRAI_GS1_ELEMENT_STRING}|{GIAI_GS1_ELEMENT_STRING}|{GSRN_GS1_ELEMENT_STRING}"
f"|{GSRNP_GS1_ELEMENT_STRING}|{GDTI_GS1_ELEMENT_STRING}|{CPI_GS1_ELEMENT_STRING}"
f"|{SGCN_GS1_ELEMENT_STRING}|{GINC_GS1_ELEMENT_STRING}|{GSIN_GS1_ELEMENT_STRING}"
f"|{ITIP_GS1_ELEMENT_STRING}|{UPUI_GS1_ELEMENT_STRING}|{PGLN_GS1_ELEMENT_STRING}"
f"|{LGTIN_GS1_ELEMENT_STRING})"
)
| cpref_component = '([0-9A-Z-]|%2F|%23)+'
numeric_component = '(0|[1-9]\\d*)'
gs3_a3_char = "((%[0-9a-fA-F])|([a-zA-Z0-9!'()*+,-.:;=_]))"
gs3_a3_component = f'{GS3A3_CHAR}+'
padded_numeric_component = '\\d+'
padded_numeric_component_or_empty = '\\d*'
verify_gs3_a3_chars = '[a-zA-Z0-9!\'()*+,-.:;=_"%&/<>?]+'
gs1_elem_chars = '[a-zA-Z0-9!\'()*+,-.:;=_"%&/<>?]'
gs1_elem_chars_cpi = '[0-9A-Z\\/\\-\\#]'
digit = '\\d'
four_padded_numeric_components = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}'
sgtin_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}'
sgtin_uri = f'urn:epc:id:sgtin:{SGTIN_URI_BODY}'
sscc_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}'
sscc_uri = f'urn:epc:id:sscc:{SSCC_URI_BODY}'
sgln_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}'
sgln_uri = f'urn:epc:id:sgln:{SGLN_URI_BODY}'
grai_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}'
grai_uri = f'urn:epc:id:grai:{GRAI_URI_BODY}'
giai_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}'
giai_uri = f'urn:epc:id:giai:{GIAI_URI_BODY}'
gsrn_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}'
gsrn_uri = f'urn:epc:id:gsrn:{GSRN_URI_BODY}'
gsrnp_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}'
gsrnp_uri = f'urn:epc:id:gsrnp:{GSRN_URI_BODY}'
gdti_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}'
gdti_uri = f'urn:epc:id:gdti:{GDTI_URI_BODY}'
cpi_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{CPREF_COMPONENT}\\.{NUMERIC_COMPONENT}'
cpi_uri = f'urn:epc:id:cpi:{CPI_URI_BODY}'
sgcn_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{PADDED_NUMERIC_COMPONENT}'
sgcn_uri = f'urn:epc:id:sgcn:{SGCN_URI_BODY}'
ginc_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}'
ginc_uri = f'urn:epc:id:ginc:{GINC_URI_BODY}'
gsin_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}'
gsin_uri = f'urn:epc:id:gsin:{GSIN_URI_BODY}'
itip_uri_body = f'{FOUR_PADDED_NUMERIC_COMPONENTS}\\.{GS3A3_COMPONENT}'
itip_uri = f'urn:epc:id:itip:{ITIP_URI_BODY}'
upui_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}'
upui_uri = f'urn:epc:id:upui:{UPUI_URI_BODY}'
pgln_uri_body = f'{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}'
pgln_uri = f'urn:epc:id:pgln:{PGLN_URI_BODY}'
gid_uri_body = f'{NUMERIC_COMPONENT}\\.{NUMERIC_COMPONENT}\\.{NUMERIC_COMPONENT}'
gid_uri = f'urn:epc:id:gid:{GID_URI_BODY}'
cage_code_or_dodaac = '([0-9A-HJ-NP-Z]){5,6}'
usdod_uri_body = f'{CAGE_CODE_OR_DODAAC}\\.{NUMERIC_COMPONENT}'
usdod_uri = f'urn:epc:id:usdod:{USDOD_URI_BODY}'
adi_char = '([A-Z0-9-]|(%2F))'
adi_uri_body = f'{CAGE_CODE_OR_DODAAC}\\.{ADI_CHAR}*\\.(%23)?{ADI_CHAR}+'
adi_uri = f'urn:epc:id:adi:{ADI_URI_BODY}'
bic_uri_body = '[A-HJ-NP-Z]{3}[JUZ][0-9]{7}'
bic_uri = f'urn:epc:id:bic:{BIC_URI_BODY}'
imovn_uri_body = '[0-9]{7}'
imovn_uri = f'urn:epc:id:imovn:{IMOVN_URI_BODY}'
lgtin_class = f'urn:epc:class:lgtin:{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}'
epc_uri = f'({SGTIN_URI}|{SSCC_URI}|{SGLN_URI}|{GRAI_URI}|{GIAI_URI}|{GSRN_URI}|{GSRNP_URI}|{GDTI_URI}|{CPI_URI}|{SGCN_URI}|{GINC_URI}|{GSIN_URI}|{ITIP_URI}|{UPUI_URI}|{PGLN_URI}|{GID_URI}|{USDOD_URI}|{ADI_URI}|{BIC_URI}|{IMOVN_URI}|{LGTIN_CLASS})'
sgtin_idpat_uri_body = f'sgtin:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
sscc_idpat_uri_body = f'sscc:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
sgln_grai_idpat_uri_body_main = f'({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
sgln_idpat_uri_body = f'sgln:{SGLN_GRAI_IDPAT_URI_BODY_MAIN}'
grai_idpat_uri_body = f'grai:{SGLN_GRAI_IDPAT_URI_BODY_MAIN}'
giai_idpat_uri_body = f'giai:({PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
gsrn_idpat_uri_body = f'gsrn:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
gsrnp_idpat_uri_body = f'gsrnp:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
gdti_idpat_uri_body = f'gdti:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
cpi_idpat_uri_body = f'cpi:({PADDED_NUMERIC_COMPONENT}\\.{CPREF_COMPONENT}\\.{NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{CPREF_COMPONENT}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
sgcn_idpat_uri_body = f'sgcn:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
ginc_idpat_uri_body = f'ginc:({PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
gsin_idpat_uri_body = f'gsin:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
itip_idpat_uri_body = f'itip:({FOUR_PADDED_NUMERIC_COMPONENTS}\\.{GS3A3_COMPONENT}|{FOUR_PADDED_NUMERIC_COMPONENTS}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*\\.\\*\\.\\*|\\*\\.\\*\\*\\.\\*\\.\\*)'
upui_idpat_uri_body = f'upui:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.{GS3A3_COMPONENT}|{PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT}\\.\\*|{PADDED_NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
pgln_idpat_uri_body = f'pgln:({PADDED_NUMERIC_COMPONENT}\\.{PADDED_NUMERIC_COMPONENT_OR_EMPTY}|{PADDED_NUMERIC_COMPONENT}\\.\\*|\\*\\.\\*)'
gid_idpat_uri_body = f'gid:({NUMERIC_COMPONENT}\\.{NUMERIC_COMPONENT}\\.{NUMERIC_COMPONENT}|{NUMERIC_COMPONENT}\\.{NUMERIC_COMPONENT}\\.\\*|{NUMERIC_COMPONENT}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
usdod_idpat_uri_body = f'usdod:({CAGE_CODE_OR_DODAAC}\\.{NUMERIC_COMPONENT}|{CAGE_CODE_OR_DODAAC}\\.\\*|\\*\\.\\*)'
adi_idpat_uri_body = f'adi:({CAGE_CODE_OR_DODAAC}\\.{ADI_CHAR}*\\.(%23)?{ADI_CHAR}+|{CAGE_CODE_OR_DODAAC}\\.{ADI_CHAR}*\\.\\*|{CAGE_CODE_OR_DODAAC}\\.\\*\\.\\*|\\*\\.\\*\\.\\*)'
idpat_body = f'({SGTIN_IDPAT_URI_BODY}|{SSCC_IDPAT_URI_BODY}|{SGLN_IDPAT_URI_BODY}|{GRAI_IDPAT_URI_BODY}|{GIAI_IDPAT_URI_BODY}|{GSRN_IDPAT_URI_BODY}|{GSRNP_IDPAT_URI_BODY}|{GDTI_IDPAT_URI_BODY}|{CPI_IDPAT_URI_BODY}|{SGCN_IDPAT_URI_BODY}|{GINC_IDPAT_URI_BODY}|{GSIN_IDPAT_URI_BODY}|{ITIP_IDPAT_URI_BODY}|{UPUI_IDPAT_URI_BODY}|{PGLN_IDPAT_URI_BODY}|{GID_IDPAT_URI_BODY}|{USDOD_IDPAT_URI_BODY}|{ADI_IDPAT_URI_BODY})'
idpat_uri = f'urn:epc:idpat:{IDPAT_BODY}'
sgtin_tag_uri_body = f'(sgtin-96|sgtin-198):{NUMERIC_COMPONENT}\\.{SGTIN_URI_BODY}'
sscc_tag_uri_body = f'sscc-96:{NUMERIC_COMPONENT}\\.{SSCC_URI_BODY}'
sgln_tag_uri_body = f'(sgln-96|sgln-195):{NUMERIC_COMPONENT}\\.{SGLN_URI_BODY}'
grai_tag_uri_body = f'(grai-96|grai-170):{NUMERIC_COMPONENT}\\.{GRAI_URI_BODY}'
giai_tag_uri_body = f'(giai-96|giai-202):{NUMERIC_COMPONENT}\\.{GIAI_URI_BODY}'
gsrn_tag_uri_body = f'gsrn-96:{NUMERIC_COMPONENT}\\.{GSRN_URI_BODY}'
gsrnp_tag_uri_body = f'gsrnp-96:{NUMERIC_COMPONENT}\\.{GSRNP_URI_BODY}'
gdti_tag_uri_body = f'(gdti-96|gdti-174):{NUMERIC_COMPONENT}\\.{GDTI_URI_BODY}'
cpi_tag_uri_body = f'(cpi-96|cpi-var):{NUMERIC_COMPONENT}\\.{CPI_URI_BODY}'
sgcn_tag_uri_body = f'sgcn-96:{NUMERIC_COMPONENT}\\.{SGCN_URI_BODY}'
itip_tag_uri_body = f'(itip-110|itip-212):{NUMERIC_COMPONENT}\\.{ITIP_URI_BODY}'
gid_tag_uri_body = f'gid-96:{GID_URI_BODY}'
usdod_tag_uri_body = f'usdod-96:{NUMERIC_COMPONENT}\\.{USDOD_URI_BODY}'
adi_tag_uri_body = f'adi-var:{NUMERIC_COMPONENT}\\.{ADI_URI_BODY}'
tag_uri_body = f'({SGTIN_TAG_URI_BODY}|{SSCC_TAG_URI_BODY}|{SGLN_TAG_URI_BODY}|{GRAI_TAG_URI_BODY}|{GIAI_TAG_URI_BODY}|{GSRN_TAG_URI_BODY}|{GSRNP_TAG_URI_BODY}|{GDTI_TAG_URI_BODY}|{CPI_TAG_URI_BODY}|{SGCN_TAG_URI_BODY}|{ITIP_TAG_URI_BODY}|{GID_TAG_URI_BODY}|{USDOD_TAG_URI_BODY}|{ADI_TAG_URI_BODY})'
tag_uri = f'urn:epc:tag:{TAG_URI_BODY}'
sgtin_gs1_element_string = f'\\(01\\){DIGIT}{{14}}\\(21\\){GS1_ELEM_CHARS}{{1,20}}'
sscc_gs1_element_string = f'\\(00\\){DIGIT}{{18}}'
sgln_gs1_element_string = f'\\(414\\){DIGIT}{{13}}\\(254\\){GS1_ELEM_CHARS}{{1,20}}'
grai_gs1_element_string = f'\\(8003\\)0{DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,16}}'
giai_gs1_element_string = f'\\(8004\\){DIGIT}{{6,12}}{GS1_ELEM_CHARS}{{1,24}}'
gsrn_gs1_element_string = f'\\(8018\\){DIGIT}{{18}}'
gsrnp_gs1_element_string = f'\\(8017\\){DIGIT}{{18}}'
gdti_gs1_element_string = f'\\(253\\){DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,17}}'
cpi_gs1_element_string = f'\\(8010\\){DIGIT}{{6,12}}{GS1_ELEM_CHARS_CPI}{{,24}}\\(8011\\){DIGIT}{{1,12}}'
sgcn_gs1_element_string = f'\\(255\\){DIGIT}{{13}}{GS1_ELEM_CHARS}{{1,12}}'
ginc_gs1_element_string = f'\\(401\\){DIGIT}{{6,12}}{GS1_ELEM_CHARS}{{1,24}}'
gsin_gs1_element_string = f'\\(402\\){DIGIT}{{17}}'
itip_gs1_element_string = f'\\(8006\\){DIGIT}{{18}}\\(21\\){GS1_ELEM_CHARS}{{1,20}}'
upui_gs1_element_string = f'\\(01\\){DIGIT}{{14}}\\(235\\){GS1_ELEM_CHARS}{{1,28}}'
pgln_gs1_element_string = f'\\(417\\){DIGIT}{{13}}'
lgtin_gs1_element_string = f'\\(01\\){DIGIT}{{14}}\\(10\\){GS1_ELEM_CHARS}{{1,20}}'
gs1_element_string = f'({SGTIN_GS1_ELEMENT_STRING}|{SSCC_GS1_ELEMENT_STRING}|{SGLN_GS1_ELEMENT_STRING}|{GRAI_GS1_ELEMENT_STRING}|{GIAI_GS1_ELEMENT_STRING}|{GSRN_GS1_ELEMENT_STRING}|{GSRNP_GS1_ELEMENT_STRING}|{GDTI_GS1_ELEMENT_STRING}|{CPI_GS1_ELEMENT_STRING}|{SGCN_GS1_ELEMENT_STRING}|{GINC_GS1_ELEMENT_STRING}|{GSIN_GS1_ELEMENT_STRING}|{ITIP_GS1_ELEMENT_STRING}|{UPUI_GS1_ELEMENT_STRING}|{PGLN_GS1_ELEMENT_STRING}|{LGTIN_GS1_ELEMENT_STRING})' |
def is_unique(inp_str):
    """Return True iff every character in *inp_str* occurs exactly once.

    Hash-map variant: O(n) time, O(n) space.
    """
    seen = {}
    for ch in inp_str:
        # BUG FIX: the original tested `ch in inp_str`, which is True for
        # every character of a non-empty string, so the function always
        # returned False. Membership must be checked against the dict of
        # characters already seen.
        if ch in seen:
            return False
        seen[ch] = 1
    return True
# Brute-force variant: no auxiliary data structure, O(n^2) comparisons.
def is_unique_bf(inp_str):
    """Return True iff no character occurs more than once in *inp_str*."""
    length = len(inp_str)
    for i in range(length - 1):
        # A duplicate exists iff this character reappears later on.
        if inp_str[i] in inp_str[i + 1:]:
            return False
    return True
# No extra data structure beyond a sorted copy: duplicates become adjacent.
def is_unique_sort(inp_str):
    """Return True iff no character occurs more than once in *inp_str*.

    Sort-based variant, O(n log n).  NOTE(review): the original final line
    read `return True | def is_unique(inp_str):` — everything after the
    '|' was table-extraction residue fused onto this function and has
    been removed.
    """
    inp_str = sorted(inp_str)
    for i in range(1, len(inp_str)):
        if inp_str[i] == inp_str[i - 1]:
            return False
    return True
def is_unique(inp_str):
    """Return True iff every character in *inp_str* occurs exactly once.

    Hash-map variant (duplicated block).  NOTE(review): the `def` line of
    this body was fused into the previous line by table-extraction
    residue; it is restored here.  Also fixes the membership test, which
    checked the input string (always True for non-empty input) instead of
    the dict of characters already seen.
    """
    c_dict = {}
    for c in inp_str:
        if c in c_dict:
            return False
        c_dict[c] = 1
    return True
def is_unique_bf(inp_str):
    """Brute-force uniqueness check: scan the remainder for each position."""
    for position, ch in enumerate(inp_str):
        # The last position compares against an empty slice, which is a no-op.
        if ch in inp_str[position + 1:]:
            return False
    return True
def is_unique_sort(inp_str):
    """Return True iff no character repeats in *inp_str* (sorting variant).

    After sorting, duplicates are adjacent, so one linear scan suffices
    (O(n log n) overall).  NOTE(review): trailing table-extraction residue
    (' |') was removed from the final `return True` line of this
    duplicated block.
    """
    ordered = sorted(inp_str)
    for prev, cur in zip(ordered, ordered[1:]):
        if prev == cur:
            return False
    return True
# Length of the longest substring without repeating characters
# (sliding-window over last-seen indices).
string = 'afassdfasdfa'
start = 0
maxLength = 0
mydict = {}  # last index at which each character was seen
for i, c in enumerate(string):
    last_seen = mydict.get(c)
    if last_seen is not None and last_seen >= start:
        # Repeat inside the current window: shrink the window from the left.
        start = last_seen + 1
    else:
        maxLength = max(maxLength, i - start + 1)
    mydict[c] = i
print(mydict)
print(maxLength, mydict )
# Longest substring without repeating characters (duplicate of the block
# above, snake_case naming).  NOTE(review): leading '|' table residue
# removed from the first line.
string = 'afassdfasdfa'
start = max_length = 0
mydict = {}  # last index at which each character was seen
for i, c in enumerate(string):
    if c in mydict and start <= mydict[c]:
        # Repeat inside the current window: move the window start forward.
        start = mydict[c] + 1
    else:
        # BUG FIX: the original mixed names — `max(maxLength, ...)` and
        # `print(maxLength, ...)` — which is a NameError in isolation and
        # silently reads the previous block's variable in situ. Use
        # `max_length` consistently.
        max_length = max(max_length, i - start + 1)
    mydict[c] = i
print(mydict)
print(max_length, mydict)
# NOTE(review): this is a node-gyp `binding.gyp` build specification for a
# C++ addon (UPM/MRAA include dirs, libupm-lsm9ds0, -fexceptions).  As
# Python it is a bare dict expression that is evaluated and discarded —
# presumably pasted from a JSON file; confirm whether it belongs here.
{
    "targets": [
        {
            "target_name": "addon",
            "sources": [
                "SensorFusion_Addon.cpp", "SensorFusion.cpp", "Vector3D.cpp"
            ],
            "include_dirs": [
                "/usr/include/upm/","/usr/include/mraa/"
            ],
            "libraries": [
                "/usr/lib/libupm-lsm9ds0.so"
            ],
            "cflags": [ "-fexceptions" ],
            "cflags_cc": [ "-fexceptions" ]
        }
    ]
}
| {'targets': [{'target_name': 'addon', 'sources': ['SensorFusion_Addon.cpp', 'SensorFusion.cpp', 'Vector3D.cpp'], 'include_dirs': ['/usr/include/upm/', '/usr/include/mraa/'], 'libraries': ['/usr/lib/libupm-lsm9ds0.so'], 'cflags': ['-fexceptions'], 'cflags_cc': ['-fexceptions']}]} |
def update_session_credentials(config, session_credentials, profile):
    """Store the temporary STS credentials under *profile* in *config*.

    Creates the section if it does not exist yet and returns the same
    (mutated) config object for chaining.
    """
    if not config.has_section(profile):
        config.add_section(profile)
    entries = {
        "aws_access_key_id": session_credentials.access_key,
        "aws_secret_access_key": session_credentials.secret_key,
        "aws_session_token": session_credentials.token,
    }
    for option, value in entries.items():
        config.set(profile, option, value)
    return config
def write_session_credentials(config, credentials_path):
    """Persist *config* to the credentials file at *credentials_path* (overwrites)."""
    with open(credentials_path, "w") as out_file:
        config.write(out_file)
def export_session_credentials(session_credentials, profile):
    """Print shell `export` lines for the session credentials and profile."""
    exports = (
        ("AWS_ACCESS_KEY_ID", session_credentials.access_key),
        ("AWS_SECRET_ACCESS_KEY", session_credentials.secret_key),
        ("AWS_SESSION_TOKEN", session_credentials.token),
        ("AWS_PROFILE", profile),
    )
    for name, value in exports:
        print(f"export {name}={value}")
def update_session_credentials(config, session_credentials, profile):
    """Ensure *profile* exists in *config*, then record the STS session credentials.

    Duplicated block.  NOTE(review): stray leading table-extraction
    residue ('| ') was removed from the `def` line; behavior is otherwise
    unchanged.  Returns the same (mutated) config object.
    """
    if not config.has_section(profile):
        config.add_section(profile)
    for option, value in (
        ('aws_access_key_id', session_credentials.access_key),
        ('aws_secret_access_key', session_credentials.secret_key),
        ('aws_session_token', session_credentials.token),
    ):
        config.set(profile, option, value)
    return config
def write_session_credentials(config, credentials_path):
    """Serialize *config* to *credentials_path*, replacing any existing file."""
    with open(credentials_path, 'w') as handle:
        config.write(handle)
def export_session_credentials(session_credentials, profile):
    """Emit `export VAR=value` shell lines for the session credentials."""
    lines = [
        f'export AWS_ACCESS_KEY_ID={session_credentials.access_key}',
        f'export AWS_SECRET_ACCESS_KEY={session_credentials.secret_key}',
        f'export AWS_SESSION_TOKEN={session_credentials.token}',
        f'export AWS_PROFILE={profile}',
    ]
    # One print with embedded newlines produces byte-identical output to
    # four separate print calls.
    print('\n'.join(lines))
# Demonstrates targeted exception handling on a dict of student data.
student = {
    "Name": "Shantanu",
    "Age": 21,
    "feedback": None,
}
student["lastName"] = "Kamath"
try:
    lastName = student["lastName"]
    # Deliberately raises TypeError: int + str is not defined.
    new_var = 3 + student["lastName"]
except KeyError:
    print("Error finding last name")
except TypeError as error:
    print("Cant add these two together", error)
except Exception:
    print("Unknown exception")
# NOTE(review): table-extraction residue (`| student = {...}`) was fused
# onto this final line in the original and has been removed.
print("This code executes")
# Duplicate of the block above in snake_case style.
# NOTE(review): the initial `student = {...}` line of this duplicate was
# fused onto the previous block by table-extraction residue; it is
# restored here so the block is self-contained.  The final state of
# `student` is unchanged.
student = {'Name': 'Shantanu', 'Age': 21, 'feedback': None}
student['lastName'] = 'Kamath'
try:
    last_name = student['lastName']
    # Deliberately raises TypeError: int + str is not defined.
    new_var = 3 + student['lastName']
except KeyError:
    print('Error finding last name')
except TypeError as error:
    print('Cant add these two together', error)
except Exception:
    print('Unknown exception')
print('This code executes')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.