content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
def swap(array, i, j):
temp = array[i]
array[i] = array[j]
array[j] = temp
def bubble_sort(array):
for i in range(len(array)):
for j in range(len(array)-1, i, -1):
if array[j-1] > array[j]:
swap(array, j-1, j)
return array
if __name__ == "__main__":
print(bubble_sort([5,4,8,10,6,3,2,1])) | def swap(array, i, j):
temp = array[i]
array[i] = array[j]
array[j] = temp
def bubble_sort(array):
for i in range(len(array)):
for j in range(len(array) - 1, i, -1):
if array[j - 1] > array[j]:
swap(array, j - 1, j)
return array
if __name__ == '__main__':
print(bubble_sort([5, 4, 8, 10, 6, 3, 2, 1])) |
"""
Sample Python File with No PEP8 Errors.
Used for testing the pep8.Tool
"""
def does_something(thing):
"""Do a thing and then return."""
return thing.buzz()
| """
Sample Python File with No PEP8 Errors.
Used for testing the pep8.Tool
"""
def does_something(thing):
"""Do a thing and then return."""
return thing.buzz() |
__packagename__ = "sira"
__description__ = "Systemic Infrastructure Resilience Analysis"
__url__ = "https://github.com/GeoscienceAustralia/sira"
__version__ = "0.1.0"
__author__ = "Geoscience Australia"
__email__ = "maruf.rahman@ga.gov.au"
__license__ = "Apache License, Version 2.0"
__copyright__ = "2020 %s" % __author__
| __packagename__ = 'sira'
__description__ = 'Systemic Infrastructure Resilience Analysis'
__url__ = 'https://github.com/GeoscienceAustralia/sira'
__version__ = '0.1.0'
__author__ = 'Geoscience Australia'
__email__ = 'maruf.rahman@ga.gov.au'
__license__ = 'Apache License, Version 2.0'
__copyright__ = '2020 %s' % __author__ |
def singleton(clazz):
assert clazz
assert type(clazz) == type
clazz.instance = clazz()
clazz.INSTANCE = clazz.instance
return clazz
#
| def singleton(clazz):
assert clazz
assert type(clazz) == type
clazz.instance = clazz()
clazz.INSTANCE = clazz.instance
return clazz |
class StartupPlugin:
pass
class EventHandlerPlugin:
pass
class TemplatePlugin:
pass
class SettingsPlugin:
pass
class SimpleApiPlugin:
pass
class AssetPlugin:
pass
| class Startupplugin:
pass
class Eventhandlerplugin:
pass
class Templateplugin:
pass
class Settingsplugin:
pass
class Simpleapiplugin:
pass
class Assetplugin:
pass |
"""
# Example:
not:10 # Table definition, arity 1.
1=not 0 # Assertion (as documentation/check)
0=not 1
and:0001 # Table definition, arity 2.
0=and 0 0
0=and 0 1
0=and 1 0
1=and 1 1
or:0111
0=or 0 0
1=or 0 1
1=or 1 0
1=or 1 1
mux:00110101 # Table definition, arity 3.
t<and 0 0 # Assignment, arity 0.
1=not t # Use of an assigned variable (in an assertion).
t<and 1 1 # It can be reassigned. (The last assignment to a variable persists into the next cycle.)
0=not t
mem 0 t<or t 1 # Assignment, arity 2. Use for addressable memories.
1=mem 0 t
0=mem 1 1 # Unassigned variables are 0 (unless set by the simulator, e.g. an input device).
"""
def eval_hdl(text, env):
"""`text` is a machine description following the grammar above.
`env` is a dict, for two purposes:
* sequential-circuit state (and RAM)
* I/O devices
To use this for a particular machine, call eval_hdl() in a loop,
once for each most-basic machine cycle, reading/writing the data
of any special devices from/to env."""
XXX
| """
# Example:
not:10 # Table definition, arity 1.
1=not 0 # Assertion (as documentation/check)
0=not 1
and:0001 # Table definition, arity 2.
0=and 0 0
0=and 0 1
0=and 1 0
1=and 1 1
or:0111
0=or 0 0
1=or 0 1
1=or 1 0
1=or 1 1
mux:00110101 # Table definition, arity 3.
t<and 0 0 # Assignment, arity 0.
1=not t # Use of an assigned variable (in an assertion).
t<and 1 1 # It can be reassigned. (The last assignment to a variable persists into the next cycle.)
0=not t
mem 0 t<or t 1 # Assignment, arity 2. Use for addressable memories.
1=mem 0 t
0=mem 1 1 # Unassigned variables are 0 (unless set by the simulator, e.g. an input device).
"""
def eval_hdl(text, env):
"""`text` is a machine description following the grammar above.
`env` is a dict, for two purposes:
* sequential-circuit state (and RAM)
* I/O devices
To use this for a particular machine, call eval_hdl() in a loop,
once for each most-basic machine cycle, reading/writing the data
of any special devices from/to env."""
XXX |
#!/usr/bin/env python3 -tt
print(2+3)
print("hello")
print("after commit")
| print(2 + 3)
print('hello')
print('after commit') |
#start reading file with datarefs
file = open("datarefex.txt")
line = file.read().replace("\n", "\n")
print(len(line))
#end reading file with datarefs
#convert into proper amount of parts
separated_string = line.splitlines()
print(len(separated_string))
#end
string2 = '"'
string ='": { "prefix": "'
string4 = '", "body": [ "'
string3 = '" ], "description": "WIP" },'
my_new_list = [string2 + x + string + x + string4 + x + string3 for x in separated_string]
print(my_new_list)
my_lst = my_new_list
my_lst_str = ''.join(map(str, my_lst))
print(my_lst_str)
with open('afterconv.txt', 'w') as f:
f.write(my_lst_str) | file = open('datarefex.txt')
line = file.read().replace('\n', '\n')
print(len(line))
separated_string = line.splitlines()
print(len(separated_string))
string2 = '"'
string = '": { "prefix": "'
string4 = '", "body": [ "'
string3 = '" ], "description": "WIP" },'
my_new_list = [string2 + x + string + x + string4 + x + string3 for x in separated_string]
print(my_new_list)
my_lst = my_new_list
my_lst_str = ''.join(map(str, my_lst))
print(my_lst_str)
with open('afterconv.txt', 'w') as f:
f.write(my_lst_str) |
#!/usr/bin/python
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def addTwoNumbers(self, l1, l2):
carry = 0
ret = ListNode(0)
curr = ret
while l1 != None or l2 != None or carry != 0:
if l1 == None and l2 == None:
val = carry
elif l1 == None:
val = l2.val + carry
elif l2 == None:
val = l1.val + carry
else:
val = l1.val + l2.val + carry
carry = 0
if val >= 10:
val = val - 10
carry = 1
carr.val = val
if l1 != None:
l1 = l1.next
if l2 != None:
l2 = l2.next
if l1 != None or l2 != None or carry != 0:
nextnode = ListNode(0)
curr.next = nextnode
curr = nextnode
return ret
| class Listnode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def add_two_numbers(self, l1, l2):
carry = 0
ret = list_node(0)
curr = ret
while l1 != None or l2 != None or carry != 0:
if l1 == None and l2 == None:
val = carry
elif l1 == None:
val = l2.val + carry
elif l2 == None:
val = l1.val + carry
else:
val = l1.val + l2.val + carry
carry = 0
if val >= 10:
val = val - 10
carry = 1
carr.val = val
if l1 != None:
l1 = l1.next
if l2 != None:
l2 = l2.next
if l1 != None or l2 != None or carry != 0:
nextnode = list_node(0)
curr.next = nextnode
curr = nextnode
return ret |
"""
47 / 47 test cases passed.
Runtime: 36 ms
Memory Usage: 15 MB
"""
class Solution:
def deleteAndEarn(self, nums: List[int]) -> int:
freq = [0] * (max(nums) + 1)
for num in nums:
freq[num] += num
last, curr = 0, 0
for cnt in freq:
last, curr = curr, max(curr, last + cnt)
return curr
| """
47 / 47 test cases passed.
Runtime: 36 ms
Memory Usage: 15 MB
"""
class Solution:
def delete_and_earn(self, nums: List[int]) -> int:
freq = [0] * (max(nums) + 1)
for num in nums:
freq[num] += num
(last, curr) = (0, 0)
for cnt in freq:
(last, curr) = (curr, max(curr, last + cnt))
return curr |
#program to convert a byte string to a list of integers.
word = b'Darlington'
print()
print(list(word))
print()
# the reverse operation
n = [68,97,114]
print(bytes(n)) | word = b'Darlington'
print()
print(list(word))
print()
n = [68, 97, 114]
print(bytes(n)) |
class Solution:
def addBinary(self, a: str, b: str) -> str:
a = int(a, 2)
b = int(b, 2)
result = a + b
result = bin(result)
return (result[2:]) | class Solution:
def add_binary(self, a: str, b: str) -> str:
a = int(a, 2)
b = int(b, 2)
result = a + b
result = bin(result)
return result[2:] |
# ------------------------------------------------------------------------------------
# Tutorial on f-strings
# Using f-strings is a simple and fast method for String formatting
# ------------------------------------------------------------------------------------
# # Syntax of f-strings:
# We start the string with the keyword f, followed by our string in double quotes and {} is used as a placeholder for the values involved in the formatting.
# The following example makes it clearer:
name = "John"
age = 17
print(f"I am {name} and I am {age} years old")
# Output- I am John and I am 17 years old
# Note that F(in capitals) also works
# Using multi-line f-strings:
name = 'John'
age = 32
occupation = 'Web developer'
msg = (
f'Name: {name}\n'
f'Age: {age}\n'
f'Occupation: {occupation}'
)
print(msg)
# You can also write a multi-lines f-string with double quotes like docstrings:
msg_two = f"""
Name: {name}
Age: {age}
Occupation: {occupation}
"""
print(msg_two)
# Output:
# Name: John
# Age: 32
# Occupation: Web developer
# ------------------------------------------------------------------------------------
# Challenge:
# Using f-strings print the following:
# Hi, I am <name>, my hobby is <hobby>, and I'm from <location>
# name, hobby and location will be inputs from the user
# Do the same for a multi-line output.
# ------------------------------------------------------------------------------------
| name = 'John'
age = 17
print(f'I am {name} and I am {age} years old')
name = 'John'
age = 32
occupation = 'Web developer'
msg = f'Name: {name}\nAge: {age}\nOccupation: {occupation}'
print(msg)
msg_two = f'\nName: {name}\nAge: {age}\nOccupation: {occupation}\n'
print(msg_two) |
a=[1,4,5, 67,6]
print(a)
print(a[3])
print("The index 0 element before changing ", a[0])
a[0]= 7
print("The index 0 element after changing ", a[0])
print(a)
# we can create a list with diff data types
b=[4,"dishant" , False, 8.7]
print(b)
# LIST SLICING
names=["harry", "dishant", "kanta", 45]
print(names[0:2])
print(names[-4:-1])
print(names[4:0:-1])
print(names[4::-1])
list1= [1,2,3,4,5,6,7,8,9]
print(list1[2::-1]) #to get the first number dont input the second condition | a = [1, 4, 5, 67, 6]
print(a)
print(a[3])
print('The index 0 element before changing ', a[0])
a[0] = 7
print('The index 0 element after changing ', a[0])
print(a)
b = [4, 'dishant', False, 8.7]
print(b)
names = ['harry', 'dishant', 'kanta', 45]
print(names[0:2])
print(names[-4:-1])
print(names[4:0:-1])
print(names[4::-1])
list1 = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(list1[2::-1]) |
programs = {}
test_amount = 2000
def create_entry(string):
number, connections = string.split(" <-> ")
number = int(number)
connections = list(map(int, connections.split(", ")))
programs[number] = [None, connections]
def get_group_mem(group):
programs[group][0] = group
for x in range(0, test_amount):
test_connections(group)
return(sum(map(lambda p: int(programs[p][0] == group), programs)))
def test_connections(group):
for p in programs:
if programs[p][0] is not None:
continue
for c in programs[p][1]:
if programs[c][0] == group:
programs[p][0] = group
continue
def get_groups():
n = 0
groups = {}
for n in range(0, len(programs)):
if programs[n][0] is not None:
continue
print(n)
groups[n] = get_group_mem(n)
test_amount = len(programs) - sum(groups.values())
return(groups)
| programs = {}
test_amount = 2000
def create_entry(string):
(number, connections) = string.split(' <-> ')
number = int(number)
connections = list(map(int, connections.split(', ')))
programs[number] = [None, connections]
def get_group_mem(group):
programs[group][0] = group
for x in range(0, test_amount):
test_connections(group)
return sum(map(lambda p: int(programs[p][0] == group), programs))
def test_connections(group):
for p in programs:
if programs[p][0] is not None:
continue
for c in programs[p][1]:
if programs[c][0] == group:
programs[p][0] = group
continue
def get_groups():
n = 0
groups = {}
for n in range(0, len(programs)):
if programs[n][0] is not None:
continue
print(n)
groups[n] = get_group_mem(n)
test_amount = len(programs) - sum(groups.values())
return groups |
"""
=========
cysofa
=========
Utilities and Python wrappers for sofa module
"""
__version__ = '0.1' | """
=========
cysofa
=========
Utilities and Python wrappers for sofa module
"""
__version__ = '0.1' |
#Dado un string, escribir una funcion que cambie todos los espacios por guiones.
string='Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO'
mi_string = string.replace(' ', '-')
print(mi_string)
| string = 'Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO Hola Mundo hola mundo HOLA MUNDO'
mi_string = string.replace(' ', '-')
print(mi_string) |
class Int64Converter(BaseNumberConverter):
"""
Provides a type converter to convert 64-bit signed integer objects to and from various other representations.
Int64Converter()
"""
| class Int64Converter(BaseNumberConverter):
"""
Provides a type converter to convert 64-bit signed integer objects to and from various other representations.
Int64Converter()
""" |
n=int(input("Enter a Number: "))
p=0
temp=n
while(temp > 0):
p = p+1
temp = temp // 10
sum=0
temp=n
while(temp > 0):
rem = temp % 10
sum = sum + (rem**p)
temp = temp // 10
if(n==sum):
print("Armstrong")
else:
print("Not Armstrong") | n = int(input('Enter a Number: '))
p = 0
temp = n
while temp > 0:
p = p + 1
temp = temp // 10
sum = 0
temp = n
while temp > 0:
rem = temp % 10
sum = sum + rem ** p
temp = temp // 10
if n == sum:
print('Armstrong')
else:
print('Not Armstrong') |
#!/usr/bin/env python
"""
@package nilib
@file file_store.py
@brief Dummy module that indicates that the cache should use all filesystem
@brief for both content and metadata.
@version $Revision: 1.00 $ $Author: elwynd $
@version Copyright (C) 2012 Trinity College Dublin and Folly Consulting Ltd
This is an adjunct to the NI URI library developed as
part of the SAIL project. (http://sail-project.eu)
Specification(s) - note, versions may change
- http://tools.ietf.org/html/draft-farrell-decade-ni-10
- http://tools.ietf.org/html/draft-hallambaker-decade-ni-params-03
- http://tools.ietf.org/html/draft-kutscher-icnrg-netinf-proto-00
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================================================
@details
Dummy module that is used by other modules to control the loading of the
correct NetInfCache class version. With this module imported, it is expected
that the cache will use the filesystem to store both metadata and
content files.
@code
Revision History
================
Version Date Author Notes
1.0 10/12/2012 Elwyn Davies Created.
@endcode
"""
#==============================================================================#
# Dummy module - filesystem storage is the default.
use_redis_meta_cache = False
use_file_meta_cache = True
| """
@package nilib
@file file_store.py
@brief Dummy module that indicates that the cache should use all filesystem
@brief for both content and metadata.
@version $Revision: 1.00 $ $Author: elwynd $
@version Copyright (C) 2012 Trinity College Dublin and Folly Consulting Ltd
This is an adjunct to the NI URI library developed as
part of the SAIL project. (http://sail-project.eu)
Specification(s) - note, versions may change
- http://tools.ietf.org/html/draft-farrell-decade-ni-10
- http://tools.ietf.org/html/draft-hallambaker-decade-ni-params-03
- http://tools.ietf.org/html/draft-kutscher-icnrg-netinf-proto-00
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================================================
@details
Dummy module that is used by other modules to control the loading of the
correct NetInfCache class version. With this module imported, it is expected
that the cache will use the filesystem to store both metadata and
content files.
@code
Revision History
================
Version Date Author Notes
1.0 10/12/2012 Elwyn Davies Created.
@endcode
"""
use_redis_meta_cache = False
use_file_meta_cache = True |
#
# Copyright 2020 File Based Test DriverL Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Load dependencies. """
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def file_based_test_driver_deps():
"""macro to include File Based Test Driver's dependencies in a WORKSPACE.
"""
# Abseil
if not native.existing_rule("com_google_absl"):
# How to update:
# Abseil generally just does daily (or even subdaily) releases. None are
# special, so just periodically update as necessary.
#
# https://github.com/abseil/abseil-cpp/commits/master
# pick a recent release.
# Hit the 'clipboard with a left arrow' icon to copy the commit hex
# COMMIT=<paste commit hex>
# PREFIX=abseil-cpp-
# REPO=https://github.com/abseil/abseil-cpp/archive
# URL=${REPO}/${COMMIT}.tar.gz
# wget $URL
# SHA256=$(sha256sum ${COMMIT}.tar.gz | cut -f1 -d' ')
# rm ${COMMIT}.tar.gz
# echo \# Commit from $(date --iso-8601=date)
# echo url = \"$URL\",
# echo sha256 = \"$SHA256\",
# echo strip_prefix = \"${PREFIX}${COMMIT}\",
#
http_archive(
name = "com_google_absl",
# Commit from 2020-03-03
url = "https://github.com/abseil/abseil-cpp/archive/b19ba96766db08b1f32605cb4424a0e7ea0c7584.tar.gz",
sha256 = "c7ff8decfbda0add222d44bdc27b47527ca4e76929291311474efe7354f663d3",
strip_prefix = "abseil-cpp-b19ba96766db08b1f32605cb4424a0e7ea0c7584",
)
# GoogleTest/GoogleMock framework. Used by most unit-tests.
if not native.existing_rule("com_google_googletest"):
# How to update:
# Googletest generally just does daily (or even subdaily) releases along
# with occasional numbered releases.
#
# https://github.com/google/googletest/commits/master
# pick a recent release.
# Hit the 'clipboard with a left arrow' icon to copy the commit hex
# COMMIT=<paste commit hex>
# PREFIX=googletest-
# REPO=https://github.com/google/googletest/archive/
# URL=${REPO}/${COMMIT}.tar.gz
# wget $URL
# SHA256=$(sha256sum ${COMMIT}.tar.gz | cut -f1 -d' ')
# rm ${COMMIT}.tar.gz
# echo \# Commit from $(date --iso-8601=date)
# echo url = \"$URL\",
# echo sha256 = \"$SHA256\",
# echo strip_prefix = \"${PREFIX}${COMMIT}\",
#
http_archive(
name = "com_google_googletest",
# Commit from 2020-02-21
url = "https://github.com/google/googletest/archive//6f5fd0d7199b9a19faa9f499ecc266e6ae0329e7.tar.gz",
sha256 = "51e6c4b4449aab8f31e69d0ff89565f49a1f3628a42e24f214e8b02b3526e3bc",
strip_prefix = "googletest-6f5fd0d7199b9a19faa9f499ecc266e6ae0329e7",
)
# RE2 Regex Framework, mostly used in unit tests.
if not native.existing_rule("com_googlesource_code_re2"):
http_archive(
name = "com_googlesource_code_re2",
urls = [
"https://github.com/google/re2/archive/d1394506654e0a19a92f3d8921e26f7c3f4de969.tar.gz",
],
sha256 = "ac855fb93dfa6878f88bc1c399b9a2743fdfcb3dc24b94ea9a568a1c990b1212",
strip_prefix = "re2-d1394506654e0a19a92f3d8921e26f7c3f4de969",
)
| """ Load dependencies. """
load('@bazel_tools//tools/build_defs/repo:http.bzl', 'http_archive')
def file_based_test_driver_deps():
"""macro to include File Based Test Driver's dependencies in a WORKSPACE.
"""
if not native.existing_rule('com_google_absl'):
http_archive(name='com_google_absl', url='https://github.com/abseil/abseil-cpp/archive/b19ba96766db08b1f32605cb4424a0e7ea0c7584.tar.gz', sha256='c7ff8decfbda0add222d44bdc27b47527ca4e76929291311474efe7354f663d3', strip_prefix='abseil-cpp-b19ba96766db08b1f32605cb4424a0e7ea0c7584')
if not native.existing_rule('com_google_googletest'):
http_archive(name='com_google_googletest', url='https://github.com/google/googletest/archive//6f5fd0d7199b9a19faa9f499ecc266e6ae0329e7.tar.gz', sha256='51e6c4b4449aab8f31e69d0ff89565f49a1f3628a42e24f214e8b02b3526e3bc', strip_prefix='googletest-6f5fd0d7199b9a19faa9f499ecc266e6ae0329e7')
if not native.existing_rule('com_googlesource_code_re2'):
http_archive(name='com_googlesource_code_re2', urls=['https://github.com/google/re2/archive/d1394506654e0a19a92f3d8921e26f7c3f4de969.tar.gz'], sha256='ac855fb93dfa6878f88bc1c399b9a2743fdfcb3dc24b94ea9a568a1c990b1212', strip_prefix='re2-d1394506654e0a19a92f3d8921e26f7c3f4de969') |
# Generated by rpcgen.py at Mon Mar 8 11:09:57 2004
__all__ = ['MNTPATHLEN', 'MNTNAMLEN', 'FHSIZE2', 'FHSIZE3', 'MNT3_OK', 'MNT3ERR_PERM', 'MNT3ERR_NOENT', 'MNT3ERR_IO', 'MNT3ERR_ACCES', 'MNT3ERR_NOTDIR', 'MNT3ERR_INVAL', 'MNT3ERR_NAMETOOLONG', 'MNT3ERR_NOTSUPP', 'MNT3ERR_SERVERFAULT', 'mountstat3_id', 'MOUNTPROC_NULL', 'MOUNTPROC_MNT', 'MOUNTPROC_DUMP', 'MOUNTPROC_UMNT', 'MOUNTPROC_UMNTALL', 'MOUNTPROC_EXPORT', 'MOUNTPROC_EXPORTALL', 'MOUNT_V1', 'MOUNTPROC3_NULL', 'MOUNTPROC3_MNT', 'MOUNTPROC3_DUMP', 'MOUNTPROC3_UMNT', 'MOUNTPROC3_UMNTALL', 'MOUNTPROC3_EXPORT', 'MOUNT_V3', 'MOUNT_PROGRAM']
FALSE = 0
TRUE = 1
MNTPATHLEN = 1024
MNTNAMLEN = 255
FHSIZE2 = 32
FHSIZE3 = 64
MNT3_OK = 0
MNT3ERR_PERM = 1
MNT3ERR_NOENT = 2
MNT3ERR_IO = 5
MNT3ERR_ACCES = 13
MNT3ERR_NOTDIR = 20
MNT3ERR_INVAL = 22
MNT3ERR_NAMETOOLONG = 63
MNT3ERR_NOTSUPP = 10004
MNT3ERR_SERVERFAULT = 10006
mountstat3_id = {
MNT3_OK: "MNT3_OK",
MNT3ERR_PERM: "MNT3ERR_PERM",
MNT3ERR_NOENT: "MNT3ERR_NOENT",
MNT3ERR_IO: "MNT3ERR_IO",
MNT3ERR_ACCES: "MNT3ERR_ACCES",
MNT3ERR_NOTDIR: "MNT3ERR_NOTDIR",
MNT3ERR_INVAL: "MNT3ERR_INVAL",
MNT3ERR_NAMETOOLONG: "MNT3ERR_NAMETOOLONG",
MNT3ERR_NOTSUPP: "MNT3ERR_NOTSUPP",
MNT3ERR_SERVERFAULT: "MNT3ERR_SERVERFAULT"
}
MOUNTPROC_NULL = 0
MOUNTPROC_MNT = 1
MOUNTPROC_DUMP = 2
MOUNTPROC_UMNT = 3
MOUNTPROC_UMNTALL = 4
MOUNTPROC_EXPORT = 5
MOUNTPROC_EXPORTALL = 6
MOUNT_V1 = 1
MOUNTPROC3_NULL = 0
MOUNTPROC3_MNT = 1
MOUNTPROC3_DUMP = 2
MOUNTPROC3_UMNT = 3
MOUNTPROC3_UMNTALL = 4
MOUNTPROC3_EXPORT = 5
MOUNT_V3 = 3
MOUNT_PROGRAM = 100005
| __all__ = ['MNTPATHLEN', 'MNTNAMLEN', 'FHSIZE2', 'FHSIZE3', 'MNT3_OK', 'MNT3ERR_PERM', 'MNT3ERR_NOENT', 'MNT3ERR_IO', 'MNT3ERR_ACCES', 'MNT3ERR_NOTDIR', 'MNT3ERR_INVAL', 'MNT3ERR_NAMETOOLONG', 'MNT3ERR_NOTSUPP', 'MNT3ERR_SERVERFAULT', 'mountstat3_id', 'MOUNTPROC_NULL', 'MOUNTPROC_MNT', 'MOUNTPROC_DUMP', 'MOUNTPROC_UMNT', 'MOUNTPROC_UMNTALL', 'MOUNTPROC_EXPORT', 'MOUNTPROC_EXPORTALL', 'MOUNT_V1', 'MOUNTPROC3_NULL', 'MOUNTPROC3_MNT', 'MOUNTPROC3_DUMP', 'MOUNTPROC3_UMNT', 'MOUNTPROC3_UMNTALL', 'MOUNTPROC3_EXPORT', 'MOUNT_V3', 'MOUNT_PROGRAM']
false = 0
true = 1
mntpathlen = 1024
mntnamlen = 255
fhsize2 = 32
fhsize3 = 64
mnt3_ok = 0
mnt3_err_perm = 1
mnt3_err_noent = 2
mnt3_err_io = 5
mnt3_err_acces = 13
mnt3_err_notdir = 20
mnt3_err_inval = 22
mnt3_err_nametoolong = 63
mnt3_err_notsupp = 10004
mnt3_err_serverfault = 10006
mountstat3_id = {MNT3_OK: 'MNT3_OK', MNT3ERR_PERM: 'MNT3ERR_PERM', MNT3ERR_NOENT: 'MNT3ERR_NOENT', MNT3ERR_IO: 'MNT3ERR_IO', MNT3ERR_ACCES: 'MNT3ERR_ACCES', MNT3ERR_NOTDIR: 'MNT3ERR_NOTDIR', MNT3ERR_INVAL: 'MNT3ERR_INVAL', MNT3ERR_NAMETOOLONG: 'MNT3ERR_NAMETOOLONG', MNT3ERR_NOTSUPP: 'MNT3ERR_NOTSUPP', MNT3ERR_SERVERFAULT: 'MNT3ERR_SERVERFAULT'}
mountproc_null = 0
mountproc_mnt = 1
mountproc_dump = 2
mountproc_umnt = 3
mountproc_umntall = 4
mountproc_export = 5
mountproc_exportall = 6
mount_v1 = 1
mountproc3_null = 0
mountproc3_mnt = 1
mountproc3_dump = 2
mountproc3_umnt = 3
mountproc3_umntall = 4
mountproc3_export = 5
mount_v3 = 3
mount_program = 100005 |
for _ in range(int(input())):
n = int(input())
s = input()
if '.' not in s:
print("0")
continue
if '*' not in s:
print("0")
continue
if s.count('*')==1:
print("0")
continue
count = s.count('*')
if count%2==0:
mid = count//2
else:
mid = (count//2) + 1
mid_pos = 0
count_star = 0
for i in range(n):
if s[i]=="*":
count_star += 1
if count_star == mid:
mid_pos = i
break
ans = 0
#print(mid_pos)
for i in range(n):
if s[i]=='*':
ans += abs(i-mid_pos)
#print(ans)
if count%2!=0:
ans -= (mid-1)*mid
else:
ans -= ((mid-1)*mid)//2 + (mid*(mid+1))//2
print(ans) | for _ in range(int(input())):
n = int(input())
s = input()
if '.' not in s:
print('0')
continue
if '*' not in s:
print('0')
continue
if s.count('*') == 1:
print('0')
continue
count = s.count('*')
if count % 2 == 0:
mid = count // 2
else:
mid = count // 2 + 1
mid_pos = 0
count_star = 0
for i in range(n):
if s[i] == '*':
count_star += 1
if count_star == mid:
mid_pos = i
break
ans = 0
for i in range(n):
if s[i] == '*':
ans += abs(i - mid_pos)
if count % 2 != 0:
ans -= (mid - 1) * mid
else:
ans -= (mid - 1) * mid // 2 + mid * (mid + 1) // 2
print(ans) |
"""
Copyright (C) 2020 Argonne, Hariharan Devarajan <hdevarajan@anl.gov>
This file is part of DLProfile
DLIO is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the published by the Free Software Foundation, either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU General Public License along with this program.
If not, see <http://www.gnu.org/licenses/>.
"""
class ErrorCode(object):
def __init__(self, error_code, error_message):
self.error_code_ = error_code
self.error_message_ = error_message
def __repr__(self):
return {'error_code': self.error_code_, 'error_message': self.error_message_}
def __str__(self):
return self.error_message_.format(self.error_code_)
class ErrorCodes:
EC0000 = {0, "SUCCESSFUL"}
EC1000 = {1000, "ERROR: Incorrect Computation Type"}
EC1001 = {1001, "ERROR: Incorrect Format Type"}
| """
Copyright (C) 2020 Argonne, Hariharan Devarajan <hdevarajan@anl.gov>
This file is part of DLProfile
DLIO is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the published by the Free Software Foundation, either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU General Public License along with this program.
If not, see <http://www.gnu.org/licenses/>.
"""
class Errorcode(object):
def __init__(self, error_code, error_message):
self.error_code_ = error_code
self.error_message_ = error_message
def __repr__(self):
return {'error_code': self.error_code_, 'error_message': self.error_message_}
def __str__(self):
return self.error_message_.format(self.error_code_)
class Errorcodes:
ec0000 = {0, 'SUCCESSFUL'}
ec1000 = {1000, 'ERROR: Incorrect Computation Type'}
ec1001 = {1001, 'ERROR: Incorrect Format Type'} |
# AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]
index = {"StopExecution": "00_core.ipynb",
"skip": "00_core.ipynb",
"run_all": "00_core.ipynb",
"set_google_application_credentials": "00_core.ipynb",
"PROJECT_ID": "01_constants.ipynb",
"LOCATION": "01_constants.ipynb",
"SERVICE_ACCOUNT_KEY_FILE_NAME": "01_constants.ipynb",
"GCS_BUCKET": "01_constants.ipynb",
"GCS_FOLDER": "01_constants.ipynb",
"PRIVATE_GCS_BUCKET": "01_constants.ipynb",
"URL": "01_constants.ipynb",
"DATASET_ID": "01_constants.ipynb",
"TABLE_ID": "01_constants.ipynb",
"BATCH_SIZE": "01_constants.ipynb",
"FULL_TRAINING_DATASET_SIZE": "01_constants.ipynb",
"FULL_VALIDATION_DATASET_SIZE": "01_constants.ipynb",
"SMALL_TRAINING_DATASET_SIZE": "01_constants.ipynb",
"SMALL_VALIDATION_DATASET_SIZE": "01_constants.ipynb",
"TINY_TRAINING_DATASET_SIZE": "01_constants.ipynb",
"TINY_VALIDATION_DATASET_SIZE": "01_constants.ipynb",
"CSV_SCHEMA": "01_constants.ipynb",
"DATASET_SIZE_TYPE": "01_constants.ipynb",
"DATASET_SOURCE_TYPE": "01_constants.ipynb",
"DATASET_TYPE": "01_constants.ipynb",
"EMBEDDINGS_MODE_TYPE": "01_constants.ipynb",
"create_bigquery_dataset_if_necessary": "03_data_import.ipynb",
"load_data_into_bigquery": "03_data_import.ipynb",
"get_file_names_with_validation_split": "03_data_import.ipynb",
"get_dataset_size": "04_data_reader.ipynb",
"get_steps_per_epoch": "04_data_reader.ipynb",
"get_max_steps": "04_data_reader.ipynb",
"get_mean_and_std_dicts": "04_data_reader.ipynb",
"get_vocabulary_size_dict": "04_data_reader.ipynb",
"get_corpus_dict": "04_data_reader.ipynb",
"corpus_to_lookuptable": "04_data_reader.ipynb",
"get_corpus": "04_data_reader.ipynb",
"transform_row": "04_data_reader.ipynb",
"get_bigquery_table_name": "04_data_reader.ipynb",
"read_bigquery": "04_data_reader.ipynb",
"read_gcs": "04_data_reader.ipynb",
"get_dataset": "04_data_reader.ipynb",
"TrainTimeCallback": "05_trainer.ipynb",
"PlotLossesCallback": "05_trainer.ipynb",
"create_categorical_feature_column_with_hash_bucket": "05_trainer.ipynb",
"create_categorical_feature_column_with_vocabulary_list": "05_trainer.ipynb",
"create_embedding": "05_trainer.ipynb",
"create_linear_feature_columns": "05_trainer.ipynb",
"create_categorical_embeddings_feature_columns": "05_trainer.ipynb",
"create_feature_columns": "05_trainer.ipynb",
"create_keras_model_sequential": "05_trainer.ipynb",
"train_and_evaluate_keras_model": "05_trainer.ipynb",
"train_and_evaluate_keras": "05_trainer.ipynb",
"keras_hp_search": "05_trainer.ipynb",
"train_and_evaluate_estimator": "05_trainer.ipynb",
"train_and_evaluate_keras_small": "index.ipynb",
"train_and_evaluate_estimator_small": "index.ipynb",
"run_keras_hp_search": "index.ipynb"}
# nbdev-generated lookup metadata: module list plus documentation / git URLs.
modules = [
    "core.py",
    "constants.py",
    "data_import.py",
    "data_reader.py",
    "trainer.py",
    "index.py",
    "setup.py",
]

doc_url = "https://all.github.io/criteo_nbdev/"

git_url = "https://github.com/all/criteo_nbdev/tree/master/"


def custom_doc_links(name):
    """Hook for per-name custom documentation links; this project defines none."""
    return None
| __all__ = ['index', 'modules', 'custom_doc_links', 'git_url']
index = {'StopExecution': '00_core.ipynb', 'skip': '00_core.ipynb', 'run_all': '00_core.ipynb', 'set_google_application_credentials': '00_core.ipynb', 'PROJECT_ID': '01_constants.ipynb', 'LOCATION': '01_constants.ipynb', 'SERVICE_ACCOUNT_KEY_FILE_NAME': '01_constants.ipynb', 'GCS_BUCKET': '01_constants.ipynb', 'GCS_FOLDER': '01_constants.ipynb', 'PRIVATE_GCS_BUCKET': '01_constants.ipynb', 'URL': '01_constants.ipynb', 'DATASET_ID': '01_constants.ipynb', 'TABLE_ID': '01_constants.ipynb', 'BATCH_SIZE': '01_constants.ipynb', 'FULL_TRAINING_DATASET_SIZE': '01_constants.ipynb', 'FULL_VALIDATION_DATASET_SIZE': '01_constants.ipynb', 'SMALL_TRAINING_DATASET_SIZE': '01_constants.ipynb', 'SMALL_VALIDATION_DATASET_SIZE': '01_constants.ipynb', 'TINY_TRAINING_DATASET_SIZE': '01_constants.ipynb', 'TINY_VALIDATION_DATASET_SIZE': '01_constants.ipynb', 'CSV_SCHEMA': '01_constants.ipynb', 'DATASET_SIZE_TYPE': '01_constants.ipynb', 'DATASET_SOURCE_TYPE': '01_constants.ipynb', 'DATASET_TYPE': '01_constants.ipynb', 'EMBEDDINGS_MODE_TYPE': '01_constants.ipynb', 'create_bigquery_dataset_if_necessary': '03_data_import.ipynb', 'load_data_into_bigquery': '03_data_import.ipynb', 'get_file_names_with_validation_split': '03_data_import.ipynb', 'get_dataset_size': '04_data_reader.ipynb', 'get_steps_per_epoch': '04_data_reader.ipynb', 'get_max_steps': '04_data_reader.ipynb', 'get_mean_and_std_dicts': '04_data_reader.ipynb', 'get_vocabulary_size_dict': '04_data_reader.ipynb', 'get_corpus_dict': '04_data_reader.ipynb', 'corpus_to_lookuptable': '04_data_reader.ipynb', 'get_corpus': '04_data_reader.ipynb', 'transform_row': '04_data_reader.ipynb', 'get_bigquery_table_name': '04_data_reader.ipynb', 'read_bigquery': '04_data_reader.ipynb', 'read_gcs': '04_data_reader.ipynb', 'get_dataset': '04_data_reader.ipynb', 'TrainTimeCallback': '05_trainer.ipynb', 'PlotLossesCallback': '05_trainer.ipynb', 'create_categorical_feature_column_with_hash_bucket': '05_trainer.ipynb', 
'create_categorical_feature_column_with_vocabulary_list': '05_trainer.ipynb', 'create_embedding': '05_trainer.ipynb', 'create_linear_feature_columns': '05_trainer.ipynb', 'create_categorical_embeddings_feature_columns': '05_trainer.ipynb', 'create_feature_columns': '05_trainer.ipynb', 'create_keras_model_sequential': '05_trainer.ipynb', 'train_and_evaluate_keras_model': '05_trainer.ipynb', 'train_and_evaluate_keras': '05_trainer.ipynb', 'keras_hp_search': '05_trainer.ipynb', 'train_and_evaluate_estimator': '05_trainer.ipynb', 'train_and_evaluate_keras_small': 'index.ipynb', 'train_and_evaluate_estimator_small': 'index.ipynb', 'run_keras_hp_search': 'index.ipynb'}
modules = ['core.py', 'constants.py', 'data_import.py', 'data_reader.py', 'trainer.py', 'index.py', 'setup.py']
doc_url = 'https://all.github.io/criteo_nbdev/'
git_url = 'https://github.com/all/criteo_nbdev/tree/master/'
def custom_doc_links(name):
return None |
if __name__ == '__main__':
    # Apply n list commands read from stdin (HackerRank "Lists"-style task).
    n = int(input())
    result = []
    for _ in range(n):
        tokens = input().split()
        command = tokens[0]
        args = [int(token) for token in tokens[1:]]
        if command == 'insert':
            result.insert(args[0], args[1])
        elif command == 'remove':
            result.remove(args[0])
        elif command == 'append':
            result.append(args[0])
        elif command == 'print':
            print(result)
        elif command == 'sort':
            result.sort()
        elif command == 'pop':
            result.pop()
        else:
            # Any other command (i.e. 'reverse') reverses the list in place.
            result.reverse()
| if __name__ == '__main__':
n = int(input())
result = []
for _ in range(n):
(operator, *operands) = input().split()
operands = [int(x) for x in operands]
if operator == 'insert':
result.insert(operands[0], operands[1])
elif operator == 'remove':
result.remove(operands[0])
elif operator == 'append':
result.append(operands[0])
elif operator == 'print':
print(result)
elif operator == 'sort':
result.sort()
elif operator == 'pop':
result.pop()
else:
result.reverse() |
"""A dummy test."""


def test_remove() -> None:
    """Placeholder test that always passes."""
    assert 1 == 1
| """A dummy test."""
def test_remove() -> None:
"""A dummy test."""
assert True |
class NumberPowerTwo:
    """Iterator yielding the powers of two 2**0 through 2**max inclusive."""

    def __init__(self, max=0):
        # `max` shadows the builtin, but the parameter name is kept for
        # interface compatibility with existing callers.
        self.max = max

    def __iter__(self):
        # Reset the exponent so the instance can be iterated more than once.
        self.n = 0
        return self

    def __next__(self):
        # Guard-first style: stop as soon as the exponent passes `max`.
        if self.n > self.max:
            raise StopIteration
        result = 2 ** self.n
        self.n += 1
        return result


def main():
    """Demonstrate manual next() calls (past exhaustion) and for-loop use."""
    number = 5
    numberpower = NumberPowerTwo(number)
    itr = iter(numberpower)
    try:
        # Deliberately request more items than available to hit StopIteration.
        for i in range(1, number + 3):
            print(next(itr))
    except StopIteration:
        print('Stopping the loop.')  # fixed: f-prefix with no placeholder removed
    print('Using in for loop')
    for val in NumberPowerTwo(number):
        print(val)


if __name__ == "__main__":
    main()
| class Numberpowertwo:
"""Class to implement an iterator of powers of two
"""
def __init__(self, max=0):
self.max = max
def __iter__(self):
self.n = 0
return self
def __next__(self):
if self.n <= self.max:
result = 2 ** self.n
self.n += 1
return result
else:
raise StopIteration
def main():
number = 5
numberpower = number_power_two(number)
itr = iter(numberpower)
try:
for i in range(1, number + 3):
print(next(itr))
except StopIteration:
print(f'Stopping the loop.')
print('Using in for loop')
for val in number_power_two(number):
print(val)
if __name__ == '__main__':
main() |
# Appears to be an auto-generated .NET interop stub (RhinoCommon-style):
# bodies are placeholders; the real implementation lives in the host
# assembly — TODO confirm against the generator that produced this file.
class PointCloudObject(RhinoObject):
    # no doc
    def DuplicatePointCloudGeometry(self):
        """ DuplicatePointCloudGeometry(self: PointCloudObject) -> PointCloud """
        pass
    # Placeholder property triple (fget/fset/fdel); the stub getter returns a
    # fresh object() and the setter/deleter are no-ops.
    PointCloudGeometry = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Get: PointCloudGeometry(self: PointCloudObject) -> PointCloud
    """
| class Pointcloudobject(RhinoObject):
def duplicate_point_cloud_geometry(self):
""" DuplicatePointCloudGeometry(self: PointCloudObject) -> PointCloud """
pass
point_cloud_geometry = property(lambda self: object(), lambda self, v: None, lambda self: None)
'Get: PointCloudGeometry(self: PointCloudObject) -> PointCloud\n\n\n\n' |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Module-level shared state recorded while solving the cube's corners:
# cornerBuffer holds the current buffer-piece stickers, cornerSequence the
# ordered list of swaps performed (shared with the Corner class below).
cornerBuffer = []
cornerSequence = []
class Corner:
    """Solves the corner pieces of a Rubik's cube facelet representation.

    Uses buffer-based corner cycling (Old Pochmann-style blind solving, it
    appears — TODO confirm): the L[0]/U[0]/B[2] facelets act as the swap
    buffer. `sides` is a dict mapping a face letter ('U','D','F','B','L','R')
    to a list of 9 sticker labels.
    """
    # Class-level (shared) counters and logs accumulated across a solve.
    cornerCount = 0
    cornerIndex = []
    cornerColor = []
    def solveCorner(self,sides,cornerPriority):
        """Cycle corners through the L/U/B buffer until every corner matches.

        Mutates `sides` in place and returns it. Each swap is appended to the
        module-level cornerSequence; `cornerPriority` lists the (face, index)
        pairs probed when the buffer piece is already in its home slot.
        """
        # True when all 24 corner stickers already match their face letter.
        cornerGoal = (sides['U'][0]=='U' and sides['U'][2]=='U' and sides['U'][6]=='U' and sides['U'][8]=='U'
                      and sides['F'][0]=='F' and sides['F'][2]=='F' and sides['F'][6]=='F' and sides['F'][8]=='F'
                      and sides['R'][0]=='R' and sides['R'][2]=='R' and sides['R'][6]=='R' and sides['R'][8]=='R'
                      and sides['D'][0]=='D' and sides['D'][2]=='D' and sides['D'][6]=='D' and sides['D'][8]=='D'
                      and sides['B'][0]=='B' and sides['B'][2]=='B' and sides['B'][6]=='B' and sides['B'][8]=='B'
                      and sides['L'][0]=='L' and sides['L'][2]=='L' and sides['L'][6]=='L' and sides['L'][8]=='L')
        if (cornerGoal):
            print('Corners are already solved!')
        else:
            # Buffer slot already solved (or is a twisted buffer piece):
            # break into a new cycle via cornerBufferChange.
            if ((sides['L'][0]=='L' and sides['U'][0]=='U' and sides['B'][2]=='B') or
                (sides['L'][0]=='U' and sides['U'][0]=='B' and sides['B'][2]=='L') or
                (sides['L'][0]=='B' and sides['U'][0]=='L' and sides['B'][2]=='U')):
                # NOTE(review): this binds a *local* cornerBuffer, shadowing the
                # module-level list of the same name — confirm that is intended.
                cornerBuffer = self.cornerBufferChange(sides,cornerPriority)
                self.cornerCount = self.cornerCount + 1
            if not ((sides['L'][0]=='L' and sides['U'][0]=='U' and sides['B'][2]=='B') or
                    (sides['L'][0]=='U' and sides['U'][0]=='B' and sides['B'][2]=='L') or
                    (sides['L'][0]=='B' and sides['U'][0]=='L' and sides['B'][2]=='U')):
                cornerBuffer = [sides['L'][0], sides['U'][0], sides['B'][2]]
            while(1):
                # Everything except the buffer's own three stickers solved?
                cornerGoalbuffer = (sides['U'][2]=='U' and sides['U'][6]=='U' and sides['U'][8]=='U'
                                    and sides['F'][0]=='F' and sides['F'][2]=='F' and sides['F'][6]=='F' and sides['F'][8]=='F'
                                    and sides['R'][0]=='R' and sides['R'][2]=='R' and sides['R'][6]=='R' and sides['R'][8]=='R'
                                    and sides['D'][0]=='D' and sides['D'][2]=='D' and sides['D'][6]=='D' and sides['D'][8]=='D'
                                    and sides['B'][0]=='B' and sides['B'][6]=='B' and sides['B'][8]=='B'
                                    and sides['L'][2]=='L' and sides['L'][6]=='L' and sides['L'][8]=='L')
                if (cornerGoalbuffer):
                    # Snap the buffer slot to solved state.
                    sides['L'][0] = 'L'
                    sides['U'][0] = 'U'
                    sides['B'][2] = 'B'
                    # Odd swap count => parity fix on two edge stickers
                    # (presumably matching the edge-solve convention — verify).
                    if not(self.cornerCount%2==0):
                        sidesCopy = sides.copy()
                        parity = [sidesCopy['U'][3], sidesCopy['L'][1], sidesCopy['U'][1], sidesCopy['B'][1]]
                        sides['U'][1] = parity[0]
                        sides['B'][1] = parity[1]
                        sides['U'][3] = parity[2]
                        sides['L'][1] = parity[3]
                    #print('Corners are solved!')
                    #print(sides)
                    break
                # NOTE(review): `face` is assigned but never used.
                face = [cornerBuffer[0], cornerBuffer[1], cornerBuffer[2]]
                prevBuffer = cornerBuffer
                if ((sides['L'][0]=='L' and sides['U'][0]=='U' and sides['B'][2]=='B') or
                    (sides['L'][0]=='U' and sides['U'][0]=='B' and sides['B'][2]=='L') or
                    (sides['L'][0]=='B' and sides['U'][0]=='L' and sides['B'][2]=='U')):
                    # Cycle closed on the buffer — start a new cycle.
                    cornerBuffer = self.cornerBufferChange(sides,cornerPriority)
                else:
                    # Shoot the buffer piece to its target slot and pick up
                    # whatever piece occupied that slot as the new buffer.
                    cornerSequence.append(cornerBuffer)
                    self.cornerColor.append(cornerBuffer)
                    index = self.getCornerIndex(cornerBuffer)
                    self.cornerIndex.append([cornerBuffer[0],index[0],cornerBuffer[1],index[1],cornerBuffer[2],index[2]])
                    cornerBuffer = [sides[cornerBuffer[0]][index[0]],sides[cornerBuffer[1]][index[1]],sides[cornerBuffer[2]][index[2]]] #update buffer
                    sides[prevBuffer[0]][index[0]] = prevBuffer[0]
                    sides[prevBuffer[1]][index[1]] = prevBuffer[1]
                    sides[prevBuffer[2]][index[2]] = prevBuffer[2]
                    sides['L'][0] = cornerBuffer[0]
                    sides['U'][0] = cornerBuffer[1]
                    sides['B'][2] = cornerBuffer[2]
                    self.cornerCount = self.cornerCount + 1
        return sides
    def cornerBufferChange(self,sides,cornerPriority):
        """Start a new cycle: swap the buffer with the first unsolved slot.

        Walks `cornerPriority` in order, swaps the L/U/B buffer stickers with
        the first corner slot that is not already solved, records the swap,
        and returns the stickers now sitting in the buffer.
        """
        sidesCopy = sides.copy()
        a1 = ''
        a2 = 0
        b1 = ''
        b2 = 0
        c1 = ''
        c2 = 0
        if not (sides[cornerPriority[0][0]][cornerPriority[0][1]]=='U'
                and sides[cornerPriority[0][2]][cornerPriority[0][3]]=='R'
                and sides[cornerPriority[0][4]][cornerPriority[0][5]]=='B'):
            a1 = 'U'
            a2 = 2
            b1 = 'R'
            b2 = 2
            c1 = 'B'
            c2 = 0
            cornerBuffer = ['U','R','B']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[1][0]][cornerPriority[1][1]]=='U'
                  and sides[cornerPriority[1][2]][cornerPriority[1][3]]=='L'
                  and sides[cornerPriority[1][4]][cornerPriority[1][5]]=='F'):
            a1 = 'U'
            a2 = 6
            b1 = 'L'
            b2 = 2
            c1 = 'F'
            c2 = 0
            cornerBuffer = ['U','L','F']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[2][0]][cornerPriority[2][1]]=='U'
                  and sides[cornerPriority[2][2]][cornerPriority[2][3]]=='F'
                  and sides[cornerPriority[2][4]][cornerPriority[2][5]]=='R'):
            a1 = 'U'
            a2 = 8
            b1 = 'F'
            b2 = 2
            c1 = 'R'
            c2 = 0
            cornerBuffer = ['U','F','R']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[3][0]][cornerPriority[3][1]]=='D'
                  and sides[cornerPriority[3][2]][cornerPriority[3][3]]=='F'
                  and sides[cornerPriority[3][4]][cornerPriority[3][5]]=='L'):
            a1 = 'D'
            a2 = 0
            b1 = 'F'
            b2 = 6
            c1 = 'L'
            c2 = 8
            cornerBuffer = ['D','F','L']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[4][0]][cornerPriority[4][1]]=='D'
                  and sides[cornerPriority[4][2]][cornerPriority[4][3]]=='R'
                  and sides[cornerPriority[4][4]][cornerPriority[4][5]]=='F'):
            a1 = 'D'
            a2 = 2
            b1 = 'R'
            b2 = 6
            c1 = 'F'
            c2 = 8
            cornerBuffer = ['D','R','F']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[5][0]][cornerPriority[5][1]]=='D'
                  and sides[cornerPriority[5][2]][cornerPriority[5][3]]=='L'
                  and sides[cornerPriority[5][4]][cornerPriority[5][5]]=='B'):
            a1 = 'D'
            a2 = 6
            b1 = 'L'
            b2 = 6
            c1 = 'B'
            c2 = 8
            cornerBuffer = ['D','L','B']
            cornerSequence.append(cornerBuffer)
        elif not (sides[cornerPriority[6][0]][cornerPriority[6][1]]=='D'
                  and sides[cornerPriority[6][2]][cornerPriority[6][3]]=='B'
                  and sides[cornerPriority[6][4]][cornerPriority[6][5]]=='R'):
            a1 = 'D'
            a2 = 8
            b1 = 'B'
            b2 = 6
            c1 = 'R'
            c2 = 8
            cornerBuffer = ['D','B','R']
            cornerSequence.append(cornerBuffer)
        index = self.getCornerIndex(cornerBuffer)
        self.cornerIndex.append([cornerBuffer[0],index[0],cornerBuffer[1],index[1],cornerBuffer[2],index[2]])
        # Three buffer stickers followed by the three target-slot stickers.
        swap = [sidesCopy['L'][0], sidesCopy['U'][0], sidesCopy['B'][2],
                sidesCopy[a1][a2], sidesCopy[b1][b2], sidesCopy[c1][c2]]
        cornerBuffer = [sides[a1][a2],sides[b1][b2],sides[c1][c2]]
        # Record the (possibly twisted) orientation the buffer piece had.
        if (sides['L'][0]=='L' and sides['U'][0]=='U' and sides['B'][2]=='B'):
            self.cornerColor.append(['L','U','B'])
        elif (sides['L'][0]=='U' and sides['U'][0]=='B' and sides['B'][2]=='L'):
            self.cornerColor.append(['U','B','L'])
        elif (sides['L'][0]=='B' and sides['U'][0]=='L' and sides['B'][2]=='U'):
            self.cornerColor.append(['B','L','U'])
        # Perform the swap: buffer stickers into the target slot and back.
        sides[a1][a2] = swap[0]
        sides[b1][b2] = swap[1]
        sides[c1][c2] = swap[2]
        sides['L'][0] = swap[3]
        sides['U'][0] = swap[4]
        sides['B'][2] = swap[5]
        return cornerBuffer
    def getCornerIndex(self,cornerBuffer):
        """Return the facelet indices [i0, i1, i2] for an ordered face triple.

        Hand-enumerated mapping from a corner's three face letters (in sticker
        order) to the index of each sticker on its face. NOTE(review): raises
        UnboundLocalError for triples not in the table, and a few mirrored
        branches return identical triples (e.g. 'R','U','B' vs 'B','R','U')
        — worth double-checking against the cube layout.
        """
        if cornerBuffer[0]=='U':
            if cornerBuffer[1]=='R':
                if cornerBuffer[2]=='F':
                    index = [8,0,2]
                elif cornerBuffer[2]=='B':
                    index = [2,2,0]
            elif cornerBuffer[1]=='F':
                if cornerBuffer[2]=='R':
                    index = [8,2,0]
                elif cornerBuffer[2]=='L':
                    index = [6,0,2]
            elif cornerBuffer[1]=='B':
                if cornerBuffer[2]=='R':
                    index = [2,0,2]
                elif cornerBuffer[2]=='L':
                    index = [0,2,0]
            elif cornerBuffer[1]=='L':
                if cornerBuffer[2]=='F':
                    index = [6,2,0]
                elif cornerBuffer[2]=='B':
                    index = [0,0,2]
        elif cornerBuffer[0]=='F':
            if cornerBuffer[1]=='U':
                if cornerBuffer[2]=='R':
                    index = [2,8,0]
                elif cornerBuffer[2]=='L':
                    index = [0,6,2]
            elif cornerBuffer[1]=='R':
                if cornerBuffer[2]=='U':
                    index = [2,0,8]
                elif cornerBuffer[2]=='D':
                    index = [8,6,2]
            elif cornerBuffer[1]=='D':
                if cornerBuffer[2]=='R':
                    index = [8,2,6]
                elif cornerBuffer[2]=='L':
                    index = [6,0,8]
            elif cornerBuffer[1]=='L':
                if cornerBuffer[2]=='U':
                    index = [0,2,6]
                elif cornerBuffer[2]=='D':
                    index = [6,8,0]
        elif cornerBuffer[0]=='R':
            if cornerBuffer[1]=='U':
                if cornerBuffer[2]=='F':
                    index = [0,8,2]
                elif cornerBuffer[2]=='B':
                    index = [2,2,0]
            elif cornerBuffer[1]=='F':
                if cornerBuffer[2]=='U':
                    index = [0,2,8]
                elif cornerBuffer[2]=='D':
                    index = [6,8,2]
            elif cornerBuffer[1]=='D':
                if cornerBuffer[2]=='F':
                    index = [6,2,8]
                elif cornerBuffer[2]=='B':
                    index = [8,8,6]
            elif cornerBuffer[1]=='B':
                if cornerBuffer[2]=='U':
                    index = [2,0,2]
                elif cornerBuffer[2]=='D':
                    index = [8,6,8]
        elif cornerBuffer[0]=='D':
            if cornerBuffer[1]=='F':
                if cornerBuffer[2]=='R':
                    index = [2,8,6]
                elif cornerBuffer[2]=='L':
                    index = [0,6,8]
            elif cornerBuffer[1]=='R':
                if cornerBuffer[2]=='F':
                    index = [2,6,8]
                elif cornerBuffer[2]=='B':
                    index = [8,8,6]
            elif cornerBuffer[1]=='B':
                if cornerBuffer[2]=='R':
                    index = [8,6,8]
                elif cornerBuffer[2]=='L':
                    index = [6,8,6]
            elif cornerBuffer[1]=='L':
                if cornerBuffer[2]=='F':
                    index = [0,8,6]
                elif cornerBuffer[2]=='B':
                    index = [6,6,8]
        elif cornerBuffer[0]=='B':
            if cornerBuffer[1]=='U':
                if cornerBuffer[2]=='R':
                    index = [0,2,2]
                elif cornerBuffer[2]=='L':
                    index = [2,0,0]
            elif cornerBuffer[1]=='R':
                if cornerBuffer[2]=='U':
                    index = [0,2,2]
                elif cornerBuffer[2]=='D':
                    index = [6,8,8]
            elif cornerBuffer[1]=='D':
                if cornerBuffer[2]=='R':
                    index = [6,8,8]
                elif cornerBuffer[2]=='L':
                    index = [8,6,6]
            elif cornerBuffer[1]=='L':
                if cornerBuffer[2]=='U':
                    index = [2,0,0]
                elif cornerBuffer[2]=='D':
                    index = [8,6,6]
        elif cornerBuffer[0]=='L':
            if cornerBuffer[1]=='U':
                if cornerBuffer[2]=='F':
                    index = [2,6,0]
                elif cornerBuffer[2]=='B':
                    index = [0,0,2]
            elif cornerBuffer[1]=='F':
                if cornerBuffer[2]=='U':
                    index = [2,0,6]
                elif cornerBuffer[2]=='D':
                    index = [8,6,0]
            elif cornerBuffer[1]=='D':
                if cornerBuffer[2]=='F':
                    index = [8,0,6]
                elif cornerBuffer[2]=='B':
                    index = [6,6,8]
            elif cornerBuffer[1]=='B':
                if cornerBuffer[2]=='U':
                    index = [0,2,0]
                elif cornerBuffer[2]=='D':
                    index = [6,8,6]
        return index
    def getCornerSequence(self):
        """Return the module-level list of recorded corner swaps."""
        return cornerSequence
cubeCorners = Corner() | corner_buffer = []
corner_sequence = []
class Corner:
corner_count = 0
corner_index = []
corner_color = []
def solve_corner(self, sides, cornerPriority):
corner_goal = sides['U'][0] == 'U' and sides['U'][2] == 'U' and (sides['U'][6] == 'U') and (sides['U'][8] == 'U') and (sides['F'][0] == 'F') and (sides['F'][2] == 'F') and (sides['F'][6] == 'F') and (sides['F'][8] == 'F') and (sides['R'][0] == 'R') and (sides['R'][2] == 'R') and (sides['R'][6] == 'R') and (sides['R'][8] == 'R') and (sides['D'][0] == 'D') and (sides['D'][2] == 'D') and (sides['D'][6] == 'D') and (sides['D'][8] == 'D') and (sides['B'][0] == 'B') and (sides['B'][2] == 'B') and (sides['B'][6] == 'B') and (sides['B'][8] == 'B') and (sides['L'][0] == 'L') and (sides['L'][2] == 'L') and (sides['L'][6] == 'L') and (sides['L'][8] == 'L')
if cornerGoal:
print('Corners are already solved!')
else:
if sides['L'][0] == 'L' and sides['U'][0] == 'U' and (sides['B'][2] == 'B') or (sides['L'][0] == 'U' and sides['U'][0] == 'B' and (sides['B'][2] == 'L')) or (sides['L'][0] == 'B' and sides['U'][0] == 'L' and (sides['B'][2] == 'U')):
corner_buffer = self.cornerBufferChange(sides, cornerPriority)
self.cornerCount = self.cornerCount + 1
if not (sides['L'][0] == 'L' and sides['U'][0] == 'U' and (sides['B'][2] == 'B') or (sides['L'][0] == 'U' and sides['U'][0] == 'B' and (sides['B'][2] == 'L')) or (sides['L'][0] == 'B' and sides['U'][0] == 'L' and (sides['B'][2] == 'U'))):
corner_buffer = [sides['L'][0], sides['U'][0], sides['B'][2]]
while 1:
corner_goalbuffer = sides['U'][2] == 'U' and sides['U'][6] == 'U' and (sides['U'][8] == 'U') and (sides['F'][0] == 'F') and (sides['F'][2] == 'F') and (sides['F'][6] == 'F') and (sides['F'][8] == 'F') and (sides['R'][0] == 'R') and (sides['R'][2] == 'R') and (sides['R'][6] == 'R') and (sides['R'][8] == 'R') and (sides['D'][0] == 'D') and (sides['D'][2] == 'D') and (sides['D'][6] == 'D') and (sides['D'][8] == 'D') and (sides['B'][0] == 'B') and (sides['B'][6] == 'B') and (sides['B'][8] == 'B') and (sides['L'][2] == 'L') and (sides['L'][6] == 'L') and (sides['L'][8] == 'L')
if cornerGoalbuffer:
sides['L'][0] = 'L'
sides['U'][0] = 'U'
sides['B'][2] = 'B'
if not self.cornerCount % 2 == 0:
sides_copy = sides.copy()
parity = [sidesCopy['U'][3], sidesCopy['L'][1], sidesCopy['U'][1], sidesCopy['B'][1]]
sides['U'][1] = parity[0]
sides['B'][1] = parity[1]
sides['U'][3] = parity[2]
sides['L'][1] = parity[3]
break
face = [cornerBuffer[0], cornerBuffer[1], cornerBuffer[2]]
prev_buffer = cornerBuffer
if sides['L'][0] == 'L' and sides['U'][0] == 'U' and (sides['B'][2] == 'B') or (sides['L'][0] == 'U' and sides['U'][0] == 'B' and (sides['B'][2] == 'L')) or (sides['L'][0] == 'B' and sides['U'][0] == 'L' and (sides['B'][2] == 'U')):
corner_buffer = self.cornerBufferChange(sides, cornerPriority)
else:
cornerSequence.append(cornerBuffer)
self.cornerColor.append(cornerBuffer)
index = self.getCornerIndex(cornerBuffer)
self.cornerIndex.append([cornerBuffer[0], index[0], cornerBuffer[1], index[1], cornerBuffer[2], index[2]])
corner_buffer = [sides[cornerBuffer[0]][index[0]], sides[cornerBuffer[1]][index[1]], sides[cornerBuffer[2]][index[2]]]
sides[prevBuffer[0]][index[0]] = prevBuffer[0]
sides[prevBuffer[1]][index[1]] = prevBuffer[1]
sides[prevBuffer[2]][index[2]] = prevBuffer[2]
sides['L'][0] = cornerBuffer[0]
sides['U'][0] = cornerBuffer[1]
sides['B'][2] = cornerBuffer[2]
self.cornerCount = self.cornerCount + 1
return sides
def corner_buffer_change(self, sides, cornerPriority):
sides_copy = sides.copy()
a1 = ''
a2 = 0
b1 = ''
b2 = 0
c1 = ''
c2 = 0
if not (sides[cornerPriority[0][0]][cornerPriority[0][1]] == 'U' and sides[cornerPriority[0][2]][cornerPriority[0][3]] == 'R' and (sides[cornerPriority[0][4]][cornerPriority[0][5]] == 'B')):
a1 = 'U'
a2 = 2
b1 = 'R'
b2 = 2
c1 = 'B'
c2 = 0
corner_buffer = ['U', 'R', 'B']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[1][0]][cornerPriority[1][1]] == 'U' and sides[cornerPriority[1][2]][cornerPriority[1][3]] == 'L' and (sides[cornerPriority[1][4]][cornerPriority[1][5]] == 'F')):
a1 = 'U'
a2 = 6
b1 = 'L'
b2 = 2
c1 = 'F'
c2 = 0
corner_buffer = ['U', 'L', 'F']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[2][0]][cornerPriority[2][1]] == 'U' and sides[cornerPriority[2][2]][cornerPriority[2][3]] == 'F' and (sides[cornerPriority[2][4]][cornerPriority[2][5]] == 'R')):
a1 = 'U'
a2 = 8
b1 = 'F'
b2 = 2
c1 = 'R'
c2 = 0
corner_buffer = ['U', 'F', 'R']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[3][0]][cornerPriority[3][1]] == 'D' and sides[cornerPriority[3][2]][cornerPriority[3][3]] == 'F' and (sides[cornerPriority[3][4]][cornerPriority[3][5]] == 'L')):
a1 = 'D'
a2 = 0
b1 = 'F'
b2 = 6
c1 = 'L'
c2 = 8
corner_buffer = ['D', 'F', 'L']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[4][0]][cornerPriority[4][1]] == 'D' and sides[cornerPriority[4][2]][cornerPriority[4][3]] == 'R' and (sides[cornerPriority[4][4]][cornerPriority[4][5]] == 'F')):
a1 = 'D'
a2 = 2
b1 = 'R'
b2 = 6
c1 = 'F'
c2 = 8
corner_buffer = ['D', 'R', 'F']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[5][0]][cornerPriority[5][1]] == 'D' and sides[cornerPriority[5][2]][cornerPriority[5][3]] == 'L' and (sides[cornerPriority[5][4]][cornerPriority[5][5]] == 'B')):
a1 = 'D'
a2 = 6
b1 = 'L'
b2 = 6
c1 = 'B'
c2 = 8
corner_buffer = ['D', 'L', 'B']
cornerSequence.append(cornerBuffer)
elif not (sides[cornerPriority[6][0]][cornerPriority[6][1]] == 'D' and sides[cornerPriority[6][2]][cornerPriority[6][3]] == 'B' and (sides[cornerPriority[6][4]][cornerPriority[6][5]] == 'R')):
a1 = 'D'
a2 = 8
b1 = 'B'
b2 = 6
c1 = 'R'
c2 = 8
corner_buffer = ['D', 'B', 'R']
cornerSequence.append(cornerBuffer)
index = self.getCornerIndex(cornerBuffer)
self.cornerIndex.append([cornerBuffer[0], index[0], cornerBuffer[1], index[1], cornerBuffer[2], index[2]])
swap = [sidesCopy['L'][0], sidesCopy['U'][0], sidesCopy['B'][2], sidesCopy[a1][a2], sidesCopy[b1][b2], sidesCopy[c1][c2]]
corner_buffer = [sides[a1][a2], sides[b1][b2], sides[c1][c2]]
if sides['L'][0] == 'L' and sides['U'][0] == 'U' and (sides['B'][2] == 'B'):
self.cornerColor.append(['L', 'U', 'B'])
elif sides['L'][0] == 'U' and sides['U'][0] == 'B' and (sides['B'][2] == 'L'):
self.cornerColor.append(['U', 'B', 'L'])
elif sides['L'][0] == 'B' and sides['U'][0] == 'L' and (sides['B'][2] == 'U'):
self.cornerColor.append(['B', 'L', 'U'])
sides[a1][a2] = swap[0]
sides[b1][b2] = swap[1]
sides[c1][c2] = swap[2]
sides['L'][0] = swap[3]
sides['U'][0] = swap[4]
sides['B'][2] = swap[5]
return cornerBuffer
def get_corner_index(self, cornerBuffer):
if cornerBuffer[0] == 'U':
if cornerBuffer[1] == 'R':
if cornerBuffer[2] == 'F':
index = [8, 0, 2]
elif cornerBuffer[2] == 'B':
index = [2, 2, 0]
elif cornerBuffer[1] == 'F':
if cornerBuffer[2] == 'R':
index = [8, 2, 0]
elif cornerBuffer[2] == 'L':
index = [6, 0, 2]
elif cornerBuffer[1] == 'B':
if cornerBuffer[2] == 'R':
index = [2, 0, 2]
elif cornerBuffer[2] == 'L':
index = [0, 2, 0]
elif cornerBuffer[1] == 'L':
if cornerBuffer[2] == 'F':
index = [6, 2, 0]
elif cornerBuffer[2] == 'B':
index = [0, 0, 2]
elif cornerBuffer[0] == 'F':
if cornerBuffer[1] == 'U':
if cornerBuffer[2] == 'R':
index = [2, 8, 0]
elif cornerBuffer[2] == 'L':
index = [0, 6, 2]
elif cornerBuffer[1] == 'R':
if cornerBuffer[2] == 'U':
index = [2, 0, 8]
elif cornerBuffer[2] == 'D':
index = [8, 6, 2]
elif cornerBuffer[1] == 'D':
if cornerBuffer[2] == 'R':
index = [8, 2, 6]
elif cornerBuffer[2] == 'L':
index = [6, 0, 8]
elif cornerBuffer[1] == 'L':
if cornerBuffer[2] == 'U':
index = [0, 2, 6]
elif cornerBuffer[2] == 'D':
index = [6, 8, 0]
elif cornerBuffer[0] == 'R':
if cornerBuffer[1] == 'U':
if cornerBuffer[2] == 'F':
index = [0, 8, 2]
elif cornerBuffer[2] == 'B':
index = [2, 2, 0]
elif cornerBuffer[1] == 'F':
if cornerBuffer[2] == 'U':
index = [0, 2, 8]
elif cornerBuffer[2] == 'D':
index = [6, 8, 2]
elif cornerBuffer[1] == 'D':
if cornerBuffer[2] == 'F':
index = [6, 2, 8]
elif cornerBuffer[2] == 'B':
index = [8, 8, 6]
elif cornerBuffer[1] == 'B':
if cornerBuffer[2] == 'U':
index = [2, 0, 2]
elif cornerBuffer[2] == 'D':
index = [8, 6, 8]
elif cornerBuffer[0] == 'D':
if cornerBuffer[1] == 'F':
if cornerBuffer[2] == 'R':
index = [2, 8, 6]
elif cornerBuffer[2] == 'L':
index = [0, 6, 8]
elif cornerBuffer[1] == 'R':
if cornerBuffer[2] == 'F':
index = [2, 6, 8]
elif cornerBuffer[2] == 'B':
index = [8, 8, 6]
elif cornerBuffer[1] == 'B':
if cornerBuffer[2] == 'R':
index = [8, 6, 8]
elif cornerBuffer[2] == 'L':
index = [6, 8, 6]
elif cornerBuffer[1] == 'L':
if cornerBuffer[2] == 'F':
index = [0, 8, 6]
elif cornerBuffer[2] == 'B':
index = [6, 6, 8]
elif cornerBuffer[0] == 'B':
if cornerBuffer[1] == 'U':
if cornerBuffer[2] == 'R':
index = [0, 2, 2]
elif cornerBuffer[2] == 'L':
index = [2, 0, 0]
elif cornerBuffer[1] == 'R':
if cornerBuffer[2] == 'U':
index = [0, 2, 2]
elif cornerBuffer[2] == 'D':
index = [6, 8, 8]
elif cornerBuffer[1] == 'D':
if cornerBuffer[2] == 'R':
index = [6, 8, 8]
elif cornerBuffer[2] == 'L':
index = [8, 6, 6]
elif cornerBuffer[1] == 'L':
if cornerBuffer[2] == 'U':
index = [2, 0, 0]
elif cornerBuffer[2] == 'D':
index = [8, 6, 6]
elif cornerBuffer[0] == 'L':
if cornerBuffer[1] == 'U':
if cornerBuffer[2] == 'F':
index = [2, 6, 0]
elif cornerBuffer[2] == 'B':
index = [0, 0, 2]
elif cornerBuffer[1] == 'F':
if cornerBuffer[2] == 'U':
index = [2, 0, 6]
elif cornerBuffer[2] == 'D':
index = [8, 6, 0]
elif cornerBuffer[1] == 'D':
if cornerBuffer[2] == 'F':
index = [8, 0, 6]
elif cornerBuffer[2] == 'B':
index = [6, 6, 8]
elif cornerBuffer[1] == 'B':
if cornerBuffer[2] == 'U':
index = [0, 2, 0]
elif cornerBuffer[2] == 'D':
index = [6, 8, 6]
return index
def get_corner_sequence(self):
return cornerSequence
cube_corners = Corner() |
# ========================
# Information
# ========================
# Direct Link: https://www.hackerrank.com/challenges/30-scope/problem
# Difficulty: Easy
# Max Score: 30
# Language: Python
# ========================
# Solution
# ========================
class Difference:
    """Computes the maximum absolute difference between any two elements."""

    def __init__(self, a):
        self.__elements = a
        self.maximumDifference = 0

    def computeDifference(self):
        """Set self.maximumDifference to max |x - y| over all element pairs.

        Bug fix: the original iterated the module-level global `a` instead of
        the elements stored on the instance. Also O(n) instead of O(n^2):
        the maximum pairwise absolute difference is simply max - min.
        An empty input leaves the difference at its initial 0.
        """
        if self.__elements:
            self.maximumDifference = max(self.__elements) - min(self.__elements)
# End of Difference class
# Read input and report the maximum pairwise absolute difference.
_ = input()  # element count; unused, the list is parsed from the next line
a = [int(e) for e in input().split(' ')]
d = Difference(a)
d.computeDifference()
print(d.maximumDifference)
| class Difference:
def __init__(self, a):
self.__elements = a
self.maximumDifference = 0
def compute_difference(self):
l = len(a)
for i in range(0, l):
for j in range(i + 1, l):
difference = abs(a[i] - a[j])
self.maximumDifference = max(difference, self.maximumDifference)
_ = input()
a = [int(e) for e in input().split(' ')]
d = Difference(a)
d.computeDifference()
print(d.maximumDifference) |
class Solution:
    def numsSameConsecDiff(self, N: int, K: int) -> List[int]:
        """Return all N-digit numbers whose adjacent digits differ by K.

        For N == 1 the single-digit numbers 0-9 are all valid. Otherwise
        numbers are grown level by level (one digit per level), which keeps
        the same output order as a left-popping BFS queue.
        """
        level = list(range(1, 10))
        if N == 1:
            return [0] + level
        for _ in range(N - 1):
            grown = []
            for num in level:
                digit = num % 10
                if K == 0:
                    # Single branch avoids emitting the same number twice.
                    grown.append(num * 10 + digit)
                else:
                    if digit + K <= 9:
                        grown.append(num * 10 + digit + K)
                    if digit - K >= 0:
                        grown.append(num * 10 + digit - K)
            level = grown
        return level
| class Solution:
def nums_same_consec_diff(self, N: int, K: int) -> List[int]:
ans = [i for i in range(1, 10)]
if N == 1:
return [0] + ans
digits = 10 ** (N - 1)
while ans[0] / digits < 1:
cur = ans.pop(0)
last_digit = cur % 10
if K == 0:
ans.append(cur * 10 + last_digit)
else:
if last_digit + K < 10:
ans.append(cur * 10 + last_digit + K)
if last_digit - K >= 0:
ans.append(cur * 10 + last_digit - K)
return ans |
class Body:
supported_characteristics = ['m', 'mass',
'J', 'inertia']
def __init__(self, options):
for key in options.keys():
if key in Body.supported_characteristics:
val = options[key]
if key in ['m', 'mass']:
self.mass = val
elif key in ['J', 'inertia']:
self.J = val
else:
Log.print('Not supported key:{}'.format(key)) | class Body:
supported_characteristics = ['m', 'mass', 'J', 'inertia']
def __init__(self, options):
for key in options.keys():
if key in Body.supported_characteristics:
val = options[key]
if key in ['m', 'mass']:
self.mass = val
elif key in ['J', 'inertia']:
self.J = val
else:
Log.print('Not supported key:{}'.format(key)) |
#!/usr/bin/env python3
#
## @file
# cache_args.py
#
# Copyright (c) 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
''' Contains the help and description strings for arguments in the
cache command meta data.
'''

# Top-level description shown for the cache sub-command.
COMMAND_DESCRIPTION = (
    'Manages local caching support for project repos. The goal of this feature '
    'is to improve clone performance'
)

# Help strings for the individual cache sub-command options.
COMMAND_ENABLE_HELP = 'Enables caching support on the system.'
COMMAND_DISABLE_HELP = 'Disables caching support on the system.'
COMMAND_UPDATE_HELP = 'Update the repo cache for all cached projects.'
COMMAND_INFO_HELP = 'Display the current cache information.'
COMMAND_PROJECT_HELP = 'Project to add to the cache.'
| """ Contains the help and description strings for arguments in the
cache command meta data.
"""
command_description = 'Manages local caching support for project repos. The goal of this feature is to improve clone performance'
command_enable_help = 'Enables caching support on the system.'
command_disable_help = 'Disables caching support on the system.'
command_update_help = 'Update the repo cache for all cached projects.'
command_info_help = 'Display the current cache information.'
command_project_help = 'Project to add to the cache.' |
# Binary Tree:
# 1. Every node has at most two children.
# 2. Each child node is labeled as being either a left child or a right child.
# 3. A left child precedes a right child in the order of children of a node.
# proper binary tree: each node has two or zero child.
class Tree:
    """Abstract base class representing a tree structure."""

    class Position:
        """An abstraction representing the location of a single element."""

        def element(self):
            """Return the element stored at this Position."""
            raise NotImplementedError

        def __eq__(self, other):
            """Return True if other Position represents the same location."""
            raise NotImplementedError

        def __ne__(self, other):
            """Return True if other does not represent the same location."""
            return not (self == other)

    def root(self):
        """Return Position representing the tree's root (or None if empty)."""
        raise NotImplementedError

    def parent(self, p):
        """Return Position representing p's parent (or None if p is root)."""
        raise NotImplementedError

    def num_children(self, p):
        """Return the number of children that Position p has."""
        raise NotImplementedError

    def children(self, p):
        """Generate an iteration of Positions representing p's children."""
        raise NotImplementedError

    def __len__(self):
        """Return the total number of elements in the tree."""
        raise NotImplementedError

    def is_root(self, p):
        """Return True if Position p represents the root of the tree."""
        return self.root() == p

    def is_leaf(self, p):
        """Return True if Position p does not have any children."""
        return self.num_children(p) == 0

    def is_empty(self, p=None):
        """Return True if the tree is empty.

        Fix: `p` was a required positional argument although it is never
        used; it is now optional (kept for backward compatibility with
        existing callers that still pass a Position).
        """
        return len(self) == 0
class BinaryTree(Tree):
    """Abstract base class representing a binary tree structure."""

    def left(self, p):
        """Return a Position representing p's left child.

        Return None if p does not have a left child.
        """
        raise NotImplementedError

    def right(self, p):
        """Return a Position representing p's right child.

        Return None if p does not have a right child.
        """
        raise NotImplementedError

    def sibling(self, p):
        """Return a Position representing p's sibling (or None if no sibling)."""
        parent = self.parent(p)
        if parent is None:  # fix: identity check, not `== None` (PEP 8)
            return None  # the root has no sibling
        if p == self.left(parent):
            return self.right(parent)
        return self.left(parent)

    def children(self, p):
        """Generate an iteration of Positions for p's children (left first)."""
        if self.left(p) is not None:
            yield self.left(p)
        if self.right(p) is not None:
            yield self.right(p)
class Tree:
    """Abstract base class representing a tree structure.

    Concrete subclasses must implement root/parent/num_children/children
    and __len__; the convenience queries below are defined in terms of them.
    """

    class Position:
        """An abstraction representing the location of a single element."""

        def element(self):
            """Return the element stored at this Position."""
            raise NotImplementedError

        def __eq__(self, other):
            """Return True if other Position represents the same location."""
            raise NotImplementedError

        def __ne__(self, other):
            """Return True if other does not represent the same location."""
            return not self == other

    def root(self):
        """Return Position representing the tree's root (or None if empty)."""
        raise NotImplementedError

    def parent(self, p):
        """Return Position representing p's parent (or None if p is root)."""
        raise NotImplementedError

    def num_children(self, p):
        """Return the number of children that Position p has."""
        raise NotImplementedError

    def children(self, p):
        """Generate an iteration of Positions representing p's children."""
        raise NotImplementedError

    def __len__(self):
        """Return the total number of elements in the tree."""
        raise NotImplementedError

    def is_root(self, p):
        """Return True if Position p represents the root of the tree."""
        return self.root() == p

    def is_leaf(self, p):
        """Return True if Position p does not have any children."""
        return self.num_children(p) == 0

    def is_empty(self, p):
        """Return True if the tree is empty (p is unused, kept for compat)."""
        return len(self) == 0
class Binarytree(Tree):
    """Abstract base class representing a binary tree structure."""

    def left(self, p):
        """Return a Position representing p's left child.

        Return None if p does not have a left child.
        """
        raise NotImplementedError

    def right(self, p):
        """Return a Position representing p's right child.

        Return None if p does not have a right child.
        """
        raise NotImplementedError

    def sibling(self, p):
        """Return a Position representing p's sibling (or None if no sibling)."""
        parent = self.parent(p)
        # 'is None' avoids invoking Position.__eq__, which is abstract in Tree.
        if parent is None:
            return None
        elif p == self.left(parent):
            return self.right(parent)
        else:
            return self.left(parent)

    def children(self, p):
        """Generate an iteration of Positions representing p's children."""
        if self.left(p) is not None:
            yield self.left(p)
        if self.right(p) is not None:
            yield self.right(p)
class Building(object):
    """A building secured by a shared metal-key lock."""

    lock = MetalKey()  # class-level lock shared by plain buildings

    def unlock(self):
        """Try to open this building's lock."""
        self.lock.attempt_unlock()
class KeyCardMixin(object):
    """Mixin that overrides the class-level lock with a key-card reader."""

    lock = KeyCard()
class ExcellaHQ(KeyCardMixin, Building):
    # KeyCardMixin comes first in the MRO, so lookups of ``lock`` find the
    # KeyCard instance while unlock() is inherited from Building.
    pass
| class Building(object):
    # NOTE(review): metal_key() / key_card() are lower-cased versions of the
    # MetalKey/KeyCard constructors used in the sibling definition above --
    # they are undefined here; looks like a half-applied automated rename.
    lock = metal_key()
    def unlock(self):
        self.lock.attempt_unlock()
class Keycardmixin(object):
    lock = key_card()
class Excellahq(KeyCardMixin, Building):
    # NOTE(review): the base is spelled KeyCardMixin, but this section defines
    # Keycardmixin -- as written this line raises NameError; confirm intent.
    pass
def fibonacci(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Computed iteratively in O(n) time instead of the original naive double
    recursion, which is O(2**n); results for n >= 0 are unchanged.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
def test():
    """Print the first 15 Fibonacci numbers, one per line."""
    for idx in range(15):
        line = "fib[" + str(idx) + "]: " + str(fibonacci(idx))
        print(line)


test()
def fibonacci(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Iterative O(n) replacement for the original O(2**n) double recursion;
    results for n >= 0 are unchanged.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a


def test():
    """Print fib(0) .. fib(14), one per line."""
    for i in range(15):
        print('fib[' + str(i) + ']: ' + str(fibonacci(i)))


test()
# Tiny assembler script: parses "MNEMONIC operands" lines from a text file,
# encodes each instruction into 16 bits (4-bit opcode + operand fields), and
# writes the program as hex for both Logisim and Verilog.
instruct = [] # keeps the splitted version of the instructions
filename = input("Enter the filename: ") # gets the filename as the input
print("Reading...")
with open(filename) as f: # goes through the file line by line, and splits each line from the spaces, and adds them into instruct list
    for line in f:
        instruct.append(line.split(" "))
f.close() # redundant: the with-statement has already closed the file
# goes through the instruct list, and separates the registers and immediate values from commas, and updates instruct list
for x in range(len(instruct)):
    instruct[x][1] = instruct[x][1].split(",")
# goes through the instruct list, and separates R letter, and its number which indicates the number of the register
# (splitting on "R" leaves an empty string in slot 0 for register operands,
# which is how registers are told apart from immediates further below)
for x in range(len(instruct)):
    for t in range(len(instruct[x][1])):
        instruct[x][1][t] = instruct[x][1][t].split("R")
# keeps the binary values of the instructions
binary = []
# finds the instructions' opcodes, and adds them to binary list
for i in range(len(instruct)):
    if instruct[i][0] == "AND":
        binary.append("0010")
    elif instruct[i][0] == "OR":
        binary.append("0000")
    elif instruct[i][0] == "ADD":
        binary.append("0100")
    elif instruct[i][0] == "LD":
        binary.append("0111")
    elif instruct[i][0] == "ST":
        binary.append("1000")
    elif instruct[i][0] == "ANDI":
        binary.append("0011")
    elif instruct[i][0] == "ORI":
        binary.append("0001")
    elif instruct[i][0] == "ADDI":
        binary.append("0101")
    elif instruct[i][0] == "JUMP":
        binary.append("0110")
    elif instruct[i][0] == "PUSH":
        binary.append("1001")
    elif instruct[i][0] == "POP":
        binary.append("1010")
# temporary list to be used when transforming binOfNum into string
p = []
# converts the immediate values, and the register numbers into binary code, and saves it into first binOfNum, then binary lists
# goes through the instruct list
for i in range(len(instruct)):
    t=0;
    for j in range(len(instruct[i][1])):
        # if the instruction has 3 arguments, then enters here, and sets the values of binOfNum and index
        if (len(instruct[i][1]))==3 or (len(instruct[i][1])==2 and instruct[i][1][0][0] == "") and t!=1:
            binOfNum = [0, 0, 0, 0]
            index = 3
            t=1;
        # if the instruction has 2 arguments, then enters here, and sets the values of binOfNum and index
        elif len(instruct[i][1]) == 2:
            binOfNum = [0, 0, 0, 0, 0, 0, 0, 0]
            index = 7
        # if the instructions has 1 arguments, then enters here
        elif len(instruct[i][1]) == 1:
            binOfNum = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            index = 11
        # if the number represents the register number, then enters here
        if instruct[i][1][j][0] == "":
            # converts the string value in the instruct list into integer
            temp = int(instruct[i][1][j][1])
            # finds the corresponding binary code for the temp number
            while temp!=0 and index>=0:
                binOfNum[index] = temp%2
                temp = int(temp/2)
                index -= 1
            # converts the binOfNum values into string, and passes it into p list, then adds it to the related binary list elements
            p = ''.join(str(e) for e in binOfNum)
            binary[i] = binary[i] + "" + p
        # if it is an immediate value, then enters here
        elif instruct[i][1][j][0] != "":
            # converts the value into integer
            temp = int(instruct[i][1][j][0])
            # if the immediate value is larger than zero, enters here
            if temp > 0:
                # converts temp into binary code
                while temp != 0 and index>=0:
                    binOfNum[index] = temp%2
                    temp = int(temp/2)
                    index -= 1
            # if the immdiate value is smaller than zero, enters here
            elif temp < 0:
                # finds its positive value
                temp2 = temp*-1
                # gets the binary code of the positive version of temp
                while temp2!=0 and index >=0:
                    binOfNum[index] = temp2%2
                    temp2 = int(temp2/2)
                    index -=1
                # two's complement process starts
                # inverts the binary code of the immediate value
                for t in range(len(binOfNum)):
                    if binOfNum[t] == 0:
                        binOfNum[t] = 1
                    elif binOfNum[t] == 1:
                        binOfNum[t] = 0
                # part where 1 is added
                # if the last bit is zero, it changes it into 1
                if binOfNum[len(binOfNum)-1] == 0:
                    binOfNum[len(binOfNum)-1] = 1
                # if it's not zero, then enters here
                else:
                    # finds the iterator value
                    ite = len(binOfNum)-1
                    # goes until it finds an element with the value of zero
                    while ite>=0 and binOfNum[ite]!=0:
                        binOfNum[ite] = 0
                        ite -= 1
                    # when it finishes its job with the loop, changes the value of current binOfNum's value
                    binOfNum[ite] = 1
            # adds it into the corresponding element of the binary list
            p = ''.join(str(e) for e in binOfNum)
            binary[i] = binary[i] + "" + p
# in this part binary values are transformed into hexadecimal values
sum = 0 # keeps the sum of 4 bits to find its hexadecimal value (NOTE: shadows the builtin sum())
hexa =[] # keeps the corresponding hexadecimal values of binary code
sumStr = "" # if the hexadecimal value is a letter, then sumStr is used
# goes through the binary list
for i in range(len(binary)):
    hexa.append("")
    # evaluates the values according to their bits
    for j in range(len(binary[i])):
        # if the current element corresponds to the leftmost bit of a 4 bit group,
        # then it multiplies that bit value with 8, and adds it to sum
        if j%4 == 0 :
            sum=0
            sum = sum + int(binary[i][j])*8
        # if the current element corresponds to the second bit from the left of a 4 bit group,
        # then it multiplies that bit value with 4, and adds it to sum
        elif j%4 == 1 :
            sum = sum + int(binary[i][j])*4
        # if the current element corresponds to the third bit from the left of a 4 bit group,
        # then it multiplies that bit with 2, and adds it to sum
        elif j%4 == 2 :
            sum = sum + int(binary[i][j])*2
        # if the current element is the last bit of a 4 bit group,
        # then it multiplies the value of that bit with 1, and adds it to sum
        elif j%4 == 3 :
            sum = sum + int(binary[i][j])*1
        # after we used the last bit of a 4 bit group, it enters here to calculate its hexadecimal equivalent
        if j%4 == 3 and j!= 0 :
            # enters here if the sum value is less than 10, and it is added to as a number
            if(sum < 10):
                hexa[i] = hexa[i] + "" +str(sum)
            # if sum is larger than 10, enters here to get its equivalent letter, and adds it as a string
            else:
                if sum==10:
                    sumStr="A"
                elif sum==11:
                    sumStr="B"
                elif sum==12:
                    sumStr="C"
                elif sum==13:
                    sumStr="D"
                elif sum==14:
                    sumStr="E"
                elif sum==15:
                    sumStr="F"
                hexa[i] = hexa[i] + sumStr
    print(hexa[i])
print("Writing...")
# creates the file for logisim, and writes hexadecimal values into it
dest = open("logisim_instructions","w")
dest.write("v2.0 raw")
dest.write("\n")
for i in range(len(hexa)):
    dest.write(hexa[i])
    dest.write(" ")
dest.close()
# creates the file for verilog, and writes hexadecimal values into it
dest2 = open("verilog_instructions.hex","w")
for i in range(len(hexa)):
    dest2.write(hexa[i])
    dest2.write(" ")
dest2.close()
# Formatted copy of the mini assembler. Fixes applied: removed the stray
# table-separator residue from the first line, and completed the half-done
# variable rename (bin_of_num / sum_str were assigned but binOfNum / sumStr
# were still referenced, which raised NameError at runtime).
instruct = []
filename = input('Enter the filename: ')
print('Reading...')
with open(filename) as f:
    for line in f:
        instruct.append(line.split(' '))
f.close()
# split each operand list on commas
for x in range(len(instruct)):
    instruct[x][1] = instruct[x][1].split(',')
# split each operand on "R"; register operands leave '' in slot 0
for x in range(len(instruct)):
    for t in range(len(instruct[x][1])):
        instruct[x][1][t] = instruct[x][1][t].split('R')
binary = []
# 4-bit opcode for each mnemonic
for i in range(len(instruct)):
    if instruct[i][0] == 'AND':
        binary.append('0010')
    elif instruct[i][0] == 'OR':
        binary.append('0000')
    elif instruct[i][0] == 'ADD':
        binary.append('0100')
    elif instruct[i][0] == 'LD':
        binary.append('0111')
    elif instruct[i][0] == 'ST':
        binary.append('1000')
    elif instruct[i][0] == 'ANDI':
        binary.append('0011')
    elif instruct[i][0] == 'ORI':
        binary.append('0001')
    elif instruct[i][0] == 'ADDI':
        binary.append('0101')
    elif instruct[i][0] == 'JUMP':
        binary.append('0110')
    elif instruct[i][0] == 'PUSH':
        binary.append('1001')
    elif instruct[i][0] == 'POP':
        binary.append('1010')
p = []
# encode register numbers and immediates into fixed-width bit fields
for i in range(len(instruct)):
    t = 0
    for j in range(len(instruct[i][1])):
        # field width depends on the operand count (4 / 8 / 12 bits)
        if len(instruct[i][1]) == 3 or ((len(instruct[i][1]) == 2 and instruct[i][1][0][0] == '') and t != 1):
            bin_of_num = [0, 0, 0, 0]
            index = 3
            t = 1
        elif len(instruct[i][1]) == 2:
            bin_of_num = [0, 0, 0, 0, 0, 0, 0, 0]
            index = 7
        elif len(instruct[i][1]) == 1:
            bin_of_num = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            index = 11
        if instruct[i][1][j][0] == '':
            # register operand: encode its number
            temp = int(instruct[i][1][j][1])
            while temp != 0 and index >= 0:
                bin_of_num[index] = temp % 2
                temp = int(temp / 2)
                index -= 1
            p = ''.join(str(e) for e in bin_of_num)
            binary[i] = binary[i] + '' + p
        elif instruct[i][1][j][0] != '':
            # immediate operand: encode value (two's complement if negative)
            temp = int(instruct[i][1][j][0])
            if temp > 0:
                while temp != 0 and index >= 0:
                    bin_of_num[index] = temp % 2
                    temp = int(temp / 2)
                    index -= 1
            elif temp < 0:
                temp2 = temp * -1
                while temp2 != 0 and index >= 0:
                    bin_of_num[index] = temp2 % 2
                    temp2 = int(temp2 / 2)
                    index -= 1
                # two's complement: invert all bits ...
                for t in range(len(bin_of_num)):
                    if bin_of_num[t] == 0:
                        bin_of_num[t] = 1
                    elif bin_of_num[t] == 1:
                        bin_of_num[t] = 0
                # ... then add one
                if bin_of_num[len(bin_of_num) - 1] == 0:
                    bin_of_num[len(bin_of_num) - 1] = 1
                else:
                    ite = len(bin_of_num) - 1
                    while ite >= 0 and bin_of_num[ite] != 0:
                        bin_of_num[ite] = 0
                        ite -= 1
                    bin_of_num[ite] = 1
            p = ''.join(str(e) for e in bin_of_num)
            binary[i] = binary[i] + '' + p
# convert each 4-bit group of the binary strings into one hex digit
sum = 0
hexa = []
sum_str = ''
for i in range(len(binary)):
    hexa.append('')
    for j in range(len(binary[i])):
        if j % 4 == 0:
            sum = 0
            sum = sum + int(binary[i][j]) * 8
        elif j % 4 == 1:
            sum = sum + int(binary[i][j]) * 4
        elif j % 4 == 2:
            sum = sum + int(binary[i][j]) * 2
        elif j % 4 == 3:
            sum = sum + int(binary[i][j]) * 1
        if j % 4 == 3 and j != 0:
            if sum < 10:
                hexa[i] = hexa[i] + '' + str(sum)
            else:
                if sum == 10:
                    sum_str = 'A'
                elif sum == 11:
                    sum_str = 'B'
                elif sum == 12:
                    sum_str = 'C'
                elif sum == 13:
                    sum_str = 'D'
                elif sum == 14:
                    sum_str = 'E'
                elif sum == 15:
                    sum_str = 'F'
                hexa[i] = hexa[i] + sum_str
    print(hexa[i])
print('Writing...')
# Logisim memory image
dest = open('logisim_instructions', 'w')
dest.write('v2.0 raw')
dest.write('\n')
for i in range(len(hexa)):
    dest.write(hexa[i])
    dest.write(' ')
dest.close()
# Verilog hex image
dest2 = open('verilog_instructions.hex', 'w')
for i in range(len(hexa)):
    dest2.write(hexa[i])
    dest2.write(' ')
dest2.close()
"""A collection of definitions for generated benchmark scenarios.
Each generated scenario is defined by the a number of parameters that
control the size of the problem (see scenario.generator for more info):
There are also some parameters, where default values are used for all
scenarios, see DEFAULTS dict.
"""
# generated environment constants
DEFAULTS = dict(
num_exploits=None,
num_privescs=None,
r_sensitive=100,
r_user=100,
exploit_cost=1,
exploit_probs='mixed',
privesc_cost=1,
privesc_probs=1.0,
service_scan_cost=1,
os_scan_cost=1,
subnet_scan_cost=1,
process_scan_cost=1,
uniform=False,
alpha_H=2.0,
alpha_V=2.0,
lambda_V=1.0,
random_goal=False,
base_host_value=1,
host_discovery_value=1,
step_limit=1000
)
# Generated Scenario definitions
TINY_GEN = {**DEFAULTS,
"name": "tiny-gen",
"num_hosts": 3,
"num_os": 1,
"num_services": 1,
"num_processes": 1,
"restrictiveness": 1}
TINY_GEN_RGOAL = {**DEFAULTS,
"name": "tiny-gen-rangoal",
"num_hosts": 3,
"num_os": 1,
"num_services": 1,
"num_processes": 1,
"restrictiveness": 1,
"random_goal": True}
SMALL_GEN = {**DEFAULTS,
"name": "small-gen",
"num_hosts": 8,
"num_os": 2,
"num_services": 3,
"num_processes": 2,
"restrictiveness": 2}
SMALL_GEN_RGOAL = {**DEFAULTS,
"name": "small-gen-rangoal",
"num_hosts": 8,
"num_os": 2,
"num_services": 3,
"num_processes": 2,
"restrictiveness": 2,
"random_goal": True}
MEDIUM_GEN = {**DEFAULTS,
"name": "medium-gen",
"num_hosts": 16,
"num_os": 2,
"num_services": 5,
"num_processes": 2,
"restrictiveness": 3,
"step_limit": 2000}
LARGE_GEN = {**DEFAULTS,
"name": "large-gen",
"num_hosts": 23,
"num_os": 3,
"num_services": 7,
"num_processes": 3,
"restrictiveness": 3,
"step_limit": 5000}
HUGE_GEN = {**DEFAULTS,
"name": "huge-gen",
"num_hosts": 38,
"num_os": 4,
"num_services": 10,
"num_processes": 4,
"restrictiveness": 3,
"step_limit": 10000}
POCP_1_GEN = {**DEFAULTS,
"name": "pocp-1-gen",
"num_hosts": 35,
"num_os": 2,
"num_services": 50,
"num_exploits": 60,
"num_processes": 2,
"restrictiveness": 5,
"step_limit": 30000}
POCP_2_GEN = {**DEFAULTS,
"name": "pocp-2-gen",
"num_hosts": 95,
"num_os": 3,
"num_services": 10,
"num_exploits": 30,
"num_processes": 3,
"restrictiveness": 5,
"step_limit": 30000}
AVAIL_GEN_BENCHMARKS = {
"tiny-gen": TINY_GEN,
"tiny-gen-rgoal": TINY_GEN_RGOAL,
"small-gen": SMALL_GEN,
"small-gen-rgoal": SMALL_GEN_RGOAL,
"medium-gen": MEDIUM_GEN,
"large-gen": LARGE_GEN,
"huge-gen": HUGE_GEN,
"pocp-1-gen": POCP_1_GEN,
"pocp-2-gen": POCP_2_GEN
}
| """A collection of definitions for generated benchmark scenarios.
Each generated scenario is defined by the a number of parameters that
control the size of the problem (see scenario.generator for more info):
There are also some parameters, where default values are used for all
scenarios, see DEFAULTS dict.
"""
defaults = dict(num_exploits=None, num_privescs=None, r_sensitive=100, r_user=100, exploit_cost=1, exploit_probs='mixed', privesc_cost=1, privesc_probs=1.0, service_scan_cost=1, os_scan_cost=1, subnet_scan_cost=1, process_scan_cost=1, uniform=False, alpha_H=2.0, alpha_V=2.0, lambda_V=1.0, random_goal=False, base_host_value=1, host_discovery_value=1, step_limit=1000)
tiny_gen = {**DEFAULTS, 'name': 'tiny-gen', 'num_hosts': 3, 'num_os': 1, 'num_services': 1, 'num_processes': 1, 'restrictiveness': 1}
tiny_gen_rgoal = {**DEFAULTS, 'name': 'tiny-gen-rangoal', 'num_hosts': 3, 'num_os': 1, 'num_services': 1, 'num_processes': 1, 'restrictiveness': 1, 'random_goal': True}
small_gen = {**DEFAULTS, 'name': 'small-gen', 'num_hosts': 8, 'num_os': 2, 'num_services': 3, 'num_processes': 2, 'restrictiveness': 2}
small_gen_rgoal = {**DEFAULTS, 'name': 'small-gen-rangoal', 'num_hosts': 8, 'num_os': 2, 'num_services': 3, 'num_processes': 2, 'restrictiveness': 2, 'random_goal': True}
medium_gen = {**DEFAULTS, 'name': 'medium-gen', 'num_hosts': 16, 'num_os': 2, 'num_services': 5, 'num_processes': 2, 'restrictiveness': 3, 'step_limit': 2000}
large_gen = {**DEFAULTS, 'name': 'large-gen', 'num_hosts': 23, 'num_os': 3, 'num_services': 7, 'num_processes': 3, 'restrictiveness': 3, 'step_limit': 5000}
huge_gen = {**DEFAULTS, 'name': 'huge-gen', 'num_hosts': 38, 'num_os': 4, 'num_services': 10, 'num_processes': 4, 'restrictiveness': 3, 'step_limit': 10000}
pocp_1_gen = {**DEFAULTS, 'name': 'pocp-1-gen', 'num_hosts': 35, 'num_os': 2, 'num_services': 50, 'num_exploits': 60, 'num_processes': 2, 'restrictiveness': 5, 'step_limit': 30000}
pocp_2_gen = {**DEFAULTS, 'name': 'pocp-2-gen', 'num_hosts': 95, 'num_os': 3, 'num_services': 10, 'num_exploits': 30, 'num_processes': 3, 'restrictiveness': 5, 'step_limit': 30000}
avail_gen_benchmarks = {'tiny-gen': TINY_GEN, 'tiny-gen-rgoal': TINY_GEN_RGOAL, 'small-gen': SMALL_GEN, 'small-gen-rgoal': SMALL_GEN_RGOAL, 'medium-gen': MEDIUM_GEN, 'large-gen': LARGE_GEN, 'huge-gen': HUGE_GEN, 'pocp-1-gen': POCP_1_GEN, 'pocp-2-gen': POCP_2_GEN} |
#
# PySNMP MIB module RBN-ALARM-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RBN-ALARM-EXT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:43:53 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# pysmi-generated MIB module for RBN-ALARM-EXT-MIB.
# NOTE: mibBuilder is not defined in this file -- presumably supplied by the
# pysnmp MIB loader environment when this module is loaded; confirm.
# Symbols imported from prerequisite MIB modules via the loader.
alarmClearDateAndTime, alarmModelEntry, alarmListName, alarmClearIndex = mibBuilder.importSymbols("ALARM-MIB", "alarmClearDateAndTime", "alarmModelEntry", "alarmListName", "alarmClearIndex")
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint")
rbnModules, = mibBuilder.importSymbols("RBN-SMI", "rbnModules")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, Unsigned32, iso, MibIdentifier, NotificationType, TimeTicks, ModuleIdentity, Bits, Gauge32, Counter32, IpAddress, Integer32, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "Unsigned32", "iso", "MibIdentifier", "NotificationType", "TimeTicks", "ModuleIdentity", "Bits", "Gauge32", "Counter32", "IpAddress", "Integer32", "ObjectIdentity")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Module identity and revision metadata.
rbnAlarmExtMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2352, 5, 53))
rbnAlarmExtMib.setRevisions(('2009-09-18 18:00',))
if mibBuilder.loadTexts: rbnAlarmExtMib.setLastUpdated('200909181800Z')
if mibBuilder.loadTexts: rbnAlarmExtMib.setOrganization('Ericsson, Inc.')
# Top-level object subtrees.
rbnAlarmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1))
rbnAlarmModel = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1))
rbnAlarmActive = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 2))
rbnAlarmClear = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3))
# Table augmenting the standard alarmModelEntry with a resource index.
rbnAlarmModelTable = MibTable((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1), )
if mibBuilder.loadTexts: rbnAlarmModelTable.setStatus('current')
rbnAlarmModelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1, 1), )
alarmModelEntry.registerAugmentions(("RBN-ALARM-EXT-MIB", "rbnAlarmModelEntry"))
rbnAlarmModelEntry.setIndexNames(*alarmModelEntry.getIndexNames())
if mibBuilder.loadTexts: rbnAlarmModelEntry.setStatus('current')
rbnAlarmModelResourceIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 512), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rbnAlarmModelResourceIdx.setStatus('current')
# Table describing the resource attached to a cleared alarm;
# one column per possible SMI value type, discriminated by ...ValueType.
rbnAlarmClearResourceTable = MibTable((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1), )
if mibBuilder.loadTexts: rbnAlarmClearResourceTable.setStatus('current')
rbnAlarmClearResourceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1), ).setIndexNames((0, "ALARM-MIB", "alarmListName"), (0, "ALARM-MIB", "alarmClearDateAndTime"), (0, "ALARM-MIB", "alarmClearIndex"))
if mibBuilder.loadTexts: rbnAlarmClearResourceEntry.setStatus('current')
rbnAlarmClearResourceID = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 1), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceID.setStatus('current')
rbnAlarmClearResourceValueType = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("counter32", 1), ("unsigned32", 2), ("timeTicks", 3), ("integer32", 4), ("ipAddress", 5), ("octetString", 6), ("objectId", 7), ("counter64", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceValueType.setStatus('current')
rbnAlarmClearResourceCounter32Val = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceCounter32Val.setStatus('current')
rbnAlarmClearResourceUnsigned32Val = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceUnsigned32Val.setStatus('current')
rbnAlarmClearResourceTimeTicksVal = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceTimeTicksVal.setStatus('current')
rbnAlarmClearResourceInteger32Val = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceInteger32Val.setStatus('current')
rbnAlarmClearResourceOctetStringVal = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceOctetStringVal.setStatus('current')
rbnAlarmClearResourceIpAddressVal = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 9), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceIpAddressVal.setStatus('current')
rbnAlarmClearResourceOidVal = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 10), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceOidVal.setStatus('current')
rbnAlarmClearResourceCounter64Val = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnAlarmClearResourceCounter64Val.setStatus('current')
# Conformance: compliance statement and object groups.
rbnAlarmExtConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2))
rbnAlarmExtCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 1))
rbnAlarmExtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2))
rbnAlarmExtCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 1, 1)).setObjects(("RBN-ALARM-EXT-MIB", "rbnAlarmModelGroup"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbnAlarmExtCompliance = rbnAlarmExtCompliance.setStatus('current')
rbnAlarmModelGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2, 1)).setObjects(("RBN-ALARM-EXT-MIB", "rbnAlarmModelResourceIdx"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbnAlarmModelGroup = rbnAlarmModelGroup.setStatus('current')
rbnAlarmClearGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2, 2)).setObjects(("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceID"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceValueType"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceCounter32Val"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceUnsigned32Val"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceTimeTicksVal"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceInteger32Val"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceOctetStringVal"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceIpAddressVal"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceOidVal"), ("RBN-ALARM-EXT-MIB", "rbnAlarmClearResourceCounter64Val"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbnAlarmClearGroup = rbnAlarmClearGroup.setStatus('current')
# Register everything with the MIB builder so dependent modules can import it.
mibBuilder.exportSymbols("RBN-ALARM-EXT-MIB", rbnAlarmClearResourceUnsigned32Val=rbnAlarmClearResourceUnsigned32Val, rbnAlarmClearResourceOctetStringVal=rbnAlarmClearResourceOctetStringVal, rbnAlarmExtGroups=rbnAlarmExtGroups, rbnAlarmExtMib=rbnAlarmExtMib, rbnAlarmExtCompliances=rbnAlarmExtCompliances, rbnAlarmClearResourceTable=rbnAlarmClearResourceTable, rbnAlarmClearResourceID=rbnAlarmClearResourceID, rbnAlarmClearResourceOidVal=rbnAlarmClearResourceOidVal, rbnAlarmClearGroup=rbnAlarmClearGroup, rbnAlarmClearResourceInteger32Val=rbnAlarmClearResourceInteger32Val, rbnAlarmModelGroup=rbnAlarmModelGroup, rbnAlarmModel=rbnAlarmModel, rbnAlarmClearResourceTimeTicksVal=rbnAlarmClearResourceTimeTicksVal, rbnAlarmClearResourceCounter64Val=rbnAlarmClearResourceCounter64Val, rbnAlarmClear=rbnAlarmClear, rbnAlarmClearResourceValueType=rbnAlarmClearResourceValueType, rbnAlarmClearResourceIpAddressVal=rbnAlarmClearResourceIpAddressVal, rbnAlarmClearResourceCounter32Val=rbnAlarmClearResourceCounter32Val, rbnAlarmExtConformance=rbnAlarmExtConformance, PYSNMP_MODULE_ID=rbnAlarmExtMib, rbnAlarmModelResourceIdx=rbnAlarmModelResourceIdx, rbnAlarmExtCompliance=rbnAlarmExtCompliance, rbnAlarmObjects=rbnAlarmObjects, rbnAlarmClearResourceEntry=rbnAlarmClearResourceEntry, rbnAlarmModelTable=rbnAlarmModelTable, rbnAlarmActive=rbnAlarmActive, rbnAlarmModelEntry=rbnAlarmModelEntry)
# NOTE(review): this auto-converted copy assigns snake_case names
# (rbn_alarm_ext_mib, mib_identifier, ...) but keeps calling the original
# camelCase names (rbnAlarmExtMib.setRevisions, MibIdentifier-style refs in
# exportSymbols, ...). As written it would raise NameError at import time --
# confirm whether the rename should be completed or reverted.
| (alarm_clear_date_and_time, alarm_model_entry, alarm_list_name, alarm_clear_index) = mibBuilder.importSymbols('ALARM-MIB', 'alarmClearDateAndTime', 'alarmModelEntry', 'alarmListName', 'alarmClearIndex')
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_union, constraints_intersection, value_range_constraint, single_value_constraint, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsUnion', 'ConstraintsIntersection', 'ValueRangeConstraint', 'SingleValueConstraint', 'ValueSizeConstraint')
(rbn_modules,) = mibBuilder.importSymbols('RBN-SMI', 'rbnModules')
(notification_group, module_compliance, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance', 'ObjectGroup')
(mib_scalar, mib_table, mib_table_row, mib_table_column, counter64, unsigned32, iso, mib_identifier, notification_type, time_ticks, module_identity, bits, gauge32, counter32, ip_address, integer32, object_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter64', 'Unsigned32', 'iso', 'MibIdentifier', 'NotificationType', 'TimeTicks', 'ModuleIdentity', 'Bits', 'Gauge32', 'Counter32', 'IpAddress', 'Integer32', 'ObjectIdentity')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
rbn_alarm_ext_mib = module_identity((1, 3, 6, 1, 4, 1, 2352, 5, 53))
rbnAlarmExtMib.setRevisions(('2009-09-18 18:00',))
if mibBuilder.loadTexts:
    rbnAlarmExtMib.setLastUpdated('200909181800Z')
if mibBuilder.loadTexts:
    rbnAlarmExtMib.setOrganization('Ericsson, Inc.')
rbn_alarm_objects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1))
rbn_alarm_model = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1))
rbn_alarm_active = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 2))
rbn_alarm_clear = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3))
rbn_alarm_model_table = mib_table((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1))
if mibBuilder.loadTexts:
    rbnAlarmModelTable.setStatus('current')
rbn_alarm_model_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1, 1))
alarmModelEntry.registerAugmentions(('RBN-ALARM-EXT-MIB', 'rbnAlarmModelEntry'))
rbnAlarmModelEntry.setIndexNames(*alarmModelEntry.getIndexNames())
if mibBuilder.loadTexts:
    rbnAlarmModelEntry.setStatus('current')
rbn_alarm_model_resource_idx = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 1, 1, 1, 1), unsigned32().subtype(subtypeSpec=constraints_union(value_range_constraint(0, 0), value_range_constraint(3, 512)))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rbnAlarmModelResourceIdx.setStatus('current')
rbn_alarm_clear_resource_table = mib_table((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1))
if mibBuilder.loadTexts:
    rbnAlarmClearResourceTable.setStatus('current')
rbn_alarm_clear_resource_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1)).setIndexNames((0, 'ALARM-MIB', 'alarmListName'), (0, 'ALARM-MIB', 'alarmClearDateAndTime'), (0, 'ALARM-MIB', 'alarmClearIndex'))
if mibBuilder.loadTexts:
    rbnAlarmClearResourceEntry.setStatus('current')
rbn_alarm_clear_resource_id = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 1), object_identifier()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceID.setStatus('current')
rbn_alarm_clear_resource_value_type = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=named_values(('counter32', 1), ('unsigned32', 2), ('timeTicks', 3), ('integer32', 4), ('ipAddress', 5), ('octetString', 6), ('objectId', 7), ('counter64', 8)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceValueType.setStatus('current')
rbn_alarm_clear_resource_counter32_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceCounter32Val.setStatus('current')
rbn_alarm_clear_resource_unsigned32_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 5), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceUnsigned32Val.setStatus('current')
rbn_alarm_clear_resource_time_ticks_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 6), time_ticks()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceTimeTicksVal.setStatus('current')
rbn_alarm_clear_resource_integer32_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 7), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceInteger32Val.setStatus('current')
rbn_alarm_clear_resource_octet_string_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 8), octet_string().subtype(subtypeSpec=value_size_constraint(0, 65535))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceOctetStringVal.setStatus('current')
rbn_alarm_clear_resource_ip_address_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 9), ip_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceIpAddressVal.setStatus('current')
rbn_alarm_clear_resource_oid_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 10), object_identifier()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceOidVal.setStatus('current')
rbn_alarm_clear_resource_counter64_val = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 5, 53, 1, 3, 1, 1, 11), counter64()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rbnAlarmClearResourceCounter64Val.setStatus('current')
rbn_alarm_ext_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2))
rbn_alarm_ext_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 1))
rbn_alarm_ext_groups = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2))
rbn_alarm_ext_compliance = module_compliance((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 1, 1)).setObjects(('RBN-ALARM-EXT-MIB', 'rbnAlarmModelGroup'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbn_alarm_ext_compliance = rbnAlarmExtCompliance.setStatus('current')
rbn_alarm_model_group = object_group((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2, 1)).setObjects(('RBN-ALARM-EXT-MIB', 'rbnAlarmModelResourceIdx'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbn_alarm_model_group = rbnAlarmModelGroup.setStatus('current')
rbn_alarm_clear_group = object_group((1, 3, 6, 1, 4, 1, 2352, 5, 53, 2, 2, 2)).setObjects(('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceID'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceValueType'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceCounter32Val'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceUnsigned32Val'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceTimeTicksVal'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceInteger32Val'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceOctetStringVal'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceIpAddressVal'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceOidVal'), ('RBN-ALARM-EXT-MIB', 'rbnAlarmClearResourceCounter64Val'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbn_alarm_clear_group = rbnAlarmClearGroup.setStatus('current')
mibBuilder.exportSymbols('RBN-ALARM-EXT-MIB', rbnAlarmClearResourceUnsigned32Val=rbnAlarmClearResourceUnsigned32Val, rbnAlarmClearResourceOctetStringVal=rbnAlarmClearResourceOctetStringVal, rbnAlarmExtGroups=rbnAlarmExtGroups, rbnAlarmExtMib=rbnAlarmExtMib, rbnAlarmExtCompliances=rbnAlarmExtCompliances, rbnAlarmClearResourceTable=rbnAlarmClearResourceTable, rbnAlarmClearResourceID=rbnAlarmClearResourceID, rbnAlarmClearResourceOidVal=rbnAlarmClearResourceOidVal, rbnAlarmClearGroup=rbnAlarmClearGroup, rbnAlarmClearResourceInteger32Val=rbnAlarmClearResourceInteger32Val, rbnAlarmModelGroup=rbnAlarmModelGroup, rbnAlarmModel=rbnAlarmModel, rbnAlarmClearResourceTimeTicksVal=rbnAlarmClearResourceTimeTicksVal, rbnAlarmClearResourceCounter64Val=rbnAlarmClearResourceCounter64Val, rbnAlarmClear=rbnAlarmClear, rbnAlarmClearResourceValueType=rbnAlarmClearResourceValueType, rbnAlarmClearResourceIpAddressVal=rbnAlarmClearResourceIpAddressVal, rbnAlarmClearResourceCounter32Val=rbnAlarmClearResourceCounter32Val, rbnAlarmExtConformance=rbnAlarmExtConformance, PYSNMP_MODULE_ID=rbnAlarmExtMib, rbnAlarmModelResourceIdx=rbnAlarmModelResourceIdx, rbnAlarmExtCompliance=rbnAlarmExtCompliance, rbnAlarmObjects=rbnAlarmObjects, rbnAlarmClearResourceEntry=rbnAlarmClearResourceEntry, rbnAlarmModelTable=rbnAlarmModelTable, rbnAlarmActive=rbnAlarmActive, rbnAlarmModelEntry=rbnAlarmModelEntry)
"""``utils`` module of ``dataql``.
Provide some simple utilities.
"""
def class_repr(value):
    """Return the "module.name" representation of the value's class.

    Arguments
    ---------
    value
        A class, or an instance of a class.

    Returns
    -------
    str
        The "module.name" representation of the value class.

    Example
    -------

    >>> from datetime import date
    >>> class_repr(date)
    'datetime.date'
    >>> class_repr(date.today())
    'datetime.date'
    """
    # If given an instance, look through to its class first.
    klass = value if isinstance(value, type) else value.__class__
    return klass.__module__ + '.' + klass.__name__
| """``utils`` module of ``dataql``.
Provide some simple utilities.
"""
def class_repr(value):
"""Returns a representation of the value class.
Arguments
---------
value
A class or a class instance
Returns
-------
str
The "module.name" representation of the value class.
Example
-------
>>> from datetime import date
>>> class_repr(date)
'datetime.date'
>>> class_repr(date.today())
'datetime.date'
"""
klass = value
if not isinstance(value, type):
klass = klass.__class__
return '.'.join([klass.__module__, klass.__name__]) |
#! /usr/bin/env python
class StateUpdate:
    """Container for one tick's worth of optional state deltas.

    Every field is None when that aspect of the state did not change in
    this update; callers set only the pieces that changed.
    """

    def __init__(
        self,
        position_update=None,
        phase_update=None,
        phase_level_update=None,
        small_phase_update=None,
        phase_correction_update=None,
        velocity_update=None,
        orientation_update=None,
        angular_speed_update=None,
        teleop_update=None
    ):
        self.position_update = position_update
        self.phase_update = phase_update
        self.phase_level_update = phase_level_update
        self.small_phase_update = small_phase_update
        self.phase_correction_update = phase_correction_update
        self.velocity_update = velocity_update
        self.orientation_update = orientation_update
        self.angular_speed_update = angular_speed_update
        self.teleop_update = teleop_update

    def __str__(self):
        # NOTE: teleop_update is deliberately (or accidentally) omitted from
        # the rendered string -- confirm with the original author.
        # The backslash continuations splice the literal together; the single
        # space before each backslash is the separator in the output.
        return "phase update: {}, \
phase_level_update: {}, \
small_phase_update: {}, \
phase_correction_update: {}, \
position update: {}, \
orientation update: {}, \
velocity update: {}, \
angular speed update: {}".format(
            self.phase_update,
            self.phase_level_update,
            self.small_phase_update,
            self.phase_correction_update,
            self.position_update,
            self.orientation_update,
            self.velocity_update,
            self.angular_speed_update,
        )
| class Stateupdate:
def __init__(self, position_update=None, phase_update=None, phase_level_update=None, small_phase_update=None, phase_correction_update=None, velocity_update=None, orientation_update=None, angular_speed_update=None, teleop_update=None):
self.position_update = position_update
self.phase_update = phase_update
self.phase_level_update = phase_level_update
self.small_phase_update = small_phase_update
self.phase_correction_update = phase_correction_update
self.velocity_update = velocity_update
self.orientation_update = orientation_update
self.angular_speed_update = angular_speed_update
self.teleop_update = teleop_update
def __str__(self):
return 'phase update: {}, phase_level_update: {}, small_phase_update: {}, phase_correction_update: {}, position update: {}, orientation update: {}, velocity update: {}, angular speed update: {}'.format(self.phase_update, self.phase_level_update, self.small_phase_update, self.phase_correction_update, self.position_update, self.orientation_update, self.velocity_update, self.angular_speed_update) |
#!/usr/bin/env python3
def ntchange(variant_frame):
    """Annotate a variant table with one-hot nucleotide-substitution columns.

    For each row, exactly one of the six strand-symmetric substitution
    classes (GC2CG, GC2TA, GC2AT, TA2AT, TA2GC, TA2CG) is set to 1 based on
    the (REF, ALT) pair; all six are 0 for unrecognized pairs (e.g. indels).

    Parameters
    ----------
    variant_frame : pandas.DataFrame
        Must contain 'REF' and 'ALT' columns of single-base strings
        (case-insensitive).

    Returns
    -------
    pandas.DataFrame
        A new frame with the six indicator columns appended.

    BUG FIX: the original ``assign`` call passed the GC2CG list for all six
    columns, so every indicator column was a copy of GC2CG.
    """
    transition_cols = ['GC2CG', 'GC2TA', 'GC2AT', 'TA2AT', 'TA2GC', 'TA2CG']
    # Map each (REF, ALT) base pair to its strand-symmetric class.
    pair_to_col = {
        ('G', 'C'): 'GC2CG', ('C', 'G'): 'GC2CG',
        ('G', 'T'): 'GC2TA', ('C', 'A'): 'GC2TA',
        ('G', 'A'): 'GC2AT', ('C', 'T'): 'GC2AT',
        ('T', 'A'): 'TA2AT', ('A', 'T'): 'TA2AT',
        ('T', 'G'): 'TA2GC', ('A', 'C'): 'TA2GC',
        ('T', 'C'): 'TA2CG', ('A', 'G'): 'TA2CG',
    }
    columns = {col: [] for col in transition_cols}
    for ref, alt in zip(variant_frame['REF'], variant_frame['ALT']):
        hit = pair_to_col.get((ref.upper(), alt.upper()))  # None -> all zeros
        for col in transition_cols:
            columns[col].append(1 if col == hit else 0)
    # assign() returns a new frame; each column now gets its OWN list.
    return variant_frame.assign(**columns)
| def ntchange(variant_frame):
gc2_cg = []
gc2_ta = []
gc2_at = []
ta2_at = []
ta2_gc = []
ta2_cg = []
for (ref, alt) in zip(variant_frame['REF'], variant_frame['ALT']):
ref = ref.upper()
alt = alt.upper()
if ref == 'G' and alt == 'C' or (ref == 'C' and alt == 'G'):
GC2CG.append(1)
GC2TA.append(0)
GC2AT.append(0)
TA2AT.append(0)
TA2GC.append(0)
TA2CG.append(0)
elif ref == 'G' and alt == 'T' or (ref == 'C' and alt == 'A'):
GC2CG.append(0)
GC2TA.append(1)
GC2AT.append(0)
TA2AT.append(0)
TA2GC.append(0)
TA2CG.append(0)
elif ref == 'G' and alt == 'A' or (ref == 'C' and alt == 'T'):
GC2CG.append(0)
GC2TA.append(0)
GC2AT.append(1)
TA2AT.append(0)
TA2GC.append(0)
TA2CG.append(0)
elif ref == 'T' and alt == 'A' or (ref == 'A' and alt == 'T'):
GC2CG.append(0)
GC2TA.append(0)
GC2AT.append(0)
TA2AT.append(1)
TA2GC.append(0)
TA2CG.append(0)
elif ref == 'T' and alt == 'G' or (ref == 'A' and alt == 'C'):
GC2CG.append(0)
GC2TA.append(0)
GC2AT.append(0)
TA2AT.append(0)
TA2GC.append(1)
TA2CG.append(0)
elif ref == 'T' and alt == 'C' or (ref == 'A' and alt == 'G'):
GC2CG.append(0)
GC2TA.append(0)
GC2AT.append(0)
TA2AT.append(0)
TA2GC.append(0)
TA2CG.append(1)
else:
GC2CG.append(0)
GC2TA.append(0)
GC2AT.append(0)
TA2AT.append(0)
TA2GC.append(0)
TA2CG.append(0)
new_data = variant_frame.assign(GC2CG=GC2CG, GC2TA=GC2CG, GC2AT=GC2CG, TA2AT=GC2CG, TA2GC=GC2CG, TA2CG=GC2CG)
return new_data |
# Static peer directory: (ipns address, display name) pairs.
# NOTE(review): these look like placeholder values -- confirm real entries.
roster = (('ipns adrres one', 'Name one'),
          ('ipns adrres two', 'Name two'),
          ('etc', 'etc'))
| roster = (('ipns adrres one', 'Name one'), ('ipns adrres two', 'Name two'), ('etc', 'etc')) |
class Config(object):
    """Base configuration; environment-specific classes override below."""
    pass


class ProdConfig(Config):
    """Production: Postgres reachable via the `postgres` service host."""
    DEBUG = False
    SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://test:password@postgres:5432/mex_polit_db"
    SQLALCHEMY_TRACK_MODIFICATIONS = False


class DevConfig(Config):
    """Development: debug on, Postgres on localhost."""
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://test:password@localhost:5432/mex_polit_db"
    SQLALCHEMY_TRACK_MODIFICATIONS = False


class TestConfig(Config):
    """Testing: in-memory SQLite, debug toolbar / cache / CSRF disabled."""
    DEBUG = True
    DEBUG_TB_ENABLED = False
    SQLALCHEMY_DATABASE_URI = "sqlite:///:memory:"
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    CACHE_TYPE = "null"
    WTF_CSRF_ENABLED = False
| class Config(object):
pass
class Prodconfig(Config):
debug = False
sqlalchemy_database_uri = 'postgresql+psycopg2://test:password@postgres:5432/mex_polit_db'
sqlalchemy_track_modifications = False
class Devconfig(Config):
debug = True
sqlalchemy_database_uri = 'postgresql+psycopg2://test:password@localhost:5432/mex_polit_db'
sqlalchemy_track_modifications = False
class Testconfig(Config):
debug = True
debug_tb_enabled = False
sqlalchemy_database_uri = 'sqlite:///:memory:'
sqlalchemy_track_modifications = False
cache_type = 'null'
wtf_csrf_enabled = False |
def test_construction(sm_config):
assert True is sm_config.debug
viewservice_config = sm_config.serviceconfigs['viewservice']
assert 99 == viewservice_config.flag
| def test_construction(sm_config):
assert True is sm_config.debug
viewservice_config = sm_config.serviceconfigs['viewservice']
assert 99 == viewservice_config.flag |
# Module is an abstraction of a candle, which is the data structure returned when requesting price history
# data from the API. It also contains methods for analysis.
class Candle(object):
    """One OHLCV candle parsed from a price-history API response."""

    def __init__(self, json):
        # `json` is a single candle dict; required keys:
        # open, close, low, high, volume, datetime.
        self.openPrice = json['open']
        self.closePrice = json['close']
        self.lowPrice = json['low']
        self.highPrice = json['high']
        self.volume = json['volume']
        self.datetime = json['datetime']

    def PrintAttributes(self):
        """Print every candle field on one tab-separated line."""
        print("Time: " + str(self.datetime) + "\tOpen: $" + str(self.openPrice) + "\tClose: $" + str(self.closePrice) + "\tLow: $" + str(self.lowPrice) + "\tHigh: $" + str(self.highPrice) + "\tVolume: " + str(self.volume))
print("Time: " + str(self.datetime) + "\tOpen: $" + str(self.openPrice) + "\tClose: $" + str(self.closePrice) + "\tLow: $" + str(self.lowPrice) + "\tHigh: $" + str(self.highPrice) + "\tVolume: " + str(self.volume)) | class Candle(object):
def __init__(self, json):
self.openPrice = json['open']
self.closePrice = json['close']
self.lowPrice = json['low']
self.highPrice = json['high']
self.volume = json['volume']
self.datetime = json['datetime']
def print_attributes(self):
print('Time: ' + str(self.datetime) + '\tOpen: $' + str(self.openPrice) + '\tClose: $' + str(self.closePrice) + '\tLow: $' + str(self.lowPrice) + '\tHigh: $' + str(self.highPrice) + '\tVolume: ' + str(self.volume)) |
# deliberatly not in file_dep, so a change to sizes doesn't force rerenders
def get_map_size(obj_name):
    """Choose per-object map parameters by object name.

    Returns a ``(map_size, quality)`` pair, or ``(None, None)`` for objects
    that should be skipped.  NOTE(review): the meaning of the second value is
    not visible here -- presumably a sample/detail count; confirm with caller.
    """
    # Ground objects: only gnd.023 gets a map, and a large one.
    if obj_name.startswith('gnd.'):
        if obj_name != 'gnd.023':
            return None, None
        return 2048, 500
    # Leaves get a tiny map with low quality.
    if obj_name.startswith('leaf.'):
        return 128, 10
    # One specific building is special-cased up to 2048.
    if obj_name == 'building_022.outer.003':
        return 2048, 500
    # Everything else gets the default.
    return 512, 500
return map_size, 500 | def get_map_size(obj_name):
map_size = 512
if obj_name.startswith('gnd.'):
if obj_name != 'gnd.023':
return (None, None)
map_size = 2048
if obj_name in ['building_022.outer.003']:
map_size = 2048
if obj_name.startswith('leaf.'):
return (None, None)
return (128, 10)
return (map_size, 500) |
def find2020(list):
    """Print list[i] * list[j] * list[k] for every index triple i < j < k
    whose entries sum to 2020 (AoC 2020 day 1 part 2)."""
    # NOTE(review): the parameter shadows the builtin `list`; kept as-is to
    # preserve the keyword-call interface.
    size = len(list)
    for i in range(size):
        for j in range(i + 1, size):
            for k in range(j + 1, size):
                if list[i] + list[j] + list[k] == 2020:
                    print(list[i] * list[j] * list[k])
if __name__ == '__main__':
    # Expense report input: one integer per line.
    with open("test2.txt") as file:
        find2020([int(a) for a in file.read().splitlines()])
| def find2020(list):
for i in range(0, len(list)):
for j in range(i + 1, len(list)):
for k in range(j + 1, len(list)):
if list[i] + list[j] + list[k] == 2020:
print(list[i] * list[j] * list[k])
if __name__ == '__main__':
with open('test2.txt') as file:
find2020([int(a) for a in file.read().splitlines()]) |
class MyHashMap:
    """Hash map over integer keys with 2**15 fixed buckets of (key, value)
    pairs; chaining resolves collisions."""

    def eval_hash(self, key):
        # Multiplicative hash: keep 20 low bits, drop 5 -> bucket in [0, 2**15).
        return ((key * 1031237) & ((1 << 20) - 1)) >> 5

    def __init__(self):
        self.arr = [[] for _ in range(1 << 15)]

    def put(self, key, value):
        """Insert key -> value, replacing any existing entry for key."""
        bucket = self.arr[self.eval_hash(key)]
        for idx, (k, _) in enumerate(bucket):
            if k == key:
                bucket[idx] = (k, value)
                return
        bucket.append((key, value))

    def get(self, key):
        """Return the value stored for key, or -1 if absent."""
        for k, v in self.arr[self.eval_hash(key)]:
            if k == key:
                return v
        return -1

    def remove(self, key: int):
        """Delete the entry for key if present (no-op otherwise)."""
        bucket = self.arr[self.eval_hash(key)]
        for k, v in bucket:
            if k == key:
                bucket.remove((k, v))
| class Myhashmap:
def eval_hash(self, key):
return (key * 1031237 & (1 << 20) - 1) >> 5
def __init__(self):
self.arr = [[] for _ in range(1 << 15)]
def put(self, key, value):
t = self.eval_hash(key)
for (i, (k, v)) in enumerate(self.arr[t]):
if k == key:
self.arr[t][i] = (k, value)
return
self.arr[t].append((key, value))
def get(self, key):
t = self.eval_hash(key)
for (i, (k, v)) in enumerate(self.arr[t]):
if k == key:
return v
return -1
def remove(self, key: int):
t = self.eval_hash(key)
for (i, (k, v)) in enumerate(self.arr[t]):
if k == key:
self.arr[t].remove((k, v)) |
# Size of the n-gram context used to predict the next word.
numWords = 2
# Fallback suggestions: the ten most common English words.
mostCommonWords = ["the","of","to","and","a","in","is","it","you","that"]
def addWord(d, group, word):
    """Increment the count of `word` observed after context `group`.

    `d` maps context tuples to {word: count} dicts; missing context or word
    entries are created on first use.  (Idiom fix: setdefault + get replace
    the original three-way `== None` branching with identical behavior.)
    """
    counts = d.setdefault(group, {})
    counts[word] = counts.get(word, 0) + 1
def makeOptions(options):
    """Render each option as '!<index>: <word>' for the prompt menu."""
    return ["!" + str(index) + ": " + word
            for index, word in enumerate(options)]
def getTopWords(d, ws):
    """Return the words recorded after context `ws`, most frequent first.

    Returns [] for an unknown context.  Ties keep the original
    sort-then-reverse order (reverse of stable insertion order).
    """
    if d.get(ws) is None:
        return []
    pairs = list(d[ws].items())
    pairs.sort(key=lambda item: item[1])
    pairs.reverse()
    return [word for word, _ in pairs]
# this loop will continue until a command of !quit is given
def mainLoop():
    """Interactive predictive-text REPL.

    Builds an n-gram model (ourDict) mapping tuples of the last `numWords`
    words to next-word counts, shows up to ten suggestions (learned words
    first, then global fallbacks), and reads either '!<index>' to pick a
    suggestion, '!quit' to exit, or a free-typed word.
    """
    ourDict = {}
    textSoFar = []
    lastWords = []
    while True:
        options = []
        if len(lastWords) < numWords:
            # Not enough context yet: fall back to the global common words.
            options = mostCommonWords
        else:
            test = getTopWords(ourDict,tuple(lastWords))
            #we append to the options the most common words
            options = (test + mostCommonWords)[0:10]
        print("Your message: " + " ".join(textSoFar) + "\n")
        message = ("Choose from the following options with !#, !quit to exit, or type your own response\n"
                   + " ".join(makeOptions(options)))
        choice = input(message + "\n")
        if choice[0] == '!':
            if choice == "!quit":
                break
            else:
                selectedWord = options[int(choice[1:])]
                if len(lastWords) == numWords:
                    # Record the transition, then slide the context window.
                    addWord(ourDict,tuple(lastWords),selectedWord)
                    lastWords.append(selectedWord)
                    lastWords = lastWords[1:]
                else:
                    lastWords.append(selectedWord)
                textSoFar.append(selectedWord)
        else:
            if(len(lastWords) == numWords):
                addWord(ourDict,tuple(lastWords),choice)
                lastWords.append(choice)
                lastWords = lastWords[1:]
            else:
                lastWords.append(choice)
            textSoFar.append(choice)
mainLoop()
| num_words = 2
most_common_words = ['the', 'of', 'to', 'and', 'a', 'in', 'is', 'it', 'you', 'that']
def add_word(d, group, word):
if d.get(group) == None:
d[group] = {word: 1}
elif d[group].get(word) == None:
d[group][word] = 1
else:
d[group][word] = d[group][word] + 1
def make_options(options):
result = []
count = 0
for w in options:
result.append('!' + str(count) + ': ' + w)
count = count + 1
return result
def get_top_words(d, ws):
if d.get(ws) == None:
return []
else:
opts = list(d.get(ws).items())
opts.sort(key=lambda x: x[1])
opts.reverse()
return [p[0] for p in opts]
def main_loop():
our_dict = {}
text_so_far = []
last_words = []
while True:
options = []
if len(lastWords) < numWords:
options = mostCommonWords
else:
test = get_top_words(ourDict, tuple(lastWords))
options = (test + mostCommonWords)[0:10]
print('Your message: ' + ' '.join(textSoFar) + '\n')
message = 'Choose from the following options with !#, !quit to exit, or type your own response\n' + ' '.join(make_options(options))
choice = input(message + '\n')
if choice[0] == '!':
if choice == '!quit':
break
else:
selected_word = options[int(choice[1:])]
if len(lastWords) == numWords:
add_word(ourDict, tuple(lastWords), selectedWord)
lastWords.append(selectedWord)
last_words = lastWords[1:]
else:
lastWords.append(selectedWord)
textSoFar.append(selectedWord)
else:
if len(lastWords) == numWords:
add_word(ourDict, tuple(lastWords), choice)
lastWords.append(choice)
last_words = lastWords[1:]
else:
lastWords.append(choice)
textSoFar.append(choice)
main_loop() |
# -*- encoding: utf-8 -*-
class Scene:
    """
    Represents an abstract scene of the game.

    A scene is a visible part of the game, like a loading screen
    or an option menu. For creating a working scene the object
    should be derived from this class.

    BUG FIX: the original raised ``NotImplemented(...)``, which is the
    non-callable binary-operator sentinel, so every call produced a
    ``TypeError`` instead of the intended signal; all methods now raise
    ``NotImplementedError``.
    """

    def __init__(self, director):
        self.director = director

    def load_settings(self):
        """
        Reads and sets the settings of the current game instance
        """
        raise NotImplementedError("load_settings method should be implemented.")

    def on_update(self):
        """
        Logic update called directly from the director.
        """
        raise NotImplementedError("on_update method should be implemented.")

    def on_enter(self):
        """
        Called directly from the director when entering the scene.
        """
        raise NotImplementedError("on_enter method should be implemented.")

    def on_event(self, events):
        """
        Processes pygame events for the concrete scene
        """
        raise NotImplementedError("on_event method should be implemented.")

    def on_draw(self, screen):
        """
        Called when something is to be drawn on the screen.
        """
        raise NotImplementedError("on_draw method should be implemented.")
| class Scene:
"""
Represents an abstract scene of the game.
A scene is a visible part of the game, like a loading screen
or an option menu. For creating a working scene the object
should be derived from this class.
"""
def __init__(self, director):
self.director = director
def load_settings(self):
"""
Reads and sets the settings of the current game instance
"""
raise not_implemented('load_settings method should be implemented.')
def on_update(self):
"""
Logic update called directly from the director.
"""
raise not_implemented('on_update method should be implemented.')
def on_enter(self):
"""
Called directly from the director when entering the scene.
"""
raise not_implemented('on_enter method should be implemented.')
def on_event(self, events):
"""
Processes pygame events for the concrete scene
"""
raise not_implemented('on_event method should be implemented.')
def on_draw(self, screen):
"""
Called when something is to be drawn on the screen.
"""
raise not_implemented('on_draw method should be implemented.') |
"""
MembershipTookit.com Downloader v1.0.0
======================================
Software (C) Copyright 2019 Gideon Tong
All rights reserved.
This piece of software is available to use as "free as in beer", but is
not for redistribution, reselling, remixing, reusing, or any other type
of use other than personal use. You can download it from the
official GitHub repository on gideontong's profile.
Its use is to download directories of information from directories hosted
on MembershipTookit.com and convert all the pages into a CSV so it
can be parsed in Excel or another program for automation.
""" | """
MembershipTookit.com Downloader v1.0.0
======================================
Software (C) Copyright 2019 Gideon Tong
All rights reserved.
This piece of software is available to use as "free as in beer", but is
not for redistribution, resell, remixing, reusing, or any other type
of use other than for personal use only. You can download it from the
official GitHub repository on gideontong's profile.
Its use is to download directories of information from directories hosted
on MembershipTookit.com and convert all the pages into a CSV so it
can be parsed in Excel or another program for automation.
""" |
# Copyright (c) 2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load(
"@daml//bazel_tools/client_server:client_server_test.bzl",
"client_server_test",
)
load("//bazel_tools:versions.bzl", "version_to_name", "versions")
def daml_script_dar(sdk_version):
daml = "@daml-sdk-{sdk_version}//:daml".format(
sdk_version = sdk_version,
)
native.genrule(
name = "script-example-dar-{sdk_version}".format(
sdk_version = version_to_name(sdk_version),
),
srcs = ["//bazel_tools/daml_script:example/src/ScriptExample.daml"],
outs = ["script-example-{sdk_version}.dar".format(
sdk_version = version_to_name(sdk_version),
)],
tools = [daml],
cmd = """\
set -euo pipefail
TMP_DIR=$$(mktemp -d)
cleanup() {{ rm -rf $$TMP_DIR; }}
trap cleanup EXIT
mkdir -p $$TMP_DIR/src
cp -L $(location //bazel_tools/daml_script:example/src/ScriptExample.daml) $$TMP_DIR/src/
cat <<EOF >$$TMP_DIR/daml.yaml
sdk-version: {sdk_version}
name: script-example
source: src
parties:
- Alice
- Bob
- Bank
version: 0.0.1
dependencies:
- daml-prim
- daml-stdlib
- daml-script
sandbox-options:
- --wall-clock-time
EOF
$(location {daml}) build --project-root=$$TMP_DIR -o $$PWD/$(OUTS)
""".format(
daml = daml,
sdk_version = sdk_version,
),
)
def daml_script_test(compiler_version, runner_version):
name = "daml-script-test-compiler-{compiler_version}-runner-{runner_version}".format(
compiler_version = version_to_name(compiler_version),
runner_version = version_to_name(runner_version),
)
compiled_dar = "//:script-example-dar-{version}".format(
version = version_to_name(compiler_version),
)
daml_runner = "@daml-sdk-{version}//:daml".format(
version = runner_version,
)
# 1.16.0 is the first SDK version that defaulted to LF 1.14, which is the earliest LF version that Canton supports
use_canton = versions.is_at_least("2.0.0", runner_version) and versions.is_at_least("1.16.0", compiler_version)
use_sandbox_on_x = versions.is_at_least("2.0.0", runner_version) and not use_canton
if use_sandbox_on_x:
server = "@daml-sdk-{version}//:sandbox-on-x".format(version = runner_version)
server_args = ["--participant", "participant-id=sandbox,port=6865"]
server_files = []
server_files_prefix = ""
else:
server = daml_runner
server_args = ["sandbox"]
server_files = ["$(rootpath {})".format(compiled_dar)]
server_files_prefix = "--dar=" if use_canton else ""
native.genrule(
name = "{}-client-sh".format(name),
outs = ["{}-client.sh".format(name)],
cmd = """\
cat >$(OUTS) <<'EOF'
#!/usr/bin/env bash
set -euo pipefail
canonicalize_rlocation() {{
# Note (MK): This is a fun one: Let's say $$TEST_WORKSPACE is "compatibility"
# and the argument points to a target from an external workspace, e.g.,
# @daml-sdk-0.0.0//:daml. Then the short path will point to
# ../daml-sdk-0.0.0/daml. Putting things together we end up with
# compatibility/../daml-sdk-0.0.0/daml. On Linux and MacOS this works
# just fine. However, on windows we need to normalize the path
# or rlocation will fail to find the path in the manifest file.
rlocation $$(realpath -L -s -m --relative-to=$$PWD $$TEST_WORKSPACE/$$1)
}}
runner=$$(canonicalize_rlocation $(rootpath {runner}))
# Cleanup the trigger runner process but maintain the script runner exit code.
trap 'status=$$?; kill -TERM $$PID; wait $$PID; exit $$status' INT TERM
if [ {upload_dar} -eq 1 ] ; then
$$runner ledger upload-dar \\
--host localhost \\
--port 6865 \\
$$(canonicalize_rlocation $(rootpath {dar}))
fi
$$runner script \\
--ledger-host localhost \\
--ledger-port 6865 \\
--wall-clock-time \\
--dar $$(canonicalize_rlocation $(rootpath {dar})) \\
--script-name ScriptExample:test
EOF
chmod +x $(OUTS)
""".format(
dar = compiled_dar,
runner = daml_runner,
upload_dar = "1" if use_sandbox_on_x else "0",
),
exec_tools = [
compiled_dar,
daml_runner,
],
)
native.sh_binary(
name = "{}-client".format(name),
srcs = ["{}-client.sh".format(name)],
data = [
compiled_dar,
daml_runner,
],
)
client_server_test(
name = name,
client = "{}-client".format(name),
client_args = [],
client_files = [],
data = [
compiled_dar,
],
runner = "//bazel_tools/client_server:runner",
runner_args = ["6865"],
server = server,
server_args = server_args,
server_files = server_files,
server_files_prefix = server_files_prefix,
tags = ["exclusive"],
)
| load('@daml//bazel_tools/client_server:client_server_test.bzl', 'client_server_test')
load('//bazel_tools:versions.bzl', 'version_to_name', 'versions')
def daml_script_dar(sdk_version):
daml = '@daml-sdk-{sdk_version}//:daml'.format(sdk_version=sdk_version)
native.genrule(name='script-example-dar-{sdk_version}'.format(sdk_version=version_to_name(sdk_version)), srcs=['//bazel_tools/daml_script:example/src/ScriptExample.daml'], outs=['script-example-{sdk_version}.dar'.format(sdk_version=version_to_name(sdk_version))], tools=[daml], cmd='set -euo pipefail\nTMP_DIR=$$(mktemp -d)\ncleanup() {{ rm -rf $$TMP_DIR; }}\ntrap cleanup EXIT\nmkdir -p $$TMP_DIR/src\ncp -L $(location //bazel_tools/daml_script:example/src/ScriptExample.daml) $$TMP_DIR/src/\ncat <<EOF >$$TMP_DIR/daml.yaml\nsdk-version: {sdk_version}\nname: script-example\nsource: src\nparties:\n - Alice\n - Bob\n - Bank\nversion: 0.0.1\ndependencies:\n - daml-prim\n - daml-stdlib\n - daml-script\nsandbox-options:\n - --wall-clock-time\nEOF\n$(location {daml}) build --project-root=$$TMP_DIR -o $$PWD/$(OUTS)\n'.format(daml=daml, sdk_version=sdk_version))
def daml_script_test(compiler_version, runner_version):
name = 'daml-script-test-compiler-{compiler_version}-runner-{runner_version}'.format(compiler_version=version_to_name(compiler_version), runner_version=version_to_name(runner_version))
compiled_dar = '//:script-example-dar-{version}'.format(version=version_to_name(compiler_version))
daml_runner = '@daml-sdk-{version}//:daml'.format(version=runner_version)
use_canton = versions.is_at_least('2.0.0', runner_version) and versions.is_at_least('1.16.0', compiler_version)
use_sandbox_on_x = versions.is_at_least('2.0.0', runner_version) and (not use_canton)
if use_sandbox_on_x:
server = '@daml-sdk-{version}//:sandbox-on-x'.format(version=runner_version)
server_args = ['--participant', 'participant-id=sandbox,port=6865']
server_files = []
server_files_prefix = ''
else:
server = daml_runner
server_args = ['sandbox']
server_files = ['$(rootpath {})'.format(compiled_dar)]
server_files_prefix = '--dar=' if use_canton else ''
native.genrule(name='{}-client-sh'.format(name), outs=['{}-client.sh'.format(name)], cmd='cat >$(OUTS) <<\'EOF\'\n#!/usr/bin/env bash\nset -euo pipefail\ncanonicalize_rlocation() {{\n # Note (MK): This is a fun one: Let\'s say $$TEST_WORKSPACE is "compatibility"\n # and the argument points to a target from an external workspace, e.g.,\n # @daml-sdk-0.0.0//:daml. Then the short path will point to\n # ../daml-sdk-0.0.0/daml. Putting things together we end up with\n # compatibility/../daml-sdk-0.0.0/daml. On Linux and MacOS this works\n # just fine. However, on windows we need to normalize the path\n # or rlocation will fail to find the path in the manifest file.\n rlocation $$(realpath -L -s -m --relative-to=$$PWD $$TEST_WORKSPACE/$$1)\n}}\nrunner=$$(canonicalize_rlocation $(rootpath {runner}))\n# Cleanup the trigger runner process but maintain the script runner exit code.\ntrap \'status=$$?; kill -TERM $$PID; wait $$PID; exit $$status\' INT TERM\n\nif [ {upload_dar} -eq 1 ] ; then\n $$runner ledger upload-dar \\\n --host localhost \\\n --port 6865 \\\n $$(canonicalize_rlocation $(rootpath {dar}))\nfi\n$$runner script \\\n --ledger-host localhost \\\n --ledger-port 6865 \\\n --wall-clock-time \\\n --dar $$(canonicalize_rlocation $(rootpath {dar})) \\\n --script-name ScriptExample:test\nEOF\nchmod +x $(OUTS)\n'.format(dar=compiled_dar, runner=daml_runner, upload_dar='1' if use_sandbox_on_x else '0'), exec_tools=[compiled_dar, daml_runner])
native.sh_binary(name='{}-client'.format(name), srcs=['{}-client.sh'.format(name)], data=[compiled_dar, daml_runner])
client_server_test(name=name, client='{}-client'.format(name), client_args=[], client_files=[], data=[compiled_dar], runner='//bazel_tools/client_server:runner', runner_args=['6865'], server=server, server_args=server_args, server_files=server_files, server_files_prefix=server_files_prefix, tags=['exclusive']) |
# An integer has sequential digits
# if and only if each digit in the number is one more than the previous digit.
# Return a sorted list of all the integers in the range [low, high] inclusive that have sequential digits.
# Example 1:
# Input: low = 100, high = 300
# Output: [123,234]
# Example 2:
# Input: low = 1000, high = 13000
# Output: [1234,2345,3456,4567,5678,6789,12345]
# Constraints:
# 10 <= low <= high <= 10^9
# Hints:
# Generate all numbers with sequential digits and check if they are in the given range.
# Fix the starting digit then do a recursion that tries to append all valid digits.
class Solution:
    """LeetCode 1291 -- Sequential Digits."""

    def sequentialDigits(self, low: int, high: int) -> list:
        """Return all integers in [low, high] whose digits are sequential
        (each digit exactly one more than the previous), in ascending order.

        Enumerates every sequential run starting at each digit 1..9 and
        keeps the ones inside the range.

        BUG FIX: the original annotated the return as ``List[int]`` without
        importing ``typing.List``, so defining the class raised NameError;
        the builtin ``list`` is used instead.
        """
        res = []
        for start in range(1, 10):
            num = start
            for nxt in range(start + 1, 10):
                num = num * 10 + nxt  # append the next sequential digit
                if low <= num <= high:
                    res.append(num)
        return sorted(res)
| class Solution:
def sequential_digits(self, low: int, high: int) -> List[int]:
res = []
for i in range(1, 10):
num = i
for j in range(i + 1, 10):
num = num * 10 + j
if low <= num <= high:
res.append(num)
return sorted(res) |
#!/usr/bin/python3
# AoC 2020 day 5: read boarding-pass codes, one per line.
with open('05_input', 'r') as f:
    lines = f.readlines()
lines = [r.strip() for r in lines]
def get_seat_id(code):
    """Decode a 10-character boarding pass into a seat ID.

    The first 7 characters (F/B) binary-encode the row, the last 3 (L/R)
    the column; seat ID = row * 8 + column.
    """
    bits = code.translate(str.maketrans('FBLR', '0101'))
    row = int(bits[0:7], 2)
    col = int(bits[7:10], 2)
    return row * 8 + col
# Decode every boarding pass, then find the single missing ID inside the
# otherwise-contiguous occupied range: that gap is our seat.
seat_ids = [get_seat_id(code) for code in lines]
seat_ids = sorted(seat_ids)
first, last = seat_ids[0], seat_ids[-1]
missing = set(range(first, last+1)) - set(seat_ids)
print(next(iter(missing)))
| with open('05_input', 'r') as f:
lines = f.readlines()
lines = [r.strip() for r in lines]
def get_seat_id(code):
row_str = code[0:7]
col_str = code[7:10]
row_num = row_str.replace('F', '0').replace('B', '1')
col_num = col_str.replace('L', '0').replace('R', '1')
row = int(row_num, 2)
col = int(col_num, 2)
seat_id = row * 8 + col
return seat_id
seat_ids = [get_seat_id(code) for code in lines]
seat_ids = sorted(seat_ids)
(first, last) = (seat_ids[0], seat_ids[-1])
missing = set(range(first, last + 1)) - set(seat_ids)
print(next(iter(missing))) |
# pseudo code taken from CLRS book
def lcs(X, Y):
    """Return one longest common subsequence of X and Y as a list.

    Bottom-up dynamic program after CLRS: c[(i, j)] holds the LCS length of
    X[:i+1] and Y[:j+1] (index -1 meaning the empty prefix), followed by a
    traceback from the bottom-right corner.

    BUG FIX: the original never initialized c[(-1, -1)], so any pair of
    inputs with X[0] == Y[0] raised KeyError when filling c[(0, 0)]; the
    base-case loop now starts at -1.
    """
    c = {}
    m, n = len(X), len(Y)
    # Base cases: an empty prefix on either side has LCS length 0.
    for i in range(-1, m):
        c[(i, -1)] = 0
    for j in range(n):
        c[(-1, j)] = 0
    for i in range(m):
        for j in range(n):
            if X[i] == Y[j]:
                c[(i, j)] = 1 + c[(i - 1, j - 1)]
            elif c[(i - 1, j)] >= c[(i, j - 1)]:
                # Prefer dropping from X on ties (CLRS arrow convention).
                c[(i, j)] = c[(i - 1, j)]
            else:
                c[(i, j)] = c[(i, j - 1)]
    # Trace back from (m-1, n-1), collecting matched symbols.
    out = []
    i, j = m - 1, n - 1
    while i != -1 and j != -1:
        if X[i] == Y[j]:
            out.append(X[i])
            i -= 1
            j -= 1
        elif c[(i, j)] == c[(i - 1, j)]:
            i -= 1
        else:
            j -= 1
    out.reverse()
    return out
| def lcs(X, Y):
c = {}
m = len(X)
n = len(Y)
for i in range(m):
c[i, -1] = 0
for j in range(n):
c[-1, j] = 0
for i in range(m):
for j in range(n):
if X[i] == Y[j]:
c[i, j] = 1 + c[i - 1, j - 1]
elif c[i - 1, j] >= c[i, j - 1]:
c[i, j] = c[i - 1, j]
else:
c[i, j] = c[i, j - 1]
lcs = []
i = m - 1
j = n - 1
while i != -1 and j != -1:
if X[i] == Y[j]:
lcs = [X[i]] + lcs
i -= 1
j -= 1
elif c[i, j] == c[i - 1, j]:
i -= 1
else:
j -= 1
return lcs |
class TreeNode:
    """Binary-search-tree node holding `data` and two child links."""

    def __init__(self, data):
        self.data = data
        self.left = None   # subtree of values <= data (see insert())
        self.right = None  # subtree of values > data
def insert(root, node):
    """Insert `node` into the BST rooted at `root`.

    Values equal to the root go left.  If `root` is None the local name is
    rebound only -- the caller's tree is NOT modified in that case (kept to
    preserve the original behavior).
    """
    if root is None:
        root = node
        return
    if root.data >= node.data:
        if root.left is None:
            root.left = node
        else:
            insert(root.left, node)
    elif root.right is None:
        root.right = node
    else:
        insert(root.right, node)
def search_tree(root, data):
    """Search the BST for `data`.

    Returns the matching node, or the string "No Data found" when the value
    is absent (kept from the original interface).
    """
    if root is None:
        return "No Data found"
    if data == root.data:
        return root
    if data > root.data:
        return search_tree(root.right, data)
    return search_tree(root.left, data)
def inorder_print(root, level=0):
    """Print the tree inorder, indenting each value by its depth."""
    if not root:
        return
    inorder_print(root.left, level + 1)
    print(" " * level + str(root.data))
    inorder_print(root.right, level + 1)
def balanced_bst(arr):
    """Build a height-balanced BST from a sorted sequence.

    The middle element becomes the root; the lower and upper halves are
    built recursively as the left and right subtrees.  Returns None for an
    empty input.

    BUG FIX: the original used ``len(arr)/2``, which is a float in
    Python 3 and raises TypeError when used as a list index; floor
    division is required.
    """
    if not arr:
        return None
    mid = len(arr) // 2           # integer midpoint
    root = TreeNode(arr[mid])     # mid element as root
    # Recurse on the low and high halves of the array.
    root.left = balanced_bst(arr[:mid])
    root.right = balanced_bst(arr[mid + 1:])
    return root
if __name__ == "__main__":
arr = []
for i in range(1, 15):
arr.append(i)
r = balanced_bst(arr)
print(inorder_print(r))
| class Treenode:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def insert(root, node):
if root is None:
root = node
elif root.data >= node.data:
if root.left == None:
root.left = node
else:
insert(root.left, node)
elif root.right == None:
root.right = node
else:
insert(root.right, node)
def search_tree(root, data):
if root is None:
return 'No Data found'
elif root.data == data:
return root
elif root.data < data:
return search_tree(root.right, data)
return search_tree(root.left, data)
def inorder_print(root, level=0):
if root:
inorder_print(root.left, level + 1)
print(' ' * level + str(root.data))
inorder_print(root.right, level + 1)
def balanced_bst(arr):
if not arr:
return None
mid = len(arr) / 2
root = tree_node(arr[mid])
root.left = balanced_bst(arr[:mid])
root.right = balanced_bst(arr[mid + 1:])
return root
if __name__ == '__main__':
arr = []
for i in range(1, 15):
arr.append(i)
r = balanced_bst(arr)
print(inorder_print(r)) |
## <hr>Dummy value.
#
# No texture, but the value to be used as 'texture semantic'
# (#aiMaterialProperty::mSemantic) for all material properties
# *not* related to textures.
#
# Python mirror of assimp's aiTextureType enum (see material.h); names and
# values must stay in sync with the C API.
aiTextureType_NONE = 0x0
## <hr>The texture is combined with the result of the diffuse
# lighting equation.
#
aiTextureType_DIFFUSE = 0x1
## <hr>The texture is combined with the result of the specular
# lighting equation.
#
aiTextureType_SPECULAR = 0x2
## <hr>The texture is combined with the result of the ambient
# lighting equation.
#
aiTextureType_AMBIENT = 0x3
## <hr>The texture is added to the result of the lighting
# calculation. It isn't influenced by incoming light.
#
aiTextureType_EMISSIVE = 0x4
## <hr>The texture is a height map.
#
# By convention, higher gray-scale values stand for
# higher elevations from the base height.
#
aiTextureType_HEIGHT = 0x5
## <hr>The texture is a (tangent space) normal-map.
#
# Again, there are several conventions for tangent-space
# normal maps. Assimp does (intentionally) not
# distinguish here.
#
aiTextureType_NORMALS = 0x6
## <hr>The texture defines the glossiness of the material.
#
# The glossiness is in fact the exponent of the specular
# (phong) lighting equation. Usually there is a conversion
# function defined to map the linear color values in the
# texture to a suitable exponent. Have fun.
#
aiTextureType_SHININESS = 0x7
## <hr>The texture defines per-pixel opacity.
#
# Usually 'white' means opaque and 'black' means
# 'transparency'. Or quite the opposite. Have fun.
#
aiTextureType_OPACITY = 0x8
## <hr>Displacement texture
#
# The exact purpose and format is application-dependent.
# Higher color values stand for higher vertex displacements.
#
aiTextureType_DISPLACEMENT = 0x9
## <hr>Lightmap texture (aka Ambient Occlusion)
#
# Both 'Lightmaps' and dedicated 'ambient occlusion maps' are
# covered by this material property. The texture contains a
# scaling value for the final color value of a pixel. Its
# intensity is not affected by incoming light.
#
aiTextureType_LIGHTMAP = 0xA
## <hr>Reflection texture
#
# Contains the color of a perfect mirror reflection.
# Rarely used, almost never for real-time applications.
#
aiTextureType_REFLECTION = 0xB
## <hr>Unknown texture
#
# A texture reference that does not match any of the definitions
# above is considered to be 'unknown'. It is still imported
# but is excluded from any further postprocessing.
#
aiTextureType_UNKNOWN = 0xC
| ai_texture_type_none = 0
ai_texture_type_diffuse = 1
ai_texture_type_specular = 2
ai_texture_type_ambient = 3
ai_texture_type_emissive = 4
ai_texture_type_height = 5
ai_texture_type_normals = 6
ai_texture_type_shininess = 7
ai_texture_type_opacity = 8
ai_texture_type_displacement = 9
ai_texture_type_lightmap = 10
ai_texture_type_reflection = 11
ai_texture_type_unknown = 12 |
# -*- coding: cp852 -*-
#en_EN Locale
class language():
    """en_EN locale: user-visible message constants for the fluxion installer."""
    VERSION='Version'
    CREATING_BACKUP='Creating backup'
    NO_PERM='No Permission'
    COPYING_FILES='Copying files'
    COULD_NOT_COPY='Could not copy files'
    DONE='Done'
    REWRITING_FLUXION_BASH='Rewriting fluxion bash'
    RECONFIGURE_FLUXION_BASH='Reconfiguring fluxion bash'
    INTERNAL_FAILURE='Internal failure'
    ERROR='Error'
    FATAL_ERROR='FATAL ERROR'
    TRYING_TO_RESTORE_BACKUP='Trying to restore from backup'
    BACKUP_RESTORED='Backup restored'
    SETTING_MODES='Setting modes'
    CONTINUE='continue'
    VERIFYING_INTEG='Verifying integrity of fluxion'
    DELETING_BACKUP='Deleting backup'
    # NOTE(review): the stray '"' inside the string below looks unintended -- confirm.
    SUCCESS='Successfully installed "to your fluxion'
    BEGIN_INSTALL='Begin installation'
    COULD_NOT_OPEN_FLUX='Could not open fluxion, check permissions. Exiting...'
    NO_FLUXION_FOUND='No fluxion installation found.\nPlease use this installer INSIDE the fluxion folder. Exiting...'
    CORRUPTED_FLUX='Corrupted fluxion installation. Exiting...'
    DOUBLE_INSTALL='Seems like there is already a site with the name'
    CONTINUE_ANYWAY='Do you want to continue anyway?'
NOTHING_CHANGED='Nothing changed, your choice. Exiting...' | class Language:
version = 'Version'
creating_backup = 'Creating backup'
no_perm = 'No Permission'
copying_files = 'Copying files'
could_not_copy = 'Could not copy files'
done = 'Done'
rewriting_fluxion_bash = 'Rewriting fluxion bash'
reconfigure_fluxion_bash = 'Reconfiguring fluxion bash'
internal_failure = 'Internal failure'
error = 'Error'
fatal_error = 'FATAL ERROR'
trying_to_restore_backup = 'Trying to restore from backup'
backup_restored = 'Backup restored'
setting_modes = 'Setting modes'
continue = 'continue'
verifying_integ = 'Verifying integrity of fluxion'
deleting_backup = 'Deleting backup'
success = 'Successfully installed "to your fluxion'
begin_install = 'Begin installation'
could_not_open_flux = 'Could not open fluxion, check permissions. Exiting...'
no_fluxion_found = 'No fluxion installation found.\nPlease use this installer INSIDE the fluxion folder. Exiting...'
corrupted_flux = 'Corrupted fluxion installation. Exiting...'
double_install = 'Seems like there is already a site with the name'
continue_anyway = 'Do you want to continue anyway?'
nothing_changed = 'Nothing changed, your choice. Exiting...' |
def sycl_library_path(name):
    """Return the relative path of the shared library called *name*."""
    return "lib/lib%s.so" % name
def readlink_command():
    """Name of the command used to resolve symbolic links."""
    return "readlink"
| def sycl_library_path(name):
return 'lib/lib{}.so'.format(name)
def readlink_command():
return 'readlink' |
class Sources:
    """Value object describing a single news source."""
    def __init__(self, id, name, description, language):
        # Mirror each constructor argument onto a same-named attribute.
        self.id, self.name, self.description, self.language = (
            id,
            name,
            description,
            language,
        )
class Articles:
def __init__(self,author,title,description,url,publishedAt,urlToImage):
self.author = author
self.title = title
self.description = description
self.url = url
self.publishedAt = publishedAt
self.urlToImage = urlToImage | class Sources:
def __init__(self, id, name, description, language):
self.id = id
self.name = name
self.description = description
self.language = language
class Articles:
def __init__(self, author, title, description, url, publishedAt, urlToImage):
self.author = author
self.title = title
self.description = description
self.url = url
self.publishedAt = publishedAt
self.urlToImage = urlToImage |
num_list = list(map(int, input().split()))
high=max(num_list)
low=min(num_list)
print(low,sum(num_list),high) | num_list = list(map(int, input().split()))
high = max(num_list)
low = min(num_list)
print(low, sum(num_list), high) |
def clock_degree(clock_time):
hour, minutes = (int(a) for a in clock_time.split(':'))
if not (24 > hour >= 0 and 60 > minutes >= 0):
return 'Check your time !'
return '{}:{}'.format((hour % 12) * 30 or 360, minutes * 6 or 360) | def clock_degree(clock_time):
(hour, minutes) = (int(a) for a in clock_time.split(':'))
if not (24 > hour >= 0 and 60 > minutes >= 0):
return 'Check your time !'
return '{}:{}'.format(hour % 12 * 30 or 360, minutes * 6 or 360) |
# def quick_sort(the_list):
# # left index
# l = 0
# # pivot index
# p = len(the_list)
# # right index
# r = pivot - 1
# if l < r:
# # increment l till we have an appropriate value
# while the_list[l] <= the_list[p] && l < len(the_list):
# l += 1
# # decremt r till we have an appropriate value
# while the_list[r] > the_list[p] && r >= l:
# r -= 1
# the_list[l], the_list[r] = the_list[r], the_list[l]
# ------------------------------------------------------------------
def quick_sort(arr, l, r):
    """Sort arr[l..r] (inclusive bounds) in place with quicksort."""
    if l < r:
        # Partition the array by setting the position of the pivot value
        position = partition(arr, l, r)
        # Bug fix: the recursive calls used the undefined name `QuickSort`
        # (NameError at runtime); recurse into this function instead.
        # Sort the left part
        quick_sort(arr, l, position - 1)
        # Sort the right part
        quick_sort(arr, position + 1, r)
def partition(arr, l, r):
    """Lomuto partition of arr[l..r] around the pivot arr[r].

    Moves every value <= pivot to the left of the pivot's final position,
    larger values to the right, and returns the pivot's index.  (The
    original body stopped after picking the pivot and implicitly returned
    None; this implements the algorithm its pseudocode comments describe.)
    """
    # set a pivot value as a point of reference
    p = arr[r]
    # low tracks the largest index of values <= pivot seen so far
    low = l - 1
    for i in range(l, r):
        if arr[i] <= p:
            low += 1
            arr[i], arr[low] = arr[low], arr[i]
    # place the pivot between the two regions and report its index
    arr[r], arr[low + 1] = arr[low + 1], arr[r]
    return low + 1
| def quick_sort(arr, l, r):
if l < r:
position = partition(arr, l, r)
quick_sort(arr, l, position - 1)
quick_sort(arr, position + 1, r)
def partition(arr, l, r):
p = arr[r] |
class ProductionConfig():
    """Production configuration constants for the application."""
    DEBUG = False
    # Legacy flat Mongo settings kept alongside MONGODB_SETTINGS below.
    MONGODB_HOST = 'localhost'
    MONGODB_DBNAME = 'their_tweets'
    MONGODB_SETTINGS = {
        'host' : 'localhost',
        'DB' : 'new',
        'port' : 27017
    }
    # NOTE(review): secrets and API keys are hard-coded in source control;
    # these should come from the environment or a secret store.
    SECRET_KEY = 'thisissecret'
    MULTIPLE_AUTH_HEADERS = ['access_token', 'device']
    PORT = 8000
    # NOTE(review): Flask spells this flag PROPAGATE_EXCEPTIONS; the
    # misspelling below may be silently ignored -- confirm.
    PROPOGATE_EXCEPTIONS = True
    CONSUMER_KEY = 'bgEZ37ZUH2HeZ3aAyeh9VEsyb'
    CONSUMER_SECRET = '9OQqbx5elvBb7eDwfHq4BEKBI9UqOFIUT72mkVsx0XuWhickR5'
    # Redis connection settings.
    REDISHOST = 'localhost'
    REDISDB = 1
    SESSION_COOKIE_HTTPONLY = False
| class Productionconfig:
debug = False
mongodb_host = 'localhost'
mongodb_dbname = 'their_tweets'
mongodb_settings = {'host': 'localhost', 'DB': 'new', 'port': 27017}
secret_key = 'thisissecret'
multiple_auth_headers = ['access_token', 'device']
port = 8000
propogate_exceptions = True
consumer_key = 'bgEZ37ZUH2HeZ3aAyeh9VEsyb'
consumer_secret = '9OQqbx5elvBb7eDwfHq4BEKBI9UqOFIUT72mkVsx0XuWhickR5'
redishost = 'localhost'
redisdb = 1
session_cookie_httponly = False |
__author__ = ""
__version__ = "0.0.1"
__date__ = ""
__all__ = ["__author__", "__version__", "__date__"]
| __author__ = ''
__version__ = '0.0.1'
__date__ = ''
__all__ = ['__author__', '__version__', '__date__'] |
class C:
def __init__(self, x, y):
"""
Args:
x:
y:
"""
return None | class C:
def __init__(self, x, y):
"""
Args:
x:
y:
"""
return None |
def analog_state(analog_input):
    """Map a read of *analog_input* to 'UP' (truthy) or 'DOWN' (falsy)."""
    return 'UP' if analog_input.read() else 'DOWN'
| def analog_state(analog_input):
if analog_input.read():
return 'UP'
else:
return 'DOWN' |
#
# -*- coding: utf-8 -*-
#
# Copyright 2015-2020 NETCAT (www.netcat.pl)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @author NETCAT <firma@netcat.pl>
# @copyright 2015-2020 NETCAT (www.netcat.pl)
# @license http://www.apache.org/licenses/LICENSE-2.0
#
class AccountStatus:
    """
    Account status
    """
    def __init__(self):
        # Every field starts out unset (None); they are filled in later.
        for attr in (
                'uid', 'type', 'validTo', 'billingPlanName', 'subscriptionPrice',
                'itemPrice', 'itemPriceStatus', 'itemPriceInvoice', 'itemPriceAll',
                'itemPriceIBAN', 'itemPriceWhitelist', 'itemPriceSearchVAT',
                'limit', 'requestDelay', 'domainLimit', 'overPlanAllowed',
                'terytCodes', 'excelAddIn', 'JPKVAT', 'CLI', 'stats',
                'nipMonitor', 'searchByNIP', 'searchByREGON', 'searchByKRS',
                'funcIsActive', 'funcGetInvoiceData', 'funcGetAllData',
                'funcGetVIESData', 'funcGetVATStatus', 'funcGetIBANStatus',
                'funcGetWhitelistStatus', 'funcSearchVAT', 'invoiceDataCount',
                'allDataCount', 'firmStatusCount', 'vatStatusCount',
                'viesStatusCount', 'ibanStatusCount', 'whitelistStatusCount',
                'searchVATCount', 'totalCount'):
            setattr(self, attr, None)
    def __str__(self):
        # (label, value) pairs in the exact order of the original report;
        # a few labels differ in case from their attribute names.
        pairs = (
            ('uid', self.uid), ('type', self.type), ('validTo', self.validTo),
            ('billingPlanName', self.billingPlanName),
            ('subscriptionPrice', self.subscriptionPrice),
            ('itemPrice', self.itemPrice),
            ('itemPriceStatus', self.itemPriceStatus),
            ('itemPriceInvoice', self.itemPriceInvoice),
            ('itemPriceAll', self.itemPriceAll),
            ('itemPriceIBAN', self.itemPriceIBAN),
            ('itemPriceWhitelist', self.itemPriceWhitelist),
            ('itemPriceSearchVAT', self.itemPriceSearchVAT),
            ('limit', self.limit), ('requestDelay', self.requestDelay),
            ('domainLimit', self.domainLimit),
            ('overPlanAllowed', self.overPlanAllowed),
            ('terytCodes', self.terytCodes), ('excelAddIn', self.excelAddIn),
            ('jpkVat', self.JPKVAT), ('cli', self.CLI), ('stats', self.stats),
            ('NIPMonitor', self.nipMonitor), ('searchByNIP', self.searchByNIP),
            ('searchByREGON', self.searchByREGON), ('searchByKRS', self.searchByKRS),
            ('funcIsActive', self.funcIsActive),
            ('funcGetInvoiceData', self.funcGetInvoiceData),
            ('funcGetAllData', self.funcGetAllData),
            ('funcGetVIESData', self.funcGetVIESData),
            ('funcGetVATStatus', self.funcGetVATStatus),
            ('funcGetIBANStatus', self.funcGetIBANStatus),
            ('funcGetWhitelistStatus', self.funcGetWhitelistStatus),
            ('funcSearchVAT', self.funcSearchVAT),
            ('invoiceDataCount', self.invoiceDataCount),
            ('allDataCount', self.allDataCount),
            ('firmStatusCount', self.firmStatusCount),
            ('VATStatusCount', self.vatStatusCount),
            ('VIESStatusCount', self.viesStatusCount),
            ('IBANStatusCount', self.ibanStatusCount),
            ('whitelistStatusCount', self.whitelistStatusCount),
            ('searchVATCount', self.searchVATCount),
            ('totalCount', self.totalCount),
        )
        body = ', '.join('{} = {}'.format(label, value) for label, value in pairs)
        return 'AccountStatus: [' + body + ']'
| class Accountstatus:
"""
Account status
"""
def __init__(self):
self.uid = None
self.type = None
self.validTo = None
self.billingPlanName = None
self.subscriptionPrice = None
self.itemPrice = None
self.itemPriceStatus = None
self.itemPriceInvoice = None
self.itemPriceAll = None
self.itemPriceIBAN = None
self.itemPriceWhitelist = None
self.itemPriceSearchVAT = None
self.limit = None
self.requestDelay = None
self.domainLimit = None
self.overPlanAllowed = None
self.terytCodes = None
self.excelAddIn = None
self.JPKVAT = None
self.CLI = None
self.stats = None
self.nipMonitor = None
self.searchByNIP = None
self.searchByREGON = None
self.searchByKRS = None
self.funcIsActive = None
self.funcGetInvoiceData = None
self.funcGetAllData = None
self.funcGetVIESData = None
self.funcGetVATStatus = None
self.funcGetIBANStatus = None
self.funcGetWhitelistStatus = None
self.funcSearchVAT = None
self.invoiceDataCount = None
self.allDataCount = None
self.firmStatusCount = None
self.vatStatusCount = None
self.viesStatusCount = None
self.ibanStatusCount = None
self.whitelistStatusCount = None
self.searchVATCount = None
self.totalCount = None
def __str__(self):
return 'AccountStatus: [uid = ' + str(self.uid) + ', type = ' + str(self.type) + ', validTo = ' + str(self.validTo) + ', billingPlanName = ' + str(self.billingPlanName) + ', subscriptionPrice = ' + str(self.subscriptionPrice) + ', itemPrice = ' + str(self.itemPrice) + ', itemPriceStatus = ' + str(self.itemPriceStatus) + ', itemPriceInvoice = ' + str(self.itemPriceInvoice) + ', itemPriceAll = ' + str(self.itemPriceAll) + ', itemPriceIBAN = ' + str(self.itemPriceIBAN) + ', itemPriceWhitelist = ' + str(self.itemPriceWhitelist) + ', itemPriceSearchVAT = ' + str(self.itemPriceSearchVAT) + ', limit = ' + str(self.limit) + ', requestDelay = ' + str(self.requestDelay) + ', domainLimit = ' + str(self.domainLimit) + ', overPlanAllowed = ' + str(self.overPlanAllowed) + ', terytCodes = ' + str(self.terytCodes) + ', excelAddIn = ' + str(self.excelAddIn) + ', jpkVat = ' + str(self.JPKVAT) + ', cli = ' + str(self.CLI) + ', stats = ' + str(self.stats) + ', NIPMonitor = ' + str(self.nipMonitor) + ', searchByNIP = ' + str(self.searchByNIP) + ', searchByREGON = ' + str(self.searchByREGON) + ', searchByKRS = ' + str(self.searchByKRS) + ', funcIsActive = ' + str(self.funcIsActive) + ', funcGetInvoiceData = ' + str(self.funcGetInvoiceData) + ', funcGetAllData = ' + str(self.funcGetAllData) + ', funcGetVIESData = ' + str(self.funcGetVIESData) + ', funcGetVATStatus = ' + str(self.funcGetVATStatus) + ', funcGetIBANStatus = ' + str(self.funcGetIBANStatus) + ', funcGetWhitelistStatus = ' + str(self.funcGetWhitelistStatus) + ', funcSearchVAT = ' + str(self.funcSearchVAT) + ', invoiceDataCount = ' + str(self.invoiceDataCount) + ', allDataCount = ' + str(self.allDataCount) + ', firmStatusCount = ' + str(self.firmStatusCount) + ', VATStatusCount = ' + str(self.vatStatusCount) + ', VIESStatusCount = ' + str(self.viesStatusCount) + ', IBANStatusCount = ' + str(self.ibanStatusCount) + ', whitelistStatusCount = ' + str(self.whitelistStatusCount) + ', searchVATCount = ' + str(self.searchVATCount) + 
', totalCount = ' + str(self.totalCount) + ']' |
def getRoot(config):
    """Walk the .parent chain and return the top-most config object."""
    node = config
    while node.parent:
        node = node.parent
    return node
# Suite gating for a lit-style test config: `config` is injected by the
# test runner rather than defined in this file.
root = getRoot(config)
if 'libdispatch' in root.available_features:
    # Presumably the tests need blocks support when libdispatch is
    # available -- confirm against the test sources.
    additional_cflags = ' -fblocks '
    for index, (template, replacement) in enumerate(config.substitutions):
        if template in ['%clang_tsan ', '%clangxx_tsan ']:
            config.substitutions[index] = (template, replacement + additional_cflags)
else:
    # Without libdispatch this suite cannot run at all.
    config.unsupported = True
config.environment['TSAN_OPTIONS'] += ':ignore_noninstrumented_modules=1'
| def get_root(config):
if not config.parent:
return config
return get_root(config.parent)
root = get_root(config)
if 'libdispatch' in root.available_features:
additional_cflags = ' -fblocks '
for (index, (template, replacement)) in enumerate(config.substitutions):
if template in ['%clang_tsan ', '%clangxx_tsan ']:
config.substitutions[index] = (template, replacement + additional_cflags)
else:
config.unsupported = True
config.environment['TSAN_OPTIONS'] += ':ignore_noninstrumented_modules=1' |
# Eve-style resource definition for the user_modelling_units endpoint.
user_modelling_units = {
    'item_lookup_field': 'userModellingUnitId',
    # Raw string: '\S' is not a recognised escape sequence and raises a
    # SyntaxWarning on modern Python; the runtime value is unchanged.
    'item_url': r'regex("[\S]+")',
    'item_title': 'league_table',
    'resource_methods': ['GET', 'POST', 'DELETE'],
    'item_methods': ['GET', 'PATCH', 'DELETE'],
    'extra_response_fields': ['userModellingUnitId'],
    'doc': {
        'body':
            """
            <p>user_modelling_units resource to edit user_modelling_units information</p>
            <ul>
            <li>userModellingUnitId: Required. Unique Id used for identify each result</li>
            <li>buildings: Required. List of the modellingUnitId that must be included in results</li>
            </ul>
            <h6>Resource example</h6>
            <pre>
            {
                "userModellingUnitId": "userModellingUnitId-123",
                "buildings": ["modellingUnitId-123", "modellingUnitId-234"]
            }
            </pre>
            """,
        'title': 'league_table help',
    },
    'schema': {
        'userModellingUnitId': {
            'type': 'string',
            'required': True,
            'unique': True,
        },
        'buildings': {
            'type': 'list',
            'required': True
        }
    }
}
| user_modelling_units = {'item_lookup_field': 'userModellingUnitId', 'item_url': 'regex("[\\S]+")', 'item_title': 'league_table', 'resource_methods': ['GET', 'POST', 'DELETE'], 'item_methods': ['GET', 'PATCH', 'DELETE'], 'extra_response_fields': ['userModellingUnitId'], 'doc': {'body': '\n <p>user_modelling_units resource to edit user_modelling_units information</p>\n <ul>\n <li>userModellingUnitId: Required. Unique Id used for identify each result</li>\n <li>buildings: Required. List of the modellingUnitId that must be included in results</li>\n </ul>\n <h6>Resource example</h6>\n <pre>\n {\n "userModellingUnitId": "userModellingUnitId-123",\n "buildings": ["modellingUnitId-123", "modellingUnitId-234"]\n }\n </pre>\n ', 'title': 'league_table help'}, 'schema': {'userModellingUnitId': {'type': 'string', 'required': True, 'unique': True}, 'buildings': {'type': 'list', 'required': True}}} |
"""
0251. Flatten 2D Vector
Medium
Design and implement an iterator to flatten a 2d vector. It should support the following operations: next and hasNext.
Example:
Vector2D iterator = new Vector2D([[1,2],[3],[4]]);
iterator.next(); // return 1
iterator.next(); // return 2
iterator.next(); // return 3
iterator.hasNext(); // return true
iterator.hasNext(); // return true
iterator.next(); // return 4
iterator.hasNext(); // return false
Notes:
Please remember to RESET your class variables declared in Vector2D, as static/class variables are persisted across multiple test cases. Please see here for more details.
You may assume that next() call will always be valid, that is, there will be at least a next element in the 2d vector when next() is called.
Follow up:
As an added challenge, try to code it using only iterators in C++ or iterators in Java.
"""
class Vector2D:
    """Iterator flattening a 2-D list; supports next() and hasNext()."""
    def __init__(self, v: List[List[int]]):
        def it():
            # Yield elements row by row, decrementing the remaining count
            # as each one is produced.
            for line in v:
                for val in line:
                    self.size -= 1
                    yield val
        self.it = it()
        # Number of elements not yet produced by the generator.
        self.size = sum(len(line) for line in v)
    def next(self) -> int:
        """Return the next flattened element (a next element must exist)."""
        return next(self.it)
    def hasNext(self) -> bool:
        # Bug fix: previously returned the raw int count despite the
        # declared bool return type; convert explicitly.
        return self.size > 0
# Your Vector2D object will be instantiated and called as such:
# obj = Vector2D(v)
# param_1 = obj.next()
# param_2 = obj.hasNext() | """
0251. Flatten 2D Vector
Medium
Design and implement an iterator to flatten a 2d vector. It should support the following operations: next and hasNext.
Example:
Vector2D iterator = new Vector2D([[1,2],[3],[4]]);
iterator.next(); // return 1
iterator.next(); // return 2
iterator.next(); // return 3
iterator.hasNext(); // return true
iterator.hasNext(); // return true
iterator.next(); // return 4
iterator.hasNext(); // return false
Notes:
Please remember to RESET your class variables declared in Vector2D, as static/class variables are persisted across multiple test cases. Please see here for more details.
You may assume that next() call will always be valid, that is, there will be at least a next element in the 2d vector when next() is called.
Follow up:
As an added challenge, try to code it using only iterators in C++ or iterators in Java.
"""
class Vector2D:
def __init__(self, v: List[List[int]]):
def it():
for line in v:
for val in line:
self.size -= 1
yield val
self.it = it()
self.size = sum((len(line) for line in v))
def next(self) -> int:
return next(self.it)
def has_next(self) -> bool:
return self.size |
"""
This file includes the Quip class. Quips are responses given by NPCs, when responding to a
DialogEvent.
Classes:
Quip
"""
class Quip:
    """An NPC response line returned by a DialogEvent.

    Attributes:
        key: str
            Internal name used by the game to reference this quip.
        text: str
            Text printed when this quip is returned by a DialogEvent.
        is_said: bool, default False
            Whether this quip has been said before.
        is_repeatable: bool, default False
            Whether this quip can be said more than once.
    """
    def __init__(self, key, text, is_said=False, is_repeatable=False):
        """Store the quip's identity, text and delivery flags.

        :param key: internal reference name of the quip
        :param text: text printed when the quip is delivered
        :param is_said: True if the quip has been said before
        :param is_repeatable: True if the quip may be said more than once
        """
        self.key = key
        self.text = text
        self.is_said = is_said
        self.is_repeatable = is_repeatable
    def to_json(self):
        """Return this quip as a serializable dict tagged with '_class_'.

        Note: returns (and mutates) the instance __dict__ itself, matching
        the original behaviour.
        """
        data = vars(self)
        data["_class_"] = type(self).__name__
        return data
| """
This file includes the Quip class. Quips are responses given by NPCs, when responding to a
DialogEvent.
Classes:
Quip
"""
class Quip:
"""
Attributes:
key: str
This is the name of the object when referenced internally by the game.
text: str
The text printed when this quip is returned by a DialogEvent.
is_said: bool, default False
True if this quip has been said before.
is_repeatable: bool, default False
True if this quip can be said more than once.
Methods:
"""
def __init__(self, key, text, is_said=False, is_repeatable=False):
"""
Constructor of Quip Class.
:param key: str
This is the name of the object when referenced internally by the game.
:param text: str
The text printed when this quip is returned by a DialogEvent.
:param is_said: bool, default False
True if this quip has been said before.
:param is_repeatable: bool, default False
True if this quip can be said more than once.
"""
self.key = key
self.text = text
self.is_said = is_said
self.is_repeatable = is_repeatable
def to_json(self):
"""
Convert the instance of this class to a serializable object.
:return: Dictionary
A dictionary of the object's attributes, containing the key '_class_'.
"""
obj_dict = self.__dict__
obj_dict['_class_'] = self.__class__.__name__
return obj_dict |
class Currency:
    """
    Simple currency domain model, holds value as string
    """
    # Raw currency code (e.g. 'PLN'); annotation only, assigned in __init__.
    s: str
    def __init__(self, currency: str):
        # Only the empty string is rejected; any other value is stored as-is.
        # NOTE(review): None slips through (None == "" is False) -- confirm intended.
        if currency == "":
            raise TypeError("currency cannot be empty")
        self.s = currency
    def __str__(self):
        return self.String()
    def __eq__(self, other) -> bool:
        # Compares by code; assumes `other` also exposes a String() method.
        return self.String() == other.String()
    def String(self) -> str:
        # Go-style accessor kept for API compatibility.
        return self.s
    def IsZero(self) -> bool:
        # "Zero value" in the Go sense: an empty currency code.
        return self.s == ""
PLN = Currency("PLN")
EUR = Currency("EUR")
| class Currency:
"""
Simple currency domain model, holds value as string
"""
s: str
def __init__(self, currency: str):
if currency == '':
raise type_error('currency cannot be empty')
self.s = currency
def __str__(self):
return self.String()
def __eq__(self, other) -> bool:
return self.String() == other.String()
def string(self) -> str:
return self.s
def is_zero(self) -> bool:
return self.s == ''
pln = currency('PLN')
eur = currency('EUR') |
"""
There are n cities. Some of them are connected, while some are not. If city a is connected directly with city b, and city b is connected directly with city c, then city a is connected indirectly with city c.
A province is a group of directly or indirectly connected cities and no other cities outside of the group.
You are given an n x n matrix isConnected where isConnected[i][j] = 1 if the ith city and the jth city are directly connected, and isConnected[i][j] = 0 otherwise.
Return the total number of provinces.
Example 1:
Input: isConnected = [[1,1,0],[1,1,0],[0,0,1]]
Output: 2
Example 2:
Input: isConnected = [[1,0,0],[0,1,0],[0,0,1]]
Output: 3
Constraints:
* 1 <= n <= 200
* n == isConnected.length
* n == isConnected[i].length
* isConnected[i][j] is 1 or 0.
* isConnected[i][i] == 1
* isConnected[i][j] == isConnected[j][i]
Solution:
The given matrix can be viewed as the Adjacency Matrix of a graph. By viewing the matrix in such a manner,
our problem reduces to the problem of finding the number of connected components in an undirected graph.
* DFS
* BFS
* Union Find
"""
# DFS over the adjacency matrix.
# TC: O(N^2) -- every matrix cell may be examined.
# SC: O(N) -- visited set plus recursion stack.
class Solution:
    def findCircleNum(self, isConnected: List[List[int]]) -> int:
        n = len(isConnected)
        seen = set()
        def explore(city):
            # Visit every directly connected city not reached yet; the start
            # city itself is marked via the diagonal entry (always 1).
            for other in range(n):
                if isConnected[city][other] == 1 and other not in seen:
                    seen.add(other)
                    explore(other)
        provinces = 0
        for city in range(n):
            if city not in seen:
                explore(city)
                provinces += 1
        return provinces
# Traversal with an explicit stack ("BFS" in the original; `pop()` actually
# takes the most recently added city).
# TC: O(N^2) -- the complete matrix may be traversed.
# SC: O(N) -- pending stack plus visited markers.
class Solution:
    def findCircleNum(self, isConnected: List[List[int]]) -> int:
        n = len(isConnected)
        visited = [0] * n
        provinces = 0
        for start in range(n):
            if visited[start]:
                continue
            pending = [start]
            while pending:
                city = pending.pop()
                visited[city] = 1
                pending.extend(
                    nb for nb in range(n)
                    if isConnected[city][nb] == 1 and visited[nb] == 0)
            provinces += 1
        return provinces
| """
There are n cities. Some of them are connected, while some are not. If city a is connected directly with city b, and city b is connected directly with city c, then city a is connected indirectly with city c.
A province is a group of directly or indirectly connected cities and no other cities outside of the group.
You are given an n x n matrix isConnected where isConnected[i][j] = 1 if the ith city and the jth city are directly connected, and isConnected[i][j] = 0 otherwise.
Return the total number of provinces.
Example 1:
Input: isConnected = [[1,1,0],[1,1,0],[0,0,1]]
Output: 2
Example 2:
Input: isConnected = [[1,0,0],[0,1,0],[0,0,1]]
Output: 3
Constraints:
* 1 <= n <= 200
* n == isConnected.length
* n == isConnected[i].length
* isConnected[i][j] is 1 or 0.
* isConnected[i][i] == 1
* isConnected[i][j] == isConnected[j][i]
Solution:
The given matrix can be viewed as the Adjacency Matrix of a graph. By viewing the matrix in such a manner,
our problem reduces to the problem of finding the number of connected components in an undirected graph.
* DFS
* BFS
* Union Find
"""
class Solution:
def find_circle_num(self, isConnected: List[List[int]]) -> int:
n = len(isConnected)
visited = [0] * n
province = 0
def dfs(city_i):
for city_j in range(n):
if isConnected[city_i][city_j] == 1 and visited[city_j] == 0:
visited[city_j] = 1
dfs(city_j)
for city_i in range(n):
if visited[city_i] == 0:
dfs(city_i)
province += 1
return province
class Solution:
def find_circle_num(self, isConnected: List[List[int]]) -> int:
n = len(isConnected)
visited = [0] * n
province = 0
queue = []
for city_i in range(n):
if visited[city_i] == 0:
queue.append(city_i)
while queue:
city_p = queue.pop()
visited[city_p] = 1
for city_j in range(n):
if isConnected[city_p][city_j] == 1 and visited[city_j] == 0:
queue.append(city_j)
province += 1
return province |
# Module-wide registry of feature predicates; populated by @feature below.
feature_funcs = set()
def feature(func):
    """Register *func* in `feature_funcs` and hand it back unchanged.

    Feature functions take a request as their single argument.
    """
    feature_funcs.add(func)
    return func
@feature
def requirejs(request):
    """Enabled when the request carries a 'requirejs' query parameter."""
    return 'requirejs' in request.GET
@feature
def thread_new(request):
    """Always enabled."""
    return True
@feature
def lazy_content(request):
    """Always enabled."""
    return True
| feature_funcs = set()
def feature(func):
""" Feature functions take a request as their single argument. """
feature_funcs.add(func)
return func
@feature
def requirejs(request):
return 'requirejs' in request.GET
@feature
def thread_new(request):
return True
@feature
def lazy_content(request):
return True |
# Name constants shared by the code-generation stages; judging by the class
# names this is a COOL compiler targeting CIL/MIPS -- TODO confirm.
UNBOXED_PRIMITIVE_DEFAULT_EMPTY = "__prim_empty_slot"
UNBOXED_PRIMITIVE_DEFAULT_ZERO = "__prim_zero_slot"
# Built-in class names.
IO_CLASS = "IO"
OBJECT_CLASS = "Object"
INTEGER_CLASS = "Int"
BOOLEAN_CLASS = "Bool"
STRING_CLASS = "String"
BUILT_IN_CLASSES = [
    IO_CLASS,
    OBJECT_CLASS,
    INTEGER_CLASS,
    BOOLEAN_CLASS,
    STRING_CLASS
]
VOID_TYPE = "Void"
LOCAL_SELF_NAME = "__self"
INIT_CIL_SUFFIX = "_init"
# Built in CIL functions
CONFORMS_FUNC = "__conforms"
ISVOID_FUNC = "_isvoid"
# MIPS names
VOID_MIPS_NAME = "type_void" | unboxed_primitive_default_empty = '__prim_empty_slot'
unboxed_primitive_default_zero = '__prim_zero_slot'
io_class = 'IO'
object_class = 'Object'
integer_class = 'Int'
boolean_class = 'Bool'
string_class = 'String'
built_in_classes = [IO_CLASS, OBJECT_CLASS, INTEGER_CLASS, BOOLEAN_CLASS, STRING_CLASS]
void_type = 'Void'
local_self_name = '__self'
init_cil_suffix = '_init'
conforms_func = '__conforms'
isvoid_func = '_isvoid'
void_mips_name = 'type_void' |
## gera novo arquivo sped a partir do banco de dados
def exec(filename,cursor):
arquivo = open("out/" + filename + '_r.txt', 'w')
for row in cursor.execute('SELECT * FROM principal order by r0'):
line = [i for i in row if i is not None]
line = '|' + '|'.join(line[1:]) + '|' + '\n'
arquivo.write(line)
arquivo.close() | def exec(filename, cursor):
arquivo = open('out/' + filename + '_r.txt', 'w')
for row in cursor.execute('SELECT * FROM principal order by r0'):
line = [i for i in row if i is not None]
line = '|' + '|'.join(line[1:]) + '|' + '\n'
arquivo.write(line)
arquivo.close() |
def array123(nums):
    """Return True if the run 1, 2, 3 appears consecutively in nums."""
    # Slide a 3-wide window; an empty range (len < 3) yields False.
    return any(
        nums[i] == 1 and nums[i + 1] == 2 and nums[i + 2] == 3
        for i in range(len(nums) - 2)
    )
| def array123(nums):
for i in range(len(nums) - 2):
if nums[i] == 1 and nums[i + 1] == 2 and (nums[i + 2] == 3):
return True
return False |
class Solution:
    def findSwapValues(self, a, n, b, m):
        """Return 1 if swapping one element of `a` with one of `b` equalises
        the two sums, else -1.

        Assumes `a` (length n) and `b` (length m) are sorted ascending --
        the two-pointer scan below relies on it.
        """
        # We need a[i] - b[j] == (sum(a) - sum(b)) / 2; an odd total
        # difference can never be balanced by a single integer swap.
        gap = sum(a) - sum(b)
        if gap % 2 != 0:
            return -1
        target = gap / 2
        i = j = 0
        while i < n and j < m:
            d = a[i] - b[j]
            if d == target:
                return 1
            if d < target:
                i += 1
            else:
                j += 1
        return -1
| class Solution:
def find_swap_values(self, a, n, b, m):
summ_a = sum(a)
summ_b = sum(b)
diff = summA - summB
if diff % 2 != 0:
return -1
diff = diff / 2
i = 0
j = 0
while i < n and j < m:
d = a[i] - b[j]
if d == diff:
return 1
elif d < diff:
i += 1
else:
j += 1
return -1 |
# Copyright (C) 2019 Project AGI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""HParamMulti class."""
class HParamMulti:
    """Static helpers for managing multiple hyperparam objects.

    Intended for Components that own sub-components.  No state is kept here
    because the flat tf hyperparam object is selectively overridden outside
    of the component.  Namespacing convention: '<component>_<param>'.
    """
    @staticmethod
    def set_hparam_in_subcomponents(subcomponents, hparam_name, val):
        """Propagate a common hparam value to every sub-component."""
        for sub in subcomponents:
            sub.set_hparam(hparam_name, val)
        return subcomponents
    @staticmethod
    def add(multi, source, component):
        """Copy every hparam of `source` into `multi`, prefixed with the
        component namespace.

        :param multi: destination hparams object, set the values of this
        :param source: the source hparams object, whose values we'll use
        :param component: namespace prefix for the copied params
        :return: the updated `multi`
        :raises ValueError: if any argument is None
        """
        if multi is None or source is None or component is None:
            raise ValueError("One or more arguments was not defined.")
        for name in source.values():
            HParamMulti.set_param(multi, name, source.get(name), component)
        return multi
    @staticmethod
    def override(multi, target, component):
        """Overwrite each hparam in `target` with its namespaced value
        taken from `multi`.

        :raises ValueError: if any argument is None
        """
        if multi is None or target is None or component is None:
            raise ValueError("One or more arguments was not defined.")
        for name in target.values():
            target.set_hparam(name, multi.get(component + '_' + name))
        return target
    @staticmethod
    def set_param(multi, param, value, component):
        """Set one namespaced hparam value on `multi`, creating it first
        when it does not exist yet."""
        namespaced = component + '_' + param
        if namespaced in multi:
            multi.set_hparam(namespaced, value)
        else:
            multi.add_hparam(namespaced, value)
| """HParamMulti class."""
class Hparammulti:
"""
Static helper methods for managing multiple hyperparam objects.
The use case is for Components that have sub components.
We cannot maintain state, because the flat tf hyperparam object is selectively overridden outside of the component.
"""
@staticmethod
def set_hparam_in_subcomponents(subcomponents, hparam_name, val):
"""Sets the common hparams to sub components."""
for comp in subcomponents:
comp.set_hparam(hparam_name, val)
return subcomponents
@staticmethod
def add(multi, source, component):
"""
Prepend component namespace to hparams of source, and store in multi.
:param multi: destination hparams object, set the values of this
:param source: the source hparams object, whose values we'll use to set target
:param component: the component that source corresponds to, used as namespace in target
:return: the target hparam object
"""
if multi is None or source is None or component is None:
raise value_error('One or more arguments was not defined.')
for param in source.values():
HParamMulti.set_param(multi, param, source.get(param), component)
return multi
@staticmethod
def override(multi, target, component):
""" Override hparams in target with corresponding value from multi """
if multi is None or target is None or component is None:
raise value_error('One or more arguments was not defined.')
for param in target.values():
param_multi = component + '_' + param
target.set_hparam(param, multi.get(param_multi))
return target
@staticmethod
def set_param(multi, param, value, component):
""" Set an individual hparam value in multi for a given component namespace, if it doesn't exist, add it """
param_multi = component + '_' + param
if multi.__contains__(param_multi):
multi.set_hparam(param_multi, value)
else:
multi.add_hparam(param_multi, value) |
_base_ = ['../../_base_/default_runtime.py']
# dataset settings
dataset_type = 'MOTChallengeDataset'
img_norm_cfg = dict(
mean=[0, 0, 0], std=[255, 255, 255], to_rgb=True)
train_pipeline = [
dict(type='LoadMultiImagesFromFile', to_float32=True),
dict(type='SeqLoadAnnotations', with_bbox=True, with_track=True),
dict(
type='SeqResize',
img_scale=(1280, 720),
share_params=True,
ratio_range=(0.8, 1.2),
keep_ratio=True,
bbox_clip_border=False),
dict(type='SeqPhotoMetricDistortion', share_params=True),
dict(
type='SeqRandomCrop',
share_params=False,
crop_size=(1088, 1088),
bbox_clip_border=False),
dict(type='SeqRandomFlip', share_params=True, flip_ratio=0.5),
dict(type='SeqNormalize', **img_norm_cfg),
dict(type='SeqPad', size_divisor=32),
dict(type='MatchInstances', skip_nomatch=True),
dict(
type='VideoCollect',
keys=[
'img', 'gt_bboxes', 'gt_labels', 'gt_match_indices',
'gt_instance_ids'
]),
dict(type='SeqDefaultFormatBundle', ref_prefix='ref')
]
test_pipeline = [
dict(type='LoadImageFromFile'),
#dict(type='LoadDetections'),
dict(
type='MultiScaleFlipAug',
img_scale=(1280, 720),#resize img to this (w,h). can be [(w1,h1), (w2,h2), ...]
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.0),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='VideoCollect', keys=['img'])
])
]
data_root = 'data/MOT17/'
data = dict(
samples_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
visibility_thr=-1,
ann_file=data_root + 'annotations/half-train_cocoformat.json',
img_prefix=data_root + 'train',
ref_img_sampler=dict(
num_ref_imgs=1,
frame_range=10,
filter_key_img=True,
method='uniform'),
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/half-val_cocoformat.json',
img_prefix=data_root + 'train',
ref_img_sampler=None,
detection_file=data_root + 'annotations/half-val_detections.pkl',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/half-val_cocoformat.json',
img_prefix=data_root + 'train',
ref_img_sampler=None,
detection_file=data_root + 'annotations/half-val_detections.pkl',
pipeline=test_pipeline))
model = dict(
type='DeepSORT',
pretrains=dict(
detector='/data/taofuyu/snapshot/track_det/v2/latest.pth'),
detector=dict(
type='FasterRCNN',
#pretrained='/data/taofuyu/snapshot/track_det/v1/latest.pth',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
clip_border=False,
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0)),
roi_head=dict(
type='StandardRoIHead',
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=8,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
clip_border=False,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0))),
# model training and testing settings
train_cfg=dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=-1,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_pre=2000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)),
test_cfg=dict(
rpn=dict(
nms_pre=1000,
max_per_img=1000,
nms=dict(type='nms', iou_threshold=0.7),
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_threshold=0.5),
max_per_img=100)
# soft-nms is also supported for rcnn testing
# e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05)
)),
motion=dict(type='KalmanFilter', center_only=False),
tracker=dict(type='SortTracker', obj_score_thr=0.5, match_iou_thr=0.5, reid=None)
)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=100,
warmup_ratio=1.0 / 100,
step=[3])
# runtime settings
total_epochs = 4
evaluation = dict(metric=['bbox', 'track'], interval=1)
search_metrics = ['MOTA', 'IDF1', 'FN', 'FP', 'IDs', 'MT', 'ML']
| _base_ = ['../../_base_/default_runtime.py']
dataset_type = 'MOTChallengeDataset'
img_norm_cfg = dict(mean=[0, 0, 0], std=[255, 255, 255], to_rgb=True)
train_pipeline = [dict(type='LoadMultiImagesFromFile', to_float32=True), dict(type='SeqLoadAnnotations', with_bbox=True, with_track=True), dict(type='SeqResize', img_scale=(1280, 720), share_params=True, ratio_range=(0.8, 1.2), keep_ratio=True, bbox_clip_border=False), dict(type='SeqPhotoMetricDistortion', share_params=True), dict(type='SeqRandomCrop', share_params=False, crop_size=(1088, 1088), bbox_clip_border=False), dict(type='SeqRandomFlip', share_params=True, flip_ratio=0.5), dict(type='SeqNormalize', **img_norm_cfg), dict(type='SeqPad', size_divisor=32), dict(type='MatchInstances', skip_nomatch=True), dict(type='VideoCollect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_match_indices', 'gt_instance_ids']), dict(type='SeqDefaultFormatBundle', ref_prefix='ref')]
test_pipeline = [dict(type='LoadImageFromFile'), dict(type='MultiScaleFlipAug', img_scale=(1280, 720), flip=False, transforms=[dict(type='Resize', keep_ratio=True), dict(type='RandomFlip', flip_ratio=0.0), dict(type='Normalize', **img_norm_cfg), dict(type='Pad', size_divisor=32), dict(type='ImageToTensor', keys=['img']), dict(type='VideoCollect', keys=['img'])])]
data_root = 'data/MOT17/'
data = dict(samples_per_gpu=2, workers_per_gpu=2, train=dict(type=dataset_type, visibility_thr=-1, ann_file=data_root + 'annotations/half-train_cocoformat.json', img_prefix=data_root + 'train', ref_img_sampler=dict(num_ref_imgs=1, frame_range=10, filter_key_img=True, method='uniform'), pipeline=train_pipeline), val=dict(type=dataset_type, ann_file=data_root + 'annotations/half-val_cocoformat.json', img_prefix=data_root + 'train', ref_img_sampler=None, detection_file=data_root + 'annotations/half-val_detections.pkl', pipeline=test_pipeline), test=dict(type=dataset_type, ann_file=data_root + 'annotations/half-val_cocoformat.json', img_prefix=data_root + 'train', ref_img_sampler=None, detection_file=data_root + 'annotations/half-val_detections.pkl', pipeline=test_pipeline))
model = dict(type='DeepSORT', pretrains=dict(detector='/data/taofuyu/snapshot/track_det/v2/latest.pth'), detector=dict(type='FasterRCNN', backbone=dict(type='ResNet', depth=50, num_stages=4, out_indices=(0, 1, 2, 3), frozen_stages=1, norm_cfg=dict(type='BN', requires_grad=True), norm_eval=True, style='pytorch'), neck=dict(type='FPN', in_channels=[256, 512, 1024, 2048], out_channels=256, num_outs=5), rpn_head=dict(type='RPNHead', in_channels=256, feat_channels=256, anchor_generator=dict(type='AnchorGenerator', scales=[8], ratios=[0.5, 1.0, 2.0], strides=[4, 8, 16, 32, 64]), bbox_coder=dict(type='DeltaXYWHBBoxCoder', clip_border=False, target_means=[0.0, 0.0, 0.0, 0.0], target_stds=[1.0, 1.0, 1.0, 1.0]), loss_cls=dict(type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), loss_bbox=dict(type='L1Loss', loss_weight=1.0)), roi_head=dict(type='StandardRoIHead', bbox_roi_extractor=dict(type='SingleRoIExtractor', roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), out_channels=256, featmap_strides=[4, 8, 16, 32]), bbox_head=dict(type='Shared2FCBBoxHead', in_channels=256, fc_out_channels=1024, roi_feat_size=7, num_classes=8, bbox_coder=dict(type='DeltaXYWHBBoxCoder', clip_border=False, target_means=[0.0, 0.0, 0.0, 0.0], target_stds=[0.1, 0.1, 0.2, 0.2]), reg_class_agnostic=False, loss_cls=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), loss_bbox=dict(type='L1Loss', loss_weight=1.0))), train_cfg=dict(rpn=dict(assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.7, neg_iou_thr=0.3, min_pos_iou=0.3, match_low_quality=True, ignore_iof_thr=-1), sampler=dict(type='RandomSampler', num=256, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=False), allowed_border=-1, pos_weight=-1, debug=False), rpn_proposal=dict(nms_pre=2000, max_per_img=1000, nms=dict(type='nms', iou_threshold=0.7), min_bbox_size=0), rcnn=dict(assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.5, neg_iou_thr=0.5, min_pos_iou=0.5, match_low_quality=False, ignore_iof_thr=-1), 
sampler=dict(type='RandomSampler', num=512, pos_fraction=0.25, neg_pos_ub=-1, add_gt_as_proposals=True), pos_weight=-1, debug=False)), test_cfg=dict(rpn=dict(nms_pre=1000, max_per_img=1000, nms=dict(type='nms', iou_threshold=0.7), min_bbox_size=0), rcnn=dict(score_thr=0.05, nms=dict(type='nms', iou_threshold=0.5), max_per_img=100))), motion=dict(type='KalmanFilter', center_only=False), tracker=dict(type='SortTracker', obj_score_thr=0.5, match_iou_thr=0.5, reid=None))
lr_config = dict(policy='step', warmup='linear', warmup_iters=100, warmup_ratio=1.0 / 100, step=[3])
total_epochs = 4
evaluation = dict(metric=['bbox', 'track'], interval=1)
search_metrics = ['MOTA', 'IDF1', 'FN', 'FP', 'IDs', 'MT', 'ML'] |
def calculate_best_trade(prices):
# Check if there are less than two prices in the list
# If so, the function cannot calculate max profit
# Else, there are at least two prices in the list and so run the function
if len(prices) < 2:
print("List of prices does not have at least two elements! Cannot run the function.")
else:
# Initialize desired number of shares
num_shares = 10000
# Initialize the low and high prices
min_price = 0
max_price = 0
# Iterate over each price in the prices list
for price in prices:
# Check to see if current prices is the first entry
# If so, set the min and max prices as the first entry
if min_price == 0 and max_price == 0:
min_price = price
max_price = price
# Check if price is less than the min price
# If so, set the min price to the current price
elif price < min_price:
min_price = price
# Check if price is greater than the max price
# If so, set the max price to the current price
elif price > max_price:
max_price = price
# Calculate the profit of the trade and round to two decimal places
profit = round((max_price - min_price) * num_shares, 2)
# Return both variables
return profit
# List of stock prices for IAG between 10 AM and 11 AM (5 minute interval)
prices = [1.42, 1.32, 1.45, 1.20, 1.34, 1.74, 1.10, 1.89, 1.42, 1.90, 1.80, 1.85]
# Call the function
best_profit = calculate_best_trade(prices)
# Print the results of the function
print(f"The best profit is ${best_profit}.")
| def calculate_best_trade(prices):
if len(prices) < 2:
print('List of prices does not have at least two elements! Cannot run the function.')
else:
num_shares = 10000
min_price = 0
max_price = 0
for price in prices:
if min_price == 0 and max_price == 0:
min_price = price
max_price = price
elif price < min_price:
min_price = price
elif price > max_price:
max_price = price
profit = round((max_price - min_price) * num_shares, 2)
return profit
prices = [1.42, 1.32, 1.45, 1.2, 1.34, 1.74, 1.1, 1.89, 1.42, 1.9, 1.8, 1.85]
best_profit = calculate_best_trade(prices)
print(f'The best profit is ${best_profit}.') |
#
# PySNMP MIB module Unisphere-Data-SONET-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Unisphere-Data-SONET-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:25:48 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint")
InterfaceIndexOrZero, ifIndex, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero", "ifIndex", "InterfaceIndex")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Counter32, Integer32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, iso, Counter64, IpAddress, Unsigned32, ObjectIdentity, Bits, Gauge32, MibIdentifier, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "iso", "Counter64", "IpAddress", "Unsigned32", "ObjectIdentity", "Bits", "Gauge32", "MibIdentifier", "ModuleIdentity")
TextualConvention, DisplayString, TruthValue, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue", "RowStatus")
usDataMibs, = mibBuilder.importSymbols("Unisphere-Data-MIBs", "usDataMibs")
UsdNextIfIndex, = mibBuilder.importSymbols("Unisphere-Data-TC", "UsdNextIfIndex")
usdSonetMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7))
usdSonetMIB.setRevisions(('2001-10-10 20:42', '2001-01-02 18:00', '1998-11-13 00:00',))
if mibBuilder.loadTexts: usdSonetMIB.setLastUpdated('200110102042Z')
if mibBuilder.loadTexts: usdSonetMIB.setOrganization('Unisphere Networks, Inc.')
class UsdSonetLineSpeed(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
namedValues = NamedValues(("sonetUnknownSpeed", 0), ("sonetOc1Stm0", 1), ("sonetOc3Stm1", 2), ("sonetOc12Stm3", 3), ("sonetOc48Stm16", 4))
class UsdSonetLogicalPathChannel(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 2147483647)
class UsdSonetPathHierarchy(TextualConvention, OctetString):
reference = 'RFC 854: NVT ASCII character set. See SNMPv2-TC.DisplayString DESCRIPTION for a summary.'
status = 'current'
displayHint = '32a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 32)
class UsdSonetVTType(TextualConvention, Integer32):
status = 'deprecated'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 3, 4, 5))
namedValues = NamedValues(("tribVT15TU11", 0), ("tribVT20TU12", 1), ("tribVT3", 3), ("tribVT6", 4), ("tribVT6c", 5))
usdSonetObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1))
usdSonetPathObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2))
usdSonetVTObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3))
usdSonetMediumTable = MibTable((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1), )
if mibBuilder.loadTexts: usdSonetMediumTable.setStatus('current')
usdSonetMediumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: usdSonetMediumEntry.setStatus('current')
usdSonetMediumType = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sonet", 1), ("sdh", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usdSonetMediumType.setStatus('deprecated')
usdSonetMediumLoopbackConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("sonetNoLoop", 0), ("sonetFacilityLoop", 1), ("sonetTerminalLoop", 2), ("sonetOtherLoop", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usdSonetMediumLoopbackConfig.setStatus('current')
usdSonetMediumTimingSource = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("loop", 0), ("internalModule", 1), ("internalChassis", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usdSonetMediumTimingSource.setStatus('current')
usdSonetMediumCircuitIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: usdSonetMediumCircuitIdentifier.setStatus('deprecated')
usdSonetPathCapabilityTable = MibTable((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1), )
if mibBuilder.loadTexts: usdSonetPathCapabilityTable.setStatus('current')
usdSonetPathCapabilityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: usdSonetPathCapabilityEntry.setStatus('current')
usdSonetPathRemoveFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathRemoveFlag.setStatus('current')
usdSonetPathChannelized = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathChannelized.setStatus('current')
usdSonetPathMaximumChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathMaximumChannels.setStatus('current')
usdSonetPathMinimumPathSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 4), UsdSonetLineSpeed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathMinimumPathSpeed.setStatus('current')
usdSonetPathMaximumPathSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 5), UsdSonetLineSpeed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathMaximumPathSpeed.setStatus('current')
usdSonetPathNextIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 2), UsdNextIfIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetPathNextIfIndex.setStatus('current')
usdSonetPathTable = MibTable((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3), )
if mibBuilder.loadTexts: usdSonetPathTable.setStatus('current')
usdSonetPathEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1), ).setIndexNames((0, "Unisphere-Data-SONET-MIB", "usdSonetPathIfIndex"))
if mibBuilder.loadTexts: usdSonetPathEntry.setStatus('current')
usdSonetPathIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: usdSonetPathIfIndex.setStatus('current')
usdSonetPathLogicalChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 2), UsdSonetLogicalPathChannel()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetPathLogicalChannel.setStatus('current')
usdSonetPathSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 3), UsdSonetLineSpeed()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetPathSpeed.setStatus('current')
usdSonetPathHierarchy = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 4), UsdSonetPathHierarchy()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetPathHierarchy.setStatus('current')
usdSonetPathLowerIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 5), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetPathLowerIfIndex.setStatus('current')
usdSonetPathRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 6), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetPathRowStatus.setStatus('current')
usdSonetVTNextIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 1), UsdNextIfIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: usdSonetVTNextIfIndex.setStatus('current')
usdSonetVTTable = MibTable((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2), )
if mibBuilder.loadTexts: usdSonetVTTable.setStatus('current')
usdSonetVTEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1), ).setIndexNames((0, "Unisphere-Data-SONET-MIB", "usdSonetVTIfIndex"))
if mibBuilder.loadTexts: usdSonetVTEntry.setStatus('current')
usdSonetVTIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: usdSonetVTIfIndex.setStatus('current')
usdSonetVTPathLogicalChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 2), UsdSonetLogicalPathChannel()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTPathLogicalChannel.setStatus('current')
usdSonetVTType = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 3), UsdSonetVTType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTType.setStatus('deprecated')
usdSonetVTPathPayload = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 4), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTPathPayload.setStatus('current')
usdSonetVTTributaryGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 5), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTTributaryGroup.setStatus('current')
usdSonetVTTributarySubChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 6), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTTributarySubChannel.setStatus('current')
usdSonetVTLowerIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 7), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTLowerIfIndex.setStatus('current')
usdSonetVTRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: usdSonetVTRowStatus.setStatus('current')
usdSonetConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4))
usdSonetCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1))
usdSonetGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2))
usdSonetCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 1)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetCompliance = usdSonetCompliance.setStatus('obsolete')
usdSonetCompliance2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 2)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetGroup"), ("Unisphere-Data-SONET-MIB", "usdSonetPathGroup"), ("Unisphere-Data-SONET-MIB", "usdSonetVirtualTributaryGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetCompliance2 = usdSonetCompliance2.setStatus('deprecated')
usdSonetCompliance3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 3)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetGroup2"), ("Unisphere-Data-SONET-MIB", "usdSonetPathGroup"), ("Unisphere-Data-SONET-MIB", "usdSonetVirtualTributaryGroup2"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetCompliance3 = usdSonetCompliance3.setStatus('current')
usdSonetGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 1)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetMediumType"), ("Unisphere-Data-SONET-MIB", "usdSonetMediumLoopbackConfig"), ("Unisphere-Data-SONET-MIB", "usdSonetMediumTimingSource"), ("Unisphere-Data-SONET-MIB", "usdSonetMediumCircuitIdentifier"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetGroup = usdSonetGroup.setStatus('deprecated')
usdSonetPathGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 2)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetPathRemoveFlag"), ("Unisphere-Data-SONET-MIB", "usdSonetPathChannelized"), ("Unisphere-Data-SONET-MIB", "usdSonetPathMaximumChannels"), ("Unisphere-Data-SONET-MIB", "usdSonetPathMinimumPathSpeed"), ("Unisphere-Data-SONET-MIB", "usdSonetPathMaximumPathSpeed"), ("Unisphere-Data-SONET-MIB", "usdSonetPathNextIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetPathLogicalChannel"), ("Unisphere-Data-SONET-MIB", "usdSonetPathSpeed"), ("Unisphere-Data-SONET-MIB", "usdSonetPathHierarchy"), ("Unisphere-Data-SONET-MIB", "usdSonetPathLowerIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetPathRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetPathGroup = usdSonetPathGroup.setStatus('current')
usdSonetVirtualTributaryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 3)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetVTNextIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetVTPathLogicalChannel"), ("Unisphere-Data-SONET-MIB", "usdSonetVTType"), ("Unisphere-Data-SONET-MIB", "usdSonetVTPathPayload"), ("Unisphere-Data-SONET-MIB", "usdSonetVTTributaryGroup"), ("Unisphere-Data-SONET-MIB", "usdSonetVTTributarySubChannel"), ("Unisphere-Data-SONET-MIB", "usdSonetVTLowerIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetVTRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetVirtualTributaryGroup = usdSonetVirtualTributaryGroup.setStatus('deprecated')
usdSonetGroup2 = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 4)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetMediumLoopbackConfig"), ("Unisphere-Data-SONET-MIB", "usdSonetMediumTimingSource"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetGroup2 = usdSonetGroup2.setStatus('current')
usdSonetVirtualTributaryGroup2 = ObjectGroup((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 5)).setObjects(("Unisphere-Data-SONET-MIB", "usdSonetVTNextIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetVTPathLogicalChannel"), ("Unisphere-Data-SONET-MIB", "usdSonetVTPathPayload"), ("Unisphere-Data-SONET-MIB", "usdSonetVTTributaryGroup"), ("Unisphere-Data-SONET-MIB", "usdSonetVTTributarySubChannel"), ("Unisphere-Data-SONET-MIB", "usdSonetVTLowerIfIndex"), ("Unisphere-Data-SONET-MIB", "usdSonetVTRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usdSonetVirtualTributaryGroup2 = usdSonetVirtualTributaryGroup2.setStatus('current')
mibBuilder.exportSymbols("Unisphere-Data-SONET-MIB", usdSonetMediumCircuitIdentifier=usdSonetMediumCircuitIdentifier, usdSonetVirtualTributaryGroup=usdSonetVirtualTributaryGroup, usdSonetVTObjects=usdSonetVTObjects, usdSonetPathSpeed=usdSonetPathSpeed, usdSonetGroup=usdSonetGroup, usdSonetPathIfIndex=usdSonetPathIfIndex, usdSonetVTTable=usdSonetVTTable, usdSonetPathMaximumChannels=usdSonetPathMaximumChannels, usdSonetPathEntry=usdSonetPathEntry, UsdSonetVTType=UsdSonetVTType, usdSonetObjects=usdSonetObjects, usdSonetPathMaximumPathSpeed=usdSonetPathMaximumPathSpeed, usdSonetCompliances=usdSonetCompliances, usdSonetMediumEntry=usdSonetMediumEntry, usdSonetPathChannelized=usdSonetPathChannelized, usdSonetPathCapabilityTable=usdSonetPathCapabilityTable, PYSNMP_MODULE_ID=usdSonetMIB, usdSonetPathMinimumPathSpeed=usdSonetPathMinimumPathSpeed, usdSonetPathNextIfIndex=usdSonetPathNextIfIndex, usdSonetCompliance2=usdSonetCompliance2, usdSonetVTTributaryGroup=usdSonetVTTributaryGroup, usdSonetPathHierarchy=usdSonetPathHierarchy, usdSonetVTEntry=usdSonetVTEntry, usdSonetMediumTimingSource=usdSonetMediumTimingSource, usdSonetVTTributarySubChannel=usdSonetVTTributarySubChannel, UsdSonetLogicalPathChannel=UsdSonetLogicalPathChannel, usdSonetCompliance3=usdSonetCompliance3, usdSonetVTPathPayload=usdSonetVTPathPayload, usdSonetMIB=usdSonetMIB, usdSonetVTPathLogicalChannel=usdSonetVTPathLogicalChannel, UsdSonetLineSpeed=UsdSonetLineSpeed, usdSonetMediumLoopbackConfig=usdSonetMediumLoopbackConfig, usdSonetPathLowerIfIndex=usdSonetPathLowerIfIndex, usdSonetGroups=usdSonetGroups, usdSonetVTLowerIfIndex=usdSonetVTLowerIfIndex, usdSonetPathTable=usdSonetPathTable, usdSonetMediumType=usdSonetMediumType, usdSonetVirtualTributaryGroup2=usdSonetVirtualTributaryGroup2, usdSonetVTIfIndex=usdSonetVTIfIndex, usdSonetConformance=usdSonetConformance, usdSonetGroup2=usdSonetGroup2, usdSonetPathLogicalChannel=usdSonetPathLogicalChannel, UsdSonetPathHierarchy=UsdSonetPathHierarchy, 
usdSonetPathGroup=usdSonetPathGroup, usdSonetPathCapabilityEntry=usdSonetPathCapabilityEntry, usdSonetPathObjects=usdSonetPathObjects, usdSonetVTNextIfIndex=usdSonetVTNextIfIndex, usdSonetCompliance=usdSonetCompliance, usdSonetMediumTable=usdSonetMediumTable, usdSonetVTType=usdSonetVTType, usdSonetVTRowStatus=usdSonetVTRowStatus, usdSonetPathRemoveFlag=usdSonetPathRemoveFlag, usdSonetPathRowStatus=usdSonetPathRowStatus)
| (integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, constraints_intersection, value_size_constraint, constraints_union, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ValueSizeConstraint', 'ConstraintsUnion', 'SingleValueConstraint')
(interface_index_or_zero, if_index, interface_index) = mibBuilder.importSymbols('IF-MIB', 'InterfaceIndexOrZero', 'ifIndex', 'InterfaceIndex')
(notification_group, module_compliance, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance', 'ObjectGroup')
(counter32, integer32, notification_type, mib_scalar, mib_table, mib_table_row, mib_table_column, time_ticks, iso, counter64, ip_address, unsigned32, object_identity, bits, gauge32, mib_identifier, module_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'Integer32', 'NotificationType', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'TimeTicks', 'iso', 'Counter64', 'IpAddress', 'Unsigned32', 'ObjectIdentity', 'Bits', 'Gauge32', 'MibIdentifier', 'ModuleIdentity')
(textual_convention, display_string, truth_value, row_status) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString', 'TruthValue', 'RowStatus')
(us_data_mibs,) = mibBuilder.importSymbols('Unisphere-Data-MIBs', 'usDataMibs')
(usd_next_if_index,) = mibBuilder.importSymbols('Unisphere-Data-TC', 'UsdNextIfIndex')
usd_sonet_mib = module_identity((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7))
usdSonetMIB.setRevisions(('2001-10-10 20:42', '2001-01-02 18:00', '1998-11-13 00:00'))
if mibBuilder.loadTexts:
usdSonetMIB.setLastUpdated('200110102042Z')
if mibBuilder.loadTexts:
usdSonetMIB.setOrganization('Unisphere Networks, Inc.')
class Usdsonetlinespeed(TextualConvention, Integer32):
status = 'current'
subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(0, 1, 2, 3, 4))
named_values = named_values(('sonetUnknownSpeed', 0), ('sonetOc1Stm0', 1), ('sonetOc3Stm1', 2), ('sonetOc12Stm3', 3), ('sonetOc48Stm16', 4))
class Usdsonetlogicalpathchannel(TextualConvention, Integer32):
status = 'current'
subtype_spec = Integer32.subtypeSpec + value_range_constraint(0, 2147483647)
class Usdsonetpathhierarchy(TextualConvention, OctetString):
reference = 'RFC 854: NVT ASCII character set. See SNMPv2-TC.DisplayString DESCRIPTION for a summary.'
status = 'current'
display_hint = '32a'
subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 32)
class Usdsonetvttype(TextualConvention, Integer32):
status = 'deprecated'
subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(0, 1, 3, 4, 5))
named_values = named_values(('tribVT15TU11', 0), ('tribVT20TU12', 1), ('tribVT3', 3), ('tribVT6', 4), ('tribVT6c', 5))
usd_sonet_objects = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1))
usd_sonet_path_objects = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2))
usd_sonet_vt_objects = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3))
usd_sonet_medium_table = mib_table((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1))
if mibBuilder.loadTexts:
usdSonetMediumTable.setStatus('current')
usd_sonet_medium_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1)).setIndexNames((0, 'IF-MIB', 'ifIndex'))
if mibBuilder.loadTexts:
usdSonetMediumEntry.setStatus('current')
usd_sonet_medium_type = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('sonet', 1), ('sdh', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
usdSonetMediumType.setStatus('deprecated')
usd_sonet_medium_loopback_config = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3))).clone(namedValues=named_values(('sonetNoLoop', 0), ('sonetFacilityLoop', 1), ('sonetTerminalLoop', 2), ('sonetOtherLoop', 3)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
usdSonetMediumLoopbackConfig.setStatus('current')
usd_sonet_medium_timing_source = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('loop', 0), ('internalModule', 1), ('internalChassis', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
usdSonetMediumTimingSource.setStatus('current')
usd_sonet_medium_circuit_identifier = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 1, 1, 1, 4), display_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
usdSonetMediumCircuitIdentifier.setStatus('deprecated')
usd_sonet_path_capability_table = mib_table((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1))
if mibBuilder.loadTexts:
usdSonetPathCapabilityTable.setStatus('current')
usd_sonet_path_capability_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1)).setIndexNames((0, 'IF-MIB', 'ifIndex'))
if mibBuilder.loadTexts:
usdSonetPathCapabilityEntry.setStatus('current')
usd_sonet_path_remove_flag = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 1), truth_value()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathRemoveFlag.setStatus('current')
usd_sonet_path_channelized = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 2), truth_value()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathChannelized.setStatus('current')
usd_sonet_path_maximum_channels = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 3), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathMaximumChannels.setStatus('current')
usd_sonet_path_minimum_path_speed = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 4), usd_sonet_line_speed()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathMinimumPathSpeed.setStatus('current')
usd_sonet_path_maximum_path_speed = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 1, 1, 5), usd_sonet_line_speed()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathMaximumPathSpeed.setStatus('current')
usd_sonet_path_next_if_index = mib_scalar((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 2), usd_next_if_index()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetPathNextIfIndex.setStatus('current')
usd_sonet_path_table = mib_table((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3))
if mibBuilder.loadTexts:
usdSonetPathTable.setStatus('current')
usd_sonet_path_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1)).setIndexNames((0, 'Unisphere-Data-SONET-MIB', 'usdSonetPathIfIndex'))
if mibBuilder.loadTexts:
usdSonetPathEntry.setStatus('current')
usd_sonet_path_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 1), interface_index())
if mibBuilder.loadTexts:
usdSonetPathIfIndex.setStatus('current')
usd_sonet_path_logical_channel = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 2), usd_sonet_logical_path_channel()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetPathLogicalChannel.setStatus('current')
usd_sonet_path_speed = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 3), usd_sonet_line_speed()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetPathSpeed.setStatus('current')
usd_sonet_path_hierarchy = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 4), usd_sonet_path_hierarchy()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetPathHierarchy.setStatus('current')
usd_sonet_path_lower_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 5), interface_index_or_zero()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetPathLowerIfIndex.setStatus('current')
usd_sonet_path_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 2, 3, 1, 6), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetPathRowStatus.setStatus('current')
usd_sonet_vt_next_if_index = mib_scalar((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 1), usd_next_if_index()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
usdSonetVTNextIfIndex.setStatus('current')
usd_sonet_vt_table = mib_table((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2))
if mibBuilder.loadTexts:
usdSonetVTTable.setStatus('current')
usd_sonet_vt_entry = mib_table_row((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1)).setIndexNames((0, 'Unisphere-Data-SONET-MIB', 'usdSonetVTIfIndex'))
if mibBuilder.loadTexts:
usdSonetVTEntry.setStatus('current')
usd_sonet_vt_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 1), interface_index())
if mibBuilder.loadTexts:
usdSonetVTIfIndex.setStatus('current')
usd_sonet_vt_path_logical_channel = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 2), usd_sonet_logical_path_channel()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTPathLogicalChannel.setStatus('current')
usd_sonet_vt_type = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 3), usd_sonet_vt_type()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTType.setStatus('deprecated')
usd_sonet_vt_path_payload = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 4), unsigned32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTPathPayload.setStatus('current')
usd_sonet_vt_tributary_group = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 5), unsigned32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTTributaryGroup.setStatus('current')
usd_sonet_vt_tributary_sub_channel = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 6), unsigned32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTTributarySubChannel.setStatus('current')
usd_sonet_vt_lower_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 7), interface_index_or_zero()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTLowerIfIndex.setStatus('current')
usd_sonet_vt_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 3, 2, 1, 8), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
usdSonetVTRowStatus.setStatus('current')
usd_sonet_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4))
usd_sonet_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1))
usd_sonet_groups = mib_identifier((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2))
usd_sonet_compliance = module_compliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 1)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_compliance = usdSonetCompliance.setStatus('obsolete')
usd_sonet_compliance2 = module_compliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 2)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetGroup'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathGroup'), ('Unisphere-Data-SONET-MIB', 'usdSonetVirtualTributaryGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_compliance2 = usdSonetCompliance2.setStatus('deprecated')
usd_sonet_compliance3 = module_compliance((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 1, 3)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetGroup2'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathGroup'), ('Unisphere-Data-SONET-MIB', 'usdSonetVirtualTributaryGroup2'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_compliance3 = usdSonetCompliance3.setStatus('current')
usd_sonet_group = object_group((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 1)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetMediumType'), ('Unisphere-Data-SONET-MIB', 'usdSonetMediumLoopbackConfig'), ('Unisphere-Data-SONET-MIB', 'usdSonetMediumTimingSource'), ('Unisphere-Data-SONET-MIB', 'usdSonetMediumCircuitIdentifier'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_group = usdSonetGroup.setStatus('deprecated')
usd_sonet_path_group = object_group((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 2)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetPathRemoveFlag'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathChannelized'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathMaximumChannels'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathMinimumPathSpeed'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathMaximumPathSpeed'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathNextIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathLogicalChannel'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathSpeed'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathHierarchy'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathLowerIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetPathRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_path_group = usdSonetPathGroup.setStatus('current')
usd_sonet_virtual_tributary_group = object_group((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 3)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetVTNextIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTPathLogicalChannel'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTType'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTPathPayload'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTTributaryGroup'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTTributarySubChannel'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTLowerIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_virtual_tributary_group = usdSonetVirtualTributaryGroup.setStatus('deprecated')
usd_sonet_group2 = object_group((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 4)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetMediumLoopbackConfig'), ('Unisphere-Data-SONET-MIB', 'usdSonetMediumTimingSource'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_group2 = usdSonetGroup2.setStatus('current')
usd_sonet_virtual_tributary_group2 = object_group((1, 3, 6, 1, 4, 1, 4874, 2, 2, 7, 4, 2, 5)).setObjects(('Unisphere-Data-SONET-MIB', 'usdSonetVTNextIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTPathLogicalChannel'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTPathPayload'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTTributaryGroup'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTTributarySubChannel'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTLowerIfIndex'), ('Unisphere-Data-SONET-MIB', 'usdSonetVTRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
usd_sonet_virtual_tributary_group2 = usdSonetVirtualTributaryGroup2.setStatus('current')
mibBuilder.exportSymbols('Unisphere-Data-SONET-MIB', usdSonetMediumCircuitIdentifier=usdSonetMediumCircuitIdentifier, usdSonetVirtualTributaryGroup=usdSonetVirtualTributaryGroup, usdSonetVTObjects=usdSonetVTObjects, usdSonetPathSpeed=usdSonetPathSpeed, usdSonetGroup=usdSonetGroup, usdSonetPathIfIndex=usdSonetPathIfIndex, usdSonetVTTable=usdSonetVTTable, usdSonetPathMaximumChannels=usdSonetPathMaximumChannels, usdSonetPathEntry=usdSonetPathEntry, UsdSonetVTType=UsdSonetVTType, usdSonetObjects=usdSonetObjects, usdSonetPathMaximumPathSpeed=usdSonetPathMaximumPathSpeed, usdSonetCompliances=usdSonetCompliances, usdSonetMediumEntry=usdSonetMediumEntry, usdSonetPathChannelized=usdSonetPathChannelized, usdSonetPathCapabilityTable=usdSonetPathCapabilityTable, PYSNMP_MODULE_ID=usdSonetMIB, usdSonetPathMinimumPathSpeed=usdSonetPathMinimumPathSpeed, usdSonetPathNextIfIndex=usdSonetPathNextIfIndex, usdSonetCompliance2=usdSonetCompliance2, usdSonetVTTributaryGroup=usdSonetVTTributaryGroup, usdSonetPathHierarchy=usdSonetPathHierarchy, usdSonetVTEntry=usdSonetVTEntry, usdSonetMediumTimingSource=usdSonetMediumTimingSource, usdSonetVTTributarySubChannel=usdSonetVTTributarySubChannel, UsdSonetLogicalPathChannel=UsdSonetLogicalPathChannel, usdSonetCompliance3=usdSonetCompliance3, usdSonetVTPathPayload=usdSonetVTPathPayload, usdSonetMIB=usdSonetMIB, usdSonetVTPathLogicalChannel=usdSonetVTPathLogicalChannel, UsdSonetLineSpeed=UsdSonetLineSpeed, usdSonetMediumLoopbackConfig=usdSonetMediumLoopbackConfig, usdSonetPathLowerIfIndex=usdSonetPathLowerIfIndex, usdSonetGroups=usdSonetGroups, usdSonetVTLowerIfIndex=usdSonetVTLowerIfIndex, usdSonetPathTable=usdSonetPathTable, usdSonetMediumType=usdSonetMediumType, usdSonetVirtualTributaryGroup2=usdSonetVirtualTributaryGroup2, usdSonetVTIfIndex=usdSonetVTIfIndex, usdSonetConformance=usdSonetConformance, usdSonetGroup2=usdSonetGroup2, usdSonetPathLogicalChannel=usdSonetPathLogicalChannel, UsdSonetPathHierarchy=UsdSonetPathHierarchy, 
usdSonetPathGroup=usdSonetPathGroup, usdSonetPathCapabilityEntry=usdSonetPathCapabilityEntry, usdSonetPathObjects=usdSonetPathObjects, usdSonetVTNextIfIndex=usdSonetVTNextIfIndex, usdSonetCompliance=usdSonetCompliance, usdSonetMediumTable=usdSonetMediumTable, usdSonetVTType=usdSonetVTType, usdSonetVTRowStatus=usdSonetVTRowStatus, usdSonetPathRemoveFlag=usdSonetPathRemoveFlag, usdSonetPathRowStatus=usdSonetPathRowStatus) |
"""
Tools to create and update EMA models.
"""
def create_ema_model(model):
""" Given a newly made network, detach all its parameters so its
parameters can serve as the EMA of another model. """
for param in model.parameters():
param.detach_()
return model
def update_ema_model(model, ema_model, alpha, iteration_num=None):
""" ema_param_t = alpha * param_t-1 + (1 - alpha) * param_t
Args:
model: model with gradient updates
ema_model: net with parameters that are an EMA of model
alpha: fractional weight put on the pre-update param value
"""
if iteration_num is not None:
# Use the true average until the exponential average is more correct
# iter=0 alpha=0, iter=1 alpha=0.5, iter=2 alpha=0.67, ...
alpha = min(1 - 1 / (iteration_num + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data = alpha * ema_param.data + (1 - alpha) * param.data | """
Tools to create and update EMA models.
"""
def create_ema_model(model):
""" Given a newly made network, detach all its parameters so its
parameters can serve as the EMA of another model. """
for param in model.parameters():
param.detach_()
return model
def update_ema_model(model, ema_model, alpha, iteration_num=None):
""" ema_param_t = alpha * param_t-1 + (1 - alpha) * param_t
Args:
model: model with gradient updates
ema_model: net with parameters that are an EMA of model
alpha: fractional weight put on the pre-update param value
"""
if iteration_num is not None:
alpha = min(1 - 1 / (iteration_num + 1), alpha)
for (ema_param, param) in zip(ema_model.parameters(), model.parameters()):
ema_param.data = alpha * ema_param.data + (1 - alpha) * param.data |
'''
It is often convenient to build a large DataFrame by parsing many files as DataFrames and concatenating them all at once. You'll do this here with three files, but, in principle, this approach can be used to combine data from dozens or hundreds of files.
Here, you'll work with DataFrames compiled from The Guardian's Olympic medal dataset.
pandas has been imported as pd and two lists have been pre-loaded: An empty list called medals, and medal_types, which contains the strings 'bronze', 'silver', and 'gold'.
'''
medals = []
medal_types = ['bronze', 'silver', 'gold']
for medal in medal_types:
# Create the file name: file_name
file_name = "%s_top5.csv" % medal
# Create list of column names: columns
columns = ['Country', medal]
# Read file_name into a DataFrame: df
medal_df = pd.read_csv(file_name, header=0, index_col='Country', names=columns)
# Append medal_df to medals
medals.append(medal_df)
# Concatenate medals horizontally: medals
medals = pd.concat(medals, axis='columns')
# Print medals
print(medals) | """
It is often convenient to build a large DataFrame by parsing many files as DataFrames and concatenating them all at once. You'll do this here with three files, but, in principle, this approach can be used to combine data from dozens or hundreds of files.
Here, you'll work with DataFrames compiled from The Guardian's Olympic medal dataset.
pandas has been imported as pd and two lists have been pre-loaded: An empty list called medals, and medal_types, which contains the strings 'bronze', 'silver', and 'gold'.
"""
medals = []
medal_types = ['bronze', 'silver', 'gold']
for medal in medal_types:
file_name = '%s_top5.csv' % medal
columns = ['Country', medal]
medal_df = pd.read_csv(file_name, header=0, index_col='Country', names=columns)
medals.append(medal_df)
medals = pd.concat(medals, axis='columns')
print(medals) |
'''
Many-to-many data merge
The final merging scenario occurs when both DataFrames do not have unique keys for a merge. What happens here is that for each duplicated key, every pairwise combination will be created.
Two example DataFrames that share common key values have been pre-loaded: df1 and df2. Another DataFrame df3, which is the result of df1 merged with df2, has been pre-loaded. All three DataFrames have been printed - look at the output and notice how pairwise combinations have been created. This example is to help you develop your intuition for many-to-many merges.
Here, you'll work with the site and visited DataFrames from before, and a new survey DataFrame. Your task is to merge site and visited as you did in the earlier exercises. You will then merge this merged DataFrame with survey.
Begin by exploring the site, visited, and survey DataFrames in the IPython Shell.
INSTRUCTIONS
100XP
-Merge the site and visited DataFrames on the 'name' column of site and 'site' column of visited, exactly as you did in the previous two exercises. Save the result as m2m.
-Merge the m2m and survey DataFrames on the 'ident' column of m2m and 'taken' column of survey.
-Hit 'Submit Answer' to print the first 20 lines of the merged DataFrame!
'''
# Merge site and visited: m2m
m2o = pd.merge(left=site, right=visited, left_on='name', right_on='site')
# Merge m2m and survey: m2m
m2m = pd.merge(left=m2o, right=survey, left_on='ident', right_on='taken')
# Print the first 20 lines of m2m
print(m2m.head(20))
| """
Many-to-many data merge
The final merging scenario occurs when both DataFrames do not have unique keys for a merge. What happens here is that for each duplicated key, every pairwise combination will be created.
Two example DataFrames that share common key values have been pre-loaded: df1 and df2. Another DataFrame df3, which is the result of df1 merged with df2, has been pre-loaded. All three DataFrames have been printed - look at the output and notice how pairwise combinations have been created. This example is to help you develop your intuition for many-to-many merges.
Here, you'll work with the site and visited DataFrames from before, and a new survey DataFrame. Your task is to merge site and visited as you did in the earlier exercises. You will then merge this merged DataFrame with survey.
Begin by exploring the site, visited, and survey DataFrames in the IPython Shell.
INSTRUCTIONS
100XP
-Merge the site and visited DataFrames on the 'name' column of site and 'site' column of visited, exactly as you did in the previous two exercises. Save the result as m2m.
-Merge the m2m and survey DataFrames on the 'ident' column of m2m and 'taken' column of survey.
-Hit 'Submit Answer' to print the first 20 lines of the merged DataFrame!
"""
m2o = pd.merge(left=site, right=visited, left_on='name', right_on='site')
m2m = pd.merge(left=m2o, right=survey, left_on='ident', right_on='taken')
print(m2m.head(20)) |
boxWidth = 50 # Width of square boxes inside grid
margin = 15 # Margin around grid
border = 2 # grid thickness
steps = 11 # no. of loki movement b/w two maze boxes in animation
# COLORS
grey = (67,67,67)
black = (0,0,0)
white = (255,255,255)
yellow = (50,226,249)
red = (1,23,182)
green = (0,114,1)
| box_width = 50
margin = 15
border = 2
steps = 11
grey = (67, 67, 67)
black = (0, 0, 0)
white = (255, 255, 255)
yellow = (50, 226, 249)
red = (1, 23, 182)
green = (0, 114, 1) |
#!/usr/bin/env python
# encoding: utf-8
version_info = (0, 2, 1)
version = ".".join(map(str, version_info))
| version_info = (0, 2, 1)
version = '.'.join(map(str, version_info)) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'Administrator'
class Screen(object):
@property
def width(self):
return self._width
@width.setter
def width(self, value):
if not isinstance(value, int):
raise ValueError('width must be an integer!')
self._width = value
@property
def height(self):
return self._height
@height.setter
def height(self, value):
if not isinstance(value, int):
raise ValueError('height must be an integer!')
self._height = value
@property
def resolution(self):
return self._height * self._width
#test
s = Screen()
s.width = 1024
s.height = 768
print(s.resolution)
assert s.resolution == 786432, '1024 * 768 = %d ?' % s.resolution
| __author__ = 'Administrator'
class Screen(object):
@property
def width(self):
return self._width
@width.setter
def width(self, value):
if not isinstance(value, int):
raise value_error('width must be an integer!')
self._width = value
@property
def height(self):
return self._height
@height.setter
def height(self, value):
if not isinstance(value, int):
raise value_error('height must be an integer!')
self._height = value
@property
def resolution(self):
return self._height * self._width
s = screen()
s.width = 1024
s.height = 768
print(s.resolution)
assert s.resolution == 786432, '1024 * 768 = %d ?' % s.resolution |
liste = list()
liste2 = [1,2,3,4]
| liste = list()
liste2 = [1, 2, 3, 4] |
#
# Connection Transport
#
# Exception classes used by this module.
class ExceptionPxTransport(ExceptionPexpect):
'''Raised for pxtransport exceptions.
'''
class pxtransport(object):
'''The carrier of the info.'''
def __init__(self, hostname=None, info=None):
if hostname is None and info is None:
raise ExceptionPxTransport("insufficient connection info")
self.info = info
if self.info is None:
self.info = find_info(hostname)
def executable(self):
'''Provide the program name to run'''
return ''
def exec_options(self):
'''Setup the executable's needed command line options'''
return []
def options(self):
'''Supply additional configuration parameters for pexpect'''
return []
| class Exceptionpxtransport(ExceptionPexpect):
"""Raised for pxtransport exceptions.
"""
class Pxtransport(object):
"""The carrier of the info."""
def __init__(self, hostname=None, info=None):
if hostname is None and info is None:
raise exception_px_transport('insufficient connection info')
self.info = info
if self.info is None:
self.info = find_info(hostname)
def executable(self):
"""Provide the program name to run"""
return ''
def exec_options(self):
"""Setup the executable's needed command line options"""
return []
def options(self):
"""Supply additional configuration parameters for pexpect"""
return [] |
while True:
nx, ny, w = input().split()
nx = int(nx)
ny = int(ny)
w = float(w)
if nx == ny and ny == w and w == 0:
break
xs = sorted(map(float, input().split()))
ys = sorted(map(float, input().split()))
xbroke = xs[0] - w / 2 > 0 or xs[-1] + w / 2 < 75
ybroke = ys[0] - w / 2 > 0 or ys[-1] + w / 2 < 100
if xbroke or ybroke:
print("NO")
continue
for i in range(1, nx):
if xs[i-1] + w / 2 < xs[i] - w / 2:
xbroke = True
break
for i in range(1, ny):
if ys[i-1] + w / 2 < ys[i] - w / 2:
ybroke = True
break
print("NO" if (xbroke or ybroke) else "YES")
| while True:
(nx, ny, w) = input().split()
nx = int(nx)
ny = int(ny)
w = float(w)
if nx == ny and ny == w and (w == 0):
break
xs = sorted(map(float, input().split()))
ys = sorted(map(float, input().split()))
xbroke = xs[0] - w / 2 > 0 or xs[-1] + w / 2 < 75
ybroke = ys[0] - w / 2 > 0 or ys[-1] + w / 2 < 100
if xbroke or ybroke:
print('NO')
continue
for i in range(1, nx):
if xs[i - 1] + w / 2 < xs[i] - w / 2:
xbroke = True
break
for i in range(1, ny):
if ys[i - 1] + w / 2 < ys[i] - w / 2:
ybroke = True
break
print('NO' if xbroke or ybroke else 'YES') |
class AttachableVolume:
"""
Base Mixin for requesting extra disk resources to k8s.
Sub classes should implement volume_spec specifiying a kubernetes PVC.
- size can be set by overriding volume_size_in_gb
- mount point can be set by overriding mount_location
"""
@property
def pvc_name(self):
"""
The pvc name which can be used by other resources
"""
return f"{self.volume_hash()}-moussaka-disk".lower()
@property
def short_pvc_name(self):
"""
a shorter version of the pvc name used
"""
return self.pvc_name[:60].replace("-", "")
def pod_volume_spec(self):
"""
specifies an item in the "Volumes" section of a pod definition
"""
pass
def pod_mount_spec(self):
"""
specifies volume_mounts block in a pod definition
"""
pass
def volume_hash(self):
"""
unique hash identifiying a volume within a pod
"""
pass
class EphemeralVolume(AttachableVolume):
"""
Mixin for requesting extra disk resources to k8s.
A default ephemeral disk is provided.
size can be set by overriding volume_size_in_gb
mount point can be set by overriding mount_location
Please be aware:
data in these volumes will be deleted once your task has ran
"""
def __init__(self, size_in_gb, mount_location="/mnt/data"):
super().__init__()
self.mount_location = mount_location
self.size_in_gb = size_in_gb
def volume_hash(self):
return hash(self.mount_location)
def pod_volume_spec(self):
return {
"volumes": [
{
"name": self.short_pvc_name,
"ephemeral": {
"volumeClaimTemplate": {
"spec": {
"accessModes": ["ReadWriteOnce"],
"storageClassName": "default",
"resources": {
"requests": {"storage": f"{self.size_in_gb}Gi"}
},
}
}
},
}
]
}
def pod_mount_spec(self):
return {
"volume_mounts": [
{"name": self.short_pvc_name, "mountPath": self.mount_location}
]
}
class AzureBlobStorageVolume(AttachableVolume):
"""
Returns a volume which mounts an azure storage container.
it assumes that the needed secret to access the underlying
azure storage account is available on k8s.
"""
def __init__(self, storage_account, storage_container):
super().__init__()
self.storage_account = storage_account
self.storage_container = storage_container
self.mount_location = f"/mnt/{storage_account}/{self.storage_container}"
def secret_name(self):
"""
returns the name of the k8s secret providing access to the blob storage
"""
return f"storage-sas-{self.storage_account}"
def volume_hash(self):
return hash(self.storage_account + self.storage_container + self.mount_location)
def pod_volume_spec(self):
return {
"volumes": [
{
"name": self.short_pvc_name,
"csi": {
"driver": "blob.csi.azure.com",
"volumeAttributes": {
"containerName": self.storage_container,
"secretName": self.secret_name(),
"mountOptions": "-o allow_other --file-cache-timeout-in-seconds=120",
},
},
}
]
}
def pod_mount_spec(self):
return {
"volume_mounts": [
{"name": self.short_pvc_name, "mountPath": self.mount_location}
]
}
| class Attachablevolume:
"""
Base Mixin for requesting extra disk resources to k8s.
Sub classes should implement volume_spec specifiying a kubernetes PVC.
- size can be set by overriding volume_size_in_gb
- mount point can be set by overriding mount_location
"""
@property
def pvc_name(self):
"""
The pvc name which can be used by other resources
"""
return f'{self.volume_hash()}-moussaka-disk'.lower()
@property
def short_pvc_name(self):
"""
a shorter version of the pvc name used
"""
return self.pvc_name[:60].replace('-', '')
def pod_volume_spec(self):
"""
specifies an item in the "Volumes" section of a pod definition
"""
pass
def pod_mount_spec(self):
"""
specifies volume_mounts block in a pod definition
"""
pass
def volume_hash(self):
"""
unique hash identifiying a volume within a pod
"""
pass
class Ephemeralvolume(AttachableVolume):
"""
Mixin for requesting extra disk resources to k8s.
A default ephemeral disk is provided.
size can be set by overriding volume_size_in_gb
mount point can be set by overriding mount_location
Please be aware:
data in these volumes will be deleted once your task has ran
"""
def __init__(self, size_in_gb, mount_location='/mnt/data'):
super().__init__()
self.mount_location = mount_location
self.size_in_gb = size_in_gb
def volume_hash(self):
return hash(self.mount_location)
def pod_volume_spec(self):
return {'volumes': [{'name': self.short_pvc_name, 'ephemeral': {'volumeClaimTemplate': {'spec': {'accessModes': ['ReadWriteOnce'], 'storageClassName': 'default', 'resources': {'requests': {'storage': f'{self.size_in_gb}Gi'}}}}}}]}
def pod_mount_spec(self):
return {'volume_mounts': [{'name': self.short_pvc_name, 'mountPath': self.mount_location}]}
class Azureblobstoragevolume(AttachableVolume):
"""
Returns a volume which mounts an azure storage container.
it assumes that the needed secret to access the underlying
azure storage account is available on k8s.
"""
def __init__(self, storage_account, storage_container):
super().__init__()
self.storage_account = storage_account
self.storage_container = storage_container
self.mount_location = f'/mnt/{storage_account}/{self.storage_container}'
def secret_name(self):
"""
returns the name of the k8s secret providing access to the blob storage
"""
return f'storage-sas-{self.storage_account}'
def volume_hash(self):
return hash(self.storage_account + self.storage_container + self.mount_location)
def pod_volume_spec(self):
return {'volumes': [{'name': self.short_pvc_name, 'csi': {'driver': 'blob.csi.azure.com', 'volumeAttributes': {'containerName': self.storage_container, 'secretName': self.secret_name(), 'mountOptions': '-o allow_other --file-cache-timeout-in-seconds=120'}}}]}
def pod_mount_spec(self):
return {'volume_mounts': [{'name': self.short_pvc_name, 'mountPath': self.mount_location}]} |
# -*- coding: utf-8 -*-
# For simplicity, these values are shared among both threads and comments.
MAX_THREADS = 1000
MAX_NAME = 50
MAX_DESCRIPTION = 3000
MAX_ADMINS = 2
# status
DEAD = 0
ALIVE = 1
STATUS = {
DEAD: 'dead',
ALIVE: 'alive',
}
BASE_SUBREDDITS = {'biology': ['biochemistry',
'bioengineering',
'bioinformatics',
'biophysics',
'evolution',
'genetics',
'genomics',
'molecular_biology',
'systems_biology',
'software'],
'statistics': ['statistics',
'machine_learning']
}
| max_threads = 1000
max_name = 50
max_description = 3000
max_admins = 2
dead = 0
alive = 1
status = {DEAD: 'dead', ALIVE: 'alive'}
base_subreddits = {'biology': ['biochemistry', 'bioengineering', 'bioinformatics', 'biophysics', 'evolution', 'genetics', 'genomics', 'molecular_biology', 'systems_biology', 'software'], 'statistics': ['statistics', 'machine_learning']} |
# directory list
DATA_DIR = "./plugins/data/"
# cv window
MAIN_WINDOW_NAME = 'main'
MASK_WINDOW_NAME = 'mask'
KEY_WAIT_DURATION = 1
| data_dir = './plugins/data/'
main_window_name = 'main'
mask_window_name = 'mask'
key_wait_duration = 1 |
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def emit_binary(go,
    name="",
    source = None,
    gc_linkopts = [],
    linkstamp=None,
    version_file=None,
    info_file=None):
  """See go/toolchains.rst#binary for full documentation.

  Compiles `source` into an archive and links it into an executable.

  Args:
    go: the Go context object providing archive/declare_file/link actions.
    name: name of the binary to produce; required (empty name fails).
    source: the source inputs compiled into the archive.
    gc_linkopts: extra gc linker options forwarded to the link step.
    linkstamp: optional linkstamp data forwarded to the link step.
    version_file: optional version-stamp file forwarded to the link step.
    info_file: optional build-info file forwarded to the link step.

  Returns:
    A (archive, executable) pair: the compiled archive and the declared,
    linked executable file.
  """
  if name == "": fail("name is a required parameter")
  archive = go.archive(go, source)
  # The declared output carries the platform's executable extension
  # (e.g. ".exe" on Windows, "" elsewhere).
  executable = go.declare_file(go, name=name, ext=go.exe_extension)
  go.link(go,
      archive=archive,
      executable=executable,
      gc_linkopts=gc_linkopts,
      linkstamp=linkstamp,
      version_file=version_file,
      info_file=info_file,
  )
  return archive, executable
| def emit_binary(go, name='', source=None, gc_linkopts=[], linkstamp=None, version_file=None, info_file=None):
"""See go/toolchains.rst#binary for full documentation."""
if name == '':
fail('name is a required parameter')
archive = go.archive(go, source)
executable = go.declare_file(go, name=name, ext=go.exe_extension)
go.link(go, archive=archive, executable=executable, gc_linkopts=gc_linkopts, linkstamp=linkstamp, version_file=version_file, info_file=info_file)
return (archive, executable) |
def split_tag(tag):
    """Split an NER tag into its (state, label) parts.

    'O' and the CoNLL placeholder '-X-' both denote "outside" and map to
    ('O', None).  Any other tag is expected to look like 'B-ORG' / 'I-PER'.

    Args:
        tag: a single tag string, e.g. 'O', '-X-', 'B-ORG'.

    Returns:
        A (state, label) tuple, where state is the scheme prefix
        ('O', 'B', 'I', ...) and label is the entity type, or None for
        outside tags.
    """
    # '-X-' is the CoNLL non-token placeholder; treat it like 'O'.
    if tag in ('O', '-X-'):
        return 'O', None
    # maxsplit=1 keeps labels that themselves contain '-' intact
    # (the bare split('-') raised ValueError on such tags).
    state, label = tag.split('-', 1)
    return state, label
def iob2bio(tags):
    """Convert a sequence of IOB tags to BIO format.

    In IOB an entity may open with 'I-'; BIO requires every entity to open
    with 'B-'.  An 'I-' tag is therefore promoted to 'B-' whenever it does
    not continue an entity of the same label from the previous position.

    Args:
        tags: list of IOB tag strings.

    Returns:
        A new list of tag strings in BIO format.
    """
    converted = []
    prev_state, prev_label = None, None
    for position, tag in enumerate(tags):
        state, label = split_tag(tag)
        # An 'I' opens a new entity at the sequence start, after an 'O',
        # or when the label changes; otherwise it continues the entity.
        opens_entity = state == 'I' and (
            position == 0 or prev_state == 'O' or prev_label != label
        )
        new_state = 'B' if opens_entity else state
        converted.append('O' if label is None else f'{new_state}-{label}')
        prev_state, prev_label = state, label
    return converted
def bio2bioes(tags):
    """Convert BIO tags to BIOES format.

    BIOES additionally marks single-token entities ('S-') and entity ends
    ('E-'); only the following tag needs to be inspected to decide this.

    Args:
        tags: list of BIO tag strings.

    Returns:
        A new list of tag strings in BIOES format.
    """
    converted = []
    n_tags = len(tags)
    for position, tag in enumerate(tags):
        state, label = split_tag(tag)
        # Past the last tag, the successor is treated as outside.
        if position + 1 < n_tags:
            next_state, _ = split_tag(tags[position + 1])
        else:
            next_state = 'O'
        # The entity stops here unless an 'I' of some entity follows.
        if next_state in ('B', 'O'):
            if state == 'B':
                state = 'S'  # single-token entity
            elif state == 'I':
                state = 'E'  # entity ends on this token
        converted.append('O' if label is None else f'{state}-{label}')
    return converted
def get_word_format_func(in_format, out_format):
    """Return the conversion pipeline mapping in_format to out_format.

    Supported conversions: bio->bioes, iob->bio, and iob->bioes (two
    steps).  Any other combination yields an empty list, i.e. the
    identity transform.

    Args:
        in_format: source tagging scheme name ('iob' or 'bio').
        out_format: target tagging scheme name ('bio' or 'bioes').

    Returns:
        A fresh list of conversion functions to apply in order.
    """
    pipelines = {
        ('bio', 'bioes'): (bio2bioes,),
        ('iob', 'bio'): (iob2bio,),
        ('iob', 'bioes'): (iob2bio, bio2bioes),
    }
    # list() returns a fresh list per call, matching the original behavior.
    return list(pipelines.get((in_format, out_format), ()))
def apply_transform(elems, format_func_list):
    """Pipe elems through each transform in order and return the result.

    Args:
        elems: the initial value (typically a list of tag strings).
        format_func_list: callables applied left to right.

    Returns:
        The value produced after all transforms have been applied.
    """
    result = elems
    for transform in format_func_list:
        result = transform(result)
    return result
| def split_tag(tag):
if tag == 'O':
state = 'O'
label = None
elif tag == '-X-':
state = 'O'
label = None
else:
(state, label) = tag.split('-')
return (state, label)
def iob2bio(tags):
processed_tags = []
prev_state = None
prev_label = None
for (t, tag) in enumerate(tags):
(state, label) = split_tag(tag)
if t == 0 and state == 'I':
new_state = 'B'
elif state == 'I' and prev_label != label:
new_state = 'B'
elif state == 'I' and prev_state == 'O':
new_state = 'B'
elif state == 'I' and prev_label == label:
new_state = 'I'
else:
new_state = state
if label is None:
new_tag = 'O'
else:
new_tag = f'{new_state}-{label}'
processed_tags.append(new_tag)
prev_state = state
prev_label = label
return processed_tags
def bio2bioes(tags):
processed_tags = []
last_index = len(tags) - 1
for (t, tag) in enumerate(tags):
(state, label) = split_tag(tag)
if t == last_index:
(next_state, next_label) = ('O', None)
else:
(next_state, next_label) = split_tag(tags[t + 1])
if state == 'B' and next_state in ['B', 'O']:
new_state = 'S'
elif state == 'I' and next_state in ['B', 'O']:
new_state = 'E'
elif state == 'I' and next_state == 'I':
new_state = 'I'
else:
new_state = state
if label is None:
new_tag = 'O'
else:
new_tag = f'{new_state}-{label}'
processed_tags.append(new_tag)
return processed_tags
def get_word_format_func(in_format, out_format):
format_func_list = []
if in_format == 'bio' and out_format == 'bioes':
format_func_list.append(bio2bioes)
if in_format == 'iob' and out_format == 'bio':
format_func_list.append(iob2bio)
if in_format == 'iob' and out_format == 'bioes':
format_func_list.append(iob2bio)
format_func_list.append(bio2bioes)
return format_func_list
def apply_transform(elems, format_func_list):
for func in format_func_list:
elems = func(elems)
return elems |
class OnTheFarmDivTwo:
    """Classic heads-and-legs puzzle with chickens (2 legs) and rabbits (4)."""

    def animals(self, heads, legs):
        """Return (chickens, rabbits) matching the given head and leg counts.

        Args:
            heads: total number of heads.
            legs: total number of legs.

        Returns:
            A (chickens, rabbits) tuple of ints, or an empty tuple when no
            non-negative integer solution exists (leg count out of the
            [2*heads, 4*heads] range, or odd).
        """
        # Feasible leg counts lie between all-chickens and all-rabbits,
        # and must be even since every animal has an even number of legs.
        if legs < 2 * heads or legs > 4 * heads or legs % 2:
            return ()
        # Each rabbit adds 2 legs beyond the all-chicken baseline. Use
        # integer division so the counts come back as ints, not floats
        # (the guard above guarantees the difference is even).
        rabbits = (legs - 2 * heads) // 2
        return heads - rabbits, rabbits
| class Onthefarmdivtwo:
def animals(self, heads, legs):
if legs < 2 * heads or legs > 4 * heads or legs % 2:
return tuple()
x = (legs - 2 * heads) / 2
return (heads - x, x) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.