content
stringlengths 7
1.05M
|
|---|
class Solution:
    def canCompleteCircuit(self, gas, cost):
        """Return the index of the starting gas station from which the whole
        circuit can be completed once (clockwise), or -1 if impossible.

        Greedy argument: if the running tank goes negative upon reaching
        station i, then no station between the previous candidate and i can
        be a valid start, so restart the candidate at i.  A full circuit
        exists iff sum(gas) >= sum(cost).
        """
        tank = 0    # fuel balance accumulated since the current candidate start
                    # (renamed from `sum`, which shadowed the builtin)
        total = 0   # overall gas-minus-cost balance; decides feasibility
        start = 0   # current candidate starting index
        for i in range(len(gas)):
            diff = gas[i] - cost[i]
            total += diff
            if tank < 0:
                # Previous candidate ran dry before i: restart from i.
                tank = diff
                start = i
            else:
                tank += diff
        return start if total >= 0 else -1
if __name__ == "__main__":
    # Exercise the solver on the two canonical LeetCode examples.
    solver = Solution()
    for gas, cost in (([1, 2, 3, 4, 5], [3, 4, 5, 1, 2]),
                      ([2, 3, 4], [3, 4, 3])):
        print(solver.canCompleteCircuit(gas, cost))
|
"""
Author: Xavid Ramirez
Email: xavid.ramirez01@utrgv.edu
Date: November 2, 2016
Desc: Hamming word encoding and decoding program. The Python program
takes either a set of bits to encode or decode. Encoding will
encode the bits into Hamming encoding. Decoding will check the
given bits, check the parities, fix parities if needed, return
the corrected word, and return the unencoded bit word.
Dependencies: Python 3
License: MIT
"""
class Hamming:
    """Interactive front end: asks whether to encode or decode a bit word
    and dispatches to the Encode or Decode class accordingly."""

    def __init__(self):
        # The prompt runs immediately on construction.
        self.query()

    def query(self):
        """Prompt the user for a mode and run the matching workflow."""
        command = input("Would you like to Encode or Decode? ").lower()
        if "encode" in command:
            bits = input("Please enter the bit word to be encoded in hamming code: ")
            Encode(bits).start()
        elif "decode" in command:
            bits = input("\nPlease enter the bit word to be decoded in hamming code: ")
            print("\n")
            Decode(bits).analyze()
        else:
            # Unrecognized command: report it and let the program end.
            print("You entered an invalid command!")
class Encode:
    """Hamming-encode a bit word.

    Parity placeholders (None) are inserted at the power-of-two positions
    (1-based: 1, 2, 4, ...), then each parity bit is computed in place for
    even parity over the bits it covers.
    """
    def __init__(self,bits=None):
        # Keep the word as a mutable list of '0'/'1' characters (or None).
        self.bits = list(bits) if bits != None else bits
        # True when the instance was constructed without any bits.
        self.blank = True if bits == None else False
        # Largest power of two <= len(bits): the highest parity position used.
        self.MaxParity = self.encode_FindLargestParity()
        # Index-out-of-range notes, collected for logging only (never raised).
        self.ErrorLog = []
    def encode(self,P):
        """ Compute and store the parity bit for parity position P. """
        pData = []
        if P == 1:
            # Parity 1 covers every other bit: slice with step 2, then drop
            # the first element (the P1 placeholder itself).
            pData.extend(self.bits[::P+1])
            pData.pop(0)
            self.encode_setParityBit(pData,P)
        elif P in [2,4,8,16,32,64,128,256]:
            # For parity P: take P bits, skip P bits, repeat.
            #  EX: Parity 2 => take two, ignore two, take two, ignore two, ...
            #  EX: Parity 4 => take four, ignore four, take four, ignore four, ...
            for i in range( (P-1), len(self.bits), (P*2) ):
                for j in range(0,P):
                    try:
                        pData.append(self.bits[i+j])
                    except IndexError:
                        # Out-of-range indices are expected for large parity
                        # bits; recorded purely for logging purposes.
                        self.ErrorLog.append("During parity bit" + str(P) +" check. Index out of range at " + str(i+j))
            # Pop the first collected bit: it is the parity placeholder (None)
            # we are about to compute, not data.
            pData.pop(0)
            # Run the encoding function for the given parity bit P.
            self.encode_setParityBit(pData,P)
    def start(self):
        """Prepare the list for encoding and encode it.

        1. For every possible parity position up to MaxParity, insert None
           at that position, shifting the following bits right.
        2. For every parity up to MaxParity, compute the parity bit.
        3. Print the encoded output.
        """
        prepped = []
        prepped.extend(self.bits)
        for i in [1,2,4,8,16,32,64,128,256]:
            if i < self.MaxParity:
                prepped.insert(i-1,None)
            elif i == self.MaxParity:
                prepped.insert(i-1,None)
                break
        self.bits = prepped
        for i in [1,2,4,8,16,32,64,128,256]:
            if i == self.MaxParity:
                self.encode(i)
                break
            elif i == 1:
                self.encode(1)
            else:
                self.encode(i)
        print("Encoding Complete...\n")
        output = ''.join(self.bits)
        print("Output => " + output)
    def encode_setParityBit(self,pData,P):
        """ Store the even-parity bit for P: '0' if the number of 1s in the
        covered sequence is even, otherwise '1'. """
        if pData.count('1') % 2 == 0:
            self.bits[P-1] = '0'
        elif pData.count('1') % 2 != 0:
            self.bits[P-1] = '1'
    def encode_FindLargestParity(self):
        """Return the largest power-of-two parity position that fits within
        the given number of bits (None when there are no bits)."""
        for i in [256,128,64,32,16,8,4,2,1]:
            if i <= len(self.bits):
                return i
class Decode:
    """Decode a Hamming-encoded bit word: verify each parity bit, locate and
    flip a single-bit error if one is found, then strip the parity bits and
    print the decoded word.
    """
    def __init__(self, bits=None):
        # Keep the word as a mutable list of '0'/'1' characters.
        self.bits = list(bits) if bits != None else bits
        self.blank = True if bits == None else False
        # Set when a parity check fails; errorBit accumulates the syndrome.
        self.error = False
        self.errorBit = 0
        self.MaxParity = self.decode_FindLargestParity()
        # Index-out-of-range notes, collected for logging only.
        self.ErrorLog = []
        self.parityBits = []
    def decode_FindLargestParity(self):
        """ Return the largest power-of-two parity position <= len(bits). """
        maxP = 0
        for i in [1,2,4,8,16,32,64,128,256]:
            if len(self.bits) - i >= 0:
                maxP = i
        return maxP
    def analyze(self):
        """ Check every parity up to MaxParity; on error fix the bit and
        re-run the analysis, otherwise print the corrected/decoded words. """
        for i in [1,2,4,8,16,32,64,128,256]:
            if i == self.MaxParity:
                self.decode(i)
                break
            else:
                self.decode(i)
        # If a parity failed, self.errorBit holds the computed error position:
        # fix it and re-analyze.  Otherwise report the decoded word.
        if self.error == True:
            self.error = False
            print("\nError found in bit " + str(self.errorBit) + "... Fixing Error...\n")
            self.FixError()
            print("Rerunning parity analysis....")
            self.analyze()
        else:
            print("\nTest Complete!")
            print("\nCorrected encoded bits => " + ''.join(self.bits))
            # Go print out the decoded word.
            self.outputDecodedWord()
    def outputDecodedWord(self):
        """ Print the bit sequence with all parity positions removed.
        NOTE(review): `output` aliases self.bits, so this also mutates the
        instance's bit list — confirm that is intended. """
        output = self.bits
        for i in [256,128,64,32,16,8,4,2,1]:
            if i <= self.MaxParity:
                output.pop(i-1)
        output = ''.join(output)
        print("Decoded bits => " + output)
    def decode(self,P):
        """ Collect the bits covered by parity P and check them. """
        pData = []
        pVal = 0
        if P == 1:
            # Parity 1 covers every other bit; the first element is P1 itself.
            pData.extend(self.bits[::P+1])
            pVal = pData[0]
            pData.pop(0)
            self.parityAnalysis(pData,P,pVal)
        elif P in [2,4,8,16,32,64,128,256]:
            # For parity P: take P bits, skip P bits, repeat.
            #  EX: Parity 2 => take two, ignore two, take two, ignore two, ...
            #  EX: Parity 4 => take four, ignore four, take four, ignore four, ...
            for i in range( (P-1), len(self.bits), (P*2) ):
                for j in range(0, P):
                    try:
                        pData.append(self.bits[i+j])
                    except IndexError:
                        self.ErrorLog.append("During parity bit" + str(P) +" check. Index out of range at " + str(i+j))
            pVal = pData[0]
            # Pop the first bit: this is the parity bit being verified (and
            # corrected later if needed).
            pData.pop(0)
            self.parityAnalysis(pData,P,pVal)
    def parityAnalysis(self, pData, P, pVal):
        """ Check the even parity of pData against the stored parity pVal;
        on mismatch set the error flag and accumulate the error position. """
        print("Data for Parity Bit " + str(P) + " = { " + str(pData) + " }")
        print("P" + str(P) + " currently = " + str(pVal))
        # An even count of 1s must pair with parity '0', an odd count with '1';
        # anything else marks an error.
        if pData.count('1') % 2 == 0 and pVal == '0':
            print("Parity Bit " + str(P) + " is Correct...\n")
        elif pData.count('1') % 2 != 0 and pVal == '1':
            print("Parity Bit " + str(P) + " is Correct...\n")
        else:
            print("Parity Bit " + str(P) + " is Incorrect!\n")
            # NOTE(review): classic Hamming decoding adds P itself to the
            # syndrome for every failed check; weighting by int(pVal)
            # contributes 0 whenever the stored parity is '0' — confirm this
            # matches the intended scheme.
            self.errorBit += (int(pVal) * int(P))
            self.error = True
    def FixError(self):
        """ Flip the value of the error bit.
        NOTE(review): errorBit is used here as a 0-based list index, while a
        Hamming syndrome is conventionally a 1-based position — verify the
        off-by-one against the encoder. """
        if self.bits[self.errorBit] == '1':
            self.bits[self.errorBit] = '0'
        else:
            self.bits[self.errorBit] = '1'
def main():
    """Entry point: launch the interactive Hamming prompt."""
    Hamming()


if __name__ == '__main__':
    main()
|
def encrypt(text, s):
    """Caesar-cipher *text* by shifting each letter *s* positions.

    Uppercase and lowercase letters wrap around within their own ranges;
    any other character (space, digit, punctuation) is left unchanged.
    """
    result = []
    for char in text:
        if char.isupper():
            # Shift within 'A'..'Z' (ASCII 65..90), wrapping with mod 26.
            result.append(chr((ord(char) + s - 65) % 26 + 65))
        elif char.islower():
            # Shift within 'a'..'z' (ASCII 97..122).
            result.append(chr((ord(char) + s - 97) % 26 + 97))
        else:
            # Bug fix: the original else branch ran every non-uppercase
            # character through the lowercase formula, garbling spaces,
            # digits and punctuation. Pass non-letters through untouched.
            result.append(char)
    return ''.join(result)
# Demonstrate the cipher on a sample message.
text, s = "ATTACKATONCYE", 4
for label, value in (("Plain Text : ", text),
                     ("Shift pattern : ", str(s)),
                     ("Cipher: ", encrypt(text, s))):
    print(label + value)
|
def amount_of_elements_smaller(matrix, i, j):
    '''Count the amount of elements smaller than m[i][j] in the (square) matrix.
    Each column and row is sorted in ascending order.
    >>> amount_of_elements_smaller([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 1, 1)
    4
    '''
    n = len(matrix)
    assert n == len(matrix[0])
    pivot = matrix[i][j]
    col = n - 1  # staircase column pointer; only ever moves left
    count = 0
    for row in range(n):
        # Walk left past every entry >= pivot.  Because rows and columns are
        # sorted, the remaining entries 0..col of this row are all < pivot.
        while matrix[row][col] >= pivot:
            col -= 1
            if col < 0:
                # Nothing smaller remains; rows below only hold larger values.
                return count
        count += col + 1
    return count
|
class FieldTypeError(Exception):
    """Raised when a field value has the wrong datatype."""
    pass
class ToManyMatchesError(Exception):
    """Raised when multiple nodes were found where exactly one was expected.

    NOTE(review): the name looks like a typo for "TooManyMatchesError";
    kept unchanged because external callers may reference it.
    """
    pass
class DoesNotExist(Exception):
    """Raised when the requested object does not exist."""
    pass
class RelationshipMatchError(Exception):
    """Raised on errors while matching relationships."""
    pass
class DeletionError(Exception):
    """Raised when deleting an instance fails."""
|
# Beginner-tutorial scratch script (FishC Python course).  Earlier exercises
# are kept commented out (translated from Chinese); only the last few
# statements actually run.
# print('~~~~~~~~~~~~~ I love FishC Studio ~~~~~~~~~~~~~~~~')
# temp = input("Why not guess the number Xiaojiayu is thinking of: ")
# guess = int(temp)
# if guess == 8:
#     print("Whoa, are you a mind reader?")
#     print("Hmph, no reward even though you guessed it")
# else:
#     print("Wrong — the number in mind is 8")
# print("Game over, not playing any more")
# teacher = 'Xiaojiayu'
# print(teacher)
# teacher = 'Old Jiayu'
# print(teacher)
# first = 3
# second = 8
# third = first + second
# print(third)
#
# myteacher = 'Xiaojiayu'
# yourteacher = 'Night'
# ourteacher = myteacher + yourteacher
# print(ourteacher)  # string concatenation
#
# print(first)
print('fishc!=Fishc')
# print('5'+'8')
#
# print('let\'s go!')
# print("let's go!")
#
# str = 'c:\\now'
# print(str)
#
# str = r'c:\now\users''\\'
# print(str)
#
# str = """(multi-line sample text; original was Chinese)"""
# print(str)
a=0.485
b=int(a)    # int() truncates toward zero -> 0
c=float(a)  # already a float -> 0.485
d=str(a)    # '0.485'
print(b,c,d)
print(10//6)     # floor division -> 1
print(5%2,11%2)  # modulo -> 1 1
# Search each line of the file for the character '徐' and print its index
# (-1 when absent).
with open('Burning.txt') as f:
    for each_line in f:
        a=each_line.find('徐')
        # NOTE(review): the original flat layout makes it ambiguous whether
        # this print is inside the loop; per-line output is assumed here.
        print(a)
|
# coding: utf-8
# Jupyter-notebook export: Dataquest-style exercises on the LA weather CSV
# plus dictionary basics.  Bare expressions (e.g. `weather[:5]`) were cell
# echoes in the notebook and are no-ops when run as a script.
# __The Data Set__
# In[1]:
# NOTE(review): the file handle is opened and never closed.
r = open('la_weather.csv', 'r')
# In[2]:
w = r.read()
# In[3]:
w_list = w.split('\n')
# In[4]:
# Split each CSV line into a list of fields.
weather = []
for w in w_list:
    wt = w.split(',')
    weather.append(wt)
weather[:5]
# In[5]:
# Drop the header row.
del weather[0]
# In[6]:
# Extract the second column (the weather description) from every row.
col_weather = []
for w in weather:
    col_weather.append(w[1])
col_weather[:5]
# - Assign the first element of `col_weather` to `first_element` and display it using the `print()` function.
# - Assign the last element of `col_weather` to `last_element` and display it using the `print()` function.
# In[7]:
first_element = col_weather[0]
first_element
# In[8]:
last_element = col_weather[len(col_weather) - 1]
last_element
# __Dictionaries__
# In[9]:
students = ['Tom','Jim','Sue','Ann']
scores = [70,80,85,75]
# In[10]:
# Parallel-list lookup: find the score belonging to `name`.
indexes = [0,1,2,3]
name = 'Sue'
score = 0
for i in indexes:
    if students[i] == name:
        score = scores[i]
print(score)
# In[11]:
# The same mapping expressed directly as a dictionary:
scores = {'Tom':70,'Jime':80,'Sue':85,'Ann':75}
# In[12]:
scores['Tom']
# __Practice populating a Dictionary__
# In[13]:
superhero_ranks = {'Aquaman':1, 'Seperman':2}
# In[14]:
president_ranks = {}
president_ranks["FDR"] = 1
president_ranks["Lincoln"] = 2
president_ranks["Aquaman"] = 3
fdr_rank = president_ranks["FDR"]
lincoln_rank = president_ranks["Lincoln"]
aquaman_rank = president_ranks["Aquaman"]
# __Defining a Dictionary with Values__
# In[15]:
random_values = {"key1": 10, "key2": "indubitably", "key3": "dataquest", 3: 5.6}
# In[16]:
random_values
# In[17]:
# Create a dictionary named `animals`
animals = {7:'raven',
           8:'goose',
           9:'duck'}
# In[18]:
animals
# In[19]:
# Create a dictionary named `times`
times = {'morning': 8,
         'afternoon': 14,
         'evening': 19,
         'night': 23}
times
# __Modifying Dictionary Values__
# In[20]:
students = {
    "Tom": 60,
    "Jim": 70
}
# In[21]:
# Add the key `Ann` and value 85 to the dictionary students
students['Ann'] = 85
# In[22]:
students
# In[23]:
# Replace the value for the key Tom with 80
students['Tom'] = 80
# In[24]:
# Add 5 to the value for the key Jim
students['Jim'] = students['Jim'] + 5
# In[25]:
students
# __The In Statement and Dictionaries__
# In[26]:
planet_numbers = {"mercury": 1, "venus": 2, "earth": 3, "mars": 4}
# In[27]:
# Check whether `jupiter` is a key in `planet_numbers`
jupiter_found = 'jupiter' in planet_numbers
# In[28]:
jupiter_found
# In[29]:
earth_found = 'earth' in planet_numbers
# In[30]:
earth_found
# __The Else Statement__
# ```python
# if temperature > 50:
#     print("It's hot!")
# else:
#     print("It's cold!")
# ```
# __Practicing with the Else Statement__
# In[31]:
# Partition the scores around the threshold 70.
scores = [80, 100, 60, 30]
high_scores = []
low_scores = []
for score in scores:
    if score > 70:
        high_scores.append(score)
    else:
        low_scores.append(score)
# In[32]:
high_scores
# In[33]:
low_scores
# In[34]:
# Partition planet names by length (> 5 characters = "long").
planet_names = ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Neptune", "Uranus"]
short_names = []
long_names = []
for name in planet_names:
    if len(name) > 5:
        long_names.append(name)
    else:
        short_names.append(name)
# In[35]:
short_names
# In[36]:
long_names
# __Counting with Dictionaries__
# In[37]:
pantry = ["apple", "orange", "grape", "apple", "orange", "apple", "tomato", "potato", "grape"]
# In[38]:
# Frequency count of pantry items.
pantry_counts = {}
for item in pantry:
    if item in pantry_counts:
        pantry_counts[item] += 1
    else:
        pantry_counts[item] = 1
# In[39]:
pantry_counts
# In[40]:
# Print each key and its value.
for key, value in pantry_counts.items():
    print(key, value)
# __Counting the Weather__
#
# - Count how many times each type of weather occurs in the `col_weather` list, and store the results in a new dictionary called `weather_counts`.
# - When finished, `weather_counts` should contain a key for each different type of weather in the `weather` list, along with its associated frequency. Here's a preview of how the result should format the `weather_counts` dictionary.
# In[41]:
# NOTE(review): the loop variable shadows the earlier `weather` list of rows.
weather_counts = {}
for weather in col_weather:
    if weather in weather_counts:
        weather_counts[weather] += 1
    else:
        weather_counts[weather] = 1
# In[42]:
weather_counts
|
# Module-level configuration constants.
URL_CONFIG ="www.python.org"  # target host name (no scheme)
DEFAULT_VALUE = 1
DEFAULT_CONSTANT = 0
|
class Solution:
    def reverse(self, x: int) -> int:
        """Reverse the decimal digits of a 32-bit signed integer.

        Returns 0 when the reversed value overflows the 32-bit signed range.
        The sign is preserved and trailing zeros drop out naturally.
        """
        negative = x < 0
        x = abs(x)
        result = 0  # renamed from `reversed`, which shadowed the builtin
        while x != 0:
            result = result * 10 + x % 10
            x //= 10
            if result > 2**31 - 1:
                # Overflow of the positive 32-bit range: return 0, as in the
                # original.  NOTE(review): a reversed magnitude of exactly
                # 2**31 with a negative sign would also be rejected, but no
                # valid 32-bit input produces that case.
                return 0
        return -result if negative else result
|
""" modules for income tax """
# import datetime
class TaxReturn:
""" Tax return class """
def hello(self, x):
print("hello ", x)
print("Hi")
taxreturn = TaxReturn()
taxreturn.hello("monkey")
|
# type: ignore
# Public API of this MATLAB-plotting compatibility shim.  Every name listed
# here is defined below as a stub that raises NotImplementedError.
__all__ = [
    "meshc",
    "barh",
    "trisurf",
    "compass",
    "isonormals",
    "plotutils",
    "ezcontour",
    "streamslice",
    "scatter",
    "rgb2ind",
    "usev6plotapi",
    "quiver",
    "streamline",
    "triplot",
    "tetramesh",
    "rose",
    "patch",
    "comet",
    "voronoi",
    "contourslice",
    "histogram",
    "errorbar",
    "reducepatch",
    "ezgraph3",
    "interpstreamspeed",
    "shrinkfaces",
    "ezplot3",
    "ezpolar",
    "curl",
    "stream3",
    "contour",
    "contours",
    "coneplot",
    "rotate",
    "isosurface",
    "pie3",
    "specgraphhelper",
    "stem",
    "frame2im",
    "comet3",
    "ezmeshc",
    "contourf",
    "fplot",
    "quiver3",
    "isocolors",
    "soundview",
    "ellipsoid",
    "parseplotapi",
    "streamtube",
    "changeseriestype",
    "makebars",
    "bar3h",
    "image",
    "trimesh",
    "clabel",
    "fill",
    "spinmap",
    "plotmatrix",
    "ezsurf",
    "divergence",
    "ind2rgb",
    "pareto",
    "isocaps",
    "moviein",
    "pie",
    "contourc",
    "feather",
    "hgline2lineseries",
    "ezcontourf",
    "stairs",
    "surfc",
    "im2java",
    "ezplot",
    "im2frame",
    "colstyle",
    "movieview",
    "contour3",
    "rgbplot",
    "surf2patch",
    "dither",
    "contrast",
    "waterfall",
    "cylinder",
    "bar",
    "slice",
    "histogram2",
    "streamribbon",
    "pcolor",
    "ribbon",
    "isplotchild",
    "sphere",
    "reducevolume",
    "ezsurfc",
    "imagesc",
    "subvolume",
    "streamparticles",
    "volumebounds",
    "plotchild",
    "area",
    "meshz",
    "imageview",
    "stem3",
    "scatter3",
    "ezmesh",
    "plotdoneevent",
    "stream2",
    "vissuite",
    "bar3",
    "smooth3",
]
def _make_stub(name):
    """Build a placeholder function that raises NotImplementedError(name)."""
    def _stub(*args):
        raise NotImplementedError(name)
    _stub.__name__ = name
    _stub.__qualname__ = name
    return _stub


# Replaces ~220 lines of identical hand-written stubs with generation from
# __all__, the single source of truth for the exported names.  Each generated
# function keeps the original signature (*args) and raises
# NotImplementedError with the same message (its own name), so callers and
# `from module import <name>` behave exactly as before.
for _stub_name in __all__:
    globals()[_stub_name] = _make_stub(_stub_name)
del _stub_name
|
#!/usr/bin/python
# Minimal smoke-test script: print two fixed lines.
for line in ('Hello Git!', "Nakano Masaki"):
    print(line)
|
# Read a whitespace-separated list of integers from stdin.
a = [int(x) for x in input().split()]
a.sort() #this command sorts the list in ascending order, so a[-1] is the max
# If the two largest values are equal, print third-largest + second-smallest;
# otherwise print second-largest + second-smallest.
# NOTE(review): the use of a[1] (second-smallest) looks suspicious for a
# "two largest values" style problem — confirm against the original
# problem statement.
if a[-2]==a[-1]:
    print(a[-3]+a[1])
else:
    print(a[-2] + a[1])
|
# Competitive-programming solution: reads N, A, B and prints a permutation of
# 1..N (space-separated), or -1 when impossible (A*B < N or A+B > N+1).
# Presumably the "permutation with longest increasing subsequence A and
# longest decreasing subsequence B" construction — confirm against the
# original task statement.
N, A, B = map(int, input().split())
def f(a, b):
    # Build the permutation for a >= b as three concatenated segments; the
    # branch condition N % a > b - N // a decides the segment boundaries.
    return [
        p
        for p in [
            N - i * a + j
            for i in range(
                1,
                N // a + 1 if N % a > b - N // a else
                N // a
            )
            for j in range(1, a + 1)
        ] +
        [
            j
            for j in range(
                b - N // a if N % a > b - N // a else
                b - N // a + 1,
                N - (N // a) * a + 1 if N % a > b - N // a else
                N - (N // a - 1) * a + 1
            )
        ] +
        [
            j
            for j in reversed(
                range(
                    1,
                    b - N // a if N % a > b - N // a else
                    b - N // a + 1
                )
            )
        ]
    ]
# Constructed as:
# N - A + 1, N - A + 2, ..., N,
# N - 2A + 1, N - 2A + 2, ..., N - A,
# ...,
# N - floor(N / A)A + 1, N - floor(N / A)A + 2, ..., N - (floor(N / A) + 1)A,
# B - floor(N / A), B - floor(N / A) + 1, ..., N - floor(N / A)A
# B - floor(N / A) - 1, B - floor(N / A) - 2, ..., 1
ans = (
    -1 if A * B < N or A + B > N + 1 else
    ' '.join(
        # When A < B the construction is mirrored by mapping p -> N - p + 1.
        str(p) if A >= B else
        str(N - p + 1)
        for p in f(max(A, B), min(A, B))
    )
)
print(ans)
|
# The MessageQueue class provides an interface to be implemented by classes that store messages.
class MessageQueue(object):
    """Interface for message-storing backends; subclasses implement both
    methods (the base-class bodies are intentionally no-ops)."""

    def add(self, folder_id, folder_path, message_type, parameters=None, sender_controller_id=None, sender_user_id=None, timestamp=None):
        """Add a single message to the queue."""
        pass

    def receive(self):
        """Return a list of message objects once some are ready."""
        pass
|
info = open("phonebook.txt", "r+").readlines()
ph = {}
for i in range(len(info)):
word = info[i].split()
ph[word[0]]=word[1]
for i in sorted(ph.keys()):
print(i,ph[i])
|
print("Welcome to the roller coaster!")
height = int(input("What is your height in cm? "))
canRide = False
if height > 120:
age = int(input("What is your age in years? "))
if age > 18:
canRide = True
else:
canRide = False
else:
canRide = False
if canRide:
print('You can ride the roller coaster!')
else:
print('Sorry! You cannot ride the roller coaster')
|
class Pilot:
    # Class attribute: the profession reported for every Pilot instance.
    current_job = 'Pilot'
    def __init__(self, name):
        self.name = name
    def say_hallo(self):
        """Print a greeting containing the instance's name."""
        print(f'My name... {self.name}')
class CosmonautPilot(Pilot):
    # Overrides the inherited job and records the previous one.
    current_job = 'Cosmonaut'
    previous_job = 'Pilot'
class GieroyCosmonautPilot(CosmonautPilot):
    # "Gieroy" is Russian for "Hero".
    status = 'Gieroy'
# Demonstration of attribute lookup through the inheritance chain.
ivan = GieroyCosmonautPilot(name='Иван Иванович')
ivan.say_hallo() # prints: My name... Иван Иванович
ivan.current_job # 'Cosmonaut' (from CosmonautPilot)
ivan.previous_job # 'Pilot' (from CosmonautPilot)
ivan.status # 'Gieroy' (from GieroyCosmonautPilot)
|
#
# PySNMP MIB module MYLEXDAC960SCSIRAIDCONTROLLER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MYLEXDAC960SCSIRAIDCONTROLLER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:06:54 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter32, NotificationType, ModuleIdentity, iso, TimeTicks, Counter64, ObjectIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Integer32, Bits, Gauge32, enterprises, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "NotificationType", "ModuleIdentity", "iso", "TimeTicks", "Counter64", "ObjectIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Integer32", "Bits", "Gauge32", "enterprises", "Unsigned32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
class DmiCounter(Counter32):
pass
class DmiInteger(Integer32):
pass
class DmiDisplaystring(DisplayString):
pass
class DmiDateX(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(28, 28)
fixedLength = 28
class DmiComponentIndex(Integer32):
    """Index identifying a DMI component instance; used as a table index below."""
    pass
mylex = MibIdentifier((1, 3, 6, 1, 4, 1, 1608))
mib = MibIdentifier((1, 3, 6, 1, 4, 1, 1608, 3))
v2 = MibIdentifier((1, 3, 6, 1, 4, 1, 1608, 3, 2))
dmtfGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1))
tComponentid = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1), )
if mibBuilder.loadTexts: tComponentid.setStatus('mandatory')
eComponentid = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eComponentid.setStatus('mandatory')
a1Manufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Manufacturer.setStatus('mandatory')
a1Product = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Product.setStatus('mandatory')
a1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 3), DmiDisplaystring())
if mibBuilder.loadTexts: a1Version.setStatus('mandatory')
a1SerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 4), DmiDisplaystring())
if mibBuilder.loadTexts: a1SerialNumber.setStatus('mandatory')
a1Installation = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 5), DmiDateX())
if mibBuilder.loadTexts: a1Installation.setStatus('mandatory')
a1Verify = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 1, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a1Verify.setStatus('mandatory')
tControllerInformation = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2), )
if mibBuilder.loadTexts: tControllerInformation.setStatus('mandatory')
eControllerInformation = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a2ControllerNumber"))
if mibBuilder.loadTexts: eControllerInformation.setStatus('mandatory')
a2ControllerNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2ControllerNumber.setStatus('mandatory')
a2OperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2OperationalState.setStatus('mandatory')
a2FirmwareRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2FirmwareRevision.setStatus('mandatory')
a2ConfiguredChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2ConfiguredChannels.setStatus('mandatory')
a2ActualChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2ActualChannels.setStatus('mandatory')
a2MaximumLogicalDrives = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2MaximumLogicalDrives.setStatus('mandatory')
a2MaximumTargetsPerChannel = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2MaximumTargetsPerChannel.setStatus('mandatory')
a2MaximumTaggedRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2MaximumTaggedRequests.setStatus('mandatory')
a2MaximumDataTransferSizePerIoRequestInK = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2MaximumDataTransferSizePerIoRequestInK.setStatus('mandatory')
a2MaximumConcurrentCommands = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 10), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2MaximumConcurrentCommands.setStatus('mandatory')
a2RebuildRate = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2RebuildRate.setStatus('mandatory')
a2LogicalSectorSizeInBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2LogicalSectorSizeInBytes.setStatus('mandatory')
a2PhysicalSectorSizeInBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 13), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2PhysicalSectorSizeInBytes.setStatus('mandatory')
a2CacheLineSizeInBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 14), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2CacheLineSizeInBytes.setStatus('mandatory')
a2DramSizeInMb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 15), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2DramSizeInMb.setStatus('mandatory')
a2EpromSizeInKb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 16), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2EpromSizeInKb.setStatus('mandatory')
a2BusType = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 17), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2BusType.setStatus('mandatory')
a2SystemBusNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 18), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2SystemBusNumber.setStatus('mandatory')
a2SlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 19), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2SlotNumber.setStatus('mandatory')
a2InterruptVectorNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 20), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2InterruptVectorNumber.setStatus('mandatory')
a2InterruptMode = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 2, 1, 21), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a2InterruptMode.setStatus('mandatory')
tLogicalDriveInformation = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3), )
if mibBuilder.loadTexts: tLogicalDriveInformation.setStatus('mandatory')
eLogicalDriveInformation = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a3ControllerNumber"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a3LogicalDriveNumber"))
if mibBuilder.loadTexts: eLogicalDriveInformation.setStatus('mandatory')
a3ControllerNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ControllerNumber.setStatus('mandatory')
a3LogicalDriveNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3LogicalDriveNumber.setStatus('mandatory')
a3OperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3OperationalState.setStatus('mandatory')
a3RaidLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3RaidLevel.setStatus('mandatory')
a3WritePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3WritePolicy.setStatus('mandatory')
a3SizeInMb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3SizeInMb.setStatus('mandatory')
a3StripeSizeInBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3StripeSizeInBytes.setStatus('mandatory')
a3PhysicalDriveMap = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 3, 1, 8), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3PhysicalDriveMap.setStatus('mandatory')
tPhyicalDeviceInformation = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4), )
if mibBuilder.loadTexts: tPhyicalDeviceInformation.setStatus('mandatory')
ePhyicalDeviceInformation = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a4ControllerNumber"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a4ScsiBusId"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a4ScsiTargetId"))
if mibBuilder.loadTexts: ePhyicalDeviceInformation.setStatus('mandatory')
a4ControllerNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ControllerNumber.setStatus('mandatory')
a4ScsiBusId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ScsiBusId.setStatus('mandatory')
a4ScsiTargetId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ScsiTargetId.setStatus('mandatory')
a4OperationalState = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4OperationalState.setStatus('mandatory')
a4VendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 5), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4VendorId.setStatus('mandatory')
a4ProductId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 6), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ProductId.setStatus('mandatory')
a4ProductRevisionLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 7), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ProductRevisionLevel.setStatus('mandatory')
a4SizeInMb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4SizeInMb.setStatus('mandatory')
a4DeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 9), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4DeviceType.setStatus('mandatory')
a4SoftErrorsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 10), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4SoftErrorsCount.setStatus('mandatory')
a4HardErrorsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 11), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4HardErrorsCount.setStatus('mandatory')
a4ParityErrorsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 12), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4ParityErrorsCount.setStatus('mandatory')
a4MiscErrorsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 4, 1, 13), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a4MiscErrorsCount.setStatus('mandatory')
tMylexDac960ComponentInstrumentationInfo = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5), )
if mibBuilder.loadTexts: tMylexDac960ComponentInstrumentationInfo.setStatus('mandatory')
eMylexDac960ComponentInstrumentationInfo = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMylexDac960ComponentInstrumentationInfo.setStatus('mandatory')
a5CiRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5CiRevision.setStatus('mandatory')
a5CiBuildDate = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5CiBuildDate.setStatus('mandatory')
a5MdacDeviceDriverRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5, 1, 3), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5MdacDeviceDriverRevision.setStatus('mandatory')
a5MdacDeviceDriverBuildDate = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 5, 1, 4), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a5MdacDeviceDriverBuildDate.setStatus('mandatory')
tLogicalDriveStatistics = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6), )
if mibBuilder.loadTexts: tLogicalDriveStatistics.setStatus('mandatory')
eLogicalDriveStatistics = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a6ControllerNumber"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a6LogicalDriveNumber"))
if mibBuilder.loadTexts: eLogicalDriveStatistics.setStatus('mandatory')
a6ControllerNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ControllerNumber.setStatus('mandatory')
a6LogicalDriveNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6LogicalDriveNumber.setStatus('mandatory')
a6ReadRequestsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 3), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ReadRequestsCount.setStatus('mandatory')
a6AmountOfDataReadInMb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 4), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6AmountOfDataReadInMb.setStatus('mandatory')
a6WriteRequestsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 5), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6WriteRequestsCount.setStatus('mandatory')
a6AmountOfDataWrittenInMb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 6), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6AmountOfDataWrittenInMb.setStatus('mandatory')
a6ReadCacheHit = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 6, 1, 7), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a6ReadCacheHit.setStatus('mandatory')
tPhysicalDriveStatistics = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7), )
if mibBuilder.loadTexts: tPhysicalDriveStatistics.setStatus('mandatory')
ePhysicalDriveStatistics = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a7ControllerNumber"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a7ScsiBusId"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a7ScsiTargetId"))
if mibBuilder.loadTexts: ePhysicalDriveStatistics.setStatus('mandatory')
a7ControllerNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7ControllerNumber.setStatus('mandatory')
a7ScsiBusId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7ScsiBusId.setStatus('mandatory')
a7ScsiTargetId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7ScsiTargetId.setStatus('mandatory')
a7ReadRequestsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 4), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7ReadRequestsCount.setStatus('mandatory')
a7AmountOfDataReadInKb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 5), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7AmountOfDataReadInKb.setStatus('mandatory')
a7WriteRequestsCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 6), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7WriteRequestsCount.setStatus('mandatory')
a7AmountOfDataWrittenInKb = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 7, 1, 7), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a7AmountOfDataWrittenInKb.setStatus('mandatory')
tErrorControl = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98), )
if mibBuilder.loadTexts: tErrorControl.setStatus('mandatory')
eErrorControl = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"), (0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a98Selfid"))
if mibBuilder.loadTexts: eErrorControl.setStatus('mandatory')
a98Selfid = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 1), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98Selfid.setStatus('mandatory')
a98NumberOfFatalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 2), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98NumberOfFatalErrors.setStatus('mandatory')
a98NumberOfMajorErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 3), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98NumberOfMajorErrors.setStatus('mandatory')
a98NumberOfWarnings = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 4), DmiCounter()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98NumberOfWarnings.setStatus('mandatory')
a98ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("vOk", 0), ("vWarning", 1), ("vMajor", 2), ("vFatal", 3), ("vInformational", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98ErrorStatus.setStatus('mandatory')
a98ErrorStatusType = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("vPost", 0), ("vRuntime", 1), ("vDiagnosticTest", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98ErrorStatusType.setStatus('mandatory')
a98AlarmGeneration = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 98, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vOff", 0), ("vOn", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a98AlarmGeneration.setStatus('mandatory')
tMiftomib = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 99), )
if mibBuilder.loadTexts: tMiftomib.setStatus('mandatory')
eMiftomib = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 99, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eMiftomib.setStatus('mandatory')
a99MibName = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 99, 1, 1), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibName.setStatus('mandatory')
a99MibOid = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 99, 1, 2), DmiDisplaystring()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a99MibOid.setStatus('mandatory')
a99DisableTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 99, 1, 3), DmiInteger()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a99DisableTrap.setStatus('mandatory')
tTrapGroup = MibTable((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999), )
if mibBuilder.loadTexts: tTrapGroup.setStatus('mandatory')
eTrapGroup = MibTableRow((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1), ).setIndexNames((0, "MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "DmiComponentIndex"))
if mibBuilder.loadTexts: eTrapGroup.setStatus('mandatory')
a9999ErrorTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorTime.setStatus('mandatory')
a9999ErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 2), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorStatus.setStatus('mandatory')
a9999ErrorGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 3), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorGroupId.setStatus('mandatory')
a9999ErrorInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 4), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ErrorInstanceId.setStatus('mandatory')
a9999ComponentId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 5), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ComponentId.setStatus('mandatory')
a9999GroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 6), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999GroupId.setStatus('mandatory')
a9999InstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 7), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999InstanceId.setStatus('mandatory')
a9999VendorCode1 = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 8), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode1.setStatus('mandatory')
a9999VendorCode2 = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 9), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorCode2.setStatus('mandatory')
a9999VendorText = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 10), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999VendorText.setStatus('mandatory')
a9999ParentGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 11), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentGroupId.setStatus('mandatory')
a9999ParentInstanceId = MibTableColumn((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1, 12), DmiInteger()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a9999ParentInstanceId.setStatus('mandatory')
mdacEventError = NotificationType((1, 3, 6, 1, 4, 1, 1608, 3, 2, 1, 9999, 1) + (0,1)).setObjects(("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ErrorTime"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ErrorStatus"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ErrorGroupId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ErrorInstanceId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ComponentId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999GroupId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999InstanceId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999VendorCode1"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999VendorCode2"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999VendorText"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ParentGroupId"), ("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", "a9999ParentInstanceId"))
mibBuilder.exportSymbols("MYLEXDAC960SCSIRAIDCONTROLLER-MIB", eErrorControl=eErrorControl, a3OperationalState=a3OperationalState, a2MaximumConcurrentCommands=a2MaximumConcurrentCommands, a2SlotNumber=a2SlotNumber, a6ReadRequestsCount=a6ReadRequestsCount, a9999GroupId=a9999GroupId, a9999ParentInstanceId=a9999ParentInstanceId, tErrorControl=tErrorControl, a9999ErrorGroupId=a9999ErrorGroupId, a98ErrorStatus=a98ErrorStatus, a2OperationalState=a2OperationalState, a3SizeInMb=a3SizeInMb, a98NumberOfFatalErrors=a98NumberOfFatalErrors, DmiInteger=DmiInteger, a4HardErrorsCount=a4HardErrorsCount, dmtfGroups=dmtfGroups, mdacEventError=mdacEventError, a4DeviceType=a4DeviceType, a6ReadCacheHit=a6ReadCacheHit, a98Selfid=a98Selfid, a2MaximumLogicalDrives=a2MaximumLogicalDrives, a5CiRevision=a5CiRevision, a5MdacDeviceDriverBuildDate=a5MdacDeviceDriverBuildDate, a9999ErrorStatus=a9999ErrorStatus, a2PhysicalSectorSizeInBytes=a2PhysicalSectorSizeInBytes, a3LogicalDriveNumber=a3LogicalDriveNumber, a7AmountOfDataWrittenInKb=a7AmountOfDataWrittenInKb, eLogicalDriveStatistics=eLogicalDriveStatistics, v2=v2, a6AmountOfDataReadInMb=a6AmountOfDataReadInMb, DmiComponentIndex=DmiComponentIndex, a9999VendorCode2=a9999VendorCode2, tLogicalDriveInformation=tLogicalDriveInformation, a98NumberOfMajorErrors=a98NumberOfMajorErrors, a6ControllerNumber=a6ControllerNumber, eControllerInformation=eControllerInformation, a1Version=a1Version, a7ReadRequestsCount=a7ReadRequestsCount, tMiftomib=tMiftomib, ePhysicalDriveStatistics=ePhysicalDriveStatistics, a2BusType=a2BusType, a1Installation=a1Installation, a3RaidLevel=a3RaidLevel, a2InterruptMode=a2InterruptMode, a3ControllerNumber=a3ControllerNumber, a7ScsiTargetId=a7ScsiTargetId, a4ScsiBusId=a4ScsiBusId, a5CiBuildDate=a5CiBuildDate, a5MdacDeviceDriverRevision=a5MdacDeviceDriverRevision, a9999InstanceId=a9999InstanceId, a2RebuildRate=a2RebuildRate, a4VendorId=a4VendorId, a6AmountOfDataWrittenInMb=a6AmountOfDataWrittenInMb, 
tPhysicalDriveStatistics=tPhysicalDriveStatistics, a99MibOid=a99MibOid, a4SoftErrorsCount=a4SoftErrorsCount, tPhyicalDeviceInformation=tPhyicalDeviceInformation, a2MaximumDataTransferSizePerIoRequestInK=a2MaximumDataTransferSizePerIoRequestInK, a1Verify=a1Verify, a99MibName=a99MibName, a1SerialNumber=a1SerialNumber, a4ProductRevisionLevel=a4ProductRevisionLevel, a6LogicalDriveNumber=a6LogicalDriveNumber, a9999ParentGroupId=a9999ParentGroupId, tTrapGroup=tTrapGroup, a2InterruptVectorNumber=a2InterruptVectorNumber, a1Manufacturer=a1Manufacturer, a2SystemBusNumber=a2SystemBusNumber, a4OperationalState=a4OperationalState, a2CacheLineSizeInBytes=a2CacheLineSizeInBytes, DmiDateX=DmiDateX, a2ActualChannels=a2ActualChannels, a1Product=a1Product, mib=mib, DmiCounter=DmiCounter, eLogicalDriveInformation=eLogicalDriveInformation, a7AmountOfDataReadInKb=a7AmountOfDataReadInKb, a98NumberOfWarnings=a98NumberOfWarnings, a3PhysicalDriveMap=a3PhysicalDriveMap, a7ControllerNumber=a7ControllerNumber, ePhyicalDeviceInformation=ePhyicalDeviceInformation, a9999VendorText=a9999VendorText, a4ControllerNumber=a4ControllerNumber, a4SizeInMb=a4SizeInMb, a98AlarmGeneration=a98AlarmGeneration, tComponentid=tComponentid, a2LogicalSectorSizeInBytes=a2LogicalSectorSizeInBytes, eMiftomib=eMiftomib, a2MaximumTargetsPerChannel=a2MaximumTargetsPerChannel, a3StripeSizeInBytes=a3StripeSizeInBytes, a9999ErrorTime=a9999ErrorTime, a98ErrorStatusType=a98ErrorStatusType, a2ControllerNumber=a2ControllerNumber, tControllerInformation=tControllerInformation, eComponentid=eComponentid, a4ProductId=a4ProductId, a4MiscErrorsCount=a4MiscErrorsCount, eTrapGroup=eTrapGroup, tLogicalDriveStatistics=tLogicalDriveStatistics, a2MaximumTaggedRequests=a2MaximumTaggedRequests, a99DisableTrap=a99DisableTrap, a9999ComponentId=a9999ComponentId, a2ConfiguredChannels=a2ConfiguredChannels, tMylexDac960ComponentInstrumentationInfo=tMylexDac960ComponentInstrumentationInfo, DmiDisplaystring=DmiDisplaystring, 
a2FirmwareRevision=a2FirmwareRevision, a9999VendorCode1=a9999VendorCode1, eMylexDac960ComponentInstrumentationInfo=eMylexDac960ComponentInstrumentationInfo, a7WriteRequestsCount=a7WriteRequestsCount, a4ScsiTargetId=a4ScsiTargetId, a7ScsiBusId=a7ScsiBusId, a3WritePolicy=a3WritePolicy, a2DramSizeInMb=a2DramSizeInMb, a9999ErrorInstanceId=a9999ErrorInstanceId, a6WriteRequestsCount=a6WriteRequestsCount, a2EpromSizeInKb=a2EpromSizeInKb, a4ParityErrorsCount=a4ParityErrorsCount, mylex=mylex)
|
# Read a number between 0 and 9999 and print each decimal digit place.
n = int(input('Informe um número entre 0 e 9999: ').strip())
u = n % 10          # units  (the original wrote "n // 1 % 10"; "// 1" is a no-op)
d = n // 10 % 10    # tens
c = n // 100 % 10   # hundreds
m = n // 1000 % 10  # thousands
# Use f-strings throughout (the original mixed f-strings and str.format).
print(f'Analisando o número {n}...')
print(f'Unidade: {u}')
print(f'Dezena: {d}')
print(f'Centena: {c}')
print(f'Milhar: {m}')
|
class GCodeSegment():
    """A single parsed G-code word (e.g. ``G1 X10 Z3``) with optional coordinates.

    Attributes:
        code: letter part of the command (e.g. ``'G'``, ``'M'``).
        number: numeric part of the command, kept as supplied by the parser.
        raw: raw source text this segment was parsed from.
        x, y, z: coordinates; when at least one axis was supplied, missing
            axes are normalised to 0, otherwise all three stay None.
        has_cords: True when at least one of x/y/z was supplied.

    Note: the original class also defined a ``has_cords()`` method, but the
    instance attribute of the same name shadowed it, so calling it raised
    ``TypeError: 'bool' object is not callable``.  The dead method has been
    removed; read the ``has_cords`` attribute instead.
    """

    def __init__(self, code, number, x, y, z, raw):
        self.code = code
        self.number = number
        self.raw = raw
        self.x = x
        self.y = y
        self.z = z
        self.has_cords = (self.x is not None or self.y is not None or self.z is not None)
        # Positional segment: default any missing axis to 0.
        if self.has_cords:
            if self.x is None:
                self.x = 0
            if self.y is None:
                self.y = 0
            if self.z is None:
                self.z = 0
        # Debug trace of the parsed segment (behaviour kept from the original).
        if self.has_cords:
            print(f'\t{self.code} {self.number} ({self.x}, {self.y}, {self.z})')
        else:
            print(f'\t{self.code} {self.number}')

    def command(self):
        """Return the full command word, e.g. ``'G1'`` (code and number concatenated)."""
        return self.code + self.number

    def get_cords(self):
        """Return the ``(x, y, z)`` coordinate tuple."""
        return (self.x, self.y, self.z)

    def get_cord(self, cord):
        """Return one coordinate by axis name ('x'/'y'/'z', case-insensitive).

        Returns None for an unknown axis name.
        """
        cord = cord.upper()
        if cord == 'X':
            return self.x
        elif cord == 'Y':
            return self.y
        elif cord == 'Z':
            return self.z
|
# Convert a temperature read in Fahrenheit to Celsius and Kelvin.
print('Welcome to the Temperature Converter.')
fahrenheit = float(input('\nWhat is the given temperature in Fahrenheit degrees? '))
# C = (F - 32) * 5/9
celsius = round((fahrenheit - 32) * 5 / 9, 4)
# K = (F + 459.67) * 5/9 -- the original used 569.67, which is exactly 100 K too high.
kelvin = round((fahrenheit + 459.67) * 5 / 9, 4)
print('\nThe given temperature is equal to:')
print('\nFahrenheit degrees: \t ' + str(fahrenheit))
print('Celsius degrees: \t ' + str(celsius))
print('Kelvin degrees: \t ' + str(kelvin))
|
# Identity mapping of workflow stage names.  The spelling "registed" is
# preserved exactly, since callers look stages up by these keys.
StageDict = {stage: stage for stage in ("welcome", "hasImg", "registed")}
|
# Read an integer and report its double, triple and square root.
n = int(input('digite um numero: '))
dobro = n * 2   # already an int; the original wrapped it in a redundant int()
triplo = n * 3  # likewise
raiz = n ** 0.5  # ** with a float exponent already yields a float
print('O dobro de {} é {}, o triplo é {} e a raiz é {:.2f}'.format(n, dobro, triplo, raiz))
# (A commented-out duplicate of this whole script was removed.)
|
"""
A school is opening three new mathematics classes.  Since their maths
lessons all happen at the same time, each class gets its own room and new
desks must be bought for it.  Each desk seats at most two pupils.  Given
the number of pupils in each of the three classes, how many desks must be
bought so that every pupil has a seat?  The program reads three natural
numbers: the number of pupils in each of the three classes.
"""
class_sizes = (int(input()), int(input()), int(input()))
# Each class needs ceil(pupils / 2) desks; ceiling division via (n + 1) // 2.
print(sum((pupils + 1) // 2 for pupils in class_sizes))
|
# -*- coding: utf-8 -*-
class CookieHandler(object):
    """Pack and unpack the 64-bit OpenFlow cookie field used by OpenVSwitch.

    As implemented below, the most significant 32 bits hold the ACL entry
    id, the next 16 bits hold the source port and the last 16 bits hold
    the destination port (32 + 16 + 16 = 64).

    NOTE(review): the original docstring described a 32/4/28-bit layout
    ("operation type" plus zero padding), which does not match this code.
    """
    @staticmethod
    def get_cookie(id_acl, src_port=0, dst_port=0):
        """Build a 64-bit cookie from an ACL id and two port numbers."""
        # Fixed-width binary strings keep each field at its exact bit width.
        id_acl = format(int(id_acl), '032b')
        src_port = format(int(src_port), '016b')
        dst_port = format(int(dst_port), '016b')
        cookie = id_acl + src_port + dst_port
        return int(cookie, 2)
    @staticmethod
    def get_id_acl(cookie):
        """Extract the ACL id from the top 32 bits of *cookie*."""
        cookie = format(cookie, '064b')
        return int(cookie[0:32], 2)
    @staticmethod
    def get_src_port(cookie):
        """Extract the source port from bits 32-47 of *cookie*."""
        cookie = format(cookie, '064b')
        return int(cookie[32:48], 2)
    @staticmethod
    def get_dst_port(cookie):
        """Extract the destination port from bits 48-63 of *cookie*."""
        cookie = format(cookie, '064b')
        return int(cookie[48:64], 2)
|
#-*- coding: utf-8 -*-
def getAdjacentes(qtde_v, MATRIZ):
    """Build and print the adjacency lists of a graph.

    Args:
        qtde_v: number of vertices.
        MATRIZ: adjacency matrix; MATRIZ[i][j] == 1 means an edge i -> j.

    Returns:
        A list where entry i holds the labels ("v<j>") of the vertices
        adjacent to vertex i.  The original printed the lists but returned
        None; returning the list as well is backward compatible.
    """
    aMATRIZ = [
        ["v" + str(j) for j in range(qtde_v) if MATRIZ[i][j] == 1]
        for i in range(qtde_v)
    ]
    # Same report as before: one "v<i>: [...]" line per vertex.
    for i, adjacentes in enumerate(aMATRIZ):
        print("v" + str(i) + ": ", adjacentes)
    return aMATRIZ
|
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# GYP build definition: a static library bundling the remote-bitrate-estimator
# building blocks (rate controls, inter-arrival tracking, overuse
# detection/estimation) plus the BWE test logging helpers.
{
  'includes': [
    '../../../webrtc/build/common.gypi',
  ],
  'targets': [
    {
      'target_name': 'rbe_components',
      'type': 'static_library',
      'include_dirs': [
        '<(webrtc_root)/modules/remote_bitrate_estimator',
      ],
      'sources': [
        '<(webrtc_root)/modules/remote_bitrate_estimator/test/bwe_test_logging.cc',
        '<(webrtc_root)/modules/remote_bitrate_estimator/test/bwe_test_logging.h',
        'aimd_rate_control.cc',
        'aimd_rate_control.h',
        'inter_arrival.cc',
        'inter_arrival.h',
        'mimd_rate_control.cc',
        'mimd_rate_control.h',
        'overuse_detector.cc',
        'overuse_detector.h',
        'overuse_estimator.cc',
        'overuse_estimator.h',
        'remote_bitrate_estimator_abs_send_time.cc',
        'remote_bitrate_estimator_single_stream.cc',
        'remote_rate_control.cc',
        'remote_rate_control.h',
      ],
    },
  ],
}
|
# Read an integer and report its predecessor and successor.
numero = int(input('Digite um número: '))
print('O antecessor do número {}, é: {}, e seu sucessor é: {}.'.format(numero, numero - 1, numero + 1))
|
class Solution:
    def removeElement(self, nums: List[int], val: int) -> int:
        """Remove every occurrence of *val* from *nums* in place.

        Returns the count of surviving elements; *nums* is truncated to that
        length with the survivors in their original relative order.

        Runs in O(n).  The original popped from the middle of the list on
        each match, which is O(n) per removal and O(n^2) overall.
        """
        write = 0
        for value in nums:
            if value != val:
                nums[write] = value
                write += 1
        # Truncate so len(nums) matches the returned count, as before.
        del nums[write:]
        return write
|
class Base():
    """
    This class represents the base variation of the game upon which other variations can be
    built, utilizing the functionalities of this class.

    If the subclass does not override all of the methods below, a
    NotImplementedError is raised when the missing method is called.
    """
    def __init__(self, players):
        # Subclasses must define their own constructor taking the players.
        raise NotImplementedError
    def play_a_round(self, players=None):
        """
        Override this function in the subclass implementing a round of the game with players
        and deck
        """
        raise NotImplementedError
    def end_game_reached(self):
        """
        Override this function in the subclass to define the condition which states that the
        game is over and the winner is declared
        """
        raise NotImplementedError
    def get_winner(self):
        """
        Override this function in the subclass to return the winner of the game when called
        """
        raise NotImplementedError
    def remove_players_lost(self):
        """
        Override this function in the subclass to cleanup the players who lose the game either
        by losing all the cards or the losing condition defined in the variation
        """
        raise NotImplementedError
|
class Animal:
    """Base demo animal: stores a name and provides default behaviors."""

    def __init__(self, nombre):
        self.nombre = nombre

    def dormir(self):
        # Every animal sleeps the same way.
        print("zZzZ")

    def mover(self):
        # Default locomotion; subclasses override as needed.
        print("caminar")
class Sponge(Animal):
    """An animal that cannot move: mover is overridden to do nothing."""

    def mover(self):
        # Sponges stay put; deliberately no output.
        pass
class Cat(Animal):
    """Adds a cat-specific noise on top of the Animal defaults."""

    def hacer_ruido(self):
        print("Meow")
class Fish(Animal):
    """Overrides locomotion and adds a noise for fish."""

    def mover(self):
        print("swim")

    def hacer_ruido(self):
        print("glu glu")
# Demo: exercise each Animal subclass.
pelusa = Cat("Pelusa")
pelusa.dormir()
pelusa.mover()  # inherited Animal.mover -> "caminar"
pelusa.hacer_ruido()
nemo = Fish("Nemo")
nemo.dormir()
nemo.mover()  # overridden -> "swim"
nemo.hacer_ruido()
bob = Sponge("Bob")
bob.dormir()
bob.mover()  # overridden no-op: sponges do not move
|
# node-gyp build configuration for the 'ftdi_labtic' native addon.
{
  'targets': [
    {
      'target_name': 'ftdi_labtic',
      'sources':
      [
        'src/ftdi_device.cc',
        'src/ftdi_driver.cc'
      ],
      'include_dirs+':
      [
        'src/',
      ],
      'conditions':
      [
        # Windows: link the vendored FTDI libs matching the target arch.
        ['OS == "win"',
        {
          'include_dirs+':
          [
            'lib/'
          ],
          'link_settings':
          {
            "conditions" :
            [
              ["target_arch=='ia32'",
              {
                'libraries':
                [
                  '-l<(module_root_dir)/lib/i386/ftd2xx.lib',
                  '-l<(module_root_dir)/lib/i386/FTChipID.lib'
                ]
              }
              ],
              ["target_arch=='x64'", {
                'libraries': [
                  '-l<(module_root_dir)/lib/amd64/ftd2xx.lib',
                  '-l<(module_root_dir)/lib/amd64/FTChipID.lib'
                ]
              }]
            ]
          }
        }],
        # Non-Windows: use the system-installed libftd2xx.
        ['OS != "win"',
        {
          'include_dirs+': [
            '/usr/local/include/libftd2xx/'
          ],
          'ldflags': [
            '-Wl,-Map=output.map',
          ],
          'link_settings': {
            'libraries': [
              '-lftd2xx'
            ]
          }
        }
        ]
      ],
    }
  ]
}
|
def print_head(msg: str):
    """Print *msg* framed in a simple ASCII box."""
    framed = f"| {msg} |"
    border = "-" * len(framed)
    print(border)
    print(framed)
    print(border)
|
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
def rpmpack_dependencies():
    """Registers the Go toolchain, gazelle and the external Go modules rpmpack needs."""
    go_rules_dependencies()
    go_register_toolchains()
    gazelle_dependencies()
    # Pinned external Go module dependencies.
    go_repository(
        name = "com_github_pkg_errors",
        importpath = "github.com/pkg/errors",
        tag = "v0.8.1",
    )
    go_repository(
        name = "com_github_google_go_cmp",
        importpath = "github.com/google/go-cmp",
        tag = "v0.2.0",
    )
    # Pinned by commit: this module has no release tag at the needed revision.
    go_repository(
        name = "com_github_cavaliercoder_go_cpio",
        commit = "925f9528c45e",
        importpath = "github.com/cavaliercoder/go-cpio",
    )
    go_repository(
        name = "com_github_ulikunitz_xz",
        importpath = "github.com/ulikunitz/xz",
        tag = "v0.5.6",
    )
|
#Faça um programa para a leitura de duas notas parciais de um aluno.
#O programa deve calcular a média alcançada por aluno e apresentar:
#A mensagem "Aprovado", se a média alcançada for maior ou igual a sete;
#A mensagem "Reprovado", se a média for menor do que sete;
#A mensagem "Aprovado com Distinção", se a média for igual a dez.
# Read the two partial grades and classify the resulting average.
n1 = float(input('Digite a nota da 1º avaliação:\n'))
n2 = float(input('digite a nota da 2º avaliação:\n'))
md = (n1 + n2) / 2
# A perfect 10 is a special case of approval, so it is tested first.
if md == 10:
    print('Aprovado com Distinção')
elif md >= 7:
    print('Aprovado')
else:
    print('Reprovado')
|
class FiltroRecurso():
    """A resource filter on name, type, location and time intervals.

    Unset (falsy) criteria match any resource; the interval criterion
    matches when no existing booking overlaps any requested interval.
    """

    def __init__(self, nome, tipo, local, intervalos):
        self.nome = nome
        self.tipo = tipo
        self.local = local
        self.intervalos = intervalos

    def atende(self, recurso):
        """Return whether *recurso* satisfies every criterion of this filter."""
        # Short-circuits left to right, so later checks are skipped on failure.
        return (self.atende_nome(recurso)
                and self.atende_tipo(recurso)
                and self.atende_local(recurso)
                and self.atende_intervalos(recurso))

    def atende_nome(self, recurso):
        # An unset name matches any resource.
        return not self.nome or recurso.nome == self.nome

    def atende_tipo(self, recurso):
        return not self.tipo or recurso.tipo == self.tipo

    def atende_local(self, recurso):
        return not self.local or recurso.local == self.local

    def atende_intervalos(self, recurso):
        # Reject when any booking of the resource overlaps any requested interval.
        overlaps = (
            agendamento.intervalo.intercede(intervalo)
            for agendamento in recurso.agendamentos
            for intervalo in self.intervalos
        )
        return not any(overlaps)
|
class Dict(dict):
    """dict subclass adding a ``gets`` multi-key lookup.

    ``gets`` accepts either a comma-separated string of keys or an
    iterable (list/tuple/set) of keys and returns the value of the first
    key holding a truthy value.  Use plain ``get`` when a key itself
    contains a comma.  Intended for probing dynamic JSON payloads.

    Example:
        d = Dict({"e": 11, "f": 66, "g": 99})
        d.gets("e")            -> 11
        d.gets("h,f")          -> 66
        d.gets(["h", "f"])     -> 66
    """

    def __init__(self, d=None):
        super().__init__()
        if isinstance(d, dict):
            # Shallow-copy the given mapping into self.
            for key in d:
                self[key] = d.get(key)

    def gets(self, k_iter, default=None):
        """Return the first truthy value among the given keys, else *default*.

        Args:
            k_iter: comma-separated key string, or a list/tuple/set of keys.
            default: returned when no key holds a truthy value.  Also
                returned for unsupported *k_iter* types (the original
                silently fell through and returned None, ignoring
                *default*).
        """
        if isinstance(k_iter, str):
            keys = [part.strip() for part in k_iter.split(",")]
        elif isinstance(k_iter, (list, set, tuple)):
            keys = k_iter
        else:
            return default
        for key in keys:
            # Truthy check (not mere presence) mirrors the documented
            # "first key with a value" semantics.
            if self.get(key):
                return self[key]
        return default
|
# Recursion demo: a function that calls itself, stepping toward 1.
def cetak(x):
    """Print x, then recurse one step closer to 1 (from above or below)."""
    print(x)
    if x != 1:
        cetak(x - 1 if x > 1 else x + 1)

cetak(5)
|
def more_zeros(s):
    """Return the distinct chars of *s*, in order, whose ASCII binary
    representation contains more zeros than ones."""
    deduped = "".join(dict.fromkeys(s))  # drop duplicates, keep first-seen order
    result = []
    for ch in deduped:
        bits = bin(ord(ch))[2:]
        # len > 2*ones  <=>  zeros > ones
        if bits.count('0') > bits.count('1'):
            result.append(ch)
    return result

print(more_zeros("DIGEST"))
|
# Read a test-case count, then for each case print n mod k (or n when k <= 0).
tc = int(input())
while tc:
    tc -= 1
    n, k = map(int, input().split())
    # Guarding k > 0 avoids ZeroDivisionError for k == 0.
    print(n % k if k > 0 else n)
|
# Print the smallest positive integer absent from the input numbers.
arr = list(map(int, input().split()))
# A set gives O(1) membership tests; the original `i not in arr`
# list scan made the search O(n^2) overall.
seen = set(arr)
i = 1
while i in seen:
    i += 1
print(i)
|
"""
Define citations for ESPEI
"""
# Plain-text, one-line citation for ESPEI (suitable for console output).
ESPEI_CITATION = "B. Bocklund, R. Otis, A. Egorov, A. Obaied, I. Roslyakova, Z.-K. Liu, ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg, MRS Commun. (2019) 1-10. doi:10.1557/mrc.2019.59."
# BibTeX entry for the same reference.
ESPEI_BIBTEX = """@article{Bocklund2019ESPEI,
archivePrefix = {arXiv},
arxivId = {1902.01269},
author = {Bocklund, Brandon and Otis, Richard and Egorov, Aleksei and Obaied, Abdulmonem and Roslyakova, Irina and Liu, Zi-Kui},
doi = {10.1557/mrc.2019.59},
eprint = {1902.01269},
issn = {2159-6859},
journal = {MRS Communications},
month = {jun},
pages = {1--10},
title = {{ESPEI for efficient thermodynamic database development, modification, and uncertainty quantification: application to Cu-Mg}},
year = {2019}
}
"""
|
class Token:
    """A lexical token: its type, source text, literal value and line number."""

    def __init__(self, t_type, lexeme, literal, line):
        self.type = t_type
        self.lexeme = lexeme
        self.literal = literal
        self.line = line

    def __repr__(self):
        return f"Token(type: {self.type}, lexeme: {self.lexeme}, literal: {self.literal}, line: {self.line})"
class Node:
    """Abstract base for all AST node types below."""
class NumberNode(Node):
    """Leaf node wrapping a numeric literal."""

    def __init__(self, num):
        self.value = num

    def __repr__(self):
        return f"Number({self.value})"

    @property
    def children(self):
        # A literal has no sub-nodes.
        return ()
class BinOp(Node):
    """Binary operation: left operand, operator, right operand."""

    def __init__(self, l, r, op):
        self.left = l
        self.right = r
        self.op = op

    def __repr__(self):
        return f"BinOp({self.left} {self.op} {self.right})"

    @property
    def children(self):
        # The operator sits between the operands, mirroring source order.
        return (self.left, self.op, self.right)
class AssignVar(Node):
    """Assignment of a value expression to a variable name."""

    def __init__(self, varname, v):
        self.name = varname
        self.value = v

    def __repr__(self):
        return f"Assign({self.name} = {self.value})"

    @property
    def children(self):
        return (self.name, self.value)
class Variable(Node):
    """Reference to a variable by name."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return f"Variable({self.name})"

    @property
    def children(self):
        return (self.name,)
class Call(Node):
    """Call of a callable expression with an argument list."""

    def __init__(self, value, args):
        self.value = value
        self.args = args

    def __repr__(self):
        return f"Call({self.value}, {self.args})"

    @property
    def children(self):
        return (self.value, self.args)
class ArrayNode(Node):
    """Array literal holding a list of element nodes."""

    def __init__(self, elements):
        self.elements = elements

    def __repr__(self):
        return f"Array{self.elements}"

    @property
    def children(self):
        # Returned as-is (a list, unlike the tuple used by other nodes).
        return self.elements
class IndexFrom(Node):
    """Indexing expression: value[idx]."""

    def __init__(self, value, idx):
        self.value = value
        self.idx = idx

    def __repr__(self):
        return f"IndexFrom({self.value}: {self.idx})"

    @property
    def children(self):
        return (self.value, self.idx)
class SetAtIndex(Node):
    """Index assignment: value[idx] = new."""

    def __init__(self, value, idx, new):
        self.value = value
        self.idx = idx
        self.new = new

    def __repr__(self):
        # NOTE(review): self.new is not shown; kept as-is to preserve output.
        return f"SetAtIndex({self.value}: {self.idx})"

    @property
    def children(self):
        return (self.value, self.idx, self.new)
|
class Pessoa:
    """A person with age, name and children; demonstrates class vs instance attributes."""

    olhos = 2  # class attribute shared by all instances

    def __init__(self, idade=None, nome='', *filhos):
        # Instance (per-object) attributes.
        self.idade = idade
        self.nome = nome
        self.filhos = list(filhos)  # varargs copied into a mutable list

    def cumprimentar(self):
        """Return a greeting introducing this person."""
        return f'Olá, meu nome é: {self.nome}'

    @classmethod
    def nome_e_atributos_de_classe(cls):
        # Class method: reports the class object and its eye count.
        return f'{cls} - olhos {cls.olhos}'
class Homem(Pessoa):
    """A Pessoa variant that extends the greeting with a handshake."""

    def cumprimentar(self):
        # Method override: reuse the parent's greeting, then extend it.
        saudacao_base = super().cumprimentar()
        return f'{saudacao_base}. Aperto de Mão'
class Mutante(Pessoa):
    """A Pessoa variant that shadows the class attribute ``olhos``."""

    olhos = 3  # attribute override hides Pessoa.olhos
if __name__ == ('__main__'):
    # Demo: wire the objects into a small family tree.
    victor = Mutante(nome='Victor', idade=5)  # Matheus' son
    marcelo = Homem(nome='Marcelo', idade=3)  # Matheus' son
    matheus = Homem(23, 'Matheus', victor, marcelo)  # father of Victor and Marcelo
    victor.filhos = 'Victorzinho JR'
    matheus.neto = 'Victorzinho JR'  # dynamic attribute -> created on "matheus" after __init__
    for filho in matheus.filhos:
        print(filho.nome, filho.idade, filho.filhos)
    print(matheus.neto)
    print(victor.__dict__)
    print(marcelo.__dict__)
    print(matheus.__dict__)
    print(matheus.olhos)  # class attribute read through the instance
    print(victor.olhos)
    print(marcelo.cumprimentar())
    print(victor.cumprimentar())
|
# Mail server connection settings.
CONNECTION = {
    'server': 'example@mail.com',
    'user': 'user',
    'password': 'password',
    'port': 993  # NOTE(review): presumably IMAP over SSL -- confirm with the consumer
}
# MIME content types whose bodies are processed.
CONTENT_TYPES = ['text/plain', 'text/html']
# Directory where attachments are saved ('' = current working directory).
ATTACHMENT_DIR = ''
# Attachment file extensions that are accepted.
ALLOWED_EXTENSIONS = ['csv']
|
class LNode:
    """Singly-linked node holding an element and a next pointer."""

    def __init__(self, elem, next_=None):
        self.elem = elem
        self.next = next_


class LCList:
    """Circular singly-linked list; ``_rear`` points at the last node."""

    def __init__(self):
        self._rear = None

    def is_empty(self):
        return self._rear is None

    def prepend(self, elem):
        """Insert *elem* at the head (the node right after ``_rear``)."""
        p = LNode(elem)
        if self._rear is None:
            p.next = p  # a lone node links to itself
            self._rear = p
        else:
            p.next = self._rear.next
            self._rear.next = p

    def append(self, elem):
        """Insert *elem* at the tail: prepend, then rotate ``_rear`` onto it."""
        self.prepend(elem)
        self._rear = self._rear.next

    def pop(self):
        """Remove and return the head element.

        Raises:
            IndexError: when the list is empty.  (The original printed
            "no data" and then crashed with AttributeError on
            ``self._rear.next``.)
        """
        if self._rear is None:
            raise IndexError("pop from empty LCList")
        p = self._rear.next
        if self._rear is p:
            self._rear = None
        else:
            self._rear.next = p.next
        return p.elem

    def printall(self):
        """Print every element from head to rear, one per line."""
        if self.is_empty():
            return
        p = self._rear.next
        while True:
            print(p.elem)
            if p is self._rear:
                break
            p = p.next
# Report whether this file was executed directly or imported as a module.
if __name__ == '__main__':
    print("main program")
else:
    print("Load module "+__file__)
|
# Count 0..5; the while/else branch runs once the condition turns false.
counter = 0
while counter <= 5:
    print("counter", counter)
    counter += 1
else:
    print("counter has become false ")
|
# SQL LIMIT/OFFSET clause template used by the query builders below.
LIMIT_STATEMENT = "LIMIT {last} OFFSET {offset}"
# All subtree keyword nodes of one keyword tree, ordered by call index
# (fingerprint passed as a query parameter).
SUBTREES = """
SELECT keyword_tree.fingerprint, keyword, library, status, arguments, call_index
FROM keyword_tree
JOIN tree_hierarchy ON tree_hierarchy.subtree=keyword_tree.fingerprint
WHERE tree_hierarchy.fingerprint=%(fingerprint)s
ORDER BY call_index::int;
"""
# Distinct team names, e.g. for populating a team selector.
TEAM_NAMES = "SELECT DISTINCT team FROM test_series ORDER BY team"
# Build status is aggregated from test cases statuses.
# Due to reruns only the last execution of a test case is considered.
# Last execution is determined primarily by test start_time if that exists
# otherwise by archiving order i.e. test_run_id
def test_series(by_teams=False, series=None, team=None):
    """Build the SQL listing all test series with last-build summary data.

    Args:
        by_teams: when True, sort primarily by team name.
        series: optional series id to restrict the query to (inlined as int).
        team: optional team name filter (kept as a %(team)s parameter).

    Returns:
        The SQL string; only int-cast values are inlined (see # nosec).
    """
    return """
    WITH last_builds as (
        SELECT series,
               max(build_number) as build_number,
               count(*) as build_count
        FROM test_series_mapping
        {series_filter}
        GROUP BY series
    )
    SELECT id, name, team, build_count as builds,
           build_number as last_build,
           CASE WHEN build_id IS NULL THEN build_number::text ELSE build_id END as last_build_id,
           min(generated) as last_generated,
           min(imported_at) as last_imported,
           min(status) as last_status,
           min(start_time) as last_started,
           CASE WHEN min(start_time) IS NOT NULL THEN min(start_time) ELSE min(imported_at) END as sorting_value
    FROM (
        SELECT DISTINCT ON (tsm.series, test_id)
               tsm.series, build_count, tsm.build_number, build_id,
               generated, imported_at,
               test_result.status
        FROM last_builds
        JOIN test_series_mapping as tsm ON last_builds.series=tsm.series
                                       AND last_builds.build_number=tsm.build_number
        JOIN test_result ON tsm.test_run_id=test_result.test_run_id
        JOIN test_run ON test_run.id=tsm.test_run_id
        WHERE NOT test_run.ignored
        ORDER BY tsm.series, test_id, start_time DESC, test_result.test_run_id DESC
    ) AS final_test_results
    JOIN test_series ON test_series.id=final_test_results.series
    JOIN (
        SELECT tsm.series, min(start_time) as start_time
        FROM last_builds
        JOIN test_series_mapping as tsm ON last_builds.series=tsm.series
                                       AND last_builds.build_number=tsm.build_number
        JOIN suite_result ON tsm.test_run_id=suite_result.test_run_id
        GROUP BY tsm.series
    ) AS last_build_start_times ON test_series.id=last_build_start_times.series
    {team_filter}
    GROUP BY id, name, team, build_count, build_number, build_id
    ORDER BY {team_sorting} sorting_value
    """.format(team_sorting="team," if by_teams else '',  # nosec
               series_filter='WHERE series={}'.format(int(series)) if series else '',  # nosec
               team_filter='WHERE team=%(team)s' if team else '')
def test_run_ids(series=None, build_num=None, start_from=None, last=None, offset=0):
    """Build SQL selecting ids of non-ignored test runs, optionally narrowed
    to one series and to a single build number or a window of recent builds.
    """
    conditions = []
    if series:
        conditions.append("series={series_id}".format(series_id=int(series)))
    if build_num:
        # An explicit build number takes precedence over the window.
        conditions.append("build_number={}".format(int(build_num)))
    elif last:
        conditions.append("build_number IN ({})".format(build_numbers(series, start_from, last, offset)))
    extra_where = 'AND ' + ' AND '.join(conditions) if conditions else ''
    return """
    SELECT test_run_id
    FROM test_series_mapping as tsm
    JOIN test_run ON test_run.id=tsm.test_run_id
    WHERE NOT ignored
    {filters}
    ORDER BY build_number, test_run_id
    """.format(filters=extra_where)  # nosec
def build_numbers(series, start_from=None, last=None, offset=0):
    """Build SQL selecting the distinct build numbers of *series*, newest
    first, optionally bounded above by *start_from* and windowed via
    *last*/*offset*.
    """
    window = LIMIT_STATEMENT.format(last=int(last), offset=int(offset)) if last else ''
    upper_bound = "AND build_number <= {}".format(int(start_from)) if start_from else ''
    return """
    SELECT build_number
    FROM (
        SELECT DISTINCT build_number
        FROM test_series_mapping as tsm
        JOIN test_run ON test_run.id=tsm.test_run_id
        WHERE series={series}
        {starting_filter}
        AND NOT ignored
    ) as build_numbers
    ORDER BY build_number DESC
    {limit}
    """.format(series=int(series),  # nosec
               starting_filter=upper_bound,
               limit=window)
def builds(series, build_number=None, start_from=None, last=None, offset=0, reverse=False):
    """Build SQL summarizing each build of *series*: its test runs,
    aggregated status and key timestamps.

    Restricted to one build when *build_number* is given, or to a window
    of recent builds via *start_from*/*last*/*offset*.  Newest build
    first unless *reverse* is True.
    """
    if build_number:
        build_filter = "AND build_number={}".format(int(build_number))
    elif start_from or last:
        build_filter = "AND build_number IN ({})".format(build_numbers(series, start_from, last, offset))
    else:
        build_filter = ""
    return """
    SELECT team, name,
           build_number,
           CASE WHEN build_id IS NULL THEN build_number::text ELSE build_id END as build_id,
           array_agg(test_run_id) as test_runs,
           min(status) as status,
           min(imported_at) as archiving_time,
           min(generated) as generation_time,
           min(start_time) as start_time
    FROM (
        SELECT team, name,
               build_number, build_id,
               test_run.id as test_run_id,
               min(test_run.imported_at) as imported_at,
               min(test_run.generated) as generated,
               -- Status can be aggregated by min because of FAIL, PASS, SKIP are in alphabetical order
               min(test_statuses.status) as status,
               -- The starting timestamp is from the suite timestamps
               min(suite_result.start_time) as start_time
        FROM test_series_mapping as tsm
        JOIN test_series ON test_series.id=tsm.series
        JOIN test_run ON test_run.id=tsm.test_run_id
        JOIN suite_result ON suite_result.test_run_id=test_run.id
        JOIN (
            SELECT DISTINCT ON (build_id, test_id)
                   test_result.test_run_id, status
            FROM test_result
            JOIN test_series_mapping as tsm ON tsm.test_run_id=test_result.test_run_id
            WHERE tsm.series={series}
            ORDER BY build_id, test_id, start_time DESC, test_result.test_run_id DESC
        ) AS test_statuses ON tsm.test_run_id=test_statuses.test_run_id
        WHERE tsm.series={series} and NOT test_run.ignored
        {build_filter}
        GROUP BY team, name, build_number, build_id, test_run.id
        ORDER BY build_number, test_run.id
    ) AS test_runs
    GROUP BY team, name, build_number, build_id
    ORDER BY build_number {order};
    """.format(series=int(series),  # nosec
               build_filter=build_filter,
               order='ASC' if reverse else 'DESC')
def suite_result_info(series, build_num, suite):
    """Build SQL listing, per test case of *suite*, the test runs and
    statuses (newest first) within the given build of *series*.
    """
    return """
    SELECT array_agg(test_result.test_run_id ORDER BY test_result.start_time DESC) as test_runs,
           array_agg(test_result.status ORDER BY test_result.start_time DESC) as statuses,
           suite.id as suite_id,
           suite.name as suite_name,
           suite.full_name as suite_full_name,
           suite.repository as suite_repository,
           test_case.id as id,
           test_case.name as name,
           test_case.full_name as full_name
    FROM suite_result
    JOIN suite ON suite_result.suite_id=suite.id
    JOIN test_case ON test_case.suite_id=suite.id
    JOIN test_result ON test_result.test_id=test_case.id
                    AND test_result.test_run_id=suite_result.test_run_id
    WHERE suite_result.suite_id={suite_id}
    AND test_result.test_run_id IN ({test_run_ids})
    GROUP BY suite.id, suite.name, suite.full_name,
             test_case.id, test_case.name, test_case.full_name
    ORDER BY test_case.name;
    """.format(test_run_ids=test_run_ids(series, build_num), suite_id=int(suite))  # nosec
def build_metadata(series, build_num):
    """Build SQL collecting the distinct suite metadata name/value pairs
    recorded for the given build of *series*.
    """
    return """
    SELECT DISTINCT ON (suite_metadata.name, suite_metadata.value)
           suite_metadata.name as metadata_name,
           suite_metadata.value as metadata_value,
           suite_metadata.suite_id,
           suite_metadata.test_run_id
    FROM suite_metadata
    JOIN suite ON suite.id=suite_metadata.suite_id
    WHERE test_run_id IN ({test_run_ids})
    ORDER BY suite_metadata.name, suite_metadata.value, suite.full_name
    """.format(test_run_ids=test_run_ids(series, build_num=build_num))  # nosec
def status_counts(series, start_from, last, offset=0):
    """Build SQL aggregating, per build in the selected window of *series*,
    the test and suite status counts (total/passed/failed/skipped/other)
    plus the build start time, newest build first.
    """
    return """
    SELECT sum(total)::int as tests_total,
           sum(passed)::int as tests_passed,
           sum(failed)::int as tests_failed,
           sum(skipped)::int as tests_skipped,
           sum(other)::int as tests_other,
           count(*) as suites_total,
           count(nullif(status !~ '^PASS', true)) as suites_passed,
           count(nullif(status !~ '^FAIL', true)) as suites_failed,
           count(nullif(status !~ '^SKIP', true)) as suites_skipped,
           count(nullif(status ~ '^((SKIP)|(PASS)|(FAIL))', true)) as suites_other,
           min(build_start_time) as build_start_time,
           build_id,
           build_number
    FROM (
        SELECT
               count(*) as total,
               count(nullif(status !~ '^PASS', true)) as passed,
               count(nullif(status !~ '^FAIL', true)) as failed,
               count(nullif(status !~ '^SKIP', true)) as skipped,
               count(nullif(status ~ '^((SKIP)|(PASS)|(FAIL))', true)) as other,
               min(status) as status,
               min(test_run_start_time) as build_start_time,
               build_id,
               build_number
        FROM (
            SELECT DISTINCT ON (test_case.id, build_number)
                   test_case.suite_id,
                   test_result.test_id,
                   test_result.status,
                   test_run_times.start_time as test_run_start_time,
                   CASE WHEN build_id IS NULL THEN build_number::text ELSE build_id END as build_id,
                   build_number
            FROM test_result
            JOIN test_case ON test_case.id=test_result.test_id
            JOIN test_run ON test_run.id=test_result.test_run_id
            JOIN (
                SELECT min(start_time) as start_time, test_run_id
                FROM suite_result
                WHERE test_run_id IN ({test_run_ids})
                GROUP BY test_run_id
            ) AS test_run_times ON test_run_times.test_run_id=test_result.test_run_id
            JOIN test_series_mapping as tsm ON test_run.id=tsm.test_run_id
                                           AND tsm.series={series}
            WHERE NOT test_run.ignored
            AND test_run.id IN ({test_run_ids})
            ORDER BY test_case.id, build_number, test_result.start_time DESC, test_result.test_run_id DESC
        ) AS status_per_test
        GROUP BY suite_id, build_number, build_id
    ) AS status_per_suite
    GROUP BY build_number, build_id
    ORDER BY build_number DESC
    """.format(series=int(series),  # nosec
               test_run_ids=test_run_ids(series, start_from=start_from, last=last, offset=offset))
def history_page_data(series, start_from, last, offset=0):
    """Build SQL for the history-page matrix: the latest result of every
    test case for each build in the selected window of *series*, with
    tags and the last ERROR/FAIL log message per test.
    """
    return """
    SELECT build_number,
           suite_id,
           suite_name,
           suite_full_name,
           suite_repository,
           id,
           name,
           full_name,
           test_run_id,
           status,
           start_time,
           elapsed,
           tags,
           failure_log_level,
           failure_message,
           failure_timestamp
    FROM (
        SELECT DISTINCT ON (suite.id, test_results.id, build_number)
               tsm.build_number,
               suite.id as suite_id, suite.name as suite_name, suite.full_name as suite_full_name,
               suite.repository as suite_repository,
               suite_result.test_run_id as suite_test_run_id,
               suite_result.start_time as suite_start_time,
               test_results.id as id, test_results.name as name, test_results.full_name as full_name,
               test_results._test_run_id as test_run_id,
               test_results.status as status,
               -- test_results.setup_status as setup_status,
               -- test_results.execution_status as execution_status,
               -- test_results.teardown_status as teardown_status,
               -- test_results.fingerprint as fingerprint,
               -- test_results.setup_fingerprint as setup_fingerprint,
               -- test_results.execution_fingerprint as execution_fingerprint,
               -- test_results.teardown_fingerprint as teardown_fingerprint,
               test_results.start_time as start_time,
               test_results.elapsed as elapsed,
               -- test_results.setup_elapsed as setup_elapsed,
               -- test_results.execution_elapsed as execution_elapsed,
               -- test_results.teardown_elapsed as teardown_elapsed,
               CASE WHEN tags IS NULL THEN '{array_literal}' ELSE tags END as tags,
               log_messages.log_level as failure_log_level,
               log_messages.message as failure_message,
               log_messages.timestamp as failure_timestamp
        FROM suite_result
        JOIN suite ON suite.id=suite_result.suite_id
        JOIN test_run ON test_run.id=suite_result.test_run_id
        JOIN test_series_mapping as tsm ON test_run.id=tsm.test_run_id
                                       AND tsm.series={series}
        LEFT OUTER JOIN (
            SELECT DISTINCT ON (test_case.id, build_number) *, test_result.test_run_id as _test_run_id
            FROM test_result
            JOIN test_case ON test_case.id=test_result.test_id
            JOIN test_series_mapping as tsm ON test_result.test_run_id=tsm.test_run_id
                                           AND tsm.series={series}
            WHERE test_result.test_run_id IN ({test_run_ids})
            ORDER BY test_case.id, build_number DESC, start_time DESC, test_result.test_run_id DESC
        ) as test_results ON test_results.suite_id=suite.id
                         AND test_results.build_number=tsm.build_number
        LEFT OUTER JOIN (
            SELECT array_agg(tag ORDER BY tag) as tags, test_id, test_run_id
            FROM test_tag
            WHERE test_run_id IN ({test_run_ids})
            GROUP BY test_id, test_run_id
        ) as test_tags ON test_tags.test_id=test_results.test_id
                      AND test_tags.test_run_id=test_results._test_run_id
        LEFT OUTER JOIN (
            SELECT DISTINCT ON (test_run_id, test_id)
                   test_run_id, test_id, log_level, message, timestamp
            FROM log_message
            WHERE test_run_id IN ({test_run_ids})
            AND test_id IS NOT NULL
            AND log_level IN ('ERROR', 'FAIL')
            ORDER BY test_run_id, test_id, timestamp DESC, id DESC
        ) as log_messages ON log_messages.test_id=test_results.test_id
                         AND log_messages.test_run_id=test_results._test_run_id
        WHERE suite_result.test_run_id IN ({test_run_ids})
        AND NOT ignored
        ORDER BY suite_id, test_results.id, build_number DESC, suite_start_time DESC, suite_test_run_id DESC
    ) AS results
    ORDER BY suite_full_name, full_name, build_number DESC;
    """.format(array_literal='{}',  # nosec
               series=int(series),
               test_run_ids=test_run_ids(series, start_from=start_from, last=last, offset=offset))
def simple_build_results_data(series, build_number):
    """Build SQL listing the latest result of every test case in the given
    build of *series*, with per-phase statuses, fingerprints and elapsed
    times (no tags or log messages).
    """
    return """
    SELECT suite_id,
           suite_name,
           suite_full_name,
           suite_repository,
           id,
           name,
           full_name,
           test_run_id,
           status,
           setup_status,
           execution_status,
           teardown_status,
           start_time,
           fingerprint,
           setup_fingerprint,
           execution_fingerprint,
           teardown_fingerprint,
           elapsed,
           setup_elapsed,
           execution_elapsed,
           teardown_elapsed
    FROM (
        SELECT DISTINCT ON (suite.id, test_results.id)
               suite.id as suite_id, suite.name as suite_name, suite.full_name as suite_full_name,
               suite.repository as suite_repository,
               suite_result.test_run_id as suite_test_run_id,
               suite_result.start_time as suite_start_time,
               test_results.id as id, test_results.name as name, test_results.full_name as full_name,
               test_results._test_run_id as test_run_id,
               test_results.status as status,
               test_results.setup_status as setup_status,
               test_results.execution_status as execution_status,
               test_results.teardown_status as teardown_status,
               test_results.start_time as start_time,
               test_results.fingerprint as fingerprint,
               test_results.setup_fingerprint as setup_fingerprint,
               test_results.execution_fingerprint as execution_fingerprint,
               test_results.teardown_fingerprint as teardown_fingerprint,
               test_results.elapsed as elapsed,
               test_results.setup_elapsed as setup_elapsed,
               test_results.execution_elapsed as execution_elapsed,
               test_results.teardown_elapsed as teardown_elapsed
        FROM suite_result
        JOIN suite ON suite.id=suite_result.suite_id
        JOIN test_run ON test_run.id=suite_result.test_run_id
        LEFT OUTER JOIN (
            SELECT DISTINCT ON (test_case.id) *, test_result.test_run_id as _test_run_id
            FROM test_result
            JOIN test_case ON test_case.id=test_result.test_id
            WHERE test_result.test_run_id IN ({test_run_ids})
            ORDER BY test_case.id, start_time DESC, test_result.test_run_id DESC
        ) as test_results ON test_results.suite_id=suite.id
        WHERE suite_result.test_run_id IN ({test_run_ids})
        AND NOT ignored
        ORDER BY suite_id, test_results.id, suite_start_time DESC, suite_test_run_id DESC
    ) AS results
    ORDER BY suite_full_name, full_name;
    """.format(test_run_ids=test_run_ids(series, build_num=build_number))  # nosec
def suite_result(series, build_number, suite):
    """Build SQL detailing the latest result of every test case of *suite*
    within the given build of *series*, including tags and the last
    ERROR/FAIL log message per test.
    """
    return """
    SELECT *
    FROM (
        SELECT DISTINCT ON (suite.id, test_results.id)
               suite.id as suite_id, suite.name as suite_name, suite.full_name as suite_full_name,
               suite.repository as suite_repository,
               suite_result.test_run_id as suite_test_run_id,
               suite_result.start_time as suite_start_time,
               test_results.id as id, test_results.name as name, test_results.full_name as full_name,
               test_results._test_run_id as test_run_id,
               test_results.status as status,
               test_results.setup_status as setup_status,
               test_results.execution_status as execution_status,
               test_results.teardown_status as teardown_status,
               test_results.fingerprint as fingerprint,
               test_results.setup_fingerprint as setup_fingerprint,
               test_results.execution_fingerprint as execution_fingerprint,
               test_results.teardown_fingerprint as teardown_fingerprint,
               test_results.start_time as start_time,
               test_results.elapsed as elapsed,
               test_results.setup_elapsed as setup_elapsed,
               test_results.execution_elapsed as execution_elapsed,
               test_results.teardown_elapsed as teardown_elapsed,
               CASE WHEN tags IS NULL THEN '{array_literal}' ELSE tags END as tags,
               log_messages.log_level as failure_log_level,
               log_messages.message as failure_message,
               log_messages.timestamp as failure_timestamp
        FROM suite_result
        JOIN suite ON suite.id=suite_result.suite_id
        JOIN test_run ON test_run.id=suite_result.test_run_id
        JOIN test_series_mapping as tsm ON test_run.id=tsm.test_run_id
                                       AND tsm.series={series}
        LEFT OUTER JOIN (
            SELECT DISTINCT ON (test_case.id) *, test_result.test_run_id as _test_run_id
            FROM test_result
            JOIN test_case ON test_case.id=test_result.test_id
            JOIN test_series_mapping as tsm ON test_result.test_run_id=tsm.test_run_id
                                           AND tsm.series={series}
            WHERE test_result.test_run_id IN ({test_run_ids})
            ORDER BY test_case.id, start_time DESC, test_result.test_run_id DESC
        ) as test_results ON test_results.suite_id=suite.id
        LEFT OUTER JOIN (
            SELECT array_agg(tag ORDER BY tag) as tags, test_id, test_run_id
            FROM test_tag
            WHERE test_run_id IN ({test_run_ids})
            GROUP BY test_id, test_run_id
        ) as test_tags ON test_tags.test_id=test_results.test_id
                      AND test_tags.test_run_id=test_results._test_run_id
        LEFT OUTER JOIN (
            SELECT DISTINCT ON (test_run_id, test_id)
                   test_run_id, test_id, log_level, message, timestamp
            FROM log_message
            WHERE test_run_id IN ({test_run_ids})
            AND test_id IS NOT NULL
            AND log_level IN ('ERROR', 'FAIL')
            ORDER BY test_run_id, test_id, timestamp DESC, id DESC
        ) as log_messages ON log_messages.test_id=test_results.test_id
                         AND log_messages.test_run_id=test_results._test_run_id
        WHERE suite_result.test_run_id IN ({test_run_ids})
        AND suite_result.suite_id={suite_id}
        AND NOT ignored
        ORDER BY suite_id, test_results.id, suite_start_time DESC, suite_test_run_id DESC
    ) AS results
    ORDER BY suite_full_name, full_name;
    """.format(array_literal='{}',  # nosec
               series=int(series),
               suite_id=int(suite),
               test_run_ids=test_run_ids(series, build_num=build_number))
def log_messages(test_run_id, suite_id=None, test_id=None):
    """Build SQL fetching the log messages of one test run in time order.

    *test_id* narrows to a single test (rows with NULL test_id when
    omitted); *suite_id* optionally narrows to one suite.  All ids are
    cast to int before being inlined.
    """
    test_clause = "test_id={}".format(int(test_id)) if test_id else 'test_id IS NULL'
    suite_clause = "AND suite_id={}".format(int(suite_id)) if suite_id else ''
    return """
    SELECT *
    FROM log_message
    WHERE test_run_id={test_run_id}
    AND {test_filter}
    {suite_filter}
    ORDER BY timestamp, id
    """.format(test_run_id=int(test_run_id),  # nosec
               test_filter=test_clause,
               suite_filter=suite_clause)
def most_stable_tests(series, start_from, last, offset, limit, limit_offset, stable):
    """Build SQL ranking test cases of *series* by an instability score
    (recent failures weighted by 1/sqrt(builds ago)); *stable* orders the
    most stable first, otherwise the most unstable first.  *limit* and
    *limit_offset* page the ranked result.
    """
    return """
    SELECT suite_id, suite_name, suite_full_name,
           test_id, test_name, test_full_name,
           count(nullif(status !~ '^FAIL', true)) as fails_in_window,
           sum(failiness) as instability
    FROM (
        SELECT *,
               CASE WHEN status = 'FAIL'
                    THEN 1.0/sqrt(ROW_NUMBER() OVER (PARTITION BY test_id ORDER BY build_number DESC))
                    ELSE 0
               END as failiness
        FROM (
            SELECT DISTINCT ON (test_case.id, build_number)
                   suite.id as suite_id,
                   suite.name as suite_name,
                   suite.full_name as suite_full_name,
                   test_case.id as test_id,
                   test_case.name as test_name,
                   test_case.full_name as test_full_name,
                   test_result.status,
                   build_number
            FROM test_result
            JOIN test_case ON test_case.id=test_result.test_id
            JOIN suite ON suite.id=test_case.suite_id
            JOIN test_run ON test_run.id=test_result.test_run_id
            JOIN test_series_mapping as tsm ON test_run.id=tsm.test_run_id
                                           AND tsm.series={series}
            WHERE NOT test_run.ignored
            AND test_run.id IN ({test_run_ids})
            ORDER BY test_case.id, build_number, test_result.start_time DESC, test_result.test_run_id DESC
        ) AS last_results
    ) AS failiness_contributions
    GROUP BY suite_id, suite_name, suite_full_name, test_id, test_name, test_full_name
    ORDER BY instability {order}
    LIMIT {limit} OFFSET {limit_offset};
    """.format(series=int(series),  # nosec
               test_run_ids=test_run_ids(series, start_from=start_from, last=last, offset=offset),
               limit=int(limit),
               limit_offset=int(limit_offset),
               order="ASC" if stable else "DESC")
def keyword_analysis(series, build_number):
    """Build SQL aggregating keyword execution statistics for the given
    build of *series*: share of total elapsed time, min/avg/max and total
    execution times, call counts, versions and max call depth.
    """
    return """
    WITH total_elapsed AS (
        SELECT sum(top_suite_elapsed) as total
        FROM (
            SELECT max(elapsed) as top_suite_elapsed
            FROM suite_result
            WHERE test_run_id IN ({test_run_ids})
            GROUP BY test_run_id
        ) AS foo
    )
    SELECT tree.library, tree.keyword,
           sum(cumulative_execution_time)::real/total_elapsed.total*100 as percent,
           min(min_execution_time) as min,
           sum(cumulative_execution_time)/sum(calls) as avg,
           max(max_execution_time) as max,
           sum(cumulative_execution_time) as total,
           sum(calls) as calls,
           count(*) versions,
           max(max_call_depth) as max_call_depth
    FROM keyword_statistics as stat
    JOIN keyword_tree as tree ON tree.fingerprint=stat.fingerprint
    CROSS JOIN total_elapsed
    WHERE test_run_id IN ({test_run_ids})
    GROUP BY tree.library, tree.keyword, total_elapsed.total
    ORDER BY total DESC
    """.format(test_run_ids=test_run_ids(series, build_num=build_number))
|
# Print an n-row right-aligned triangle of "* " cells.
n = int(input())
for row in range(n):
    # Leading spaces shrink as the star count grows.
    print(" " * (n - 1 - row) + "* " * (row + 1))
|
# Detector backend selection.
type = 'MMDetector'
# NOTE(review): absolute local path -- presumably needs adjusting per machine.
config = '/home/linkinpark213/Source/mmdetection/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py'
# Pretrained weights downloaded from the open-mmlab model zoo.
checkpoint = 'https://open-mmlab.s3.ap-northeast-2.amazonaws.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth'
# Detections below this confidence score are discarded.
conf_threshold = 0.5
|
def add_voxelizer_parameters(parser):
    """Add voxelizer-related CLI arguments to *parser*.

    Registers the voxelizer factory choice, the voxel grid shape, an
    optional dump path for voxelized inputs and the input image shape.
    """
    parser.add_argument(
        "--voxelizer_factory",
        choices=[
            "occupancy_grid",
            "tsdf_grid",
            "image"
        ],
        default="occupancy_grid",
        help="The voxelizer factory to be used (default=occupancy_grid)"
    )
    parser.add_argument(
        "--grid_shape",
        # argparse also passes string defaults through `type`, so the
        # parsed default is the tuple (32, 32, 32).
        type=lambda x: tuple(map(int, x.split(","))),
        default="32,32,32",
        help="The dimensionality of the voxel grid (default=(32, 32, 32))"
    )
    parser.add_argument(
        "--save_voxels_to",
        default=None,
        help="Path to save the voxelised input to the network"
    )
    parser.add_argument(
        "--image_shape",
        type=lambda x: tuple(map(int, x.split(","))),
        default="3,137,137",
        # Fixed: the help text was copy-pasted from --grid_shape and
        # wrongly described the voxel grid.
        help="The dimensionality of the input image (default=(3, 137, 137))"
    )
def add_training_parameters(parser):
    """Add arguments to a parser that are related with the training of the
    network: epochs/steps, batch size, learning-rate schedule and optimizer.
    """
    parser.add_argument(
        "--epochs",
        type=int,
        default=150,
        help="Number of times to iterate over the dataset (default=150)"
    )
    parser.add_argument(
        "--steps_per_epoch",
        type=int,
        default=500,
        help=("Total number of steps (batches of samples) before declaring one"
              " epoch finished and starting the next epoch (default=500)")
    )
    parser.add_argument(
        "--batch_size",
        type=int,
        default=32,
        help="Number of samples in a batch (default=32)"
    )
    parser.add_argument(
        "--lr",
        type=float,
        default=1e-3,
        help="Learning rate (default 1e-3)"
    )
    parser.add_argument(
        "--lr_epochs",
        # Fixed: returning a bare `map` object yields a one-shot iterator
        # (argparse applies `type` to the string default too), so the value
        # was silently empty after the first iteration. A tuple is reusable
        # and matches the style of --grid_shape/--image_shape.
        type=lambda x: tuple(map(int, x.split(","))),
        default="500,1000,1500",
        help="Training epochs with diminishing learning rate"
    )
    parser.add_argument(
        "--lr_factor",
        type=float,
        default=1.0,
        # Fixed: the help text claimed default=None, but the default is 1.0.
        help=("Factor according to which the learning rate will be diminished"
              " (default=1.0)")
    )
    parser.add_argument(
        "--optimizer",
        choices=["Adam", "SGD"],
        default="Adam",
        help="The optimizer to be used (default=Adam)"
    )
    parser.add_argument(
        "--momentum",
        type=float,
        default=0.9,
        help=("Parameter used to update momentum in case of SGD optimizer"
              " (default=0.9)")
    )
def add_dataset_parameters(parser):
    """Register dataset-selection arguments on *parser*."""
    add = parser.add_argument
    dataset_choices = [
        "shapenet_quad",
        "shapenet_v1",
        "shapenet_v2",
        "surreal_bodies",
        "dynamic_faust"
    ]
    add("--dataset_type", default="shapenet_quad", choices=dataset_choices,
        help="The type of the dataset type to be used")
    add("--n_points_from_mesh", type=int, default=1000,
        help="The maximum number of points sampled from mesh (default=1000)")
    add("--model_tags", type=lambda x: x.split(":"), default=[],
        help="The tags to the model to be used for testing")
def add_nn_parameters(parser):
    """Add arguments to control the design of the neural network architecture."""
    add = parser.add_argument
    add("--architecture", choices=["tulsiani", "octnet", "resnet18"],
        default="tulsiani", help="Choose the architecture to train")
    add("--train_with_bernoulli", action="store_true",
        help="Learn the Bernoulli priors during training")
    add("--make_dense", action="store_true",
        help="When true use an additional FC before its regressor")
def add_regularizer_parameters(parser):
    """Register the shape-regularizer choices and their weights on *parser*."""
    parser.add_argument(
        "--regularizer_type",
        choices=[
            "bernoulli_regularizer",
            "entropy_bernoulli_regularizer",
            "parsimony_regularizer",
            "overlapping_regularizer",
            "sparsity_regularizer"
        ],
        nargs="+",
        default=[],
        help=("The type of the regularizer on the shapes to be used"
              " (default=None)")
    )
    parser.add_argument(
        "--bernoulli_regularizer_weight",
        type=float,
        default=0.0,
        help=("The importance of the regularization term on Bernoulli priors"
              " (default=0.0)")
    )
    parser.add_argument(
        "--maximum_number_of_primitives",
        type=int,
        default=5000,
        help=("The maximum number of primitives in the predicted shape "
              " (default=5000)")
    )
    parser.add_argument(
        "--minimum_number_of_primitives",
        type=int,
        default=5,
        help=("The minimum number of primitives in the predicted shape "
              " (default=5)")
    )
    parser.add_argument(
        "--entropy_bernoulli_regularizer_weight",
        type=float,
        default=0.0,
        help=("The importance of the regularizer term on the entropy of"
              " the bernoullis (default=0.0)")
    )
    parser.add_argument(
        "--sparsity_regularizer_weight",
        type=float,
        default=0.0,
        help="The weight on the sparsity regularizer (default=0.0)"
    )
    parser.add_argument(
        "--parsimony_regularizer_weight",
        type=float,
        default=0.0,
        help="The weight on the parsimony regularizer (default=0.0)"
    )
    parser.add_argument(
        "--overlapping_regularizer_weight",
        type=float,
        default=0.0,
        help="The weight on the overlapping regularizer (default=0.0)"
    )
    parser.add_argument(
        "--enable_regularizer_after_epoch",
        type=int,
        default=0,
        # Fixed: the help text claimed default=10, but the default is 0.
        help="Epoch after which regularizer is enabled (default=0)"
    )
    parser.add_argument(
        "--w1",
        type=float,
        default=0.005,
        help="The weight on the first term of the sparsity regularizer (default=0.005)"
    )
    parser.add_argument(
        "--w2",
        type=float,
        default=0.005,
        help="The weight on the second term of the sparsity regularizer (default=0.005)"
    )
def add_sq_mesh_sampler_parameters(parser):
    """Register superquadric mesh-sampling arguments on *parser*."""
    add = parser.add_argument
    add("--D_eta", type=float, default=0.05,
        help="Step along the eta (default=0.05)")
    add("--D_omega", type=float, default=0.05,
        help="Step along the omega (default=0.05)")
    add("--n_points_from_sq_mesh", type=int, default=180,
        help="Number of points to sample from the mesh of the SQ (default=180)")
def add_gaussian_noise_layer_parameters(parser):
    """Register the Gaussian-noise layer toggle and distribution parameters."""
    add = parser.add_argument
    add("--add_gaussian_noise", action="store_true",
        help="Add Gaussian noise in the layers")
    add("--mu", type=float, default=0.0,
        help="Mean value of the Gaussian distribution")
    add("--sigma", type=float, default=0.001,
        help="Standard deviation of the Gaussian distribution")
def add_loss_parameters(parser):
    """Register the two loss-term weights on *parser*."""
    add = parser.add_argument
    add("--prim_to_pcl_loss_weight", default=1.0, type=float,
        help="The importance of the primitive-to-pointcloud loss in the final loss (default = 1.0)")
    add("--pcl_to_prim_loss_weight", default=1.0, type=float,
        help="The importance of the pointcloud-to-primitive loss in the final loss (default = 1.0)")
def add_loss_options_parameters(parser):
    """Register the boolean loss-behaviour flags on *parser*."""
    add = parser.add_argument
    add("--use_sq", action="store_true",
        help="Use Superquadrics as geometric primitives")
    add("--use_cuboids", action="store_true",
        help="Use cuboids as geometric primitives")
    add("--use_chamfer", action="store_true",
        help="Use the chamfer distance")
def voxelizer_shape(args):
    """Return the input shape implied by the selected voxelizer factory.

    Falls through (returns None) for any unknown factory value.
    """
    factory = args.voxelizer_factory
    if factory == "occupancy_grid":
        return args.grid_shape
    if factory == "image":
        return args.image_shape
    if factory == "tsdf_grid":
        return (args.resolution,) * 3
def get_loss_weights(args):
    """Extract the two loss-term weights from parsed arguments as a dict."""
    options = vars(args)
    return {
        "pcl_to_prim_weight": options.get("pcl_to_prim_loss_weight", 1.0),
        "prim_to_pcl_weight": options.get("prim_to_pcl_loss_weight", 1.0),
    }
def get_loss_options(args):
    """Assemble the loss-options dictionary (flags + weights) from parsed args."""
    loss_weights = get_loss_weights(args)
    options = vars(args)
    # Build a dictionary with the loss options based on the input arguments.
    return {
        "use_sq": options.get("use_sq", False),
        "use_cuboids": options.get("use_cuboids", False),
        "use_chamfer": options.get("use_chamfer", False),
        "loss_weights": loss_weights
    }
|
def describe_pet(animal_type, pet_name='virgil'):
    """Print a pet's species and its title-cased name (defaults to Virgil)."""
    lines = (
        f"\nI have a {animal_type}.",
        f"My {animal_type}'s name is {pet_name.title()}",
    )
    print("\n".join(lines))
# Demonstrate positional, keyword and default-argument call styles.
describe_pet('dog', 'virgil')
describe_pet('hamster', 'harry')
describe_pet(pet_name='garfield', animal_type='cat')
describe_pet('dog')
|
# Print the lower-triangular 9x9 multiplication table, one row per factor.
for row in range(1, 10):
    # Each cell is followed by a tab, including the last one (as before).
    cells = [f'{col}*{row}={col * row}' for col in range(1, row + 1)]
    print('\t'.join(cells) + '\t')
|
# This Document class simulates the HTML DOM document object.
class Document:
    """Python-side stand-in for the browser `document` object.

    Methods build JavaScript snippets that are executed in the attached
    window, mirroring the native DOM API.
    """
    def __init__(self, window):
        self.window = window  # bridge used to execute/fetch JavaScript
        # Monotonic counter so every created element gets a unique
        # JavaScript-side reference (Python.Created_Elements_references.eN).
        self.created_elements_index = 0
    def getElementById(self, id):
        """Simulate document.getElementById() and wrap the result.

        Returns an HTML_Element proxy exposing all methods/properties
        related to an HTML element.
        """
        # Fixed: the id is now escaped and double-quoted so quote or
        # backslash characters cannot break out of the generated JavaScript
        # string (it was previously interpolated unescaped).
        return HTML_Element(self.window, 'document.getElementById("' + self.specialchars(id) + '")')
    def createElement(self, tagName):
        """Simulate document.createElement() and wrap the new node.

        The created element is stored as a property of the JavaScript
        Python.Created_Elements_references object so it remains reachable.
        The returned HTML_Element carries the JavaScript needed to delete
        that property once the element no longer needs to be accessed
        (see HTML_Element.deleteReference).
        """
        self.created_elements_index += 1
        # Build the unique reference once instead of repeating it three times.
        ref = 'Python.Created_Elements_references.e' + str(self.created_elements_index)
        self.window.execute(ref + ' = document.createElement("' + self.specialchars(tagName) + '");')
        return HTML_Element(self.window, ref, 'delete ' + ref)
    def specialchars(self, s):
        """Escape backslashes and double quotes for embedding in JS string literals."""
        s = s.replace("\\", "\\\\")
        return s.replace('"', '\\"')
# This class includes all methods/(properties) related to an HTML element.
class HTML_Element:
    """Proxy for one HTML element living in the attached window.

    Attribute reads are asynchronous (they round-trip through JavaScript);
    writes simply enqueue JavaScript for execution.
    """
    def __init__(self, window, element, deleteReference_command=None):
        # Window object used to communicate with JavaScript.
        self.window = window
        # JavaScript expression that resolves to this HTML element.
        self.element = element
        # Only set for created elements: JavaScript that deletes the
        # corresponding Python.Created_Elements_references property so the
        # element can no longer be accessed from Python.
        self.deleteReference_command = deleteReference_command
    # Additional simulated JavaScript HTML DOM attributes can be added so:
    # @property
    # async def attribute(self):
    #     return await self.window.get(self.element + ".attribute;")
    # @attribute.setter
    # def attribute(self, val):
    #     self.window.execute(self.element + '.attribute = "' + self.specialchars(val) + '";')
    # Changes/returns the value of the element.
    @property
    async def value(self):
        return await self.window.get(f"{self.element}.value;")
    @value.setter
    def value(self, val):
        self.window.execute(f'{self.element}.value = "{self.specialchars(val)}";')
    # Changes/returns the inner HTML of the element.
    @property
    async def innerHTML(self):
        return await self.window.get(f"{self.element}.innerHTML;")
    @innerHTML.setter
    def innerHTML(self, val):
        self.window.execute(f'{self.element}.innerHTML = "{self.specialchars(val)}";')
    # Generic access to attributes that have no dedicated property yet:
    # read when `val` is omitted, write otherwise.
    async def attribute(self, attr, val=None):
        if val is None:
            return await self.window.get(f"{self.element}.{self.specialchars(attr)};")
        self.window.execute(f'{self.element}.{attr} = "{self.specialchars(val)}";')
    # Changes the attribute value of the element via setAttribute().
    def setAttribute(self, attr, val):
        self.window.execute(f'{self.element}.setAttribute("{self.specialchars(attr)}", "{self.specialchars(val)}");')
    # Appends this element to the document body.
    def append_this_to_body(self):
        self.window.execute(f'document.body.appendChild({self.element});')
    # For created elements only: drop the JavaScript-side reference so the
    # element can no longer be accessed.
    def deleteReference(self):
        if self.deleteReference_command is not None:
            self.window.execute(self.deleteReference_command)
    def specialchars(self, s):
        """Escape backslashes and double quotes for embedding in JS string literals."""
        return s.replace("\\", "\\\\").replace('"', '\\"')
|
LOAD_HUB_FROM_EXT_TEMP = """
insert into {{ params.hub_name }}
select
ext.*
from {{ params.hub_name }}_ext ext
left join {{ params.hub_name }} h
on h.{{ params.pk_name }} = ext.{{ params.pk_name }}
where
h.{{ params.pk_name }} is null
and ext.load_dtm::time > '{{ params.from_dtm }}';
"""
LOAD_LINK_FROM_EXT_TEMP = """
"""
LOAD_SAT_FROM_EXT_TEMP = """
insert into {{ params.sat_name }}
select
t.*
from
(
select
ext.*
,md5({% for col in params.hash_diff_cols %}
{{ col }} ||
{% endfor %}
'') hash_diff
from {{ params.sat_name }}_ext ext
where ext.load_dtm::time > '{{ params.from_dtm }}'
) t
left join {{ params.sat_name }} s
on s.hash_diff = t.hash_diff
where
s.hash_diff is null
;
"""
UPDATE_COUNTRY_SCORING = """
CREATE OR REPLACE VIEW data_vault.country_scoring_test_data_1 as
select distinct
mapp.iso as iso,
cast('{pd.Timestamp.now()}' as timestamp) as load_dtm,
mapp.country_eng as country,
rrm.country as country_ru,
rrm.tnved,
rdm.comtrade_feat_1 as import_bulk_volume,
import_from_russia as import_from_russia,
rdm.av_tariff as average_tariff, rrm.barrier,
rdm.common_import_part,
rrm.exp_good_nisha,
{{ fts_w_1 }}*fts_feat_1_rate + {{ fts_w_2 }}*fts_feat_2_rate + {{ fts_w_3 }}*fts_feat_3_rate +
{{ comtrade_w_1 }}*comtrade_feat_1_rate + {{ comtrade_w_1 }}*comtrade_feat_2_rate +
{{ h_index_w }}*h_index_rate + {{ av_tariff_w }}*av_tariff_rate + {{ pokrytie_merami_w }}*pokrytie_merami_rate +
{{ transport_w }}*transport_place_rate + {{ pred_gdp_w }}*pred_gdp_rate +{{ pred_imp_w }}*pred_import_rate +
{{ easy_bus_w }}*easy_doing_business_rate + quota_rate + exp_good_nisha_rate + exp_potential_rate + polit_coef_rate
as score
from data_vault.country_scoring_result_rating_dm rrm
join data_vault.country_scoring_result_dm_1 as rdm
on rrm.country = rdm.country and rrm.tnved = rdm.tnved
join data_vault.country_scoring_mapper as mapp
on lower(mapp.country_ru) = lower(rrm.country)
where common_import_part_flg != 1;
"""
|
# Resample and tidy china: china_annual
# 'A' = year-end frequency; keep each year's last value, then take the
# 10-year percentage change and drop the resulting NaNs.
# NOTE(review): assumes `china`, `us` and `pd` are defined earlier in the
# script and carry a DatetimeIndex — confirm against the full file.
china_annual = china.resample('A').last().pct_change(10).dropna()
# Resample and tidy us: us_annual
us_annual = us.resample('A').last().pct_change(10).dropna()
# Concatenate china_annual and us_annual: gdp (inner join keeps shared dates)
gdp = pd.concat([china_annual,us_annual],join='inner',axis=1)
# Resample gdp to decade frequency and print the last value per decade
print(gdp.resample('10A').last())
|
# Swap every pair of neighbouring items in a space-separated input list.
tokens = input("введите список разделенный пробелами: ").split(" ")
if tokens:
    # Walk the odd indices and swap each element with its left neighbour.
    for idx in range(1, len(tokens), 2):
        tokens[idx - 1], tokens[idx] = tokens[idx], tokens[idx - 1]
print(tokens)
|
# Training configuration consumed by the training pipeline.
train = dict(
    batch_size=10,
    num_workers=4,
    use_amp=True,  # automatic mixed precision
    num_epochs=100,
    num_iters=30000,
    epoch_based=True,  # count progress in epochs rather than iterations
    lr=0.0001,
    optimizer=dict(
        mode="adamw",
        set_to_none=True,  # zero_grad(set_to_none=True) style gradient reset
        group_mode="r3",  # ['trick', 'r3', 'all', 'finetune'],
        cfg=dict(),
    ),
    grad_acc_step=1,  # gradient accumulation steps
    sche_usebatch=True,  # step the scheduler per batch
    scheduler=dict(
        warmup=dict(
            num_iters=0,
        ),
        mode="poly",
        cfg=dict(
            lr_decay=0.9,
            min_coef=0.001,
        ),
    ),
    save_num_models=1,  # how many checkpoints to keep
    ms=dict(  # multi-scale training
        enable=False,
        extra_scales=[0.75, 1.25, 1.5],
    ),
    grad_clip=dict(
        enable=False,
        mode="value",  # or 'norm'
        cfg=dict(),
    ),
    ema=dict(  # exponential moving average of the weights
        enable=False,
        cmp_with_origin=True,
        force_cpu=False,
        decay=0.9998,
    ),
)
|
def simple_game(builder):
    """Build and return the small two-room demo game.

    Configures the builder fluently: a testing room, a production room,
    a locked exit between them, and the key/report item chain that
    unlocks the way north.
    """
    configured = (builder
        .room(
        name='the testing room',
        description=
        """
        It's a room with a Pythonista writing test cases.
        There is a locked drawer with a key on top.
        You need to get to the production server room
        at the North exit, but management has stationed a
        guard to to stop you.
        """
        )
        .room(
        name='the production room',
        description='Room with production servers',
        final_room=True
        )
        .exit(
        name='North',
        description='The north exit',
        from_room='the testing room',
        to_room='the production room',
        locked=True
        )
        .exit(
        name='South',
        description='The south exit',
        from_room='the production room',
        to_room='the testing room',
        locked=False
        )
        .item(
        name='TPS Report',
        description="""A report explaining how to test a text-based
            adventure game""",
        use_message="""You read the report to the guard blocking the North.
            He falls asleep allowing you to go through.""",
        in_room='the testing room',
        unlocks='North',
        locked=True
        )
        .item(
        name='Drawer Key',
        description='An old metal key to the TPS report drawer',
        use_message='You put the key in the lock',
        in_room='the testing room',
        unlocks='TPS Report', locked=False
        )
        .start_in('the testing room'))
    return configured.build()
def save_internet_game(builder):
    """Build and return the 'save the Internet' adventure game.

    Seven MIT-campus rooms are wired together with exits and a chain of
    items (recording unlocks the Internet; the combination opens the
    Athena cluster, whose key opens the elevator; placing the Internet
    opens the stairs) leading to the final roof scene.
    """
    return builder. \
        room(
        name='Building 7',
        description=
        """
        As anyone who has watched "The IT Crowd" knows, the Internet
        is contained entirely within a small, black, weightless box.
        It is kept in the Stata Center and watched over by elite
        Computer Science elders.
        Unfortunately, one day, a MIT Professor named Jack Danielson,
        enraged by the nth time a student asked him for help developing
        a web app without even a simple object model for planning,
        stole the Internet from the Stata Center.
        You run into Building 7, and bar the door, blocking out the angry
        rioters blaming anyone associated with MIT for the resulting
        collapse of civilization. You have taken a recording of Prof.
        Danielson's actions, and it drops from your hands onto the
        floor as you bolt the door. You must find the Internet and return
        it to its home, so that the world may be mended.
        """
        ). \
        room(
        name='Building 3',
        description=
        """
        Building 3 of MIT is deserted. The only thing there is a small
        piece of paper on which someone has scrawled a series of five numbers.
        """
        ). \
        room(
        name='Building 10 Lobby',
        description=
        """
        The lobby for Building 10. There is an elevator leading up to
        the 10-250 lecture hall, where several MIT students are hiding,
        but the elevator is locked.
        """
        ). \
        room(
        name='Building 4 Athena Cluster',
        description=
        """
        A computer cluster. With no Internet, these computers are not
        very useful...
        In the back of the room, on a desk, you see a key.
        """
        ). \
        room(
        name='Lecture Hall 10-250',
        description=
        """
        You see several students on their laptops. Professor
        Jack Danielson is standing at the front, giving a lecture.
        Interestingly, one student appears to see no issue with the idea
        of dropping nontrivial sums of money for a MIT education,
        only to spend lecture browsing Facebook on their Macbook Air.
        "How are you connected to the Internet?" you ask.
        "It's wireless, silly!" he replies.
        """
        ). \
        room(
        name='Stata Center',
        description=
        """
        You see a pedestal, where a small black box could fit...
        """
        ). \
        room(
        name='Stata Center Roof',
        description=
        """
        On this roof, you meet a hooded man.
        "I am the caretaker of the Internet, and I was rendered powerless
        as it was stolen. If it was not for your help, I might have died.
        But now you have saved us!"
        He presses a button on the wall, and the Internet lights up again.
        It shines so brightly that it can be seen from a 50 mile radius.
        The world is now back online! The rioting mobs outside quiet,
        and disperse.
        The man remarks:
        "What a delight the Internet is! If only I could be so grossly
        incadescent!"
        """,
        final_room=True
        ). \
        exit(
        name='East Doorway',
        description='The doorway from Building 7 to Building 3',
        from_room='Building 7',
        to_room='Building 3',
        locked=False
        ). \
        exit(
        name='West Doorway',
        description='The doorway from Building 3 to Building 7',
        from_room='Building 3',
        to_room='Building 7',
        locked=False
        ). \
        exit(
        name='Entry to Building 10',
        description='A wide entry into Building 10',
        from_room='Building 3',
        to_room='Building 10 Lobby',
        locked=False
        ). \
        exit(
        name='Building 10 West Exit',
        description='An exit from Building 10 to Building 3',
        from_room='Building 10 Lobby',
        to_room='Building 3',
        locked=False
        ). \
        exit(
        name='Up Elevator',
        description='The elevator in Building 10, going up from the lobby',
        from_room='Building 10 Lobby',
        to_room='Lecture Hall 10-250',
        locked=True
        ). \
        exit(
        name='Down Elevator',
        description='The elevator in Building 10, going down from 10-250',
        from_room='Lecture Hall 10-250',
        to_room='Building 10 Lobby',
        locked=False
        ). \
        exit(
        name='Door to Athena Cluster',
        description='An Athena cluster door with a combination lock',
        from_room='Building 10 Lobby',
        to_room='Building 4 Athena Cluster',
        locked=True
        ). \
        exit(
        name='Door out of Athena Cluster',
        description='The exit from the Athena cluster',
        from_room='Building 4 Athena Cluster',
        to_room='Building 10 Lobby',
        locked=False
        ). \
        exit(
        name='Hallway to Stata',
        description='A long empty hallway to the Stata Center',
        from_room='Building 10 Lobby',
        to_room='Stata Center',
        locked=False
        ). \
        exit(
        name='Hallway to Building 10',
        description='A long empty hallway leading back to Building 10',
        from_room='Stata Center',
        to_room='Building 10 Lobby',
        locked=False
        ). \
        exit(
        name='Stairs to Stata Roof',
        description='A winding staircase to the roof of the Stata Center',
        from_room='Stata Center',
        to_room='Stata Center Roof',
        locked=True
        ). \
        item(
        name='Internet',
        description='A small weightless black box. Handle with care.',
        use_message=
        """
        You place the Internet in the slot. The door to the stairs
        opens. You should go up and have it turned on.
        """,
        in_room='Lecture Hall 10-250',
        unlocks='Stairs to Stata Roof',
        locked=True
        ). \
        item(
        name='Recording',
        description=
        """
        A tape from a security camera, showing Prof. Danielson's
        theft of the Internet from the Stata center. You need this
        as evidence that he is the one responsible.
        """,
        use_message=
        """
        You brandish the tape. Prof. Danielson says, "Yes, I admit I
        stole the Internet, but you deserved it! You people shouldn't
        be allowed to write anything that runs on it until you learn
        to plan things out!" He holds up the box, running on battery
        power, and points to it as he speaks.
        You attempt to reason with him: "Yes, planning is useful, and
        people should do it more, but that doesn't justify wrecking
        the technology that holds up modern civilization!"
        The Professor remains unconvinced. At that moment, the battery
        on the Internet dies. Murmurings are heard from the crowd of students.
        "Hey! I'm getting signal not found errors!"
        They point at the Professor.
        "He's responsible! GET HIM!"
        Professor Danielson drops the Internet and runs out of the room,
        with the mob of students in hot pursuit.
        """,
        in_room='Building 7',
        unlocks='Internet',
        locked=False
        ). \
        item(
        name='Athena Cluster Combination',
        description='A piece of paper with the numbers 27182',
        use_message='You entered the combo, and opened the door.',
        in_room='Building 3',
        unlocks='Door to Athena Cluster',
        locked=False
        ). \
        item(
        name='Elevator Key',
        description='An old rusty key',
        use_message='You turned the key in the lock, opening the elevator.',
        in_room='Building 4 Athena Cluster',
        unlocks='Up Elevator',
        locked=False
        ).start_in('Building 7').build()
|
# Schema illustration for the human-evaluation export: one entry per dialogue
# file, each turn recording the user query, the human/HDSA/MarCo responses,
# and per-criterion pairwise judgements ("Tie" / "MarCo" / "System").
# NOTE: the bare `...` below is a literal Ellipsis used as a placeholder for
# the remaining turns — this is documentation-by-example, not real data.
examples = [
    {
        "file": "FILENAME",
        "info": [
            {
                "turn_num": 1,
                "user": "USER QUERY",
                "system": "HUMAN RESPONSE",
                "HDSA": "HDSA RESPONSE",
                "MarCo": "MarCo RESPONSE",
                "MarCo vs. system":
                    {
                        "Readability":
                            ["Tie", "MarCo", "System"],
                        "Completion":
                            ["MarCo", "MarCo", "Tie"]
                    }
            },
            ...
        ]
    }
]
|
# ____ _____
# | _ \ __ _ _ |_ _| __ __ _ ___ ___ _ __
# | |_) / _` | | | || || '__/ _` |/ __/ _ \ '__|
# | _ < (_| | |_| || || | | (_| | (_| __/ |
# |_| \_\__,_|\__, ||_||_| \__,_|\___\___|_|
# |___/
#
VERSION = (0, 0, 1)  # (major, minor, patch)
__version__ = ".".join(str(part) for part in VERSION)
|
# Count the i in [A, B] for which A + B + i is divisible by 3.
A, B = map(int, input().split())
result = sum(1 for i in range(A, B + 1) if (A + B + i) % 3 == 0)
print(result)
|
# -*- coding: utf-8 -*-
class Graph(object):
    r"""Base class for graph data structures.

    Concrete storage and behaviour are provided by subclasses.
    """
    pass
class DirectedGraph(Graph):
    r"""Directed Graph.

    The graph is described using a dictionary mapping each node to its
    adjacency information: graph = {node: [[parent nodes], [child nodes]]}.
    """
    class Root(object):
        """Sentinel type serving as the implicit root node of the graph."""
        pass
    def __init__(self):
        self._root = self.Root()
        # NOTE(review): the root is initialised with a flat list rather than
        # the [[parents], [children]] pair described above — confirm intended.
        self._graph = {self._root: []}
    def addNode(self, parent, node):
        """Add a node to the graph under *parent*. (Not implemented yet.)"""
        pass
    def getParents(self, node):
        """Return the parent nodes of the given node. (Not implemented yet.)"""
        pass
    def getChildren(self, node):
        """Return the child nodes of the given node. (Not implemented yet.)"""
        pass
class DirectedAcyclicGraph(DirectedGraph):
    """Directed Acyclic Graph: a directed graph in which cycles are not allowed."""
    def __init__(self):
        super().__init__()
|
# Search in Rotated Sorted Array: https://leetcode.com/problems/search-in-rotated-sorted-array/
# There is an integer array nums sorted in ascending order (with distinct values).
# Prior to being passed to your function, nums is rotated at an unknown pivot index k (0 <= k < nums.length) such that the resulting array is [nums[k], nums[k+1], ..., nums[n-1], nums[0], nums[1], ..., nums[k-1]] (0-indexed). For example, [0,1,2,4,5,6,7] might be rotated at pivot index 3 and become [4,5,6,7,0,1,2].
# Given the array nums after the rotation and an integer target, return the index of target if it is in nums, or -1 if it is not in nums.
# You must write an algorithm with O(log n) runtime complexity.
# Okay the simplest solution to this problem is to do a simple linear search which would take o(n) and is trivial
# so to improve on this normally what we do is a binary search because it is shifted there is probably a slight
# difference
# Actually you can quickly determine if you are including a part of the switch by comparing the first value in you search
# to the middle if the start is < mid point then you know it is sorted and you can continually normally
# otherwise y ou know that there was a switch and you need to go the opposite direction
class Solution:
    def search(self, nums, target):
        """Binary search in a rotated ascending array; return index or -1.

        One half of [lo, hi] is always sorted. Compare nums[mid] with
        nums[lo] to find the sorted half, then test whether target lies in
        it and discard the other half. O(log n) time, O(1) space.
        """
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if nums[mid] == target:
                return mid
            if nums[lo] <= nums[mid]:
                # Left half [lo, mid] is sorted.
                if nums[lo] <= target < nums[mid]:
                    hi = mid - 1
                else:
                    lo = mid + 1
            else:
                # Right half [mid, hi] is sorted.
                if nums[mid] < target <= nums[hi]:
                    lo = mid + 1
                else:
                    hi = mid - 1
        # Target is not present.
        return -1
# This problem seemed really hard at first but honestly since we know that it is a normal binary search if we look at the start and mid point we can
# quickly revert this to an almost normal implementation of the binary search algo
# This should run in o(log(n)) time and o(1) space as we are cutting the array in half every time and storing no information outside of that array
# Score Card
# Did I need hints? Slightly I kept messing up the moving of the start and end points
# Did you finish within 30 min? 22
# Was the solution optimal? Yup this runs in o(n+m) time in worst case and uses o(1) space
# Were there any bugs? See my hints
# 4 4 5 3 = 4
|
def init_logger(logger_type, data):
    """Print the CSV log header and return the list of sensor paths to log.

    Each returned entry is a (device_key, 'sensors', sensor_key) tuple for
    every sensor that is both read and enabled for logging on a device that
    has been set up (non-empty 'device').
    """
    profile = []
    header = 'run_time,read_time,'
    for dev_name, dev in data.items():
        if dev['device'] == '':
            # Device is not set up; nothing to log from it.
            continue
        for sensor_name, sensor in dev['sensors'].items():
            if sensor['read'] == True and sensor['log_on'] == True:
                header += sensor['log_name'] + ","
                profile.append((dev_name, 'sensors', sensor_name))
    print(header.strip(","))
    return profile
def logger(logger_profile, data, start_read, end_read):
    """Print one CSV log row: the two timestamps then each profiled value.

    Tuple/map values are formatted as three components with the sensor's
    'log_format'; plain ints use a bare '{}'; everything else uses the
    sensor's 'log_format' with a single argument.
    """
    row = ("{0:0.4f},{1:0.4f},").format(start_read, end_read)
    for dev, group, sensor in logger_profile:
        entry = data[dev][group][sensor]
        value = entry['value']
        # Exact type() checks are kept intentionally (e.g. bool is not int here).
        if type(value) is tuple or type(value) is map:
            components = list(value)
            # Assumes a 3-component reading; cleanup candidate, as noted before.
            row += (entry['log_format'] + ",").format(components[0], components[1], components[2])
        elif type(value) is int:
            row += ("{},").format(value)
        else:
            row += (entry['log_format'] + ",").format(value)
    print(row.strip(","))
|
class Solution(object):
    """Compute the maximum depth of a binary tree."""
    def XXX(self, root):
        """Return the number of nodes on the longest root-to-leaf path.

        An empty tree has depth 0. O(n) time, O(h) recursion depth.
        """
        def dfs(node, depth):
            # Past a leaf: the accumulated depth is the path length.
            if node is None:
                return depth
            return max(dfs(node.left, depth + 1), dfs(node.right, depth + 1))
        # Fixed: the original had an unreachable `num -= 1` after the return
        # and an unused `ret` parameter threaded through dfs; both removed.
        return dfs(root, 0)
|
# Copyright 2012 Kevin Gillette. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# URL-safe base64-style alphabet: value i is encoded as _ord[i] (6 bits/char).
_ord = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_"
# Reverse lookup: character -> 6-bit value.
_table = dict((c, i) for i, c in enumerate(_ord))
def encode(n, len=0):
    """Encode a non-negative integer in the 64-character alphabet.

    `len` is the minimum number of output characters; the result is padded
    with '0' (the zero digit) up to that length, so encode(0) == "" unless
    a minimum length is given. (The parameter name shadows the builtin
    `len`; kept for backward compatibility with keyword callers.)
    """
    out = ""
    while n > 0 or len > 0:
        out = _ord[n & 63] + out
        n >>= 6
        len -= 1
    return out
def decode(input):
    """Decode a string produced by encode() back to an integer.

    Raises ValueError naming the offending character if it is not in the
    alphabet.
    """
    n = 0
    for c in input:
        value = _table.get(c)
        if value is None:
            # Fixed: previously `c` itself was overwritten by the failed
            # lookup (None), so this raise crashed with a TypeError instead
            # of reporting the invalid character.
            raise ValueError("Invalid character in input: " + c)
        n = n << 6 | value
    return n
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo module manifest: declares metadata and dependencies for the addon.
{
    'name': 'Send SMS to Visitor with leads',
    'category': 'Website/Website',
    'sequence': 54,
    'summary': 'Allows to send sms to website visitor that have lead',
    'version': '1.0',
    'description': """Allows to send sms to website visitor if the visitor is linked to a lead.""",
    'depends': ['website_sms', 'crm'],
    'data': [],
    'installable': True,
    # Installed automatically as soon as all modules in 'depends' are present.
    'auto_install': True,
}
|
class User:
    """Minimal user record holding just a display name."""
    def __init__(self, name):
        self.name = name
    def show(self):
        """Print the user's name to stdout."""
        print(self.name)
# Demo: create a user and display it.
user = User("ada66")
user.show()
|
def merge(left, right):
    """Merge two sorted lists into one sorted list.

    Uses index cursors instead of list.pop(0) (which is O(n) per call), no
    longer mutates its arguments, and drops the leftover debug prints.
    Ties go to the right list, preserving the original ordering.
    """
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # One of the two tails is empty; append whatever remains.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
print(merge([3, 8, 12], [5, 10, 15]))
def merge_sort(arr):
    """Return a sorted copy of *arr* via top-down merge sort.

    Fixed: the base case now covers the empty list too — the original
    recursed forever on [] (len([]) != 1, and both halves were again []).
    """
    if len(arr) <= 1:
        return arr
    center = len(arr) // 2
    return merge(merge_sort(arr[:center]), merge_sort(arr[center:]))
print(merge_sort([22, 3, 15, 13, 822, 14, 15, 22, 75,]))
|
# --- 2048 genetic-algorithm / Tk GUI configuration constants ---
SIZE = 400  # window size in pixels
END_SCORE = 4000  # score at which a run is considered finished
GRID_LEN = 3  # board is GRID_LEN x GRID_LEN cells
WINAT = 2048  # tile value that counts as a win
GRID_PADDING = 10  # pixels between cells
# Chromosome length derived from the grid size (two grid^4 terms plus a
# 4*grid^2 term) — NOTE(review): the exact encoding is defined elsewhere.
CHROMOSOME_LEN = pow(GRID_LEN, 4) + 4*GRID_LEN*GRID_LEN + 1*(pow(GRID_LEN, 4))
TOURNAMENT_SELECTION_SIZE = 4  # individuals per selection tournament
MUTATION_RATE = 0.4
NUMBER_OF_ELITE_CHROMOSOMES = 4  # carried over unchanged each generation
POPULATION_SIZE = 10
GEN_MAX = 10000  # maximum number of generations
DONOTHINGINPUT_MAX = 5  # allowed consecutive no-op inputs before giving up
BACKGROUND_COLOR_GAME = "#92877d"
BACKGROUND_COLOR_CELL_EMPTY = "#9e948a"
FONT = ("Verdana", 40, "bold")
# Alternate key codes reported for arrow keys (macOS Tk key symbols).
KEY_UP_ALT = "\'\\uf700\'"
KEY_DOWN_ALT = "\'\\uf701\'"
KEY_LEFT_ALT = "\'\\uf702\'"
KEY_RIGHT_ALT = "\'\\uf703\'"
# WASD movement keys plus 'b' for back.
KEY_UP = 'w'
KEY_DOWN = 's'
KEY_LEFT = 'a'
KEY_RIGHT = 'd'
KEY_BACK = 'b'
# Vim-style alternates (stored with surrounding quotes as Tk reports them).
KEY_J = "'j'"
KEY_K = "'k'"
KEY_L = "'l'"
KEY_H = "'h'"
|
#!/usr/bin/env python
'''
ch8q2a1.py
Function1 = obtain_os_version -- process the show version output and return the OS version (Version 15.0(1)M4) else return None.
Looking for line such as:
Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)
'''
def obtain_os_version(show_ver_file):
    """Return the OS version from `show version` output, or None.

    Scans for the 'Cisco IOS Software' banner line, e.g.:
    Cisco IOS Software, C880 Software (...), Version 15.0(1)M4, RELEASE ...
    and returns its third comma-separated field ('Version 15.0(1)M4').
    """
    for line in show_ver_file.split('\n'):
        if "Cisco IOS Software" in line:
            parts = line.split(', ')
            # Fixed: guard against malformed banner lines that previously
            # raised IndexError on parts[2].
            if len(parts) > 2:
                return parts[2]
    return None
|
# 5
# / \
# 3 7
# / \ / \
# 2 4 6 8
# Build the BST drawn above, inserting nodes one by one.
# NOTE(review): Node, insert and preorder are defined elsewhere in this
# project — they are not visible in this chunk of the file.
tree = Node(5)
insert(tree, Node(3))
insert(tree, Node(2))
insert(tree, Node(4))
insert(tree, Node(7))
insert(tree, Node(6))
insert(tree, Node(8))
# Expected preorder traversal output: 5 3 2 4 7 6 8
preorder(tree)
|
# Read an integer and report its tens digit.
numero = int(input('Digite um número inteiro: '))
dezena = (numero // 10) % 10
print(f'O dígito das dezenas é {dezena}')
|
"Primera parte del ejercicio"
def mcd_euclides (a,b):
#Vamos a resolver el algoritmo de euclides utilizando iteracion
while b != 0:
# Hacemos uso de la siguiente propiedad: mcd(a,b) = mcd(a−b,b)
aux = b
b = a%b
a = aux
#De esta manera vamos reduciendo a hasta que este sea el mcd
return a
"Segunda parte del ejercicio"
def mcd_sumas_restas (a,b):
#Ahora tenemos un handicap; solo podemos usar sumas y restas
while b != 0:
aux = b-a
b -= aux
a = aux
return a
def iniciar():
    """Demo entry point: compute gcd(6, 8) with both implementations and print."""
    num1, num2 = 6, 8
    print("Por el algoritmo de Euclides: {}".format(mcd_euclides(num1, num2)))
    print("Usando sumas y restas: ", mcd_sumas_restas(num1, num2))
|
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# These examples are taken from the TensorFlow specification:
#
# https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/mirror-pad
def test(name, input_dims, input_values, paddings, mode, output_dims, output_values):
    """Register one MIRROR_PAD example plus its data-type variations.

    Declares the float32 model, then adds float16, quant8 (signed and
    unsigned) and int32 variations of the same example.
    """
    in_tensor = Input("t", ("TENSOR_FLOAT32", input_dims))
    pad_param = Parameter("paddings", ("TENSOR_INT32", [len(input_dims), 2]), paddings)
    out_tensor = Output("output", ("TENSOR_FLOAT32", output_dims))
    model = Model().Operation("MIRROR_PAD", in_tensor, pad_param, mode).To(out_tensor)
    # Quantized variations: input and output share scale/zero-point.
    q8_type = ("TENSOR_QUANT8_ASYMM", 0.5, 4)
    q8 = DataTypeConverter(name="quant8_asymm").Identify({
        in_tensor: q8_type,
        out_tensor: q8_type,
    })
    q8s_type = ("TENSOR_QUANT8_ASYMM_SIGNED", 0.25, -9)
    q8s = DataTypeConverter(name="quant8_asymm_signed").Identify({
        in_tensor: q8s_type,
        out_tensor: q8s_type,
    })
    Example({
        in_tensor: input_values,
        out_tensor: output_values,
    }, model=model, name=name).AddVariations("float16", q8, q8s, "int32")
# Three MIRROR_PAD cases from the TensorFlow specification: a 2-D SYMMETRIC
# "summary" case and two 1-D cases covering both REFLECT and SYMMETRIC modes.
test("summary",
     [2, 3], [1, 2, 3, # input_dims, input_values
              4, 5, 6],
     [1, 1, # paddings
      2, 2],
     1, # mode = SYMMETRIC
     [4, 7], [2, 1, 1, 2, 3, 3, 2, # output_dims, output_values
              2, 1, 1, 2, 3, 3, 2,
              5, 4, 4, 5, 6, 6, 5,
              5, 4, 4, 5, 6, 6, 5])
test("mode_reflect",
     [3], [1, 2, 3], # input_dims, input_values
     [0, 2], # paddings
     0, # mode = REFLECT
     [5], [1, 2, 3, 2, 1]) # output_dims, output_values
test("mode_symmetric",
     [3], [1, 2, 3], # input_dims, input_values
     [0, 2], # paddings
     1, # mode = SYMMETRIC
     [5], [1, 2, 3, 3, 2]) # output_dims, output_values
|
def sanitize(time_string):
    """Normalise a time string like '2-34' or '2:34' to dotted '2.34'.

    Strings without '-' or ':' are returned unchanged.  '-' takes
    precedence over ':' when (unexpectedly) both are present.
    """
    for sep in ('-', ':'):
        if sep in time_string:
            mins, secs = time_string.split(sep)
            return mins + '.' + secs
    return time_string
def get_coach_data(filepath):
    """Read one CSV line of athlete data from filepath.

    Returns a dict with 'name', 'dob' and the three fastest distinct
    'times' (normalised via sanitize), or None when the file can't be read.
    """
    try:
        with open(filepath) as f:
            fields = f.readline().strip().split(',')
    except IOError as err:
        print('File error: ' + str(err))
        return None
    # First two fields are name and date of birth; the rest are times.
    name = fields.pop(0)
    dob = fields.pop(0)
    return {
        'name': name,
        'dob': dob,
        'times': sorted(set(sanitize(t) for t in fields))[:3]
    }
# Load each athlete's data file and print their three fastest times
# (dict-based version of get_coach_data).
sarah = get_coach_data('hfpy_ch6_data/sarah2.txt')
james = get_coach_data('hfpy_ch6_data/james2.txt')
julie = get_coach_data('hfpy_ch6_data/julie2.txt')
mikey = get_coach_data('hfpy_ch6_data/mikey2.txt')
print(sarah['name'] + "'s fastest times are: " + str(sarah['times']))
print(james['name'] + "'s fastest times are: " + str(james['times']))
print(julie['name'] + "'s fastest times are: " + str(julie['times']))
print(mikey['name'] + "'s fastest times are: " + str(mikey['times']))
print('\n==========================> line break <============================\n')
# Improvement 2: represent an athlete with a class instead of a dict.
class Athlete():
    """An athlete's name, date of birth and recorded times.

    BUGFIX: the original used mutable default arguments (times=[] and
    data_arr=[]); the same list object would be shared by every instance
    created without an explicit times argument, so times added to one
    athlete leaked into all the others.
    """

    def __init__(self, name, dob=None, times=None):
        self.name = name
        self.dob = dob
        # Fresh list per instance when no times are supplied.
        self.times = [] if times is None else times

    def top3(self):
        """Return the three fastest distinct times, normalised by sanitize()."""
        return sorted(set([sanitize(time) for time in self.times]))[:3]

    def add_time(self, data):
        """Append a single time value."""
        self.times.append(data)

    def add_times(self, data_arr=None):
        """Append a list of time values (no-op when omitted)."""
        self.times.extend(data_arr if data_arr is not None else [])
def get_coach_data(filepath):
    """Read one CSV line from filepath and build an Athlete.

    Returns None (after printing the error) when the file can't be read.
    """
    try:
        with open(filepath) as f:
            fields = f.readline().strip().split(',')
    except IOError as err:
        print('File error: ' + str(err))
        return None
    # name, dob, then the remaining fields as times.
    return Athlete(fields.pop(0), fields.pop(0), fields)
# Same report as above, this time via Athlete instances and top3().
james = get_coach_data('hfpy_ch6_data/james2.txt')
julie = get_coach_data('hfpy_ch6_data/julie2.txt')
mikey = get_coach_data('hfpy_ch6_data/mikey2.txt')
sarah = get_coach_data('hfpy_ch6_data/sarah2.txt')
print(james.name + "'s fastest times are: " + str(james.top3()))
print(julie.name + "'s fastest times are: " + str(julie.top3()))
print(mikey.name + "'s fastest times are: " + str(mikey.top3()))
print(sarah.name + "'s fastest times are: " + str(sarah.top3()))
# Improvement 3: make Athlete a list subclass so it stores its own times.
print('\n==========================> line break <============================\n')
class Athlete(list):
    """A list subclass whose elements are the athlete's recorded times.

    BUGFIXES vs the original:
    - list.__init__([]) initialised a throwaway list, not this instance;
      use super().__init__() to initialise the list part of self.
    - times=[] was a mutable default argument; default to None instead
      (here the default was only read, but the shared-list pattern is a
      latent trap for any future mutation).
    """

    def __init__(self, name, dob=None, times=None):
        super().__init__()
        self.name = name
        self.dob = dob
        # Copy the supplied times into the list part of this instance.
        self.extend(times if times is not None else [])

    def top3(self):
        """Return the three fastest distinct times, normalised by sanitize()."""
        return sorted(set([sanitize(time) for time in self]))[:3]
# Final report using the list-subclass Athlete.
james = get_coach_data('hfpy_ch6_data/james2.txt')
julie = get_coach_data('hfpy_ch6_data/julie2.txt')
mikey = get_coach_data('hfpy_ch6_data/mikey2.txt')
sarah = get_coach_data('hfpy_ch6_data/sarah2.txt')
print(james.name + "'s fastest times are: " + str(james.top3()))
print(julie.name + "'s fastest times are: " + str(julie.top3()))
print(mikey.name + "'s fastest times are: " + str(mikey.top3()))
print(sarah.name + "'s fastest times are: " + str(sarah.top3()))
|
class SemanticException(Exception):
    """Raised on semantic-analysis errors, optionally tagged with a token."""

    def __init__(self, message, token=None):
        # Append the offending token's string form when one is supplied.
        detail = message if token is None else message + ': ' + str(token)
        super(SemanticException, self).__init__(detail)
|
# Maps every accepted key alias (short and long form) to its canonical
# layout-property name.
keys = {
    "x"     :"x",
    "y"     :"y",
    "l"     :"left",
    "left"  :"left",
    "r"     :"right",
    "right" :"right",
    "t"     :"top",
    "top"   :"top",
    "b"     :"bottom",
    "bottom":"bottom",
    "w"     :"width",
    "width" :"width",
    "h"     :"height",
    "height":"height",
    "a"     :"align",
    "align" :"align",
    "d"     :"dock",
    "dock"  :"dock"
    }
# Canonical alignment name -> comma-separated list of accepted aliases;
# inverted into alias -> name by GenerateAlignValue().
align_values_reversed = {
    "TopLeft":"tl,lt,topleft,lefttop",
    "Top":"t,top",
    "TopRight":"tr,rt,topright,righttop",
    "Right":"r,right",
    "BottomRight":"br,rb,bottomright,rightbottom",
    "Bottom":"b,bottom",
    "BottomLeft":"bl,lb,bottomleft,leftbottom",
    "Left":"l,left",
    "Center":"c,center",
}
def GenerateAlignValue():
    """Invert align_values_reversed: map each alias to its canonical name."""
    return {alias: canonical
            for canonical, aliases in align_values_reversed.items()
            for alias in aliases.split(",")}
def ComputeHash(s):
    """Case-insensitive additive hash of s.

    Each character contributes (ordinal relative to 'A', 1-based) plus an
    offset of 2 * its position in the string.
    """
    total = 0
    for pos, ch in enumerate(s.upper()):
        # 'A' -> 1, 'B' -> 2, ...; position offset grows by 2 per character.
        total += (ord(ch) - ord('A') + 1) + 2 * pos
    return total
def ComputeHashes(d_list):
    """Map ComputeHash(key) -> value for every entry of d_list.

    Returns None (after reporting it) if two keys with different mapped
    values collide on the same hash.
    """
    hashed = {}
    for key in d_list:
        h = ComputeHash(key)
        # Record the first value seen for this hash; any later key hashing
        # to the same slot must map to the same value.
        previous = hashed.setdefault(h, d_list[key])
        if previous != d_list[key]:
            print("Colission: key:'"+key+"' mapped to '"+d_list[key]+"' has the same hash as keys mapped to '"+previous+"' !")
            return None
    return hashed
def CreateKeys():
    """Generate C++ constants and a hash->key lookup table for layout keys.

    Returns the generated C++ source as a string, or None when
    ComputeHashes detects a collision among the key aliases.
    """
    res = ComputeHashes(keys)
    if not res: return
    # Collect the distinct canonical key names (the values of `keys`).
    d = {}
    for k in keys:
        d[keys[k]] = 1
    s = "constexpr unsigned char LAYOUT_KEY_NONE = 0;\n"
    v = 1;
    idx = 1
    for k in d:
        # One sequential id plus one power-of-two flag per canonical key.
        s += "constexpr unsigned short LAYOUT_KEY_"+k.upper()+" = %d;\n"%(idx);
        s += "constexpr unsigned short LAYOUT_FLAG_"+k.upper()+" = 0x%04X;\n"%(v);
        v *= 2
        idx+=1
    s += "\n"
    # Dense lookup table indexed by hash; unused slots map to LAYOUT_KEY_NONE.
    s += "constexpr unsigned char _layout_translate_map_["+str(max(res)+1)+"] = {"
    for h in range(0,max(res)+1):
        if h in res:
            s += "LAYOUT_KEY_"+res[h].upper()+","
        else:
            s += "LAYOUT_KEY_NONE,"
    s = s[:-1] + "};\n"
    s += "\n";
    s += "inline unsigned char HashToLayoutKey(unsigned int hash) {\n";
    s += "	if (hash>="+str(max(res)+1)+") return LAYOUT_KEY_NONE;\n";
    s += "	return _layout_translate_map_[hash];\n"
    s += "};\n"
    return s
def CreateAlignValues():
    """Generate a C++ hash->Alignament lookup table and helper function.

    Returns the generated C++ source as a string, or None when
    ComputeHashes detects a collision among the alignment aliases.
    """
    av = GenerateAlignValue()
    res = ComputeHashes(av)
    if not res: return
    s = ""
    #s += "/* HASH VALUES FOR ALIGN:\n"
    #for h in res:
    #    s += "   %s => %d\n"%(res[h],h)
    #s += "*/\n"
    # Dense lookup table indexed by hash; unused slots hold the 0xFF sentinel.
    s += "constexpr unsigned char _align_translate_map_["+str(max(res)+1)+"] = {"
    for h in range(0,max(res)+1):
        if h in res:
            s += "(unsigned char)Alignament::"+res[h]+","
        else:
            s += "0xFF,"
    s = s[:-1] + "};\n"
    s += "\n";
    s += "inline bool HashToAlignament(unsigned int hash, Alignament & align) {\n";
    s += "	if (hash>="+str(max(res)+1)+") return false;\n";
    s += "	auto ch = _align_translate_map_[hash];\n";
    s += "	if (ch == 0xFF) return false;\n";
    s += "	align = static_cast<Alignament>(ch);\n";
    s += "	return true;\n"
    s += "};\n"
    return s
# Assemble and print the full generated C++ snippet (keys + align tables),
# wrapped in "automatically generated" banner comments.
s = "\n//========================================="
s += "\n// THIS CODE WAS AUTOMATICALLY GENERATED !"
s += "\n//========================================="
s += "\n"
s += "\n"+CreateKeys()
s += "\n"
s += "\n"+CreateAlignValues()
s += "\n"
s += "\n//========================================="
s += "\n// END OF AUTOMATICALLY GENERATED CODE"
s += "\n//========================================="
s += "\n"
print(s)
|
# this contains some predefined pair outputs .
def fixedpair(inp):
    """Return a canned reply for a known prompt, or None when unrecognised.

    BUGFIX: the original only assigned `pair` inside the two matched
    branches, so any other input raised UnboundLocalError at `return pair`.
    """
    pair = None  # default for inputs with no predefined reply
    if inp == "who?":
        pair = "I am TS3000."
    elif inp == "who ?":
        pair = "I m bitch"
    return pair
|
# Stub class (all method bodies are `pass`) describing the PlanCircuit API;
# presumably generated for IDE/introspection use against the Revit API —
# TODO confirm against the generator.
class PlanCircuit(APIObject, IDisposable):
    """ An object that represents an enclosed area in a plan view within the Autodesk Revit project. """
    def Dispose(self):
        """ Dispose(self: APIObject,A_0: bool) """
        pass
    def GetPointInside(self):
        """
        GetPointInside(self: PlanCircuit) -> UV
        Returns a point inside the circuit.
        """
        pass
    def ReleaseManagedResources(self, *args):
        """ ReleaseManagedResources(self: APIObject) """
        pass
    def ReleaseUnmanagedResources(self, *args):
        """ ReleaseUnmanagedResources(self: APIObject) """
        pass
    def __enter__(self, *args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self, *args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    # Read-only data descriptors; the lambdas are placeholders for the
    # real native getters/setters.
    Area = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """The enclosed area of the circuit.
    Get: Area(self: PlanCircuit) -> float
    """
    IsRoomLocated = property(
        lambda self: object(), lambda self, v: None, lambda self: None
    )
    """Reports whether there is a room located in this circuit.
    Get: IsRoomLocated(self: PlanCircuit) -> bool
    """
    SideNum = property(lambda self: object(), lambda self, v: None, lambda self: None)
    """The number of sides in the circuit.
    Get: SideNum(self: PlanCircuit) -> int
    """
|
'''
05 - Putting a list of dates in order
Much like numbers and strings, date objects in Python can be put
in order. Earlier dates come before later ones, and so we can sort
a list of date objects from earliest to latest.
What if our Florida hurricane dates had been scrambled? We've gone
ahead and shuffled them so they're in random order and saved the
results as dates_scrambled. Your job is to put them back in chronological
order, and then print the first and last dates from this sorted list.
Instructions:
- Print the first and last dates in dates_scrambled.
- Sort dates_scrambled using Python's built-in sorted() method, and save the
results to dates_ordered.
- Print the first and last dates in dates_ordered.
'''
# Print the first and last scrambled dates
# (dates_scrambled is pre-loaded by the exercise environment.)
print(dates_scrambled[0])
print(dates_scrambled[-1])
# Put the dates in order; date objects sort chronologically.
dates_ordered = sorted(dates_scrambled)
# Print the first and last ordered dates
print(dates_ordered[0])
print(dates_ordered[-1])
|
# Training-config overlay (mmdetection-style, judging by the dict pipeline)
# for a rotated RetinaNet; extends the base config below and overrides only
# the train pipeline to add flip and rotation augmentation.
_base_ = ['./rretinanet_obb_r50_fpn_1x_dota_v3.py']
# switch data path in '../_base_/datasets/dota1_0.py'
angle_version = 'v3'
# Per-channel mean/std applied to RGB input images.
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(type='RResize', img_scale=(1024, 1024)),
    # Random flip in three directions, 25% probability each.
    dict(
        type='RRandomFlip',
        flip_ratio=[0.25, 0.25, 0.25],
        direction=['horizontal', 'vertical', 'diagonal'],
        version=angle_version),
    # Random rotation applied to half of the samples.
    dict(
        type='PolyRandomRotate',
        rotate_ratio=0.5,
        angles_range=180,
        auto_bound=False,
        version=angle_version),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
]
# Only the train split's pipeline is overridden.
data = dict(train=dict(pipeline=train_pipeline))
|
# O(n) time | O(n) space
# Iterative solution 2
def spiralTraverse(array):
    """Return the elements of a 2-D matrix in clockwise spiral order."""
    out = []
    top, bottom = 0, len(array) - 1
    left, right = 0, len(array[0]) - 1
    while top <= bottom and left <= right:
        # Left-to-right along the top row.
        for c in range(left, right + 1):
            out.append(array[top][c])
        # Top-to-bottom along the right column (corner already taken).
        for r in range(top + 1, bottom + 1):
            out.append(array[r][right])
        # A single remaining row or column is fully consumed above;
        # walking back would duplicate elements.
        if top == bottom or left == right:
            break
        # Right-to-left along the bottom row.
        for c in range(right - 1, left - 1, -1):
            out.append(array[bottom][c])
        # Bottom-to-top along the left column.
        for r in range(bottom - 1, top, -1):
            out.append(array[r][left])
        # Shrink the perimeter inward.
        top, left = top + 1, left + 1
        bottom, right = bottom - 1, right - 1
    return out
# O(n) time | O(n) space
# Recursive solution
def spiralTraverse(array):
    """Return the elements of a 2-D matrix in clockwise spiral order."""
    collected = []
    spiralTraverseHelper(array, 0, len(array) - 1, 0,
                         len(array[0]) - 1, collected)
    return collected
def spiralTraverseHelper(array, startRow, endRow, startCol, endCol, result):
    """Append the perimeter bounded by the given rows/cols, then recurse inward."""
    if startRow > endRow or startCol > endCol:
        return
    # Top row, left -> right.
    result.extend(array[startRow][c] for c in range(startCol, endCol + 1))
    # Right column, top -> bottom (corner already taken).
    result.extend(array[r][endCol] for r in range(startRow + 1, endRow + 1))
    # A lone row or column is fully consumed by the two passes above.
    if startRow == endRow or startCol == endCol:
        return
    # Bottom row, right -> left.
    result.extend(array[endRow][c] for c in range(endCol - 1, startCol - 1, -1))
    # Left column, bottom -> top.
    result.extend(array[r][startCol] for r in range(endRow - 1, startRow, -1))
    spiralTraverseHelper(array, startRow + 1, endRow - 1,
                         startCol + 1, endCol - 1, result)
# O(n) time | O(n) space
# Iterative solution 1
def spiralTraverse(array):
    """Return the elements of a 2-D matrix in clockwise spiral order."""
    values = []
    top, bottom = 0, len(array) - 1
    left, right = 0, len(array[0]) - 1
    while top <= bottom and left <= right:
        # Left-to-right along the top row, then retire that row.
        for c in range(left, right + 1):
            values.append(array[top][c])
        top += 1
        # Top-to-bottom along the right column, then retire that column.
        for r in range(top, bottom + 1):
            values.append(array[r][right])
        right -= 1
        # Stop if the shrunken bounds have crossed (single row/column done).
        if not (top <= bottom and left <= right):
            break
        # Right-to-left along the bottom row, then retire it.
        for c in range(right, left - 1, -1):
            values.append(array[bottom][c])
        bottom -= 1
        # Bottom-to-top along the left column, then retire it.
        for r in range(bottom, top - 1, -1):
            values.append(array[r][left])
        left += 1
    return values
|
class MinStack:
    """Stack with O(1) minimum retrieval via a parallel stack of minimums."""

    def __init__(self):
        self.stack = []   # main storage
        self.helper = []  # running minimums; top is always the current min

    def push(self, x: int) -> None:
        """Push x; record it on the minimum stack when it is a new min (<=)."""
        self.stack.append(x)
        if not self.helper or x <= self.helper[-1]:
            self.helper.append(x)

    def pop(self) -> None:
        """Remove and return the top element, keeping the min stack in sync."""
        value = self.stack.pop()
        if self.helper and self.helper[-1] == value:
            self.helper.pop()
        return value

    def top(self) -> int:
        """Return (without removing) the top element."""
        return self.stack[-1]

    def getMin(self) -> int:
        """Return the current minimum element."""
        return self.helper[-1]
|
"""Return empty resources block."""
def GenerateConfig(_):
return """resources:"""
|
# For each of 5 test cases: read N coin counts, then print the minimum
# number of moves to equalise them — the total absolute deviation from the
# (floor) average, halved because each move fixes one surplus and one
# deficit at once.
# NOTE(review): the inner loops reuse the name `i`, shadowing the outer
# test-case counter — harmless here since the counter is never read, but
# confusing.
for i in range(5):
    ans = 0
    coins = []
    for i in range(int(input())):
        coins.append(int(input()))
    avg = sum(coins) // len(coins)
    for i in coins:
        ans += abs(avg - i)
    print(ans // 2)
|
"""
Given an integer, write an algorithm to convert it to hexadecimal. For negative integer, two's complement method is used.
Note:
All letters in hexadecimal (a-f) must be in lowercase.
The hexadecimal string must not contain extra leading 0s.
If the number is zero, it is represented by a single zero character '0'; otherwise,
the first character in the hexadecimal string will not be the zero character.
The given number is guaranteed to fit within the range of a 32-bit signed integer.
You must not use any method provided by the library which converts/formats the number to hex directly.
Example 1:
Input:
26
Output:
"1a"
Example 2:
Input:
-1
Output:
"ffffffff"
"""
class Solution(object):
    def toHex(self, num):
        """Convert a 32-bit signed integer to lowercase hex (two's complement).

        BUGFIX: the original used `num /= 16`, which in Python 3 is true
        division and produces a float, breaking the `num % 16` indexing;
        floor division `//=` is required.  Also removed the dead duplicate
        `mapping` assignment (a dict immediately overwritten by the string).

        :type num: int
        :rtype: str
        """
        if 0 == num:
            return "0"
        digits = "0123456789abcdef"
        # Two's complement: map negatives onto their unsigned 32-bit value.
        if num < 0:
            num += 2 ** 32
        remains = []
        while num:
            remains.append(digits[num % 16])
            num //= 16
        # Digits were collected least-significant first; reverse them.
        return "".join(remains[::-1])
|
# URL configuration (Django-style, judging by app_name/urlpatterns);
# no routes are registered yet.
app_name = 'app004'
urlpatterns = [
]
|
#
# PySNMP MIB module ZHONE-DISMAN-TRACEROUTE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZHONE-DISMAN-TRACEROUTE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:41:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
InterfaceIndexOrZero, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, IpAddress, mib_2, TimeTicks, Unsigned32, ModuleIdentity, ObjectIdentity, Counter32, NotificationType, Integer32, iso, Bits, Counter64, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "IpAddress", "mib-2", "TimeTicks", "Unsigned32", "ModuleIdentity", "ObjectIdentity", "Counter32", "NotificationType", "Integer32", "iso", "Bits", "Counter64", "MibIdentifier")
RowStatus, StorageType, DisplayString, TextualConvention, TruthValue, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "StorageType", "DisplayString", "TextualConvention", "TruthValue", "DateAndTime")
OperationResponseStatus, = mibBuilder.importSymbols("ZHONE-DISMAN-PING-MIB", "OperationResponseStatus")
zhoneIp, = mibBuilder.importSymbols("Zhone", "zhoneIp")
# Auto-generated (pysmi) module identity, OID subtree anchors and global
# scalars of the Zhone traceroute MIB (enterprise OID 1.3.6.1.4.1.5504).
zhoneTraceRouteMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20))
zhoneTraceRouteMIB.setRevisions(('2000-09-21 00:00',))
if mibBuilder.loadTexts: zhoneTraceRouteMIB.setLastUpdated('200009210000Z')
if mibBuilder.loadTexts: zhoneTraceRouteMIB.setOrganization('IETF Distributed Management Working Group')
# Subtree anchors: .0 notifications, .1 objects, .2 conformance, .3 domains.
zhoneTraceRouteNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 0))
zhoneTraceRouteObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1))
zhoneTraceRouteConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2))
zhoneTraceRouteImplementationTypeDomains = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 3))
zhoneTraceRouteUsingUdpProbes = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 3, 1))
if mibBuilder.loadTexts: zhoneTraceRouteUsingUdpProbes.setStatus('current')
# Global scalars: concurrency limit and next free control-table index.
zhoneTraceRouteMaxConcurrentRequests = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 1), Unsigned32().clone(10)).setUnits('requests').setMaxAccess("readwrite")
if mibBuilder.loadTexts: zhoneTraceRouteMaxConcurrentRequests.setStatus('current')
zhoneTraceRouteCtlIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteCtlIndexNext.setStatus('current')
# Control table: one row per configured traceroute test (indexed by
# zhoneTraceRouteCtlIndex), holding target, probe and scheduling parameters.
zhoneTraceRouteCtlTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3), )
if mibBuilder.loadTexts: zhoneTraceRouteCtlTable.setStatus('current')
zhoneTraceRouteCtlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1), ).setIndexNames((0, "ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlIndex"))
if mibBuilder.loadTexts: zhoneTraceRouteCtlEntry.setStatus('current')
zhoneTraceRouteCtlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteCtlIndex.setStatus('current')
zhoneTraceRouteCtlTargetAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlTargetAddressType.setStatus('current')
zhoneTraceRouteCtlTargetAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 3), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlTargetAddress.setStatus('current')
zhoneTraceRouteCtlByPassRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 4), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlByPassRouteTable.setStatus('current')
zhoneTraceRouteCtlDataSize = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65507))).setUnits('octets').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlDataSize.setStatus('current')
zhoneTraceRouteCtlTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 60)).clone(3)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlTimeOut.setStatus('current')
zhoneTraceRouteCtlProbesPerHop = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(3)).setUnits('probes').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlProbesPerHop.setStatus('current')
# Default UDP port 33434 is the conventional traceroute starting port.
zhoneTraceRouteCtlPort = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(33434)).setUnits('UDP Port').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlPort.setStatus('current')
zhoneTraceRouteCtlMaxTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(30)).setUnits('time-to-live value').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlMaxTtl.setStatus('current')
zhoneTraceRouteCtlDSField = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 10), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlDSField.setStatus('current')
zhoneTraceRouteCtlSourceAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 11), InetAddressType().clone('unknown')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlSourceAddressType.setStatus('current')
zhoneTraceRouteCtlSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 12), InetAddress().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlSourceAddress.setStatus('current')
zhoneTraceRouteCtlIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 13), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlIfIndex.setStatus('current')
zhoneTraceRouteCtlMiscOptions = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 14), SnmpAdminString().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlMiscOptions.setStatus('current')
zhoneTraceRouteCtlMaxFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 15), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(5)).setUnits('timeouts').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlMaxFailures.setStatus('current')
zhoneTraceRouteCtlDontFragment = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 16), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlDontFragment.setStatus('current')
zhoneTraceRouteCtlInitialTtl = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 17), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlInitialTtl.setStatus('current')
zhoneTraceRouteCtlFrequency = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 18), Unsigned32()).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlFrequency.setStatus('current')
zhoneTraceRouteCtlStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 19), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlStorageType.setStatus('current')
zhoneTraceRouteCtlAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlAdminStatus.setStatus('current')
zhoneTraceRouteCtlDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 21), SnmpAdminString().clone(hexValue="0")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlDescr.setStatus('current')
zhoneTraceRouteCtlMaxRows = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 22), Unsigned32().clone(50)).setUnits('rows').setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlMaxRows.setStatus('current')
zhoneTraceRouteCtlTrapGeneration = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 23), Bits().clone(namedValues=NamedValues(("pathChange", 0), ("testFailure", 1), ("testCompletion", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlTrapGeneration.setStatus('current')
zhoneTraceRouteCtlCreateHopsEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 24), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlCreateHopsEntries.setStatus('current')
zhoneTraceRouteCtlType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 25), ObjectIdentifier().clone((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 3, 1))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlType.setStatus('current')
zhoneTraceRouteCtlRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 3, 1, 26), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: zhoneTraceRouteCtlRowStatus.setStatus('current')
# Results table: per-test (same CtlIndex) operational state and aggregate
# outcome counters; all columns are read-only.
zhoneTraceRouteResultsTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4), )
if mibBuilder.loadTexts: zhoneTraceRouteResultsTable.setStatus('current')
zhoneTraceRouteResultsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1), ).setIndexNames((0, "ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlIndex"))
if mibBuilder.loadTexts: zhoneTraceRouteResultsEntry.setStatus('current')
zhoneTraceRouteResultsOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsOperStatus.setStatus('current')
zhoneTraceRouteResultsCurHopCount = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 2), Gauge32()).setUnits('hops').setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsCurHopCount.setStatus('current')
zhoneTraceRouteResultsCurProbeCount = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 3), Gauge32()).setUnits('probes').setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsCurProbeCount.setStatus('current')
zhoneTraceRouteResultsIpTgtAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 4), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsIpTgtAddrType.setStatus('current')
zhoneTraceRouteResultsIpTgtAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 5), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsIpTgtAddr.setStatus('current')
zhoneTraceRouteResultsTestAttempts = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 6), Unsigned32()).setUnits('tests').setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsTestAttempts.setStatus('current')
zhoneTraceRouteResultsTestSuccesses = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 7), Unsigned32()).setUnits('tests').setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsTestSuccesses.setStatus('current')
zhoneTraceRouteResultsLastGoodPath = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 4, 1, 8), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteResultsLastGoodPath.setStatus('current')
# Hops table: per-hop RTT statistics and probe counters for each test,
# indexed by (CtlIndex, HopsHopIndex).
zhoneTraceRouteHopsTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5), )
if mibBuilder.loadTexts: zhoneTraceRouteHopsTable.setStatus('current')
zhoneTraceRouteHopsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1), ).setIndexNames((0, "ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlIndex"), (0, "ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsHopIndex"))
if mibBuilder.loadTexts: zhoneTraceRouteHopsEntry.setStatus('current')
zhoneTraceRouteHopsHopIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 1), Unsigned32())
if mibBuilder.loadTexts: zhoneTraceRouteHopsHopIndex.setStatus('current')
zhoneTraceRouteHopsIpTgtAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsIpTgtAddressType.setStatus('current')
zhoneTraceRouteHopsIpTgtAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsIpTgtAddress.setStatus('current')
zhoneTraceRouteHopsMinRtt = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsMinRtt.setStatus('current')
zhoneTraceRouteHopsMaxRtt = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsMaxRtt.setStatus('current')
zhoneTraceRouteHopsAverageRtt = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsAverageRtt.setStatus('current')
zhoneTraceRouteHopsRttSumOfSquares = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsRttSumOfSquares.setStatus('current')
zhoneTraceRouteHopsSentProbes = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsSentProbes.setStatus('current')
zhoneTraceRouteHopsProbeResponses = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsProbeResponses.setStatus('current')
zhoneTraceRouteHopsLastGoodProbe = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 1, 5, 1, 10), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneTraceRouteHopsLastGoodProbe.setStatus('current')
# Notifications (under the .0 subtree): path change, test failure and test
# completion; each carries the test's target address varbind.
zhoneTraceRoutePathChange = NotificationType((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 0, 1)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsIpTgtAddr"))
if mibBuilder.loadTexts: zhoneTraceRoutePathChange.setStatus('current')
zhoneTraceRouteTestFailed = NotificationType((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 0, 2)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsIpTgtAddr"))
if mibBuilder.loadTexts: zhoneTraceRouteTestFailed.setStatus('current')
zhoneTraceRouteTestCompleted = NotificationType((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 0, 3)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsIpTgtAddr"))
if mibBuilder.loadTexts: zhoneTraceRouteTestCompleted.setStatus('current')
# Conformance section: object and notification groups.  setStatus is only
# applied on newer mibBuilder versions (> 4.4.0), matching pysmi output.
zhoneTraceRouteGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2, 1))
zhoneTraceRouteGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2, 1, 1)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteMaxConcurrentRequests"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlTargetAddressType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlTargetAddress"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlByPassRouteTable"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlDataSize"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlTimeOut"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlProbesPerHop"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlPort"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlMaxTtl"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlDSField"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlSourceAddressType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlSourceAddress"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlIfIndex"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlMiscOptions"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlMaxFailures"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlDontFragment"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlInitialTtl"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlFrequency"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlStorageType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlAdminStatus"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlMaxRows"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlTrapGeneration"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlDescr"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlCreateHopsEntries"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteCtlRowStatus"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsOperStatus"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsCurHopCount"), ("ZHONE-DISMAN-TRACEROUTE-MIB", 
"zhoneTraceRouteResultsCurProbeCount"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsIpTgtAddrType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsIpTgtAddr"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsTestAttempts"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsTestSuccesses"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    zhoneTraceRouteGroup = zhoneTraceRouteGroup.setStatus('current')
zhoneTraceRouteTimeStampGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2, 1, 2)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteResultsLastGoodPath"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    zhoneTraceRouteTimeStampGroup = zhoneTraceRouteTimeStampGroup.setStatus('current')
zhoneTraceRouteNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2, 1, 3)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRoutePathChange"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteTestFailed"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteTestCompleted"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    zhoneTraceRouteNotificationsGroup = zhoneTraceRouteNotificationsGroup.setStatus('current')
zhoneTraceRouteHopsTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5504, 4, 1, 20, 2, 1, 4)).setObjects(("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsIpTgtAddressType"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsIpTgtAddress"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsMinRtt"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsMaxRtt"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsAverageRtt"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsRttSumOfSquares"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsSentProbes"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsProbeResponses"), ("ZHONE-DISMAN-TRACEROUTE-MIB", "zhoneTraceRouteHopsLastGoodProbe"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    zhoneTraceRouteHopsTableGroup = zhoneTraceRouteHopsTableGroup.setStatus('current')
mibBuilder.exportSymbols("ZHONE-DISMAN-TRACEROUTE-MIB", zhoneTraceRouteMaxConcurrentRequests=zhoneTraceRouteMaxConcurrentRequests, zhoneTraceRouteResultsLastGoodPath=zhoneTraceRouteResultsLastGoodPath, zhoneTraceRouteResultsIpTgtAddrType=zhoneTraceRouteResultsIpTgtAddrType, zhoneTraceRouteHopsTable=zhoneTraceRouteHopsTable, zhoneTraceRouteCtlDataSize=zhoneTraceRouteCtlDataSize, zhoneTraceRouteCtlIfIndex=zhoneTraceRouteCtlIfIndex, zhoneTraceRouteCtlDescr=zhoneTraceRouteCtlDescr, zhoneTraceRouteCtlSourceAddress=zhoneTraceRouteCtlSourceAddress, zhoneTraceRouteHopsHopIndex=zhoneTraceRouteHopsHopIndex, zhoneTraceRouteHopsEntry=zhoneTraceRouteHopsEntry, zhoneTraceRouteResultsIpTgtAddr=zhoneTraceRouteResultsIpTgtAddr, zhoneTraceRouteHopsProbeResponses=zhoneTraceRouteHopsProbeResponses, zhoneTraceRouteNotificationsGroup=zhoneTraceRouteNotificationsGroup, zhoneTraceRouteCtlDSField=zhoneTraceRouteCtlDSField, zhoneTraceRouteHopsTableGroup=zhoneTraceRouteHopsTableGroup, zhoneTraceRouteImplementationTypeDomains=zhoneTraceRouteImplementationTypeDomains, zhoneTraceRouteCtlType=zhoneTraceRouteCtlType, zhoneTraceRouteHopsSentProbes=zhoneTraceRouteHopsSentProbes, zhoneTraceRouteCtlSourceAddressType=zhoneTraceRouteCtlSourceAddressType, zhoneTraceRouteResultsEntry=zhoneTraceRouteResultsEntry, zhoneTraceRouteCtlTrapGeneration=zhoneTraceRouteCtlTrapGeneration, zhoneTraceRouteCtlTimeOut=zhoneTraceRouteCtlTimeOut, zhoneTraceRouteCtlTable=zhoneTraceRouteCtlTable, zhoneTraceRouteResultsCurHopCount=zhoneTraceRouteResultsCurHopCount, zhoneTraceRouteTimeStampGroup=zhoneTraceRouteTimeStampGroup, zhoneTraceRouteHopsLastGoodProbe=zhoneTraceRouteHopsLastGoodProbe, zhoneTraceRouteCtlFrequency=zhoneTraceRouteCtlFrequency, PYSNMP_MODULE_ID=zhoneTraceRouteMIB, zhoneTraceRouteHopsMinRtt=zhoneTraceRouteHopsMinRtt, zhoneTraceRouteHopsIpTgtAddressType=zhoneTraceRouteHopsIpTgtAddressType, zhoneTraceRouteHopsMaxRtt=zhoneTraceRouteHopsMaxRtt, zhoneTraceRouteObjects=zhoneTraceRouteObjects, 
zhoneTraceRouteCtlMaxFailures=zhoneTraceRouteCtlMaxFailures, zhoneTraceRouteCtlAdminStatus=zhoneTraceRouteCtlAdminStatus, zhoneTraceRouteCtlByPassRouteTable=zhoneTraceRouteCtlByPassRouteTable, zhoneTraceRouteCtlInitialTtl=zhoneTraceRouteCtlInitialTtl, zhoneTraceRouteCtlTargetAddressType=zhoneTraceRouteCtlTargetAddressType, zhoneTraceRouteCtlMaxTtl=zhoneTraceRouteCtlMaxTtl, zhoneTraceRouteCtlMiscOptions=zhoneTraceRouteCtlMiscOptions, zhoneTraceRouteResultsOperStatus=zhoneTraceRouteResultsOperStatus, zhoneTraceRouteResultsTestSuccesses=zhoneTraceRouteResultsTestSuccesses, zhoneTraceRouteHopsAverageRtt=zhoneTraceRouteHopsAverageRtt, zhoneTraceRouteConformance=zhoneTraceRouteConformance, zhoneTraceRouteCtlProbesPerHop=zhoneTraceRouteCtlProbesPerHop, zhoneTraceRouteTestFailed=zhoneTraceRouteTestFailed, zhoneTraceRouteMIB=zhoneTraceRouteMIB, zhoneTraceRouteGroups=zhoneTraceRouteGroups, zhoneTraceRouteTestCompleted=zhoneTraceRouteTestCompleted, zhoneTraceRouteResultsTestAttempts=zhoneTraceRouteResultsTestAttempts, zhoneTraceRouteCtlTargetAddress=zhoneTraceRouteCtlTargetAddress, zhoneTraceRouteCtlStorageType=zhoneTraceRouteCtlStorageType, zhoneTraceRouteUsingUdpProbes=zhoneTraceRouteUsingUdpProbes, zhoneTraceRouteResultsCurProbeCount=zhoneTraceRouteResultsCurProbeCount, zhoneTraceRouteCtlEntry=zhoneTraceRouteCtlEntry, zhoneTraceRouteCtlIndex=zhoneTraceRouteCtlIndex, zhoneTraceRouteGroup=zhoneTraceRouteGroup, zhoneTraceRoutePathChange=zhoneTraceRoutePathChange, zhoneTraceRouteNotifications=zhoneTraceRouteNotifications, zhoneTraceRouteCtlPort=zhoneTraceRouteCtlPort, zhoneTraceRouteCtlMaxRows=zhoneTraceRouteCtlMaxRows, zhoneTraceRouteHopsRttSumOfSquares=zhoneTraceRouteHopsRttSumOfSquares, zhoneTraceRouteCtlIndexNext=zhoneTraceRouteCtlIndexNext, zhoneTraceRouteCtlDontFragment=zhoneTraceRouteCtlDontFragment, zhoneTraceRouteCtlRowStatus=zhoneTraceRouteCtlRowStatus, zhoneTraceRouteHopsIpTgtAddress=zhoneTraceRouteHopsIpTgtAddress, 
zhoneTraceRouteResultsTable=zhoneTraceRouteResultsTable, zhoneTraceRouteCtlCreateHopsEntries=zhoneTraceRouteCtlCreateHopsEntries)
|
# Given a list of intervals,
# merge all the overlapping intervals to produce a list that has only mutually exclusive intervals.
# Example:
# Intervals: [[1,4], [2,5], [7,9]]
# Output: [[1,5], [7,9]]
# Explanation: Since the first two intervals [1,4] and [2,5] overlap, we merged them into one [1,5].
# O(N) for merge O(NlogN) for sorting -> O(NlogN)
# space:O(N)
class Interval:
    """A closed interval [start, end] on the number line."""

    def __init__(self, start, end) -> None:
        self.start = start
        self.end = end

    def print_interval(self):
        """Print the interval as "[start,end]" with no trailing newline."""
        print(f"[{self.start},{self.end}]", end='')


def merge_intervals(arr):
    """Merge overlapping intervals into mutually exclusive ones.

    Takes a list of [start, end] pairs and returns a list of Interval
    objects sorted by start, where overlapping (or touching, since the
    comparison is <=) inputs have been collapsed into one interval.
    O(N log N) time for the sort, O(N) extra space.
    """
    intervals = [Interval(pair[0], pair[1]) for pair in arr]
    if len(intervals) < 2:
        return intervals
    intervals.sort(key=lambda iv: iv.start)
    merged = []
    current_start, current_end = intervals[0].start, intervals[0].end
    for iv in intervals[1:]:
        if iv.start <= current_end:
            # Overlaps the interval being built: extend its right edge.
            current_end = max(iv.end, current_end)
        else:
            # Disjoint: close out the current interval and start a new one.
            merged.append(Interval(current_start, current_end))
            current_start, current_end = iv.start, iv.end
    merged.append(Interval(current_start, current_end))
    return merged
# Demo: run merge_intervals on three sample inputs and print each result.
# Expected output: [1,5][7,9]
for i in merge_intervals([[1,4], [2,5], [7,9]]):
    i.print_interval()
print()
# Expected output: [2,4][5,9]
for i in merge_intervals([[6,7], [2,4], [5,9]]):
    i.print_interval()
print()
# Expected output: [1,6]
for i in merge_intervals([[1,4], [2,6], [3,5]]):
    i.print_interval()
print()
|
class AlignmentType:
    """Exposes an object's record type under the name ``_alignment_type``."""

    @property
    def _alignment_type(self):
        # Delegates to self.record_type, which is presumably supplied by the
        # class this is combined with -- TODO confirm at the use site.
        return self.record_type
|
"""
Write a function called sed that takes as arguments a pattern string, a replacement string,
and two filenames; it should read the first file and write the contents into the second file
(creating it if necessary). If the pattern string appears anywhere in the file, it should be
replaced with the replacement string.
If an error occurs while opening, reading, writing or closing files, your program should
catch the exception, print an error message, and exit.
"""
def sed(pattern: str, replacement: str, source: str, dest: str) -> None:
    """Copy *source* to *dest*, replacing every occurrence of *pattern*.

    Reads the whole source file, substitutes *replacement* for each
    occurrence of *pattern* (plain substring match, not regex), and writes
    the result to *dest*, creating or truncating it as needed.

    On any I/O failure an error message is printed and the function
    returns normally.
    NOTE(review): the exercise text asks the program to *exit* on error;
    kept as print-and-return to preserve the original behaviour.
    """
    try:
        # "with" guarantees both handles are closed even if read/write fails
        # (the original leaked open handles on error and used a bare except).
        with open(source, 'r') as src_file, open(dest, 'w') as dest_file:
            dest_file.write(src_file.read().replace(pattern, replacement))
    except OSError as exc:
        print(f"An error occurred while reading or writing the file: {exc}")
if __name__ == '__main__':
    # Example invocation; paths are relative to the current working directory.
    sed('zymology', 'vitor', 'think-python-2e-exercises/words.txt', 'think-python-2e-exercises/new_text.txt')
|
def reverse_number(n):
    """Return the non-negative integer n with its decimal digits reversed."""
    reversed_value = 0
    while n > 0:
        # Peel off the last digit and append it to the reversed value.
        n, last_digit = divmod(n, 10)
        reversed_value = reversed_value * 10 + last_digit
    return reversed_value
def isPrime(n):
    """Return True when n is prime, False otherwise.

    Fixed: the original returned True for COMPOSITE numbers and False for
    primes (the flag was set when a divisor was found), and it returned
    False for n == 1, which the caller then treated as prime.  Trial
    division now stops at sqrt(n) instead of n - 1.
    """
    if n <= 1:
        return False
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return False
    return True


if __name__ == '__main__':
    # An Emirp number is a prime whose digit reversal is also prime.
    # NOTE(review): palindromic primes (e.g. 11) are reported as Emirp here,
    # matching the original script; strict definitions exclude them.
    n = int(input())
    if isPrime(n) and isPrime(reverse_number(n)):
        print(n, "is an Emirp number")
    else:
        print(n, "is not an Emirp number")
|
#
# PySNMP MIB module HPN-ICF-L4RDT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HPN-ICF-L4RDT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:39:34 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
hpnicfCommon, = mibBuilder.importSymbols("HPN-ICF-OID-MIB", "hpnicfCommon")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, NotificationType, Gauge32, MibIdentifier, Unsigned32, IpAddress, Integer32, iso, ModuleIdentity, Counter32, TimeTicks, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "NotificationType", "Gauge32", "MibIdentifier", "Unsigned32", "IpAddress", "Integer32", "iso", "ModuleIdentity", "Counter32", "TimeTicks", "ObjectIdentity")
MacAddress, RowStatus, DisplayString, TextualConvention, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowStatus", "DisplayString", "TextualConvention", "TruthValue")
# --- Module identity for the HPN-ICF L4 redirect MIB (pysmi-generated) ---
# NOTE(review): generated code; changes normally belong in the ASN.1 source.
hpnicfL4Redirect = ModuleIdentity((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10))
if mibBuilder.loadTexts: hpnicfL4Redirect.setLastUpdated('200409210000Z')
if mibBuilder.loadTexts: hpnicfL4Redirect.setOrganization('')
if mibBuilder.loadTexts: hpnicfL4Redirect.setContactInfo('')
if mibBuilder.loadTexts: hpnicfL4Redirect.setDescription('See description above')
# --- Web-cache table: one row per known Web Cache device ---
hpnicfL4RedirectCacheTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1), )
if mibBuilder.loadTexts: hpnicfL4RedirectCacheTable.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheTable.setDescription('This table contains an entry for each Web Cache device that this unit is aware of.')
hpnicfL4RedirectCacheEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1), ).setIndexNames((0, "HPN-ICF-L4RDT-MIB", "hpnicfL4RedirectCacheIpAddress"))
if mibBuilder.loadTexts: hpnicfL4RedirectCacheEntry.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheEntry.setDescription('Each row specifies a known Web Cache device.')
hpnicfL4RedirectCacheIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 1), IpAddress())
if mibBuilder.loadTexts: hpnicfL4RedirectCacheIpAddress.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheIpAddress.setDescription('This object specifies the IP address of the Web Cache device.')
hpnicfL4RedirectCacheRedirectionStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("disabledNotRedirecting", 1), ("enabledNoHealthChecker", 2), ("enabledHealthChecking", 3), ("enabledHealthCheckOKNotRedirecting", 4), ("enabledHealthCheckFailed", 5), ("enabledRedirecting", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpnicfL4RedirectCacheRedirectionStatus.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheRedirectionStatus.setDescription('This object returns the current state of traffic redirection to the cache. If redirection is disabled, this object shall return disabledNotRedirecting(1). If a unit cannot be selected to perform the cache health check, this object shall return enabledNoHealthChecker(2). If the software is determining if the cache is able to do redirection(this will happen when the redirection state transitions from disabled to enabled), this object shall return enabledHealthChecking(3). If the cache health check succeeded but the hardware is unable to support redirection to the cache port, this object shall return enabledHealthCheckOKNotRedirecting(4). If the latest health check of the cache has failed, this object shall return enabledHealthCheckFailed(5). If the cache is in use and traffic is being redirected to it, this object shall return enabledRedirecting(6). The default value is disabledNotRedirecting(1).')
hpnicfL4RedirectCachePort = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 3), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectCachePort.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCachePort.setDescription('This object stores the ifIndex that identifies the port or link aggregation which provides the connection that leads to the cache. If only manual cache configuration is supported, this value must be supplied. The method of cache configuration can be ascertained by the presence or absence of the L4 manual cache configuration id within the 3com-352 MIB. The default value is 0.')
hpnicfL4RedirectCacheRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectCacheRowStatus.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheRowStatus.setDescription('This object is used to create and remove Web Cache entries. The following are the valid values that may be written to RowStatus: Writing createAndGo(4) to the RowStatus of a non-existent row shall create a row with default values and shall set the row to active(1). If the row already exists, it shall be an error. Writing createAndWait(5) to the RowStatus of a non-existent row shall create a row with default values and shall set the row to notInService(2). If the row already exists, it shall be an error. Writing active(1) to the RowStatus of an existing row shall change the value of that row to active(1). Writing active(1) to the RowStatus of an existing row that is already active(1) shall not cause an error, the row shall remain active(1). If the row does not exist, it shall be an error. Writing notInService(2) to the RowStatus of an existing row shall change the value of that row to notInService(2). Writing notInService(2) to the RowStatus of an existing row that is already notInService(2) shall not cause an error, the row shall remain notInService(2). If the row does not exist, it shall be an error. Writing destroy(6) to the RowStatus of a non-existent row shall be an error. If the row exists, it shall be removed. Writing notReady(3) to the RowStatus of a non-existent row or to an existent row shall be an error. If the user does not supply values for the necessary objects, default values will be supplied. Attempts to create more entries than the hardware can support shall be rejected.')
hpnicfL4RedirectCacheMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 5), MacAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectCacheMacAddress.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheMacAddress.setDescription('This object defines the MAC address of the attached Web cache device. If only manual configuration of the cache is supported, this value must be supplied. The method of cache configuration can be ascertained by the presence or absence of the L4 manual cache configuration id within the 3com-352 MIB. The default value is 0.')
hpnicfL4RedirectCacheVlan = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 6), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectCacheVlan.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheVlan.setDescription('This object specifies the VLAN which the cache port belongs to.')
hpnicfL4RedirectCacheTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 1, 1, 7), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectCacheTcpPort.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectCacheTcpPort.setDescription('This object specifies the TCP port number that is being redirected ')
# --- IP exclusion table: hosts/subnets excluded from redirection ---
hpnicfL4RedirectIpExclusionTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 2), )
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionTable.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionTable.setDescription('This table lists the IP addresses and subnetworks that Web Cache redirection is not supported for. Some devices may not support addition to this table.')
hpnicfL4RedirectIpExclusionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 2, 1), ).setIndexNames((0, "HPN-ICF-L4RDT-MIB", "hpnicfL4RedirectIpExclusionIpAddress"))
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionEntry.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionEntry.setDescription('Each row contains an IP address or a IP subnetwork that is being excluded from the redirection.')
hpnicfL4RedirectIpExclusionIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 2, 1, 1), IpAddress())
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionIpAddress.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionIpAddress.setDescription('This object specifies the IP address or the subnetwork address that is to be excluded.')
hpnicfL4RedirectIpExclusionMaskLen = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionMaskLen.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionMaskLen.setDescription('This object provides the number of bits in the subnetwork mask. This mask shall be applied to the excludeIP address to determine the subnetwork that is to be excluded. A value of 32 implies that the excludeIP address refers to an individual host. The default value is 32.')
hpnicfL4RedirectIpExclusionRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 2, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionRowStatus.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectIpExclusionRowStatus.setDescription('This object is used to add rows to the Exclusion Table. The following are the valid values that may be written to RowStatus: Writing createAndGo(4) to the RowStatus of a non-existent row shall create a new row. The new row shall be active(1). If the row exists, it shall be an error. Writing createAndWait(5) to the RowStatus of a non-existent row or to an existent row shall be an error. Writing active(1) to the RowStatus of an existing row shall change the value of that row to active(1). Writing active(1) to the RowStatus of an existing row that is already active(1) shall not cause an error, the row shall remain active(1). If the row does not exist, it shall be an error. Writing notInService(2) to the RowStatus of an existing row shall change the value of that row to notInService(2). Writing notInService(2) to the RowStatus of an existing row that is already notInService(2) shall not cause an error, the row shall remain notInService(2). If the row does not exist, it shall be an error. Writing destroy(6) to the RowStatus of a non-existent row shall be an error. If the row exists, it shall be removed. Writing notReady(3) to the RowStatus of a non-existent row or to an existent row shall be an error. Attempts to create more entries than the hardware can support shall be rejected.')
# --- VLAN table: VLANs whose traffic is redirected to the cache ---
hpnicfL4RedirectVlanTable = MibTable((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 3), )
if mibBuilder.loadTexts: hpnicfL4RedirectVlanTable.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectVlanTable.setDescription('This table contains a row for each VLAN of the packet which need to be redirected to the Web cache.')
hpnicfL4RedirectVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 3, 1), ).setIndexNames((0, "HPN-ICF-L4RDT-MIB", "hpnicfL4RedirectVlanID"))
if mibBuilder.loadTexts: hpnicfL4RedirectVlanEntry.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectVlanEntry.setDescription('Each row specifies a VLAN of the packet which need to be redirected to the Web cache.')
hpnicfL4RedirectVlanID = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 3, 1, 1), Integer32())
if mibBuilder.loadTexts: hpnicfL4RedirectVlanID.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectVlanID.setDescription('This object specifies the VLAN ID of the packet which need to be redirected to the Web cache.')
hpnicfL4RedirectVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hpnicfL4RedirectVlanRowStatus.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectVlanRowStatus.setDescription('This object allows ports to be added and removed from the table. The following are the valid values that may be written to RowStatus: Writing createAndGo(4) to the RowStatus of a non-existent row shall create a new row. The new row shall be active(1). If the row exists, it shall be an error. Writing createAndWait(5) to the RowStatus of a non-existent row or to an existent row shall be an error. Writing active(1) to the RowStatus of an existing row shall change the value of that row to active(1). Writing active(1) to the RowStatus of an existing row that is already active(1) shall not cause an error, the row shall remain active(1). If the row does not exist, it shall be an error. Writing notInService(2) to the RowStatus of a non-existent row or to an existent row shall be an error. Writing destroy(6) to the RowStatus of a non-existent row shall be an error. If the row exists, it shall be removed. Writing notReady(3) to the RowStatus of a non-existent row or to an existent row shall be an error. Attempts to create more entries than the hardware can support shall be rejected.')
# --- Read-only scalars: status string and free-entry counters ---
hpnicfL4RedirectInformationString = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpnicfL4RedirectInformationString.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectInformationString.setDescription('This object shall contain the string generated as a result of a Layer 4 Redirection configuration. It shall contain either a string describing successful configuration or a string describing unsuccessful configuration. This length of this string shall be no longer than 80 characters.')
hpnicfL4RedirectFreeCacheEntries = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpnicfL4RedirectFreeCacheEntries.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectFreeCacheEntries.setDescription('This object indicates the number of entries that may still be added to the hpnicfL4RedirectCacheTable.')
hpnicfL4RedirectFreeIpExclusionEntries = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpnicfL4RedirectFreeIpExclusionEntries.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectFreeIpExclusionEntries.setDescription('This object indicates the number of entries that may still be added to the hpnicfL4RedirectIpExclusionTable.')
hpnicfL4RedirectFreeVlanEntries = MibScalar((1, 3, 6, 1, 4, 1, 11, 2, 14, 11, 15, 2, 10, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hpnicfL4RedirectFreeVlanEntries.setStatus('current')
if mibBuilder.loadTexts: hpnicfL4RedirectFreeVlanEntries.setDescription('This object indicates the number of entries that may still be added to the hpnicfL4RedirectVlanTable.')
# Re-export every symbol so dependent MIB modules can import them by name.
mibBuilder.exportSymbols("HPN-ICF-L4RDT-MIB", hpnicfL4RedirectCacheEntry=hpnicfL4RedirectCacheEntry, hpnicfL4RedirectVlanEntry=hpnicfL4RedirectVlanEntry, hpnicfL4RedirectFreeVlanEntries=hpnicfL4RedirectFreeVlanEntries, hpnicfL4RedirectFreeCacheEntries=hpnicfL4RedirectFreeCacheEntries, hpnicfL4RedirectIpExclusionMaskLen=hpnicfL4RedirectIpExclusionMaskLen, hpnicfL4RedirectCacheTcpPort=hpnicfL4RedirectCacheTcpPort, hpnicfL4RedirectFreeIpExclusionEntries=hpnicfL4RedirectFreeIpExclusionEntries, hpnicfL4RedirectIpExclusionRowStatus=hpnicfL4RedirectIpExclusionRowStatus, PYSNMP_MODULE_ID=hpnicfL4Redirect, hpnicfL4RedirectCachePort=hpnicfL4RedirectCachePort, hpnicfL4RedirectCacheRedirectionStatus=hpnicfL4RedirectCacheRedirectionStatus, hpnicfL4RedirectIpExclusionEntry=hpnicfL4RedirectIpExclusionEntry, hpnicfL4RedirectCacheIpAddress=hpnicfL4RedirectCacheIpAddress, hpnicfL4RedirectCacheTable=hpnicfL4RedirectCacheTable, hpnicfL4Redirect=hpnicfL4Redirect, hpnicfL4RedirectCacheRowStatus=hpnicfL4RedirectCacheRowStatus, hpnicfL4RedirectVlanID=hpnicfL4RedirectVlanID, hpnicfL4RedirectInformationString=hpnicfL4RedirectInformationString, hpnicfL4RedirectIpExclusionTable=hpnicfL4RedirectIpExclusionTable, hpnicfL4RedirectCacheVlan=hpnicfL4RedirectCacheVlan, hpnicfL4RedirectIpExclusionIpAddress=hpnicfL4RedirectIpExclusionIpAddress, hpnicfL4RedirectVlanTable=hpnicfL4RedirectVlanTable, hpnicfL4RedirectVlanRowStatus=hpnicfL4RedirectVlanRowStatus, hpnicfL4RedirectCacheMacAddress=hpnicfL4RedirectCacheMacAddress)
|
"""
file access support
"""
def backup_one_line(fd):
    """
    Moves the file pointer to right after the previous newline.

    @param fd : file opened so that seeking relative to the end is allowed
                (binary mode, or an in-memory buffer)
    @type fd : file descriptor

    Notes
    =====
    It ignores the current (last) character in case it is a newline.
    If there is no earlier newline, the backwards seek eventually fails
    and raises (unchanged from the original behaviour).
    Fixed: the newline sentinel now matches the stream's type; the
    original compared the str "\\n" against reads that may be bytes,
    which could never match on a binary stream in Python 3.
    """
    # Choose a sentinel of the same type the stream yields (str or bytes).
    newline = b"\n" if isinstance(fd.read(0), bytes) else "\n"
    offset = -2  # start one before the last character
    char = None
    while char != newline:
        fd.seek(offset, 2)  # whence=2: offset is relative to end of file
        char = fd.read(1)   # read() leaves the pointer just past the newline
        offset -= 1
def get_last_line(fd):
    """
    Gets the last line in a file without reading the whole file.

    This is useful for getting the last line of a very large file.
    It also reports whether the last line was completed with a newline
    or interrupted.

    @param fd : file opened so that seeking relative to the end is allowed
                (binary mode, or an in-memory buffer)
    @type fd : file descriptor

    @return : (complete_line, line) where complete_line is True when the
              file's final character is a newline

    Fixed: the newline comparison now matches the stream's type; the
    original compared the str "\\n" against a read that may be bytes,
    which is always unequal on a binary stream in Python 3.
    """
    fd.seek(-1, 2)              # inspect the very last character
    clast = fd.read(1)
    newline = b"\n" if isinstance(clast, bytes) else "\n"
    complete_line = clast == newline
    backup_one_line(fd)         # reposition to the start of the last line
    line = fd.readline()
    return (complete_line, line)
|
"""
Error types
"""
class AlreadyBoundError(Exception):
    """Raised if a factory is already bound to a name."""
class CyclicGraphError(Exception):
    """Raised if a graph has a cycle."""
class LockedGraphError(Exception):
    """Raised when attempting to create a component in a locked object graph."""
class NotBoundError(Exception):
    """Raised if no factory is bound to the requested name."""
class ValidationError(Exception):
    """Raised if a configuration value fails validation."""
|
"""
Ratiorg got statues of different sizes as a present from CodeMaster for his birthday, each statue having an non-negative integer size.
Since he likes to make things perfect, he wants to arrange them from smallest to largest so that each statue will be bigger than the previous one exactly by 1.
He may need some additional statues to be able to accomplish that. Help him figure out the minimum number of additional statues needed.
"""
def solution(statues):
    """Return the minimum number of statues needed so the sorted sizes
    form a run of consecutive integers.

    Sorts the sizes once and sums the number of missing integers between
    each pair of adjacent sizes.  O(N log N) time, O(N) space -- replaces
    the original's O(N^2) repeated ``list.insert``.  An empty input now
    returns 0 instead of raising IndexError.
    """
    heights = sorted(statues)
    # max(..., 0) keeps equal adjacent sizes from contributing a negative
    # gap, matching the original insertion-based count for duplicates.
    return sum(max(nxt - cur - 1, 0) for cur, nxt in zip(heights, heights[1:]))
|
# -*- coding: utf-8 -*-
# @Time : 2021/1/3
# @Author : handsomezhou
# File-name suffix used for database files, without the leading dot.
DB_SUFFIX = "db"
# The same suffix including the leading full stop, ready to append to a base name.
DB_SUFFIX_WITH_FULL_STOP = ".db"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.