| content | fixed_cases |
|---|---|
class Solution:
def canCross(self, stones: List[int]) -> bool:
n = len(stones)
# dp[i][j] := True if a frog can make a size j jump to stones[i]
dp = [[False] * (n + 1) for _ in range(n)]
dp[0][0] = True
for i in range(1, n):
for j in range(i):
k = stones[i] - stones[j]
if k > n:
continue
for x in (k - 1, k, k + 1):
if 0 <= x <= n:
dp[i][k] |= dp[j][x]
return any(dp[-1])
|
class Solution:
def can_cross(self, stones: List[int]) -> bool:
n = len(stones)
dp = [[False] * (n + 1) for _ in range(n)]
dp[0][0] = True
for i in range(1, n):
for j in range(i):
k = stones[i] - stones[j]
if k > n:
continue
for x in (k - 1, k, k + 1):
if 0 <= x <= n:
dp[i][k] |= dp[j][x]
return any(dp[-1])
|
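The dp comment above encodes the frog-jump recurrence: dp[i][k] is True when the frog can land on stones[i] with a jump of exactly k units, having arrived from some earlier stone via a jump of k-1, k, or k+1. A small usage sketch, assuming a properly indented copy of the Solution class above (with `from typing import List` for its annotation) is in scope; the stones list is just a sample input:

```python
stones = [0, 1, 3, 5, 6, 8, 12, 17]   # sample input
print(Solution().canCross(stones))    # -> True: 0->1->3->5->8->12->17 is reachable
```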
class PPSTimingCalibrationModeEnum:
CondDB = 0
JSON = 1
SQLite = 2
|
class Ppstimingcalibrationmodeenum:
cond_db = 0
json = 1
sq_lite = 2
|
a = input()
i = ord(a)
for i in range(i-96):
print(chr(i+97),end=" ")
|
a = input()
i = ord(a)
for i in range(i - 96):
print(chr(i + 97), end=' ')
|
n=int(input("From number"))
n1=int(input("To number"))
for i in range(n,n1):
for j in range(2,i):
if i%j==0:
break
else:
print(i)
|
n = int(input('From number'))
n1 = int(input('To number'))
for i in range(n, n1):
for j in range(2, i):
if i % j == 0:
break
else:
print(i)
|
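The loop above leans on Python's for/else: the else branch runs only when the inner loop completes without hitting break, i.e. when no divisor of i was found. A self-contained sketch of the same test factored into a helper (it additionally rejects 0 and 1, which the bare loop above would print, and only trial-divides up to the square root):

```python
def is_prime(i: int) -> bool:
    """Trial division up to sqrt(i); 0, 1 and negatives are not prime."""
    if i < 2:
        return False
    for j in range(2, int(i ** 0.5) + 1):
        if i % j == 0:
            return False
    return True

# Equivalent to the for/else loop for a range such as [10, 30).
print([i for i in range(10, 30) if is_prime(i)])  # [11, 13, 17, 19, 23, 29]
```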
# Recursive implementation to find the gcd (greatest common divisor) of two integers using the Euclidean algorithm.
# For more than two numbers, e.g. three, you can nest it like this: gcd(a, gcd(b, c)), etc.
# This runs in O(log(n)) where n is the maximum of a and b.
# :param a: the first integer
# :param b: the second integer
# :return: the greatest common divisor (gcd) of the two integers.
def gcd(a, b):
# print("New *a* is " + str(a) + ", new *b* is " + str(b))
if b == 0:
# print("b is 0, stopping recursion, a is the gcd: " + str(a))
return a
# print("Recursing with new a = b and new b = a % b...")
return gcd(b, a % b)
|
def gcd(a, b):
if b == 0:
return a
return gcd(b, a % b)
|
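As the comment above notes, the gcd of more than two numbers can be computed by nesting calls, e.g. gcd(a, gcd(b, c)). A brief usage sketch, assuming the recursive gcd above is in scope; math.gcd is used only as a cross-check:

```python
import math

# assuming gcd(a, b) as defined above is in scope
print(gcd(48, 18))                       # 6
print(gcd(48, gcd(18, 30)))              # gcd of three numbers -> 6
print(math.gcd(48, 18) == gcd(48, 18))   # True: agrees with the standard library
```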
def _build(**kwargs):
kwargs.setdefault("mac_src", '00.00.00.00.00.01')
kwargs.setdefault("mac_dst", '00.00.00.00.00.02')
kwargs.setdefault("mac_src_step", '00.00.00.00.00.01')
kwargs.setdefault("mac_dst_step", '00.00.00.00.00.01')
kwargs.setdefault("arp_src_hw_addr", '01.00.00.00.00.01')
kwargs.setdefault("arp_dst_hw_addr", '01.00.00.00.00.02')
kwargs.setdefault("ip_src_addr", '11.1.1.1')
kwargs.setdefault("ip_dst_addr", '225.1.1.1')
kwargs.setdefault("ip_src_step", '0.0.0.1')
kwargs.setdefault("ip_dst_step", '0.0.0.1')
kwargs.setdefault("mac_src_count", 20)
kwargs.setdefault("mac_dst_count", 20)
kwargs.setdefault("arp_src_hw_count", 20)
kwargs.setdefault("arp_dst_hw_count", 10)
kwargs.setdefault("ip_src_count", 20)
kwargs.setdefault("ip_dst_count", 20)
kwargs.setdefault("transmit_mode", 'continuous')
kwargs.setdefault("length_mode", 'fixed')
kwargs.setdefault("vlan_id", 10)
kwargs.setdefault("vlan_id_count", 10)
kwargs.setdefault("vlan_id_step", 3)
kwargs.setdefault("l2_encap", 'ethernet_ii')
kwargs.setdefault("frame_size", 64)
kwargs.setdefault("pkts_per_burst", 10)
kwargs.setdefault("mode", "create")
return kwargs
def _build2(index=0):
if index == 0: return _build()
if index == 1: return _build(length_mode='random', transmit_mode='single_burst')
if index == 2: return _build(length_mode='increment', transmit_mode='single_burst', frame_size_step=2000)
if index == 3: return _build(mac_dst_mode="increment")
if index == 4: return _build(mac_src_mode="increment", transmit_mode='single_burst')
if index == 5: return _build(l3_protocol='arp', arp_src_hw_mode="increment", arp_dst_hw_mode="decrement")
if index == 6: return _build(rate_pps=1, l3_protocol='ipv4', ip_src_mode='increment', ip_dst_mode='decrement')
if index == 7: return _build(vlan_user_priority="3", l2_encap="ethernet_ii_vlan", vlan_id_mode="increment")
if index == 8: return _build(vlan="enable", l3_protocol='ipv4', ip_src_addr='1.1.1.1', ip_dst_addr='5.5.5.5',
ip_dscp="8", high_speed_result_analysis=0, track_by='trackingenabled0 ipv4DefaultPhb0',
ip_dscp_tracking=1)
if index == 9: return _build(l2_encap='ethernet_ii', ethernet_value='88CC',
data_pattern='02 07 04 00 11 97 2F 8E 80 04 07 03 00 11 97 2F 8E 82 06 02 00 78 00 00 00 00 '
'00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00')
if index == 10: return _build(l2_encap='ethernet_ii', ethernet_value='8809', data_pattern_mode='fixed',
data_pattern='02 07 04 00 11 97 2F 8E 80 04 07 03 00 11 97 2F 8E 82 06 02 00 78 00 00 00 00 '
'00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00')
if index == 11: return _build(data_pattern='FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 2D 01 04 00 C8 00 5A 05 05 '
'05 05 10 02 0E 01 04 00 01 00 01 02 00 41 04 00 00 00 C8', l3_protocol='ipv4', ip_protocol=6,
ip_src_addr='1.1.1.1', l4_protocol='tcp', ip_precedence=5, frame_size=103,
ip_dst_addr='1.1.1.2', tcp_dst_port=179, tcp_src_port=54821, tcp_window=115,
tcp_seq_num=1115372998, tcp_ack_num=1532875182,tcp_ack_flag=1, tcp_psh_flag=1, ip_ttl=1)
if index == 12: return _build(l3_protocol='ipv6', data_pattern='01 D1 49 5E 00 08 00 02 00 78 00 01 00 0A 00 03 00 01 00 13 '
'5F 1F F2 80 00 06 00 06 00 19 00 17 00 18 00 19 00 0C 00 33 ' '00 01 00 00 00 00 00 00 00 00',
frame_size=116, ipv6_dst_addr="FF02:0:0:0:0:0:1:2", ipv6_src_addr="FE80:0:0:0:201:5FF:FE00:500",
ipv6_next_header=17, ipv6_traffic_class=224,l4_protocol='udp',udp_dst_port=546,
udp_src_port=547, ipv6_hop_limit=255)
if index == 13: return _build(l3_protocol='arp', arp_src_hw_addr="00:00:00:11:11:80",
arp_dst_hw_addr="00:00:00:00:00:00", arp_operation='arpRequest', ip_src_addr='1.1.1.1', ip_dst_addr='1.1.1.2')
if index == 14: return _build(l3_protocol='ipv6', data_pattern='FF FF', l4_protocol="icmp", ipv6_dst_addr="fe80::ba6a:97ff:feca:bb98",
ipv6_src_addr="2001::2", ipv6_next_header=58, icmp_target_addr='2001::2', icmp_type=136, icmp_ndp_nam_o_flag=0,
icmp_ndp_nam_r_flag=1, icmp_ndp_nam_s_flag=1, ipv6_hop_limit=255)
if index == 15: return _build(rate_pps=1, l3_protocol='ipv4',ip_src_addr='11.1.1.1', ip_dst_addr='225.1.1.1',ip_protocol=2, \
l4_protocol='igmp',igmp_msg_type='report',igmp_group_addr='225.1.1.1',high_speed_result_analysis=0)
return None
def ut_stream_get(index=0, **kws):
kwargs = _build2(index)
if kwargs: kwargs.update(kws)
return kwargs
if __name__ == '__main__':
print(ut_stream_get(0))
for i in range(100):
d = ut_stream_get(i)
if not d:
break
print(d)
|
def _build(**kwargs):
kwargs.setdefault('mac_src', '00.00.00.00.00.01')
kwargs.setdefault('mac_dst', '00.00.00.00.00.02')
kwargs.setdefault('mac_src_step', '00.00.00.00.00.01')
kwargs.setdefault('mac_dst_step', '00.00.00.00.00.01')
kwargs.setdefault('arp_src_hw_addr', '01.00.00.00.00.01')
kwargs.setdefault('arp_dst_hw_addr', '01.00.00.00.00.02')
kwargs.setdefault('ip_src_addr', '11.1.1.1')
kwargs.setdefault('ip_dst_addr', '225.1.1.1')
kwargs.setdefault('ip_src_step', '0.0.0.1')
kwargs.setdefault('ip_dst_step', '0.0.0.1')
kwargs.setdefault('mac_src_count', 20)
kwargs.setdefault('mac_dst_count', 20)
kwargs.setdefault('arp_src_hw_count', 20)
kwargs.setdefault('arp_dst_hw_count', 10)
kwargs.setdefault('ip_src_count', 20)
kwargs.setdefault('ip_dst_count', 20)
kwargs.setdefault('transmit_mode', 'continuous')
kwargs.setdefault('length_mode', 'fixed')
kwargs.setdefault('vlan_id', 10)
kwargs.setdefault('vlan_id_count', 10)
kwargs.setdefault('vlan_id_step', 3)
kwargs.setdefault('l2_encap', 'ethernet_ii')
kwargs.setdefault('frame_size', 64)
kwargs.setdefault('pkts_per_burst', 10)
kwargs.setdefault('mode', 'create')
return kwargs
def _build2(index=0):
if index == 0:
return _build()
if index == 1:
return _build(length_mode='random', transmit_mode='single_burst')
if index == 2:
return _build(length_mode='increment', transmit_mode='single_burst', frame_size_step=2000)
if index == 3:
return _build(mac_dst_mode='increment')
if index == 4:
return _build(mac_src_mode='increment', transmit_mode='single_burst')
if index == 5:
return _build(l3_protocol='arp', arp_src_hw_mode='increment', arp_dst_hw_mode='decrement')
if index == 6:
return _build(rate_pps=1, l3_protocol='ipv4', ip_src_mode='increment', ip_dst_mode='decrement')
if index == 7:
return _build(vlan_user_priority='3', l2_encap='ethernet_ii_vlan', vlan_id_mode='increment')
if index == 8:
return _build(vlan='enable', l3_protocol='ipv4', ip_src_addr='1.1.1.1', ip_dst_addr='5.5.5.5', ip_dscp='8', high_speed_result_analysis=0, track_by='trackingenabled0 ipv4DefaultPhb0', ip_dscp_tracking=1)
if index == 9:
return _build(l2_encap='ethernet_ii', ethernet_value='88CC', data_pattern='02 07 04 00 11 97 2F 8E 80 04 07 03 00 11 97 2F 8E 82 06 02 00 78 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00')
if index == 10:
return _build(l2_encap='ethernet_ii', ethernet_value='8809', data_pattern_mode='fixed', data_pattern='02 07 04 00 11 97 2F 8E 80 04 07 03 00 11 97 2F 8E 82 06 02 00 78 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00')
if index == 11:
return _build(data_pattern='FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF FF 00 2D 01 04 00 C8 00 5A 05 05 05 05 10 02 0E 01 04 00 01 00 01 02 00 41 04 00 00 00 C8', l3_protocol='ipv4', ip_protocol=6, ip_src_addr='1.1.1.1', l4_protocol='tcp', ip_precedence=5, frame_size=103, ip_dst_addr='1.1.1.2', tcp_dst_port=179, tcp_src_port=54821, tcp_window=115, tcp_seq_num=1115372998, tcp_ack_num=1532875182, tcp_ack_flag=1, tcp_psh_flag=1, ip_ttl=1)
if index == 12:
return _build(l3_protocol='ipv6', data_pattern='01 D1 49 5E 00 08 00 02 00 78 00 01 00 0A 00 03 00 01 00 13 5F 1F F2 80 00 06 00 06 00 19 00 17 00 18 00 19 00 0C 00 33 00 01 00 00 00 00 00 00 00 00', frame_size=116, ipv6_dst_addr='FF02:0:0:0:0:0:1:2', ipv6_src_addr='FE80:0:0:0:201:5FF:FE00:500', ipv6_next_header=17, ipv6_traffic_class=224, l4_protocol='udp', udp_dst_port=546, udp_src_port=547, ipv6_hop_limit=255)
if index == 13:
return _build(l3_protocol='arp', arp_src_hw_addr='00:00:00:11:11:80', arp_dst_hw_addr='00:00:00:00:00:00', arp_operation='arpRequest', ip_src_addr='1.1.1.1', ip_dst_addr='1.1.1.2')
if index == 14:
return _build(l3_protocol='ipv6', data_pattern='FF FF', l4_protocol='icmp', ipv6_dst_addr='fe80::ba6a:97ff:feca:bb98', ipv6_src_addr='2001::2', ipv6_next_header=58, icmp_target_addr='2001::2', icmp_type=136, icmp_ndp_nam_o_flag=0, icmp_ndp_nam_r_flag=1, icmp_ndp_nam_s_flag=1, ipv6_hop_limit=255)
if index == 15:
return _build(rate_pps=1, l3_protocol='ipv4', ip_src_addr='11.1.1.1', ip_dst_addr='225.1.1.1', ip_protocol=2, l4_protocol='igmp', igmp_msg_type='report', igmp_group_addr='225.1.1.1', high_speed_result_analysis=0)
return None
def ut_stream_get(index=0, **kws):
kwargs = _build2(index)
if kwargs:
kwargs.update(kws)
return kwargs
if __name__ == '__main__':
print(ut_stream_get(0))
for i in range(100):
d = ut_stream_get(i)
if not d:
break
print(d)
|
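Because _build only ever calls dict.setdefault, any keyword supplied by the caller takes precedence over the baked-in default, and ut_stream_get applies a second layer of overrides via kwargs.update. A hypothetical usage sketch (the override values are made up), assuming the functions above are in scope:

```python
base = ut_stream_get(0)
custom = ut_stream_get(0, frame_size=128, vlan_id=20)  # caller overrides win

print(base['frame_size'], base['vlan_id'])        # 64 10  (defaults from _build)
print(custom['frame_size'], custom['vlan_id'])    # 128 20 (overridden)
```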
# Odd cells in a matrix: looks like a matrix problem, but simple addition (row/column counters) produces the result matrix
class Solution:
def oddCells(self, n: int, m: int, indices: List[List[int]]) -> int:
row = [0] * n
col = [0] * m
ans = 0
for r,c in indices:
row[r] += 1
col[c] += 1
for i in range(n):
for j in range(m):
ans += (row[i] + col[j] )%2
return ans
|
class Solution:
def odd_cells(self, n: int, m: int, indices: List[List[int]]) -> int:
row = [0] * n
col = [0] * m
ans = 0
for (r, c) in indices:
row[r] += 1
col[c] += 1
for i in range(n):
for j in range(m):
ans += (row[i] + col[j]) % 2
return ans
|
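The counting trick above works because cell (i, j) ends up holding exactly row[i] + col[j] after all increments, and only the parity of that sum matters; summing (row[i] + col[j]) % 2 therefore counts the odd cells without ever building the matrix. A self-contained cross-check with made-up inputs:

```python
n, m = 2, 3
indices = [[0, 1], [1, 1]]   # hypothetical row/column increments

# Brute force: actually build the matrix and apply each increment.
grid = [[0] * m for _ in range(n)]
for r, c in indices:
    for j in range(m):
        grid[r][j] += 1
    for i in range(n):
        grid[i][c] += 1
brute = sum(v % 2 for row_vals in grid for v in row_vals)

# Counter version, mirroring the solution above.
row, col = [0] * n, [0] * m
for r, c in indices:
    row[r] += 1
    col[c] += 1
fast = sum((row[i] + col[j]) % 2 for i in range(n) for j in range(m))

print(brute, fast)  # 6 6
```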
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 13 20:04:26 2020
@author: ninjaac
"""
def count_substring(string, sub_string):
return string.count(sub_string)
#return (''.join(string)).count(''.join(sub_string))
if __name__ == '__main__':
string = input().strip()
sub_string = input().strip()
count = count_substring(string, sub_string)
print(count)
|
"""
Created on Tue Oct 13 20:04:26 2020
@author: ninjaac
"""
def count_substring(string, sub_string):
return string.count(sub_string)
if __name__ == '__main__':
string = input().strip()
sub_string = input().strip()
count = count_substring(string, sub_string)
print(count)
|
unavailable_dict = {
"0": {
("11", "12", "13"): [
"message_1",
"message_4"
],
("21", "22", "23"): [
"message_3",
"message_6"
],
("24",): [
"message_2",
"message_5"
],
("0", "blank",): [
"message_7"
]
},
"1": {
("11", "12", "13"): [
"message_8"
],
("21", "22", "23"): [
"message_9"
],
("0", "blank",): [
"message_10"
]
},
"2": {("0", "blank",):
[
"message_11"
]}
}
earnings_dict = {
"0": {
"13": {
"4": "after_3_years_23_4"
},
"11": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"12": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"23": {
"4": "after_3_years_23_4"
},
"21": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"22": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"blank": "unavailable_0_any"
},
"1": {
"13": {
"4": "after_3_years_23_4"
},
"11": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"12": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"23": {
"4": "after_3_years_23_4"
},
"21": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"22": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"blank": "unavailable_1_any"
},
"2": {
"13": {
"4": "after_3_years_23_4"
},
"11": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"12": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"23": {
"4": "after_3_years_23_4"
},
"21": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"22": {
"3": "earnings_15_months_unavailable_22_21_3",
"4": "after_3_years_21_22_4"
},
"blank": "unavailable_2_any"
},
}
|
unavailable_dict = {'0': {('11', '12', '13'): ['message_1', 'message_4'], ('21', '22', '23'): ['message_3', 'message_6'], ('24',): ['message_2', 'message_5'], ('0', 'blank'): ['message_7']}, '1': {('11', '12', '13'): ['message_8'], ('21', '22', '23'): ['message_9'], ('0', 'blank'): ['message_10']}, '2': {('0', 'blank'): ['message_11']}}
earnings_dict = {'0': {'13': {'4': 'after_3_years_23_4'}, '11': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '12': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '23': {'4': 'after_3_years_23_4'}, '21': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '22': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, 'blank': 'unavailable_0_any'}, '1': {'13': {'4': 'after_3_years_23_4'}, '11': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '12': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '23': {'4': 'after_3_years_23_4'}, '21': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '22': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, 'blank': 'unavailable_1_any'}, '2': {'13': {'4': 'after_3_years_23_4'}, '11': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '12': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '23': {'4': 'after_3_years_23_4'}, '21': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, '22': {'3': 'earnings_15_months_unavailable_22_21_3', '4': 'after_3_years_21_22_4'}, 'blank': 'unavailable_2_any'}}
|
# In theory, RightScale's API is discoverable through ``links`` in responses.
# In practice, we have to help our robots along with the following hints:
RS_DEFAULT_ACTIONS = {
'index': {
'http_method': 'get',
},
'show': {
'http_method': 'get',
'extra_path': '/%(res_id)s',
},
'create': {
'http_method': 'post',
},
'update': {
'http_method': 'put',
'extra_path': '/%(res_id)s',
},
'destroy': {
'http_method': 'delete',
'extra_path': '/%(res_id)s',
},
}
ALERT_ACTIONS = {
'disable': {
'http_method': 'post',
'extra_path': '/%(res_id)s/disable',
},
'enable': {
'http_method': 'post',
'extra_path': '/%(res_id)s/enable',
},
'quench': {
'http_method': 'post',
'extra_path': '/%(res_id)s/quench',
},
'create': None,
'update': None,
'destroy': None,
}
COOKBOOK_ATTACHMENT_ACTIONS = {
'multi_attach': {
'http_method': 'post',
'extra_path': '/multi_attach',
},
'multi_detach': {
'http_method': 'post',
'extra_path': '/multi_detach',
},
'update': None,
}
INPUT_ACTIONS = {
'multi_update': {
'http_method': 'put',
'extra_path': '/multi_update',
},
'show': None,
'create': None,
'update': None,
'destroy': None,
}
INSTANCE_ACTIONS = {
'launch': {
'http_method': 'post',
'extra_path': '/%(res_id)s/launch',
},
'lock': {
'http_method': 'post',
'extra_path': '/%(res_id)s/lock',
},
'multi_run_executable': {
'http_method': 'post',
'extra_path': '/multi_run_executable',
},
'multi_terminate': {
'http_method': 'post',
'extra_path': '/multi_terminate',
},
'reboot': {
'http_method': 'post',
'extra_path': '/%(res_id)s/reboot',
},
'run_executable': {
'http_method': 'post',
'extra_path': '/%(res_id)s/run_executable',
},
'set_custom_lodgement': {
'http_method': 'post',
'extra_path': '/%(res_id)s/set_custom_lodgement',
},
'start': {
'http_method': 'post',
'extra_path': '/%(res_id)s/start',
},
'stop': {
'http_method': 'post',
'extra_path': '/%(res_id)s/stop',
},
'terminate': {
'http_method': 'post',
'extra_path': '/%(res_id)s/terminate',
},
'unlock': {
'http_method': 'post',
'extra_path': '/%(res_id)s/unlock',
},
'create': None,
'destroy': None,
}
MULTI_CLOUD_IMAGE_ACTIONS = {
'clone': {
'http_method': 'post',
'extra_path': '/%(res_id)s/clone',
},
'commit': {
'http_method': 'post',
'extra_path': '/%(res_id)s/commit',
},
}
SERVER_ARRAY_ACTIONS = {
'clone': {
'http_method': 'post',
'extra_path': '/%(res_id)s/clone',
},
'current_instances': {
'http_method': 'get',
'extra_path': '/%(res_id)s/current_instances',
},
'launch': {
'http_method': 'post',
'extra_path': '/%(res_id)s/launch',
},
'multi_run_executable': {
'http_method': 'post',
'extra_path': '/%(res_id)s/multi_run_executable',
},
'multi_terminate': {
'http_method': 'post',
'extra_path': '/%(res_id)s/multi_terminate',
},
}
UPDATE_NONE_ACTIONS = {
'update': None,
}
# Specify variations from the default actions defined in RS_DEFAULT_ACTIONS.
# These specs come from http://reference.rightscale.com/api1.5/index.html
ROOT_COLLECTIONS = {
'account_groups': {
'create': None,
'update': None,
'destroy': None,
},
'accounts': {
'index': None,
'create': None,
'update': None,
'destroy': None,
},
# alert_specs use defaults
'alerts': ALERT_ACTIONS,
'audit_entries': {
'append': {
'http_method': 'post',
'extra_path': '/%(res_id)s/append',
},
'detail': {
'http_method': 'get',
'extra_path': '/%(res_id)s/detail',
},
'destroy': None,
},
'backups': {
'cleanup': {
'http_method': 'post',
'extra_path': '/cleanup',
},
},
'child_accounts': {
'show': None,
'destroy': None,
},
'cloud_accounts': {
'update': None,
},
'clouds': {
'create': None,
'update': None,
'destroy': None,
},
# these are only in the 1.5 docs and are not available as hrefs.
'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS,
'cookbooks': {
'follow': {
'http_method': 'post',
'extra_path': '/%(res_id)s/follow',
},
'freeze': {
'http_method': 'post',
'extra_path': '/%(res_id)s/freeze',
},
'obsolete': {
'http_method': 'post',
'extra_path': '/%(res_id)s/obsolete',
},
'create': None,
'update': None,
},
# credentials use defaults
'deployments': {
'clone': {
'http_method': 'post',
'extra_path': '/%(res_id)s/clone',
},
'lock': {
'http_method': 'post',
'extra_path': '/%(res_id)s/lock',
},
'servers': {
'http_method': 'get',
'extra_path': '/%(res_id)s/servers',
},
'unlock': {
'http_method': 'post',
'extra_path': '/%(res_id)s/unlock',
},
},
'identity_providers': {
'create': None,
'update': None,
'destroy': None,
},
'multi_cloud_images': MULTI_CLOUD_IMAGE_ACTIONS,
# network_gateways use defaults
# network_option_group_attachments use defaults
# network_option_groups use defaults
# networks use defaults
# oauth2 is a special case just used during auth
'permissions': {
'update': None,
},
# only in 1.5 api docs, not discoverable via href
'placement_groups': {
'update': None,
},
'preferences': {
'create': None,
},
'publication_lineages': {
'index': None,
'create': None,
'update': None,
'destroy': None,
},
'publications': {
'import': {
'http_method': 'post',
'extra_path': '/%(res_id)s/import',
},
'create': None,
'update': None,
'destroy': None,
},
'repositories': {
'cookbook_import': {
'http_method': 'post',
'extra_path': '/%(res_id)s/cookbook_import',
},
'refetch': {
'http_method': 'post',
'extra_path': '/%(res_id)s/refetch',
},
'resolve': {
'http_method': 'post',
'extra_path': '/resolve',
},
},
'right_scripts': {
'create': {
'http_method': 'post',
},
'commit': {
'http_method': 'post',
'extra_path': '/%(res_id)s/commit',
},
'update': {
'http_method': 'put',
},
'destroy': {
'http_method': 'delete',
},
},
# route_tables uses defaults
# routes uses defaults
# security_group_rules uses defaults
# rs api 1.5 returns a link where rel=self for the ``/api/sessions``
# resource. sadly, the href=/api/session. regardless, we don't need
# it as an attribute because it's where we started off.
'self': None,
'server_arrays': SERVER_ARRAY_ACTIONS,
'server_template_multi_cloud_images': {
'make_default': {
'http_method': 'post',
'extra_path': '/%(res_id)s/make_default',
},
'update': None,
},
'server_templates': {
'clone': {
'http_method': 'post',
'extra_path': '/%(res_id)s/clone',
},
'commit': {
'http_method': 'post',
'extra_path': '/%(res_id)s/commit',
},
'detect_changes_in_head': {
'http_method': 'post',
'extra_path': '/%(res_id)s/detect_changes_in_head',
},
'publish': {
'http_method': 'post',
'extra_path': '/%(res_id)s/publish',
},
'resolve': {
'http_method': 'post',
'extra_path': '/%(res_id)s/resolve',
},
'swap_repository': {
'http_method': 'post',
'extra_path': '/%(res_id)s/swap_repository',
},
},
'servers': {
'clone': {
'http_method': 'post',
'extra_path': '/%(res_id)s/clone',
},
'launch': {
'http_method': 'post',
'extra_path': '/%(res_id)s/launch',
},
'terminate': {
'http_method': 'post',
'extra_path': '/%(res_id)s/terminate',
},
},
# workaround inconsistency in rs hateoas
'sessions': {
'accounts': {
'http_method': 'get',
'extra_path': '/accounts',
},
'index': None,
'show': None,
'create': None,
'update': None,
'destroy': None,
},
'tags': {
'by_resource': {
'http_method': 'post',
'extra_path': '/by_resource',
},
'by_tag': {
'http_method': 'post',
'extra_path': '/by_tag',
},
'multi_add': {
'http_method': 'post',
'extra_path': '/multi_add',
},
'multi_delete': {
'http_method': 'post',
'extra_path': '/multi_delete',
},
'index': None,
'show': None,
'create': None,
'update': None,
'destroy': None,
},
'users': {
'destroy': None,
},
}
CLOUD_COLLECTIONS = {
'datacenters': {
'create': None,
'update': None,
'destroy': None,
},
'images': {
'create': None,
'update': None,
'destroy': None,
},
'instance_types': {
'create': None,
'update': None,
'destroy': None,
},
'instances': INSTANCE_ACTIONS,
'ip_address_bindings': UPDATE_NONE_ACTIONS,
# ip_addresses uses defaults
'recurring_volume_attachments': UPDATE_NONE_ACTIONS,
'security_groups': {
'update': None,
},
'ssh_keys': {
'update': None,
},
# subnets uses defaults
'volume_attachments': UPDATE_NONE_ACTIONS,
'volume_snapshots': UPDATE_NONE_ACTIONS,
}
INSTANCE_COLLECTIONS = {
'alerts': ALERT_ACTIONS,
'inputs': INPUT_ACTIONS,
# instance_custom_lodgements uses defaults
'monitoring_metrics': {
'data': {
'http_method': 'get',
'extra_path': '/%(res_id)s/data',
},
'create': None,
'update': None,
'destroy': None,
},
# subnets uses defaults
# TODO: investigate to see how useful tasks is by itself. i.e. there's
# no way to index() all tasks for an instance. regardless, this
# definition is here at least for completeness.
'tasks': {
'show': {
'http_method': 'get',
'extra_path': '/live/tasks/%(res_id)s',
},
'index': None,
'create': None,
'update': None,
'destroy': None,
},
'volume_attachments': UPDATE_NONE_ACTIONS,
'volumes': {
'update': None,
},
'volume_types': {
'create': None,
'update': None,
'destroy': None,
},
}
COOKBOOK_COLLECTIONS = {
'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS,
}
DEPLOYMENT_COLLECTIONS = {
'alerts': ALERT_ACTIONS,
'inputs': INPUT_ACTIONS,
'server_arrays': SERVER_ARRAY_ACTIONS,
}
IP_ADDRESS_COLLECTIONS = {
'ip_address_bindings': UPDATE_NONE_ACTIONS,
}
REPOSITORY_COLLECTIONS = {
'repository_assets': {
'create': None,
'update': None,
'destroy': None,
},
}
SERVER_COLLECTIONS = {
# alert_specs use defaults
'alerts': ALERT_ACTIONS,
}
SERVER_ARRAY_COLLECTIONS = {
# alert_specs use defaults
'alerts': ALERT_ACTIONS,
'current_instances': INSTANCE_ACTIONS,
}
SERVER_TEMPLATES_COLLECTIONS = {
'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS,
'inputs': INPUT_ACTIONS,
'multi_cloud_images': MULTI_CLOUD_IMAGE_ACTIONS,
'runnable_bindings': {
'multi_update': {
'http_method': 'put',
'extra_path': '/multi_update',
},
'update': None,
},
}
VOLUME_COLLECTIONS = {
'recurring_volume_attachments': UPDATE_NONE_ACTIONS,
'volume_snapshots': UPDATE_NONE_ACTIONS,
}
VOLUME_SNAPSHOT_COLLECTIONS = {
'recurring_volume_attachments': UPDATE_NONE_ACTIONS,
}
COLLECTIONS = {
'application/vnd.rightscale.session+json': ROOT_COLLECTIONS,
'application/vnd.rightscale.cookbook+json': COOKBOOK_COLLECTIONS,
'application/vnd.rightscale.cloud+json': CLOUD_COLLECTIONS,
'application/vnd.rightscale.instance+json': INSTANCE_COLLECTIONS,
'application/vnd.rightscale.ip_address+json': IP_ADDRESS_COLLECTIONS,
'application/vnd.rightscale.deployment+json': DEPLOYMENT_COLLECTIONS,
# multi_cloud_image has a ``settings`` collection (a.k.a.
# MultiCloudImageSettings in the RS docs) that just uses defaults, so
# no need for an extra map
'application/vnd.rightscale.repository+json': REPOSITORY_COLLECTIONS,
'application/vnd.rightscale.server+json': SERVER_COLLECTIONS,
'application/vnd.rightscale.server_array+json': SERVER_ARRAY_COLLECTIONS,
'application/vnd.rightscale.server_template+json': SERVER_TEMPLATES_COLLECTIONS,
# security_group has a ``security_group_rules`` collection that just
# uses defaults, so no need for an extra map
'application/vnd.rightscale.volume+json': VOLUME_COLLECTIONS,
'application/vnd.rightscale.volume_snapshot+json': VOLUME_SNAPSHOT_COLLECTIONS,
}
|
rs_default_actions = {'index': {'http_method': 'get'}, 'show': {'http_method': 'get', 'extra_path': '/%(res_id)s'}, 'create': {'http_method': 'post'}, 'update': {'http_method': 'put', 'extra_path': '/%(res_id)s'}, 'destroy': {'http_method': 'delete', 'extra_path': '/%(res_id)s'}}
alert_actions = {'disable': {'http_method': 'post', 'extra_path': '/%(res_id)s/disable'}, 'enable': {'http_method': 'post', 'extra_path': '/%(res_id)s/enable'}, 'quench': {'http_method': 'post', 'extra_path': '/%(res_id)s/quench'}, 'create': None, 'update': None, 'destroy': None}
cookbook_attachment_actions = {'multi_attach': {'http_method': 'post', 'extra_path': '/multi_attach'}, 'multi_detach': {'http_method': 'post', 'extra_path': '/multi_detach'}, 'update': None}
input_actions = {'multi_update': {'http_method': 'put', 'extra_path': '/multi_update'}, 'show': None, 'create': None, 'update': None, 'destroy': None}
instance_actions = {'launch': {'http_method': 'post', 'extra_path': '/%(res_id)s/launch'}, 'lock': {'http_method': 'post', 'extra_path': '/%(res_id)s/lock'}, 'multi_run_executable': {'http_method': 'post', 'extra_path': '/multi_run_executable'}, 'multi_terminate': {'http_method': 'post', 'extra_path': '/multi_terminate'}, 'reboot': {'http_method': 'post', 'extra_path': '/%(res_id)s/reboot'}, 'run_executable': {'http_method': 'post', 'extra_path': '/%(res_id)s/run_executable'}, 'set_custom_lodgement': {'http_method': 'post', 'extra_path': '/%(res_id)s/set_custom_lodgement'}, 'start': {'http_method': 'post', 'extra_path': '/%(res_id)s/start'}, 'stop': {'http_method': 'post', 'extra_path': '/%(res_id)s/stop'}, 'terminate': {'http_method': 'post', 'extra_path': '/%(res_id)s/terminate'}, 'unlock': {'http_method': 'post', 'extra_path': '/%(res_id)s/unlock'}, 'create': None, 'destroy': None}
multi_cloud_image_actions = {'clone': {'http_method': 'post', 'extra_path': '/%(res_id)s/clone'}, 'commit': {'http_method': 'post', 'extra_path': '/%(res_id)s/commit'}}
server_array_actions = {'clone': {'http_method': 'post', 'extra_path': '/%(res_id)s/clone'}, 'current_instances': {'http_method': 'get', 'extra_path': '/%(res_id)s/current_instances'}, 'launch': {'http_method': 'post', 'extra_path': '/%(res_id)s/launch'}, 'multi_run_executable': {'http_method': 'post', 'extra_path': '/%(res_id)s/multi_run_executable'}, 'multi_terminate': {'http_method': 'post', 'extra_path': '/%(res_id)s/multi_terminate'}}
update_none_actions = {'update': None}
root_collections = {'account_groups': {'create': None, 'update': None, 'destroy': None}, 'accounts': {'index': None, 'create': None, 'update': None, 'destroy': None}, 'alerts': ALERT_ACTIONS, 'audit_entries': {'append': {'http_method': 'post', 'extra_path': '/%(res_id)s/append'}, 'detail': {'http_method': 'get', 'extra_path': '/%(res_id)s/detail'}, 'destroy': None}, 'backups': {'cleanup': {'http_method': 'post', 'extra_path': '/cleanup'}}, 'child_accounts': {'show': None, 'destroy': None}, 'cloud_accounts': {'update': None}, 'clouds': {'create': None, 'update': None, 'destroy': None}, 'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS, 'cookbooks': {'follow': {'http_method': 'post', 'extra_path': '/%(res_id)s/follow'}, 'freeze': {'http_method': 'post', 'extra_path': '/%(res_id)s/freeze'}, 'obsolete': {'http_method': 'post', 'extra_path': '/%(res_id)s/obsolete'}, 'create': None, 'update': None}, 'deployments': {'clone': {'http_method': 'post', 'extra_path': '/%(res_id)s/clone'}, 'lock': {'http_method': 'post', 'extra_path': '/%(res_id)s/lock'}, 'servers': {'http_method': 'get', 'extra_path': '/%(res_id)s/servers'}, 'unlock': {'http_method': 'post', 'extra_path': '/%(res_id)s/unlock'}}, 'identity_providers': {'create': None, 'update': None, 'destroy': None}, 'multi_cloud_images': MULTI_CLOUD_IMAGE_ACTIONS, 'permissions': {'update': None}, 'placement_groups': {'update': None}, 'preferences': {'create': None}, 'publication_lineages': {'index': None, 'create': None, 'update': None, 'destroy': None}, 'publications': {'import': {'http_method': 'post', 'extra_path': '/%(res_id)s/import'}, 'create': None, 'update': None, 'destroy': None}, 'repositories': {'cookbook_import': {'http_method': 'post', 'extra_path': '/%(res_id)s/cookbook_import'}, 'refetch': {'http_method': 'post', 'extra_path': '/%(res_id)s/refetch'}, 'resolve': {'http_method': 'post', 'extra_path': '/resolve'}}, 'right_scripts': {'create': {'http_method': 'post'}, 'commit': {'http_method': 'post', 'extra_path': '/%(res_id)s/commit'}, 'update': {'http_method': 'put'}, 'destroy': {'http_method': 'delete'}}, 'self': None, 'server_arrays': SERVER_ARRAY_ACTIONS, 'server_template_multi_cloud_images': {'make_default': {'http_method': 'post', 'extra_path': '/%(res_id)s/make_default'}, 'update': None}, 'server_templates': {'clone': {'http_method': 'post', 'extra_path': '/%(res_id)s/clone'}, 'commit': {'http_method': 'post', 'extra_path': '/%(res_id)s/commit'}, 'detect_changes_in_head': {'http_method': 'post', 'extra_path': '/%(res_id)s/detect_changes_in_head'}, 'publish': {'http_method': 'post', 'extra_path': '/%(res_id)s/publish'}, 'resolve': {'http_method': 'post', 'extra_path': '/%(res_id)s/resolve'}, 'swap_repository': {'http_method': 'post', 'extra_path': '/%(res_id)s/swap_repository'}}, 'servers': {'clone': {'http_method': 'post', 'extra_path': '/%(res_id)s/clone'}, 'launch': {'http_method': 'post', 'extra_path': '/%(res_id)s/launch'}, 'terminate': {'http_method': 'post', 'extra_path': '/%(res_id)s/terminate'}}, 'sessions': {'accounts': {'http_method': 'get', 'extra_path': '/accounts'}, 'index': None, 'show': None, 'create': None, 'update': None, 'destroy': None}, 'tags': {'by_resource': {'http_method': 'post', 'extra_path': '/by_resource'}, 'by_tag': {'http_method': 'post', 'extra_path': '/by_tag'}, 'multi_add': {'http_method': 'post', 'extra_path': '/multi_add'}, 'multi_delete': {'http_method': 'post', 'extra_path': '/multi_delete'}, 'index': None, 'show': None, 'create': None, 'update': None, 'destroy': None}, 'users': {'destroy': 
None}}
cloud_collections = {'datacenters': {'create': None, 'update': None, 'destroy': None}, 'images': {'create': None, 'update': None, 'destroy': None}, 'instance_types': {'create': None, 'update': None, 'destroy': None}, 'instances': INSTANCE_ACTIONS, 'ip_address_bindings': UPDATE_NONE_ACTIONS, 'recurring_volume_attachments': UPDATE_NONE_ACTIONS, 'security_groups': {'update': None}, 'ssh_keys': {'update': None}, 'volume_attachments': UPDATE_NONE_ACTIONS, 'volume_snapshots': UPDATE_NONE_ACTIONS}
instance_collections = {'alerts': ALERT_ACTIONS, 'inputs': INPUT_ACTIONS, 'monitoring_metrics': {'data': {'http_method': 'get', 'extra_path': '/%(res_id)s/data'}, 'create': None, 'update': None, 'destroy': None}, 'tasks': {'show': {'http_method': 'get', 'extra_path': '/live/tasks/%(res_id)s'}, 'index': None, 'create': None, 'update': None, 'destroy': None}, 'volume_attachments': UPDATE_NONE_ACTIONS, 'volumes': {'update': None}, 'volume_types': {'create': None, 'update': None, 'destroy': None}}
cookbook_collections = {'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS}
deployment_collections = {'alerts': ALERT_ACTIONS, 'inputs': INPUT_ACTIONS, 'server_arrays': SERVER_ARRAY_ACTIONS}
ip_address_collections = {'ip_address_bindings': UPDATE_NONE_ACTIONS}
repository_collections = {'repository_assets': {'create': None, 'update': None, 'destroy': None}}
server_collections = {'alerts': ALERT_ACTIONS}
server_array_collections = {'alerts': ALERT_ACTIONS, 'current_instances': INSTANCE_ACTIONS}
server_templates_collections = {'cookbook_attachments': COOKBOOK_ATTACHMENT_ACTIONS, 'inputs': INPUT_ACTIONS, 'multi_cloud_images': MULTI_CLOUD_IMAGE_ACTIONS, 'runnable_bindings': {'multi_update': {'http_method': 'put', 'extra_path': '/multi_update'}, 'update': None}}
volume_collections = {'recurring_volume_attachments': UPDATE_NONE_ACTIONS, 'volume_snapshots': UPDATE_NONE_ACTIONS}
volume_snapshot_collections = {'recurring_volume_attachments': UPDATE_NONE_ACTIONS}
collections = {'application/vnd.rightscale.session+json': ROOT_COLLECTIONS, 'application/vnd.rightscale.cookbook+json': COOKBOOK_COLLECTIONS, 'application/vnd.rightscale.cloud+json': CLOUD_COLLECTIONS, 'application/vnd.rightscale.instance+json': INSTANCE_COLLECTIONS, 'application/vnd.rightscale.ip_address+json': IP_ADDRESS_COLLECTIONS, 'application/vnd.rightscale.deployment+json': DEPLOYMENT_COLLECTIONS, 'application/vnd.rightscale.repository+json': REPOSITORY_COLLECTIONS, 'application/vnd.rightscale.server+json': SERVER_COLLECTIONS, 'application/vnd.rightscale.server_array+json': SERVER_ARRAY_COLLECTIONS, 'application/vnd.rightscale.server_template+json': SERVER_TEMPLATES_COLLECTIONS, 'application/vnd.rightscale.volume+json': VOLUME_COLLECTIONS, 'application/vnd.rightscale.volume_snapshot+json': VOLUME_SNAPSHOT_COLLECTIONS}
|
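The comment at the top of this pair describes the convention these tables follow: each action maps to an HTTP method plus an optional extra_path, with %(res_id)s interpolated from the resource id. The helper below is not part of the RightScale client library; it is only an illustrative sketch of how such a spec could be expanded, with the base paths and id made up:

```python
def expand_action(action_spec, base_path, res_id=None):
    """Turn an action spec into an (HTTP method, path) pair. Illustrative only."""
    extra = action_spec.get('extra_path', '')
    if res_id is not None:
        extra = extra % {'res_id': res_id}
    return action_spec['http_method'].upper(), base_path + extra

# hypothetical base paths and resource id
print(expand_action({'http_method': 'post', 'extra_path': '/%(res_id)s/clone'},
                    '/api/servers', res_id=42))      # ('POST', '/api/servers/42/clone')
print(expand_action({'http_method': 'get'}, '/api/clouds'))   # ('GET', '/api/clouds')
```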
'''
Created on Oct 1, 2011
@author: jose
'''
|
"""
Created on Oct 1, 2011
@author: jose
"""
|
"""binary tree
"""
#https://www.hackerrank.com/challenges/tree-preorder-traversal/problem
# recursive:
def preOrder(root):
print(root.info,end=' ')
if root.left:
preOrder(root.left)
if root.right:
preOrder(root.right)
# iterative:
def preOrder(root):
stack=[root]
while(stack):
a=stack.pop()
print(a.info,end=' ')
if(a.right):
stack.append(a.right)
if a.left:
stack.append(a.left)
#https://www.hackerrank.com/challenges/tree-postorder-traversal/problem
# recursive:
def postOrder(root):
if(root.left):
postOrder(root.left)
if root.right:
postOrder(root.right)
print(root.info,end=' ')
# iterative:
def postOrder(root):
queue=[]
stack=[root]
while stack:
a=stack.pop()
queue.append(a.info)
if a.left:
stack.append(a.left)
if a.right:
stack.append(a.right)
for i in range(len(queue)):
print(queue[-i-1],end=' ')
#https://www.hackerrank.com/challenges/tree-inorder-traversal/problem
def inOrder(root):
if root.left:
inOrder(root.left)
print(root.info,end=' ')
if root.right:
inOrder(root.right)
# iterative:
def inOrder(root):
cur=root
stack=[]
q=[]
while True:
while cur:
stack.append(cur)
cur=cur.left
cur=stack.pop()
print(cur.info,end=' ')
cur=cur.right
if not (cur or stack):
break
"""
today leet code challenge
https://leetcode.com/explore/challenge/card/april-leetcoding-challenge-2021/594/week-2-april-8th-april-14th/3702/
"""
class Solution:
def isAlienSorted(self, words: List[str], order: str) -> bool:
d,l={},[]
for i in range(len(order)):
d[order[i]]=i
print(d)
for i in words:
l.append([d[j]for j in i])
for i in range(1,len(l)):
if l[i-1]>l[i]:
return False
return True
#https://leetcode.com/problems/binary-tree-inorder-traversal/
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def inorderTraversal(self, root: TreeNode) -> List[int]:
l=[]
ans=[]
a=root
while True:
if not(a or l):
break
while a:
l.append(a)
a=a.left
a=l.pop()
ans.append(a.val)
a=a.right
return ans
|
"""binary tree
"""
def pre_order(root):
print(root.info, end=' ')
if root.left:
pre_order(root.left)
if root.right:
pre_order(root.right)
def pre_order(root):
stack = [root]
while stack:
a = stack.pop()
print(a.info, end=' ')
if a.right:
stack.append(a.right)
if a.left:
stack.append(a.left)
def post_order(root):
if root.left:
post_order(root.left)
if root.right:
post_order(root.right)
print(root.info, end=' ')
def post_order(root):
queue = []
stack = [root]
while stack:
a = stack.pop()
queue.append(a.info)
if a.left:
stack.append(a.left)
if a.right:
stack.append(a.right)
for i in range(len(queue)):
print(queue[-i - 1], end=' ')
def in_order(root):
if root.left:
in_order(root.left)
print(root.info, end=' ')
if root.right:
in_order(root.right)
def in_order(root):
cur = root
stack = []
q = []
while True:
while cur:
stack.append(cur)
cur = cur.left
cur = stack.pop()
print(cur.info, end=' ')
cur = cur.right
if not (cur or stack):
break
'\ntoday leet code challenge\nhttps://leetcode.com/explore/challenge/card/april-leetcoding-challenge-2021/594/week-2-april-8th-april-14th/3702/\n'
class Solution:
def is_alien_sorted(self, words: List[str], order: str) -> bool:
(d, l) = ({}, [])
for i in range(len(order)):
d[order[i]] = i
print(d)
for i in words:
l.append([d[j] for j in i])
for i in range(1, len(l)):
if l[i - 1] > l[i]:
return False
return True
class Solution:
def inorder_traversal(self, root: TreeNode) -> List[int]:
l = []
ans = []
a = root
while True:
if not (a or l):
break
while a:
l.append(a)
a = a.left
a = l.pop()
ans.append(a.val)
a = a.right
return ans
|
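Every traversal above assumes a HackerRank-style node exposing .info, .left and .right. A minimal, hypothetical Node class and a three-node tree make the functions runnable outside the judge, assuming the pre_order/in_order/post_order definitions above are in scope:

```python
class Node:
    """Bare-bones binary tree node with the .info/.left/.right shape used above."""
    def __init__(self, info, left=None, right=None):
        self.info = info
        self.left = left
        self.right = right

#     1
#    / \
#   2   3
root = Node(1, Node(2), Node(3))

pre_order(root)    # 1 2 3
print()
in_order(root)     # 2 1 3
print()
post_order(root)   # 2 3 1
print()
```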
# Definition for an undirected graph node
class UndirectedGraphNode:
def __init__(self, x):
self.label = x
self.neighbors = []
def __str__(self):
return "({} -> {})".format(self.label,
[nb.label for nb in self.neighbors])
def __repr__(self):
return self.__str__()
class Solution:
# @param node, an undirected graph node
# @return an undirected graph node
def cloneGraph(self, node):
if node is None:
return None
old_labels = dict()
frontier = [node]
old_labels[node.label] = node
while frontier:
top = frontier.pop()
for nb in top.neighbors:
if nb.label not in old_labels:
old_labels[nb.label] = nb
frontier.append(nb)
new_labels = dict()
for v in old_labels:
new_labels[v] = UndirectedGraphNode(v)
for v in old_labels:
for nb in old_labels[v].neighbors:
new_labels[v].neighbors.append(new_labels[nb.label])
return new_labels[node.label]
def main():
graph = [0, 1, 2]
for i in range(3):
graph[i] = UndirectedGraphNode(i)
graph[0].neighbors = [graph[1], graph[2]]
graph[1].neighbors = [graph[0], graph[2]]
graph[2].neighbors = [graph[0], graph[1], graph[2]]
fn = Solution().cloneGraph
node = fn(graph[0])
print(node, node.neighbors[0], node.neighbors[1])
if __name__ == '__main__':
main()
|
class Undirectedgraphnode:
def __init__(self, x):
self.label = x
self.neighbors = []
def __str__(self):
return '({} -> {})'.format(self.label, [nb.label for nb in self.neighbors])
def __repr__(self):
return self.__str__()
class Solution:
def clone_graph(self, node):
if node is None:
return None
old_labels = dict()
frontier = [node]
old_labels[node.label] = node
while frontier:
top = frontier.pop()
for nb in top.neighbors:
if nb.label not in old_labels:
old_labels[nb.label] = nb
frontier.append(nb)
new_labels = dict()
for v in old_labels:
new_labels[v] = Undirectedgraphnode(v)
for v in old_labels:
for nb in old_labels[v].neighbors:
new_labels[v].neighbors.append(new_labels[nb.label])
return new_labels[node.label]
def main():
graph = [0, 1, 2]
for i in range(3):
graph[i] = Undirectedgraphnode(i)
graph[0].neighbors = [graph[1], graph[2]]
graph[1].neighbors = [graph[0], graph[2]]
graph[2].neighbors = [graph[0], graph[1], graph[2]]
fn = Solution().clone_graph
node = fn(graph[0])
print(node, node.neighbors[0], node.neighbors[1])
if __name__ == '__main__':
main()
|
class Salary:
def __init__(self,pay,bonus):
self.pay=pay
self.bonus=bonus
def annual(self):
return (self.pay*12)+self.bonus
class employee:
def __init__(self,name,age,pay,bonus):
self.name=name
self.age=age
self.pay=pay
self.bonus=bonus
self.obj_salary=Salary(pay,bonus)
def total_salary(self):
return self.obj_salary.annual()
emp=employee('ajay',12,1,1)
print(emp.total_salary())
|
class Salary:
def __init__(self, pay, bonus):
self.pay = pay
self.bonus = bonus
def annual(self):
return self.pay * 12 + self.bonus
class Employee:
def __init__(self, name, age, pay, bonus):
self.name = name
self.age = age
self.pay = pay
self.bonus = bonus
self.obj_salary = Salary(pay, bonus)
def total_salary(self):
return self.obj_salary.annual()
emp = Employee('ajay', 12, 1, 1)
print(emp.total_salary())
|
test = {
'name': 'smallest-int',
'points': 0,
'suites': [
{
'cases': [
{
'code': r"""
sqlite> SELECT * FROM smallest_int;
11/11/2015 10:01:03|7
11/11/2015 13:53:36|7
11/11/2015 14:52:07|7
11/11/2015 15:36:00|7
11/11/2015 15:46:03|7
11/11/2015 16:11:56|7
11/11/2015 17:42:09|7
11/11/2015 11:49:59|8
11/12/2015 14:30:09|8
11/11/2015 9:57:49|9
11/11/2015 10:29:15|10
11/11/2015 11:18:22|10
11/11/2015 16:56:15|10
11/11/2015 10:04:51|11
11/11/2015 10:27:47|11
11/11/2015 11:04:43|11
11/11/2015 12:27:14|11
11/11/2015 12:52:33|11
11/11/2015 13:05:03|11
11/11/2015 13:48:29|11
""",
'hidden': False,
'locked': False
}
],
'ordered': False,
'scored': True,
'setup': r"""
sqlite> .read lab12.sql
""",
'teardown': '',
'type': 'sqlite'
}
]
}
|
test = {'name': 'smallest-int', 'points': 0, 'suites': [{'cases': [{'code': '\n sqlite> SELECT * FROM smallest_int;\n 11/11/2015 10:01:03|7\n 11/11/2015 13:53:36|7\n 11/11/2015 14:52:07|7\n 11/11/2015 15:36:00|7\n 11/11/2015 15:46:03|7\n 11/11/2015 16:11:56|7\n 11/11/2015 17:42:09|7\n 11/11/2015 11:49:59|8\n 11/12/2015 14:30:09|8\n 11/11/2015 9:57:49|9\n 11/11/2015 10:29:15|10\n 11/11/2015 11:18:22|10\n 11/11/2015 16:56:15|10\n 11/11/2015 10:04:51|11\n 11/11/2015 10:27:47|11\n 11/11/2015 11:04:43|11\n 11/11/2015 12:27:14|11\n 11/11/2015 12:52:33|11\n 11/11/2015 13:05:03|11\n 11/11/2015 13:48:29|11\n ', 'hidden': False, 'locked': False}], 'ordered': False, 'scored': True, 'setup': '\n sqlite> .read lab12.sql\n ', 'teardown': '', 'type': 'sqlite'}]}
|
# Christian Piper
# 11/11/19
# This will accept algebra equations on the input and solve for the variable
def main():
equation = input("Input your equation: ")
variable = input("Input the variable to be solved for: ")
solveAlgebraEquation(equation, variable)
def solveAlgebraEquation(equation, variable):
collector = ["","","","","",""]
iteration = 0
for char in equation:
if char == " " or char == "+" or char == "-" or char == "*" or char == "/" or char == ")" or char == "=" or char == "^":
print(char + " - It's a separator!")
iteration += 1
else:
collector[iteration] = collector[iteration] + char
for count in range(0, iteration):
print(collector[count])
main()
|
def main():
equation = input('Input your equation: ')
variable = input('Input the variable to be solved for: ')
solve_algebra_equation(equation, variable)
def solve_algebra_equation(equation, variable):
collector = ['', '', '', '', '', '']
iteration = 0
for char in equation:
if char == ' ' or char == '+' or char == '-' or (char == '*') or (char == '/') or (char == ')') or (char == '=') or (char == '^'):
print(char + " - It's a separator!")
iteration += 1
else:
collector[iteration] = collector[iteration] + char
for count in range(0, iteration):
print(collector[count])
main()
|
# Location of exported xml playlist from itunes
# Update username and directory to their appropriate path, e.g. C:/users/bob/desktop/file.xml
xml_playlist_loc = 'C:/users/username/directory/_MyMusic_.xml'
# name of user directory where your Music folder is located
# example: C:/users/bob/music/itunes
music_folder_dir = 'name'
|
xml_playlist_loc = 'C:/users/username/directory/_MyMusic_.xml'
music_folder_dir = 'name'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Address(dict):
"""
Dictionary class that provides some convenience wrappers for accessing
commonly used data elements on an Address.
"""
def __init__(self, address_dict, order="lat"):
super(Address, self).__init__(address_dict)
self.order = order
@property
def coords(self):
"""
Returns a tuple representing the location of the address in a
GIS coords format, i.e. (longitude, latitude).
"""
x, y = ("lat", "lng") if self.order == "lat" else ("lng", "lat")
try:
return (self["location"][x], self["location"][y])
except KeyError:
return None
@property
def accuracy(self):
"""
Returns the accuracy integer or None of the geocoded address.
"""
try:
return self["accuracy"]
except KeyError:
return None
@property
def formatted_address(self):
"""
Returns a list of formatted addresses from the Location list
"""
return self.get("formatted_address", "")
class Location(dict):
"""
Dictionary class that provides some convenience accessors to commonly used
data elements.
"""
def __init__(self, result_dict, order="lat"):
super(Location, self).__init__(result_dict)
try:
self.best_match = Address(self["results"][0], order=order)
# A KeyError would be raised if an address could not be parsed or
# geocoded, i.e. from a batch address geocoding process. An index error
# would be raised under similar circumstances, e.g. the 'results' key
# just refers to an empty list.
except (KeyError, IndexError):
self.best_match = Address({})
self.order = order
@property
def coords(self):
"""
Returns a tuple representing the location of the first result in a
GIS coords format, i.e. (longitude, latitude).
"""
return self.best_match.coords
@property
def accuracy(self):
"""
Returns the accuracy integer or None of the geocoded address.
"""
return self.best_match.accuracy
@property
def formatted_address(self):
"""
Returns a list of formatted addresses from the Location list
"""
return self.best_match.formatted_address
class LocationCollection(list):
"""
A list of Location objects, with dictionary lookup by address.
"""
lookups = {}
def __init__(self, results_list, order="lat"):
"""
Loads the individual responses into an internal list and uses the query
values as lookup keys.
"""
results = []
for index, result in enumerate(results_list):
results.append(Location(result["response"], order=order))
self.lookups[result["query"]] = index
super(LocationCollection, self).__init__(results)
self.order = order
def get(self, key):
"""
Returns an individual Location by query lookup, e.g. address or point.
"""
if isinstance(key, tuple):
# TODO handle different ordering
try:
x, y = float(key[0]), float(key[1])
except IndexError:
raise ValueError("Two values are required for a coordinate pair")
except ValueError:
raise ValueError("Only float or float-coercable values can be passed")
key = "{0},{1}".format(x, y)
return self[self.lookups[key]]
@property
def coords(self):
"""
Returns a list of tuples for the best matched coordinates.
"""
return [l.coords for l in self]
@property
def formatted_addresses(self):
"""
Returns a list of formatted addresses from the Location list
"""
return [l.formatted_address for l in self]
|
class Address(dict):
"""
Dictionary class that provides some convenience wrappers for accessing
commonly used data elements on an Address.
"""
def __init__(self, address_dict, order='lat'):
super(Address, self).__init__(address_dict)
self.order = order
@property
def coords(self):
"""
Returns a tuple representing the location of the address in a
GIS coords format, i.e. (longitude, latitude).
"""
(x, y) = ('lat', 'lng') if self.order == 'lat' else ('lng', 'lat')
try:
return (self['location'][x], self['location'][y])
except KeyError:
return None
@property
def accuracy(self):
"""
Returns the accuracy integer or None of the geocoded address.
"""
try:
return self['accuracy']
except KeyError:
return None
@property
def formatted_address(self):
"""
Returns a list of formatted addresses from the Location list
"""
return self.get('formatted_address', '')
class Location(dict):
"""
Dictionary class that provides some convenience accessors to commonly used
data elements.
"""
def __init__(self, result_dict, order='lat'):
super(Location, self).__init__(result_dict)
try:
self.best_match = Address(self['results'][0], order=order)
except (KeyError, IndexError):
self.best_match = Address({})
self.order = order
@property
def coords(self):
"""
Returns a tuple representing the location of the first result in a
GIS coords format, i.e. (longitude, latitude).
"""
return self.best_match.coords
@property
def accuracy(self):
"""
Returns the accuracy integer or None of the geocoded address.
"""
return self.best_match.accuracy
@property
def formatted_address(self):
"""
Returns a list of formatted addresses from the Location list
"""
return self.best_match.formatted_address
class Locationcollection(list):
"""
A list of Location objects, with dictionary lookup by address.
"""
lookups = {}
def __init__(self, results_list, order='lat'):
"""
Loads the individual responses into an internal list and uses the query
values as lookup keys.
"""
results = []
for (index, result) in enumerate(results_list):
results.append(Location(result['response'], order=order))
self.lookups[result['query']] = index
super(Locationcollection, self).__init__(results)
self.order = order
def get(self, key):
"""
Returns an individual Location by query lookup, e.g. address or point.
"""
if isinstance(key, tuple):
try:
(x, y) = (float(key[0]), float(key[1]))
except IndexError:
raise ValueError('Two values are required for a coordinate pair')
except ValueError:
raise ValueError('Only float or float-coercable values can be passed')
key = '{0},{1}'.format(x, y)
return self[self.lookups[key]]
@property
def coords(self):
"""
Returns a list of tuples for the best matched coordinates.
"""
return [l.coords for l in self]
@property
def formatted_addresses(self):
"""
Returns a list of formatted addresses from the Location list
"""
return [l.formatted_address for l in self]
|
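Since Address, Location and the collection class are thin dict wrappers, they can be exercised with a hand-built payload. The response below is entirely made up to mirror the keys the accessors read ('results', 'location', 'accuracy', 'formatted_address'); it is not a real geocoder response:

```python
fake_response = {
    'results': [{
        'location': {'lat': 38.9, 'lng': -77.0},
        'accuracy': 1,
        'formatted_address': '1 Example St, Washington, DC',
    }]
}

loc = Location(fake_response)   # default order='lat'
print(loc.coords)               # (38.9, -77.0)
print(loc.accuracy)             # 1
print(loc.formatted_address)    # '1 Example St, Washington, DC'
```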
def backtickify(s):
return '`{}`'.format(s)
def bind_exercises(g, exercises, start=1):
for i, ex in enumerate(exercises):
qno = i + start
varname = 'q{}'.format(qno)
assert varname not in g
g[varname] = ex
yield varname
|
def backtickify(s):
return '`{}`'.format(s)
def bind_exercises(g, exercises, start=1):
for (i, ex) in enumerate(exercises):
qno = i + start
varname = 'q{}'.format(qno)
assert varname not in g
g[varname] = ex
yield varname
|
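bind_exercises is a generator, so nothing is bound until it is iterated; each step injects the next exercise into the supplied namespace as q1, q2, ... and yields the name it used. A small usage sketch with made-up exercise objects, assuming the two functions above are in scope:

```python
ns = {}
exercises = ['first exercise', 'second exercise']   # hypothetical payloads

names = list(bind_exercises(ns, exercises))   # iterate to force the bindings
print(names)                  # ['q1', 'q2']
print(ns['q1'])               # 'first exercise'
print(backtickify(names[0]))  # `q1`
```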
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 31 17:42:50 2021
@author: tan_k
"""
|
"""
Created on Sat Jul 31 17:42:50 2021
@author: tan_k
"""
|
#
# PySNMP MIB module RBN-CONFIG-FILE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RBN-CONFIG-FILE-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:44:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
rbnMgmt, = mibBuilder.importSymbols("RBN-SMI", "rbnMgmt")
OwnerString, = mibBuilder.importSymbols("RMON-MIB", "OwnerString")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
MibIdentifier, iso, Bits, Counter64, ObjectIdentity, NotificationType, Integer32, Counter32, Gauge32, ModuleIdentity, Unsigned32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "iso", "Bits", "Counter64", "ObjectIdentity", "NotificationType", "Integer32", "Counter32", "Gauge32", "ModuleIdentity", "Unsigned32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress")
TestAndIncr, TextualConvention, DisplayString, TruthValue, RowStatus, TimeStamp = mibBuilder.importSymbols("SNMPv2-TC", "TestAndIncr", "TextualConvention", "DisplayString", "TruthValue", "RowStatus", "TimeStamp")
rbnConfigFileMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2352, 2, 13))
rbnConfigFileMib.setRevisions(('2002-05-29 00:00', '2001-10-10 00:00',))
if mibBuilder.loadTexts: rbnConfigFileMib.setLastUpdated('200110100000Z')
if mibBuilder.loadTexts: rbnConfigFileMib.setOrganization('Redback Networks, Inc.')
rbnConfigFileMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 0))
rbnConfigFileMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1))
rbnConfigFileMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2))
rcfJobs = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1))
rcfJobSpinLock = MibScalar((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 1), TestAndIncr()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rcfJobSpinLock.setStatus('current')
rcfJobNextIndex = MibScalar((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobNextIndex.setStatus('current')
rcfJobTable = MibTable((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3), )
if mibBuilder.loadTexts: rcfJobTable.setStatus('current')
rcfJobEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1), ).setIndexNames((0, "RBN-CONFIG-FILE-MIB", "rcfJobIndex"))
if mibBuilder.loadTexts: rcfJobEntry.setStatus('current')
rcfJobIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: rcfJobIndex.setStatus('current')
rcfJobOp = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("load", 0), ("save", 1)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobOp.setStatus('current')
rcfJobProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("local", 0), ("tftp", 1), ("ftp", 2))).clone('local')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobProtocol.setStatus('current')
rcfJobFilename = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 4), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobFilename.setStatus('current')
rcfJobIpAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 5), InetAddressType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobIpAddressType.setStatus('current')
rcfJobIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 6), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobIpAddress.setStatus('current')
rcfJobUsername = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 7), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobUsername.setStatus('current')
rcfJobPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 8), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobPassword.setStatus('current')
rcfJobPassiveMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 9), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobPassiveMode.setStatus('current')
rcfJobStopAtLine = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 10), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobStopAtLine.setStatus('current')
rcfJobSaveDefaults = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 11), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobSaveDefaults.setStatus('current')
rcfJobState = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("pending", 0), ("inProgress", 1), ("completed", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobState.setStatus('current')
rcfJobResult = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("success", 0), ("other", 1), ("noMemory", 2), ("parse", 3), ("io", 4), ("access", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobResult.setStatus('current')
rcfJobErrorMsgs = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 14), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobErrorMsgs.setStatus('current')
rcfJobCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 15), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobCreateTime.setStatus('current')
rcfJobStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 16), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobStartTime.setStatus('current')
rcfJobStopTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 17), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rcfJobStopTime.setStatus('current')
rcfJobNotifyOnCompletion = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 18), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobNotifyOnCompletion.setStatus('current')
rcfJobOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 19), OwnerString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobOwner.setStatus('current')
rcfJobRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 20), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: rcfJobRowStatus.setStatus('current')
rcfJobCompleted = NotificationType((1, 3, 6, 1, 4, 1, 2352, 2, 13, 0, 1)).setObjects(("RBN-CONFIG-FILE-MIB", "rcfJobResult"), ("RBN-CONFIG-FILE-MIB", "rcfJobErrorMsgs"))
if mibBuilder.loadTexts: rcfJobCompleted.setStatus('current')
rbnConfigFileCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 1))
rbnConfigFileGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2))
rcfJobGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2, 1)).setObjects(("RBN-CONFIG-FILE-MIB", "rcfJobSpinLock"), ("RBN-CONFIG-FILE-MIB", "rcfJobNextIndex"), ("RBN-CONFIG-FILE-MIB", "rcfJobOp"), ("RBN-CONFIG-FILE-MIB", "rcfJobProtocol"), ("RBN-CONFIG-FILE-MIB", "rcfJobFilename"), ("RBN-CONFIG-FILE-MIB", "rcfJobIpAddressType"), ("RBN-CONFIG-FILE-MIB", "rcfJobIpAddress"), ("RBN-CONFIG-FILE-MIB", "rcfJobUsername"), ("RBN-CONFIG-FILE-MIB", "rcfJobPassword"), ("RBN-CONFIG-FILE-MIB", "rcfJobPassiveMode"), ("RBN-CONFIG-FILE-MIB", "rcfJobStopAtLine"), ("RBN-CONFIG-FILE-MIB", "rcfJobSaveDefaults"), ("RBN-CONFIG-FILE-MIB", "rcfJobState"), ("RBN-CONFIG-FILE-MIB", "rcfJobResult"), ("RBN-CONFIG-FILE-MIB", "rcfJobCreateTime"), ("RBN-CONFIG-FILE-MIB", "rcfJobStartTime"), ("RBN-CONFIG-FILE-MIB", "rcfJobStopTime"), ("RBN-CONFIG-FILE-MIB", "rcfJobErrorMsgs"), ("RBN-CONFIG-FILE-MIB", "rcfJobNotifyOnCompletion"), ("RBN-CONFIG-FILE-MIB", "rcfJobOwner"), ("RBN-CONFIG-FILE-MIB", "rcfJobRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rcfJobGroup = rcfJobGroup.setStatus('current')
rcfJobNotifyGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2, 2)).setObjects(("RBN-CONFIG-FILE-MIB", "rcfJobCompleted"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rcfJobNotifyGroup = rcfJobNotifyGroup.setStatus('current')
rbnConfigFileCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 1, 1)).setObjects(("RBN-CONFIG-FILE-MIB", "rcfJobGroup"), ("RBN-CONFIG-FILE-MIB", "rcfJobNotifyGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbnConfigFileCompliance = rbnConfigFileCompliance.setStatus('current')
mibBuilder.exportSymbols("RBN-CONFIG-FILE-MIB", rcfJobIpAddress=rcfJobIpAddress, rcfJobErrorMsgs=rcfJobErrorMsgs, rbnConfigFileMIBNotifications=rbnConfigFileMIBNotifications, rcfJobNotifyGroup=rcfJobNotifyGroup, rcfJobUsername=rcfJobUsername, rcfJobStartTime=rcfJobStartTime, rcfJobSpinLock=rcfJobSpinLock, rcfJobRowStatus=rcfJobRowStatus, rcfJobStopTime=rcfJobStopTime, rcfJobIpAddressType=rcfJobIpAddressType, rcfJobEntry=rcfJobEntry, rcfJobs=rcfJobs, rbnConfigFileGroups=rbnConfigFileGroups, rcfJobState=rcfJobState, rcfJobCompleted=rcfJobCompleted, rcfJobNextIndex=rcfJobNextIndex, rcfJobPassword=rcfJobPassword, rcfJobStopAtLine=rcfJobStopAtLine, rcfJobPassiveMode=rcfJobPassiveMode, rcfJobTable=rcfJobTable, rcfJobCreateTime=rcfJobCreateTime, rbnConfigFileMib=rbnConfigFileMib, rcfJobFilename=rcfJobFilename, rcfJobOp=rcfJobOp, rbnConfigFileMIBObjects=rbnConfigFileMIBObjects, rcfJobSaveDefaults=rcfJobSaveDefaults, rcfJobNotifyOnCompletion=rcfJobNotifyOnCompletion, rcfJobIndex=rcfJobIndex, PYSNMP_MODULE_ID=rbnConfigFileMib, rcfJobOwner=rcfJobOwner, rbnConfigFileCompliances=rbnConfigFileCompliances, rbnConfigFileCompliance=rbnConfigFileCompliance, rcfJobProtocol=rcfJobProtocol, rbnConfigFileMIBConformance=rbnConfigFileMIBConformance, rcfJobGroup=rcfJobGroup, rcfJobResult=rcfJobResult)
|
(object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_range_constraint, constraints_intersection, constraints_union, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueSizeConstraint')
(inet_address, inet_address_type) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetAddress', 'InetAddressType')
(rbn_mgmt,) = mibBuilder.importSymbols('RBN-SMI', 'rbnMgmt')
(owner_string,) = mibBuilder.importSymbols('RMON-MIB', 'OwnerString')
(snmp_admin_string,) = mibBuilder.importSymbols('SNMP-FRAMEWORK-MIB', 'SnmpAdminString')
(notification_group, object_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ObjectGroup', 'ModuleCompliance')
(mib_identifier, iso, bits, counter64, object_identity, notification_type, integer32, counter32, gauge32, module_identity, unsigned32, time_ticks, mib_scalar, mib_table, mib_table_row, mib_table_column, ip_address) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibIdentifier', 'iso', 'Bits', 'Counter64', 'ObjectIdentity', 'NotificationType', 'Integer32', 'Counter32', 'Gauge32', 'ModuleIdentity', 'Unsigned32', 'TimeTicks', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'IpAddress')
(test_and_incr, textual_convention, display_string, truth_value, row_status, time_stamp) = mibBuilder.importSymbols('SNMPv2-TC', 'TestAndIncr', 'TextualConvention', 'DisplayString', 'TruthValue', 'RowStatus', 'TimeStamp')
rbn_config_file_mib = module_identity((1, 3, 6, 1, 4, 1, 2352, 2, 13))
rbn_config_file_mib.setRevisions(('2002-05-29 00:00', '2001-10-10 00:00'))
if mibBuilder.loadTexts:
    rbn_config_file_mib.setLastUpdated('200110100000Z')
if mibBuilder.loadTexts:
    rbn_config_file_mib.setOrganization('Redback Networks, Inc.')
rbn_config_file_mib_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 0))
rbn_config_file_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1))
rbn_config_file_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2))
rcf_jobs = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1))
rcf_job_spin_lock = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 1), test_and_incr()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    rcf_job_spin_lock.setStatus('current')
rcf_job_next_index = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 2), unsigned32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_next_index.setStatus('current')
rcf_job_table = mib_table((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3))
if mibBuilder.loadTexts:
    rcf_job_table.setStatus('current')
rcf_job_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1)).setIndexNames((0, 'RBN-CONFIG-FILE-MIB', 'rcfJobIndex'))
if mibBuilder.loadTexts:
    rcf_job_entry.setStatus('current')
rcf_job_index = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 1), unsigned32().subtype(subtypeSpec=value_range_constraint(1, 4294967295)))
if mibBuilder.loadTexts:
    rcf_job_index.setStatus('current')
rcf_job_op = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 2), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('load', 0), ('save', 1)))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_op.setStatus('current')
rcf_job_protocol = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('local', 0), ('tftp', 1), ('ftp', 2))).clone('local')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_protocol.setStatus('current')
rcf_job_filename = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 4), snmp_admin_string().subtype(subtypeSpec=value_size_constraint(1, 128))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_filename.setStatus('current')
rcf_job_ip_address_type = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 5), inet_address_type()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_ip_address_type.setStatus('current')
rcf_job_ip_address = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 6), inet_address()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_ip_address.setStatus('current')
rcf_job_username = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 7), snmp_admin_string().subtype(subtypeSpec=value_size_constraint(1, 32))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_username.setStatus('current')
rcf_job_password = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 8), snmp_admin_string().subtype(subtypeSpec=value_size_constraint(0, 128))).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_password.setStatus('current')
rcf_job_passive_mode = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 9), truth_value().clone('false')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_passive_mode.setStatus('current')
rcf_job_stop_at_line = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 10), unsigned32()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_stop_at_line.setStatus('current')
rcf_job_save_defaults = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 11), truth_value().clone('false')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_save_defaults.setStatus('current')
rcf_job_state = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 12), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2))).clone(namedValues=named_values(('pending', 0), ('inProgress', 1), ('completed', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_state.setStatus('current')
rcf_job_result = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 13), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1, 2, 3, 4, 5))).clone(namedValues=named_values(('success', 0), ('other', 1), ('noMemory', 2), ('parse', 3), ('io', 4), ('access', 5)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_result.setStatus('current')
rcf_job_error_msgs = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 14), snmp_admin_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_error_msgs.setStatus('current')
rcf_job_create_time = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 15), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_create_time.setStatus('current')
rcf_job_start_time = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 16), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_start_time.setStatus('current')
rcf_job_stop_time = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 17), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    rcf_job_stop_time.setStatus('current')
rcf_job_notify_on_completion = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 18), truth_value().clone('false')).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_notify_on_completion.setStatus('current')
rcf_job_owner = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 19), owner_string()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_owner.setStatus('current')
rcf_job_row_status = mib_table_column((1, 3, 6, 1, 4, 1, 2352, 2, 13, 1, 1, 3, 1, 20), row_status()).setMaxAccess('readcreate')
if mibBuilder.loadTexts:
    rcf_job_row_status.setStatus('current')
rcf_job_completed = notification_type((1, 3, 6, 1, 4, 1, 2352, 2, 13, 0, 1)).setObjects(('RBN-CONFIG-FILE-MIB', 'rcfJobResult'), ('RBN-CONFIG-FILE-MIB', 'rcfJobErrorMsgs'))
if mibBuilder.loadTexts:
    rcf_job_completed.setStatus('current')
rbn_config_file_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 1))
rbn_config_file_groups = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2))
rcf_job_group = object_group((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2, 1)).setObjects(('RBN-CONFIG-FILE-MIB', 'rcfJobSpinLock'), ('RBN-CONFIG-FILE-MIB', 'rcfJobNextIndex'), ('RBN-CONFIG-FILE-MIB', 'rcfJobOp'), ('RBN-CONFIG-FILE-MIB', 'rcfJobProtocol'), ('RBN-CONFIG-FILE-MIB', 'rcfJobFilename'), ('RBN-CONFIG-FILE-MIB', 'rcfJobIpAddressType'), ('RBN-CONFIG-FILE-MIB', 'rcfJobIpAddress'), ('RBN-CONFIG-FILE-MIB', 'rcfJobUsername'), ('RBN-CONFIG-FILE-MIB', 'rcfJobPassword'), ('RBN-CONFIG-FILE-MIB', 'rcfJobPassiveMode'), ('RBN-CONFIG-FILE-MIB', 'rcfJobStopAtLine'), ('RBN-CONFIG-FILE-MIB', 'rcfJobSaveDefaults'), ('RBN-CONFIG-FILE-MIB', 'rcfJobState'), ('RBN-CONFIG-FILE-MIB', 'rcfJobResult'), ('RBN-CONFIG-FILE-MIB', 'rcfJobCreateTime'), ('RBN-CONFIG-FILE-MIB', 'rcfJobStartTime'), ('RBN-CONFIG-FILE-MIB', 'rcfJobStopTime'), ('RBN-CONFIG-FILE-MIB', 'rcfJobErrorMsgs'), ('RBN-CONFIG-FILE-MIB', 'rcfJobNotifyOnCompletion'), ('RBN-CONFIG-FILE-MIB', 'rcfJobOwner'), ('RBN-CONFIG-FILE-MIB', 'rcfJobRowStatus'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rcf_job_group = rcf_job_group.setStatus('current')
rcf_job_notify_group = notification_group((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 2, 2)).setObjects(('RBN-CONFIG-FILE-MIB', 'rcfJobCompleted'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rcf_job_notify_group = rcf_job_notify_group.setStatus('current')
rbn_config_file_compliance = module_compliance((1, 3, 6, 1, 4, 1, 2352, 2, 13, 2, 1, 1)).setObjects(('RBN-CONFIG-FILE-MIB', 'rcfJobGroup'), ('RBN-CONFIG-FILE-MIB', 'rcfJobNotifyGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    rbn_config_file_compliance = rbn_config_file_compliance.setStatus('current')
mibBuilder.exportSymbols('RBN-CONFIG-FILE-MIB', rcfJobIpAddress=rcf_job_ip_address, rcfJobErrorMsgs=rcf_job_error_msgs, rbnConfigFileMIBNotifications=rbn_config_file_mib_notifications, rcfJobNotifyGroup=rcf_job_notify_group, rcfJobUsername=rcf_job_username, rcfJobStartTime=rcf_job_start_time, rcfJobSpinLock=rcf_job_spin_lock, rcfJobRowStatus=rcf_job_row_status, rcfJobStopTime=rcf_job_stop_time, rcfJobIpAddressType=rcf_job_ip_address_type, rcfJobEntry=rcf_job_entry, rcfJobs=rcf_jobs, rbnConfigFileGroups=rbn_config_file_groups, rcfJobState=rcf_job_state, rcfJobCompleted=rcf_job_completed, rcfJobNextIndex=rcf_job_next_index, rcfJobPassword=rcf_job_password, rcfJobStopAtLine=rcf_job_stop_at_line, rcfJobPassiveMode=rcf_job_passive_mode, rcfJobTable=rcf_job_table, rcfJobCreateTime=rcf_job_create_time, rbnConfigFileMib=rbn_config_file_mib, rcfJobFilename=rcf_job_filename, rcfJobOp=rcf_job_op, rbnConfigFileMIBObjects=rbn_config_file_mib_objects, rcfJobSaveDefaults=rcf_job_save_defaults, rcfJobNotifyOnCompletion=rcf_job_notify_on_completion, rcfJobIndex=rcf_job_index, PYSNMP_MODULE_ID=rbn_config_file_mib, rcfJobOwner=rcf_job_owner, rbnConfigFileCompliances=rbn_config_file_compliances, rbnConfigFileCompliance=rbn_config_file_compliance, rcfJobProtocol=rcf_job_protocol, rbnConfigFileMIBConformance=rbn_config_file_mib_conformance, rcfJobGroup=rcf_job_group, rcfJobResult=rcf_job_result)
|
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
def removeElements(self, head: ListNode, val: int) -> ListNode:
dummy = ListNode(-1)
dummy.next = head
def t(d):
if d.next:
if d.next.val == val:
d.next = d.next.next
t(d)
else:
t(d.next)
t(dummy)
return dummy.next
if __name__=="__main__":
output=Solution().removeElements([1,2,6,3,4,5,6],6)
print(output)
|
class Listnode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
    def remove_elements(self, head: Listnode, val: int) -> Listnode:
        dummy = Listnode(-1)
dummy.next = head
def t(d):
if d.next:
if d.next.val == val:
d.next = d.next.next
t(d)
else:
t(d.next)
t(dummy)
return dummy.next
if __name__ == '__main__':
    # remove_elements expects a linked list, so build one from the sample values first.
    head = None
    for v in reversed([1, 2, 6, 3, 4, 5, 6]):
        head = Listnode(v, head)
    node = Solution().remove_elements(head, 6)
    while node:
        print(node.val, end=' ')  # 1 2 3 4 5
        node = node.next
    print()
|
# nxn chessboard
n = 10
# number of dragons on the chessboard
dragons = 10
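# A "dragon" here moves like a queen plus a knight: capture()/free() below mark the full
# row, column, both diagonals and the eight knight jumps of a placed piece, and find()
# runs an n-queens style backtracking search placing one dragon per row.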
solution = [-1]*n
captured = [[0 for i in range(n)] for i in range(n)]
number = 0
local_calls = 0
total_calls = 0
def init():
global captured
def isCaptured(x, y):
global captured
return captured[x][y]
def capture(x, y):
for i in range(n):
captured[i][y] += 1
captured[x][i] += 1
# this point double counted in prev. for-loop,
captured[x][y] -= 1
i = x + 1
j = y + 1
while (i < n and j < n):
captured[i][j] += 1
i += 1
j += 1
i = x + 1
j = y - 1
while (i < n and j >= 0):
captured[i][j] += 1
i += 1
j -= 1
i = x - 1
j = y - 1
while (i >= 0 and j >= 0):
captured[i][j] += 1
i -= 1
j -= 1
i = x - 1
j = y + 1
while (i >= 0 and j < n):
captured[i][j] += 1
i -= 1
j += 1
if x - 2 >= 0:
if y - 1 >= 0:
captured[x-2][y-1] += 1
if y + 1 < n:
captured[x-2][y+1] += 1
if x + 2 < n:
if y - 1 >= 0:
captured[x + 2][y - 1] += 1
if y + 1 < n:
captured[x + 2][y + 1] += 1
if y - 2 >= 0:
if x - 1 >= 0:
captured[x - 1][y - 2] += 1
if x + 1 < n:
captured[x + 1][y - 2] += 1
if y + 2 < n:
if x - 1 >= 0:
captured[x - 1][y + 2] += 1
if x + 1 < n:
captured[x + 1][y + 2] += 1
def free (x, y):
for i in range(n):
captured[i][y] -= 1
captured[x][i] -= 1
# this point double counted in prev. for-loop,
captured[x][y] += 1
i = x + 1
j = y + 1
while (i < n and j < n):
captured[i][j] -= 1
i += 1
j += 1
i = x + 1
j = y - 1
while (i < n and j >= 0):
captured[i][j] -= 1
i += 1
j -= 1
i = x - 1
j = y - 1
while (i >= 0 and j >= 0):
captured[i][j] -= 1
i -= 1
j -= 1
i = x - 1
j = y + 1
while (i >= 0 and j < n):
captured[i][j] -= 1
i -= 1
j += 1
if x - 2 >= 0:
if y - 1 >= 0:
captured[x-2][y-1] -= 1
if y + 1 < n:
captured[x-2][y+1] -= 1
if x + 2 < n:
if y - 1 >= 0:
captured[x + 2][y - 1] -= 1
if y + 1 < n:
captured[x + 2][y + 1] -= 1
if y - 2 >= 0:
if x - 1 >= 0:
captured[x - 1][y - 2] -= 1
if x + 1 < n:
captured[x + 1][y - 2] -= 1
if y + 2 < n:
if x - 1 >= 0:
captured[x - 1][y + 2] -= 1
if x + 1 < n:
captured[x + 1][y + 2] -= 1
def find(x, d):
global captured, solution, number, total_calls, local_calls, dragons
total_calls += 1
local_calls += 1
if x == d:
number += 1
print("Soluiton: ", number, " Coord: ", solution)
print("Number of local calls ", local_calls)
local_calls = 0
return
for j in range(n):
if not isCaptured(x, j):
solution[x] = j
capture(x, j)
find(x + 1, dragons)
free(x, j)
print("")
print("Coordinate '-1' means no Dragon in that line")
print("")
find(0, dragons)
print("")
print("Number of total calls ", total_calls)
|
n = 10
dragons = 10
solution = [-1] * n
captured = [[0 for i in range(n)] for i in range(n)]
number = 0
local_calls = 0
total_calls = 0
def init():
global captured
def is_captured(x, y):
global captured
return captured[x][y]
def capture(x, y):
for i in range(n):
captured[i][y] += 1
captured[x][i] += 1
captured[x][y] -= 1
i = x + 1
j = y + 1
while i < n and j < n:
captured[i][j] += 1
i += 1
j += 1
i = x + 1
j = y - 1
while i < n and j >= 0:
captured[i][j] += 1
i += 1
j -= 1
i = x - 1
j = y - 1
while i >= 0 and j >= 0:
captured[i][j] += 1
i -= 1
j -= 1
i = x - 1
j = y + 1
while i >= 0 and j < n:
captured[i][j] += 1
i -= 1
j += 1
if x - 2 >= 0:
if y - 1 >= 0:
captured[x - 2][y - 1] += 1
if y + 1 < n:
captured[x - 2][y + 1] += 1
if x + 2 < n:
if y - 1 >= 0:
captured[x + 2][y - 1] += 1
if y + 1 < n:
captured[x + 2][y + 1] += 1
if y - 2 >= 0:
if x - 1 >= 0:
captured[x - 1][y - 2] += 1
if x + 1 < n:
captured[x + 1][y - 2] += 1
if y + 2 < n:
if x - 1 >= 0:
captured[x - 1][y + 2] += 1
if x + 1 < n:
captured[x + 1][y + 2] += 1
def free(x, y):
for i in range(n):
captured[i][y] -= 1
captured[x][i] -= 1
captured[x][y] += 1
i = x + 1
j = y + 1
while i < n and j < n:
captured[i][j] -= 1
i += 1
j += 1
i = x + 1
j = y - 1
while i < n and j >= 0:
captured[i][j] -= 1
i += 1
j -= 1
i = x - 1
j = y - 1
while i >= 0 and j >= 0:
captured[i][j] -= 1
i -= 1
j -= 1
i = x - 1
j = y + 1
while i >= 0 and j < n:
captured[i][j] -= 1
i -= 1
j += 1
if x - 2 >= 0:
if y - 1 >= 0:
captured[x - 2][y - 1] -= 1
if y + 1 < n:
captured[x - 2][y + 1] -= 1
if x + 2 < n:
if y - 1 >= 0:
captured[x + 2][y - 1] -= 1
if y + 1 < n:
captured[x + 2][y + 1] -= 1
if y - 2 >= 0:
if x - 1 >= 0:
captured[x - 1][y - 2] -= 1
if x + 1 < n:
captured[x + 1][y - 2] -= 1
if y + 2 < n:
if x - 1 >= 0:
captured[x - 1][y + 2] -= 1
if x + 1 < n:
captured[x + 1][y + 2] -= 1
def find(x, d):
global captured, solution, number, total_calls, local_calls, dragons
total_calls += 1
local_calls += 1
if x == d:
number += 1
        print('Solution: ', number, ' Coord: ', solution)
print('Number of local calls ', local_calls)
local_calls = 0
return
for j in range(n):
if not is_captured(x, j):
solution[x] = j
capture(x, j)
find(x + 1, dragons)
free(x, j)
print('')
print("Coordinate '-1' means no Dragon in that line")
print('')
find(0, dragons)
print('')
print('Number of total calls ', total_calls)
|
class ApiException(Exception):
pass
class Forbidden(ApiException):
def __init__(self, message):
self.message = message
self.status_code = 403
class NotFound(ApiException):
def __init__(self, message):
self.message = message
self.status_code = 404
class BadRequest(ApiException):
def __init__(self, message):
self.message = message
self.status_code = 400
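# Small usage sketch (not part of the original module): callers can catch the shared
# ApiException base class and read status_code/message uniformly.
try:
    raise NotFound("user 42 does not exist")
except ApiException as err:
    print(err.status_code, err.message)  # 404 user 42 does not exist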
|
class Apiexception(Exception):
pass
class Forbidden(Apiexception):
def __init__(self, message):
self.message = message
self.status_code = 403
class Notfound(Apiexception):
def __init__(self, message):
self.message = message
self.status_code = 404
class Badrequest(Apiexception):
def __init__(self, message):
self.message = message
self.status_code = 400
|
#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def test(name, input0, input1, input2, output0, input0_data, input1_data, input2_data, output_data):
model = Model().Operation("SELECT", input0, input1, input2).To(output0)
quant8 = DataTypeConverter().Identify({
input1: ["TENSOR_QUANT8_ASYMM", 1.5, 129],
input2: ["TENSOR_QUANT8_ASYMM", 0.5, 127],
output0: ["TENSOR_QUANT8_ASYMM", 1.0, 128],
})
example = Example({
input0: input0_data,
input1: input1_data,
input2: input2_data,
output0: output_data,
}, model=model, name=name).AddVariations("int32", "float16", "relaxed", quant8)
test(
name="one_dim",
input0=Input("input0", "TENSOR_BOOL8", "{3}"),
input1=Input("input1", "TENSOR_FLOAT32", "{3}"),
input2=Input("input2", "TENSOR_FLOAT32", "{3}"),
output0=Output("output0", "TENSOR_FLOAT32", "{3}"),
input0_data=[True, False, True],
input1_data=[1, 2, 3],
input2_data=[4, 5, 6],
output_data=[1, 5, 3],
)
test(
name="two_dim",
input0=Input("input0", "TENSOR_BOOL8", "{2, 2}"),
input1=Input("input1", "TENSOR_FLOAT32", "{2, 2}"),
input2=Input("input2", "TENSOR_FLOAT32", "{2, 2}"),
output0=Output("output0", "TENSOR_FLOAT32", "{2, 2}"),
input0_data=[False, True, False, True],
input1_data=[1, 2, 3, 4],
input2_data=[5, 6, 7, 8],
output_data=[5, 2, 7, 4],
)
test(
name="five_dim",
input0=Input("input0", "TENSOR_BOOL8", "{2, 1, 2, 1, 2}"),
input1=Input("input1", "TENSOR_FLOAT32", "{2, 1, 2, 1, 2}"),
input2=Input("input2", "TENSOR_FLOAT32", "{2, 1, 2, 1, 2}"),
output0=Output("output0", "TENSOR_FLOAT32", "{2, 1, 2, 1, 2}"),
input0_data=[True, False, True, False, True, False, True, False],
input1_data=[1, 2, 3, 4, 5, 6, 7, 8],
input2_data=[9, 10, 11, 12, 13, 14, 15, 16],
output_data=[1, 10, 3, 12, 5, 14, 7, 16],
)
|
def test(name, input0, input1, input2, output0, input0_data, input1_data, input2_data, output_data):
    model = Model().Operation('SELECT', input0, input1, input2).To(output0)
    quant8 = DataTypeConverter().Identify({input1: ['TENSOR_QUANT8_ASYMM', 1.5, 129], input2: ['TENSOR_QUANT8_ASYMM', 0.5, 127], output0: ['TENSOR_QUANT8_ASYMM', 1.0, 128]})
    example = Example({input0: input0_data, input1: input1_data, input2: input2_data, output0: output_data}, model=model, name=name).AddVariations('int32', 'float16', 'relaxed', quant8)
test(name='one_dim', input0=Input('input0', 'TENSOR_BOOL8', '{3}'), input1=Input('input1', 'TENSOR_FLOAT32', '{3}'), input2=Input('input2', 'TENSOR_FLOAT32', '{3}'), output0=Output('output0', 'TENSOR_FLOAT32', '{3}'), input0_data=[True, False, True], input1_data=[1, 2, 3], input2_data=[4, 5, 6], output_data=[1, 5, 3])
test(name='two_dim', input0=Input('input0', 'TENSOR_BOOL8', '{2, 2}'), input1=Input('input1', 'TENSOR_FLOAT32', '{2, 2}'), input2=Input('input2', 'TENSOR_FLOAT32', '{2, 2}'), output0=Output('output0', 'TENSOR_FLOAT32', '{2, 2}'), input0_data=[False, True, False, True], input1_data=[1, 2, 3, 4], input2_data=[5, 6, 7, 8], output_data=[5, 2, 7, 4])
test(name='five_dim', input0=Input('input0', 'TENSOR_BOOL8', '{2, 1, 2, 1, 2}'), input1=Input('input1', 'TENSOR_FLOAT32', '{2, 1, 2, 1, 2}'), input2=Input('input2', 'TENSOR_FLOAT32', '{2, 1, 2, 1, 2}'), output0=Output('output0', 'TENSOR_FLOAT32', '{2, 1, 2, 1, 2}'), input0_data=[True, False, True, False, True, False, True, False], input1_data=[1, 2, 3, 4, 5, 6, 7, 8], input2_data=[9, 10, 11, 12, 13, 14, 15, 16], output_data=[1, 10, 3, 12, 5, 14, 7, 16])
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2018 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
sql = [
"""CREATE TEMPORARY TABLE node_change_old AS SELECT * FROM node_change;""",
"""DROP TABLE node_change;""",
"""CREATE TABLE node_change (
rev text,
path text,
kind char(1),
change char(1),
base_path text,
base_rev text,
UNIQUE(rev, path, change)
);""",
"""INSERT INTO node_change (rev,path,kind,change,base_path,base_rev)
SELECT rev,path,kind,change,base_path,base_rev FROM node_change_old;""",
"""DROP TABLE node_change_old;"""
]
def do_upgrade(env, ver, cursor):
    # Wiki pages were accidentally created with the version number starting at
    # 0 instead of 1; this should fix that.
cursor.execute("SELECT name, version FROM wiki WHERE name IN "
"(SELECT name FROM wiki WHERE version=0) ORDER BY name,"
"version DESC")
result = cursor.fetchall()
if result:
cursor.executemany("UPDATE wiki SET version=version+1 WHERE name=%s "
"and version=%s",
[tuple(row) for row in result])
# Correct difference between db_default.py and upgrades/db10.py: The
# 'change' was missing from the uniqueness constraint
for s in sql:
cursor.execute(s)
|
sql = ['CREATE TEMPORARY TABLE node_change_old AS SELECT * FROM node_change;', 'DROP TABLE node_change;', 'CREATE TABLE node_change (\n rev text,\n path text,\n kind char(1),\n change char(1),\n base_path text,\n base_rev text,\n UNIQUE(rev, path, change)\n);', 'INSERT INTO node_change (rev,path,kind,change,base_path,base_rev)\n SELECT rev,path,kind,change,base_path,base_rev FROM node_change_old;', 'DROP TABLE node_change_old;']
def do_upgrade(env, ver, cursor):
cursor.execute('SELECT name, version FROM wiki WHERE name IN (SELECT name FROM wiki WHERE version=0) ORDER BY name,version DESC')
result = cursor.fetchall()
if result:
cursor.executemany('UPDATE wiki SET version=version+1 WHERE name=%s and version=%s', [tuple(row) for row in result])
for s in sql:
cursor.execute(s)
|
class LDAP_record:
"""
consume LDAP record and provide methods for accessing interesting data
"""
def __init__(self, unid):
self.error = False
ldap_dict = {}
#
# request complete user record from LDAP
cmd = "/Users/" + unid
try:
raw_data = subprocess.check_output(["/usr/bin/dscl", "/LDAPv3/your.ldap.server", "-read", cmd])
except:
self.error = True
return
#
# begin parsing data into dictionary
raw_data = string.replace(raw_data, '\n ', ' ')
raw_data = raw_data.split('\n')
for line in raw_data:
y = line.split(":")
y = [x for x in y if 'dsAttrTypeNative' not in x]
if len(y) == 2:
key = y[0]
value = y[1]
value = value.lstrip()
else:
key = y[0]
value = y[1:]
value = [x for x in value if x]
if key:
ldap_dict[key] = value
self.record = ldap_dict
def is_student(self):
try:
if 'CurrentStudent' in self.record['Student']:
return True
else:
return False
except:
return False
def is_staff(self):
try:
if self.record['Employee']: return True
except:
return False
def my_name(self):
try:
if self.record['gecos']:
if len(self.record['gecos']) > 1:
return self.record['gecos']
else:
# print "Beep!"
try:
if self.record['displayName']: return self.record['displayName']
except:
return None
except:
try:
if self.record['displayName']: return self.record['displayName']
except:
return None
def my_title(self):
try:
if self.record['title']: return self.record['title']
except:
return None
def my_email(self):
try:
if self.record['mail']: return self.record['mail']
except:
try:
if self.record['ExtensionAttribute4']: return self.record['ExtensionAttribute4']
except:
return None
def my_phone(self):
try:
if self.record['telephoneNumber']: return self.record['telephoneNumber']
except:
return None
def my_department(self):
try:
if self.record['department']: return self.record['department']
except:
return None
def my_address(self):
try:
if self.record['streetAddress']: return self.record['streetAddress']
except:
return None
#
# diagnostic methods
def print_full(self):
for k, v in self.record.items():
print ("%s > %r" % (k, v))
def print_keys(self):
return self.record.keys()
def ldap(self):
"""
translate LDAP data from object into fields used in tugboat
"""
try:
self.status_label.configure(style='Normal.TLabel')
self.status_string.set("LDAP selected.")
if self.valid_unid():
print("ldap %r" % self.endusername_string.get())
this_person = LDAP_record(self.endusername_string.get())
if not this_person.error:
self.fullname_string.set(this_person.my_name())
self.email_string.set(this_person.my_email())
self.phone_string.set(this_person.my_phone())
self.room_string.set(this_person.my_address())
if this_person.my_title() is None:
if this_person.my_department() is None:
self.position_string.set("")
else:
self.position_string.set(this_person.my_department())
else:
if this_person.my_department() is None:
self.position_string.set(this_person.my_title())
else:
self.position_string.set(this_person.my_title() + "/" + this_person.my_department())
if self.division_string.get():
self.division_string.set('None')
if self.building_string.get():
self.building_string.set('None')
else:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set("LDAP error, no record found for uNID.")
self.reset_data()
else:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set("Error setting LDAP Mode, no valid uNID.")
self.reset_user()
return
except ValueError:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set("Error setting LDAP Mode.")
return
|
class Ldap_Record:
"""
consume LDAP record and provide methods for accessing interesting data
"""
def __init__(self, unid):
self.error = False
ldap_dict = {}
cmd = '/Users/' + unid
try:
raw_data = subprocess.check_output(['/usr/bin/dscl', '/LDAPv3/your.ldap.server', '-read', cmd])
except:
self.error = True
return
raw_data = string.replace(raw_data, '\n ', ' ')
raw_data = raw_data.split('\n')
for line in raw_data:
y = line.split(':')
y = [x for x in y if 'dsAttrTypeNative' not in x]
if len(y) == 2:
key = y[0]
value = y[1]
value = value.lstrip()
else:
key = y[0]
value = y[1:]
value = [x for x in value if x]
if key:
ldap_dict[key] = value
self.record = ldap_dict
def is_student(self):
try:
if 'CurrentStudent' in self.record['Student']:
return True
else:
return False
except:
return False
def is_staff(self):
try:
if self.record['Employee']:
return True
except:
return False
def my_name(self):
try:
if self.record['gecos']:
if len(self.record['gecos']) > 1:
return self.record['gecos']
else:
try:
if self.record['displayName']:
return self.record['displayName']
except:
return None
except:
try:
if self.record['displayName']:
return self.record['displayName']
except:
return None
def my_title(self):
try:
if self.record['title']:
return self.record['title']
except:
return None
def my_email(self):
try:
if self.record['mail']:
return self.record['mail']
except:
try:
if self.record['ExtensionAttribute4']:
return self.record['ExtensionAttribute4']
except:
return None
def my_phone(self):
try:
if self.record['telephoneNumber']:
return self.record['telephoneNumber']
except:
return None
def my_department(self):
try:
if self.record['department']:
return self.record['department']
except:
return None
def my_address(self):
try:
if self.record['streetAddress']:
return self.record['streetAddress']
except:
return None
def print_full(self):
for (k, v) in self.record.items():
print('%s > %r' % (k, v))
def print_keys(self):
return self.record.keys()
def ldap(self):
"""
translate LDAP data from object into fields used in tugboat
"""
try:
self.status_label.configure(style='Normal.TLabel')
self.status_string.set('LDAP selected.')
if self.valid_unid():
print('ldap %r' % self.endusername_string.get())
                this_person = Ldap_Record(self.endusername_string.get())
if not this_person.error:
self.fullname_string.set(this_person.my_name())
self.email_string.set(this_person.my_email())
self.phone_string.set(this_person.my_phone())
self.room_string.set(this_person.my_address())
if this_person.my_title() is None:
if this_person.my_department() is None:
self.position_string.set('')
else:
self.position_string.set(this_person.my_department())
elif this_person.my_department() is None:
self.position_string.set(this_person.my_title())
else:
self.position_string.set(this_person.my_title() + '/' + this_person.my_department())
if self.division_string.get():
self.division_string.set('None')
if self.building_string.get():
self.building_string.set('None')
else:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set('LDAP error, no record found for uNID.')
self.reset_data()
else:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set('Error setting LDAP Mode, no valid uNID.')
self.reset_user()
return
except ValueError:
self.status_label.configure(style='Warning.TLabel')
self.status_string.set('Error setting LDAP Mode.')
return
|
NAME='nagios'
CFLAGS = []
LDFLAGS = []
LIBS = []
GCC_LIST = ['nagios']
|
name = 'nagios'
cflags = []
ldflags = []
libs = []
gcc_list = ['nagios']
|
class Solution:
def find_permutation(self, S):
S = sorted(S)
self.res = []
visited = [False] * len(S)
self.find_permutationUtil(S, visited, '')
return self.res
def find_permutationUtil(self, S, visited, str):
if len(str) == len(S):
self.res.append(str)
return
for i in range(len(S)):
char = S[i]
if visited[i] == False:
visited[i] = True
self.find_permutationUtil(S, visited, str+char)
visited[i] = False
if __name__ == '__main__':
sol = Solution()
sol.find_permutation("ABC")
|
class Solution:
def find_permutation(self, S):
        S = sorted(S)
self.res = []
visited = [False] * len(S)
        self.find_permutation_util(S, visited, '')
return self.res
def find_permutation_util(self, S, visited, str):
if len(str) == len(S):
self.res.append(str)
return
for i in range(len(S)):
char = S[i]
if visited[i] == False:
visited[i] = True
                self.find_permutation_util(S, visited, str + char)
visited[i] = False
if __name__ == '__main__':
    sol = Solution()
sol.find_permutation('ABC')
|
s = set(map(int, input().split()))
print(s)
print("max is: ",max(s))
print("min is: ",min(s))
|
s = set(map(int, input().split()))
print(s)
print('max is: ', max(s))
print('min is: ', min(s))
|
def transact(environment, t, to_agent, from_agent, settlement_type, amount, description):
"Function that ensures a correct transaction between agents"
environment.measurement['period'].append(t)
environment.measurement['to_agent'].append(to_agent)
environment.measurement['from_agent'].append(from_agent)
environment.measurement['settlement_type'].append(settlement_type)
environment.measurement['amount'].append(amount)
environment.measurement['description'].append(description)
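# Minimal illustration (the environment class below is a hypothetical stand-in, not part
# of the original model): any object exposing a `measurement` dict of lists works, which
# keeps the bookkeeping easy to test in isolation.
class _DemoEnvironment:
    def __init__(self):
        self.measurement = {key: [] for key in
                            ('period', 'to_agent', 'from_agent',
                             'settlement_type', 'amount', 'description')}

_demo_env = _DemoEnvironment()
transact(_demo_env, t=0, to_agent='bank_a', from_agent='bank_b',
         settlement_type='reserves', amount=100.0, description='demo transfer')
print(_demo_env.measurement['amount'])  # [100.0]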
|
def transact(environment, t, to_agent, from_agent, settlement_type, amount, description):
"""Function that ensures a correct transaction between agents"""
environment.measurement['period'].append(t)
environment.measurement['to_agent'].append(to_agent)
environment.measurement['from_agent'].append(from_agent)
environment.measurement['settlement_type'].append(settlement_type)
environment.measurement['amount'].append(amount)
environment.measurement['description'].append(description)
|
class NSGA2:
def __init__(self, initializer, evaluator, selector, crossover, mutator, stopper):
self.initializer = initializer
self.evaluator = evaluator
self.selector = selector
self.crossover = crossover
self.mutator = mutator
self.stopper = stopper
self.population = None
self.population_log = []
def make_phenotype(self, population):
for individual in population.values():
individual.evaluate()
def make_next_population(self, champions):
next_population = {}
for parent_a, parent_b in zip(champions[::2], champions[1::2]):
child_a, child_b = self.crossover.crossover(parent_a, parent_b)
next_population[child_a.individual_id] = child_a
next_population[child_b.individual_id] = child_b
for individual in next_population.values():
self.mutator.mutate(individual)
return next_population
def search(self, verbose=False):
if verbose:
print("Initialize population...")
population = self.initializer.initialize()
self.make_phenotype(population)
self.evaluator.evaluate(population)
self.population_log.append(population)
if verbose:
print("Run search...")
interrupt = False
while not self.stopper.stop(population):
try:
champions = self.selector.select(population)
next_population = self.make_next_population(champions)
self.make_phenotype(next_population)
self.evaluator.evaluate(next_population)
population = next_population
self.population_log.append(population)
except KeyboardInterrupt:
if verbose:
print("Search interrupted...")
break
if verbose:
print("Terminating search...")
self.population = population
return population
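# Wiring sketch with hypothetical stub components (these are not the real operators) to
# show the interfaces NSGA2 expects: a dict population keyed by individual_id, a selector
# returning an even-length list of parents, and a crossover producing two children.
class _StubIndividual:
    _next_id = 0
    def __init__(self):
        _StubIndividual._next_id += 1
        self.individual_id = _StubIndividual._next_id
        self.fitness = None
    def evaluate(self):
        # phenotype-construction hook invoked by NSGA2.make_phenotype()
        pass

class _StubInitializer:
    def initialize(self):
        individuals = [_StubIndividual() for _ in range(4)]
        return {ind.individual_id: ind for ind in individuals}

class _StubEvaluator:
    def evaluate(self, population):
        for ind in population.values():
            ind.fitness = 0.0

class _StubSelector:
    def select(self, population):
        return list(population.values())  # even-length list, paired off two by two

class _StubCrossover:
    def crossover(self, parent_a, parent_b):
        return _StubIndividual(), _StubIndividual()

class _StubMutator:
    def mutate(self, individual):
        pass

class _StubStopper:
    def __init__(self, generations=3):
        self.generations = generations
    def stop(self, population):
        self.generations -= 1
        return self.generations <= 0

_demo_nsga = NSGA2(_StubInitializer(), _StubEvaluator(), _StubSelector(),
                   _StubCrossover(), _StubMutator(), _StubStopper())
_demo_nsga.search(verbose=False)
print(len(_demo_nsga.population_log))  # initial population plus one entry per generation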
|
class Nsga2:
def __init__(self, initializer, evaluator, selector, crossover, mutator, stopper):
self.initializer = initializer
self.evaluator = evaluator
self.selector = selector
self.crossover = crossover
self.mutator = mutator
self.stopper = stopper
self.population = None
self.population_log = []
def make_phenotype(self, population):
for individual in population.values():
individual.evaluate()
def make_next_population(self, champions):
next_population = {}
for (parent_a, parent_b) in zip(champions[::2], champions[1::2]):
(child_a, child_b) = self.crossover.crossover(parent_a, parent_b)
next_population[child_a.individual_id] = child_a
next_population[child_b.individual_id] = child_b
for individual in next_population.values():
self.mutator.mutate(individual)
return next_population
def search(self, verbose=False):
if verbose:
print('Initialize population...')
population = self.initializer.initialize()
self.make_phenotype(population)
self.evaluator.evaluate(population)
self.population_log.append(population)
if verbose:
print('Run search...')
interrupt = False
while not self.stopper.stop(population):
try:
champions = self.selector.select(population)
next_population = self.make_next_population(champions)
self.make_phenotype(next_population)
self.evaluator.evaluate(next_population)
population = next_population
self.population_log.append(population)
except KeyboardInterrupt:
if verbose:
print('Search interrupted...')
break
if verbose:
print('Terminating search...')
self.population = population
return population
|
class RetrievalError(Exception):
pass
class SetterError(Exception):
pass
class ControlError(SetterError):
pass
class AuthentificationError(Exception):
pass
class TemporaryAuthentificationError(AuthentificationError):
pass
class APICompatibilityError(Exception):
pass
class APIError(Exception):
pass
|
class Retrievalerror(Exception):
pass
class Settererror(Exception):
pass
class Controlerror(Settererror):
pass
class Authentificationerror(Exception):
pass
class Temporaryauthentificationerror(Authentificationerror):
pass
class Apicompatibilityerror(Exception):
pass
class Apierror(Exception):
pass
|
# Based on largest_rectangle_in_histogram (LeetCode 84)
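# Each matrix row is treated as the base of a histogram: curr_row[c] counts the run of
# consecutive '1's ending at that row in column c, and the monotonic-stack pass from the
# histogram problem then finds the largest rectangle per row; the best over all rows wins.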
def maximal_rectrangle_in_matrix(matrix):
if len(matrix) == 0 or len(matrix[0]) == 0:
return 0
# Append extra 0 to the end, to mark the end of the curr row
curr_row = [0] * (len(matrix[0]) + 1)
ans = 0
for row in range(len(matrix)):
for column in range(len(matrix[0])):
if matrix[row][column] == '1':
curr_row[column] += 1
else:
curr_row[column] = 0
stack = [-1]
# print(curr_row)
for curr_index, height in enumerate(curr_row):
while height < curr_row[stack[-1]]:
prev_index = stack.pop()
h = curr_row[prev_index]
w = curr_index - stack[-1] - 1
ans = max(ans, h * w)
# print(ans, curr_index, prev_index)
stack.append(curr_index)
return ans
# print(maximal_rectrangle_in_matrix([["0", "1"], ["1", "0"]]))
print(maximal_rectrangle_in_matrix([
["1", "0", "1", "0", "0"],
["1", "0", "1", "1", "1"],
["1", "1", "1", "1", "1"],
["1", "0", "0", "1", "0"]
]))
def maximalRectangle_new(matrix):
if not matrix or not matrix[0]:
return 0
n = len(matrix[0])
height = [0] * (n + 1)
ans = 0
for row in matrix:
for i in range(n):
height[i] = height[i] + 1 if row[i] == '1' else 0
stack = [-1]
# print(height)
for i in range(n + 1):
while height[i] < height[stack[-1]]:
h = height[stack.pop()]
w = i - 1 - stack[-1]
ans = max(ans, h * w)
stack.append(i)
return ans
# print(maximal_rectrangle_in_matrix([["0", "1"], ["1", "0"]]))
print(maximalRectangle_new([
["1", "0", "1", "0", "0"],
["1", "0", "1", "1", "1"],
["1", "1", "1", "1", "1"],
["1", "0", "0", "1", "0"]
]))
|
def maximal_rectrangle_in_matrix(matrix):
if len(matrix) == 0 or len(matrix[0]) == 0:
return 0
curr_row = [0] * (len(matrix[0]) + 1)
ans = 0
for row in range(len(matrix)):
for column in range(len(matrix[0])):
if matrix[row][column] == '1':
curr_row[column] += 1
else:
curr_row[column] = 0
stack = [-1]
for (curr_index, height) in enumerate(curr_row):
while height < curr_row[stack[-1]]:
prev_index = stack.pop()
h = curr_row[prev_index]
w = curr_index - stack[-1] - 1
ans = max(ans, h * w)
stack.append(curr_index)
return ans
print(maximal_rectrangle_in_matrix([['1', '0', '1', '0', '0'], ['1', '0', '1', '1', '1'], ['1', '1', '1', '1', '1'], ['1', '0', '0', '1', '0']]))
def maximal_rectangle_new(matrix):
if not matrix or not matrix[0]:
return 0
n = len(matrix[0])
height = [0] * (n + 1)
ans = 0
for row in matrix:
for i in range(n):
height[i] = height[i] + 1 if row[i] == '1' else 0
stack = [-1]
for i in range(n + 1):
while height[i] < height[stack[-1]]:
h = height[stack.pop()]
w = i - 1 - stack[-1]
ans = max(ans, h * w)
stack.append(i)
return ans
print(maximal_rectangle_new([['1', '0', '1', '0', '0'], ['1', '0', '1', '1', '1'], ['1', '1', '1', '1', '1'], ['1', '0', '0', '1', '0']]))
|
class Deque(object):
"""A deque (double-ended queue) is a linear structure of
ordered items where the addition and removal of items can
take place on any end.
Thus deques can work as FIFO (First In, First Out) or
LIFO (Last In, First Out)
Examples:
>>> d = Deque()
>>> d.is_empty()
True
>>> d.add_front(4)
>>> d.add_front('dog')
>>> print(d)
[4, 'dog']
>>> d.size()
2
>>> d.remove_front()
'dog'
>>> d.add_rear(True)
>>> print(d)
[True, 4]
>>> d.remove_rear()
True
"""
def __init__(self):
self.items = []
def __str__(self):
"""Return the string method of the deque"""
return str(list(self.items))
def is_empty(self):
"""See whether the deque is empty"""
return self.items == []
def add_front(self, item):
"""Add an item in the front"""
self.items.append(item)
def add_rear(self, item):
"""Add an item in the rear"""
self.items.insert(0, item)
def remove_front(self):
"""Remove an item in the front"""
return self.items.pop()
def remove_rear(self):
"""Remove an item in the rear"""
return self.items.pop(0)
def size(self):
"""Return the number of items on the deque"""
return len(self.items)
|
class Deque(object):
"""A deque (double-ended queue) is a linear structure of
ordered items where the addition and removal of items can
take place on any end.
Thus deques can work as FIFO (First In, First Out) or
LIFO (Last In, First Out)
Examples:
>>> d = Deque()
>>> d.is_empty()
True
>>> d.add_front(4)
>>> d.add_front('dog')
>>> print(d)
[4, 'dog']
>>> d.size()
2
>>> d.remove_front()
'dog'
>>> d.add_rear(True)
>>> print(d)
[True, 4]
>>> d.remove_rear()
True
"""
def __init__(self):
self.items = []
def __str__(self):
"""Return the string method of the deque"""
return str(list(self.items))
def is_empty(self):
"""See whether the deque is empty"""
return self.items == []
def add_front(self, item):
"""Add an item in the front"""
self.items.append(item)
def add_rear(self, item):
"""Add an item in the rear"""
self.items.insert(0, item)
def remove_front(self):
"""Remove an item in the front"""
return self.items.pop()
def remove_rear(self):
"""Remove an item in the rear"""
return self.items.pop(0)
def size(self):
"""Return the number of items on the deque"""
return len(self.items)
|
"""
Sampler metadata
"""
class Sampler(object):
method_name = None
def __init__(self):
pass
class GibbsSampler(Sampler):
method_name = 'gibbs'
def __init__(self, n_iter=1000, n_burnin=100, n_thread=1):
# super(GibbsSampler, self).__init__()
self.n_iter = n_iter
self.n_burnin = n_burnin
self.n_thread = n_thread
class MAPtpx(Sampler):
method_name = 'maptpx'
SAMPLERS = [(cls.method_name, cls) for cls in (GibbsSampler, MAPtpx)]
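# Usage sketch (illustrative): SAMPLERS doubles as a name -> class registry, so a sampler
# can be looked up by its method_name and configured at construction time.
sampler_cls = dict(SAMPLERS)['gibbs']
sampler = sampler_cls(n_iter=500, n_burnin=50)
print(sampler.method_name, sampler.n_iter)  # gibbs 500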
|
"""
Sampler metadata
"""
class Sampler(object):
method_name = None
def __init__(self):
pass
class Gibbssampler(Sampler):
method_name = 'gibbs'
def __init__(self, n_iter=1000, n_burnin=100, n_thread=1):
self.n_iter = n_iter
self.n_burnin = n_burnin
self.n_thread = n_thread
class Maptpx(Sampler):
method_name = 'maptpx'
samplers = [(cls.method_name, cls) for cls in (Gibbssampler, Maptpx)]
|
# A program to check if the number is odd or even
def even_or_odd(num):
if num == "q":
return "Invalid"
elif num % 2 == 0:
return "Even"
else:
return "Odd"
while True:
try:
user_input = float(input("Enter then number you would like to check is odd or even"))
except ValueError or TypeError:
user_input = "q"
finally:
print("The number is ", even_or_odd(user_input))
|
def even_or_odd(num):
if num == 'q':
return 'Invalid'
elif num % 2 == 0:
return 'Even'
else:
return 'Odd'
while True:
try:
        user_input = float(input('Enter the number you would like to check is odd or even: '))
    except (ValueError, TypeError):
user_input = 'q'
finally:
print('The number is ', even_or_odd(user_input))
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 13 16:43:18 2017
@author: coskun
"""
def closest_power(base, num):
'''
base: base of the exponential, integer > 1
num: number you want to be closest to, integer > 0
Find the integer exponent such that base**exponent is closest to num.
Note that the base**exponent may be either greater or smaller than num.
In case of a tie, return the smaller value.
Returns the exponent.
'''
exp = 0
# Limitations here
if base <= 1:
return 0
if num <= 0:
return 0
if base == num:
return 1
if num == 1:
return 0
if abs(base) > abs(num):
return 0
else:
i = 1
tmp = 0
while abs(base)**i < abs(num):
tmp = i
i += 1
if abs(num) == abs(base)**i:
#global exp
exp = i
elif (abs(num) - abs(base)**tmp) == (abs(base)**i - abs(num)):
# in case of tie :)
exp = tmp
elif (abs(num) - abs(base)**tmp) < (abs(base)**i - abs(num)):
#global exp
exp = tmp
else:
#global exp
exp = i
#Return Conditions
return exp
print(closest_power(3,210))
|
"""
Created on Mon Feb 13 16:43:18 2017
@author: coskun
"""
def closest_power(base, num):
"""
base: base of the exponential, integer > 1
num: number you want to be closest to, integer > 0
Find the integer exponent such that base**exponent is closest to num.
Note that the base**exponent may be either greater or smaller than num.
In case of a tie, return the smaller value.
Returns the exponent.
"""
exp = 0
if base <= 1:
return 0
if num <= 0:
return 0
if base == num:
return 1
if num == 1:
return 0
if abs(base) > abs(num):
return 0
else:
i = 1
tmp = 0
while abs(base) ** i < abs(num):
tmp = i
i += 1
if abs(num) == abs(base) ** i:
exp = i
elif abs(num) - abs(base) ** tmp == abs(base) ** i - abs(num):
exp = tmp
elif abs(num) - abs(base) ** tmp < abs(base) ** i - abs(num):
exp = tmp
else:
exp = i
return exp
print(closest_power(3, 210))
|
class Solution:
def minCut(self, s: str) -> int:
if not s:
return 0
memo = dict()
def helper(l,r):
if l > r:
return 0
minVal = float('inf')
for i in range(l,r+1,1):
strs = s[l:i+1]
if strs == strs[::-1]:
if s[i+1:r+1] in memo:
minVal = min(memo[s[i+1:r+1]], minVal)
else:
minVal = min(helper(i+1,r), minVal)
memo[s[l:r+1]] = minVal + 1
return memo[s[l:r+1]]
helper(0,len(s)-1)
return memo[s]-1 if memo[s] != float('inf') else 0
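# Quick sanity check (not part of the original submission): "aab" needs exactly one cut,
# into "aa" | "b", so minCut should return 1.
print(Solution().minCut("aab"))  # 1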
|
class Solution:
def min_cut(self, s: str) -> int:
if not s:
return 0
memo = dict()
def helper(l, r):
if l > r:
return 0
min_val = float('inf')
for i in range(l, r + 1, 1):
strs = s[l:i + 1]
if strs == strs[::-1]:
if s[i + 1:r + 1] in memo:
                        min_val = min(memo[s[i + 1:r + 1]], min_val)
                    else:
                        min_val = min(helper(i + 1, r), min_val)
            memo[s[l:r + 1]] = min_val + 1
return memo[s[l:r + 1]]
helper(0, len(s) - 1)
return memo[s] - 1 if memo[s] != float('inf') else 0
|
'''https://practice.geeksforgeeks.org/problems/cyclically-rotate-an-array-by-one2614/1
Cyclically rotate an array by one
Basic Accuracy: 64.05% Submissions: 66795 Points: 1
Given an array, rotate the array by one position in clock-wise direction.
Example 1:
Input:
N = 5
A[] = {1, 2, 3, 4, 5}
Output:
5 1 2 3 4
Example 2:
Input:
N = 8
A[] = {9, 8, 7, 6, 4, 2, 1, 3}
Output:
3 9 8 7 6 4 2 1
Your Task:
You don't need to read input or print anything. Your task is to complete the function rotate() which takes the array A[] and its size N as inputs and modify the array.
Expected Time Complexity: O(N)
Expected Auxiliary Space: O(1)
Constraints:
1<=N<=105
0<=a[i]<=105'''
def rotate(arr, n):
temp = arr[n-1]
i = n-1
while(i > 0):
b = i-1
arr[i] = arr[b]
i -= 1
arr[0] = temp
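# Quick check against Example 1 from the problem statement (not part of the submission):
_arr = [1, 2, 3, 4, 5]
rotate(_arr, len(_arr))
print(_arr)  # [5, 1, 2, 3, 4]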
|
"""https://practice.geeksforgeeks.org/problems/cyclically-rotate-an-array-by-one2614/1
Cyclically rotate an array by one
Basic Accuracy: 64.05% Submissions: 66795 Points: 1
Given an array, rotate the array by one position in clock-wise direction.
Example 1:
Input:
N = 5
A[] = {1, 2, 3, 4, 5}
Output:
5 1 2 3 4
Example 2:
Input:
N = 8
A[] = {9, 8, 7, 6, 4, 2, 1, 3}
Output:
3 9 8 7 6 4 2 1
Your Task:
You don't need to read input or print anything. Your task is to complete the function rotate() which takes the array A[] and its size N as inputs and modify the array.
Expected Time Complexity: O(N)
Expected Auxiliary Space: O(1)
Constraints:
1<=N<=105
0<=a[i]<=105"""
def rotate(arr, n):
temp = arr[n - 1]
i = n - 1
while i > 0:
b = i - 1
arr[i] = arr[b]
i -= 1
arr[0] = temp
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
def max_min(v):
m_min, m_max = min(v[0], v[-1]), max(v[0], v[-1])
for i in v[1:len(v)-1]:
        if i < m_min:
m_min = i
elif i > m_max:
m_max = i
return m_min, m_max
if __name__ == '__main__':
m_min, m_max = max_min([9, 3, 4, 7, 2, 0])
print(m_min, m_max)
|
def max_min(v):
(m_min, m_max) = (min(v[0], v[-1]), max(v[0], v[-1]))
for i in v[1:len(v) - 1]:
if i < m_min:
m_min = i
elif i > m_max:
m_max = i
return (m_min, m_max)
if __name__ == '__main__':
(m_min, m_max) = max_min([9, 3, 4, 7, 2, 0])
print(m_min, m_max)
|
# -*- coding: utf-8 -*-
"""
seqansphinx
~~~~~~~~~~~~~
This package is a namespace package that contains all extensions
distributed in the ``seqansphinx`` distribution.
:copyright: Copyright 2014 by Manuel Holtgrewe
:license: MIT, see LICENSE for details.
"""
__import__('pkg_resources').declare_namespace(__name__)
|
"""
seqansphinx
~~~~~~~~~~~~~
This package is a namespace package that contains all extensions
distributed in the ``seqansphinx`` distribution.
:copyright: Copyright 2014 by Manuel Holtgrewe
:license: MIT, see LICENSE for details.
"""
__import__('pkg_resources').declare_namespace(__name__)
|
string = [x for x in input()]
string_list_after = []
index = 0
power = 0
for el in range(len(string)):
if string[index] == ">":
expl = int(string[el + 1])
power += int(expl)
string.pop(el+1)
string.append('')
power -= 1
while power > 0:
index = el
string.pop(el + 1)
string.append('')
power -= 1
else:
string_list_after.append(string[index])
index += 1
print(''.join(string_list_after))
|
string = [x for x in input()]
string_list_after = []
index = 0
power = 0
for el in range(len(string)):
if string[index] == '>':
expl = int(string[el + 1])
power += int(expl)
string.pop(el + 1)
string.append('')
power -= 1
while power > 0:
index = el
string.pop(el + 1)
string.append('')
power -= 1
else:
string_list_after.append(string[index])
index += 1
print(''.join(string_list_after))
|
"""Exceptions for cmd_utils."""
class CommandException(Exception):
"""
Base Exception for cmd_utils exceptions.
Attributes:
exc - string message
return_code - return code of command
"""
def __init__(self, exc, return_code=None):
Exception.__init__(self)
self.exc = exc
self.return_code = return_code
def __str__(self):
return self.__repr__()
def __repr__(self):
return self.exc
class ReturnCodeError(CommandException):
"""Raised when a command returns a non-zero exit code."""
pass
class SSHError(CommandException):
"""Raised when SSH connection fails."""
pass
|
"""Exceptions for cmd_utils."""
class Commandexception(Exception):
"""
Base Exception for cmd_utils exceptions.
Attributes:
exc - string message
return_code - return code of command
"""
def __init__(self, exc, return_code=None):
Exception.__init__(self)
self.exc = exc
self.return_code = return_code
def __str__(self):
return self.__repr__()
def __repr__(self):
return self.exc
class Returncodeerror(Commandexception):
"""Raised when a command returns a non-zero exit code."""
pass
class Ssherror(Commandexception):
"""Raised when SSH connection fails."""
pass
|
#A more compact version of the algorithm that can be executed parallelly.
list1 = [0x58, 0x76, 0x54, 0x3a, 0xbe, 0x58, 0x76, 0x54, 0xbe, 0xcd, 0x45, 0x66, 0x85, 0x65]
# ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^
# Par=4: bulk bulk bulk bulk bulk bulk bulk bulk bulk bulk bulk bulk rest rest
# desc desc desc
def parallel_roling_mac_hash(list, par):
multiplier = 0x01000193 #algorithm parameter
hash = [0 for x in range(par)] #the fields of the array can be computed parallelly
mult_powers = [multiplier**(x+1) for x in range(par)]
list_length = len(list)
bulk = list_length//par
rest = list_length%par
for i in range(rest): #"rest" number of threads have to go one step further
for j in range(bulk + 1):
index = j * par + i
hash[i] += list[index]
if index > list_length-par:
mult_index = -par + (list_length - index) - 1 #going from highest power to first order
hash[i] *= mult_powers[mult_index]
else:
hash[i] *= mult_powers[-1]
hash[i] %= 2**32
for i in range(rest,par): #threads for calculating the remaining data
for j in range(bulk):
index = j * par + i
hash[i] += list[index]
if index > list_length-par:
mult_index = -par + (list_length - index) - 1 #going from highest power to first order
hash[i] *= mult_powers[mult_index]
else:
hash[i] *= mult_powers[-1]
hash[i] %= 2**32
sum = 0
for i in range(par): #summing up results of the parallel computations (single threaded mostly)
sum += hash[i]
sum %= 2**32
return(sum)
#TESTING
def rolling_hash_by_mac(list):
hash = 0
for byte in list:
hash += byte
hash *= 0x01000193
hash %= 2**32
return(hash)
print('parallel:', parallel_roling_mac_hash(list1,3))
print('parallel:', parallel_roling_mac_hash(list1,5))
print('parallel:', parallel_roling_mac_hash(list1,8))
print('expected:', rolling_hash_by_mac(list1))
|
list1 = [88, 118, 84, 58, 190, 88, 118, 84, 190, 205, 69, 102, 133, 101]
def parallel_roling_mac_hash(list, par):
multiplier = 16777619
hash = [0 for x in range(par)]
mult_powers = [multiplier ** (x + 1) for x in range(par)]
list_length = len(list)
bulk = list_length // par
rest = list_length % par
for i in range(rest):
for j in range(bulk + 1):
index = j * par + i
hash[i] += list[index]
if index > list_length - par:
mult_index = -par + (list_length - index) - 1
hash[i] *= mult_powers[mult_index]
else:
hash[i] *= mult_powers[-1]
hash[i] %= 2 ** 32
for i in range(rest, par):
for j in range(bulk):
index = j * par + i
hash[i] += list[index]
if index > list_length - par:
mult_index = -par + (list_length - index) - 1
hash[i] *= mult_powers[mult_index]
else:
hash[i] *= mult_powers[-1]
hash[i] %= 2 ** 32
sum = 0
for i in range(par):
sum += hash[i]
sum %= 2 ** 32
return sum
def rolling_hash_by_mac(list):
hash = 0
for byte in list:
hash += byte
hash *= 16777619
hash %= 2 ** 32
return hash
print('parallel:', parallel_roling_mac_hash(list1, 3))
print('parallel:', parallel_roling_mac_hash(list1, 5))
print('parallel:', parallel_roling_mac_hash(list1, 8))
print('expected:', rolling_hash_by_mac(list1))
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
# In[4]:
def fib(n):
a, b = 0,1
while a < n:
print(a)
a, b = b, a + b
# In[8]:
def primos(n):
numbers = [True, True] + [True] * (n-1)
last_prime_number = 2
i = last_prime_number
while last_prime_number**2 <= n:
i += last_prime_number
while i <= n:
numbers[i] = False
i += last_prime_number
j = last_prime_number + 1
while j < n:
if numbers[j]:
last_prime_number = j
break
j += 1
i = last_prime_number
return [i + 2 for i, not_crossed in enumerate(numbers[2:]) if not_crossed]
# In[34]:
def perfecto(num):
suma = 0
for i in range(1, num):
#print(i)
if num % i == 0:
suma = suma + i
if (suma == num):
return True
else:
return False
# In[45]:
def numeros_perfectos(n):
factors = []
i = 1
while len(factors) != n:
if perfecto(i):
factors.append(i)
i += 1
return factors
# In[48]:
numeros_perfectos(3)
primos(10)
fib(10)
# In[ ]:
|
def fib(n):
(a, b) = (0, 1)
while a < n:
print(a)
(a, b) = (b, a + b)
def primos(n):
numbers = [True, True] + [True] * (n - 1)
last_prime_number = 2
i = last_prime_number
while last_prime_number ** 2 <= n:
i += last_prime_number
while i <= n:
numbers[i] = False
i += last_prime_number
j = last_prime_number + 1
while j < n:
if numbers[j]:
last_prime_number = j
break
j += 1
i = last_prime_number
return [i + 2 for (i, not_crossed) in enumerate(numbers[2:]) if not_crossed]
def perfecto(num):
suma = 0
for i in range(1, num):
if num % i == 0:
suma = suma + i
if suma == num:
return True
else:
return False
def numeros_perfectos(n):
factors = []
i = 1
while len(factors) != n:
if perfecto(i):
factors.append(i)
i += 1
return factors
numeros_perfectos(3)
primos(10)
fib(10)
|
"""
Number of Provinces
There are n cities. Some of them are connected, while some are not.
If city a is connected directly with city b, and city b is connected directly with city c,
then city a is connected indirectly with city c.
A province is a group of directly or indirectly connected cities
and no other cities outside of the group.
You are given an n x n matrix isConnected where isConnected[i][j] = 1 if the
ith city and the jth city are directly connected, and
isConnected[i][j] = 0 otherwise.
Return the total number of provinces.
Example 1:
Input: isConnected = [[1,1,0],[1,1,0],[0,0,1]]
Output: 2
Example 2:
Input: isConnected = [[1,0,0],[0,1,0],[0,0,1]]
Output: 3
Constraints:
1 <= n <= 200
n == isConnected.length
n == isConnected[i].length
isConnected[i][j] is 1 or 0.
isConnected[i][i] == 1
isConnected[i][j] == isConnected[j][i]
"""
# time: O(n^2)
# space: O(n) to store visit list
from typing import List
class Solution:
def findCircleNum(self, M: List[List[int]]) -> int:
if not M:
return 0
n = len(M)
visit = [False]*n
def dfs(u):
for v in range(n):
if M[u][v] ==1 and visit[v] == False:
visit[v] = True
dfs(v)
count = 0
for i in range(n):
if visit[i] == False:
count += 1
visit[i] = True
dfs(i)
return count
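# Quick illustrative check against Example 1 above (relies on the typing import added
# at the top of this snippet); not part of the original solution.
print(Solution().findCircleNum([[1, 1, 0], [1, 1, 0], [0, 0, 1]]))  # expected: 2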
|
"""
Number of Provinces
There are n cities. Some of them are connected, while some are not.
If city a is connected directly with city b, and city b is connected directly with city c,
then city a is connected indirectly with city c.
A province is a group of directly or indirectly connected cities
and no other cities outside of the group.
You are given an n x n matrix isConnected where isConnected[i][j] = 1 if the
ith city and the jth city are directly connected, and
isConnected[i][j] = 0 otherwise.
Return the total number of provinces.
Example 1:
Input: isConnected = [[1,1,0],[1,1,0],[0,0,1]]
Output: 2
Example 2:
Input: isConnected = [[1,0,0],[0,1,0],[0,0,1]]
Output: 3
Constraints:
1 <= n <= 200
n == isConnected.length
n == isConnected[i].length
isConnected[i][j] is 1 or 0.
isConnected[i][i] == 1
isConnected[i][j] == isConnected[j][i]
"""
from typing import List
class Solution:
def find_circle_num(self, M: List[List[int]]) -> int:
if not M:
return 0
n = len(M)
visit = [False] * n
def dfs(u):
for v in range(n):
if M[u][v] == 1 and visit[v] == False:
visit[v] = True
dfs(v)
count = 0
for i in range(n):
if visit[i] == False:
count += 1
visit[i] = True
dfs(i)
return count
|
class CliError(RuntimeError):
pass
|
class Clierror(RuntimeError):
pass
|
from django.http import JsonResponse
class AjaxableResponseMixin(object):
"""
Mixin to add AJAX support to a form.
Must be used with an object-based FormView (e.g. CreateView)
"""
def form_invalid(self, form):
response = super(AjaxableResponseMixin, self).form_invalid(form)
if self.request.is_ajax():
return JsonResponse(form.errors, status=400)
else:
return response
def form_valid(self, form):
# We make sure to call the parent's form_valid() method because
# it might do some processing (in the case of CreateView, it will
# call form.save() for example).
response = super(AjaxableResponseMixin, self).form_valid(form)
if self.request.is_ajax():
data = {
'pk': self.object.pk,
}
return JsonResponse(data)
else:
return response
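# Minimal usage sketch (not part of the original snippet): combine the mixin with an
# object-based CreateView. `Author` and `myapp` are hypothetical names used purely for
# illustration; substitute a model and app from your own project.
from django.views.generic.edit import CreateView
from myapp.models import Author
class AuthorCreateView(AjaxableResponseMixin, CreateView):
    model = Author
    fields = ['name']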
|
from django.http import JsonResponse
class Ajaxableresponsemixin(object):
    """
    Mixin to add AJAX support to a form.
    Must be used with an object-based FormView (e.g. CreateView)
    """
    def form_invalid(self, form):
        response = super(Ajaxableresponsemixin, self).form_invalid(form)
        if self.request.is_ajax():
            return JsonResponse(form.errors, status=400)
        else:
            return response
    def form_valid(self, form):
        response = super(Ajaxableresponsemixin, self).form_valid(form)
        if self.request.is_ajax():
            data = {'pk': self.object.pk}
            return JsonResponse(data)
        else:
            return response
|
#!/usr/bin/env python
__author__ = "Giuseppe Chiesa"
__copyright__ = "Copyright 2017-2021, Giuseppe Chiesa"
__credits__ = ["Giuseppe Chiesa"]
__license__ = "BSD"
__maintainer__ = "Giuseppe Chiesa"
__email__ = "mail@giuseppechiesa.it"
__status__ = "PerpetualBeta"
def write_with_modecheck(file_handler, data):
if file_handler.mode == 'w':
file_handler.write(data.decode('utf-8'))
else:
file_handler.write(data)
|
__author__ = 'Giuseppe Chiesa'
__copyright__ = 'Copyright 2017-2021, Giuseppe Chiesa'
__credits__ = ['Giuseppe Chiesa']
__license__ = 'BSD'
__maintainer__ = 'Giuseppe Chiesa'
__email__ = 'mail@giuseppechiesa.it'
__status__ = 'PerpetualBeta'
def write_with_modecheck(file_handler, data):
if file_handler.mode == 'w':
file_handler.write(data.decode('utf-8'))
else:
file_handler.write(data)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
class BaseEmitter(object):
'''Base for emitters of the *data-migrator*.
Attributes:
manager (BaseManager): reference to the manager that is calling this
emitter to export objects from that manager
model_class (Model): reference to the model linked to the class
extension (str): file extension for output file of this emitter
note: :attr:`~.model_class` and :attr:`~.manager` are linked together
'''
def __init__(self, extension=None, manager=None):
# reference to the manager that is calling this emitter to
# export objects from the manager
self.manager = manager
self.model_class = manager.model_class
self.meta = self.model_class._meta
self.extension = extension or getattr(self.__class__,
'extension', '.txt')
def emit(self, l):
'''output the result set of an object.
Args:
l (Model): object to transform
Returns:
list: generated strings
'''
raise NotImplementedError
def filename(self):
'''generate filename for this emitter.
        generates a filename based on :attr:`~.BaseEmitter.extension` and
either :attr:`~.Options.file_name` or :attr:`~.Options.table_name`
Returns:
str: filename
'''
_ext = self.extension
if _ext[0] != '.':
_ext = '.' + _ext
_filename = self.meta.file_name or (self.meta.table_name + _ext)
return _filename
def preamble(self, headers):
        '''generate a preamble for the file to emit.
Args:
headers (list): additional header to provide outside the emitter
(e.g. statistics)
Returns:
list: preamble lines
'''
raise NotImplementedError
    def postamble(self):  # pylint: disable=no-self-use
'''generate a postamble for the file to emit.
Returns:
list: postamble lines
'''
return []
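# A minimal concrete emitter, added here only as an illustration of the expected
# subclass surface (set `extension`, implement `emit` and `preamble`); the body of
# emit() is a placeholder, since the real record API is not shown in this snippet.
class PlainTextEmitter(BaseEmitter):
    extension = '.txt'
    def preamble(self, headers):
        return list(headers)
    def emit(self, l):
        return [str(l)]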
|
class Baseemitter(object):
"""Base for emitters of the *data-migrator*.
Attributes:
manager (BaseManager): reference to the manager that is calling this
emitter to export objects from that manager
model_class (Model): reference to the model linked to the class
extension (str): file extension for output file of this emitter
note: :attr:`~.model_class` and :attr:`~.manager` are linked together
"""
def __init__(self, extension=None, manager=None):
self.manager = manager
self.model_class = manager.model_class
self.meta = self.model_class._meta
self.extension = extension or getattr(self.__class__, 'extension', '.txt')
def emit(self, l):
"""output the result set of an object.
Args:
l (Model): object to transform
Returns:
list: generated strings
"""
raise NotImplementedError
def filename(self):
"""generate filename for this emitter.
        generates a filename based on :attr:`~.BaseEmitter.extension` and
either :attr:`~.Options.file_name` or :attr:`~.Options.table_name`
Returns:
str: filename
"""
_ext = self.extension
if _ext[0] != '.':
_ext = '.' + _ext
_filename = self.meta.file_name or self.meta.table_name + _ext
return _filename
def preamble(self, headers):
"""generate a premable for the file to emit.
Args:
headers (list): additional header to provide outside the emitter
(e.g. statistics)
Returns:
list: preamble lines
"""
raise NotImplementedError
def postamble(self):
"""generate a postamble for the file to emit.
Returns:
list: postamble lines
"""
return []
|
"""
LeetCode Problem: 833. Find And Replace in String
Link: https://leetcode.com/problems/find-and-replace-in-string/
Language: Python
Written by: Mostofa Adib Shakib
Time complexity: O(n)
Space Complexity: O(n)
"""
from typing import List
class Solution:
def findReplaceString(self, S: str, indexes: List[int], sources: List[str], targets: List[str]) -> str:
modified = list(S)
for index, source, target in zip(indexes, sources, targets):
if not S[index:].startswith(source):
continue
else:
modified[index] = target
for i in range(index+1, len(source) + index):
modified[i] = ""
return "".join(modified)
|
"""
LeetCode Problem: 833. Find And Replace in String
Link: https://leetcode.com/problems/find-and-replace-in-string/
Language: Python
Written by: Mostofa Adib Shakib
Time complexity: O(n)
Space Complexity: O(n)
"""
from typing import List
class Solution:
def find_replace_string(self, S: str, indexes: List[int], sources: List[str], targets: List[str]) -> str:
modified = list(S)
for (index, source, target) in zip(indexes, sources, targets):
if not S[index:].startswith(source):
continue
else:
modified[index] = target
for i in range(index + 1, len(source) + index):
modified[i] = ''
return ''.join(modified)
|
class Stack(object):
def __init__(self):
self.stack = []
def is_empty(self):
return not self.stack
def push(self, v):
self.stack.append(v)
def pop(self):
data = self.stack[-1]
del self.stack[-1]
return data
def peek(self):
return self.stack[-1]
def size_stack(self):
return len(self.stack)
if __name__ == "__main__":
s = Stack()
s.push(1)
s.push(2)
s.push(3)
print(s.size_stack())
print("Popped: ", s.pop())
print("Popped: ", s.pop())
print(s.size_stack())
print("Peek: ", s.peek())
print("Popped: ", s.pop())
print(s.size_stack())
|
class Stack(object):
def __init__(self):
self.stack = []
def is_empty(self):
return not self.stack
def push(self, v):
self.stack.append(v)
def pop(self):
data = self.stack[-1]
del self.stack[-1]
return data
def peek(self):
return self.stack[-1]
def size_stack(self):
return len(self.stack)
if __name__ == '__main__':
    s = Stack()
s.push(1)
s.push(2)
s.push(3)
print(s.size_stack())
print('Popped: ', s.pop())
print('Popped: ', s.pop())
print(s.size_stack())
print('Peek: ', s.peek())
print('Popped: ', s.pop())
print(s.size_stack())
|
array = [0,0,1,1,1,1,2,2,2,2,3,3]
indexEqualCurrentUsage = []
for index in range(len(array)):
if array[index] == 1:
indexEqualCurrentUsage.append(index)
print(indexEqualCurrentUsage)
|
array = [0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3]
index_equal_current_usage = []
for index in range(len(array)):
if array[index] == 1:
        index_equal_current_usage.append(index)
print(index_equal_current_usage)
|
"""
We see many sharp peaks in the training set, but these peaks are not always present
in the test set, suggesting that they are due to noise. Therefore, ignoring this
noise, we might have expected the neural responses to be a sum of only a few
different filters and only at a few different positions. This would mean that we
would expect the `out_layer` weight matrix to be sparse, not dense like the one
shown here.
""";
|
"""
We see many sharp peaks in the training set, but these peaks are not always present
in the test set, suggesting that they are due to noise. Therefore, ignoring this
noise, we might have expected the neural responses to be a sum of only a few
different filters and only at a few different positions. This would mean that we
would expect the `out_layer` weight matrix to be sparse, not dense like the one
shown here.
"""
|
### do not use these settings and passwords for production!
# these settings are required to connect the postgres-db to metabase
POSTGRES_USER='postgres'
POSTGRES_PASSWORD='1234'
POSTGRES_HOST='postgresdb'
POSTGRES_PORT='5432'
POSTGRES_DB_NAME='postgres'
|
postgres_user = 'postgres'
postgres_password = '1234'
postgres_host = 'postgresdb'
postgres_port = '5432'
postgres_db_name = 'postgres'
|
# # Sunny data
# outFeaturesPath = "models/features_40_sun_only"
# outLabelsPath = "models/labels_sun_only"
# imageFolderName = 'IMG_sun_only'
# features_directory = '../data/'
# labels_file = '../data/driving_log_sun_only.csv'
# modelPath = 'models/MsAutopilot_sun_only.h5'
# NoColumns = 3 # steering value index in csv
# # Foggy data
# outFeaturesPath = "models/features_40_foggy"
# outLabelsPath = "models/labels_foggy"
# imageFolderName = 'IMG_foggy'
# features_directory = '../data/'
# labels_file = '../data/driving_log_foggy.csv'
# modelPath = 'models/MsAutopilot_foggy.h5'
# NoColumns = 6 # steering value index in csv
# # Test data (fog only, no model will be trained, just pickles to extract)
outFeaturesPath = "models/features_40_fog_only"
outLabelsPath = "models/labels_fog_only"
imageFolderName = 'IMG_fog_only'
features_directory = '../data/'
labels_file = '../data/driving_log_fog_only.csv'
NoColumns = 3 # steering value index in csv
modelPathFoggy = 'models/MsAutopilot_foggy.h5'
modelPathSunOnly = 'models/MsAutopilot_sun_only.h5'
|
out_features_path = 'models/features_40_fog_only'
out_labels_path = 'models/labels_fog_only'
image_folder_name = 'IMG_fog_only'
features_directory = '../data/'
labels_file = '../data/driving_log_fog_only.csv'
no_columns = 3
model_path_foggy = 'models/MsAutopilot_foggy.h5'
model_path_sun_only = 'models/MsAutopilot_sun_only.h5'
|
with open('inputs/input2.txt') as fin:
raw = fin.read()
def parse(raw):
start = [(x[:3], int(x[4:])) for x in (raw.split('\n'))]
return start
a = parse(raw)
def part_1(arr):
indices = set()
acc = 0
i = 0
while i < len(arr):
pair = arr[i]
if i in indices:
break
indices.add(i)
if pair[0] == 'acc':
acc += pair[1]
i += 1
elif pair[0] == 'jmp':
i += pair[1]
else:
i += 1
return acc
def not_infinite(arr):
indices = set()
acc = 0
i = 0
while True:
if i in indices:
return 0
indices.add(i)
if i == len(arr):
return acc
pair = arr[i]
if pair[0] == 'acc':
acc += pair[1]
i += 1
elif pair[0] == 'jmp':
i += pair[1]
else:
i += 1
def part_2(arr):
for i, x in enumerate(arr):
if x[0] == 'jmp':
test = arr[:i] + [('nop', x[1])] + arr[i + 1:]
if c := not_infinite(test):
return c
if x[0] == 'nop':
test = arr[:i] + [('jmp', x[1])] + arr[(i + 1):]
if c := not_infinite(test):
return c
print(part_1(a))
print(part_2(a))
|
with open('inputs/input2.txt') as fin:
raw = fin.read()
def parse(raw):
start = [(x[:3], int(x[4:])) for x in raw.split('\n')]
return start
a = parse(raw)
def part_1(arr):
indices = set()
acc = 0
i = 0
while i < len(arr):
pair = arr[i]
if i in indices:
break
indices.add(i)
if pair[0] == 'acc':
acc += pair[1]
i += 1
elif pair[0] == 'jmp':
i += pair[1]
else:
i += 1
return acc
def not_infinite(arr):
indices = set()
acc = 0
i = 0
while True:
if i in indices:
return 0
indices.add(i)
if i == len(arr):
return acc
pair = arr[i]
if pair[0] == 'acc':
acc += pair[1]
i += 1
elif pair[0] == 'jmp':
i += pair[1]
else:
i += 1
def part_2(arr):
for (i, x) in enumerate(arr):
if x[0] == 'jmp':
test = arr[:i] + [('nop', x[1])] + arr[i + 1:]
if (c := not_infinite(test)):
return c
if x[0] == 'nop':
test = arr[:i] + [('jmp', x[1])] + arr[i + 1:]
if (c := not_infinite(test)):
return c
print(part_1(a))
print(part_2(a))
|
################################ A Library of Functions ##################################
##################################################################################################
#simple function which displays a matrix
def matrixDisplay(M):
for i in range(len(M)):
for j in range(len(M[i])):
print((M[i][j]), end = " ")
print()
##################################################################################################
#matrix product
def matrixProduct(L, M):
if len(L[0]) != len(M): #ensuring the plausiblity
print("Matrix multiplication not possible.")
else:
print("Multiplying the two matrices: ")
P=[[0 for i in range(len(M[0]))] for j in range(len(L))] #initializing empty matrix
for i in range(len(L)): #iterating rows
for j in range(len(M[0])): #iterating columns
for k in range(len(M)): #iterating elements and substituing them
P[i][j] = P[i][j] + (L[i][k] * M[k][j])
matrixDisplay(P)
##################################################################################################
#the gauss-jordan elimination code
def gaussj(a, b):
n = len(b) #defining the range through which the loops will run
for k in range(n): #loop to index pivot rows and eliminated columns
#partial pivoting
if abs(a[k][k]) < 1.0e-12:
for i in range(k+1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, n):
a[k][j], a[i][j] = a[i][j], a[k][j] #swapping of rows
b[k], b[i] = b[i], b[k]
break
#division of the pivot row
pivot = a[k][k]
if pivot == 0:
print("There is no unique solution to this system of equations.")
return
for j in range(k, n): #index of columns of the pivot row
a[k][j] /= pivot
b[k] /= pivot
#elimination loop
for i in range(n): #index of subtracted rows
if i == k or a[i][k] == 0: continue
factor = a[i][k]
for j in range(k, n): #index of columns for subtraction
a[i][j] -= factor * a[k][j]
b[i] -= factor * b[k]
print(b)
#################################################################################################
#calculation of determinant using gauss-jordan elimination
def determinant(a):
n = len(a) #defining the range through which the loops will run
if n != len(a[0]): #checking if determinant is possible to calculate
print("The matrix must be a square matrix.")
else:
s = 0
#code to obtain row echelon matrix using partial pivoting
for k in range(n-1):
if abs(a[k][k]) < 1.0e-12:
for i in range(k+1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, n):
a[k][j], a[i][j] = a[i][j], a[k][j] #swapping
s = s + 1 #counting the number of swaps happened
for i in range(k+1, n):
if a[i][k] == 0: continue
factor = a[i][k]/a[k][k]
for j in range(k, n):
a[i][j] = a[i][j] - factor * a[k][j]
d = 1
for i in range(len(a)):
d = d * a[i][i] #enlisting the diagonal elements
d = d*(-1)**s
print(d)
#################################################################################################
#calculating inverse
def inverse(a):
n = len(a) #defining the range through which loops will run
#constructing the n X 2n augmented matrix
P = [[0.0 for i in range(len(a))] for j in range(len(a))]
    for i in range(n): #identity block must match the full size of the matrix, not just 3 x 3
        P[i][i] = 1.0
for i in range(len(a)):
a[i].extend(P[i])
#main loop for gaussian elimination begins here
for k in range(n):
if abs(a[k][k]) < 1.0e-12:
for i in range(k+1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, 2*n):
a[k][j], a[i][j] = a[i][j], a[k][j] #swapping of rows
break
pivot = a[k][k] #defining the pivot
if pivot == 0: #checking if matrix is invertible
print("This matrix is not invertible.")
return
else:
for j in range(k, 2*n): #index of columns of the pivot row
a[k][j] /= pivot
for i in range(n): #index the subtracted rows
if i == k or a[i][k] == 0: continue
factor = a[i][k]
for j in range(k, 2*n): #index the columns for subtraction
a[i][j] -= factor * a[k][j]
for i in range(len(a)): #displaying the matrix
for j in range(n, len(a[0])):
print("{:.2f}".format(a[i][j]), end = " ") #printing upto 2 places in decimal.
print()
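#################################################################################################
#a small illustrative self-test, not part of the original library; note that gaussj and
#determinant modify their arguments in place, hence the row copies
if __name__ == "__main__":
    A = [[2.0, 1.0], [1.0, 3.0]]
    b = [3.0, 5.0]
    gaussj([row[:] for row in A], b[:]) #prints the solution, approximately [0.8, 1.4]
    determinant([row[:] for row in A]) #prints the determinant, 5.0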
|
def matrix_display(M):
for i in range(len(M)):
for j in range(len(M[i])):
print(M[i][j], end=' ')
print()
def matrix_product(L, M):
if len(L[0]) != len(M):
print('Matrix multiplication not possible.')
else:
print('Multiplying the two matrices: ')
        p = [[0 for i in range(len(M[0]))] for j in range(len(L))]
        for i in range(len(L)):
            for j in range(len(M[0])):
                for k in range(len(M)):
                    p[i][j] = p[i][j] + L[i][k] * M[k][j]
        matrix_display(p)
def gaussj(a, b):
n = len(b)
for k in range(n):
if abs(a[k][k]) < 1e-12:
for i in range(k + 1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, n):
(a[k][j], a[i][j]) = (a[i][j], a[k][j])
(b[k], b[i]) = (b[i], b[k])
break
pivot = a[k][k]
if pivot == 0:
print('There is no unique solution to this system of equations.')
return
for j in range(k, n):
a[k][j] /= pivot
b[k] /= pivot
for i in range(n):
if i == k or a[i][k] == 0:
continue
factor = a[i][k]
for j in range(k, n):
a[i][j] -= factor * a[k][j]
b[i] -= factor * b[k]
print(b)
def determinant(a):
n = len(a)
if n != len(a[0]):
print('The matrix must be a square matrix.')
else:
s = 0
for k in range(n - 1):
if abs(a[k][k]) < 1e-12:
for i in range(k + 1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, n):
(a[k][j], a[i][j]) = (a[i][j], a[k][j])
s = s + 1
for i in range(k + 1, n):
if a[i][k] == 0:
continue
factor = a[i][k] / a[k][k]
for j in range(k, n):
a[i][j] = a[i][j] - factor * a[k][j]
d = 1
for i in range(len(a)):
d = d * a[i][i]
d = d * (-1) ** s
print(d)
def inverse(a):
n = len(a)
    p = [[0.0 for i in range(len(a))] for j in range(len(a))]
    for i in range(n):
        p[i][i] = 1.0
    for i in range(len(a)):
        a[i].extend(p[i])
for k in range(n):
if abs(a[k][k]) < 1e-12:
for i in range(k + 1, n):
if abs(a[i][k]) > abs(a[k][k]):
for j in range(k, 2 * n):
(a[k][j], a[i][j]) = (a[i][j], a[k][j])
break
pivot = a[k][k]
if pivot == 0:
print('This matrix is not invertible.')
return
else:
for j in range(k, 2 * n):
a[k][j] /= pivot
for i in range(n):
if i == k or a[i][k] == 0:
continue
factor = a[i][k]
for j in range(k, 2 * n):
a[i][j] -= factor * a[k][j]
for i in range(len(a)):
for j in range(n, len(a[0])):
print('{:.2f}'.format(a[i][j]), end=' ')
print()
|
from typing import Optional
# Minimal stand-in for the TreeNode class that the LeetCode runtime normally provides:
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
def sumNumbers(self, root: Optional[TreeNode]) -> int:
def DFS(root):
if not root: return 0
if not root.left and not root.right : return int(root.val)
if root.left: root.left.val = 10 * root.val + root.left.val
if root.right: root.right.val = 10 * root.val + root.right.val
return DFS(root.left) + DFS(root.right)
return DFS(root)
|
from typing import Optional
# Minimal stand-in for the TreeNode class that the LeetCode runtime normally provides:
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
def sum_numbers(self, root: Optional[TreeNode]) -> int:
def dfs(root):
if not root:
return 0
if not root.left and (not root.right):
return int(root.val)
if root.left:
root.left.val = 10 * root.val + root.left.val
if root.right:
root.right.val = 10 * root.val + root.right.val
return dfs(root.left) + dfs(root.right)
return dfs(root)
|
# Copyright (c) 2010-2022 openpyxl
"""
List of builtin formulae
"""
FORMULAE = ("CUBEKPIMEMBER", "CUBEMEMBER", "CUBEMEMBERPROPERTY", "CUBERANKEDMEMBER", "CUBESET", "CUBESETCOUNT", "CUBEVALUE", "DAVERAGE", "DCOUNT", "DCOUNTA", "DGET", "DMAX", "DMIN", "DPRODUCT", "DSTDEV", "DSTDEVP", "DSUM", "DVAR", "DVARP", "DATE", "DATEDIF", "DATEVALUE", "DAY", "DAYS360", "EDATE", "EOMONTH", "HOUR", "MINUTE", "MONTH", "NETWORKDAYS", "NETWORKDAYS.INTL", "NOW", "SECOND", "TIME", "TIMEVALUE", "TODAY", "WEEKDAY", "WEEKNUM", "WORKDAY ", "WORKDAY.INTL", "YEAR", "YEARFRAC", "BESSELI", "BESSELJ", "BESSELK", "BESSELY", "BIN2DEC", "BIN2HEX", "BIN2OCT", "COMPLEX", "CONVERT", "DEC2BIN", "DEC2HEX", "DEC2OCT", "DELTA", "ERF", "ERFC", "GESTEP", "HEX2BIN", "HEX2DEC", "HEX2OCT", "IMABS", "IMAGINARY", "IMARGUMENT", "IMCONJUGATE", "IMCOS", "IMDIV", "IMEXP", "IMLN", "IMLOG10", "IMLOG2", "IMPOWER", "IMPRODUCT", "IMREAL", "IMSIN", "IMSQRT", "IMSUB", "IMSUM", "OCT2BIN", "OCT2DEC", "OCT2HEX", "ACCRINT", "ACCRINTM", "AMORDEGRC", "AMORLINC", "COUPDAYBS", "COUPDAYS", "COUPDAYSNC", "COUPNCD", "COUPNUM", "COUPPCD", "CUMIPMT", "CUMPRINC", "DB", "DDB", "DISC", "DOLLARDE", "DOLLARFR", "DURATION", "EFFECT", "FV", "FVSCHEDULE", "INTRATE", "IPMT", "IRR", "ISPMT", "MDURATION", "MIRR", "NOMINAL", "NPER", "NPV", "ODDFPRICE", "ODDFYIELD", "ODDLPRICE", "ODDLYIELD", "PMT", "PPMT", "PRICE", "PRICEDISC", "PRICEMAT", "PV", "RATE", "RECEIVED", "SLN", "SYD", "TBILLEQ", "TBILLPRICE", "TBILLYIELD", "VDB", "XIRR", "XNPV", "YIELD", "YIELDDISC", "YIELDMAT", "CELL", "ERROR.TYPE", "INFO", "ISBLANK", "ISERR", "ISERROR", "ISEVEN", "ISLOGICAL", "ISNA", "ISNONTEXT", "ISNUMBER", "ISODD", "ISREF", "ISTEXT", "N", "NA", "TYPE", "AND", "FALSE", "IF", "IFERROR", "NOT", "OR", "TRUE ADDRESS", "AREAS", "CHOOSE", "COLUMN", "COLUMNS", "GETPIVOTDATA", "HLOOKUP", "HYPERLINK", "INDEX", "INDIRECT", "LOOKUP", "MATCH", "OFFSET", "ROW", "ROWS", "RTD", "TRANSPOSE", "VLOOKUP", "ABS", "ACOS", "ACOSH", "ASIN", "ASINH", "ATAN", "ATAN2", "ATANH", "CEILING", "COMBIN", "COS", "COSH", "DEGREES", "ECMA.CEILING", "EVEN", "EXP", "FACT", "FACTDOUBLE", "FLOOR", "GCD", "INT", "ISO.CEILING", "LCM", "LN", "LOG", "LOG10", "MDETERM", "MINVERSE", "MMULT", "MOD", "MROUND", "MULTINOMIAL", "ODD", "PI", "POWER", "PRODUCT", "QUOTIENT", "RADIANS", "RAND", "RANDBETWEEN", "ROMAN", "ROUND", "ROUNDDOWN", "ROUNDUP", "SERIESSUM", "SIGN", "SIN", "SINH", "SQRT", "SQRTPI", "SUBTOTAL", "SUM", "SUMIF", "SUMIFS", "SUMPRODUCT", "SUMSQ", "SUMX2MY2", "SUMX2PY2", "SUMXMY2", "TAN", "TANH", "TRUNC", "AVEDEV", "AVERAGE", "AVERAGEA", "AVERAGEIF", "AVERAGEIFS", "BETADIST", "BETAINV", "BINOMDIST", "CHIDIST", "CHIINV", "CHITEST", "CONFIDENCE", "CORREL", "COUNT", "COUNTA", "COUNTBLANK", "COUNTIF", "COUNTIFS", "COVAR", "CRITBINOM", "DEVSQ", "EXPONDIST", "FDIST", "FINV", "FISHER", "FISHERINV", "FORECAST", "FREQUENCY", "FTEST", "GAMMADIST", "GAMMAINV", "GAMMALN", "GEOMEAN", "GROWTH", "HARMEAN", "HYPGEOMDIST", "INTERCEPT", "KURT", "LARGE", "LINEST", "LOGEST", "LOGINV", "LOGNORMDIST", "MAX", "MAXA", "MEDIAN", "MIN", "MINA", "MODE", "NEGBINOMDIST", "NORMDIST", "NORMINV", "NORMSDIST", "NORMSINV", "PEARSON", "PERCENTILE", "PERCENTRANK", "PERMUT", "POISSON", "PROB", "QUARTILE", "RANK", "RSQ", "SKEW", "SLOPE", "SMALL", "STANDARDIZE", "STDEV STDEVA", "STDEVP", "STDEVPA STEYX", "TDIST", "TINV", "TREND", "TRIMMEAN", "TTEST", "VAR", "VARA", "VARP", "VARPA", "WEIBULL", "ZTEST", "ASC", "BAHTTEXT", "CHAR", "CLEAN", "CODE", "CONCATENATE", "DOLLAR", "EXACT", "FIND", "FINDB", "FIXED", "JIS", "LEFT", "LEFTB", "LEN", "LENB", "LOWER", "MID", "MIDB", "PHONETIC", "PROPER", "REPLACE", "REPLACEB", "REPT", "RIGHT", 
"RIGHTB", "SEARCH", "SEARCHB", "SUBSTITUTE", "T", "TEXT", "TRIM", "UPPER", "VALUE")
FORMULAE = frozenset(FORMULAE)
|
"""
List of builtin formulae
"""
formulae = ('CUBEKPIMEMBER', 'CUBEMEMBER', 'CUBEMEMBERPROPERTY', 'CUBERANKEDMEMBER', 'CUBESET', 'CUBESETCOUNT', 'CUBEVALUE', 'DAVERAGE', 'DCOUNT', 'DCOUNTA', 'DGET', 'DMAX', 'DMIN', 'DPRODUCT', 'DSTDEV', 'DSTDEVP', 'DSUM', 'DVAR', 'DVARP', 'DATE', 'DATEDIF', 'DATEVALUE', 'DAY', 'DAYS360', 'EDATE', 'EOMONTH', 'HOUR', 'MINUTE', 'MONTH', 'NETWORKDAYS', 'NETWORKDAYS.INTL', 'NOW', 'SECOND', 'TIME', 'TIMEVALUE', 'TODAY', 'WEEKDAY', 'WEEKNUM', 'WORKDAY ', 'WORKDAY.INTL', 'YEAR', 'YEARFRAC', 'BESSELI', 'BESSELJ', 'BESSELK', 'BESSELY', 'BIN2DEC', 'BIN2HEX', 'BIN2OCT', 'COMPLEX', 'CONVERT', 'DEC2BIN', 'DEC2HEX', 'DEC2OCT', 'DELTA', 'ERF', 'ERFC', 'GESTEP', 'HEX2BIN', 'HEX2DEC', 'HEX2OCT', 'IMABS', 'IMAGINARY', 'IMARGUMENT', 'IMCONJUGATE', 'IMCOS', 'IMDIV', 'IMEXP', 'IMLN', 'IMLOG10', 'IMLOG2', 'IMPOWER', 'IMPRODUCT', 'IMREAL', 'IMSIN', 'IMSQRT', 'IMSUB', 'IMSUM', 'OCT2BIN', 'OCT2DEC', 'OCT2HEX', 'ACCRINT', 'ACCRINTM', 'AMORDEGRC', 'AMORLINC', 'COUPDAYBS', 'COUPDAYS', 'COUPDAYSNC', 'COUPNCD', 'COUPNUM', 'COUPPCD', 'CUMIPMT', 'CUMPRINC', 'DB', 'DDB', 'DISC', 'DOLLARDE', 'DOLLARFR', 'DURATION', 'EFFECT', 'FV', 'FVSCHEDULE', 'INTRATE', 'IPMT', 'IRR', 'ISPMT', 'MDURATION', 'MIRR', 'NOMINAL', 'NPER', 'NPV', 'ODDFPRICE', 'ODDFYIELD', 'ODDLPRICE', 'ODDLYIELD', 'PMT', 'PPMT', 'PRICE', 'PRICEDISC', 'PRICEMAT', 'PV', 'RATE', 'RECEIVED', 'SLN', 'SYD', 'TBILLEQ', 'TBILLPRICE', 'TBILLYIELD', 'VDB', 'XIRR', 'XNPV', 'YIELD', 'YIELDDISC', 'YIELDMAT', 'CELL', 'ERROR.TYPE', 'INFO', 'ISBLANK', 'ISERR', 'ISERROR', 'ISEVEN', 'ISLOGICAL', 'ISNA', 'ISNONTEXT', 'ISNUMBER', 'ISODD', 'ISREF', 'ISTEXT', 'N', 'NA', 'TYPE', 'AND', 'FALSE', 'IF', 'IFERROR', 'NOT', 'OR', 'TRUE ADDRESS', 'AREAS', 'CHOOSE', 'COLUMN', 'COLUMNS', 'GETPIVOTDATA', 'HLOOKUP', 'HYPERLINK', 'INDEX', 'INDIRECT', 'LOOKUP', 'MATCH', 'OFFSET', 'ROW', 'ROWS', 'RTD', 'TRANSPOSE', 'VLOOKUP', 'ABS', 'ACOS', 'ACOSH', 'ASIN', 'ASINH', 'ATAN', 'ATAN2', 'ATANH', 'CEILING', 'COMBIN', 'COS', 'COSH', 'DEGREES', 'ECMA.CEILING', 'EVEN', 'EXP', 'FACT', 'FACTDOUBLE', 'FLOOR', 'GCD', 'INT', 'ISO.CEILING', 'LCM', 'LN', 'LOG', 'LOG10', 'MDETERM', 'MINVERSE', 'MMULT', 'MOD', 'MROUND', 'MULTINOMIAL', 'ODD', 'PI', 'POWER', 'PRODUCT', 'QUOTIENT', 'RADIANS', 'RAND', 'RANDBETWEEN', 'ROMAN', 'ROUND', 'ROUNDDOWN', 'ROUNDUP', 'SERIESSUM', 'SIGN', 'SIN', 'SINH', 'SQRT', 'SQRTPI', 'SUBTOTAL', 'SUM', 'SUMIF', 'SUMIFS', 'SUMPRODUCT', 'SUMSQ', 'SUMX2MY2', 'SUMX2PY2', 'SUMXMY2', 'TAN', 'TANH', 'TRUNC', 'AVEDEV', 'AVERAGE', 'AVERAGEA', 'AVERAGEIF', 'AVERAGEIFS', 'BETADIST', 'BETAINV', 'BINOMDIST', 'CHIDIST', 'CHIINV', 'CHITEST', 'CONFIDENCE', 'CORREL', 'COUNT', 'COUNTA', 'COUNTBLANK', 'COUNTIF', 'COUNTIFS', 'COVAR', 'CRITBINOM', 'DEVSQ', 'EXPONDIST', 'FDIST', 'FINV', 'FISHER', 'FISHERINV', 'FORECAST', 'FREQUENCY', 'FTEST', 'GAMMADIST', 'GAMMAINV', 'GAMMALN', 'GEOMEAN', 'GROWTH', 'HARMEAN', 'HYPGEOMDIST', 'INTERCEPT', 'KURT', 'LARGE', 'LINEST', 'LOGEST', 'LOGINV', 'LOGNORMDIST', 'MAX', 'MAXA', 'MEDIAN', 'MIN', 'MINA', 'MODE', 'NEGBINOMDIST', 'NORMDIST', 'NORMINV', 'NORMSDIST', 'NORMSINV', 'PEARSON', 'PERCENTILE', 'PERCENTRANK', 'PERMUT', 'POISSON', 'PROB', 'QUARTILE', 'RANK', 'RSQ', 'SKEW', 'SLOPE', 'SMALL', 'STANDARDIZE', 'STDEV STDEVA', 'STDEVP', 'STDEVPA STEYX', 'TDIST', 'TINV', 'TREND', 'TRIMMEAN', 'TTEST', 'VAR', 'VARA', 'VARP', 'VARPA', 'WEIBULL', 'ZTEST', 'ASC', 'BAHTTEXT', 'CHAR', 'CLEAN', 'CODE', 'CONCATENATE', 'DOLLAR', 'EXACT', 'FIND', 'FINDB', 'FIXED', 'JIS', 'LEFT', 'LEFTB', 'LEN', 'LENB', 'LOWER', 'MID', 'MIDB', 'PHONETIC', 'PROPER', 'REPLACE', 'REPLACEB', 'REPT', 'RIGHT', 
'RIGHTB', 'SEARCH', 'SEARCHB', 'SUBSTITUTE', 'T', 'TEXT', 'TRIM', 'UPPER', 'VALUE')
formulae = frozenset(formulae)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
DOCUMENTATION = r"""
---
module: network_device
short_description: Resource module for Network Device
description:
- Manage operations create, update and delete of the resource Network Device.
version_added: '1.0.0'
author: Rafael Campos (@racampos)
options:
NetworkDeviceGroupList:
description: List of Network Device Group names for this node.
elements: str
type: list
NetworkDeviceIPList:
description: List of IP Subnets for this node.
suboptions:
getIpaddressExclude:
description: It can be either single IP address or IP range address.
type: str
ipaddress:
description: Network Device's ipaddress.
type: str
mask:
description: Network Device's mask.
type: int
type: list
authenticationSettings:
description: Network Device's authenticationSettings.
suboptions:
dtlsRequired:
description: This value enforces use of dtls.
type: bool
enableKeyWrap:
description: EnableKeyWrap flag.
type: bool
enableMultiSecret:
description: EnableMultiSecret flag.
type: bool
enabled:
description: Enabled flag.
type: bool
keyEncryptionKey:
description: Network Device's keyEncryptionKey.
type: str
keyInputFormat:
description: Allowed values - ASCII, - HEXADECIMAL.
type: str
messageAuthenticatorCodeKey:
description: Network Device's messageAuthenticatorCodeKey.
type: str
networkProtocol:
description: Allowed values - RADIUS, - TACACS_PLUS.
type: str
radiusSharedSecret:
description: Network Device's radiusSharedSecret.
type: str
secondRadiusSharedSecret:
description: Network Device's secondRadiusSharedSecret.
type: str
type: dict
coaPort:
description: Network Device's coaPort.
type: int
description:
description: Network Device's description.
type: str
dtlsDnsName:
description: This value is used to verify the client identity contained in the X.509
RADIUS/DTLS client certificate.
type: str
id:
description: Network Device's id.
type: str
modelName:
description: Network Device's modelName.
type: str
name:
description: Network Device's name.
type: str
profileName:
description: Network Device's profileName.
type: str
snmpsettings:
description: Network Device's snmpsettings.
suboptions:
linkTrapQuery:
description: LinkTrapQuery flag.
type: bool
macTrapQuery:
description: MacTrapQuery flag.
type: bool
originatingPolicyServicesNode:
description: Network Device's originatingPolicyServicesNode.
type: str
pollingInterval:
description: Network Device's pollingInterval.
type: int
roCommunity:
description: Network Device's roCommunity.
type: str
version:
description: Network Device's version.
type: str
type: dict
softwareVersion:
description: Network Device's softwareVersion.
type: str
tacacsSettings:
description: Network Device's tacacsSettings.
suboptions:
connectModeOptions:
description: Allowed values - OFF, - ON_LEGACY, - ON_DRAFT_COMPLIANT.
type: str
sharedSecret:
description: Network Device's sharedSecret.
type: str
type: dict
trustsecsettings:
description: Network Device's trustsecsettings.
suboptions:
deviceAuthenticationSettings:
description: Network Device's deviceAuthenticationSettings.
suboptions:
sgaDeviceId:
description: Network Device's sgaDeviceId.
type: str
sgaDevicePassword:
description: Network Device's sgaDevicePassword.
type: str
type: dict
deviceConfigurationDeployment:
description: Network Device's deviceConfigurationDeployment.
suboptions:
enableModePassword:
description: Network Device's enableModePassword.
type: str
execModePassword:
description: Network Device's execModePassword.
type: str
execModeUsername:
description: Network Device's execModeUsername.
type: str
includeWhenDeployingSGTUpdates:
description: IncludeWhenDeployingSGTUpdates flag.
type: bool
type: dict
pushIdSupport:
description: PushIdSupport flag.
type: bool
sgaNotificationAndUpdates:
description: Network Device's sgaNotificationAndUpdates.
suboptions:
coaSourceHost:
description: Network Device's coaSourceHost.
type: str
downlaodEnvironmentDataEveryXSeconds:
description: Network Device's downlaodEnvironmentDataEveryXSeconds.
type: int
downlaodPeerAuthorizationPolicyEveryXSeconds:
description: Network Device's downlaodPeerAuthorizationPolicyEveryXSeconds.
type: int
downloadSGACLListsEveryXSeconds:
description: Network Device's downloadSGACLListsEveryXSeconds.
type: int
otherSGADevicesToTrustThisDevice:
description: OtherSGADevicesToTrustThisDevice flag.
type: bool
reAuthenticationEveryXSeconds:
description: Network Device's reAuthenticationEveryXSeconds.
type: int
sendConfigurationToDevice:
description: SendConfigurationToDevice flag.
type: bool
sendConfigurationToDeviceUsing:
description: Allowed values - ENABLE_USING_COA, - ENABLE_USING_CLI, - DISABLE_ALL.
type: str
type: dict
type: dict
requirements:
- ciscoisesdk
seealso:
# Reference by Internet resource
- name: Network Device reference
description: Complete reference of the Network Device object model.
link: https://ciscoisesdk.readthedocs.io/en/latest/api/api.html#v3-0-0-summary
"""
EXAMPLES = r"""
- name: Update by name
cisco.ise.network_device:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
NetworkDeviceGroupList:
- string
NetworkDeviceIPList:
- getIpaddressExclude: string
ipaddress: string
mask: 0
authenticationSettings:
dtlsRequired: true
enableKeyWrap: true
enableMultiSecret: true
enabled: true
keyEncryptionKey: string
keyInputFormat: string
messageAuthenticatorCodeKey: string
networkProtocol: string
radiusSharedSecret: string
secondRadiusSharedSecret: string
coaPort: 0
description: string
dtlsDnsName: string
id: string
modelName: string
name: string
profileName: string
snmpsettings:
linkTrapQuery: true
macTrapQuery: true
originatingPolicyServicesNode: string
pollingInterval: 0
roCommunity: string
version: string
softwareVersion: string
tacacsSettings:
connectModeOptions: string
sharedSecret: string
trustsecsettings:
deviceAuthenticationSettings:
sgaDeviceId: string
sgaDevicePassword: string
deviceConfigurationDeployment:
enableModePassword: string
execModePassword: string
execModeUsername: string
includeWhenDeployingSGTUpdates: true
pushIdSupport: true
sgaNotificationAndUpdates:
coaSourceHost: string
downlaodEnvironmentDataEveryXSeconds: 0
downlaodPeerAuthorizationPolicyEveryXSeconds: 0
downloadSGACLListsEveryXSeconds: 0
otherSGADevicesToTrustThisDevice: true
reAuthenticationEveryXSeconds: 0
sendConfigurationToDevice: true
sendConfigurationToDeviceUsing: string
- name: Delete by name
cisco.ise.network_device:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: absent
name: string
- name: Update by id
cisco.ise.network_device:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
NetworkDeviceGroupList:
- string
NetworkDeviceIPList:
- getIpaddressExclude: string
ipaddress: string
mask: 0
authenticationSettings:
dtlsRequired: true
enableKeyWrap: true
enableMultiSecret: true
enabled: true
keyEncryptionKey: string
keyInputFormat: string
messageAuthenticatorCodeKey: string
networkProtocol: string
radiusSharedSecret: string
secondRadiusSharedSecret: string
coaPort: 0
description: string
dtlsDnsName: string
id: string
modelName: string
name: string
profileName: string
snmpsettings:
linkTrapQuery: true
macTrapQuery: true
originatingPolicyServicesNode: string
pollingInterval: 0
roCommunity: string
version: string
softwareVersion: string
tacacsSettings:
connectModeOptions: string
sharedSecret: string
trustsecsettings:
deviceAuthenticationSettings:
sgaDeviceId: string
sgaDevicePassword: string
deviceConfigurationDeployment:
enableModePassword: string
execModePassword: string
execModeUsername: string
includeWhenDeployingSGTUpdates: true
pushIdSupport: true
sgaNotificationAndUpdates:
coaSourceHost: string
downlaodEnvironmentDataEveryXSeconds: 0
downlaodPeerAuthorizationPolicyEveryXSeconds: 0
downloadSGACLListsEveryXSeconds: 0
otherSGADevicesToTrustThisDevice: true
reAuthenticationEveryXSeconds: 0
sendConfigurationToDevice: true
sendConfigurationToDeviceUsing: string
- name: Delete by id
cisco.ise.network_device:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: absent
id: string
- name: Create
cisco.ise.network_device:
ise_hostname: "{{ise_hostname}}"
ise_username: "{{ise_username}}"
ise_password: "{{ise_password}}"
ise_verify: "{{ise_verify}}"
state: present
NetworkDeviceGroupList:
- string
NetworkDeviceIPList:
- getIpaddressExclude: string
ipaddress: string
mask: 0
authenticationSettings:
dtlsRequired: true
enableKeyWrap: true
enableMultiSecret: true
enabled: true
keyEncryptionKey: string
keyInputFormat: string
messageAuthenticatorCodeKey: string
networkProtocol: string
radiusSharedSecret: string
secondRadiusSharedSecret: string
coaPort: 0
description: string
dtlsDnsName: string
modelName: string
name: string
profileName: string
snmpsettings:
linkTrapQuery: true
macTrapQuery: true
originatingPolicyServicesNode: string
pollingInterval: 0
roCommunity: string
version: string
softwareVersion: string
tacacsSettings:
connectModeOptions: string
sharedSecret: string
trustsecsettings:
deviceAuthenticationSettings:
sgaDeviceId: string
sgaDevicePassword: string
deviceConfigurationDeployment:
enableModePassword: string
execModePassword: string
execModeUsername: string
includeWhenDeployingSGTUpdates: true
pushIdSupport: true
sgaNotificationAndUpdates:
coaSourceHost: string
downlaodEnvironmentDataEveryXSeconds: 0
downlaodPeerAuthorizationPolicyEveryXSeconds: 0
downloadSGACLListsEveryXSeconds: 0
otherSGADevicesToTrustThisDevice: true
reAuthenticationEveryXSeconds: 0
sendConfigurationToDevice: true
sendConfigurationToDeviceUsing: string
"""
RETURN = r"""
ise_response:
description: A dictionary or list with the response returned by the Cisco ISE Python SDK
returned: always
type: dict
sample: >
{
"UpdatedFieldsList": {
"updatedField": {
"field": "string",
"oldValue": "string",
"newValue": "string"
},
"field": "string",
"oldValue": "string",
"newValue": "string"
}
}
"""
|
documentation = "\n---\nmodule: network_device\nshort_description: Resource module for Network Device\ndescription:\n- Manage operations create, update and delete of the resource Network Device.\nversion_added: '1.0.0'\nauthor: Rafael Campos (@racampos)\noptions:\n NetworkDeviceGroupList:\n description: List of Network Device Group names for this node.\n elements: str\n type: list\n NetworkDeviceIPList:\n description: List of IP Subnets for this node.\n suboptions:\n getIpaddressExclude:\n description: It can be either single IP address or IP range address.\n type: str\n ipaddress:\n description: Network Device's ipaddress.\n type: str\n mask:\n description: Network Device's mask.\n type: int\n type: list\n authenticationSettings:\n description: Network Device's authenticationSettings.\n suboptions:\n dtlsRequired:\n description: This value enforces use of dtls.\n type: bool\n enableKeyWrap:\n description: EnableKeyWrap flag.\n type: bool\n enableMultiSecret:\n description: EnableMultiSecret flag.\n type: bool\n enabled:\n description: Enabled flag.\n type: bool\n keyEncryptionKey:\n description: Network Device's keyEncryptionKey.\n type: str\n keyInputFormat:\n description: Allowed values - ASCII, - HEXADECIMAL.\n type: str\n messageAuthenticatorCodeKey:\n description: Network Device's messageAuthenticatorCodeKey.\n type: str\n networkProtocol:\n description: Allowed values - RADIUS, - TACACS_PLUS.\n type: str\n radiusSharedSecret:\n description: Network Device's radiusSharedSecret.\n type: str\n secondRadiusSharedSecret:\n description: Network Device's secondRadiusSharedSecret.\n type: str\n type: dict\n coaPort:\n description: Network Device's coaPort.\n type: int\n description:\n description: Network Device's description.\n type: str\n dtlsDnsName:\n description: This value is used to verify the client identity contained in the X.509\n RADIUS/DTLS client certificate.\n type: str\n id:\n description: Network Device's id.\n type: str\n modelName:\n description: Network Device's modelName.\n type: str\n name:\n description: Network Device's name.\n type: str\n profileName:\n description: Network Device's profileName.\n type: str\n snmpsettings:\n description: Network Device's snmpsettings.\n suboptions:\n linkTrapQuery:\n description: LinkTrapQuery flag.\n type: bool\n macTrapQuery:\n description: MacTrapQuery flag.\n type: bool\n originatingPolicyServicesNode:\n description: Network Device's originatingPolicyServicesNode.\n type: str\n pollingInterval:\n description: Network Device's pollingInterval.\n type: int\n roCommunity:\n description: Network Device's roCommunity.\n type: str\n version:\n description: Network Device's version.\n type: str\n type: dict\n softwareVersion:\n description: Network Device's softwareVersion.\n type: str\n tacacsSettings:\n description: Network Device's tacacsSettings.\n suboptions:\n connectModeOptions:\n description: Allowed values - OFF, - ON_LEGACY, - ON_DRAFT_COMPLIANT.\n type: str\n sharedSecret:\n description: Network Device's sharedSecret.\n type: str\n type: dict\n trustsecsettings:\n description: Network Device's trustsecsettings.\n suboptions:\n deviceAuthenticationSettings:\n description: Network Device's deviceAuthenticationSettings.\n suboptions:\n sgaDeviceId:\n description: Network Device's sgaDeviceId.\n type: str\n sgaDevicePassword:\n description: Network Device's sgaDevicePassword.\n type: str\n type: dict\n deviceConfigurationDeployment:\n description: Network Device's deviceConfigurationDeployment.\n suboptions:\n 
enableModePassword:\n description: Network Device's enableModePassword.\n type: str\n execModePassword:\n description: Network Device's execModePassword.\n type: str\n execModeUsername:\n description: Network Device's execModeUsername.\n type: str\n includeWhenDeployingSGTUpdates:\n description: IncludeWhenDeployingSGTUpdates flag.\n type: bool\n type: dict\n pushIdSupport:\n description: PushIdSupport flag.\n type: bool\n sgaNotificationAndUpdates:\n description: Network Device's sgaNotificationAndUpdates.\n suboptions:\n coaSourceHost:\n description: Network Device's coaSourceHost.\n type: str\n downlaodEnvironmentDataEveryXSeconds:\n description: Network Device's downlaodEnvironmentDataEveryXSeconds.\n type: int\n downlaodPeerAuthorizationPolicyEveryXSeconds:\n description: Network Device's downlaodPeerAuthorizationPolicyEveryXSeconds.\n type: int\n downloadSGACLListsEveryXSeconds:\n description: Network Device's downloadSGACLListsEveryXSeconds.\n type: int\n otherSGADevicesToTrustThisDevice:\n description: OtherSGADevicesToTrustThisDevice flag.\n type: bool\n reAuthenticationEveryXSeconds:\n description: Network Device's reAuthenticationEveryXSeconds.\n type: int\n sendConfigurationToDevice:\n description: SendConfigurationToDevice flag.\n type: bool\n sendConfigurationToDeviceUsing:\n description: Allowed values - ENABLE_USING_COA, - ENABLE_USING_CLI, - DISABLE_ALL.\n type: str\n type: dict\n type: dict\nrequirements:\n- ciscoisesdk\nseealso:\n# Reference by Internet resource\n- name: Network Device reference\n description: Complete reference of the Network Device object model.\n link: https://ciscoisesdk.readthedocs.io/en/latest/api/api.html#v3-0-0-summary\n"
examples = '\n- name: Update by name\n cisco.ise.network_device:\n ise_hostname: "{{ise_hostname}}"\n ise_username: "{{ise_username}}"\n ise_password: "{{ise_password}}"\n ise_verify: "{{ise_verify}}"\n state: present\n NetworkDeviceGroupList:\n - string\n NetworkDeviceIPList:\n - getIpaddressExclude: string\n ipaddress: string\n mask: 0\n authenticationSettings:\n dtlsRequired: true\n enableKeyWrap: true\n enableMultiSecret: true\n enabled: true\n keyEncryptionKey: string\n keyInputFormat: string\n messageAuthenticatorCodeKey: string\n networkProtocol: string\n radiusSharedSecret: string\n secondRadiusSharedSecret: string\n coaPort: 0\n description: string\n dtlsDnsName: string\n id: string\n modelName: string\n name: string\n profileName: string\n snmpsettings:\n linkTrapQuery: true\n macTrapQuery: true\n originatingPolicyServicesNode: string\n pollingInterval: 0\n roCommunity: string\n version: string\n softwareVersion: string\n tacacsSettings:\n connectModeOptions: string\n sharedSecret: string\n trustsecsettings:\n deviceAuthenticationSettings:\n sgaDeviceId: string\n sgaDevicePassword: string\n deviceConfigurationDeployment:\n enableModePassword: string\n execModePassword: string\n execModeUsername: string\n includeWhenDeployingSGTUpdates: true\n pushIdSupport: true\n sgaNotificationAndUpdates:\n coaSourceHost: string\n downlaodEnvironmentDataEveryXSeconds: 0\n downlaodPeerAuthorizationPolicyEveryXSeconds: 0\n downloadSGACLListsEveryXSeconds: 0\n otherSGADevicesToTrustThisDevice: true\n reAuthenticationEveryXSeconds: 0\n sendConfigurationToDevice: true\n sendConfigurationToDeviceUsing: string\n\n- name: Delete by name\n cisco.ise.network_device:\n ise_hostname: "{{ise_hostname}}"\n ise_username: "{{ise_username}}"\n ise_password: "{{ise_password}}"\n ise_verify: "{{ise_verify}}"\n state: absent\n name: string\n\n- name: Update by id\n cisco.ise.network_device:\n ise_hostname: "{{ise_hostname}}"\n ise_username: "{{ise_username}}"\n ise_password: "{{ise_password}}"\n ise_verify: "{{ise_verify}}"\n state: present\n NetworkDeviceGroupList:\n - string\n NetworkDeviceIPList:\n - getIpaddressExclude: string\n ipaddress: string\n mask: 0\n authenticationSettings:\n dtlsRequired: true\n enableKeyWrap: true\n enableMultiSecret: true\n enabled: true\n keyEncryptionKey: string\n keyInputFormat: string\n messageAuthenticatorCodeKey: string\n networkProtocol: string\n radiusSharedSecret: string\n secondRadiusSharedSecret: string\n coaPort: 0\n description: string\n dtlsDnsName: string\n id: string\n modelName: string\n name: string\n profileName: string\n snmpsettings:\n linkTrapQuery: true\n macTrapQuery: true\n originatingPolicyServicesNode: string\n pollingInterval: 0\n roCommunity: string\n version: string\n softwareVersion: string\n tacacsSettings:\n connectModeOptions: string\n sharedSecret: string\n trustsecsettings:\n deviceAuthenticationSettings:\n sgaDeviceId: string\n sgaDevicePassword: string\n deviceConfigurationDeployment:\n enableModePassword: string\n execModePassword: string\n execModeUsername: string\n includeWhenDeployingSGTUpdates: true\n pushIdSupport: true\n sgaNotificationAndUpdates:\n coaSourceHost: string\n downlaodEnvironmentDataEveryXSeconds: 0\n downlaodPeerAuthorizationPolicyEveryXSeconds: 0\n downloadSGACLListsEveryXSeconds: 0\n otherSGADevicesToTrustThisDevice: true\n reAuthenticationEveryXSeconds: 0\n sendConfigurationToDevice: true\n sendConfigurationToDeviceUsing: string\n\n- name: Delete by id\n cisco.ise.network_device:\n ise_hostname: "{{ise_hostname}}"\n 
ise_username: "{{ise_username}}"\n ise_password: "{{ise_password}}"\n ise_verify: "{{ise_verify}}"\n state: absent\n id: string\n\n- name: Create\n cisco.ise.network_device:\n ise_hostname: "{{ise_hostname}}"\n ise_username: "{{ise_username}}"\n ise_password: "{{ise_password}}"\n ise_verify: "{{ise_verify}}"\n state: present\n NetworkDeviceGroupList:\n - string\n NetworkDeviceIPList:\n - getIpaddressExclude: string\n ipaddress: string\n mask: 0\n authenticationSettings:\n dtlsRequired: true\n enableKeyWrap: true\n enableMultiSecret: true\n enabled: true\n keyEncryptionKey: string\n keyInputFormat: string\n messageAuthenticatorCodeKey: string\n networkProtocol: string\n radiusSharedSecret: string\n secondRadiusSharedSecret: string\n coaPort: 0\n description: string\n dtlsDnsName: string\n modelName: string\n name: string\n profileName: string\n snmpsettings:\n linkTrapQuery: true\n macTrapQuery: true\n originatingPolicyServicesNode: string\n pollingInterval: 0\n roCommunity: string\n version: string\n softwareVersion: string\n tacacsSettings:\n connectModeOptions: string\n sharedSecret: string\n trustsecsettings:\n deviceAuthenticationSettings:\n sgaDeviceId: string\n sgaDevicePassword: string\n deviceConfigurationDeployment:\n enableModePassword: string\n execModePassword: string\n execModeUsername: string\n includeWhenDeployingSGTUpdates: true\n pushIdSupport: true\n sgaNotificationAndUpdates:\n coaSourceHost: string\n downlaodEnvironmentDataEveryXSeconds: 0\n downlaodPeerAuthorizationPolicyEveryXSeconds: 0\n downloadSGACLListsEveryXSeconds: 0\n otherSGADevicesToTrustThisDevice: true\n reAuthenticationEveryXSeconds: 0\n sendConfigurationToDevice: true\n sendConfigurationToDeviceUsing: string\n\n'
RETURN = '\nise_response:\n  description: A dictionary or list with the response returned by the Cisco ISE Python SDK\n  returned: always\n  type: dict\n  sample: >\n    {\n      "UpdatedFieldsList": {\n        "updatedField": {\n          "field": "string",\n          "oldValue": "string",\n          "newValue": "string"\n        },\n        "field": "string",\n        "oldValue": "string",\n        "newValue": "string"\n      }\n    }\n'
|
class OrderCodeAlreadyExists(Exception):
pass
class DealerDoesNotExist(Exception):
pass
class OrderDoesNotExist(Exception):
pass
class StatusNotAllowed(Exception):
pass
|
class Ordercodealreadyexists(Exception):
pass
class Dealerdoesnotexist(Exception):
pass
class Orderdoesnotexist(Exception):
pass
class Statusnotallowed(Exception):
pass
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017-18 Richard Hull and contributors
# See LICENSE.rst for details.
_DIGITS = {
' ': 0x00,
'-': 0x01,
'_': 0x08,
'\'': 0x02,
'0': 0x7e,
'1': 0x30,
'2': 0x6d,
'3': 0x79,
'4': 0x33,
'5': 0x5b,
'6': 0x5f,
'7': 0x70,
'8': 0x7f,
'9': 0x7b,
'a': 0x7d,
'b': 0x1f,
'c': 0x0d,
'd': 0x3d,
'e': 0x6f,
'f': 0x47,
'g': 0x7b,
'h': 0x17,
'i': 0x10,
'j': 0x18,
# 'k': cant represent
'l': 0x06,
# 'm': cant represent
'n': 0x15,
'o': 0x1d,
'p': 0x67,
'q': 0x73,
'r': 0x05,
's': 0x5b,
't': 0x0f,
'u': 0x1c,
'v': 0x1c,
# 'w': cant represent
# 'x': cant represent
'y': 0x3b,
'z': 0x6d,
'A': 0x77,
'B': 0x7f,
'C': 0x4e,
'D': 0x7e,
'E': 0x4f,
'F': 0x47,
'G': 0x5e,
'H': 0x37,
'I': 0x30,
'J': 0x38,
# 'K': cant represent
'L': 0x0e,
# 'M': cant represent
'N': 0x76,
'O': 0x7e,
'P': 0x67,
'Q': 0x73,
'R': 0x46,
'S': 0x5b,
'T': 0x0f,
'U': 0x3e,
'V': 0x3e,
# 'W': cant represent
# 'X': cant represent
'Y': 0x3b,
'Z': 0x6d,
',': 0x80,
'.': 0x80
}
def regular(text, notfound="_"):
try:
undefined = _DIGITS[notfound]
iterator = iter(text)
while True:
char = next(iterator)
yield _DIGITS.get(char, undefined)
except StopIteration:
pass
def dot_muncher(text, notfound="_"):
undefined = _DIGITS[notfound]
iterator = iter(text)
last = _DIGITS.get(next(iterator), undefined)
try:
while True:
curr = _DIGITS.get(next(iterator), undefined)
if curr == 0x80:
yield curr | last
elif last != 0x80:
yield last
last = curr
except StopIteration:
if last != 0x80:
yield last
|
_digits = {' ': 0, '-': 1, '_': 8, "'": 2, '0': 126, '1': 48, '2': 109, '3': 121, '4': 51, '5': 91, '6': 95, '7': 112, '8': 127, '9': 123, 'a': 125, 'b': 31, 'c': 13, 'd': 61, 'e': 111, 'f': 71, 'g': 123, 'h': 23, 'i': 16, 'j': 24, 'l': 6, 'n': 21, 'o': 29, 'p': 103, 'q': 115, 'r': 5, 's': 91, 't': 15, 'u': 28, 'v': 28, 'y': 59, 'z': 109, 'A': 119, 'B': 127, 'C': 78, 'D': 126, 'E': 79, 'F': 71, 'G': 94, 'H': 55, 'I': 48, 'J': 56, 'L': 14, 'N': 118, 'O': 126, 'P': 103, 'Q': 115, 'R': 70, 'S': 91, 'T': 15, 'U': 62, 'V': 62, 'Y': 59, 'Z': 109, ',': 128, '.': 128}
def regular(text, notfound='_'):
try:
        undefined = _digits[notfound]
        iterator = iter(text)
        while True:
            char = next(iterator)
            yield _digits.get(char, undefined)
except StopIteration:
pass
def dot_muncher(text, notfound='_'):
    undefined = _digits[notfound]
    iterator = iter(text)
    last = _digits.get(next(iterator), undefined)
    try:
        while True:
            curr = _digits.get(next(iterator), undefined)
if curr == 128:
yield (curr | last)
elif last != 128:
yield last
last = curr
except StopIteration:
if last != 128:
yield last
|
"""Hex Grid, by Al Sweigart al@inventwithpython.com
Displays a simple tessellation of a hexagon grid.
This and other games are available at https://nostarch.com/XX
Tags: tiny, beginner, artistic"""
__version__ = 0
# Set up the constants:
# (!) Try changing these values to other numbers:
X_REPEAT = 19 # How many times to tessellate horizontally.
Y_REPEAT = 12 # How many times to tessellate vertically.
for y in range(Y_REPEAT):
# Display the top half of the hexagon:
for x in range(X_REPEAT):
print(r'/ \_', end='')
print()
# Display the bottom half of the hexagon:
for x in range(X_REPEAT):
print(r'\_/ ', end='')
print()
|
"""Hex Grid, by Al Sweigart al@inventwithpython.com
Displays a simple tessellation of a hexagon grid.
This and other games are available at https://nostarch.com/XX
Tags: tiny, beginner, artistic"""
__version__ = 0
x_repeat = 19
y_repeat = 12
for y in range(y_repeat):
    for x in range(x_repeat):
print('/ \\_', end='')
print()
    for x in range(x_repeat):
print('\\_/ ', end='')
print()
|
class FlameTextEdit(QtWidgets.QTextEdit):
"""
Custom Qt Flame Text Edit Widget
To use:
text_edit = FlameTextEdit('some_text_here', True_or_False, window)
"""
def __init__(self, text, read_only, parent_window, *args, **kwargs):
super(FlameTextEdit, self).__init__(*args, **kwargs)
self.setMinimumHeight(50)
self.setMinimumWidth(150)
self.setReadOnly(read_only)
if read_only:
self.setStyleSheet('color: #9a9a9a; selection-color: #262626; selection-background-color: #b8b1a7; border: 1px inset #404040; font: 14px "Discreet"')
else:
self.setStyleSheet('QTextEdit {color: #9a9a9a; background-color: #373e47; selection-color: #262626; selection-background-color: #b8b1a7; border: 1px inset #404040; font: 14px "Discreet"}'
'QTextEdit:focus {background-color: #474e58}')
self.verticalScrollBar().setStyleSheet('color: #818181; background-color: #313131')
self.horizontalScrollBar().setStyleSheet('color: #818181; background-color: #313131')
|
class Flametextedit(QtWidgets.QTextEdit):
"""
Custom Qt Flame Text Edit Widget
To use:
text_edit = FlameTextEdit('some_text_here', True_or_False, window)
"""
def __init__(self, text, read_only, parent_window, *args, **kwargs):
        super(Flametextedit, self).__init__(*args, **kwargs)
self.setMinimumHeight(50)
self.setMinimumWidth(150)
self.setReadOnly(read_only)
if read_only:
self.setStyleSheet('color: #9a9a9a; selection-color: #262626; selection-background-color: #b8b1a7; border: 1px inset #404040; font: 14px "Discreet"')
else:
self.setStyleSheet('QTextEdit {color: #9a9a9a; background-color: #373e47; selection-color: #262626; selection-background-color: #b8b1a7; border: 1px inset #404040; font: 14px "Discreet"}QTextEdit:focus {background-color: #474e58}')
self.verticalScrollBar().setStyleSheet('color: #818181; background-color: #313131')
self.horizontalScrollBar().setStyleSheet('color: #818181; background-color: #313131')
|
# merge sort
# h/t https://www.thecrazyprogrammer.com/2017/12/python-merge-sort.html
# h/t https://www.youtube.com/watch?v=Nso25TkBsYI
def merge_sort(array):
n = len(array)
if n > 1:
mid = n//2
left = array[0:mid]
right = array[mid:n]
print(mid, left, right, array)
merge_sort(left)
merge_sort(right)
merge(left, right, array, n)
def merge(left, right, array, array_length):
right_length = len(right)
left_length = len(left)
left_index = right_index = 0
for array_index in range(0, array_length):
if right_index == right_length:
array[array_index:array_length] = left[left_index:left_length]
break
elif left_index == left_length:
array[array_index:array_length] = right[right_index:right_length]
break
elif left[left_index] <= right[right_index]:
array[array_index] = left[left_index]
left_index += 1
else:
array[array_index] = right[right_index]
right_index += 1
array = [99,2,3,3,12,4,5]
arr_len = len(array)
merge_sort(array)
print(array)
assert len(array) == arr_len
|
def merge_sort(array):
n = len(array)
if n > 1:
mid = n // 2
left = array[0:mid]
right = array[mid:n]
print(mid, left, right, array)
merge_sort(left)
merge_sort(right)
merge(left, right, array, n)
def merge(left, right, array, array_length):
right_length = len(right)
left_length = len(left)
left_index = right_index = 0
for array_index in range(0, array_length):
if right_index == right_length:
array[array_index:array_length] = left[left_index:left_length]
break
elif left_index == left_length:
array[array_index:array_length] = right[right_index:right_length]
break
elif left[left_index] <= right[right_index]:
array[array_index] = left[left_index]
left_index += 1
else:
array[array_index] = right[right_index]
right_index += 1
array = [99, 2, 3, 3, 12, 4, 5]
arr_len = len(array)
merge_sort(array)
print(array)
assert len(array) == arr_len
|
# Lucas Sequence Using Recursion
def recur_luc(n):
if n == 1:
return n
if n == 0:
return 2
return (recur_luc(n-1) + recur_luc(n-2))
limit = int(input("How many terms to include in Lucas series:"))
print("Lucas series:")
for i in range(limit):
print(recur_luc(i))
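# Aside (added, not part of the original exercise): recur_luc recomputes subproblems,
# so it is exponential in the term index. Rebinding the name through functools.lru_cache
# routes the recursive calls through a cache and makes large limits practical.
from functools import lru_cache
recur_luc = lru_cache(maxsize=None)(recur_luc)
assert [recur_luc(n) for n in range(6)] == [2, 1, 3, 4, 7, 11]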
|
def recur_luc(n):
if n == 1:
return n
if n == 0:
return 2
return recur_luc(n - 1) + recur_luc(n - 2)
limit = int(input('How many terms to include in Lucas series:'))
print('Lucas series:')
for i in range(limit):
print(recur_luc(i))
|
# encoding: utf-8
"""Exceptions used by marrow.mailer to report common errors."""
__all__ = [
'MailException',
'MailConfigurationException',
'TransportException',
'TransportFailedException',
'MessageFailedException',
'TransportExhaustedException',
'ManagerException'
]
class MailException(Exception):
"""The base for all marrow.mailer exceptions."""
pass
# Application Exceptions
class DeliveryException(MailException):
"""The base class for all public-facing exceptions."""
pass
class DeliveryFailedException(DeliveryException):
"""The message stored in args[0] could not be delivered for the reason
given in args[1]. (These can be accessed as e.msg and e.reason.)"""
def __init__(self, message, reason):
self.msg = message
self.reason = reason
super(DeliveryFailedException, self).__init__(message, reason)
# Internal Exceptions
class MailerNotRunning(MailException):
"""Raised when attempting to deliver messages using a dead interface."""
pass
class MailConfigurationException(MailException):
"""There was an error in the configuration of marrow.mailer."""
pass
class TransportException(MailException):
"""The base for all marrow.mailer Transport exceptions."""
pass
class TransportFailedException(TransportException):
"""The transport has failed to deliver the message due to an internal
error; a new instance of the transport should be used to retry."""
pass
class MessageFailedException(TransportException):
"""The transport has failed to deliver the message due to a problem with
the message itself, and no attempt should be made to retry delivery of
this message. The transport may still be re-used, however.
The reason for the failure should be the first argument.
"""
pass
class TransportExhaustedException(TransportException):
"""The transport has successfully delivered the message, but can no longer
be used for future message delivery; a new instance should be used on the
next request."""
pass
class ManagerException(MailException):
"""The base for all marrow.mailer Manager exceptions."""
pass
|
"""Exceptions used by marrow.mailer to report common errors."""
__all__ = ['Mailexception', 'Mailconfigurationexception', 'Transportexception', 'Transportfailedexception', 'Messagefailedexception', 'Transportexhaustedexception', 'Managerexception']
class Mailexception(Exception):
"""The base for all marrow.mailer exceptions."""
pass
class Deliveryexception(Mailexception):
"""The base class for all public-facing exceptions."""
pass
class Deliveryfailedexception(Deliveryexception):
"""The message stored in args[0] could not be delivered for the reason
given in args[1]. (These can be accessed as e.msg and e.reason.)"""
def __init__(self, message, reason):
self.msg = message
self.reason = reason
        super(Deliveryfailedexception, self).__init__(message, reason)
class Mailernotrunning(Mailexception):
"""Raised when attempting to deliver messages using a dead interface."""
pass
class Mailconfigurationexception(Mailexception):
"""There was an error in the configuration of marrow.mailer."""
pass
class Transportexception(Mailexception):
"""The base for all marrow.mailer Transport exceptions."""
pass
class Transportfailedexception(Transportexception):
"""The transport has failed to deliver the message due to an internal
error; a new instance of the transport should be used to retry."""
pass
class Messagefailedexception(Transportexception):
"""The transport has failed to deliver the message due to a problem with
the message itself, and no attempt should be made to retry delivery of
this message. The transport may still be re-used, however.
The reason for the failure should be the first argument.
"""
pass
class Transportexhaustedexception(Transportexception):
"""The transport has successfully delivered the message, but can no longer
be used for future message delivery; a new instance should be used on the
next request."""
pass
class Managerexception(Mailexception):
"""The base for all marrow.mailer Manager exceptions."""
pass
|
#
# PySNMP MIB module RBN-SYS-SECURITY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RBN-SYS-SECURITY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:53:26 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
CounterBasedGauge64, = mibBuilder.importSymbols("HCNUM-TC", "CounterBasedGauge64")
rbnModules, = mibBuilder.importSymbols("RBN-SMI", "rbnModules")
RbnUnsigned64, = mibBuilder.importSymbols("RBN-TC", "RbnUnsigned64")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
MibIdentifier, ModuleIdentity, Bits, Gauge32, NotificationType, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, Counter64, iso, Unsigned32, IpAddress, Integer32, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "ModuleIdentity", "Bits", "Gauge32", "NotificationType", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "Counter64", "iso", "Unsigned32", "IpAddress", "Integer32", "ObjectIdentity")
DisplayString, TextualConvention, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "DateAndTime")
rbnSysSecurityMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 2352, 5, 54))
rbnSysSecurityMib.setRevisions(('2009-11-09 18:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: rbnSysSecurityMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts: rbnSysSecurityMib.setLastUpdated('200911091800Z')
if mibBuilder.loadTexts: rbnSysSecurityMib.setOrganization('Ericsson Inc.')
if mibBuilder.loadTexts: rbnSysSecurityMib.setContactInfo(' Ericsson Inc. 100 Headquarters Drive San Jose, CA 95134 USA Phone: +1 408 750 5000 Fax: +1 408 750 5599 ')
if mibBuilder.loadTexts: rbnSysSecurityMib.setDescription('This MIB module defines attributes and notifications related to system and network level security issues. All mib objects defined in the module are viewed within the context identified in the SNMP protocol (i.e. the community string in v1/v2c or the contextName in v3). ')
rbnSysSecNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0))
rbnSysSecObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1))
rbnSysSecConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2))
rbnSysSecThresholdObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1))
rbnSysSecNotifyEnable = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 1), Bits().clone(namedValues=NamedValues(("maliciousPkt", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rbnSysSecNotifyEnable.setStatus('current')
if mibBuilder.loadTexts: rbnSysSecNotifyEnable.setDescription('The bit mask to enable/disable notifications for crossing specific threshold.')
rbnMeasurementInterval = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(1, 3600)).clone(60)).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: rbnMeasurementInterval.setStatus('current')
if mibBuilder.loadTexts: rbnMeasurementInterval.setDescription('Data is sampled at the start and end of a specified interval. The difference between the start and end values |end - start| is called the delta value. When setting the interval, care should be taken that the interval should be short enough that the sampled variable is very unlikely to increase or decrease by more than range of the variable. ')
rbnMaliciousPktsThresholdHi = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 3), RbnUnsigned64()).setUnits('Packets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: rbnMaliciousPktsThresholdHi.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktsThresholdHi.setDescription('When the current sampling interval delta value of the malicious packets counter is greater than or equal to this threshold, and the delta value at the last sampling interval was less than this threshold, a single high threshold exceeded event will be generated. A single high threshold exceeded event will also be generated if the first sampling interval delta value of the malicious IP packets counter is greater than or equal to this threshold. After a high threshold exceeded event is generated, another such event will not be generated until the delta value falls below this threshold and reaches the rbnMaliciousPktsThresholdLow, generating a low threshold exceeded event. In other words there cannot be successive high threshold events without an intervening low threshold event. ')
rbnMaliciousPktsThresholdLow = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 4), RbnUnsigned64()).setUnits('Packets').setMaxAccess("readwrite")
if mibBuilder.loadTexts: rbnMaliciousPktsThresholdLow.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktsThresholdLow.setDescription('When the current sampling interval delta value of the malicious packets counter is less than or equal to this threshold, and the delta value at the last sampling interval was greater than this threshold, a single low threshold exceeded event will be generated. In addition, a high threshold exceeded event must occur before a low threshold exceeded event can be generated. ')
rbnSysSecStatsObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2))
rbnMaliciousPktsCounter = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2, 1), Counter64()).setUnits('Packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: rbnMaliciousPktsCounter.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktsCounter.setDescription('A count of all malicious pkts. This includes but is not limited to malformed IP packets, malformed layer 4 IP, packets filtered by ACLs for specific faults, IP packets identified as attempting to spoof a system, and IP packets which failed reassembly.')
rbnMaliciousPktsDelta = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2, 2), CounterBasedGauge64()).setUnits('packets').setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: rbnMaliciousPktsDelta.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktsDelta.setDescription('The delta value of rbnMaliciousPktsCounter at the most recently completed measurement interval.')
rbnSysSecNotifyObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 4))
rbnThresholdNotifyTime = MibScalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 4, 1), DateAndTime()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: rbnThresholdNotifyTime.setStatus('current')
if mibBuilder.loadTexts: rbnThresholdNotifyTime.setDescription('The DateAndTime of the notification.')
rbnMaliciousPktThresholdHiExceeded = NotificationType((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0, 1))
if mibBuilder.loadTexts: rbnMaliciousPktThresholdHiExceeded.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktThresholdHiExceeded.setDescription('This notification signifies that one of the delta values is equal to or greater than the corresponding high threshold value. The specific delta value is the last object in the notification varbind list. ')
rbnMaliciousPktThresholdLowExceeded = NotificationType((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0, 2)).setObjects(("RBN-SYS-SECURITY-MIB", "rbnThresholdNotifyTime"))
if mibBuilder.loadTexts: rbnMaliciousPktThresholdLowExceeded.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktThresholdLowExceeded.setDescription('This notification signifies that one of the delta values is less than or equal to the corresponding low threshold value. The specific delta value is the last object in the notification varbind list. ')
rbnSysSecCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 1))
rbnSysSecGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2))
rbnMaliciousPktGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 1)).setObjects(("RBN-SYS-SECURITY-MIB", "rbnSysSecNotifyEnable"), ("RBN-SYS-SECURITY-MIB", "rbnMeasurementInterval"), ("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktsThresholdHi"), ("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktsThresholdLow"), ("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktsCounter"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbnMaliciousPktGroup = rbnMaliciousPktGroup.setStatus('current')
if mibBuilder.loadTexts: rbnMaliciousPktGroup.setDescription('Set of objects for the group.')
rbnSysSecNotifyObjectsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 4)).setObjects(("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktsDelta"), ("RBN-SYS-SECURITY-MIB", "rbnThresholdNotifyTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbnSysSecNotifyObjectsGroup = rbnSysSecNotifyObjectsGroup.setStatus('current')
if mibBuilder.loadTexts: rbnSysSecNotifyObjectsGroup.setDescription('Set of objects for the group.')
rbnSysSecNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 5)).setObjects(("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktThresholdHiExceeded"), ("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktThresholdLowExceeded"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbnSysSecNotificationGroup = rbnSysSecNotificationGroup.setStatus('current')
if mibBuilder.loadTexts: rbnSysSecNotificationGroup.setDescription('Set of notifications for the group.')
rbnSysSecCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 1, 1)).setObjects(("RBN-SYS-SECURITY-MIB", "rbnMaliciousPktGroup"), ("RBN-SYS-SECURITY-MIB", "rbnSysSecNotifyObjectsGroup"), ("RBN-SYS-SECURITY-MIB", "rbnSysSecNotificationGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbnSysSecCompliance = rbnSysSecCompliance.setStatus('current')
if mibBuilder.loadTexts: rbnSysSecCompliance.setDescription('The compliance statement for support of this mib module.')
mibBuilder.exportSymbols("RBN-SYS-SECURITY-MIB", rbnMeasurementInterval=rbnMeasurementInterval, rbnSysSecConformance=rbnSysSecConformance, rbnMaliciousPktThresholdHiExceeded=rbnMaliciousPktThresholdHiExceeded, rbnSysSecNotifications=rbnSysSecNotifications, rbnSysSecCompliances=rbnSysSecCompliances, rbnSysSecGroups=rbnSysSecGroups, rbnSysSecNotifyObjectsGroup=rbnSysSecNotifyObjectsGroup, rbnMaliciousPktGroup=rbnMaliciousPktGroup, rbnSysSecObjects=rbnSysSecObjects, rbnMaliciousPktsThresholdHi=rbnMaliciousPktsThresholdHi, rbnSysSecCompliance=rbnSysSecCompliance, rbnSysSecNotifyObjects=rbnSysSecNotifyObjects, rbnSysSecThresholdObjects=rbnSysSecThresholdObjects, rbnSysSecNotificationGroup=rbnSysSecNotificationGroup, PYSNMP_MODULE_ID=rbnSysSecurityMib, rbnSysSecNotifyEnable=rbnSysSecNotifyEnable, rbnMaliciousPktsCounter=rbnMaliciousPktsCounter, rbnMaliciousPktsThresholdLow=rbnMaliciousPktsThresholdLow, rbnSysSecStatsObjects=rbnSysSecStatsObjects, rbnMaliciousPktThresholdLowExceeded=rbnMaliciousPktThresholdLowExceeded, rbnThresholdNotifyTime=rbnThresholdNotifyTime, rbnSysSecurityMib=rbnSysSecurityMib, rbnMaliciousPktsDelta=rbnMaliciousPktsDelta)
|
(integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_intersection, constraints_union, single_value_constraint, value_size_constraint, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'ConstraintsUnion', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint')
(counter_based_gauge64,) = mibBuilder.importSymbols('HCNUM-TC', 'CounterBasedGauge64')
(rbn_modules,) = mibBuilder.importSymbols('RBN-SMI', 'rbnModules')
(rbn_unsigned64,) = mibBuilder.importSymbols('RBN-TC', 'RbnUnsigned64')
(module_compliance, notification_group, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup', 'ObjectGroup')
(mib_identifier, module_identity, bits, gauge32, notification_type, time_ticks, mib_scalar, mib_table, mib_table_row, mib_table_column, counter32, counter64, iso, unsigned32, ip_address, integer32, object_identity) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibIdentifier', 'ModuleIdentity', 'Bits', 'Gauge32', 'NotificationType', 'TimeTicks', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Counter32', 'Counter64', 'iso', 'Unsigned32', 'IpAddress', 'Integer32', 'ObjectIdentity')
(display_string, textual_convention, date_and_time) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention', 'DateAndTime')
rbnSysSecurityMib = module_identity((1, 3, 6, 1, 4, 1, 2352, 5, 54))
rbnSysSecurityMib.setRevisions(('2009-11-09 18:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
rbnSysSecurityMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts:
rbnSysSecurityMib.setLastUpdated('200911091800Z')
if mibBuilder.loadTexts:
rbnSysSecurityMib.setOrganization('Ericsson Inc.')
if mibBuilder.loadTexts:
rbnSysSecurityMib.setContactInfo(' Ericsson Inc. 100 Headquarters Drive San Jose, CA 95134 USA Phone: +1 408 750 5000 Fax: +1 408 750 5599 ')
if mibBuilder.loadTexts:
rbnSysSecurityMib.setDescription('This MIB module defines attributes and notifications related to system and network level security issues. All mib objects defined in the module are viewed within the context identified in the SNMP protocol (i.e. the community string in v1/v2c or the contextName in v3). ')
rbnSysSecNotifications = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0))
rbnSysSecObjects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1))
rbnSysSecConformance = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2))
rbnSysSecThresholdObjects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1))
rbnSysSecNotifyEnable = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 1), bits().clone(namedValues=named_values(('maliciousPkt', 0)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rbnSysSecNotifyEnable.setStatus('current')
if mibBuilder.loadTexts:
rbnSysSecNotifyEnable.setDescription('The bit mask to enable/disable notifications for crossing specific threshold.')
rbnMeasurementInterval = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 2), gauge32().subtype(subtypeSpec=value_range_constraint(1, 3600)).clone(60)).setUnits('seconds').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rbnMeasurementInterval.setStatus('current')
if mibBuilder.loadTexts:
rbnMeasurementInterval.setDescription('Data is sampled at the start and end of a specified interval. The difference between the start and end values |end - start| is called the delta value. When setting the interval, care should be taken that the interval should be short enough that the sampled variable is very unlikely to increase or decrease by more than range of the variable. ')
rbnMaliciousPktsThresholdHi = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 3), rbn_unsigned64()).setUnits('Packets').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rbnMaliciousPktsThresholdHi.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktsThresholdHi.setDescription('When the current sampling interval delta value of the malicious packets counter is greater than or equal to this threshold, and the delta value at the last sampling interval was less than this threshold, a single high threshold exceeded event will be generated. A single high threshold exceeded event will also be generated if the first sampling interval delta value of the malicious IP packets counter is greater than or equal to this threshold. After a high threshold exceeded event is generated, another such event will not be generated until the delta value falls below this threshold and reaches the rbnMaliciousPktsThresholdLow, generating a low threshold exceeded event. In other words there cannot be successive high threshold events without an intervening low threshold event. ')
rbnMaliciousPktsThresholdLow = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 1, 4), rbn_unsigned64()).setUnits('Packets').setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rbnMaliciousPktsThresholdLow.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktsThresholdLow.setDescription('When the current sampling interval delta value of the malicious packets counter is less than or equal to this threshold, and the delta value at the last sampling interval was greater than this threshold, a single low threshold exceeded event will be generated. In addition, a high threshold exceeded event must occur before a low threshold exceeded event can be generated. ')
rbnSysSecStatsObjects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2))
rbnMaliciousPktsCounter = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2, 1), counter64()).setUnits('Packets').setMaxAccess('readonly')
if mibBuilder.loadTexts:
rbnMaliciousPktsCounter.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktsCounter.setDescription('A count of all malicious pkts. This includes but is not limited to malformed IP packets, malformed layer 4 IP, packets filtered by ACLs for specific faults, IP packets identified as attempting to spoof a system, and IP packets which failed reassembly.')
rbnMaliciousPktsDelta = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 2, 2), counter_based_gauge64()).setUnits('packets').setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
rbnMaliciousPktsDelta.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktsDelta.setDescription('The delta value of rbnMaliciousPktsCounter at the most recently completed measurement interval.')
rbnSysSecNotifyObjects = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 4))
rbnThresholdNotifyTime = mib_scalar((1, 3, 6, 1, 4, 1, 2352, 5, 54, 1, 4, 1), date_and_time()).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
rbnThresholdNotifyTime.setStatus('current')
if mibBuilder.loadTexts:
rbnThresholdNotifyTime.setDescription('The DateAndTime of the notification.')
rbnMaliciousPktThresholdHiExceeded = notification_type((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0, 1))
if mibBuilder.loadTexts:
rbnMaliciousPktThresholdHiExceeded.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktThresholdHiExceeded.setDescription('This notification signifies that one of the delta values is equal to or greater than the corresponding high threshold value. The specific delta value is the last object in the notification varbind list. ')
rbnMaliciousPktThresholdLowExceeded = notification_type((1, 3, 6, 1, 4, 1, 2352, 5, 54, 0, 2)).setObjects(('RBN-SYS-SECURITY-MIB', 'rbnThresholdNotifyTime'))
if mibBuilder.loadTexts:
rbnMaliciousPktThresholdLowExceeded.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktThresholdLowExceeded.setDescription('This notification signifies that one of the delta values is less than or equal to the corresponding low threshold value. The specific delta value is the last object in the notification varbind list. ')
rbnSysSecCompliances = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 1))
rbnSysSecGroups = mib_identifier((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2))
rbnMaliciousPktGroup = object_group((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 1)).setObjects(('RBN-SYS-SECURITY-MIB', 'rbnSysSecNotifyEnable'), ('RBN-SYS-SECURITY-MIB', 'rbnMeasurementInterval'), ('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktsThresholdHi'), ('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktsThresholdLow'), ('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktsCounter'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbn_malicious_pkt_group = rbnMaliciousPktGroup.setStatus('current')
if mibBuilder.loadTexts:
rbnMaliciousPktGroup.setDescription('Set of objects for the group.')
rbnSysSecNotifyObjectsGroup = object_group((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 4)).setObjects(('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktsDelta'), ('RBN-SYS-SECURITY-MIB', 'rbnThresholdNotifyTime'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbn_sys_sec_notify_objects_group = rbnSysSecNotifyObjectsGroup.setStatus('current')
if mibBuilder.loadTexts:
rbnSysSecNotifyObjectsGroup.setDescription('Set of objects for the group.')
rbnSysSecNotificationGroup = notification_group((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 2, 5)).setObjects(('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktThresholdHiExceeded'), ('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktThresholdLowExceeded'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbn_sys_sec_notification_group = rbnSysSecNotificationGroup.setStatus('current')
if mibBuilder.loadTexts:
rbnSysSecNotificationGroup.setDescription('Set of notifications for the group.')
rbnSysSecCompliance = module_compliance((1, 3, 6, 1, 4, 1, 2352, 5, 54, 2, 1, 1)).setObjects(('RBN-SYS-SECURITY-MIB', 'rbnMaliciousPktGroup'), ('RBN-SYS-SECURITY-MIB', 'rbnSysSecNotifyObjectsGroup'), ('RBN-SYS-SECURITY-MIB', 'rbnSysSecNotificationGroup'))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rbn_sys_sec_compliance = rbnSysSecCompliance.setStatus('current')
if mibBuilder.loadTexts:
rbnSysSecCompliance.setDescription('The compliance statement for support of this mib module.')
mibBuilder.exportSymbols('RBN-SYS-SECURITY-MIB', rbnMeasurementInterval=rbnMeasurementInterval, rbnSysSecConformance=rbnSysSecConformance, rbnMaliciousPktThresholdHiExceeded=rbnMaliciousPktThresholdHiExceeded, rbnSysSecNotifications=rbnSysSecNotifications, rbnSysSecCompliances=rbnSysSecCompliances, rbnSysSecGroups=rbnSysSecGroups, rbnSysSecNotifyObjectsGroup=rbnSysSecNotifyObjectsGroup, rbnMaliciousPktGroup=rbnMaliciousPktGroup, rbnSysSecObjects=rbnSysSecObjects, rbnMaliciousPktsThresholdHi=rbnMaliciousPktsThresholdHi, rbnSysSecCompliance=rbnSysSecCompliance, rbnSysSecNotifyObjects=rbnSysSecNotifyObjects, rbnSysSecThresholdObjects=rbnSysSecThresholdObjects, rbnSysSecNotificationGroup=rbnSysSecNotificationGroup, PYSNMP_MODULE_ID=rbnSysSecurityMib, rbnSysSecNotifyEnable=rbnSysSecNotifyEnable, rbnMaliciousPktsCounter=rbnMaliciousPktsCounter, rbnMaliciousPktsThresholdLow=rbnMaliciousPktsThresholdLow, rbnSysSecStatsObjects=rbnSysSecStatsObjects, rbnMaliciousPktThresholdLowExceeded=rbnMaliciousPktThresholdLowExceeded, rbnThresholdNotifyTime=rbnThresholdNotifyTime, rbnSysSecurityMib=rbnSysSecurityMib, rbnMaliciousPktsDelta=rbnMaliciousPktsDelta)
|
lines = open('input.txt').read().splitlines()
matches = {'(': ')', '[': ']', '{': '}', '<': '>'}
penalty = {')': 3, ']': 57, '}': 1197, '>': 25137}
costs = {')': 1, ']': 2, '}': 3, '>': 4}
errors = []
incpl_costs = []
for i, l in enumerate(lines):
expected_closings = []
for c in l:
if c in matches.keys():
expected_closings.append(matches[c])
else:
if expected_closings[-1] != c:
# corrupted
errors.append((i, c))
break
else:
del expected_closings[-1]
if not errors or errors[-1][0] != i:
# incomplete
cur_costs = 0
for c in expected_closings[::-1]:
cur_costs = cur_costs * 5 + costs[c]
incpl_costs.append(cur_costs)
print(sum(penalty[c] for _, c in errors))
print(sorted(incpl_costs)[len(incpl_costs) // 2])
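# Self-check of the completion scoring used above (added; the completion string is the
# worked example from Advent of Code 2021 day 10, so it does not depend on input.txt).
demo_completion = '}}]])})]'
demo_score = 0
for ch in demo_completion:
    demo_score = demo_score * 5 + costs[ch]
print(demo_score)  # 288957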
|
lines = open('input.txt').read().splitlines()
matches = {'(': ')', '[': ']', '{': '}', '<': '>'}
penalty = {')': 3, ']': 57, '}': 1197, '>': 25137}
costs = {')': 1, ']': 2, '}': 3, '>': 4}
errors = []
incpl_costs = []
for (i, l) in enumerate(lines):
expected_closings = []
for c in l:
if c in matches.keys():
expected_closings.append(matches[c])
elif expected_closings[-1] != c:
errors.append((i, c))
break
else:
del expected_closings[-1]
if not errors or errors[-1][0] != i:
cur_costs = 0
for c in expected_closings[::-1]:
cur_costs = cur_costs * 5 + costs[c]
incpl_costs.append(cur_costs)
print(sum((penalty[c] for (_, c) in errors)))
print(sorted(incpl_costs)[len(incpl_costs) // 2])
|
# Time: O(n^2)
# Space: O(1)
# 892
# On a N * N grid, we place some 1 * 1 * 1 cubes.
#
# Each value v = grid[i][j] represents a tower of v cubes
# placed on top of grid cell (i, j).
#
# Return the total surface area of the resulting shapes.
#
# Example 1:
#
# Input: [[2]]
# Output: 10
# Example 2:
#
# Input: [[1,2],[3,4]]
# Output: 34
# Example 3:
#
# Input: [[1,0],[0,2]]
# Output: 16
# Example 4:
#
# Input: [[1,1,1],[1,0,1],[1,1,1]]
# Output: 32
# Example 5:
#
# Input: [[2,2,2],[2,1,2],[2,2,2]]
# Output: 46
#
# Note:
# - 1 <= N <= 50
# - 0 <= grid[i][j] <= 50
class Solution(object):
def surfaceArea(self, grid): # USE THIS: minus joint surface, 36 ms
"""
:type grid: List[List[int]]
:rtype: int
"""
result = 0
for i in xrange(len(grid)):
for j in xrange(len(grid)):
if grid[i][j]:
result += 2 + grid[i][j]*4
if i:
result -= min(grid[i][j], grid[i-1][j])*2
if j:
result -= min(grid[i][j], grid[i][j-1])*2
return result
# look at 4 neighbors of each cell, add contribution if higher than neighbor. Avoid double count
# 56 ms
def surfaceArea2(self, grid):
n, ans = len(grid), 0
for i in xrange(n):
for j in xrange(n):
if grid[i][j]:
ans += 2
for nx, ny in [(i-1,j),(i,j-1),(i+1,j),(i,j+1)]:
if 0<=nx<n and 0<=ny<n:
nei_val = grid[nx][ny]
else:
nei_val = 0
ans += max(0, grid[i][j]-nei_val)
return ans
# add difference between neighboring cells: iterate more times
def surfaceArea3(self, grid):
ans = 0
for row in grid:
ans += 2*sum(c>0 for c in row)
rrow = [0]+row+[0]
ans += sum(abs(rrow[i]-rrow[i+1]) for i in xrange(len(rrow)-1))
for col in zip(*grid):
col = (0,)+col+(0,) # KENG: zip's result is tuple which cannot concatenate to list
ans += sum(abs(col[i]-col[i+1]) for i in xrange(len(col)-1))
return ans
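# Quick sanity checks (added, not in the original; Python 2 is assumed because the
# methods above use xrange). The expected values are the problem examples quoted above.
print(Solution().surfaceArea([[1, 2], [3, 4]]))                    # 34 (Example 2)
print(Solution().surfaceArea2([[1, 0], [0, 2]]))                   # 16 (Example 3)
print(Solution().surfaceArea3([[2, 2, 2], [2, 1, 2], [2, 2, 2]]))  # 46 (Example 5)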
|
class Solution(object):
def surface_area(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
result = 0
for i in xrange(len(grid)):
for j in xrange(len(grid)):
if grid[i][j]:
result += 2 + grid[i][j] * 4
if i:
result -= min(grid[i][j], grid[i - 1][j]) * 2
if j:
result -= min(grid[i][j], grid[i][j - 1]) * 2
return result
def surface_area2(self, grid):
(n, ans) = (len(grid), 0)
for i in xrange(n):
for j in xrange(n):
if grid[i][j]:
ans += 2
for (nx, ny) in [(i - 1, j), (i, j - 1), (i + 1, j), (i, j + 1)]:
if 0 <= nx < n and 0 <= ny < n:
nei_val = grid[nx][ny]
else:
nei_val = 0
ans += max(0, grid[i][j] - nei_val)
return ans
def surface_area3(self, grid):
ans = 0
for row in grid:
ans += 2 * sum((c > 0 for c in row))
rrow = [0] + row + [0]
ans += sum((abs(rrow[i] - rrow[i + 1]) for i in xrange(len(rrow) - 1)))
for col in zip(*grid):
col = (0,) + col + (0,)
ans += sum((abs(col[i] - col[i + 1]) for i in xrange(len(col) - 1)))
return ans
|
# 1109. Corporate Flight Bookings
# Weekly Contest 144
# Time: O(len(n))
# Space: O(n)
class Solution:
def corpFlightBookings(self, bookings: List[List[int]], n: int) -> List[int]:
"""
Shorter solution:
"""
res = [0]*(n+1)
for i,j,k in bookings:
res[i-1]+=k
res[j]-=k
for i in range(1,n):
res[i]+=res[i-1]
return res[:-1]
"""
Longer Solution
"""
result = [{} for _ in range(n)]
for booking in bookings:
i,j,k = booking[0],booking[1],booking[2]
if 'start' in result[i-1]:
result[i-1]['start']+=k
else:
result[i-1]['start']=k
if 'end' in result[j-1]:
result[j-1]['end']+=k
else:
result[j-1]['end']=k
adder = 0
seats = [0]*n
for index,flight in enumerate(result):
if 'start' in flight:
adder+=flight['start']
seats[index]+=adder
if 'end' in flight:
adder-=flight['end']
return seats
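# Worked illustration of the difference-array trick used in the shorter solution above
# (added; written standalone so it does not rely on typing.List being imported).
# The input is LeetCode's published sample for this problem.
bookings_demo, n_demo = [[1, 2, 10], [2, 3, 20], [2, 5, 25]], 5
diff = [0] * (n_demo + 1)
for first, last, seats_booked in bookings_demo:
    diff[first - 1] += seats_booked  # seats start counting at flight `first`
    diff[last] -= seats_booked       # and stop counting just past flight `last`
for idx in range(1, n_demo):
    diff[idx] += diff[idx - 1]       # prefix sums recover the per-flight totals
print(diff[:-1])                     # [10, 55, 45, 25, 25]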
|
class Solution:
def corp_flight_bookings(self, bookings: List[List[int]], n: int) -> List[int]:
"""
Shorter solution:
"""
res = [0] * (n + 1)
for (i, j, k) in bookings:
res[i - 1] += k
res[j] -= k
for i in range(1, n):
res[i] += res[i - 1]
return res[:-1]
'\n Longer Solution\n '
result = [{} for _ in range(n)]
for booking in bookings:
(i, j, k) = (booking[0], booking[1], booking[2])
if 'start' in result[i - 1]:
result[i - 1]['start'] += k
else:
result[i - 1]['start'] = k
if 'end' in result[j - 1]:
result[j - 1]['end'] += k
else:
result[j - 1]['end'] = k
adder = 0
seats = [0] * n
for (index, flight) in enumerate(result):
if 'start' in flight:
adder += flight['start']
seats[index] += adder
if 'end' in flight:
adder -= flight['end']
return seats
|
# Given a sorted array of integers, find the starting and ending position of a given target value.
# Your algorithm's runtime complexity must be in the order of O(log n).
# If the target is not found in the array, return [-1, -1].
# For example,
# Given [5, 7, 7, 8, 8, 10] and target value 8,
# return [3, 4].
def search_range(numbers, target):
result = [-1, -1]
if len(numbers) == 0:
return result
low = 0
high = len(numbers) - 1
while low <= high:
mid = low + (high - low) // 2
if numbers[mid] >= target:
high = mid - 1
else:
low = mid + 1
if low < len(numbers) and numbers[low] == target:
result[0] = low
else:
return result
high = len(numbers) - 1
while low <= high:
mid = low + (high - low) // 2
if numbers[mid] <= target:
low = mid + 1
else:
high = mid - 1
result[1] = high
return result
if __name__ == '__main__':
array = [5, 7, 7, 8, 8, 10]
target = 8
print('The range of', target, 'in array',
array, 'is:', search_range(array, target))
|
def search_range(numbers, target):
result = [-1, -1]
if len(numbers) == 0:
return result
low = 0
high = len(numbers) - 1
while low <= high:
mid = low + (high - low) // 2
if numbers[mid] >= target:
high = mid - 1
else:
low = mid + 1
if low < len(numbers) and numbers[low] == target:
result[0] = low
else:
return result
high = len(numbers) - 1
while low <= high:
mid = low + (high - low) // 2
if numbers[mid] <= target:
low = mid + 1
else:
high = mid - 1
result[1] = high
return result
if __name__ == '__main__':
array = [5, 7, 7, 8, 8, 10]
target = 8
print('The range of', target, 'in array', array, 'is:', search_range(array, target))
|
# -*- coding: utf-8 -*-
"""
flaskbb.core.exceptions
~~~~~~~~~~~~~~~~~~~~~~~
Exceptions raised by flaskbb.core,
forms the root of all exceptions in
FlaskBB.
:copyright: (c) 2014-2018 the FlaskBB Team
:license: BSD, see LICENSE for more details
"""
class BaseFlaskBBError(Exception):
"""
Root exception for FlaskBB.
"""
class ValidationError(BaseFlaskBBError):
"""
Used to signal validation errors for things such as
token verification, user registration, etc.
:param str attribute: The attribute the validation error applies to,
if the validation error applies to multiple attributes or to
the entire object, this should be set to None
:param str reason: Why the attribute, collection of attributes or object
is invalid.
"""
def __init__(self, attribute, reason):
self.attribute = attribute
self.reason = reason
super(ValidationError, self).__init__((attribute, reason))
class StopValidation(BaseFlaskBBError):
"""
Raised from validation handlers to signal that
validation should end immediately and no further
processing should be done.
Can also be used to communicate all errors
raised during a validation run.
:param reasons: A sequence of `(attribute, reason)` pairs explaining
why the object is invalid.
"""
def __init__(self, reasons):
self.reasons = reasons
super(StopValidation, self).__init__(reasons)
class PersistenceError(BaseFlaskBBError):
"""
Used to catch down errors when persisting models to the database instead
of letting all issues percolate up, this should be raised from those
exceptions without smashing their tracebacks. Example::
try:
db.session.add(new_user)
db.session.commit()
except Exception:
raise PersistenceError("Couldn't save user account")
"""
def accumulate_errors(caller, validators, throw=True):
errors = []
for validator in validators:
try:
caller(validator)
except ValidationError as e:
errors.append((e.attribute, e.reason))
if len(errors) and throw:
raise StopValidation(errors)
return errors
|
"""
flaskbb.core.exceptions
~~~~~~~~~~~~~~~~~~~~~~~
Exceptions raised by flaskbb.core,
forms the root of all exceptions in
FlaskBB.
:copyright: (c) 2014-2018 the FlaskBB Team
:license: BSD, see LICENSE for more details
"""
class Baseflaskbberror(Exception):
"""
Root exception for FlaskBB.
"""
class Validationerror(Baseflaskbberror):
"""
Used to signal validation errors for things such as
token verification, user registration, etc.
:param str attribute: The attribute the validation error applies to,
if the validation error applies to multiple attributes or to
the entire object, this should be set to None
:param str reason: Why the attribute, collection of attributes or object
is invalid.
"""
def __init__(self, attribute, reason):
self.attribute = attribute
self.reason = reason
        super(Validationerror, self).__init__((attribute, reason))
class Stopvalidation(Baseflaskbberror):
"""
Raised from validation handlers to signal that
validation should end immediately and no further
processing should be done.
Can also be used to communicate all errors
raised during a validation run.
:param reasons: A sequence of `(attribute, reason)` pairs explaining
why the object is invalid.
"""
def __init__(self, reasons):
self.reasons = reasons
        super(Stopvalidation, self).__init__(reasons)
class Persistenceerror(Baseflaskbberror):
"""
Used to catch down errors when persisting models to the database instead
of letting all issues percolate up, this should be raised from those
exceptions without smashing their tracebacks. Example::
try:
db.session.add(new_user)
db.session.commit()
except Exception:
raise PersistenceError("Couldn't save user account")
"""
def accumulate_errors(caller, validators, throw=True):
errors = []
for validator in validators:
try:
caller(validator)
        except Validationerror as e:
errors.append((e.attribute, e.reason))
if len(errors) and throw:
        raise Stopvalidation(errors)
return errors
|
class Solution(object):
def romanToInt(self, s):
"""
:type s: str
:rtype: int
"""
lm2int = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500,
"M": 1000}
s_len_num = len(s)
ans = 0
# for i in range(s_len_num-1):
# if lm2int[s[i]] < lm2int[s[i+1]]:
# ans -= lm2int[s[i]]
# else:
# ans += lm2int[s[i]]
# return ans+lm2int[s[-1]]
        # Consume subtractive pairs ('IV', 'IX', ...) as a whole so nothing is
        # double counted; single letters are added at face value.
        lm2int = {'I': 1, 'IV': 4, 'V': 5, 'IX': 9, 'X': 10, 'XL': 40, 'L': 50,
                  'XC': 90, 'C': 100, 'CD': 400, 'D': 500, 'CM': 900, 'M': 1000}
        alist = ['IV', 'IX', 'XL', 'XC', 'CD', 'CM']
        s_len_num = len(s)
        i, ans = 0, 0
        while i < s_len_num:
            if i + 1 < s_len_num and s[i] + s[i + 1] in alist:
                ans += lm2int[s[i] + s[i + 1]]
                i += 2
            else:
                ans += lm2int[s[i]]
                i += 1
        return ans
s = "LVIII"
sl = Solution()
print(sl.romanToInt(s))
|
class Solution(object):
def roman_to_int(self, s):
"""
:type s: str
:rtype: int
"""
lm2int = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
s_len_num = len(s)
ans = 0
        # Consume subtractive pairs ('IV', 'IX', ...) as a whole so nothing is double counted.
        lm2int = {'I': 1, 'IV': 4, 'V': 5, 'IX': 9, 'X': 10, 'XL': 40, 'L': 50, 'XC': 90, 'C': 100, 'CD': 400, 'D': 500, 'CM': 900, 'M': 1000}
        alist = ['IV', 'IX', 'XL', 'XC', 'CD', 'CM']
        s_len_num = len(s)
        (i, ans) = (0, 0)
        while i < s_len_num:
            if i + 1 < s_len_num and s[i] + s[i + 1] in alist:
                ans += lm2int[s[i] + s[i + 1]]
                i += 2
            else:
                ans += lm2int[s[i]]
                i += 1
        return ans
s = 'LVIII'
sl = Solution()
print(sl.roman_to_int(s))
|
# https://leetcode.com/problems/climbing-stairs/
# ---------------------------------------------------
# Runtime Complexity: O(n)
# Space Complexity: O(1)
class Solution:
def climbStairs(self, n: int) -> int:
if n <= 2:
return n
prev_prev = 1
prev = 2
cur = 0
for i in range(3, n + 1):
cur = prev_prev + prev
prev_prev, prev = prev, cur
return cur
# ---------------------------------------------------
# Test Cases
# ---------------------------------------------------
solution = Solution()
# 0
print(solution.climbStairs(0))
# 1
print(solution.climbStairs(1))
# 2
print(solution.climbStairs(2))
# 3
print(solution.climbStairs(3))
# 5
print(solution.climbStairs(4))
# 8
print(solution.climbStairs(5))
# 13
print(solution.climbStairs(6))
|
class Solution:
def climb_stairs(self, n: int) -> int:
if n <= 2:
return n
prev_prev = 1
prev = 2
cur = 0
for i in range(3, n + 1):
cur = prev_prev + prev
(prev_prev, prev) = (prev, cur)
return cur
solution = Solution()
print(solution.climb_stairs(0))
print(solution.climb_stairs(1))
print(solution.climb_stairs(2))
print(solution.climb_stairs(3))
print(solution.climb_stairs(4))
print(solution.climb_stairs(5))
print(solution.climb_stairs(6))
|
"""
Behavioral pattern:
Iterator => 1.Iterable 2.Iteration
Requirements that should know:
__iter__ , __next__
"""
class Iteration:
def __init__(self, value):
self.value = value
def __next__(self):
if self.value == 0:
raise StopIteration('End of sequence')
for item in range(0, self.value):
value = self.value
self.value -= 1
return value
class Iterable:
def __init__(self, value):
self.value = value
def __iter__(self):
return Iteration(self.value)
if __name__ == '__main__':
f1 = Iterable(5)
f2 = iter(f1)
print(next(f2))
print(next(f2))
print(next(f2))
print(next(f2))
print(next(f2))
"""we add raise error that don't show later than zero """
print(next(f2))
|
"""
Behavioral pattern:
Iterator => 1.Iterable 2.Iteration
Requirements that should know:
__iter__ , __next__
"""
class Iteration:
def __init__(self, value):
self.value = value
def __next__(self):
if self.value == 0:
            raise StopIteration('End of sequence')
for item in range(0, self.value):
value = self.value
self.value -= 1
return value
class Iterable:
def __init__(self, value):
self.value = value
def __iter__(self):
        return Iteration(self.value)
if __name__ == '__main__':
    f1 = Iterable(5)
f2 = iter(f1)
print(next(f2))
print(next(f2))
print(next(f2))
print(next(f2))
print(next(f2))
"we add raise error that don't show later than zero "
print(next(f2))
|
def urlify(s, i):
    # Replace spaces with '%20' in place. s is a list of characters with enough
    # trailing room; i is the index of the last character of the real string.
    p1, p2 = len(s) - 1, i
    while p1 >= 0 and p2 >= 0:
        if s[p2] != " ":
            s[p1] = s[p2]
            p1 -= 1
        else:
            for c in reversed("%20"):
                s[p1] = c
                p1 -= 1
        p2 -= 1
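# Example (added, not in the original). The buffer needs trailing room for the expansion,
# and i = 12 is assumed to be the index of the last real character of "Mr John Smith".
buf = list("Mr John Smith    ")
urlify(buf, 12)
print("".join(buf))  # Mr%20John%20Smith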
|
def urlify(s, i):
    # Replace spaces with '%20' in place. s is a list of characters with enough
    # trailing room; i is the index of the last character of the real string.
    (p1, p2) = (len(s) - 1, i)
    while p1 >= 0 and p2 >= 0:
        if s[p2] != ' ':
            s[p1] = s[p2]
            p1 -= 1
        else:
            for c in reversed('%20'):
                s[p1] = c
                p1 -= 1
        p2 -= 1
|
# Example 1:
# Input: candidates = [2,3,6,7], target = 7
# Output: [[2,2,3],[7]]
# Explanation:
# 2 and 3 are candidates, and 2 + 2 + 3 = 7.
# Note that 2 can be used multiple times.
# 7 is a candidate, and 7 = 7.
# These are the only two combinations.
# Example 2:
# Input: candidates = [2,3,5], target = 8
# Output: [[2,2,2,2],[2,3,3],[3,5]]
class Solution:
def combinationSum(self, candidates: List[int], target: int) -> List[List[int]]:
comb = [[] if i > 0 else [[]] for i in range(target + 1)]
for candidate in candidates:
for i in range(candidate, len(comb)):
comb[i].extend(comb + [candidate] for comb in comb[i - candidate])
return comb[-1]
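# Worked check of the bottom-up table above (added, not in the original; standalone so it
# does not rely on typing.List). With candidates [2, 3, 6, 7] and target 7 the final cell
# matches Example 1.
candidates_demo, target_demo = [2, 3, 6, 7], 7
table = [[] if i > 0 else [[]] for i in range(target_demo + 1)]
for cand in candidates_demo:
    for i in range(cand, len(table)):
        table[i].extend(prev + [cand] for prev in table[i - cand])
print(table[-1])  # [[2, 2, 3], [7]]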
|
class Solution:
def combination_sum(self, candidates: List[int], target: int) -> List[List[int]]:
comb = [[] if i > 0 else [[]] for i in range(target + 1)]
for candidate in candidates:
for i in range(candidate, len(comb)):
comb[i].extend((comb + [candidate] for comb in comb[i - candidate]))
return comb[-1]
|
#################################################
# Unit helpers #
#################################################
class Length(float):
unit2m = dict(mm=0.001, cm=0.01, dm=0.1, m=1, km=1000)
def __new__(cls, val, unit):
return float.__new__(cls, val)
def __init__(self, val, unit):
assert unit in Length.unit2m, 'Unknown units'
self.unit = unit
def __str__(self):
return f'{float(self)} {self.unit}'
def __repr__(self):
return self.__str__()
def to(self, name):
if name in Length.unit2m:
return Length.unit2m[self.unit] * float(self) / Length.unit2m[name]
def __abs__(self):
return Length.unit2m[self.unit] * float(self)
class Time(float):
unit2s = dict(
s=1,
min=60,
h=3600,
d=24 * 3600,
y=365.25 * 24 * 3600,
ky=1e3 * 365.25 * 24 * 3600,
Ma=1e6 * 365.25 * 24 * 3600,
)
def __new__(cls, val, unit):
return float.__new__(cls, val)
def __init__(self, val, unit):
assert unit in Time.unit2s, 'Unknown units'
self.unit = unit
def __str__(self):
return f'{float(self)} {self.unit}'
def __repr__(self):
return self.__str__()
def to(self, name):
if name in Time.unit2s:
return Time.unit2s[self.unit] * float(self) / Time.unit2s[name]
def __abs__(self):
return Time.unit2s[self.unit] * float(self)
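# Usage sketch (added, not part of the original helpers): conversions go through the base
# unit (metres for Length, seconds for Time), and abs() returns the value in that base unit.
print(Length(5, 'km').to('m'))  # 5000.0
print(abs(Length(25, 'cm')))    # 0.25
print(Time(2, 'h').to('min'))   # 120.0
print(abs(Time(1, 'd')))        # 86400.0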
|
class Length(float):
unit2m = dict(mm=0.001, cm=0.01, dm=0.1, m=1, km=1000)
def __new__(cls, val, unit):
return float.__new__(cls, val)
def __init__(self, val, unit):
assert unit in Length.unit2m, 'Unknown units'
self.unit = unit
def __str__(self):
return f'{float(self)} {self.unit}'
def __repr__(self):
return self.__str__()
def to(self, name):
if name in Length.unit2m:
return Length.unit2m[self.unit] * float(self) / Length.unit2m[name]
def __abs__(self):
return Length.unit2m[self.unit] * float(self)
class Time(float):
unit2s = dict(s=1, min=60, h=3600, d=24 * 3600, y=365.25 * 24 * 3600, ky=1000.0 * 365.25 * 24 * 3600, Ma=1000000.0 * 365.25 * 24 * 3600)
def __new__(cls, val, unit):
return float.__new__(cls, val)
def __init__(self, val, unit):
assert unit in Time.unit2s, 'Unknown units'
self.unit = unit
def __str__(self):
return f'{float(self)} {self.unit}'
def __repr__(self):
return self.__str__()
def to(self, name):
if name in Time.unit2s:
return Time.unit2s[self.unit] * float(self) / Time.unit2s[name]
def __abs__(self):
return Time.unit2s[self.unit] * float(self)
|
command = input()
student_ticket = 0
standard_ticket = 0
kid_ticket = 0
while command != "Finish":
seats = int(input())
ticket_type = input()
tickets_sold = 0
while seats > tickets_sold:
tickets_sold += 1
if ticket_type == "student":
student_ticket += 1
elif ticket_type == "standard":
standard_ticket += 1
elif ticket_type == "kid":
kid_ticket += 1
if tickets_sold == seats:
break
ticket_type = input()
if ticket_type == "End":
break
print(f"{command} - {tickets_sold / seats * 100:.2f}% full.")
command = input()
total_tickets = student_ticket + standard_ticket + kid_ticket
print(f"Total tickets: {total_tickets}")
print(f"{student_ticket / total_tickets * 100:.2f}% student tickets.")
print(f"{standard_ticket / total_tickets * 100:.2f}% standard tickets.")
print(f"{kid_ticket / total_tickets * 100:.2f}% kids tickets.")
|
command = input()
student_ticket = 0
standard_ticket = 0
kid_ticket = 0
while command != 'Finish':
seats = int(input())
ticket_type = input()
tickets_sold = 0
while seats > tickets_sold:
tickets_sold += 1
if ticket_type == 'student':
student_ticket += 1
elif ticket_type == 'standard':
standard_ticket += 1
elif ticket_type == 'kid':
kid_ticket += 1
if tickets_sold == seats:
break
ticket_type = input()
if ticket_type == 'End':
break
print(f'{command} - {tickets_sold / seats * 100:.2f}% full.')
command = input()
total_tickets = student_ticket + standard_ticket + kid_ticket
print(f'Total tickets: {total_tickets}')
print(f'{student_ticket / total_tickets * 100:.2f}% student tickets.')
print(f'{standard_ticket / total_tickets * 100:.2f}% standard tickets.')
print(f'{kid_ticket / total_tickets * 100:.2f}% kids tickets.')
|
META_SITE_DOMAIN = 'www.geocoptix.com'
META_USE_OG_PROPERTIES = True
META_USE_TWITTER_PROPERTIES = True
META_TWITTER_AUTHOR = 'geocoptix'
META_FB_AUTHOR_URL = 'https://facebook.com/geocoptix'
|
meta_site_domain = 'www.geocoptix.com'
meta_use_og_properties = True
meta_use_twitter_properties = True
meta_twitter_author = 'geocoptix'
meta_fb_author_url = 'https://facebook.com/geocoptix'
|
def extractMyFirstTimeTranslating(item):
"""
'My First Time Translating'
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol or frag) or 'preview' in item['title'].lower():
return None
return False
|
def extract_my_first_time_translating(item):
"""
'My First Time Translating'
"""
(vol, chp, frag, postfix) = extract_vol_chapter_fragment_postfix(item['title'])
if not (chp or vol or frag) or 'preview' in item['title'].lower():
return None
return False
|
class Toggle_Button(QPushButton):
sent_fix = pyqtSignal(bool, int, bool)
def __init__(self, currentRowCount):
QPushButton.__init__(self, "ON")
# self.setFixedSize(100, 100)
        self.currentRowCount = currentRowCount  # QPushButton has no rowCount(); keep the value passed in
self.setStyleSheet("background-color: green")
self.setCheckable(True)
self.toggled.connect(self.slot_toggle)
# when toggled connect initial state is True in order to make first click as OFF then True state must be red and OFF
@pyqtSlot(bool)
def slot_toggle(self, state):
print(self.currentRowCount, state)
self.sent_fix.emit(False, self.currentRowCount, state)
self.setStyleSheet("background-color: %s" % ({True: "red", False: "green"}[state]))
self.setText({True: "OFF", False: "ON"}[state])
|
class Toggle_Button(QPushButton):
    sent_fix = pyqtSignal(bool, int, bool)
    def __init__(self, currentRowCount):
        QPushButton.__init__(self, 'ON')
        self.currentRowCount = currentRowCount  # QPushButton has no rowCount(); keep the value passed in
self.setStyleSheet('background-color: green')
self.setCheckable(True)
self.toggled.connect(self.slot_toggle)
    @pyqtSlot(bool)
def slot_toggle(self, state):
print(self.currentRowCount, state)
self.sent_fix.emit(False, self.currentRowCount, state)
self.setStyleSheet('background-color: %s' % {True: 'red', False: 'green'}[state])
self.setText({True: 'OFF', False: 'ON'}[state])
|
TBD = None
img_norm_cfg = dict(mean=TBD, std=TBD, to_rgb=TBD)
train_pipeline = TBD
test_pipeline = TBD
# dataset settings
dataset_type = 'VOCDataset'
data_root = 'data/VOCdevkit/'
dataset_repeats = 10
data = dict(
samples_per_gpu=TBD,
workers_per_gpu=TBD,
train=dict(
type='RepeatDataset',
times=dataset_repeats,
dataset=dict(
type=dataset_type,
ann_file=[
data_root + 'VOC2007/ImageSets/Main/trainval.txt',
data_root + 'VOC2012/ImageSets/Main/trainval.txt'
],
img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'],
pipeline=train_pipeline)),
val=dict(
type=dataset_type,
ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt',
img_prefix=data_root + 'VOC2007/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt',
img_prefix=data_root + 'VOC2007/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='mAP')
|
TBD = None
img_norm_cfg = dict(mean=TBD, std=TBD, to_rgb=TBD)
train_pipeline = TBD
test_pipeline = TBD
dataset_type = 'VOCDataset'
data_root = 'data/VOCdevkit/'
dataset_repeats = 10
data = dict(samples_per_gpu=TBD, workers_per_gpu=TBD, train=dict(type='RepeatDataset', times=dataset_repeats, dataset=dict(type=dataset_type, ann_file=[data_root + 'VOC2007/ImageSets/Main/trainval.txt', data_root + 'VOC2012/ImageSets/Main/trainval.txt'], img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], pipeline=train_pipeline)), val=dict(type=dataset_type, ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', img_prefix=data_root + 'VOC2007/', pipeline=test_pipeline), test=dict(type=dataset_type, ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', img_prefix=data_root + 'VOC2007/', pipeline=test_pipeline))
evaluation = dict(interval=1, metric='mAP')
|
load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
load(
"//:coursier.bzl",
"add_netrc_entries_from_mirror_urls",
"compute_dependency_inputs_signature",
"extract_netrc_from_auth_url",
"get_coursier_cache_or_default",
"get_netrc_lines_from_entries",
"remove_auth_from_url",
"split_url",
infer = "infer_artifact_path_from_primary_and_repos",
)
ALL_TESTS = []
def add_test(test_impl_func):
test = unittest.make(test_impl_func)
ALL_TESTS.append(test)
return test
def _infer_doc_example_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"group/path/to/artifact/file.jar",
infer("http://a:b@c/group/path/to/artifact/file.jar", ["http://c"]),
)
return unittest.end(env)
infer_doc_example_test = add_test(_infer_doc_example_test_impl)
def _infer_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("https://base/group/artifact/version/foo.jar", ["https://base"]),
)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("http://base/group/artifact/version/foo.jar", ["http://base"]),
)
return unittest.end(env)
infer_basic_test = add_test(_infer_basic_test_impl)
def _infer_auth_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"group1/artifact/version/foo.jar",
infer("https://a@c/group1/artifact/version/foo.jar", ["https://a:b@c"]),
)
asserts.equals(
env,
"group2/artifact/version/foo.jar",
infer("https://a@c/group2/artifact/version/foo.jar", ["https://a@c"]),
)
asserts.equals(
env,
"group3/artifact/version/foo.jar",
infer("https://a@c/group3/artifact/version/foo.jar", ["https://c"]),
)
return unittest.end(env)
infer_auth_basic_test = add_test(_infer_auth_basic_test_impl)
def _infer_leading_repo_miss_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("https://a@c/group/artifact/version/foo.jar", ["https://a:b@c/missubdir", "https://a:b@c"]),
)
return unittest.end(env)
infer_leading_repo_miss_test = add_test(_infer_leading_repo_miss_test_impl)
def _infer_repo_trailing_slash_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("https://a@c/group/artifact/version/foo.jar", ["https://a:b@c"]),
)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("https://a@c/group/artifact/version/foo.jar", ["https://a:b@c/"]),
)
asserts.equals(
env,
"group/artifact/version/foo.jar",
infer("https://a@c/group/artifact/version/foo.jar", ["https://a:b@c//"]),
)
return unittest.end(env)
infer_repo_trailing_slash_test = add_test(_infer_repo_trailing_slash_test_impl)
def _remove_auth_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"https://c1",
remove_auth_from_url("https://a:b@c1"),
)
return unittest.end(env)
remove_auth_basic_test = add_test(_remove_auth_basic_test_impl)
def _remove_auth_basic_with_path_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"https://c1/some/random/path",
remove_auth_from_url("https://a:b@c1/some/random/path"),
)
return unittest.end(env)
remove_auth_basic_with_path_test = add_test(_remove_auth_basic_with_path_test_impl)
def _remove_auth_only_user_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"https://c1",
remove_auth_from_url("https://a@c1"),
)
return unittest.end(env)
remove_auth_only_user_test = add_test(_remove_auth_only_user_test_impl)
def _remove_auth_noauth_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
"https://c1",
remove_auth_from_url("https://c1"),
)
return unittest.end(env)
remove_auth_noauth_noop_test = add_test(_remove_auth_noauth_noop_test_impl)
def _split_url_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
("https", ["c1"]),
split_url("https://c1"),
)
return unittest.end(env)
split_url_basic_test = add_test(_split_url_basic_test_impl)
def _split_url_basic_auth_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
("https", ["a:b@c1"]),
split_url("https://a:b@c1"),
)
asserts.equals(
env,
("https", ["a@c1"]),
split_url("https://a@c1"),
)
return unittest.end(env)
split_url_basic_auth_test = add_test(_split_url_basic_auth_test_impl)
def _split_url_with_path_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
("https", ["c1", "some", "path"]),
split_url("https://c1/some/path"),
)
return unittest.end(env)
split_url_with_path_test = add_test(_split_url_with_path_test_impl)
def _extract_netrc_from_auth_url_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{},
extract_netrc_from_auth_url("https://c1"),
)
asserts.equals(
env,
{},
extract_netrc_from_auth_url("https://c2/useless@inurl"),
)
return unittest.end(env)
extract_netrc_from_auth_url_noop_test = add_test(_extract_netrc_from_auth_url_noop_test_impl)
def _extract_netrc_from_auth_url_with_auth_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{"machine": "c", "login": "a", "password": "b"},
extract_netrc_from_auth_url("https://a:b@c"),
)
asserts.equals(
env,
{"machine": "c", "login": "a", "password": "b"},
extract_netrc_from_auth_url("https://a:b@c/some/other/stuff@thisplace/for/testing"),
)
asserts.equals(
env,
{"machine": "c", "login": "a", "password": None},
extract_netrc_from_auth_url("https://a@c"),
)
asserts.equals(
env,
{"machine": "c", "login": "a", "password": None},
extract_netrc_from_auth_url("https://a@c/some/other/stuff@thisplace/for/testing"),
)
return unittest.end(env)
extract_netrc_from_auth_url_with_auth_test = add_test(_extract_netrc_from_auth_url_with_auth_test_impl)
def _extract_netrc_from_auth_url_at_in_password_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{"machine": "c", "login": "a", "password": "p@ssword"},
extract_netrc_from_auth_url("https://a:p@ssword@c"),
)
return unittest.end(env)
extract_netrc_from_auth_url_at_in_password_test = add_test(_extract_netrc_from_auth_url_at_in_password_test_impl)
def _add_netrc_entries_from_mirror_urls_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{},
add_netrc_entries_from_mirror_urls({}, ["https://c1", "https://c1/something@there"]),
)
return unittest.end(env)
add_netrc_entries_from_mirror_urls_noop_test = add_test(_add_netrc_entries_from_mirror_urls_noop_test_impl)
def _add_netrc_entries_from_mirror_urls_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{"c1": {"a": "b"}},
add_netrc_entries_from_mirror_urls({}, ["https://a:b@c1"]),
)
asserts.equals(
env,
{"c1": {"a": "b"}},
add_netrc_entries_from_mirror_urls(
{"c1": {"a": "b"}},
["https://a:b@c1"],
),
)
return unittest.end(env)
add_netrc_entries_from_mirror_urls_basic_test = add_test(_add_netrc_entries_from_mirror_urls_basic_test_impl)
def _add_netrc_entries_from_mirror_urls_multi_login_ignored_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{"c1": {"a": "b"}},
add_netrc_entries_from_mirror_urls({}, ["https://a:b@c1", "https://a:b2@c1", "https://a2:b3@c1"]),
)
asserts.equals(
env,
{"c1": {"a": "b"}},
add_netrc_entries_from_mirror_urls(
{"c1": {"a": "b"}},
["https://a:b@c1", "https://a:b2@c1", "https://a2:b3@c1"],
),
)
return unittest.end(env)
add_netrc_entries_from_mirror_urls_multi_login_ignored_test = add_test(_add_netrc_entries_from_mirror_urls_multi_login_ignored_test_impl)
def _add_netrc_entries_from_mirror_urls_multi_case_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
{
"foo": {"bar": "baz"},
"c1": {"a1": "b1"},
"c2": {"a2": "b2"},
},
add_netrc_entries_from_mirror_urls(
{"foo": {"bar": "baz"}},
["https://a1:b1@c1", "https://a2:b2@c2", "https://a:b@c1", "https://a:b2@c1", "https://a2:b3@c1"],
),
)
return unittest.end(env)
add_netrc_entries_from_mirror_urls_multi_case_test = add_test(_add_netrc_entries_from_mirror_urls_multi_case_test_impl)
def _get_netrc_lines_from_entries_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
[],
get_netrc_lines_from_entries({}),
)
return unittest.end(env)
get_netrc_lines_from_entries_noop_test = add_test(_get_netrc_lines_from_entries_noop_test_impl)
def _get_netrc_lines_from_entries_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
["machine c", "login a", "password b"],
get_netrc_lines_from_entries({
"c": {"a": "b"},
}),
)
return unittest.end(env)
get_netrc_lines_from_entries_basic_test = add_test(_get_netrc_lines_from_entries_basic_test_impl)
def _get_netrc_lines_from_entries_no_pass_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
["machine c", "login a"],
get_netrc_lines_from_entries({
"c": {"a": ""},
}),
)
return unittest.end(env)
get_netrc_lines_from_entries_no_pass_test = add_test(_get_netrc_lines_from_entries_no_pass_test_impl)
def _get_netrc_lines_from_entries_multi_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(
env,
[
"machine c",
"login a",
"password b",
"machine c2",
"login a2",
"password p@ssword",
],
get_netrc_lines_from_entries({
"c": {"a": "b"},
"c2": {"a2": "p@ssword"},
}),
)
return unittest.end(env)
get_netrc_lines_from_entries_multi_test = add_test(_get_netrc_lines_from_entries_multi_test_impl)
def _mock_repo_path(path):
if path.startswith("/"):
return path
else:
return "/mockroot/" + path
def _mock_which(path):
return False
def _get_coursier_cache_or_default_disabled_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(
os = struct(
environ = {
"COURSIER_CACHE": _mock_repo_path("/does/not/matter"),
},
name = "linux",
),
which = _mock_which,
)
asserts.equals(
env,
"v1",
get_coursier_cache_or_default(mock_repository_ctx, False),
)
return unittest.end(env)
get_coursier_cache_or_default_disabled_test = add_test(_get_coursier_cache_or_default_disabled_test)
def _get_coursier_cache_or_default_enabled_with_default_location_linux_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(
os = struct(
environ = {
"HOME": "/home/testuser",
},
name = "linux",
),
which = _mock_which,
)
asserts.equals(
env,
"/home/testuser/.cache/coursier/v1",
get_coursier_cache_or_default(mock_repository_ctx, True),
)
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_default_location_linux_test = add_test(_get_coursier_cache_or_default_enabled_with_default_location_linux_test)
def _get_coursier_cache_or_default_enabled_with_default_location_mac_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(
os = struct(
environ = {
"HOME": "/Users/testuser",
},
name = "mac",
),
which = _mock_which,
)
asserts.equals(
env,
"/Users/testuser/Library/Caches/Coursier/v1",
get_coursier_cache_or_default(mock_repository_ctx, True),
)
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_default_location_mac_test = add_test(_get_coursier_cache_or_default_enabled_with_default_location_mac_test)
def _get_coursier_cache_or_default_enabled_with_custom_location_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(
os = struct(
environ = {
"COURSIER_CACHE": _mock_repo_path("/custom/location"),
},
name = "linux",
),
which = _mock_which,
)
asserts.equals(
env,
"/custom/location",
get_coursier_cache_or_default(mock_repository_ctx, True),
)
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_custom_location_test = add_test(_get_coursier_cache_or_default_enabled_with_custom_location_test)
def _mock_which_true(path):
return True
def _mock_execute(args):
if args[-1] == "/Users/testuser/Library/Caches/Coursier/v1":
return struct(return_code = 1)
else:
return struct(return_code = 0)
def _get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(
os = struct(
environ = {
"HOME": "/Users/testuser",
},
name = "mac",
),
which = _mock_which_true,
execute = _mock_execute,
)
asserts.equals(
env,
"/Users/testuser/.coursier/cache/v1",
get_coursier_cache_or_default(mock_repository_ctx, True),
)
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test = add_test(_get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test)
def _calculate_inputs_hash_does_not_care_about_input_order_test(ctx):
env = unittest.begin(ctx)
# Order of artifacts is switched in each hash
hash1 = compute_dependency_inputs_signature([
"""{"group": "first", "artifact": "artifact", "version": "version"}""",
"""{"group": "second", "artifact": "artifact", "version": "version"}""",
])
hash2 = compute_dependency_inputs_signature([
"""{"group": "second", "artifact": "artifact", "version": "version"}""",
"""{"group": "first", "artifact": "artifact", "version": "version"}""",
])
    asserts.equals(env, hash1, hash2)
    return unittest.end(env)
calculate_inputs_hash_does_not_care_about_input_order_test = add_test(_calculate_inputs_hash_does_not_care_about_input_order_test)
def coursier_test_suite():
unittest.suite(
"coursier_tests",
*ALL_TESTS
)
|
load('@bazel_skylib//lib:unittest.bzl', 'asserts', 'unittest')
load('//:coursier.bzl', 'add_netrc_entries_from_mirror_urls', 'compute_dependency_inputs_signature', 'extract_netrc_from_auth_url', 'get_coursier_cache_or_default', 'get_netrc_lines_from_entries', 'remove_auth_from_url', 'split_url', infer='infer_artifact_path_from_primary_and_repos')
all_tests = []
def add_test(test_impl_func):
test = unittest.make(test_impl_func)
    all_tests.append(test)
return test
def _infer_doc_example_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'group/path/to/artifact/file.jar', infer('http://a:b@c/group/path/to/artifact/file.jar', ['http://c']))
return unittest.end(env)
infer_doc_example_test = add_test(_infer_doc_example_test_impl)
def _infer_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'group/artifact/version/foo.jar', infer('https://base/group/artifact/version/foo.jar', ['https://base']))
asserts.equals(env, 'group/artifact/version/foo.jar', infer('http://base/group/artifact/version/foo.jar', ['http://base']))
return unittest.end(env)
infer_basic_test = add_test(_infer_basic_test_impl)
def _infer_auth_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'group1/artifact/version/foo.jar', infer('https://a@c/group1/artifact/version/foo.jar', ['https://a:b@c']))
asserts.equals(env, 'group2/artifact/version/foo.jar', infer('https://a@c/group2/artifact/version/foo.jar', ['https://a@c']))
asserts.equals(env, 'group3/artifact/version/foo.jar', infer('https://a@c/group3/artifact/version/foo.jar', ['https://c']))
return unittest.end(env)
infer_auth_basic_test = add_test(_infer_auth_basic_test_impl)
def _infer_leading_repo_miss_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'group/artifact/version/foo.jar', infer('https://a@c/group/artifact/version/foo.jar', ['https://a:b@c/missubdir', 'https://a:b@c']))
return unittest.end(env)
infer_leading_repo_miss_test = add_test(_infer_leading_repo_miss_test_impl)
def _infer_repo_trailing_slash_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'group/artifact/version/foo.jar', infer('https://a@c/group/artifact/version/foo.jar', ['https://a:b@c']))
asserts.equals(env, 'group/artifact/version/foo.jar', infer('https://a@c/group/artifact/version/foo.jar', ['https://a:b@c/']))
asserts.equals(env, 'group/artifact/version/foo.jar', infer('https://a@c/group/artifact/version/foo.jar', ['https://a:b@c//']))
return unittest.end(env)
infer_repo_trailing_slash_test = add_test(_infer_repo_trailing_slash_test_impl)
def _remove_auth_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'https://c1', remove_auth_from_url('https://a:b@c1'))
return unittest.end(env)
remove_auth_basic_test = add_test(_remove_auth_basic_test_impl)
def _remove_auth_basic_with_path_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'https://c1/some/random/path', remove_auth_from_url('https://a:b@c1/some/random/path'))
return unittest.end(env)
remove_auth_basic_with_path_test = add_test(_remove_auth_basic_with_path_test_impl)
def _remove_auth_only_user_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'https://c1', remove_auth_from_url('https://a@c1'))
return unittest.end(env)
remove_auth_only_user_test = add_test(_remove_auth_only_user_test_impl)
def _remove_auth_noauth_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, 'https://c1', remove_auth_from_url('https://c1'))
return unittest.end(env)
remove_auth_noauth_noop_test = add_test(_remove_auth_noauth_noop_test_impl)
def _split_url_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ('https', ['c1']), split_url('https://c1'))
return unittest.end(env)
split_url_basic_test = add_test(_split_url_basic_test_impl)
def _split_url_basic_auth_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ('https', ['a:b@c1']), split_url('https://a:b@c1'))
asserts.equals(env, ('https', ['a@c1']), split_url('https://a@c1'))
return unittest.end(env)
split_url_basic_auth_test = add_test(_split_url_basic_auth_test_impl)
def _split_url_with_path_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ('https', ['c1', 'some', 'path']), split_url('https://c1/some/path'))
return unittest.end(env)
split_url_with_path_test = add_test(_split_url_with_path_test_impl)
def _extract_netrc_from_auth_url_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {}, extract_netrc_from_auth_url('https://c1'))
asserts.equals(env, {}, extract_netrc_from_auth_url('https://c2/useless@inurl'))
return unittest.end(env)
extract_netrc_from_auth_url_noop_test = add_test(_extract_netrc_from_auth_url_noop_test_impl)
def _extract_netrc_from_auth_url_with_auth_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {'machine': 'c', 'login': 'a', 'password': 'b'}, extract_netrc_from_auth_url('https://a:b@c'))
asserts.equals(env, {'machine': 'c', 'login': 'a', 'password': 'b'}, extract_netrc_from_auth_url('https://a:b@c/some/other/stuff@thisplace/for/testing'))
asserts.equals(env, {'machine': 'c', 'login': 'a', 'password': None}, extract_netrc_from_auth_url('https://a@c'))
asserts.equals(env, {'machine': 'c', 'login': 'a', 'password': None}, extract_netrc_from_auth_url('https://a@c/some/other/stuff@thisplace/for/testing'))
return unittest.end(env)
extract_netrc_from_auth_url_with_auth_test = add_test(_extract_netrc_from_auth_url_with_auth_test_impl)
def _extract_netrc_from_auth_url_at_in_password_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {'machine': 'c', 'login': 'a', 'password': 'p@ssword'}, extract_netrc_from_auth_url('https://a:p@ssword@c'))
return unittest.end(env)
extract_netrc_from_auth_url_at_in_password_test = add_test(_extract_netrc_from_auth_url_at_in_password_test_impl)
def _add_netrc_entries_from_mirror_urls_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {}, add_netrc_entries_from_mirror_urls({}, ['https://c1', 'https://c1/something@there']))
return unittest.end(env)
add_netrc_entries_from_mirror_urls_noop_test = add_test(_add_netrc_entries_from_mirror_urls_noop_test_impl)
def _add_netrc_entries_from_mirror_urls_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {'c1': {'a': 'b'}}, add_netrc_entries_from_mirror_urls({}, ['https://a:b@c1']))
asserts.equals(env, {'c1': {'a': 'b'}}, add_netrc_entries_from_mirror_urls({'c1': {'a': 'b'}}, ['https://a:b@c1']))
return unittest.end(env)
add_netrc_entries_from_mirror_urls_basic_test = add_test(_add_netrc_entries_from_mirror_urls_basic_test_impl)
def _add_netrc_entries_from_mirror_urls_multi_login_ignored_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {'c1': {'a': 'b'}}, add_netrc_entries_from_mirror_urls({}, ['https://a:b@c1', 'https://a:b2@c1', 'https://a2:b3@c1']))
asserts.equals(env, {'c1': {'a': 'b'}}, add_netrc_entries_from_mirror_urls({'c1': {'a': 'b'}}, ['https://a:b@c1', 'https://a:b2@c1', 'https://a2:b3@c1']))
return unittest.end(env)
add_netrc_entries_from_mirror_urls_multi_login_ignored_test = add_test(_add_netrc_entries_from_mirror_urls_multi_login_ignored_test_impl)
def _add_netrc_entries_from_mirror_urls_multi_case_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, {'foo': {'bar': 'baz'}, 'c1': {'a1': 'b1'}, 'c2': {'a2': 'b2'}}, add_netrc_entries_from_mirror_urls({'foo': {'bar': 'baz'}}, ['https://a1:b1@c1', 'https://a2:b2@c2', 'https://a:b@c1', 'https://a:b2@c1', 'https://a2:b3@c1']))
return unittest.end(env)
add_netrc_entries_from_mirror_urls_multi_case_test = add_test(_add_netrc_entries_from_mirror_urls_multi_case_test_impl)
def _get_netrc_lines_from_entries_noop_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, [], get_netrc_lines_from_entries({}))
return unittest.end(env)
get_netrc_lines_from_entries_noop_test = add_test(_get_netrc_lines_from_entries_noop_test_impl)
def _get_netrc_lines_from_entries_basic_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ['machine c', 'login a', 'password b'], get_netrc_lines_from_entries({'c': {'a': 'b'}}))
return unittest.end(env)
get_netrc_lines_from_entries_basic_test = add_test(_get_netrc_lines_from_entries_basic_test_impl)
def _get_netrc_lines_from_entries_no_pass_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ['machine c', 'login a'], get_netrc_lines_from_entries({'c': {'a': ''}}))
return unittest.end(env)
get_netrc_lines_from_entries_no_pass_test = add_test(_get_netrc_lines_from_entries_no_pass_test_impl)
def _get_netrc_lines_from_entries_multi_test_impl(ctx):
env = unittest.begin(ctx)
asserts.equals(env, ['machine c', 'login a', 'password b', 'machine c2', 'login a2', 'password p@ssword'], get_netrc_lines_from_entries({'c': {'a': 'b'}, 'c2': {'a2': 'p@ssword'}}))
return unittest.end(env)
get_netrc_lines_from_entries_multi_test = add_test(_get_netrc_lines_from_entries_multi_test_impl)
def _mock_repo_path(path):
if path.startswith('/'):
return path
else:
return '/mockroot/' + path
def _mock_which(path):
return False
def _get_coursier_cache_or_default_disabled_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(os=struct(environ={'COURSIER_CACHE': _mock_repo_path('/does/not/matter')}, name='linux'), which=_mock_which)
asserts.equals(env, 'v1', get_coursier_cache_or_default(mock_repository_ctx, False))
return unittest.end(env)
get_coursier_cache_or_default_disabled_test = add_test(_get_coursier_cache_or_default_disabled_test)
def _get_coursier_cache_or_default_enabled_with_default_location_linux_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(os=struct(environ={'HOME': '/home/testuser'}, name='linux'), which=_mock_which)
asserts.equals(env, '/home/testuser/.cache/coursier/v1', get_coursier_cache_or_default(mock_repository_ctx, True))
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_default_location_linux_test = add_test(_get_coursier_cache_or_default_enabled_with_default_location_linux_test)
def _get_coursier_cache_or_default_enabled_with_default_location_mac_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(os=struct(environ={'HOME': '/Users/testuser'}, name='mac'), which=_mock_which)
asserts.equals(env, '/Users/testuser/Library/Caches/Coursier/v1', get_coursier_cache_or_default(mock_repository_ctx, True))
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_default_location_mac_test = add_test(_get_coursier_cache_or_default_enabled_with_default_location_mac_test)
def _get_coursier_cache_or_default_enabled_with_custom_location_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(os=struct(environ={'COURSIER_CACHE': _mock_repo_path('/custom/location')}, name='linux'), which=_mock_which)
asserts.equals(env, '/custom/location', get_coursier_cache_or_default(mock_repository_ctx, True))
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_custom_location_test = add_test(_get_coursier_cache_or_default_enabled_with_custom_location_test)
def _mock_which_true(path):
return True
def _mock_execute(args):
if args[-1] == '/Users/testuser/Library/Caches/Coursier/v1':
return struct(return_code=1)
else:
return struct(return_code=0)
def _get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test(ctx):
env = unittest.begin(ctx)
mock_repository_ctx = struct(os=struct(environ={'HOME': '/Users/testuser'}, name='mac'), which=_mock_which_true, execute=_mock_execute)
asserts.equals(env, '/Users/testuser/.coursier/cache/v1', get_coursier_cache_or_default(mock_repository_ctx, True))
return unittest.end(env)
get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test = add_test(_get_coursier_cache_or_default_enabled_with_home_dot_coursier_directory_test)
def _calculate_inputs_hash_does_not_care_about_input_order_test(ctx):
env = unittest.begin(ctx)
hash1 = compute_dependency_inputs_signature(['{"group": "first", "artifact": "artifact", "version": "version"}', '{"group": "second", "artifact": "artifact", "version": "version"}'])
hash2 = compute_dependency_inputs_signature(['{"group": "second", "artifact": "artifact", "version": "version"}', '{"group": "first", "artifact": "artifact", "version": "version"}'])
    asserts.equals(env, hash1, hash2)
    return unittest.end(env)
calculate_inputs_hash_does_not_care_about_input_order_test = add_test(_calculate_inputs_hash_does_not_care_about_input_order_test)
def coursier_test_suite():
    unittest.suite('coursier_tests', *all_tests)
|
# ------------------------------------------------------------------------------
#
class Attributes (object) :
# FIXME: add method sigs
# --------------------------------------------------------------------------
#
def __init__ (self, vals={}) :
raise Exception ("%s is not implemented" % self.__class__.__name__)
# ------------------------------------------------------------------------------
#
|
class Attributes(object):
def __init__(self, vals={}):
        raise Exception('%s is not implemented' % self.__class__.__name__)
|
class IdGenerator(object):
number = 0
@staticmethod
def next():
tmp = IdGenerator.number
IdGenerator.number += 1
return str(tmp)
|
class Idgenerator(object):
number = 0
@staticmethod
def next():
        tmp = Idgenerator.number
        Idgenerator.number += 1
return str(tmp)
|
# Copyright (c) 2016 Vivaldi Technologies AS. All rights reserved
{
'targets': [
{
'target_name': 'vivaldi_browser',
'type': 'static_library',
'dependencies': [
'app/vivaldi_resources.gyp:*',
'chromium/base/base.gyp:base',
'chromium/components/components.gyp:search_engines',
'chromium/chrome/chrome_resources.gyp:chrome_resources',
'chromium/chrome/chrome_resources.gyp:chrome_strings',
'chromium/crypto/crypto.gyp:crypto',
'chromium/chrome/chrome.gyp:browser_extensions',
'chromium/components/components.gyp:os_crypt',
'chromium/skia/skia.gyp:skia',
'extensions/vivaldi_api_resources.gyp:*',
'vivaldi_extensions',
'vivaldi_preferences',
],
'include_dirs': [
'.',
'chromium',
],
'sources': [
'app/vivaldi.rc',
'app/vivaldi_commands.h',
'app/vivaldi_command_controller.cpp',
'app/vivaldi_command_controller.h',
'clientparts/vivaldi_content_browser_client_parts.h',
'clientparts/vivaldi_content_browser_client_parts.cc',
'extraparts/vivaldi_browser_main_extra_parts.h',
'extraparts/vivaldi_browser_main_extra_parts.cc',
'importer/imported_notes_entry.cpp',
'importer/imported_notes_entry.h',
'importer/viv_importer.cpp',
'importer/viv_importer.h',
'importer/chrome_importer_bookmark.cpp',
'importer/chromium_importer.cpp',
'importer/chromium_importer.h',
'importer/chromium_profile_importer.h',
'importer/chromium_profile_importer.cpp',
'importer/chromium_profile_lock.cpp',
'importer/chromium_profile_lock.h',
'importer/viv_importer_bookmark.cpp',
'importer/viv_importer_notes.cpp',
'importer/viv_importer_utils.h',
'importer/chrome_importer_utils.h',
'importer/viv_importer_wand.cpp',
'importer/viv_opera_reader.cpp',
'importer/viv_opera_reader.h',
'importer/chrome_bookmark_reader.cpp',
'importer/chrome_bookmark_reader.h',
'notes/notes_attachment.cpp',
'notes/notes_attachment.h',
'notes/notesnode.cpp',
'notes/notesnode.h',
'notes/notes_factory.cpp',
'notes/notes_factory.h',
'notes/notes_model.cpp',
'notes/notes_model.h',
'notes/notes_model_observer.h',
'notes/notes_model_loaded_observer.cpp',
'notes/notes_model_loaded_observer.h',
'notes/notes_storage.cpp',
'notes/notes_storage.h',
'notifications/notification_permission_context_extensions.cc',
'notifications/notification_permission_context_extensions.h',
'ui/webgui/notes_ui.cpp',
'ui/webgui/notes_ui.h',
'ui/webgui/vivaldi_web_ui_controller_factory.cpp',
'ui/webgui/vivaldi_web_ui_controller_factory.h',
'ui/views/vivaldi_pin_shortcut.cpp',
'ui/views/vivaldi_pin_shortcut.h',
],
# Disables warnings about size_t to int conversions when the types
# have different sizes
'msvs_disabled_warnings': [ 4267 ],
'conditions': [
['OS=="linux"', {
"sources":[
'extraparts/vivaldi_browser_main_extra_parts_linux.cc',
'importer/viv_importer_util_linux.cpp',
'importer/chromium_importer_util_linux.cpp',
'importer/chromium_profile_lock_posix.cpp',
],
}],
['OS=="win"', {
"sources":[
'browser/vivaldi_download_status.cpp',
'browser/vivaldi_download_status.h',
'extraparts/vivaldi_browser_main_extra_parts_win.cc',
'importer/viv_importer_util_win.cpp',
'importer/chrome_importer_util_win.cpp',
'importer/chromium_profile_lock_win.cpp',
],
}],
['OS=="mac"', {
"sources":[
'extraparts/vivaldi_browser_main_extra_parts_mac.mm',
'importer/viv_importer_util_mac.mm',
'importer/chromium_importer_util_mac.mm',
'importer/chromium_profile_lock_mac.mm',
],
}, { #'OS!="mac"
"dependencies":[
'chromium/chrome/chrome.gyp:utility',
],
}],
],
},
{
'target_name': 'vivaldi_extensions',
'type': 'static_library',
'dependencies': [
'extensions/api/vivaldi_chrome_extensions.gyp:*',
'extensions/schema/vivaldi_api.gyp:vivaldi_chrome_api',
],
'sources': [
'extensions/permissions/vivaldi_api_permissions.cpp',
'extensions/permissions/vivaldi_api_permissions.h',
'extensions/vivaldi_extensions_client.cpp',
'extensions/vivaldi_extensions_client.h',
'extensions/vivaldi_extensions_init.cpp',
'extensions/vivaldi_extensions_init.h',
],
'include_dirs': [
'.',
'chromium',
],
},
{
'target_name': 'vivaldi_packaged_app',
'type': 'none',
'dependencies': [
'vivapp/vivaldi_app.gypi:*',
],
},
{
'target_name': 'vivaldi_helper_script',
'type': 'none',
'conditions': [
['OS=="win"', {
'copies': [{
'destination': '<(PRODUCT_DIR)',
'files': [
'app/vivaldi_local_profile.bat',
],
}],
}],
['OS=="win" and target_arch == "ia32"', {
'copies': [{
'destination': '<(PRODUCT_DIR)',
'files': [
'third_party/_winsparkle_lib/WinSparkle.dll',
'third_party/_winsparkle_lib/WinSparkle.lib',
],
}],
}],
['OS=="win" and target_arch == "x64"', {
'copies': [{
'destination': '<(PRODUCT_DIR)',
'files': [
'third_party/_winsparkle_lib/x64/WinSparkle.dll',
'third_party/_winsparkle_lib/x64/WinSparkle.lib',
],
}],
}],
],
},
{
'target_name': 'vivaldi_preferences',
'type': 'static_library',
'dependencies': [
'chromium/base/base.gyp:base',
],
'sources': [
'prefs/vivaldi_pref_names.h',
'prefs/vivaldi_pref_names.cc',
],
'conditions': [
['OS=="linux"', {
"sources":[
'prefs/vivaldi_browser_prefs_linux.cc',
],
}],
['OS=="win"', {
"sources":[
'prefs/vivaldi_browser_prefs_win.cc',
],
}],
['OS=="mac"', {
"sources":[
'prefs/vivaldi_browser_prefs_mac.mm',
],
}],
],
'include_dirs': [
'.',
'chromium',
],
},
],
}
|
{'targets': [{'target_name': 'vivaldi_browser', 'type': 'static_library', 'dependencies': ['app/vivaldi_resources.gyp:*', 'chromium/base/base.gyp:base', 'chromium/components/components.gyp:search_engines', 'chromium/chrome/chrome_resources.gyp:chrome_resources', 'chromium/chrome/chrome_resources.gyp:chrome_strings', 'chromium/crypto/crypto.gyp:crypto', 'chromium/chrome/chrome.gyp:browser_extensions', 'chromium/components/components.gyp:os_crypt', 'chromium/skia/skia.gyp:skia', 'extensions/vivaldi_api_resources.gyp:*', 'vivaldi_extensions', 'vivaldi_preferences'], 'include_dirs': ['.', 'chromium'], 'sources': ['app/vivaldi.rc', 'app/vivaldi_commands.h', 'app/vivaldi_command_controller.cpp', 'app/vivaldi_command_controller.h', 'clientparts/vivaldi_content_browser_client_parts.h', 'clientparts/vivaldi_content_browser_client_parts.cc', 'extraparts/vivaldi_browser_main_extra_parts.h', 'extraparts/vivaldi_browser_main_extra_parts.cc', 'importer/imported_notes_entry.cpp', 'importer/imported_notes_entry.h', 'importer/viv_importer.cpp', 'importer/viv_importer.h', 'importer/chrome_importer_bookmark.cpp', 'importer/chromium_importer.cpp', 'importer/chromium_importer.h', 'importer/chromium_profile_importer.h', 'importer/chromium_profile_importer.cpp', 'importer/chromium_profile_lock.cpp', 'importer/chromium_profile_lock.h', 'importer/viv_importer_bookmark.cpp', 'importer/viv_importer_notes.cpp', 'importer/viv_importer_utils.h', 'importer/chrome_importer_utils.h', 'importer/viv_importer_wand.cpp', 'importer/viv_opera_reader.cpp', 'importer/viv_opera_reader.h', 'importer/chrome_bookmark_reader.cpp', 'importer/chrome_bookmark_reader.h', 'notes/notes_attachment.cpp', 'notes/notes_attachment.h', 'notes/notesnode.cpp', 'notes/notesnode.h', 'notes/notes_factory.cpp', 'notes/notes_factory.h', 'notes/notes_model.cpp', 'notes/notes_model.h', 'notes/notes_model_observer.h', 'notes/notes_model_loaded_observer.cpp', 'notes/notes_model_loaded_observer.h', 'notes/notes_storage.cpp', 'notes/notes_storage.h', 'notifications/notification_permission_context_extensions.cc', 'notifications/notification_permission_context_extensions.h', 'ui/webgui/notes_ui.cpp', 'ui/webgui/notes_ui.h', 'ui/webgui/vivaldi_web_ui_controller_factory.cpp', 'ui/webgui/vivaldi_web_ui_controller_factory.h', 'ui/views/vivaldi_pin_shortcut.cpp', 'ui/views/vivaldi_pin_shortcut.h'], 'msvs_disabled_warnings': [4267], 'conditions': [['OS=="linux"', {'sources': ['extraparts/vivaldi_browser_main_extra_parts_linux.cc', 'importer/viv_importer_util_linux.cpp', 'importer/chromium_importer_util_linux.cpp', 'importer/chromium_profile_lock_posix.cpp']}], ['OS=="win"', {'sources': ['browser/vivaldi_download_status.cpp', 'browser/vivaldi_download_status.h', 'extraparts/vivaldi_browser_main_extra_parts_win.cc', 'importer/viv_importer_util_win.cpp', 'importer/chrome_importer_util_win.cpp', 'importer/chromium_profile_lock_win.cpp']}], ['OS=="mac"', {'sources': ['extraparts/vivaldi_browser_main_extra_parts_mac.mm', 'importer/viv_importer_util_mac.mm', 'importer/chromium_importer_util_mac.mm', 'importer/chromium_profile_lock_mac.mm']}, {'dependencies': ['chromium/chrome/chrome.gyp:utility']}]]}, {'target_name': 'vivaldi_extensions', 'type': 'static_library', 'dependencies': ['extensions/api/vivaldi_chrome_extensions.gyp:*', 'extensions/schema/vivaldi_api.gyp:vivaldi_chrome_api'], 'sources': ['extensions/permissions/vivaldi_api_permissions.cpp', 'extensions/permissions/vivaldi_api_permissions.h', 'extensions/vivaldi_extensions_client.cpp', 
'extensions/vivaldi_extensions_client.h', 'extensions/vivaldi_extensions_init.cpp', 'extensions/vivaldi_extensions_init.h'], 'include_dirs': ['.', 'chromium']}, {'target_name': 'vivaldi_packaged_app', 'type': 'none', 'dependencies': ['vivapp/vivaldi_app.gypi:*']}, {'target_name': 'vivaldi_helper_script', 'type': 'none', 'conditions': [['OS=="win"', {'copies': [{'destination': '<(PRODUCT_DIR)', 'files': ['app/vivaldi_local_profile.bat']}]}], ['OS=="win" and target_arch == "ia32"', {'copies': [{'destination': '<(PRODUCT_DIR)', 'files': ['third_party/_winsparkle_lib/WinSparkle.dll', 'third_party/_winsparkle_lib/WinSparkle.lib']}]}], ['OS=="win" and target_arch == "x64"', {'copies': [{'destination': '<(PRODUCT_DIR)', 'files': ['third_party/_winsparkle_lib/x64/WinSparkle.dll', 'third_party/_winsparkle_lib/x64/WinSparkle.lib']}]}]]}, {'target_name': 'vivaldi_preferences', 'type': 'static_library', 'dependencies': ['chromium/base/base.gyp:base'], 'sources': ['prefs/vivaldi_pref_names.h', 'prefs/vivaldi_pref_names.cc'], 'conditions': [['OS=="linux"', {'sources': ['prefs/vivaldi_browser_prefs_linux.cc']}], ['OS=="win"', {'sources': ['prefs/vivaldi_browser_prefs_win.cc']}], ['OS=="mac"', {'sources': ['prefs/vivaldi_browser_prefs_mac.mm']}]], 'include_dirs': ['.', 'chromium']}]}
|
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution(object):
def rotateRight(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
current = head
length = 0
        while current:
            length += 1
            breakhere = current
            current = current.next
        if k == 0 or length <= 1:
            return head
        k = k % length
        breakAt = length - k - 1
        current = head
        while current.next:
            if breakAt == 0:
                breakhere = current
            breakAt -= 1
            current = current.next
        current.next = head
        head = breakhere.next
        breakhere.next = None
        return head
"""
linkedlist = 1->2->3->4->5
rotate = 2
Step 1 : traverse through array and to avoid edge cases keep pointer breakHere on last Node
Step 2 : find the length and calculate the Break Point
In above example the length of linked list is 5 and rotate is 2 hence the node to be break is ahead of 3
breakAt = length -k-1
Step 3 : Go to that node and keep a pointer breakHere on that Node and traverse till last node
Step 4 : Attach last node to head by doing "current.next = head"
Step 5 : Now make the new head from the point where we are going to break it
"head = breakHere.next" Do this step first so you will not lose reference
Step 6 : Break the Node
head breakHere breakHere head
| | | |
1->2->3->4->5 --- current will look like 1->2->->3 4->5
|_____________|
"""
|
class Listnode(object):
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution(object):
def rotate_right(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
current = head
length = 0
while current:
length += 1
breakhere = current
current = current.next
if k == 0 or length <= 1:
return head
k = k % length
break_at = length - k - 1
current = head
while current.next:
            if break_at == 0:
breakhere = current
break_at -= 1
current = current.next
current.next = head
head = breakhere.next
breakhere.next = None
return head
'\n\nlinkedlist = 1->2->3->4->5\nrotate = 2\n\nStep 1 : traverse through array and to avoid edge cases keep pointer breakHere on last Node\nStep 2 : find the length and calculate the Break Point\n\n In above example the length of linked list is 5 and rotate is 2 hence the node to be break is ahead of 3\n breakAt = length -k-1\n \nStep 3 : Go to that node and keep a pointer breakHere on that Node and traverse till last node\nStep 4 : Attach last node to head by doing "current.next = head"\nStep 5 : Now make the new head from the point where we are going to break it\n "head = breakHere.next" Do this step first so you will not lose reference\nStep 6 : Break the Node\n\nhead breakHere breakHere head\n| | | |\n\n1->2->3->4->5 --- current will look like 1->2->->3 4->5\n |_____________|\n\n \n \n'
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def split(arr, size):
"""Splits an array to smaller arrays of size"""
arrays = []
while len(arr) > size:
piece = arr[:size]
arrays.append(piece)
arr = arr[size:]
arrays.append(arr)
return arrays
def take_out_elements(list_object, indices):
"""Removes elements from list in specified indices"""
removed_elements = []
indices = sorted(indices, reverse=True)
for idx in indices:
if idx < len(list_object):
removed_elements.append(list_object.pop(idx))
return removed_elements
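# A short usage sketch for the two helpers above (the sample values are illustrative):
# split() chunks a list into pieces of at most `size`, and take_out_elements() pops the
# requested indices (highest index first) and returns the removed values.
print(split([1, 2, 3, 4, 5, 6, 7], 3))  # expected: [[1, 2, 3], [4, 5, 6], [7]]

items = ["a", "b", "c", "d"]
print(take_out_elements(items, [0, 2]))  # expected: ['c', 'a']
print(items)                             # expected: ['b', 'd']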
|
def split(arr, size):
"""Splits an array to smaller arrays of size"""
arrays = []
while len(arr) > size:
piece = arr[:size]
arrays.append(piece)
arr = arr[size:]
arrays.append(arr)
return arrays
def take_out_elements(list_object, indices):
"""Removes elements from list in specified indices"""
removed_elements = []
indices = sorted(indices, reverse=True)
for idx in indices:
if idx < len(list_object):
removed_elements.append(list_object.pop(idx))
return removed_elements
|
# -*- coding: utf-8 -*-
"""
1287. Element Appearing More Than 25% In Sorted Array
Given an integer array sorted in non-decreasing order, there is exactly one integer in the array that occurs
more than 25% of the time.
Return that integer.
Constraints:
1 <= arr.length <= 10^4
0 <= arr[i] <= 10^5
"""
class Solution:
def findSpecialInteger(self, arr):
occur = len(arr) / 4
count = 0
last = -1
for val in arr:
if val == last:
count += 1
else:
last = val
count = 1
if count > occur:
return val
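# A brief illustrative check of the scan above (the array is made up): 6 occupies
# 3 of 9 positions, which is more than 25%, so it should be returned.
print(Solution().findSpecialInteger([1, 2, 2, 6, 6, 6, 7, 10, 11]))  # expected: 6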
|
"""
1287. Element Appearing More Than 25% In Sorted Array
Given an integer array sorted in non-decreasing order, there is exactly one integer in the array that occurs
more than 25% of the time.
Return that integer.
Constraints:
1 <= arr.length <= 10^4
0 <= arr[i] <= 10^5
"""
class Solution:
def find_special_integer(self, arr):
occur = len(arr) / 4
count = 0
last = -1
for val in arr:
if val == last:
count += 1
else:
last = val
count = 1
if count > occur:
return val
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" ib.ext.cfg.CommissionReport -> config module for CommissionReport.java.
"""
|
""" ib.ext.cfg.CommissionReport -> config module for CommissionReport.java.
"""
|
class Solution:
def plusOne(self, digits):
"""
66. Plus One
https://leetcode.com/problems/plus-one
"""
for i in range(len(digits)):
            # index from the end: -(i + 1) walks digits from least-significant to most
            if digits[-(i + 1)] < 9:
                digits[-(i + 1)] += 1
                return digits
            digits[-(i + 1)] = 0
return [1] + [0] * len(digits)
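# A small usage sketch (inputs are illustrative): one increment without a carry and one
# that carries through every digit.
print(Solution().plusOne([1, 2, 3]))  # expected: [1, 2, 4]
print(Solution().plusOne([9, 9]))     # expected: [1, 0, 0]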
|
class Solution:
def plus_one(self, digits):
"""
66. Plus One
https://leetcode.com/problems/plus-one
"""
for i in range(len(digits)):
            # index from the end: -(i + 1) walks digits from least-significant to most
            if digits[-(i + 1)] < 9:
                digits[-(i + 1)] += 1
                return digits
            digits[-(i + 1)] = 0
return [1] + [0] * len(digits)
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [],
'conditions': [
# The CrNet build is ninja-only because of the hack in
# ios/build/packaging/link_dependencies.py.
['OS=="ios" and "<(GENERATOR)"=="ninja"', {
'targets': [
{
'target_name': 'crnet_test',
'type': 'executable',
'dependencies': [
'../../../ios/crnet/crnet.gyp:crnet',
'../../../ios/third_party/gcdwebserver/gcdwebserver.gyp:gcdwebserver',
'../../../testing/gtest.gyp:gtest',
],
'sources': [
'crnet_http_tests.mm',
'crnet_test_runner.mm',
],
'include_dirs': [
'../../..',
'..',
],
'link_settings': {
},
},
],
}],
],
}
|
{'variables': {'chromium_code': 1}, 'targets': [], 'conditions': [['OS=="ios" and "<(GENERATOR)"=="ninja"', {'targets': [{'target_name': 'crnet_test', 'type': 'executable', 'dependencies': ['../../../ios/crnet/crnet.gyp:crnet', '../../../ios/third_party/gcdwebserver/gcdwebserver.gyp:gcdwebserver', '../../../testing/gtest.gyp:gtest'], 'sources': ['crnet_http_tests.mm', 'crnet_test_runner.mm'], 'include_dirs': ['../../..', '..'], 'link_settings': {}}]}]]}
|
input = """
d(1).
d(2).
d(3).
d(4) :- #count{V : d(V)} > 2.
"""
output = """
d(1).
d(2).
d(3).
d(4) :- #count{V : d(V)} > 2.
"""
|
input = '\nd(1).\nd(2).\nd(3).\n\nd(4) :- #count{V : d(V)} > 2.\n\n'
output = '\nd(1).\nd(2).\nd(3).\n\nd(4) :- #count{V : d(V)} > 2.\n\n'
|
def parse_map(in_file):
with open(in_file) as f:
lines = f.read().splitlines()
width = len(lines[0])
height = len(lines)
points = {}
for x in range(width):
for y in range(height):
points[(x, y)] = lines[y][x]
return points, width, height
def solve(in_file):
(points, width, height) = parse_map(in_file)
trees = 0
x = 0
y = 0
while y < height:
if points[(x, y)] == "#":
trees += 1
x = (x + 3) % width
y += 1
return trees
# print(solve('sample.txt'))
print(solve("input.txt"))
|
def parse_map(in_file):
with open(in_file) as f:
lines = f.read().splitlines()
width = len(lines[0])
height = len(lines)
points = {}
for x in range(width):
for y in range(height):
points[x, y] = lines[y][x]
return (points, width, height)
def solve(in_file):
(points, width, height) = parse_map(in_file)
trees = 0
x = 0
y = 0
while y < height:
if points[x, y] == '#':
trees += 1
x = (x + 3) % width
y += 1
return trees
print(solve('input.txt'))
|
"""125. Backpack II
"""
class Solution:
"""
@param m: An integer m denotes the size of a backpack
@param A: Given n items with size A[i]
@param V: Given n items with value V[i]
@return: The maximum value
"""
def backPackII(self, m, A, V):
# write your code here
dp = [[float('-inf')] * (m + 1) for _ in range(len(A) + 1)]
dp[0][0] = 0
for i in range(1, len(A) + 1):
for j in range(m, -1, -1):
if j < A[i - 1]:
dp[i][j] = dp[i - 1][j]
else:
dp[i][j] = max(dp[i - 1][j], dp[i - 1][j - A[i - 1]] + V[i - 1])
return max(dp[-1])
#####
n = len(A)
dp = [[float('-inf')] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 0
for i in range(1, n + 1):
            for j in range(m, -1, -1):
if j < A[i - 1]:
dp[i][j] = dp[i - 1][j]
else:
dp[i][j] = max(dp[i - 1][j], dp[i - 1][j - A[i - 1]] + V[i - 1])
return max(dp[n][j] for j in range(m + 1))
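# A brief usage sketch for the knapsack DP above (numbers are illustrative): with
# capacity m=10, sizes A=[2, 3, 5, 7] and values V=[1, 5, 2, 4], picking the items of
# size 3 and 7 fills the pack exactly and yields the best value 5 + 4 = 9.
print(Solution().backPackII(10, [2, 3, 5, 7], [1, 5, 2, 4]))  # expected: 9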
|
"""125. Backpack II
"""
class Solution:
"""
@param m: An integer m denotes the size of a backpack
@param A: Given n items with size A[i]
@param V: Given n items with value V[i]
@return: The maximum value
"""
def back_pack_ii(self, m, A, V):
dp = [[float('-inf')] * (m + 1) for _ in range(len(A) + 1)]
dp[0][0] = 0
for i in range(1, len(A) + 1):
for j in range(m, -1, -1):
if j < A[i - 1]:
dp[i][j] = dp[i - 1][j]
else:
dp[i][j] = max(dp[i - 1][j], dp[i - 1][j - A[i - 1]] + V[i - 1])
return max(dp[-1])
n = len(A)
dp = [[float('-inf')] * (m + 1) for _ in range(n + 1)]
dp[0][0] = 0
for i in range(1, n + 1):
            for j in range(m, -1, -1):
if j < A[i - 1]:
dp[i][j] = dp[i - 1][j]
else:
dp[i][j] = max(dp[i - 1][j], dp[i - 1][j - A[i - 1]] + V[i - 1])
return max((dp[n][j] for j in range(m + 1)))
|
a1 = input("what's your age")
a2 = input("what's your age")
age = int(a1) - int(a2)
print(abs(age))
|
a1 = input("what's your age")
a2 = input("what's your age")
age = int(a1) - int(a2)
print(abs(age))
|