content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
#! /usr/bin/python3 # -*- coding: utf-8 -*- # @Time : 2019/3/9 2:37 PM # @Author : xiaoliji # @Email : yutian9527@gmail.com class ListNode: def __init__(self, x: int): self.val = x self.next = None def construct_linklist(nodes: 'iterable')-> 'LinkedList': vals = list(nodes) head = ListNode(0) h = head for val in vals: h.next = ListNode(val) h = h.next return head.next def pretty_linklist(head: 'LinkedList') -> str: ans = [] h = head while h: ans.append(str(h.val)) h = h.next return '->'.join(ans) class TreeNode: def __init__(self, x: int): self.val = x self.left = None self.right = None def preorder_traversal(root: TreeNode) -> list: def dfs(node): if node: yield node.val yield from dfs(node.left) yield from dfs(node.right) return list(dfs(root)) def inorder_traversal(root: TreeNode) -> list: def dfs(node): if node: yield from dfs(node.left) yield node.val yield from dfs(node.right) return list(dfs(root)) def deserialize_tree(data): nodes = data.split(',')[::-1] return deserialize_tree_util(nodes) def deserialize_tree_util(nodes): val = nodes.pop() if val == '$': return None root = TreeNode(int(val)) root.left = deserialize_tree_util(nodes) root.right = deserialize_tree_util(nodes) return root def is_same_tree(p: 'TreeNode', q: 'TreeNode') -> 'bool': if p and q: return (p.val==q.val and is_same_tree(p.left, q.left) and is_same_tree(p.right, q.right)) else: return p is q
class Listnode: def __init__(self, x: int): self.val = x self.next = None def construct_linklist(nodes: 'iterable') -> 'LinkedList': vals = list(nodes) head = list_node(0) h = head for val in vals: h.next = list_node(val) h = h.next return head.next def pretty_linklist(head: 'LinkedList') -> str: ans = [] h = head while h: ans.append(str(h.val)) h = h.next return '->'.join(ans) class Treenode: def __init__(self, x: int): self.val = x self.left = None self.right = None def preorder_traversal(root: TreeNode) -> list: def dfs(node): if node: yield node.val yield from dfs(node.left) yield from dfs(node.right) return list(dfs(root)) def inorder_traversal(root: TreeNode) -> list: def dfs(node): if node: yield from dfs(node.left) yield node.val yield from dfs(node.right) return list(dfs(root)) def deserialize_tree(data): nodes = data.split(',')[::-1] return deserialize_tree_util(nodes) def deserialize_tree_util(nodes): val = nodes.pop() if val == '$': return None root = tree_node(int(val)) root.left = deserialize_tree_util(nodes) root.right = deserialize_tree_util(nodes) return root def is_same_tree(p: 'TreeNode', q: 'TreeNode') -> 'bool': if p and q: return p.val == q.val and is_same_tree(p.left, q.left) and is_same_tree(p.right, q.right) else: return p is q
class Shape: @staticmethod #define a static method before initiating an instance def add_ally(x,y): x.append(y) return x def __init__(self, shape_type): self.shape_type=shape_type self.__allies=[] #initiate a private empty list in an instance @property def allies_public(self): #make it accessible for the subclasses return self.__allies def __create_allies(self): #create a private method that calls the static method defined above return self.add_ally(self.allies_public, self.object) @property #make it accessible by the subclasses def ally_list_p(self): return self.__create_allies() class Circle(Shape): def __init__(self, shape_type): super().__init__(shape_type) self.object='square' #define the second argument for the add_ally method self.ally_list_p #call the add_ally method self.object='other' #repeat as many times as you want self.ally_list_p def __str__(self): return "results: "+ str(self.allies_public) def main(): circle_test=Circle('circle') print(circle_test) if __name__ == '__main__': main()
class Shape: @staticmethod def add_ally(x, y): x.append(y) return x def __init__(self, shape_type): self.shape_type = shape_type self.__allies = [] @property def allies_public(self): return self.__allies def __create_allies(self): return self.add_ally(self.allies_public, self.object) @property def ally_list_p(self): return self.__create_allies() class Circle(Shape): def __init__(self, shape_type): super().__init__(shape_type) self.object = 'square' self.ally_list_p self.object = 'other' self.ally_list_p def __str__(self): return 'results: ' + str(self.allies_public) def main(): circle_test = circle('circle') print(circle_test) if __name__ == '__main__': main()
def vogal(letra): if(letra=='a') or (letra=='A'): return True if (letra=='e') or (letra=='E'): return True if (letra=='i') or (letra=='I'): return True if (letra=='o') or (letra=='O'): return True if (letra=='u') or (letra=='U'): return True else: return False
def vogal(letra): if letra == 'a' or letra == 'A': return True if letra == 'e' or letra == 'E': return True if letra == 'i' or letra == 'I': return True if letra == 'o' or letra == 'O': return True if letra == 'u' or letra == 'U': return True else: return False
# 24. Swap Nodes in Pairs ''' Given a linked list, swap every two adjacent nodes and return its head. You may not modify the values in the list's nodes, only nodes itself may be changed. Input: 1->2->3->4, Output: 2->1->4->3. ''' class Solution: def swapPairs(self, head: ListNode) -> ListNode: current = head while current: if current.next == None: return head if current.next: current.val, current.next.val = current.next.val, current.val current = current.next.next return head
""" Given a linked list, swap every two adjacent nodes and return its head. You may not modify the values in the list's nodes, only nodes itself may be changed. Input: 1->2->3->4, Output: 2->1->4->3. """ class Solution: def swap_pairs(self, head: ListNode) -> ListNode: current = head while current: if current.next == None: return head if current.next: (current.val, current.next.val) = (current.next.val, current.val) current = current.next.next return head
def encrypt_letter(msg, key): enc_id = (ord(msg) + ord(key)) % 1114112 return chr(enc_id) def decrypt_letter(msg, key): dec_id = (1114112 + ord(msg) - ord(key)) % 1114112 return chr(dec_id) def process_message(message, key, encrypt): returned_message = "" for i, letter in enumerate(message): if encrypt: returned_message += encrypt_letter(letter, key[i%len(key)]) else: returned_message += decrypt_letter(letter, key[i%len(key)]) return returned_message
def encrypt_letter(msg, key): enc_id = (ord(msg) + ord(key)) % 1114112 return chr(enc_id) def decrypt_letter(msg, key): dec_id = (1114112 + ord(msg) - ord(key)) % 1114112 return chr(dec_id) def process_message(message, key, encrypt): returned_message = '' for (i, letter) in enumerate(message): if encrypt: returned_message += encrypt_letter(letter, key[i % len(key)]) else: returned_message += decrypt_letter(letter, key[i % len(key)]) return returned_message
def pre_save_operation(instance): # If this is a new record, then someone has started it in the Admin using EITHER a legacy COVE ID # OR a PBSMM UUID. Depending on which, the retrieval endpoint is slightly different, so this sets # the appropriate URL to access. if instance.pk is None: if instance.object_id and instance.object_id.strip(): url = __get_api_url('pbsmm', instance.object_id) else: if instance.legacy_tp_media_id: url = __get_api_url('cove', str(instance.legacy_tp_media_id)) else: return # do nothing - can't get an ID to look up! # Otherwise, it's an existing record and the UUID should be used else: # Editing an existing record - do nothing if ingest_on_save is NOT checked! if not instance.ingest_on_save: return url = __get_api_url('pbsmm', instance.object_id) # OK - get the record from the API (status, json) = get_PBSMM_record(url) instance.last_api_status = status # Update this record's time stamp (the API has its own) instance.date_last_api_update = datetime.datetime.now() if status != 200: return # Process the record (code is in ingest.py) instance = process_asset_record(json, instance) # continue saving, but turn off the ingest_on_save flag instance.ingest_on_save = False # otherwise we could end up in an infinite loop! # We're done here - continue with the save() operation return
def pre_save_operation(instance): if instance.pk is None: if instance.object_id and instance.object_id.strip(): url = __get_api_url('pbsmm', instance.object_id) elif instance.legacy_tp_media_id: url = __get_api_url('cove', str(instance.legacy_tp_media_id)) else: return else: if not instance.ingest_on_save: return url = __get_api_url('pbsmm', instance.object_id) (status, json) = get_pbsmm_record(url) instance.last_api_status = status instance.date_last_api_update = datetime.datetime.now() if status != 200: return instance = process_asset_record(json, instance) instance.ingest_on_save = False return
t = int(input().strip()) result = 0 current_time, current_value = 1, 3 def get_next_interval_cycle(time, value): next_time = time + value next_value = value * 2 return next_time, next_value while True: new_time, new_value = get_next_interval_cycle( current_time, current_value ) if new_time > t: result = current_value - (t - current_time) break elif new_time == t: result = new_value current_time, current_value = new_time, new_value print(result)
t = int(input().strip()) result = 0 (current_time, current_value) = (1, 3) def get_next_interval_cycle(time, value): next_time = time + value next_value = value * 2 return (next_time, next_value) while True: (new_time, new_value) = get_next_interval_cycle(current_time, current_value) if new_time > t: result = current_value - (t - current_time) break elif new_time == t: result = new_value (current_time, current_value) = (new_time, new_value) print(result)
def acmTeam(topic): maxtopic, maxteam = 0, 1 for i in range(n): for j in range(i+1, n): know = 0 for x in range(m): if topic[i][x] == '1' or topic[j][x] == '1': know += 1 if know > maxtopic: maxtopic = know maxteam = 1 elif know > 0 and know == maxtopic: maxteam += 1 return maxtopic, maxteam
def acm_team(topic): (maxtopic, maxteam) = (0, 1) for i in range(n): for j in range(i + 1, n): know = 0 for x in range(m): if topic[i][x] == '1' or topic[j][x] == '1': know += 1 if know > maxtopic: maxtopic = know maxteam = 1 elif know > 0 and know == maxtopic: maxteam += 1 return (maxtopic, maxteam)
# flake8: noqa # tor ref src\app\config\auth_dirs.inc AUTHORITY_DIRS = """ "moria1 orport=9101 " "v3ident=D586D18309DED4CD6D57C18FDB97EFA96D330566 " "128.31.0.39:9131 9695 DFC3 5FFE B861 329B 9F1A B04C 4639 7020 CE31", "tor26 orport=443 " "v3ident=14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4 " "ipv6=[2001:858:2:2:aabb:0:563b:1526]:443 " "86.59.21.38:80 847B 1F85 0344 D787 6491 A548 92F9 0493 4E4E B85D", "dizum orport=443 " "v3ident=E8A9C45EDE6D711294FADF8E7951F4DE6CA56B58 " "45.66.33.45:80 7EA6 EAD6 FD83 083C 538F 4403 8BBF A077 587D D755", "Serge orport=9001 bridge " "66.111.2.131:9030 BA44 A889 E64B 93FA A2B1 14E0 2C2A 279A 8555 C533", "gabelmoo orport=443 " "v3ident=ED03BB616EB2F60BEC80151114BB25CEF515B226 " "ipv6=[2001:638:a000:4140::ffff:189]:443 " "131.188.40.189:80 F204 4413 DAC2 E02E 3D6B CF47 35A1 9BCA 1DE9 7281", "dannenberg orport=443 " "v3ident=0232AF901C31A04EE9848595AF9BB7620D4C5B2E " "ipv6=[2001:678:558:1000::244]:443 " "193.23.244.244:80 7BE6 83E6 5D48 1413 21C5 ED92 F075 C553 64AC 7123", "maatuska orport=80 " "v3ident=49015F787433103580E3B66A1707A00E60F2D15B " "ipv6=[2001:67c:289c::9]:80 " "171.25.193.9:443 BD6A 8292 55CB 08E6 6FBE 7D37 4836 3586 E46B 3810", "Faravahar orport=443 " "v3ident=EFCBE720AB3A82B99F9E953CD5BF50F7EEFC7B97 " "154.35.175.225:80 CF6D 0AAF B385 BE71 B8E1 11FC 5CFF 4B47 9237 33BC", "longclaw orport=443 " "v3ident=23D15D965BC35114467363C165C4F724B64B4F66 " "199.58.81.140:80 74A9 1064 6BCE EFBC D2E8 74FC 1DC9 9743 0F96 8145", "bastet orport=443 " "v3ident=27102BC123E7AF1D4741AE047E160C91ADC76B21 " "ipv6=[2620:13:4000:6000::1000:118]:443 " "204.13.164.118:80 24E2 F139 121D 4394 C54B 5BCC 368B 3B41 1857 C413", """ FALLBACK_DIRS = """ /* type=fallback */ /* version=3.0.0 */ /* timestamp=20200723133610 */ /* source=offer-list */ /* ===== */ /* Offer list excluded 1807 of 1978 candidates. */ /* Checked IPv4 DirPorts served a consensus within 15.0s. */ /* Final Count: 144 (Eligible 171, Target 447 (2239 * 0.20), Max 200) Excluded: 27 (Same Operator 15, Failed/Skipped Download 6, Excess 6) Bandwidth Range: 0.6 - 96.1 MByte/s */ /* Onionoo Source: details Date: 2020-07-23 13:00:00 Version: 8.0 URL: https:onionoo.torproject.orgdetails?fieldsfingerprint%2Cnickname%2Ccontact%2Clast_changed_address_or_port%2Cconsensus_weight%2Cadvertised_bandwidth%2Cor_addresses%2Cdir_address%2Crecommended_version%2Cflags%2Ceffective_family%2Cplatform&typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen */ /* Onionoo Source: uptime Date: 2020-07-23 13:00:00 Version: 8.0 URL: https:onionoo.torproject.orguptime?typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen */ /* ===== */ "185.225.17.3:80 orport=443 id=0338F9F55111FE8E3570E7DE117EF3AF999CC1D7" " ipv6=[2a0a:c800:1:5::3]:443" /* nickname=Nebuchadnezzar */ /* extrainfo=0 */ /* ===== */ , "81.7.10.193:9002 orport=993 id=03C3069E814E296EB18776EB61B1ECB754ED89FE" /* nickname=Ichotolot61 */ /* extrainfo=1 */ /* ===== */ , "163.172.149.155:80 orport=443 id=0B85617241252517E8ECF2CFC7F4C1A32DCD153F" /* nickname=niij02 */ /* extrainfo=0 */ /* ===== */ , "5.200.21.144:80 orport=443 id=0C039F35C2E40DCB71CD8A07E97C7FD7787D42D6" /* nickname=libel */ /* extrainfo=0 */ /* ===== */ , "81.7.18.7:9030 orport=9001 id=0C475BA4D3AA3C289B716F95954CAD616E50C4E5" /* nickname=Freebird32 */ /* extrainfo=1 */ /* ===== */ , "193.234.15.60:80 orport=443 id=0F6E5CA4BF5565D9AA9FDDCA165AFC6A5305763D" " ipv6=[2a00:1c20:4089:1234:67bc:79f3:61c0:6e49]:443" /* nickname=jaures3 */ /* extrainfo=0 */ /* ===== */ , "93.177.67.71:9030 orport=8080 id=113143469021882C3A4B82F084F8125B08EE471E" " ipv6=[2a03:4000:38:559::2]:8080" /* nickname=parasol */ /* extrainfo=0 */ /* ===== */ , "37.120.174.249:80 orport=443 id=11DF0017A43AF1F08825CD5D973297F81AB00FF3" " ipv6=[2a03:4000:6:724c:df98:15f9:b34d:443]:443" /* nickname=gGDHjdcC6zAlM8k08lX */ /* extrainfo=0 */ /* ===== */ , "193.11.114.43:9030 orport=9001 id=12AD30E5D25AA67F519780E2111E611A455FDC89" " ipv6=[2001:6b0:30:1000::99]:9050" /* nickname=mdfnet1 */ /* extrainfo=0 */ /* ===== */ , "37.157.195.87:8030 orport=443 id=12FD624EE73CEF37137C90D38B2406A66F68FAA2" /* nickname=thanatosCZ */ /* extrainfo=0 */ /* ===== */ , "193.234.15.61:80 orport=443 id=158581827034DEF1BAB1FC248D180165452E53D3" " ipv6=[2a00:1c20:4089:1234:2712:a3d0:666b:88a6]:443" /* nickname=bakunin3 */ /* extrainfo=0 */ /* ===== */ , "51.15.78.0:9030 orport=9001 id=15BE17C99FACE24470D40AF782D6A9C692AB36D6" " ipv6=[2001:bc8:1824:c4b::1]:9001" /* nickname=rofltor07 */ /* extrainfo=0 */ /* ===== */ , "204.11.50.131:9030 orport=9001 id=185F2A57B0C4620582602761097D17DB81654F70" /* nickname=BoingBoing */ /* extrainfo=0 */ /* ===== */ , "50.7.74.171:9030 orport=9001 id=1CD17CB202063C51C7DAD3BACEF87ECE81C2350F" " ipv6=[2001:49f0:d002:2::51]:443" /* nickname=theia1 */ /* extrainfo=0 */ /* ===== */ , "199.184.246.250:80 orport=443 id=1F6ABD086F40B890A33C93CC4606EE68B31C9556" " ipv6=[2620:124:1009:1::171]:443" /* nickname=dao */ /* extrainfo=0 */ /* ===== */ , "212.47.229.2:9030 orport=9001 id=20462CBA5DA4C2D963567D17D0B7249718114A68" " ipv6=[2001:bc8:47ac:23a::1]:9001" /* nickname=scaletor */ /* extrainfo=0 */ /* ===== */ , "77.247.181.164:80 orport=443 id=204DFD2A2C6A0DC1FA0EACB495218E0B661704FD" /* nickname=HaveHeart */ /* extrainfo=0 */ /* ===== */ , "163.172.176.167:80 orport=443 id=230A8B2A8BA861210D9B4BA97745AEC217A94207" /* nickname=niij01 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.57:80 orport=443 id=24D0491A2ADAAB52C17625FBC926D84477AEA322" " ipv6=[2a00:1c20:4089:1234:7825:2c5d:1ecd:c66f]:443" /* nickname=bakunin */ /* extrainfo=0 */ /* ===== */ , "185.220.101.137:20137 orport=10137 id=28F4F392F8F19E3FBDE09616D9DB8143A1E2DDD3" " ipv6=[2a0b:f4c2:1::137]:10137" /* nickname=niftycottonmouse */ /* extrainfo=0 */ /* ===== */ , "138.201.250.33:9012 orport=9011 id=2BA2C8E96B2590E1072AECE2BDB5C48921BF8510" /* nickname=storm */ /* extrainfo=0 */ /* ===== */ , "5.181.50.99:80 orport=443 id=2BB85DC5BD3C6F0D81A4F2B5882176C6BF7ECF5A" " ipv6=[2a03:4000:3f:16c:3851:6bff:fe07:bd2]:443" /* nickname=AlanTuring */ /* extrainfo=0 */ /* ===== */ , "97.74.237.196:9030 orport=9001 id=2F0F32AB1E5B943CA7D062C03F18960C86E70D94" /* nickname=Minotaur */ /* extrainfo=0 */ /* ===== */ , "94.230.208.147:8080 orport=8443 id=311A4533F7A2415F42346A6C8FA77E6FD279594C" " ipv6=[2a02:418:6017::147]:8443" /* nickname=DigiGesTor3e2 */ /* extrainfo=0 */ /* ===== */ , "109.105.109.162:52860 orport=60784 id=32EE911D968BE3E016ECA572BB1ED0A9EE43FC2F" " ipv6=[2001:948:7:2::163]:5001" /* nickname=ndnr1 */ /* extrainfo=0 */ /* ===== */ , "185.100.84.212:80 orport=443 id=330CD3DB6AD266DC70CDB512B036957D03D9BC59" " ipv6=[2a06:1700:0:7::1]:443" /* nickname=TeamTardis */ /* extrainfo=0 */ /* ===== */ , "64.79.152.132:80 orport=443 id=375DCBB2DBD94E5263BC0C015F0C9E756669617E" /* nickname=ebola */ /* extrainfo=0 */ /* ===== */ , "198.50.191.95:80 orport=443 id=39F096961ED2576975C866D450373A9913AFDC92" /* nickname=shhovh */ /* extrainfo=0 */ /* ===== */ , "50.7.74.174:9030 orport=9001 id=3AFDAAD91A15B4C6A7686A53AA8627CA871FF491" " ipv6=[2001:49f0:d002:2::57]:443" /* nickname=theia7 */ /* extrainfo=0 */ /* ===== */ , "212.83.154.33:8888 orport=443 id=3C79699D4FBC37DE1A212D5033B56DAE079AC0EF" " ipv6=[2001:bc8:31d3:1dd::1]:443" /* nickname=bauruine203 */ /* extrainfo=0 */ /* ===== */ , "51.38.65.160:9030 orport=9001 id=3CB4193EF4E239FCEDC4DC43468E0B0D6B67ACC3" " ipv6=[2001:41d0:801:2000::f6e]:9001" /* nickname=rofltor10 */ /* extrainfo=0 */ /* ===== */ , "95.216.211.81:80 orport=443 id=3CCF9573F59137E52787D9C322AC19D2BD090B70" " ipv6=[2a01:4f9:c010:4dfa::1]:443" /* nickname=BurningMan */ /* extrainfo=0 */ /* ===== */ , "217.79.179.177:9030 orport=9001 id=3E53D3979DB07EFD736661C934A1DED14127B684" " ipv6=[2001:4ba0:fff9:131:6c4f::90d3]:9001" /* nickname=Unnamed */ /* extrainfo=0 */ /* ===== */ , "66.111.2.16:9030 orport=9001 id=3F092986E9B87D3FDA09B71FA3A602378285C77A" " ipv6=[2610:1c0:0:5::16]:9001" /* nickname=NYCBUG1 */ /* extrainfo=0 */ /* ===== */ , "185.100.85.101:9030 orport=9001 id=4061C553CA88021B8302F0814365070AAE617270" /* nickname=TorExitRomania */ /* extrainfo=0 */ /* ===== */ , "163.172.157.213:8080 orport=443 id=4623A9EC53BFD83155929E56D6F7B55B5E718C24" /* nickname=Cotopaxi */ /* extrainfo=0 */ /* ===== */ , "193.70.43.76:9030 orport=9001 id=484A10BA2B8D48A5F0216674C8DD50EF27BC32F3" /* nickname=Aerodynamik03 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.4:80 orport=443 id=4BFC9C631A93FF4BA3AA84BC6931B4310C38A263" " ipv6=[2a03:e600:100::4]:443" /* nickname=karotte */ /* extrainfo=0 */ /* ===== */ , "81.7.13.84:80 orport=443 id=4EB55679FA91363B97372554F8DC7C63F4E5B101" " ipv6=[2a02:180:1:1::5b8f:538c]:443" /* nickname=torpidsDEisppro */ /* extrainfo=0 */ /* ===== */ , "108.53.208.157:80 orport=443 id=4F0DB7E687FC7C0AE55C8F243DA8B0EB27FBF1F2" /* nickname=Binnacle */ /* extrainfo=1 */ /* ===== */ , "5.9.158.75:9030 orport=9001 id=509EAB4C5D10C9A9A24B4EA0CE402C047A2D64E6" " ipv6=[2a01:4f8:190:514a::2]:9001" /* nickname=zwiebeltoralf2 */ /* extrainfo=1 */ /* ===== */ , "69.30.215.42:80 orport=443 id=510176C07005D47B23E6796F02C93241A29AA0E9" " ipv6=[2604:4300:a:2e:21b:21ff:fe11:392]:443" /* nickname=torpidsUSwholesale */ /* extrainfo=0 */ /* ===== */ , "176.223.141.106:80 orport=443 id=5262556D44A7F2434990FDE1AE7973C67DF49E58" /* nickname=Theoden */ /* extrainfo=0 */ /* ===== */ , "85.25.159.65:995 orport=80 id=52BFADA8BEAA01BA46C8F767F83C18E2FE50C1B9" /* nickname=BeastieJoy63 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.59:80 orport=443 id=562434D987CF49D45649B76ADCA993BEA8F78471" " ipv6=[2a00:1c20:4089:1234:bff6:e1bb:1ce3:8dc6]:443" /* nickname=bakunin2 */ /* extrainfo=0 */ /* ===== */ , "89.234.157.254:80 orport=443 id=578E007E5E4535FBFEF7758D8587B07B4C8C5D06" " ipv6=[2001:67c:2608::1]:443" /* nickname=marylou1 */ /* extrainfo=0 */ /* ===== */ , "172.98.193.43:80 orport=443 id=5E56738E7F97AA81DEEF59AF28494293DFBFCCDF" /* nickname=Backplane */ /* extrainfo=0 */ /* ===== */ , "163.172.139.104:8080 orport=443 id=68F175CCABE727AA2D2309BCD8789499CEE36ED7" /* nickname=Pichincha */ /* extrainfo=0 */ /* ===== */ , "95.217.16.212:80 orport=443 id=6A7551EEE18F78A9813096E82BF84F740D32B911" " ipv6=[2a01:4f9:c010:609a::1]:443" /* nickname=TorMachine */ /* extrainfo=0 */ /* ===== */ , "78.156.110.135:9093 orport=9092 id=7262B9D2EDE0B6A266C4B43D6202209BF6BBA888" /* nickname=SkynetRenegade */ /* extrainfo=0 */ /* ===== */ , "85.235.250.88:80 orport=443 id=72B2B12A3F60408BDBC98C6DF53988D3A0B3F0EE" " ipv6=[2a01:3a0:1:1900:85:235:250:88]:443" /* nickname=TykRelay01 */ /* extrainfo=0 */ /* ===== */ , "178.17.170.23:9030 orport=9001 id=742C45F2D9004AADE0077E528A4418A6A81BC2BA" " ipv6=[2a00:1dc0:caff:7d::8254]:9001" /* nickname=TorExitMoldova2 */ /* extrainfo=0 */ /* ===== */ , "81.7.14.31:9001 orport=443 id=7600680249A22080ECC6173FBBF64D6FCF330A61" /* nickname=Ichotolot62 */ /* extrainfo=1 */ /* ===== */ , "62.171.144.155:80 orport=443 id=7614EF326635DA810638E2F5D449D10AE2BB7158" " ipv6=[2a02:c207:3004:8874::1]:443" /* nickname=Nicenstein */ /* extrainfo=0 */ /* ===== */ , "77.247.181.166:80 orport=443 id=77131D7E2EC1CA9B8D737502256DA9103599CE51" /* nickname=CriticalMass */ /* extrainfo=0 */ /* ===== */ , "5.196.23.64:9030 orport=9001 id=775B0FAFDE71AADC23FFC8782B7BEB1D5A92733E" /* nickname=Aerodynamik01 */ /* extrainfo=0 */ /* ===== */ , "185.244.193.141:9030 orport=9001 id=79509683AB4C8DDAF90A120C69A4179C6CD5A387" " ipv6=[2a03:4000:27:192:24:12:1984:4]:9001" /* nickname=DerDickeReloaded */ /* extrainfo=0 */ /* ===== */ , "82.223.21.74:9030 orport=9001 id=7A32C9519D80CA458FC8B034A28F5F6815649A98" " ipv6=[2001:ba0:1800:6c::1]:9001" /* nickname=silentrocket */ /* extrainfo=0 */ /* ===== */ , "51.254.136.195:80 orport=443 id=7BB70F8585DFC27E75D692970C0EEB0F22983A63" /* nickname=torproxy02 */ /* extrainfo=0 */ /* ===== */ , "77.247.181.162:80 orport=443 id=7BFB908A3AA5B491DA4CA72CCBEE0E1F2A939B55" /* nickname=sofia */ /* extrainfo=0 */ /* ===== */ , "193.11.114.45:9031 orport=9002 id=80AAF8D5956A43C197104CEF2550CD42D165C6FB" /* nickname=mdfnet2 */ /* extrainfo=0 */ /* ===== */ , "51.254.96.208:9030 orport=9001 id=8101421BEFCCF4C271D5483C5AABCAAD245BBB9D" " ipv6=[2001:41d0:401:3100::30dc]:9001" /* nickname=rofltor01 */ /* extrainfo=0 */ /* ===== */ , "152.89.106.147:9030 orport=9001 id=8111FEB45EF2950EB8F84BFD8FF070AB07AEE9DD" " ipv6=[2a03:4000:39:605:c4f2:c9ff:fe64:c215]:9001" /* nickname=TugaOnionMR3 */ /* extrainfo=0 */ /* ===== */ , "192.42.116.16:80 orport=443 id=81B75D534F91BFB7C57AB67DA10BCEF622582AE8" /* nickname=hviv104 */ /* extrainfo=0 */ /* ===== */ , "192.87.28.82:9030 orport=9001 id=844AE9CAD04325E955E2BE1521563B79FE7094B7" " ipv6=[2001:678:230:3028:192:87:28:82]:9001" /* nickname=Smeerboel */ /* extrainfo=0 */ /* ===== */ , "85.228.136.92:9030 orport=443 id=855BC2DABE24C861CD887DB9B2E950424B49FC34" /* nickname=Logforme */ /* extrainfo=0 */ /* ===== */ , "178.254.7.88:8080 orport=8443 id=85A885433E50B1874F11CEC9BE98451E24660976" /* nickname=wr3ck3d0ni0n01 */ /* extrainfo=0 */ /* ===== */ , "163.172.194.53:9030 orport=9001 id=8C00FA7369A7A308F6A137600F0FA07990D9D451" " ipv6=[2001:bc8:225f:142:6c69:7461:7669:73]:9001" /* nickname=GrmmlLitavis */ /* extrainfo=0 */ /* ===== */ , "188.138.102.98:465 orport=443 id=8CAA470B905758742203E3EB45941719FCA9FEEC" /* nickname=BeastieJoy64 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.6:80 orport=443 id=8CF987FF43FB7F3D9AA4C4F3D96FFDF247A9A6C2" " ipv6=[2a03:e600:100::6]:443" /* nickname=zucchini */ /* extrainfo=0 */ /* ===== */ , "5.189.169.190:8030 orport=8080 id=8D79F73DCD91FC4F5017422FAC70074D6DB8DD81" /* nickname=thanatosDE */ /* extrainfo=0 */ /* ===== */ , "80.67.172.162:80 orport=443 id=8E6EDA78D8E3ABA88D877C3E37D6D4F0938C7B9F" " ipv6=[2001:910:1410:600::1]:443" /* nickname=AlGrothendieck */ /* extrainfo=0 */ /* ===== */ , "54.37.139.118:9030 orport=9001 id=90A5D1355C4B5840E950EB61E673863A6AE3ACA1" " ipv6=[2001:41d0:601:1100::1b8]:9001" /* nickname=rofltor09 */ /* extrainfo=0 */ /* ===== */ , "96.253.78.108:80 orport=443 id=924B24AFA7F075D059E8EEB284CC400B33D3D036" /* nickname=NSDFreedom */ /* extrainfo=0 */ /* ===== */ , "109.70.100.5:80 orport=443 id=9661AC95717798884F3E3727D360DD98D66727CC" " ipv6=[2a03:e600:100::5]:443" /* nickname=erdapfel */ /* extrainfo=0 */ /* ===== */ , "173.212.254.192:31336 orport=31337 id=99E246DB480B313A3012BC3363093CC26CD209C7" " ipv6=[2a02:c207:3002:3972::1]:31337" /* nickname=ViDiSrv */ /* extrainfo=0 */ /* ===== */ , "188.127.69.60:80 orport=443 id=9B2BC7EFD661072AFADC533BE8DCF1C19D8C2DCC" " ipv6=[2a02:29d0:8008:c0de:bad:beef::]:443" /* nickname=MIGHTYWANG */ /* extrainfo=0 */ /* ===== */ , "185.100.86.128:9030 orport=9001 id=9B31F1F1C1554F9FFB3455911F82E818EF7C7883" " ipv6=[2a06:1700:1::11]:9001" /* nickname=TorExitFinland */ /* extrainfo=0 */ /* ===== */ , "95.142.161.63:80 orport=443 id=9BA84E8C90083676F86C7427C8D105925F13716C" " ipv6=[2001:4b98:dc0:47:216:3eff:fe3d:888c]:443" /* nickname=ekumen */ /* extrainfo=0 */ /* ===== */ , "86.105.212.130:9030 orport=443 id=9C900A7F6F5DD034CFFD192DAEC9CCAA813DB022" /* nickname=firstor2 */ /* extrainfo=0 */ /* ===== */ , "46.28.110.244:80 orport=443 id=9F7D6E6420183C2B76D3CE99624EBC98A21A967E" /* nickname=Nivrim */ /* extrainfo=0 */ /* ===== */ , "46.165.230.5:80 orport=443 id=A0F06C2FADF88D3A39AA3072B406F09D7095AC9E" /* nickname=Dhalgren */ /* extrainfo=1 */ /* ===== */ , "193.234.15.55:80 orport=443 id=A1B28D636A56AAFFE92ADCCA937AA4BD5333BB4C" " ipv6=[2a00:1c20:4089:1234:7b2c:11c5:5221:903e]:443" /* nickname=bakunin4 */ /* extrainfo=0 */ /* ===== */ , "128.31.0.13:80 orport=443 id=A53C46F5B157DD83366D45A8E99A244934A14C46" /* nickname=csailmitexit */ /* extrainfo=0 */ /* ===== */ , "212.47.233.86:9130 orport=9101 id=A68097FE97D3065B1A6F4CE7187D753F8B8513F5" /* nickname=olabobamanmu */ /* extrainfo=0 */ /* ===== */ , "163.172.149.122:80 orport=443 id=A9406A006D6E7B5DA30F2C6D4E42A338B5E340B2" /* nickname=niij03 */ /* extrainfo=0 */ /* ===== */ , "176.10.107.180:9030 orport=9001 id=AC2BEDD0BAC72838EA7E6F113F856C4E8018ACDB" /* nickname=schokomilch */ /* extrainfo=0 */ /* ===== */ , "195.154.164.243:80 orport=443 id=AC66FFA4AB35A59EBBF5BF4C70008BF24D8A7A5C" " ipv6=[2001:bc8:399f:f000::1]:993" /* nickname=torpidsFRonline3 */ /* extrainfo=0 */ /* ===== */ , "185.129.62.62:9030 orport=9001 id=ACDD9E85A05B127BA010466C13C8C47212E8A38F" " ipv6=[2a06:d380:0:3700::62]:9001" /* nickname=kramse */ /* extrainfo=0 */ /* ===== */ , "188.40.128.246:9030 orport=9001 id=AD19490C7DBB26D3A68EFC824F67E69B0A96E601" " ipv6=[2a01:4f8:221:1ac1:dead:beef:7005:9001]:9001" /* nickname=sputnik */ /* extrainfo=0 */ /* ===== */ , "176.10.104.240:8080 orport=8443 id=AD86CD1A49573D52A7B6F4A35750F161AAD89C88" /* nickname=DigiGesTor1e2 */ /* extrainfo=0 */ /* ===== */ , "178.17.174.14:9030 orport=9001 id=B06F093A3D4DFAD3E923F4F28A74901BD4F74EB1" " ipv6=[2a00:1dc0:caff:8b::5b9a]:9001" /* nickname=TorExitMoldova */ /* extrainfo=0 */ /* ===== */ , "212.129.62.232:80 orport=443 id=B143D439B72D239A419F8DCE07B8A8EB1B486FA7" /* nickname=wardsback */ /* extrainfo=0 */ /* ===== */ , "109.70.100.2:80 orport=443 id=B27CF1DCEECD50F7992B07D720D7F6BF0EDF9D40" " ipv6=[2a03:e600:100::2]:443" /* nickname=radieschen */ /* extrainfo=0 */ /* ===== */ , "136.243.214.137:80 orport=443 id=B291D30517D23299AD7CEE3E60DFE60D0E3A4664" /* nickname=TorKIT */ /* extrainfo=0 */ /* ===== */ , "93.115.97.242:9030 orport=9001 id=B5212DB685A2A0FCFBAE425738E478D12361710D" /* nickname=firstor */ /* extrainfo=0 */ /* ===== */ , "193.11.114.46:9032 orport=9003 id=B83DC1558F0D34353BB992EF93AFEAFDB226A73E" /* nickname=mdfnet3 */ /* extrainfo=0 */ /* ===== */ , "85.248.227.164:444 orport=9002 id=B84F248233FEA90CAD439F292556A3139F6E1B82" " ipv6=[2a00:1298:8011:212::164]:9004" /* nickname=tollana */ /* extrainfo=0 */ /* ===== */ , "51.15.179.153:110 orport=995 id=BB60F5BA113A0B8B44B7B37DE3567FE561E92F78" " ipv6=[2001:bc8:3fec:500:7ea::]:995" /* nickname=Casper04 */ /* extrainfo=0 */ /* ===== */ , "198.96.155.3:8080 orport=5001 id=BCEDF6C193AA687AE471B8A22EBF6BC57C2D285E" /* nickname=gurgle */ /* extrainfo=0 */ /* ===== */ , "128.199.55.207:9030 orport=9001 id=BCEF908195805E03E92CCFE669C48738E556B9C5" " ipv6=[2a03:b0c0:2:d0::158:3001]:9001" /* nickname=EldritchReaper */ /* extrainfo=0 */ /* ===== */ , "213.141.138.174:9030 orport=9001 id=BD552C165E2ED2887D3F1CCE9CFF155DDA2D86E6" /* nickname=Schakalium */ /* extrainfo=0 */ /* ===== */ , "148.251.190.229:9030 orport=9010 id=BF0FB582E37F738CD33C3651125F2772705BB8E8" " ipv6=[2a01:4f8:211:c68::2]:9010" /* nickname=quadhead */ /* extrainfo=0 */ /* ===== */ , "212.47.233.250:9030 orport=9001 id=BF735F669481EE1CCC348F0731551C933D1E2278" " ipv6=[2001:bc8:4400:2b00::1c:629]:9001" /* nickname=freeway */ /* extrainfo=0 */ /* ===== */ , "132.248.241.5:9130 orport=9101 id=C0C4F339046EB824999F711D178472FDF53BE7F5" /* nickname=toritounam2 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.3:80 orport=443 id=C282248597D1C8522A2A7525E61C8B77BBC37614" " ipv6=[2a03:e600:100::3]:443" /* nickname=erbse */ /* extrainfo=0 */ /* ===== */ , "50.7.74.170:9030 orport=9001 id=C36A434DB54C66E1A97A5653858CE36024352C4D" " ipv6=[2001:49f0:d002:2::59]:443" /* nickname=theia9 */ /* extrainfo=0 */ /* ===== */ , "188.138.112.60:1433 orport=1521 id=C414F28FD2BEC1553024299B31D4E726BEB8E788" /* nickname=zebra620 */ /* extrainfo=0 */ /* ===== */ , "178.20.55.18:80 orport=443 id=C656B41AEFB40A141967EBF49D6E69603C9B4A11" /* nickname=marcuse2 */ /* extrainfo=0 */ /* ===== */ , "85.248.227.163:443 orport=9001 id=C793AB88565DDD3C9E4C6F15CCB9D8C7EF964CE9" " ipv6=[2a00:1298:8011:212::163]:9003" /* nickname=ori */ /* extrainfo=0 */ /* ===== */ , "50.7.74.173:80 orport=443 id=C87A4D8B534F78FDF0F4639B55F121401FEF259C" " ipv6=[2001:49f0:d002:2::54]:443" /* nickname=theia4 */ /* extrainfo=0 */ /* ===== */ , "176.31.103.150:9030 orport=9001 id=CBD0D1BD110EC52963082D839AC6A89D0AE243E7" /* nickname=UV74S7mjxRcYVrGsAMw */ /* extrainfo=0 */ /* ===== */ , "193.234.15.62:80 orport=443 id=CD0F9AA1A5064430B1DE8E645CBA7A502B27ED5F" " ipv6=[2a00:1c20:4089:1234:a6a4:2926:d0af:dfee]:443" /* nickname=jaures4 */ /* extrainfo=0 */ /* ===== */ , "85.25.213.211:465 orport=80 id=CE47F0356D86CF0A1A2008D97623216D560FB0A8" /* nickname=BeastieJoy61 */ /* extrainfo=0 */ /* ===== */ , "50.7.74.172:80 orport=443 id=D1AFBF3117B308B6D1A7AA762B1315FD86A6B8AF" " ipv6=[2001:49f0:d002:2::52]:443" /* nickname=theia2 */ /* extrainfo=0 */ /* ===== */ , "66.111.2.20:9030 orport=9001 id=D317C7889162E9EC4A1DA1A1095C2A0F377536D9" " ipv6=[2610:1c0:0:5::20]:9001" /* nickname=NYCBUG0 */ /* extrainfo=0 */ /* ===== */ , "5.45.111.149:80 orport=443 id=D405FCCF06ADEDF898DF2F29C9348DCB623031BA" " ipv6=[2a03:4000:6:2388:df98:15f9:b34d:443]:443" /* nickname=gGDHjdcC6zAlM8k08lY */ /* extrainfo=0 */ /* ===== */ , "12.235.151.200:9030 orport=9029 id=D5C33F3E203728EDF8361EA868B2939CCC43FAFB" /* nickname=nx1tor */ /* extrainfo=0 */ /* ===== */ , "212.83.166.62:80 orport=443 id=D7082DB97E7F0481CBF4B88CA5F5683399E196A3" /* nickname=shhop */ /* extrainfo=0 */ /* ===== */ , "54.36.237.163:80 orport=443 id=DB2682153AC0CCAECD2BD1E9EBE99C6815807A1E" /* nickname=GermanCraft2 */ /* extrainfo=0 */ /* ===== */ , "171.25.193.20:80 orport=443 id=DD8BD7307017407FCC36F8D04A688F74A0774C02" " ipv6=[2001:67c:289c::20]:443" /* nickname=DFRI0 */ /* extrainfo=0 */ /* ===== */ , "83.212.99.68:80 orport=443 id=DDBB2A38252ADDA53E4492DDF982CA6CC6E10EC0" " ipv6=[2001:648:2ffc:1225:a800:bff:fe3d:67b5]:443" /* nickname=zouzounella */ /* extrainfo=0 */ /* ===== */ , "166.70.207.2:9130 orport=9101 id=E41B16F7DDF52EBB1DB4268AB2FE340B37AD8904" /* nickname=xmission1 */ /* extrainfo=0 */ /* ===== */ , "185.100.86.182:9030 orport=8080 id=E51620B90DCB310138ED89EDEDD0A5C361AAE24E" /* nickname=NormalCitizen */ /* extrainfo=0 */ /* ===== */ , "212.47.244.38:8080 orport=443 id=E81EF60A73B3809F8964F73766B01BAA0A171E20" /* nickname=Chimborazo */ /* extrainfo=0 */ /* ===== */ , "185.4.132.148:80 orport=443 id=E8D114B3C78D8E6E7FEB1004650DD632C2143C9E" " ipv6=[2a02:c500:2:f0::5492]:443" /* nickname=libreonion1 */ /* extrainfo=0 */ /* ===== */ , "195.154.105.170:9030 orport=9001 id=E947C029087FA1C3499BEF5D4372947C51223D8F" /* nickname=dgplug */ /* extrainfo=0 */ /* ===== */ , "131.188.40.188:1443 orport=11180 id=EBE718E1A49EE229071702964F8DB1F318075FF8" " ipv6=[2001:638:a000:4140::ffff:188]:11180" /* nickname=fluxe4 */ /* extrainfo=1 */ /* ===== */ , "192.87.28.28:9030 orport=9001 id=ED2338CAC2711B3E331392E1ED2831219B794024" " ipv6=[2001:678:230:3028:192:87:28:28]:9001" /* nickname=SEC6xFreeBSD64 */ /* extrainfo=0 */ /* ===== */ , "178.20.55.16:80 orport=443 id=EFAE44728264982224445E96214C15F9075DEE1D" /* nickname=marcuse1 */ /* extrainfo=0 */ /* ===== */ , "217.182.75.181:9030 orport=9001 id=EFEACD781604EB80FBC025EDEDEA2D523AEAAA2F" /* nickname=Aerodynamik02 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.58:80 orport=443 id=F24F8BEA2779A79111F33F6832B062BED306B9CB" " ipv6=[2a00:1c20:4089:1234:cdae:1b3e:cc38:3d45]:443" /* nickname=jaures2 */ /* extrainfo=0 */ /* ===== */ , "129.13.131.140:80 orport=443 id=F2DFE5FA1E4CF54F8E761A6D304B9B4EC69BDAE8" " ipv6=[2a00:1398:5:f604:cafe:cafe:cafe:9001]:443" /* nickname=AlleKochenKaffee */ /* extrainfo=0 */ /* ===== */ , "37.187.102.108:80 orport=443 id=F4263275CF54A6836EE7BD527B1328836A6F06E1" " ipv6=[2001:41d0:a:266c::1]:443" /* nickname=EvilMoe */ /* extrainfo=0 */ /* ===== */ , "5.199.142.236:9030 orport=9001 id=F4C0EDAA0BF0F7EC138746F8FEF1CE26C7860265" /* nickname=tornodenumber9004 */ /* extrainfo=0 */ /* ===== */ , "163.172.154.162:9030 orport=9001 id=F741E5124CB12700DA946B78C9B2DD175D6CD2A1" " ipv6=[2001:bc8:47a0:162a::1]:9001" /* nickname=rofltor06 */ /* extrainfo=0 */ /* ===== */ , "78.47.18.110:443 orport=80 id=F8D27B163B9247B232A2EEE68DD8B698695C28DE" " ipv6=[2a01:4f8:120:4023::110]:80" /* nickname=fluxe3 */ /* extrainfo=1 */ /* ===== */ , "91.143.88.62:80 orport=443 id=F9246DEF2B653807236DA134F2AEAB103D58ABFE" /* nickname=Freebird31 */ /* extrainfo=1 */ /* ===== */ , "149.56.45.200:9030 orport=9001 id=FE296180018833AF03A8EACD5894A614623D3F76" " ipv6=[2607:5300:201:3000::17d3]:9002" /* nickname=PyotrTorpotkinOne */ /* extrainfo=0 */ /* ===== */ , "62.141.38.69:80 orport=443 id=FF9FC6D130FA26AE3AE8B23688691DC419F0F22E" " ipv6=[2001:4ba0:cafe:ac5::]:443" /* nickname=rinderwahnRelay3L */ /* extrainfo=0 */ /* ===== */ , "193.11.164.243:9030 orport=9001 id=FFA72BD683BC2FCF988356E6BEC1E490F313FB07" " ipv6=[2001:6b0:7:125::243]:9001" /* nickname=Lule */ /* extrainfo=0 */ /* ===== */ , """
authority_dirs = '\n"moria1 orport=9101 "\n "v3ident=D586D18309DED4CD6D57C18FDB97EFA96D330566 "\n "128.31.0.39:9131 9695 DFC3 5FFE B861 329B 9F1A B04C 4639 7020 CE31",\n"tor26 orport=443 "\n "v3ident=14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4 "\n "ipv6=[2001:858:2:2:aabb:0:563b:1526]:443 "\n "86.59.21.38:80 847B 1F85 0344 D787 6491 A548 92F9 0493 4E4E B85D",\n"dizum orport=443 "\n "v3ident=E8A9C45EDE6D711294FADF8E7951F4DE6CA56B58 "\n "45.66.33.45:80 7EA6 EAD6 FD83 083C 538F 4403 8BBF A077 587D D755",\n"Serge orport=9001 bridge "\n "66.111.2.131:9030 BA44 A889 E64B 93FA A2B1 14E0 2C2A 279A 8555 C533",\n"gabelmoo orport=443 "\n "v3ident=ED03BB616EB2F60BEC80151114BB25CEF515B226 "\n "ipv6=[2001:638:a000:4140::ffff:189]:443 "\n "131.188.40.189:80 F204 4413 DAC2 E02E 3D6B CF47 35A1 9BCA 1DE9 7281",\n"dannenberg orport=443 "\n "v3ident=0232AF901C31A04EE9848595AF9BB7620D4C5B2E "\n "ipv6=[2001:678:558:1000::244]:443 "\n "193.23.244.244:80 7BE6 83E6 5D48 1413 21C5 ED92 F075 C553 64AC 7123",\n"maatuska orport=80 "\n "v3ident=49015F787433103580E3B66A1707A00E60F2D15B "\n "ipv6=[2001:67c:289c::9]:80 "\n "171.25.193.9:443 BD6A 8292 55CB 08E6 6FBE 7D37 4836 3586 E46B 3810",\n"Faravahar orport=443 "\n "v3ident=EFCBE720AB3A82B99F9E953CD5BF50F7EEFC7B97 "\n "154.35.175.225:80 CF6D 0AAF B385 BE71 B8E1 11FC 5CFF 4B47 9237 33BC",\n"longclaw orport=443 "\n "v3ident=23D15D965BC35114467363C165C4F724B64B4F66 "\n "199.58.81.140:80 74A9 1064 6BCE EFBC D2E8 74FC 1DC9 9743 0F96 8145",\n"bastet orport=443 "\n "v3ident=27102BC123E7AF1D4741AE047E160C91ADC76B21 "\n "ipv6=[2620:13:4000:6000::1000:118]:443 "\n "204.13.164.118:80 24E2 F139 121D 4394 C54B 5BCC 368B 3B41 1857 C413",\n' fallback_dirs = '\n/* type=fallback */\n/* version=3.0.0 */\n/* timestamp=20200723133610 */\n/* source=offer-list */\n/* ===== */\n/* Offer list excluded 1807 of 1978 candidates. */\n/* Checked IPv4 DirPorts served a consensus within 15.0s. */\n/*\nFinal Count: 144 (Eligible 171, Target 447 (2239 * 0.20), Max 200)\nExcluded: 27 (Same Operator 15, Failed/Skipped Download 6, Excess 6)\nBandwidth Range: 0.6 - 96.1 MByte/s\n*/\n/*\nOnionoo Source: details Date: 2020-07-23 13:00:00 Version: 8.0\nURL: https:onionoo.torproject.orgdetails?fieldsfingerprint%2Cnickname%2Ccontact%2Clast_changed_address_or_port%2Cconsensus_weight%2Cadvertised_bandwidth%2Cor_addresses%2Cdir_address%2Crecommended_version%2Cflags%2Ceffective_family%2Cplatform&typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen\n*/\n/*\nOnionoo Source: uptime Date: 2020-07-23 13:00:00 Version: 8.0\nURL: https:onionoo.torproject.orguptime?typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen\n*/\n/* ===== */\n"185.225.17.3:80 orport=443 id=0338F9F55111FE8E3570E7DE117EF3AF999CC1D7"\n" ipv6=[2a0a:c800:1:5::3]:443"\n/* nickname=Nebuchadnezzar */\n/* extrainfo=0 */\n/* ===== */\n,\n"81.7.10.193:9002 orport=993 id=03C3069E814E296EB18776EB61B1ECB754ED89FE"\n/* nickname=Ichotolot61 */\n/* extrainfo=1 */\n/* ===== */\n,\n"163.172.149.155:80 orport=443 id=0B85617241252517E8ECF2CFC7F4C1A32DCD153F"\n/* nickname=niij02 */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.200.21.144:80 orport=443 id=0C039F35C2E40DCB71CD8A07E97C7FD7787D42D6"\n/* nickname=libel */\n/* extrainfo=0 */\n/* ===== */\n,\n"81.7.18.7:9030 orport=9001 id=0C475BA4D3AA3C289B716F95954CAD616E50C4E5"\n/* nickname=Freebird32 */\n/* extrainfo=1 */\n/* ===== */\n,\n"193.234.15.60:80 orport=443 id=0F6E5CA4BF5565D9AA9FDDCA165AFC6A5305763D"\n" ipv6=[2a00:1c20:4089:1234:67bc:79f3:61c0:6e49]:443"\n/* nickname=jaures3 */\n/* extrainfo=0 */\n/* ===== */\n,\n"93.177.67.71:9030 orport=8080 id=113143469021882C3A4B82F084F8125B08EE471E"\n" ipv6=[2a03:4000:38:559::2]:8080"\n/* nickname=parasol */\n/* extrainfo=0 */\n/* ===== */\n,\n"37.120.174.249:80 orport=443 id=11DF0017A43AF1F08825CD5D973297F81AB00FF3"\n" ipv6=[2a03:4000:6:724c:df98:15f9:b34d:443]:443"\n/* nickname=gGDHjdcC6zAlM8k08lX */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.11.114.43:9030 orport=9001 id=12AD30E5D25AA67F519780E2111E611A455FDC89"\n" ipv6=[2001:6b0:30:1000::99]:9050"\n/* nickname=mdfnet1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"37.157.195.87:8030 orport=443 id=12FD624EE73CEF37137C90D38B2406A66F68FAA2"\n/* nickname=thanatosCZ */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.234.15.61:80 orport=443 id=158581827034DEF1BAB1FC248D180165452E53D3"\n" ipv6=[2a00:1c20:4089:1234:2712:a3d0:666b:88a6]:443"\n/* nickname=bakunin3 */\n/* extrainfo=0 */\n/* ===== */\n,\n"51.15.78.0:9030 orport=9001 id=15BE17C99FACE24470D40AF782D6A9C692AB36D6"\n" ipv6=[2001:bc8:1824:c4b::1]:9001"\n/* nickname=rofltor07 */\n/* extrainfo=0 */\n/* ===== */\n,\n"204.11.50.131:9030 orport=9001 id=185F2A57B0C4620582602761097D17DB81654F70"\n/* nickname=BoingBoing */\n/* extrainfo=0 */\n/* ===== */\n,\n"50.7.74.171:9030 orport=9001 id=1CD17CB202063C51C7DAD3BACEF87ECE81C2350F"\n" ipv6=[2001:49f0:d002:2::51]:443"\n/* nickname=theia1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"199.184.246.250:80 orport=443 id=1F6ABD086F40B890A33C93CC4606EE68B31C9556"\n" ipv6=[2620:124:1009:1::171]:443"\n/* nickname=dao */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.47.229.2:9030 orport=9001 id=20462CBA5DA4C2D963567D17D0B7249718114A68"\n" ipv6=[2001:bc8:47ac:23a::1]:9001"\n/* nickname=scaletor */\n/* extrainfo=0 */\n/* ===== */\n,\n"77.247.181.164:80 orport=443 id=204DFD2A2C6A0DC1FA0EACB495218E0B661704FD"\n/* nickname=HaveHeart */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.176.167:80 orport=443 id=230A8B2A8BA861210D9B4BA97745AEC217A94207"\n/* nickname=niij01 */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.234.15.57:80 orport=443 id=24D0491A2ADAAB52C17625FBC926D84477AEA322"\n" ipv6=[2a00:1c20:4089:1234:7825:2c5d:1ecd:c66f]:443"\n/* nickname=bakunin */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.220.101.137:20137 orport=10137 id=28F4F392F8F19E3FBDE09616D9DB8143A1E2DDD3"\n" ipv6=[2a0b:f4c2:1::137]:10137"\n/* nickname=niftycottonmouse */\n/* extrainfo=0 */\n/* ===== */\n,\n"138.201.250.33:9012 orport=9011 id=2BA2C8E96B2590E1072AECE2BDB5C48921BF8510"\n/* nickname=storm */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.181.50.99:80 orport=443 id=2BB85DC5BD3C6F0D81A4F2B5882176C6BF7ECF5A"\n" ipv6=[2a03:4000:3f:16c:3851:6bff:fe07:bd2]:443"\n/* nickname=AlanTuring */\n/* extrainfo=0 */\n/* ===== */\n,\n"97.74.237.196:9030 orport=9001 id=2F0F32AB1E5B943CA7D062C03F18960C86E70D94"\n/* nickname=Minotaur */\n/* extrainfo=0 */\n/* ===== */\n,\n"94.230.208.147:8080 orport=8443 id=311A4533F7A2415F42346A6C8FA77E6FD279594C"\n" ipv6=[2a02:418:6017::147]:8443"\n/* nickname=DigiGesTor3e2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.105.109.162:52860 orport=60784 id=32EE911D968BE3E016ECA572BB1ED0A9EE43FC2F"\n" ipv6=[2001:948:7:2::163]:5001"\n/* nickname=ndnr1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.100.84.212:80 orport=443 id=330CD3DB6AD266DC70CDB512B036957D03D9BC59"\n" ipv6=[2a06:1700:0:7::1]:443"\n/* nickname=TeamTardis */\n/* extrainfo=0 */\n/* ===== */\n,\n"64.79.152.132:80 orport=443 id=375DCBB2DBD94E5263BC0C015F0C9E756669617E"\n/* nickname=ebola */\n/* extrainfo=0 */\n/* ===== */\n,\n"198.50.191.95:80 orport=443 id=39F096961ED2576975C866D450373A9913AFDC92"\n/* nickname=shhovh */\n/* extrainfo=0 */\n/* ===== */\n,\n"50.7.74.174:9030 orport=9001 id=3AFDAAD91A15B4C6A7686A53AA8627CA871FF491"\n" ipv6=[2001:49f0:d002:2::57]:443"\n/* nickname=theia7 */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.83.154.33:8888 orport=443 id=3C79699D4FBC37DE1A212D5033B56DAE079AC0EF"\n" ipv6=[2001:bc8:31d3:1dd::1]:443"\n/* nickname=bauruine203 */\n/* extrainfo=0 */\n/* ===== */\n,\n"51.38.65.160:9030 orport=9001 id=3CB4193EF4E239FCEDC4DC43468E0B0D6B67ACC3"\n" ipv6=[2001:41d0:801:2000::f6e]:9001"\n/* nickname=rofltor10 */\n/* extrainfo=0 */\n/* ===== */\n,\n"95.216.211.81:80 orport=443 id=3CCF9573F59137E52787D9C322AC19D2BD090B70"\n" ipv6=[2a01:4f9:c010:4dfa::1]:443"\n/* nickname=BurningMan */\n/* extrainfo=0 */\n/* ===== */\n,\n"217.79.179.177:9030 orport=9001 id=3E53D3979DB07EFD736661C934A1DED14127B684"\n" ipv6=[2001:4ba0:fff9:131:6c4f::90d3]:9001"\n/* nickname=Unnamed */\n/* extrainfo=0 */\n/* ===== */\n,\n"66.111.2.16:9030 orport=9001 id=3F092986E9B87D3FDA09B71FA3A602378285C77A"\n" ipv6=[2610:1c0:0:5::16]:9001"\n/* nickname=NYCBUG1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.100.85.101:9030 orport=9001 id=4061C553CA88021B8302F0814365070AAE617270"\n/* nickname=TorExitRomania */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.157.213:8080 orport=443 id=4623A9EC53BFD83155929E56D6F7B55B5E718C24"\n/* nickname=Cotopaxi */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.70.43.76:9030 orport=9001 id=484A10BA2B8D48A5F0216674C8DD50EF27BC32F3"\n/* nickname=Aerodynamik03 */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.70.100.4:80 orport=443 id=4BFC9C631A93FF4BA3AA84BC6931B4310C38A263"\n" ipv6=[2a03:e600:100::4]:443"\n/* nickname=karotte */\n/* extrainfo=0 */\n/* ===== */\n,\n"81.7.13.84:80 orport=443 id=4EB55679FA91363B97372554F8DC7C63F4E5B101"\n" ipv6=[2a02:180:1:1::5b8f:538c]:443"\n/* nickname=torpidsDEisppro */\n/* extrainfo=0 */\n/* ===== */\n,\n"108.53.208.157:80 orport=443 id=4F0DB7E687FC7C0AE55C8F243DA8B0EB27FBF1F2"\n/* nickname=Binnacle */\n/* extrainfo=1 */\n/* ===== */\n,\n"5.9.158.75:9030 orport=9001 id=509EAB4C5D10C9A9A24B4EA0CE402C047A2D64E6"\n" ipv6=[2a01:4f8:190:514a::2]:9001"\n/* nickname=zwiebeltoralf2 */\n/* extrainfo=1 */\n/* ===== */\n,\n"69.30.215.42:80 orport=443 id=510176C07005D47B23E6796F02C93241A29AA0E9"\n" ipv6=[2604:4300:a:2e:21b:21ff:fe11:392]:443"\n/* nickname=torpidsUSwholesale */\n/* extrainfo=0 */\n/* ===== */\n,\n"176.223.141.106:80 orport=443 id=5262556D44A7F2434990FDE1AE7973C67DF49E58"\n/* nickname=Theoden */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.25.159.65:995 orport=80 id=52BFADA8BEAA01BA46C8F767F83C18E2FE50C1B9"\n/* nickname=BeastieJoy63 */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.234.15.59:80 orport=443 id=562434D987CF49D45649B76ADCA993BEA8F78471"\n" ipv6=[2a00:1c20:4089:1234:bff6:e1bb:1ce3:8dc6]:443"\n/* nickname=bakunin2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"89.234.157.254:80 orport=443 id=578E007E5E4535FBFEF7758D8587B07B4C8C5D06"\n" ipv6=[2001:67c:2608::1]:443"\n/* nickname=marylou1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"172.98.193.43:80 orport=443 id=5E56738E7F97AA81DEEF59AF28494293DFBFCCDF"\n/* nickname=Backplane */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.139.104:8080 orport=443 id=68F175CCABE727AA2D2309BCD8789499CEE36ED7"\n/* nickname=Pichincha */\n/* extrainfo=0 */\n/* ===== */\n,\n"95.217.16.212:80 orport=443 id=6A7551EEE18F78A9813096E82BF84F740D32B911"\n" ipv6=[2a01:4f9:c010:609a::1]:443"\n/* nickname=TorMachine */\n/* extrainfo=0 */\n/* ===== */\n,\n"78.156.110.135:9093 orport=9092 id=7262B9D2EDE0B6A266C4B43D6202209BF6BBA888"\n/* nickname=SkynetRenegade */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.235.250.88:80 orport=443 id=72B2B12A3F60408BDBC98C6DF53988D3A0B3F0EE"\n" ipv6=[2a01:3a0:1:1900:85:235:250:88]:443"\n/* nickname=TykRelay01 */\n/* extrainfo=0 */\n/* ===== */\n,\n"178.17.170.23:9030 orport=9001 id=742C45F2D9004AADE0077E528A4418A6A81BC2BA"\n" ipv6=[2a00:1dc0:caff:7d::8254]:9001"\n/* nickname=TorExitMoldova2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"81.7.14.31:9001 orport=443 id=7600680249A22080ECC6173FBBF64D6FCF330A61"\n/* nickname=Ichotolot62 */\n/* extrainfo=1 */\n/* ===== */\n,\n"62.171.144.155:80 orport=443 id=7614EF326635DA810638E2F5D449D10AE2BB7158"\n" ipv6=[2a02:c207:3004:8874::1]:443"\n/* nickname=Nicenstein */\n/* extrainfo=0 */\n/* ===== */\n,\n"77.247.181.166:80 orport=443 id=77131D7E2EC1CA9B8D737502256DA9103599CE51"\n/* nickname=CriticalMass */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.196.23.64:9030 orport=9001 id=775B0FAFDE71AADC23FFC8782B7BEB1D5A92733E"\n/* nickname=Aerodynamik01 */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.244.193.141:9030 orport=9001 id=79509683AB4C8DDAF90A120C69A4179C6CD5A387"\n" ipv6=[2a03:4000:27:192:24:12:1984:4]:9001"\n/* nickname=DerDickeReloaded */\n/* extrainfo=0 */\n/* ===== */\n,\n"82.223.21.74:9030 orport=9001 id=7A32C9519D80CA458FC8B034A28F5F6815649A98"\n" ipv6=[2001:ba0:1800:6c::1]:9001"\n/* nickname=silentrocket */\n/* extrainfo=0 */\n/* ===== */\n,\n"51.254.136.195:80 orport=443 id=7BB70F8585DFC27E75D692970C0EEB0F22983A63"\n/* nickname=torproxy02 */\n/* extrainfo=0 */\n/* ===== */\n,\n"77.247.181.162:80 orport=443 id=7BFB908A3AA5B491DA4CA72CCBEE0E1F2A939B55"\n/* nickname=sofia */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.11.114.45:9031 orport=9002 id=80AAF8D5956A43C197104CEF2550CD42D165C6FB"\n/* nickname=mdfnet2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"51.254.96.208:9030 orport=9001 id=8101421BEFCCF4C271D5483C5AABCAAD245BBB9D"\n" ipv6=[2001:41d0:401:3100::30dc]:9001"\n/* nickname=rofltor01 */\n/* extrainfo=0 */\n/* ===== */\n,\n"152.89.106.147:9030 orport=9001 id=8111FEB45EF2950EB8F84BFD8FF070AB07AEE9DD"\n" ipv6=[2a03:4000:39:605:c4f2:c9ff:fe64:c215]:9001"\n/* nickname=TugaOnionMR3 */\n/* extrainfo=0 */\n/* ===== */\n,\n"192.42.116.16:80 orport=443 id=81B75D534F91BFB7C57AB67DA10BCEF622582AE8"\n/* nickname=hviv104 */\n/* extrainfo=0 */\n/* ===== */\n,\n"192.87.28.82:9030 orport=9001 id=844AE9CAD04325E955E2BE1521563B79FE7094B7"\n" ipv6=[2001:678:230:3028:192:87:28:82]:9001"\n/* nickname=Smeerboel */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.228.136.92:9030 orport=443 id=855BC2DABE24C861CD887DB9B2E950424B49FC34"\n/* nickname=Logforme */\n/* extrainfo=0 */\n/* ===== */\n,\n"178.254.7.88:8080 orport=8443 id=85A885433E50B1874F11CEC9BE98451E24660976"\n/* nickname=wr3ck3d0ni0n01 */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.194.53:9030 orport=9001 id=8C00FA7369A7A308F6A137600F0FA07990D9D451"\n" ipv6=[2001:bc8:225f:142:6c69:7461:7669:73]:9001"\n/* nickname=GrmmlLitavis */\n/* extrainfo=0 */\n/* ===== */\n,\n"188.138.102.98:465 orport=443 id=8CAA470B905758742203E3EB45941719FCA9FEEC"\n/* nickname=BeastieJoy64 */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.70.100.6:80 orport=443 id=8CF987FF43FB7F3D9AA4C4F3D96FFDF247A9A6C2"\n" ipv6=[2a03:e600:100::6]:443"\n/* nickname=zucchini */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.189.169.190:8030 orport=8080 id=8D79F73DCD91FC4F5017422FAC70074D6DB8DD81"\n/* nickname=thanatosDE */\n/* extrainfo=0 */\n/* ===== */\n,\n"80.67.172.162:80 orport=443 id=8E6EDA78D8E3ABA88D877C3E37D6D4F0938C7B9F"\n" ipv6=[2001:910:1410:600::1]:443"\n/* nickname=AlGrothendieck */\n/* extrainfo=0 */\n/* ===== */\n,\n"54.37.139.118:9030 orport=9001 id=90A5D1355C4B5840E950EB61E673863A6AE3ACA1"\n" ipv6=[2001:41d0:601:1100::1b8]:9001"\n/* nickname=rofltor09 */\n/* extrainfo=0 */\n/* ===== */\n,\n"96.253.78.108:80 orport=443 id=924B24AFA7F075D059E8EEB284CC400B33D3D036"\n/* nickname=NSDFreedom */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.70.100.5:80 orport=443 id=9661AC95717798884F3E3727D360DD98D66727CC"\n" ipv6=[2a03:e600:100::5]:443"\n/* nickname=erdapfel */\n/* extrainfo=0 */\n/* ===== */\n,\n"173.212.254.192:31336 orport=31337 id=99E246DB480B313A3012BC3363093CC26CD209C7"\n" ipv6=[2a02:c207:3002:3972::1]:31337"\n/* nickname=ViDiSrv */\n/* extrainfo=0 */\n/* ===== */\n,\n"188.127.69.60:80 orport=443 id=9B2BC7EFD661072AFADC533BE8DCF1C19D8C2DCC"\n" ipv6=[2a02:29d0:8008:c0de:bad:beef::]:443"\n/* nickname=MIGHTYWANG */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.100.86.128:9030 orport=9001 id=9B31F1F1C1554F9FFB3455911F82E818EF7C7883"\n" ipv6=[2a06:1700:1::11]:9001"\n/* nickname=TorExitFinland */\n/* extrainfo=0 */\n/* ===== */\n,\n"95.142.161.63:80 orport=443 id=9BA84E8C90083676F86C7427C8D105925F13716C"\n" ipv6=[2001:4b98:dc0:47:216:3eff:fe3d:888c]:443"\n/* nickname=ekumen */\n/* extrainfo=0 */\n/* ===== */\n,\n"86.105.212.130:9030 orport=443 id=9C900A7F6F5DD034CFFD192DAEC9CCAA813DB022"\n/* nickname=firstor2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"46.28.110.244:80 orport=443 id=9F7D6E6420183C2B76D3CE99624EBC98A21A967E"\n/* nickname=Nivrim */\n/* extrainfo=0 */\n/* ===== */\n,\n"46.165.230.5:80 orport=443 id=A0F06C2FADF88D3A39AA3072B406F09D7095AC9E"\n/* nickname=Dhalgren */\n/* extrainfo=1 */\n/* ===== */\n,\n"193.234.15.55:80 orport=443 id=A1B28D636A56AAFFE92ADCCA937AA4BD5333BB4C"\n" ipv6=[2a00:1c20:4089:1234:7b2c:11c5:5221:903e]:443"\n/* nickname=bakunin4 */\n/* extrainfo=0 */\n/* ===== */\n,\n"128.31.0.13:80 orport=443 id=A53C46F5B157DD83366D45A8E99A244934A14C46"\n/* nickname=csailmitexit */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.47.233.86:9130 orport=9101 id=A68097FE97D3065B1A6F4CE7187D753F8B8513F5"\n/* nickname=olabobamanmu */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.149.122:80 orport=443 id=A9406A006D6E7B5DA30F2C6D4E42A338B5E340B2"\n/* nickname=niij03 */\n/* extrainfo=0 */\n/* ===== */\n,\n"176.10.107.180:9030 orport=9001 id=AC2BEDD0BAC72838EA7E6F113F856C4E8018ACDB"\n/* nickname=schokomilch */\n/* extrainfo=0 */\n/* ===== */\n,\n"195.154.164.243:80 orport=443 id=AC66FFA4AB35A59EBBF5BF4C70008BF24D8A7A5C"\n" ipv6=[2001:bc8:399f:f000::1]:993"\n/* nickname=torpidsFRonline3 */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.129.62.62:9030 orport=9001 id=ACDD9E85A05B127BA010466C13C8C47212E8A38F"\n" ipv6=[2a06:d380:0:3700::62]:9001"\n/* nickname=kramse */\n/* extrainfo=0 */\n/* ===== */\n,\n"188.40.128.246:9030 orport=9001 id=AD19490C7DBB26D3A68EFC824F67E69B0A96E601"\n" ipv6=[2a01:4f8:221:1ac1:dead:beef:7005:9001]:9001"\n/* nickname=sputnik */\n/* extrainfo=0 */\n/* ===== */\n,\n"176.10.104.240:8080 orport=8443 id=AD86CD1A49573D52A7B6F4A35750F161AAD89C88"\n/* nickname=DigiGesTor1e2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"178.17.174.14:9030 orport=9001 id=B06F093A3D4DFAD3E923F4F28A74901BD4F74EB1"\n" ipv6=[2a00:1dc0:caff:8b::5b9a]:9001"\n/* nickname=TorExitMoldova */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.129.62.232:80 orport=443 id=B143D439B72D239A419F8DCE07B8A8EB1B486FA7"\n/* nickname=wardsback */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.70.100.2:80 orport=443 id=B27CF1DCEECD50F7992B07D720D7F6BF0EDF9D40"\n" ipv6=[2a03:e600:100::2]:443"\n/* nickname=radieschen */\n/* extrainfo=0 */\n/* ===== */\n,\n"136.243.214.137:80 orport=443 id=B291D30517D23299AD7CEE3E60DFE60D0E3A4664"\n/* nickname=TorKIT */\n/* extrainfo=0 */\n/* ===== */\n,\n"93.115.97.242:9030 orport=9001 id=B5212DB685A2A0FCFBAE425738E478D12361710D"\n/* nickname=firstor */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.11.114.46:9032 orport=9003 id=B83DC1558F0D34353BB992EF93AFEAFDB226A73E"\n/* nickname=mdfnet3 */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.248.227.164:444 orport=9002 id=B84F248233FEA90CAD439F292556A3139F6E1B82"\n" ipv6=[2a00:1298:8011:212::164]:9004"\n/* nickname=tollana */\n/* extrainfo=0 */\n/* ===== */\n,\n"51.15.179.153:110 orport=995 id=BB60F5BA113A0B8B44B7B37DE3567FE561E92F78"\n" ipv6=[2001:bc8:3fec:500:7ea::]:995"\n/* nickname=Casper04 */\n/* extrainfo=0 */\n/* ===== */\n,\n"198.96.155.3:8080 orport=5001 id=BCEDF6C193AA687AE471B8A22EBF6BC57C2D285E"\n/* nickname=gurgle */\n/* extrainfo=0 */\n/* ===== */\n,\n"128.199.55.207:9030 orport=9001 id=BCEF908195805E03E92CCFE669C48738E556B9C5"\n" ipv6=[2a03:b0c0:2:d0::158:3001]:9001"\n/* nickname=EldritchReaper */\n/* extrainfo=0 */\n/* ===== */\n,\n"213.141.138.174:9030 orport=9001 id=BD552C165E2ED2887D3F1CCE9CFF155DDA2D86E6"\n/* nickname=Schakalium */\n/* extrainfo=0 */\n/* ===== */\n,\n"148.251.190.229:9030 orport=9010 id=BF0FB582E37F738CD33C3651125F2772705BB8E8"\n" ipv6=[2a01:4f8:211:c68::2]:9010"\n/* nickname=quadhead */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.47.233.250:9030 orport=9001 id=BF735F669481EE1CCC348F0731551C933D1E2278"\n" ipv6=[2001:bc8:4400:2b00::1c:629]:9001"\n/* nickname=freeway */\n/* extrainfo=0 */\n/* ===== */\n,\n"132.248.241.5:9130 orport=9101 id=C0C4F339046EB824999F711D178472FDF53BE7F5"\n/* nickname=toritounam2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"109.70.100.3:80 orport=443 id=C282248597D1C8522A2A7525E61C8B77BBC37614"\n" ipv6=[2a03:e600:100::3]:443"\n/* nickname=erbse */\n/* extrainfo=0 */\n/* ===== */\n,\n"50.7.74.170:9030 orport=9001 id=C36A434DB54C66E1A97A5653858CE36024352C4D"\n" ipv6=[2001:49f0:d002:2::59]:443"\n/* nickname=theia9 */\n/* extrainfo=0 */\n/* ===== */\n,\n"188.138.112.60:1433 orport=1521 id=C414F28FD2BEC1553024299B31D4E726BEB8E788"\n/* nickname=zebra620 */\n/* extrainfo=0 */\n/* ===== */\n,\n"178.20.55.18:80 orport=443 id=C656B41AEFB40A141967EBF49D6E69603C9B4A11"\n/* nickname=marcuse2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.248.227.163:443 orport=9001 id=C793AB88565DDD3C9E4C6F15CCB9D8C7EF964CE9"\n" ipv6=[2a00:1298:8011:212::163]:9003"\n/* nickname=ori */\n/* extrainfo=0 */\n/* ===== */\n,\n"50.7.74.173:80 orport=443 id=C87A4D8B534F78FDF0F4639B55F121401FEF259C"\n" ipv6=[2001:49f0:d002:2::54]:443"\n/* nickname=theia4 */\n/* extrainfo=0 */\n/* ===== */\n,\n"176.31.103.150:9030 orport=9001 id=CBD0D1BD110EC52963082D839AC6A89D0AE243E7"\n/* nickname=UV74S7mjxRcYVrGsAMw */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.234.15.62:80 orport=443 id=CD0F9AA1A5064430B1DE8E645CBA7A502B27ED5F"\n" ipv6=[2a00:1c20:4089:1234:a6a4:2926:d0af:dfee]:443"\n/* nickname=jaures4 */\n/* extrainfo=0 */\n/* ===== */\n,\n"85.25.213.211:465 orport=80 id=CE47F0356D86CF0A1A2008D97623216D560FB0A8"\n/* nickname=BeastieJoy61 */\n/* extrainfo=0 */\n/* ===== */\n,\n"50.7.74.172:80 orport=443 id=D1AFBF3117B308B6D1A7AA762B1315FD86A6B8AF"\n" ipv6=[2001:49f0:d002:2::52]:443"\n/* nickname=theia2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"66.111.2.20:9030 orport=9001 id=D317C7889162E9EC4A1DA1A1095C2A0F377536D9"\n" ipv6=[2610:1c0:0:5::20]:9001"\n/* nickname=NYCBUG0 */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.45.111.149:80 orport=443 id=D405FCCF06ADEDF898DF2F29C9348DCB623031BA"\n" ipv6=[2a03:4000:6:2388:df98:15f9:b34d:443]:443"\n/* nickname=gGDHjdcC6zAlM8k08lY */\n/* extrainfo=0 */\n/* ===== */\n,\n"12.235.151.200:9030 orport=9029 id=D5C33F3E203728EDF8361EA868B2939CCC43FAFB"\n/* nickname=nx1tor */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.83.166.62:80 orport=443 id=D7082DB97E7F0481CBF4B88CA5F5683399E196A3"\n/* nickname=shhop */\n/* extrainfo=0 */\n/* ===== */\n,\n"54.36.237.163:80 orport=443 id=DB2682153AC0CCAECD2BD1E9EBE99C6815807A1E"\n/* nickname=GermanCraft2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"171.25.193.20:80 orport=443 id=DD8BD7307017407FCC36F8D04A688F74A0774C02"\n" ipv6=[2001:67c:289c::20]:443"\n/* nickname=DFRI0 */\n/* extrainfo=0 */\n/* ===== */\n,\n"83.212.99.68:80 orport=443 id=DDBB2A38252ADDA53E4492DDF982CA6CC6E10EC0"\n" ipv6=[2001:648:2ffc:1225:a800:bff:fe3d:67b5]:443"\n/* nickname=zouzounella */\n/* extrainfo=0 */\n/* ===== */\n,\n"166.70.207.2:9130 orport=9101 id=E41B16F7DDF52EBB1DB4268AB2FE340B37AD8904"\n/* nickname=xmission1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.100.86.182:9030 orport=8080 id=E51620B90DCB310138ED89EDEDD0A5C361AAE24E"\n/* nickname=NormalCitizen */\n/* extrainfo=0 */\n/* ===== */\n,\n"212.47.244.38:8080 orport=443 id=E81EF60A73B3809F8964F73766B01BAA0A171E20"\n/* nickname=Chimborazo */\n/* extrainfo=0 */\n/* ===== */\n,\n"185.4.132.148:80 orport=443 id=E8D114B3C78D8E6E7FEB1004650DD632C2143C9E"\n" ipv6=[2a02:c500:2:f0::5492]:443"\n/* nickname=libreonion1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"195.154.105.170:9030 orport=9001 id=E947C029087FA1C3499BEF5D4372947C51223D8F"\n/* nickname=dgplug */\n/* extrainfo=0 */\n/* ===== */\n,\n"131.188.40.188:1443 orport=11180 id=EBE718E1A49EE229071702964F8DB1F318075FF8"\n" ipv6=[2001:638:a000:4140::ffff:188]:11180"\n/* nickname=fluxe4 */\n/* extrainfo=1 */\n/* ===== */\n,\n"192.87.28.28:9030 orport=9001 id=ED2338CAC2711B3E331392E1ED2831219B794024"\n" ipv6=[2001:678:230:3028:192:87:28:28]:9001"\n/* nickname=SEC6xFreeBSD64 */\n/* extrainfo=0 */\n/* ===== */\n,\n"178.20.55.16:80 orport=443 id=EFAE44728264982224445E96214C15F9075DEE1D"\n/* nickname=marcuse1 */\n/* extrainfo=0 */\n/* ===== */\n,\n"217.182.75.181:9030 orport=9001 id=EFEACD781604EB80FBC025EDEDEA2D523AEAAA2F"\n/* nickname=Aerodynamik02 */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.234.15.58:80 orport=443 id=F24F8BEA2779A79111F33F6832B062BED306B9CB"\n" ipv6=[2a00:1c20:4089:1234:cdae:1b3e:cc38:3d45]:443"\n/* nickname=jaures2 */\n/* extrainfo=0 */\n/* ===== */\n,\n"129.13.131.140:80 orport=443 id=F2DFE5FA1E4CF54F8E761A6D304B9B4EC69BDAE8"\n" ipv6=[2a00:1398:5:f604:cafe:cafe:cafe:9001]:443"\n/* nickname=AlleKochenKaffee */\n/* extrainfo=0 */\n/* ===== */\n,\n"37.187.102.108:80 orport=443 id=F4263275CF54A6836EE7BD527B1328836A6F06E1"\n" ipv6=[2001:41d0:a:266c::1]:443"\n/* nickname=EvilMoe */\n/* extrainfo=0 */\n/* ===== */\n,\n"5.199.142.236:9030 orport=9001 id=F4C0EDAA0BF0F7EC138746F8FEF1CE26C7860265"\n/* nickname=tornodenumber9004 */\n/* extrainfo=0 */\n/* ===== */\n,\n"163.172.154.162:9030 orport=9001 id=F741E5124CB12700DA946B78C9B2DD175D6CD2A1"\n" ipv6=[2001:bc8:47a0:162a::1]:9001"\n/* nickname=rofltor06 */\n/* extrainfo=0 */\n/* ===== */\n,\n"78.47.18.110:443 orport=80 id=F8D27B163B9247B232A2EEE68DD8B698695C28DE"\n" ipv6=[2a01:4f8:120:4023::110]:80"\n/* nickname=fluxe3 */\n/* extrainfo=1 */\n/* ===== */\n,\n"91.143.88.62:80 orport=443 id=F9246DEF2B653807236DA134F2AEAB103D58ABFE"\n/* nickname=Freebird31 */\n/* extrainfo=1 */\n/* ===== */\n,\n"149.56.45.200:9030 orport=9001 id=FE296180018833AF03A8EACD5894A614623D3F76"\n" ipv6=[2607:5300:201:3000::17d3]:9002"\n/* nickname=PyotrTorpotkinOne */\n/* extrainfo=0 */\n/* ===== */\n,\n"62.141.38.69:80 orport=443 id=FF9FC6D130FA26AE3AE8B23688691DC419F0F22E"\n" ipv6=[2001:4ba0:cafe:ac5::]:443"\n/* nickname=rinderwahnRelay3L */\n/* extrainfo=0 */\n/* ===== */\n,\n"193.11.164.243:9030 orport=9001 id=FFA72BD683BC2FCF988356E6BEC1E490F313FB07"\n" ipv6=[2001:6b0:7:125::243]:9001"\n/* nickname=Lule */\n/* extrainfo=0 */\n/* ===== */\n,\n'
def make_readable(seconds): if seconds >= 0 and seconds <= 359999: hrs = seconds // 3600 seconds %= 3600 mins = seconds // 60 seconds %= 60 secs = seconds hour = str('{:02d}'.format(hrs)) minute = str('{:02d}'.format(mins)) second = str('{:02d}'.format(secs)) time = f"{hour}:{minute}:{second}" return time
def make_readable(seconds): if seconds >= 0 and seconds <= 359999: hrs = seconds // 3600 seconds %= 3600 mins = seconds // 60 seconds %= 60 secs = seconds hour = str('{:02d}'.format(hrs)) minute = str('{:02d}'.format(mins)) second = str('{:02d}'.format(secs)) time = f'{hour}:{minute}:{second}' return time
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) class Giraph(MavenPackage): """Apache Giraph is an iterative graph processing system built for high scalability.""" homepage = "https://giraph.apache.org/" url = "https://downloads.apache.org/giraph/giraph-1.0.0/giraph-dist-1.0.0-src.tar.gz" list_url = "https://downloads.apache.org/giraph/" list_depth = 1 version('1.2.0', sha256='6206f4ad220ea42aa0c4abecce343e36026cf9c6e0a2853f1eb08543da452ad1') version('1.1.0', sha256='181d94b8198c0f312d4611e24b0056b5181c8358a7ec89b0393661736cd19a4c') depends_on('java@7:', type=('build', 'run')) depends_on('maven@3.0.0:', type='build') def install(self, spec, prefix): giraph_path = join_path(self.stage.source_path, 'giraph-dist', 'target', 'giraph-{0}-for-hadoop-1.2.1-bin' .format(spec.version), 'giraph-{0}-for-hadoop-1.2.1' .format(spec.version)) with working_dir(giraph_path): install_tree('.', prefix)
class Giraph(MavenPackage): """Apache Giraph is an iterative graph processing system built for high scalability.""" homepage = 'https://giraph.apache.org/' url = 'https://downloads.apache.org/giraph/giraph-1.0.0/giraph-dist-1.0.0-src.tar.gz' list_url = 'https://downloads.apache.org/giraph/' list_depth = 1 version('1.2.0', sha256='6206f4ad220ea42aa0c4abecce343e36026cf9c6e0a2853f1eb08543da452ad1') version('1.1.0', sha256='181d94b8198c0f312d4611e24b0056b5181c8358a7ec89b0393661736cd19a4c') depends_on('java@7:', type=('build', 'run')) depends_on('maven@3.0.0:', type='build') def install(self, spec, prefix): giraph_path = join_path(self.stage.source_path, 'giraph-dist', 'target', 'giraph-{0}-for-hadoop-1.2.1-bin'.format(spec.version), 'giraph-{0}-for-hadoop-1.2.1'.format(spec.version)) with working_dir(giraph_path): install_tree('.', prefix)
""" Contains config variables unique to the user. Copy this file to config.py and make any necessary changes. """ PYTHON_COMMAND = "python"
""" Contains config variables unique to the user. Copy this file to config.py and make any necessary changes. """ python_command = 'python'
"""# `//ll:ll.bzl` Rules for building C/C++ with an upstream LLVM/Clang toolchain. Build files should import these rules via `@rules_ll//ll:defs.bzl`. """ load("//ll:providers.bzl", "LlCompilationDatabaseFragmentsInfo", "LlInfo") load( "//ll:internal_functions.bzl", "resolve_binary_deps", "resolve_library_deps", ) load( "//ll:actions.bzl", "compile_objects", "create_archive_library", "expose_headers", "link_bitcode_library", "link_executable", ) load( "//ll:attributes.bzl", "LL_BINARY_ATTRS", "LL_LIBRARY_ATTRS", ) def select_toolchain_type(ctx): if ctx.attr.heterogeneous_mode in ["hip_nvidia", "hip_amd"]: return "//ll:heterogeneous_toolchain_type" return "//ll:toolchain_type" def _ll_library_impl(ctx): ( headers, defines, includes, angled_includes, transitive_hdrs, transitive_defines, transitive_includes, transitive_angled_includes, ) = resolve_library_deps(ctx) intermediary_objects, cdfs = compile_objects( ctx, headers = headers, defines = defines, includes = includes, angled_includes = angled_includes, toolchain_type = select_toolchain_type(ctx), ) out_files = intermediary_objects if ctx.attr.aggregate == "static": out_file = create_archive_library( ctx, in_files = intermediary_objects, toolchain_type = select_toolchain_type(ctx), ) out_files = [out_file] elif ctx.attr.aggregate == "bitcode": out_file = link_bitcode_library( ctx, in_files = intermediary_objects, toolchain_type = select_toolchain_type(ctx), ) out_files = [out_file] transitive_cdfs = [ dep[LlCompilationDatabaseFragmentsInfo].cdfs for dep in ctx.attr.deps ] return [ DefaultInfo( files = depset(out_files), ), LlInfo( transitive_hdrs = transitive_hdrs, transitive_defines = transitive_defines, transitive_includes = transitive_includes, transitive_angled_includes = transitive_angled_includes, ), LlCompilationDatabaseFragmentsInfo( cdfs = depset(cdfs, transitive = transitive_cdfs), ), ] ll_library = rule( implementation = _ll_library_impl, executable = False, attrs = LL_LIBRARY_ATTRS, output_to_genfiles = True, toolchains = [ "//ll:toolchain_type", "//ll:heterogeneous_toolchain_type", ], doc = """ Creates a static archive. Example: ```python ll_library( srcs = ["my_library.cpp"], ) ``` """, ) def _ll_binary_impl(ctx): headers, defines, includes, angled_includes = resolve_binary_deps(ctx) intermediary_objects, cdfs = compile_objects( ctx, headers = headers, defines = defines, includes = includes, angled_includes = angled_includes, toolchain_type = select_toolchain_type(ctx), ) out_file = link_executable( ctx, in_files = intermediary_objects + ctx.files.deps, toolchain_type = select_toolchain_type(ctx), ) transitive_cdfs = [ dep[LlCompilationDatabaseFragmentsInfo].cdfs for dep in ctx.attr.deps ] return [ DefaultInfo( files = depset([out_file]), executable = out_file, ), LlCompilationDatabaseFragmentsInfo( cdfs = depset(cdfs, transitive = transitive_cdfs), ), ] ll_binary = rule( implementation = _ll_binary_impl, executable = True, attrs = LL_BINARY_ATTRS, toolchains = [ "//ll:toolchain_type", "//ll:heterogeneous_toolchain_type", ], doc = """ Creates an executable. Example: ```python ll_binary( srcs = ["my_executable.cpp"], ) ``` """, )
"""# `//ll:ll.bzl` Rules for building C/C++ with an upstream LLVM/Clang toolchain. Build files should import these rules via `@rules_ll//ll:defs.bzl`. """ load('//ll:providers.bzl', 'LlCompilationDatabaseFragmentsInfo', 'LlInfo') load('//ll:internal_functions.bzl', 'resolve_binary_deps', 'resolve_library_deps') load('//ll:actions.bzl', 'compile_objects', 'create_archive_library', 'expose_headers', 'link_bitcode_library', 'link_executable') load('//ll:attributes.bzl', 'LL_BINARY_ATTRS', 'LL_LIBRARY_ATTRS') def select_toolchain_type(ctx): if ctx.attr.heterogeneous_mode in ['hip_nvidia', 'hip_amd']: return '//ll:heterogeneous_toolchain_type' return '//ll:toolchain_type' def _ll_library_impl(ctx): (headers, defines, includes, angled_includes, transitive_hdrs, transitive_defines, transitive_includes, transitive_angled_includes) = resolve_library_deps(ctx) (intermediary_objects, cdfs) = compile_objects(ctx, headers=headers, defines=defines, includes=includes, angled_includes=angled_includes, toolchain_type=select_toolchain_type(ctx)) out_files = intermediary_objects if ctx.attr.aggregate == 'static': out_file = create_archive_library(ctx, in_files=intermediary_objects, toolchain_type=select_toolchain_type(ctx)) out_files = [out_file] elif ctx.attr.aggregate == 'bitcode': out_file = link_bitcode_library(ctx, in_files=intermediary_objects, toolchain_type=select_toolchain_type(ctx)) out_files = [out_file] transitive_cdfs = [dep[LlCompilationDatabaseFragmentsInfo].cdfs for dep in ctx.attr.deps] return [default_info(files=depset(out_files)), ll_info(transitive_hdrs=transitive_hdrs, transitive_defines=transitive_defines, transitive_includes=transitive_includes, transitive_angled_includes=transitive_angled_includes), ll_compilation_database_fragments_info(cdfs=depset(cdfs, transitive=transitive_cdfs))] ll_library = rule(implementation=_ll_library_impl, executable=False, attrs=LL_LIBRARY_ATTRS, output_to_genfiles=True, toolchains=['//ll:toolchain_type', '//ll:heterogeneous_toolchain_type'], doc='\nCreates a static archive.\n\nExample:\n\n ```python\n ll_library(\n srcs = ["my_library.cpp"],\n )\n ```\n') def _ll_binary_impl(ctx): (headers, defines, includes, angled_includes) = resolve_binary_deps(ctx) (intermediary_objects, cdfs) = compile_objects(ctx, headers=headers, defines=defines, includes=includes, angled_includes=angled_includes, toolchain_type=select_toolchain_type(ctx)) out_file = link_executable(ctx, in_files=intermediary_objects + ctx.files.deps, toolchain_type=select_toolchain_type(ctx)) transitive_cdfs = [dep[LlCompilationDatabaseFragmentsInfo].cdfs for dep in ctx.attr.deps] return [default_info(files=depset([out_file]), executable=out_file), ll_compilation_database_fragments_info(cdfs=depset(cdfs, transitive=transitive_cdfs))] ll_binary = rule(implementation=_ll_binary_impl, executable=True, attrs=LL_BINARY_ATTRS, toolchains=['//ll:toolchain_type', '//ll:heterogeneous_toolchain_type'], doc='\nCreates an executable.\n\nExample:\n\n ```python\n ll_binary(\n srcs = ["my_executable.cpp"],\n )\n ```\n')
epsilon_d_ = { "epsilon": ["float", "0.03", "0.01 ... 0.3"], } distribution_d_ = { "distribution": ["string", "normal", "normal, laplace, logistic, gumbel"], } n_neighbours_d_ = { "n_neighbours": ["int", "3", "1 ... 10"], } p_accept_d_ = { "p_accept": ["float", "0.1", "0.01 ... 0.3"], } repulsion_factor_d = { "repulsion_factor": ["float", "5", "2 ... 10"], } annealing_rate_d = { "annealing_rate": ["float", "0.97", "0.9 ... 0.99"], } start_temp_d = { "start_temp": ["float", "1", "0.5 ... 1.5"], } alpha_d = { "alpha": ["float", "1", "0.5 ... 2"], } gamma_d = { "gamma": ["float", "2", "0.5 ... 5"], } beta_d = { "beta": ["float", "0.5", "0.25 ... 3"], } sigma_d = { "sigma": ["float", "0.5", "0.25 ... 3"], } step_size_d = { "step_size": ["int", "1", "1 ... 1000"], } n_iter_restart_d = { "n_iter_restart": ["int", "10", "5 ... 20"], } iters_p_dim_d = { "iters_p_dim": ["int", "10", "5 ... 15"], } n_positions_d = { "n_positions": ["int", "4", "2 ... 8"], } pattern_size_d = { "pattern_size": ["float", "0.25", "0.1 ... 0.5"], } reduction_d = { "reduction": ["float", "0.9", "0.75 ... 0.99"], } population_parallel_temp_d = { "population": ["int", "5", "3 ... 15"], } n_iter_swap_parallel_temp_d = { "n_iter_swap": ["int", "10", "5 ... 15"], } population_pso_d = { "population": ["int", "10", "4 ... 15"], } inertia_d = { "inertia": ["float", "0.5", "0.25 ... 0.75"], } cognitive_weight_d = { "cognitive_weight": ["float", "0.5", "0.25 ... 0.75"], } social_weight_d = { "social_weight": ["float", "0.5", "0.25 ... 0.75"], } temp_weight_d = { "temp_weight": ["float", "0.2", "0.05 ... 0.3"], } population_evo_strat_d = { "population": ["int", "10", "4 ... 15"], } mutation_rate_d = { "mutation_rate": ["float", "0.7", "0.1 ... 0.9"], } crossover_rate_d = { "crossover_rate": ["float", "0.3", "0.1 ... 0.9"], } gpr_bayes_opt_d = { "gpr": ["class", "0.3", "-"], } xi_bayes_opt_d = { "xi": ["float", "0.3", "0.1 ... 0.9"], } warm_start_smbo_d = { "warm_start_smbo": ["pandas dataframe", "None", "-"], } max_sample_size_d = { "max_sample_size": ["int", "10000000", "-"], } sampling_d = { "sampling": ["dict", "{'random': 1000000}", "-"], } gamma_tpe_d = { "gamma_tpe": ["float", "0.2", "0.05 ... 0.75"], } tree_regressor_d = { "tree_regressor": [ "string", "extra_tree", "extra_tree, random_forest, gradient_boost", ], } tree_para_d = { "tree_para": ["dict", "{'n_estimators': 100}", "-"], } xi_forest_opt_d = { "xi": ["float", "0.03", "0.001 ... 0.1"], }
epsilon_d_ = {'epsilon': ['float', '0.03', '0.01 ... 0.3']} distribution_d_ = {'distribution': ['string', 'normal', 'normal, laplace, logistic, gumbel']} n_neighbours_d_ = {'n_neighbours': ['int', '3', '1 ... 10']} p_accept_d_ = {'p_accept': ['float', '0.1', '0.01 ... 0.3']} repulsion_factor_d = {'repulsion_factor': ['float', '5', '2 ... 10']} annealing_rate_d = {'annealing_rate': ['float', '0.97', '0.9 ... 0.99']} start_temp_d = {'start_temp': ['float', '1', '0.5 ... 1.5']} alpha_d = {'alpha': ['float', '1', '0.5 ... 2']} gamma_d = {'gamma': ['float', '2', '0.5 ... 5']} beta_d = {'beta': ['float', '0.5', '0.25 ... 3']} sigma_d = {'sigma': ['float', '0.5', '0.25 ... 3']} step_size_d = {'step_size': ['int', '1', '1 ... 1000']} n_iter_restart_d = {'n_iter_restart': ['int', '10', '5 ... 20']} iters_p_dim_d = {'iters_p_dim': ['int', '10', '5 ... 15']} n_positions_d = {'n_positions': ['int', '4', '2 ... 8']} pattern_size_d = {'pattern_size': ['float', '0.25', '0.1 ... 0.5']} reduction_d = {'reduction': ['float', '0.9', '0.75 ... 0.99']} population_parallel_temp_d = {'population': ['int', '5', '3 ... 15']} n_iter_swap_parallel_temp_d = {'n_iter_swap': ['int', '10', '5 ... 15']} population_pso_d = {'population': ['int', '10', '4 ... 15']} inertia_d = {'inertia': ['float', '0.5', '0.25 ... 0.75']} cognitive_weight_d = {'cognitive_weight': ['float', '0.5', '0.25 ... 0.75']} social_weight_d = {'social_weight': ['float', '0.5', '0.25 ... 0.75']} temp_weight_d = {'temp_weight': ['float', '0.2', '0.05 ... 0.3']} population_evo_strat_d = {'population': ['int', '10', '4 ... 15']} mutation_rate_d = {'mutation_rate': ['float', '0.7', '0.1 ... 0.9']} crossover_rate_d = {'crossover_rate': ['float', '0.3', '0.1 ... 0.9']} gpr_bayes_opt_d = {'gpr': ['class', '0.3', '-']} xi_bayes_opt_d = {'xi': ['float', '0.3', '0.1 ... 0.9']} warm_start_smbo_d = {'warm_start_smbo': ['pandas dataframe', 'None', '-']} max_sample_size_d = {'max_sample_size': ['int', '10000000', '-']} sampling_d = {'sampling': ['dict', "{'random': 1000000}", '-']} gamma_tpe_d = {'gamma_tpe': ['float', '0.2', '0.05 ... 0.75']} tree_regressor_d = {'tree_regressor': ['string', 'extra_tree', 'extra_tree, random_forest, gradient_boost']} tree_para_d = {'tree_para': ['dict', "{'n_estimators': 100}", '-']} xi_forest_opt_d = {'xi': ['float', '0.03', '0.001 ... 0.1']}
def fatorial(n): if n == 0: return 1 else: return n * fatorial(n - 1) while True: try: entrada = input() entrada = entrada.split() fat1 = fatorial(int(entrada[0])) fat2 = fatorial(int(entrada[1])) print(fat1 + fat2) except EOFError: break
def fatorial(n): if n == 0: return 1 else: return n * fatorial(n - 1) while True: try: entrada = input() entrada = entrada.split() fat1 = fatorial(int(entrada[0])) fat2 = fatorial(int(entrada[1])) print(fat1 + fat2) except EOFError: break
class TrieNode: def __init__(self): self.children = [None] * 26 self.end = False self.size = 0 class Trie: def __init__(self): self.root = self.getNode() def getNode(self): return TrieNode() def _charToIndex(self, ch): return ord(ch) - ord('a') def add(self, key): crawl = self.root for c in key: index = self._charToIndex(c) if not crawl.children[index]: crawl.children[index] = self.getNode() crawl = crawl.children[index] crawl.size += 1 crawl.end = True def find(self, key): crawl = self.root for c in key: index = self._charToIndex(c) if not crawl.children[index]: return 0 crawl = crawl.children[index] return crawl.size def contacts(queries): trie = Trie() results = [] for q in queries: c, v = q[0], q[1] if c == "add": trie.add(v) elif c == "find": results.append(trie.find(v)) return results trie = Trie() trie.add("hack") trie.add("hackerrank") print(trie.find("hac")) print(trie.find("hak"))
class Trienode: def __init__(self): self.children = [None] * 26 self.end = False self.size = 0 class Trie: def __init__(self): self.root = self.getNode() def get_node(self): return trie_node() def _char_to_index(self, ch): return ord(ch) - ord('a') def add(self, key): crawl = self.root for c in key: index = self._charToIndex(c) if not crawl.children[index]: crawl.children[index] = self.getNode() crawl = crawl.children[index] crawl.size += 1 crawl.end = True def find(self, key): crawl = self.root for c in key: index = self._charToIndex(c) if not crawl.children[index]: return 0 crawl = crawl.children[index] return crawl.size def contacts(queries): trie = trie() results = [] for q in queries: (c, v) = (q[0], q[1]) if c == 'add': trie.add(v) elif c == 'find': results.append(trie.find(v)) return results trie = trie() trie.add('hack') trie.add('hackerrank') print(trie.find('hac')) print(trie.find('hak'))
# coding=utf-8 """ This is exceptions used in graph package """ class GraphError(Exception): """ This is base graph error """ pass class GraphTypeError(GraphError, TypeError): """ This error occurs when there is a type mismatch in this package """ pass class GraphExistenceError(GraphError): """ This error occurs when there is a index mismatch in this package """ pass
""" This is exceptions used in graph package """ class Grapherror(Exception): """ This is base graph error """ pass class Graphtypeerror(GraphError, TypeError): """ This error occurs when there is a type mismatch in this package """ pass class Graphexistenceerror(GraphError): """ This error occurs when there is a index mismatch in this package """ pass
class Database: def __init__(self, row_counts): self.row_counts = row_counts self.max_row_count = max(row_counts) n_tables = len(row_counts) self.parents = list(range(n_tables)) def merge(self, src, dst): src_parent = self.get_parent(src) dst_parent = self.get_parent(dst) if src_parent == dst_parent: return False # use union by rank heuristic: # Rank array isn't necessary: we don't have any choice on how the union should be done self.parents[src_parent] = dst_parent self.row_counts[dst_parent] += self.row_counts[src_parent] self.row_counts[src_parent] = 0 if self.max_row_count < self.row_counts[dst_parent]: self.max_row_count = self.row_counts[dst_parent] return True # Time Complexity: O(log n) # Space Complexity: O(1) def get_parent(self, table): # find parent rootTable = table while rootTable != self.parents[rootTable]: rootTable = self.parents[rootTable] #Compress Path while table != rootTable: parent = self.parents[table] self.parents[table] = rootTable table = parent return rootTable def main(): n_tables, n_queries = map(int, input().split()) counts = list(map(int, input().split())) assert len(counts) == n_tables db = Database(counts) for i in range(n_queries): dst, src = map(int, input().split()) db.merge(src - 1, dst - 1) print(db.max_row_count) if __name__ == "__main__": main()
class Database: def __init__(self, row_counts): self.row_counts = row_counts self.max_row_count = max(row_counts) n_tables = len(row_counts) self.parents = list(range(n_tables)) def merge(self, src, dst): src_parent = self.get_parent(src) dst_parent = self.get_parent(dst) if src_parent == dst_parent: return False self.parents[src_parent] = dst_parent self.row_counts[dst_parent] += self.row_counts[src_parent] self.row_counts[src_parent] = 0 if self.max_row_count < self.row_counts[dst_parent]: self.max_row_count = self.row_counts[dst_parent] return True def get_parent(self, table): root_table = table while rootTable != self.parents[rootTable]: root_table = self.parents[rootTable] while table != rootTable: parent = self.parents[table] self.parents[table] = rootTable table = parent return rootTable def main(): (n_tables, n_queries) = map(int, input().split()) counts = list(map(int, input().split())) assert len(counts) == n_tables db = database(counts) for i in range(n_queries): (dst, src) = map(int, input().split()) db.merge(src - 1, dst - 1) print(db.max_row_count) if __name__ == '__main__': main()
# index_power # Created by JKChang # 16/04/2018, 14:49 # Tag: # Description: You are given an array with positive numbers and a number N. You should find the N-th power of the # element in the array with the index N. If N is outside of the array, then return -1. Don't forget that the first # element has the index 0. # # Let's look at a few examples: # - array = [1, 2, 3, 4] and N = 2, then the result is 32 == 9; # - array = [1, 2, 3] and N = 3, but N is outside of the array, so the result is -1. # # Input: Two arguments. An array as a list of integers and a number as a integer. # # Output: The result as an integer. def index_power(array, n): """ Find Nth power of the element with index N. """ if n > len(array) - 1: return -1 return array[n] ** n if __name__ == '__main__': # These "asserts" using only for self-checking and not necessary for auto-testing assert index_power([1, 2, 3, 4], 2) == 9, "Square" assert index_power([1, 3, 10, 100], 3) == 1000000, "Cube" assert index_power([0, 1], 0) == 1, "Zero power" assert index_power([1, 2], 3) == -1, "IndexError" print("Coding complete? Click 'Check' to review your tests and earn cool rewards!")
def index_power(array, n): """ Find Nth power of the element with index N. """ if n > len(array) - 1: return -1 return array[n] ** n if __name__ == '__main__': assert index_power([1, 2, 3, 4], 2) == 9, 'Square' assert index_power([1, 3, 10, 100], 3) == 1000000, 'Cube' assert index_power([0, 1], 0) == 1, 'Zero power' assert index_power([1, 2], 3) == -1, 'IndexError' print("Coding complete? Click 'Check' to review your tests and earn cool rewards!")
# Find the Most Competitive Subsequence: https://leetcode.com/problems/find-the-most-competitive-subsequence/ # Given an integer array nums and a positive integer k, return the most competitive subsequence of nums of size k. # An array's subsequence is a resulting sequence obtained by erasing some (possibly zero) elements from the array. # We define that a subsequence a is more competitive than a subsequence b (of the same length) if in the first position where a and b differ, subsequence a has a number less than the corresponding number in b. For example, [1,3,4] is more competitive than [1,3,5] because the first position they differ is at the final number, and 4 is less than 5. # If so we simply keep increasing the result so long as we have >= k possible letters to be added # by using a stack we can check if the number we are checking is smaller and can pop off the bigger so long as the above condition is true class Solution: def mostCompetitive(self, nums, k: int): stack = [] addition = len(nums) - k for num in nums: while addition > 0 and len(stack) > 0 and stack[-1] > num: stack.pop() addition -= 1 stack.append(num) while len(stack) != k: stack.pop() return stack # The above works the trick is we need to figure out how many values we are allowed to remove while parsing so that we can always have # the right amount of values in the subset. Then we can pop off any values if they are larger numbers on our parsing stack than our # cur num # This will run in O(N) for time and space and I believe it is optimal. # Score Card # Did I need hints? N # Did you finish within 30 min? 15 # Was the solution optimal? Yea # Were there any bugs? I forgot that I need to pop off all numbers # 5 5 5 4 = 4.75
class Solution: def most_competitive(self, nums, k: int): stack = [] addition = len(nums) - k for num in nums: while addition > 0 and len(stack) > 0 and (stack[-1] > num): stack.pop() addition -= 1 stack.append(num) while len(stack) != k: stack.pop() return stack
# Copyright (c) 2018 SMHI, Swedish Meteorological and Hydrological Institute # License: MIT License (see LICENSE.txt or http://opensource.org/licenses/mit). """ ================================================================================ ================================================================================ ================================================================================ """ def grid_configure(frame, rows={}, columns={}): """ Put weighting on the given frame. Rows an collumns that ar not in rows and columns will get weight 1. """ for r in range(30): if r in rows: frame.grid_rowconfigure(r, weight=rows[r]) else: frame.grid_rowconfigure(r, weight=1) for c in range(30): if c in columns: frame.grid_columnconfigure(c, weight=columns[c]) else: frame.grid_columnconfigure(c, weight=1) """ ================================================================================ ================================================================================ ================================================================================ """
""" ================================================================================ ================================================================================ ================================================================================ """ def grid_configure(frame, rows={}, columns={}): """ Put weighting on the given frame. Rows an collumns that ar not in rows and columns will get weight 1. """ for r in range(30): if r in rows: frame.grid_rowconfigure(r, weight=rows[r]) else: frame.grid_rowconfigure(r, weight=1) for c in range(30): if c in columns: frame.grid_columnconfigure(c, weight=columns[c]) else: frame.grid_columnconfigure(c, weight=1) '\n================================================================================\n================================================================================\n================================================================================\n'
# Template 1. preorder DFS # O(N) / O(H) class Solution: def preorderTraversal(self, root: TreeNode) -> List[int]: arr = [] def preorder(node): if not node: return arr.append(node.val) preorder(node.left) preorder(node.right) preorder(root) return arr # Template 2. inorder DFS # O(N) / O(H) class Solution: def inorderTraversal(self, root: TreeNode) -> List[int]: arr = [] def inorder(node): if not node: return inorder(node.left) arr.append(node.val) inorder(node.right) inorder(root) return arr # Template 3. postorder DFS # O(N) / O(H) class Solution: def postorderTraversal(self, root: TreeNode) -> List[int]: arr = [] def postorder(node): if not node: return postorder(node.left) postorder(node.right) arr.append(node.val) postorder(root) return arr
class Solution: def preorder_traversal(self, root: TreeNode) -> List[int]: arr = [] def preorder(node): if not node: return arr.append(node.val) preorder(node.left) preorder(node.right) preorder(root) return arr class Solution: def inorder_traversal(self, root: TreeNode) -> List[int]: arr = [] def inorder(node): if not node: return inorder(node.left) arr.append(node.val) inorder(node.right) inorder(root) return arr class Solution: def postorder_traversal(self, root: TreeNode) -> List[int]: arr = [] def postorder(node): if not node: return postorder(node.left) postorder(node.right) arr.append(node.val) postorder(root) return arr
""" Macro for supporting custom resource set typically defined in Gradle via source sets. Bazel expects all resources to be in same root `res` directory but Gradle does not have this limitation. This macro receives directory on file system and copies the required resources to Bazel compatible `res` folder during build. """ def custom_res(target, dir_name, resource_files = []): """ This macro make sures the given `resource_files` are present in a bazel compatible folder by copying them to correct directory. Args: target: The label of the target for which resources should be made compatible. dir_name: The root name of the folder which needs to be fixed i.e the one that is not `res` resource_files: The list of files that should be copied to. Usually a result of `glob` function. Returns: A list of generated resource_files in the correct `res` directory that can be specified as input to android_library or android_binary rules """ new_root_dir = target + "_" + dir_name fixed_resource_path = [] for old_resource_path in resource_files: fixed_path = new_root_dir + "/" + old_resource_path.replace("/" + dir_name, "/res") fixed_resource_path.append(fixed_path) genrule_suffix = old_resource_path.replace("/", "_").replace(".", "_").replace("-", "_") native.genrule( name = "_" + target + "_" + genrule_suffix, srcs = [old_resource_path], outs = [fixed_path], cmd = "cp $< $@", ) return fixed_resource_path
""" Macro for supporting custom resource set typically defined in Gradle via source sets. Bazel expects all resources to be in same root `res` directory but Gradle does not have this limitation. This macro receives directory on file system and copies the required resources to Bazel compatible `res` folder during build. """ def custom_res(target, dir_name, resource_files=[]): """ This macro make sures the given `resource_files` are present in a bazel compatible folder by copying them to correct directory. Args: target: The label of the target for which resources should be made compatible. dir_name: The root name of the folder which needs to be fixed i.e the one that is not `res` resource_files: The list of files that should be copied to. Usually a result of `glob` function. Returns: A list of generated resource_files in the correct `res` directory that can be specified as input to android_library or android_binary rules """ new_root_dir = target + '_' + dir_name fixed_resource_path = [] for old_resource_path in resource_files: fixed_path = new_root_dir + '/' + old_resource_path.replace('/' + dir_name, '/res') fixed_resource_path.append(fixed_path) genrule_suffix = old_resource_path.replace('/', '_').replace('.', '_').replace('-', '_') native.genrule(name='_' + target + '_' + genrule_suffix, srcs=[old_resource_path], outs=[fixed_path], cmd='cp $< $@') return fixed_resource_path
"""Test Forex API""" def test_forex_api_doc(client): response = client.get("/api/forex/doc") assert response.status_code == 200 def test_get_usd_rates(client): response = client.get("/api/forex/rates/usd", follow_redirects=True) assert response.status_code == 200
"""Test Forex API""" def test_forex_api_doc(client): response = client.get('/api/forex/doc') assert response.status_code == 200 def test_get_usd_rates(client): response = client.get('/api/forex/rates/usd', follow_redirects=True) assert response.status_code == 200
with open('artistsfollowed.txt') as af: with open('/app/tosearch.txt', 'w') as ts: for line in af: if 'name' in line: line = line.strip() line = line.replace('name: ', '') line = line.replace('\'', '') line = line.replace(',', '') ts.write(line + '\n')
with open('artistsfollowed.txt') as af: with open('/app/tosearch.txt', 'w') as ts: for line in af: if 'name' in line: line = line.strip() line = line.replace('name: ', '') line = line.replace("'", '') line = line.replace(',', '') ts.write(line + '\n')
class Evaluator: """ A superclass for metrics evaluations""" def __init__(self, qp_ens): """Class constructor. Parameters ---------- qp_ens: qp.Ensemble object PDFs as qp.Ensemble """ self._qp_ens = qp_ens def evaluate(self): #pragma: no cover """ Evaluates the metric a function of the truth and prediction Returns ------- metric: dictionary value of the metric and statistics thereof """ raise NotImplementedError # class CRPS(Evaluator): # ''' Continuous rank probability score (Gneiting et al., 2006)''' # # def __init__(self, sample, name="CRPS"): # """Class constructor. # Parameters # ---------- # sample: `qp.ensemble` # ensemble of PDFS # name: `str` # the name of the metric # """ # super().__init__(sample, name) # # # def evaluate(self): # raise NotImplementedError
class Evaluator: """ A superclass for metrics evaluations""" def __init__(self, qp_ens): """Class constructor. Parameters ---------- qp_ens: qp.Ensemble object PDFs as qp.Ensemble """ self._qp_ens = qp_ens def evaluate(self): """ Evaluates the metric a function of the truth and prediction Returns ------- metric: dictionary value of the metric and statistics thereof """ raise NotImplementedError
'''a Python program to print out a set containing all the colors from color_list_1 which are not present in color_list_2''' def main(): color_list_1 = set(["White", "Black", "Red"]) color_list_2 = set(["Red", "Green"]) print("Original set elements:") print(color_list_1) print(color_list_2) print("\nDifferenct of color_list_1 and color_list_2:") print(color_list_1.difference(color_list_2)) print("\nDifferenct of color_list_2 and color_list_1:") print(color_list_2.difference(color_list_1)) main ()
"""a Python program to print out a set containing all the colors from color_list_1 which are not present in color_list_2""" def main(): color_list_1 = set(['White', 'Black', 'Red']) color_list_2 = set(['Red', 'Green']) print('Original set elements:') print(color_list_1) print(color_list_2) print('\nDifferenct of color_list_1 and color_list_2:') print(color_list_1.difference(color_list_2)) print('\nDifferenct of color_list_2 and color_list_1:') print(color_list_2.difference(color_list_1)) main()
input = [ ('Mamma Mia', ['ABBA']), ('Ghost Rule', ['DECO*27', 'Hatsune Miku']), ('Animals', ['Martin Garrix']), ('Remember The Name', ['Ed Sheeran', 'Eminem', '50 Cent']), ('404 Not Found', []) ] def songTitle(song): artists = '' if len(song[1]) > 1: for artist in range(len(song[1])): artists += (song[1][artist] + ', ') artists += ('and ' + song[1][-1] + ' - ') elif len(song[1]) == 1: artists = song[1][0] + ' - ' else: artists = '' return artists + song[0] for song in input: print(songTitle(song))
input = [('Mamma Mia', ['ABBA']), ('Ghost Rule', ['DECO*27', 'Hatsune Miku']), ('Animals', ['Martin Garrix']), ('Remember The Name', ['Ed Sheeran', 'Eminem', '50 Cent']), ('404 Not Found', [])] def song_title(song): artists = '' if len(song[1]) > 1: for artist in range(len(song[1])): artists += song[1][artist] + ', ' artists += 'and ' + song[1][-1] + ' - ' elif len(song[1]) == 1: artists = song[1][0] + ' - ' else: artists = '' return artists + song[0] for song in input: print(song_title(song))
class FluidAudioDriver(): ''' Represents the FluidSynth audio driver object as defined in audio.h. This class is inspired by the FluidAudioDriver object from pyfluidsynth by MostAwesomeDude. Member: audio_driver -- The FluidSynth audio driver object (fluid_audio_driver_t). handle -- The handle to the FluidSynth library. Should be FluidHandle but a raw handle will probably work, too (FluidHandle). ''' def __init__( self, handle, synth, settings ): ''' Create a new FluidSynth audio driver instance using given handle, synth and settings objects. ''' self.handle = handle self.audio_driver = handle.new_fluid_audio_driver( settings.settings, synth.synth ) def __del__(self): ''' Delete the audio driver. ''' self.handle.delete_fluid_audio_driver( self.audio_driver )
class Fluidaudiodriver: """ Represents the FluidSynth audio driver object as defined in audio.h. This class is inspired by the FluidAudioDriver object from pyfluidsynth by MostAwesomeDude. Member: audio_driver -- The FluidSynth audio driver object (fluid_audio_driver_t). handle -- The handle to the FluidSynth library. Should be FluidHandle but a raw handle will probably work, too (FluidHandle). """ def __init__(self, handle, synth, settings): """ Create a new FluidSynth audio driver instance using given handle, synth and settings objects. """ self.handle = handle self.audio_driver = handle.new_fluid_audio_driver(settings.settings, synth.synth) def __del__(self): """ Delete the audio driver. """ self.handle.delete_fluid_audio_driver(self.audio_driver)
class Writer: def __init__(self, outfile): self.outfile = outfile def writeHeader(self): pass def write(self, record): pass def writeFooter(self): pass
class Writer: def __init__(self, outfile): self.outfile = outfile def write_header(self): pass def write(self, record): pass def write_footer(self): pass
""" Pyformlang ========== Pyformlang is a python module to perform operation on formal languages. How to use the documentation ---------------------------- Documentation is available in two formats: docstrings directly in the code and a readthedocs website: https://pyformlang.readthedocs.io. Available subpackages --------------------- regular_expression Regular Expressions finite_automaton Finite automata (deterministic, non-deterministic, with/without epsilon transitions fst Finite State Transducers cfg Context-Free Grammar pda Push-Down Automata Indexed Grammar Indexed Grammar rsa Recursive automaton """ __all__ = ["finite_automaton", "regular_expression", "cfg", "fst", "indexed_grammar", "pda", "rsa"]
""" Pyformlang ========== Pyformlang is a python module to perform operation on formal languages. How to use the documentation ---------------------------- Documentation is available in two formats: docstrings directly in the code and a readthedocs website: https://pyformlang.readthedocs.io. Available subpackages --------------------- regular_expression Regular Expressions finite_automaton Finite automata (deterministic, non-deterministic, with/without epsilon transitions fst Finite State Transducers cfg Context-Free Grammar pda Push-Down Automata Indexed Grammar Indexed Grammar rsa Recursive automaton """ __all__ = ['finite_automaton', 'regular_expression', 'cfg', 'fst', 'indexed_grammar', 'pda', 'rsa']
def b2tc(number: int): """ Funcion devuelve el complemento de un numero entero el cual se define como "inversion de todos los bits". :param number: numero entero :type number: int :return: cadena conforme a resultado inverso de bit (XOR) :rtype: str """ b2int = int(bin(number)[2:]) # [2:] excluir `0b` xor = ['0' if b == '1' else '1' for b in str(b2int)] # comprehension list return ''.join(xor) if __name__ == '__main__': """ -- Complemento a 2's -- Leer un numero entero, convertir a binario e imprimir complemento a 2's de la conversion rapida. La conversion rapida comprende invertir todo los bits de un numero entero. En el sistema binario se conoce como operacion XOR. La conversion rapida es relativamente sencilla, 1ro. convertir a binario 2do. invertir bits Tome en cuenta que este ejemplo NO ES la implementacion adecuada del metodo sino, comprender un poco el uso de "comprehension list". Fuente (https://es.wikipedia.org/wiki/Complemento_a_dos) """ num = 0 print("INGRESE NUMERO ENTERO") try: num = int(input("R: ")) except ValueError as ex: raise RuntimeError("<EXCEPCION> Tipo de dato invalido") from ex print("\nNUMERO ENTERO %d\n2b: %d\ncomplemento 2b: %s" % (num, int(bin(num)[2:]), b2tc(num)))
def b2tc(number: int): """ Funcion devuelve el complemento de un numero entero el cual se define como "inversion de todos los bits". :param number: numero entero :type number: int :return: cadena conforme a resultado inverso de bit (XOR) :rtype: str """ b2int = int(bin(number)[2:]) xor = ['0' if b == '1' else '1' for b in str(b2int)] return ''.join(xor) if __name__ == '__main__': '\n -- Complemento a 2\'s --\n\n Leer un numero entero, convertir a binario e imprimir complemento a 2\'s de la conversion rapida.\n\n La conversion rapida comprende invertir todo los bits de un numero entero. En el sistema binario\n se conoce como operacion XOR.\n\n La conversion rapida es relativamente sencilla,\n 1ro. convertir a binario\n 2do. invertir bits\n\n Tome en cuenta que este ejemplo NO ES la implementacion adecuada del metodo sino,\n comprender un poco el uso de "comprehension list".\n\n Fuente (https://es.wikipedia.org/wiki/Complemento_a_dos)\n ' num = 0 print('INGRESE NUMERO ENTERO') try: num = int(input('R: ')) except ValueError as ex: raise runtime_error('<EXCEPCION> Tipo de dato invalido') from ex print('\nNUMERO ENTERO %d\n2b: %d\ncomplemento 2b: %s' % (num, int(bin(num)[2:]), b2tc(num)))
a='cdef' b='ab' print('a'/'b') a=8 b='ab' print('a'/'b')
a = 'cdef' b = 'ab' print('a' / 'b') a = 8 b = 'ab' print('a' / 'b')
## GroupID-8 (14114002_14114068) - Abhishek Jaisingh & Tarun Kumar ## Date: April 15, 2016 ## bitwise_manipulations.py - Bitwise Manipulation Functions for Travelling Salesman Problem def size(int_type): length = 0 count = 0 while (int_type): count += (int_type & 1) length += 1 int_type >>= 1 return count def length(int_type): length = 0 count = 0 while (int_type): count += (int_type & 1) length += 1 int_type >>= 1 return length
def size(int_type): length = 0 count = 0 while int_type: count += int_type & 1 length += 1 int_type >>= 1 return count def length(int_type): length = 0 count = 0 while int_type: count += int_type & 1 length += 1 int_type >>= 1 return length
class Solution: def calPoints(self, ops): """ :type ops: List[str] :rtype: int """ result = [] for v in ops: length = len(result) if v == '+': if length >= 2: result.append(result[length-1] + result[length-2]) elif v == 'D': if length >= 1: result.append(result[length-1]*2) elif v == 'C': if length >= 1: result.pop() else: result.append(int(v)) return sum(result) if __name__ == '__main__': s = Solution() print(s.calPoints(["5","2","C","D","+"])) print(s.calPoints(["5","-2","4","C","D","9","+","+"])) print(s.calPoints(["D","-2","C","C","D","9","+","C"]))
class Solution: def cal_points(self, ops): """ :type ops: List[str] :rtype: int """ result = [] for v in ops: length = len(result) if v == '+': if length >= 2: result.append(result[length - 1] + result[length - 2]) elif v == 'D': if length >= 1: result.append(result[length - 1] * 2) elif v == 'C': if length >= 1: result.pop() else: result.append(int(v)) return sum(result) if __name__ == '__main__': s = solution() print(s.calPoints(['5', '2', 'C', 'D', '+'])) print(s.calPoints(['5', '-2', '4', 'C', 'D', '9', '+', '+'])) print(s.calPoints(['D', '-2', 'C', 'C', 'D', '9', '+', 'C']))
print("To change the data type of data") int_data=int(input("Enter the integer data:")) dec_data=float(input("Enter the decimal data:")) int_str=str(int_data) print(int_str) dec_int=int(dec_data) print(dec_int) dec_str=str(dec_data)
print('To change the data type of data') int_data = int(input('Enter the integer data:')) dec_data = float(input('Enter the decimal data:')) int_str = str(int_data) print(int_str) dec_int = int(dec_data) print(dec_int) dec_str = str(dec_data)
# Time: O(l) # Space: O(l) # Given an integer n, find the closest integer (not including itself), which is a palindrome. # # The 'closest' is defined as absolute difference minimized between two integers. # # Example 1: # Input: "123" # Output: "121" # Note: # The input n is a positive integer represented by string, whose length will not exceed 18. # If there is a tie, return the smaller one as answer. class Solution(object): def nearestPalindromic(self, n): """ :type n: str :rtype: str """ l = len(n) candidates = set((str(10**l + 1), str(10**(l - 1) - 1))) prefix = int(n[:(l + 1)/2]) for i in map(str, (prefix-1, prefix, prefix+1)): candidates.add(i + [i, i[:-1]][l%2][::-1]) candidates.discard(n) return min(candidates, key=lambda x: (abs(int(x) - int(n)), int(x)))
class Solution(object): def nearest_palindromic(self, n): """ :type n: str :rtype: str """ l = len(n) candidates = set((str(10 ** l + 1), str(10 ** (l - 1) - 1))) prefix = int(n[:(l + 1) / 2]) for i in map(str, (prefix - 1, prefix, prefix + 1)): candidates.add(i + [i, i[:-1]][l % 2][::-1]) candidates.discard(n) return min(candidates, key=lambda x: (abs(int(x) - int(n)), int(x)))
@dataclass class Point: x: int y: int z: int position = property() @position.setter def position(self, new_value): if type(new_value) not in (list, tuple, set): raise TypeError elif len(new_value) != 3: raise ValueError else: self.x, self.y, self.z = new_value
@dataclass class Point: x: int y: int z: int position = property() @position.setter def position(self, new_value): if type(new_value) not in (list, tuple, set): raise TypeError elif len(new_value) != 3: raise ValueError else: (self.x, self.y, self.z) = new_value
def get_sql_lite_conn_str(db_file: str): db_file_stripped = db_file.strip() if not db_file or not db_file_stripped: # db_file = '../db/meet_app.db' raise Exception("SQL lite DB file is not specified.") return 'sqlite:///' + db_file_stripped
def get_sql_lite_conn_str(db_file: str): db_file_stripped = db_file.strip() if not db_file or not db_file_stripped: raise exception('SQL lite DB file is not specified.') return 'sqlite:///' + db_file_stripped
# -*- coding: utf-8 -*- __author__ = """Juan Eiros""" __email__ = 'jeirosz@gmail.com'
__author__ = 'Juan Eiros' __email__ = 'jeirosz@gmail.com'
# definitions used # constants DE = 'DE' # variables qhLen = None # number of rows of the quarter of an hour output vector (4 each hour) header = None zones = None dataImport = None dataExport = None sumImport = None sumExport = None data = None # output file names output_file_name = 'GenerationAndLoad.csv' # countries in the CWE area countries_dict = { 'AT': ['DE'], 'BE': ['FR', 'NL'], 'FR': ['BE', 'DE'], 'DE': ['AT', 'FR', 'NL'], 'NL': ['BE', 'DE'] } PSRTYPE_MAPPINGS = { 'A03': 'Mixed', 'A04': 'Generation', 'A05': 'Load', 'B01': 'Biomass', 'B02': 'Fossil Brown coal/Lignite', 'B03': 'Fossil Coal-derived gas', 'B04': 'Fossil Gas', 'B05': 'Fossil Hard coal', 'B06': 'Fossil Oil', 'B07': 'Fossil Oil shale', 'B08': 'Fossil Peat', 'B09': 'Geothermal', 'B10': 'Hydro Pumped Storage', 'B11': 'Hydro Run-of-river and poundage', 'B12': 'Hydro Water Reservoir', 'B13': 'Marine', 'B14': 'Nuclear', 'B15': 'Other renewable', 'B16': 'Solar', 'B17': 'Waste', 'B18': 'Wind Offshore', 'B19': 'Wind Onshore', 'B20': 'Other', 'B21': 'AC Link', 'B22': 'DC Link', 'B23': 'Substation', 'B24': 'Transformer' } # countries_dict = { # #'AL': ['GR', 'ME', 'RS'], # 'AT': ['CZ', 'DE', 'HU', 'IT', 'SI', 'CH'], # #'BY': ['LT', 'UA'], # 'BE': ['FR', 'LU', 'NL'], # #'BA': ['HR', 'ME', 'RS'], # 'BG': ['GR', 'MK', 'RO', 'RS', 'TR'], # #'HR': ['BA', 'HU', 'RS', 'SI'], # 'CZ': ['AT', 'DE', 'PL', 'SK'], # 'DK': ['DE', 'NO', 'SE'], # 'EE': ['FI', 'LV', 'RU'], # #'MK': ['BG', 'GR', 'RS'], # 'FI': ['EE', 'NO', 'RU', 'SE'], # 'FR': ['BE', 'DE', 'IT', 'ES', 'CH'], # 'DE': ['AT', 'CH', 'CZ', 'DK', 'FR', 'NL', 'PL', 'SE'], # 'GR': ['AL', 'BG', 'IT', 'MK', 'TR'], # 'HU': ['AT', 'HR', 'RO', 'RS', 'SK', 'UA'], # 'IT': ['AT', 'FR', 'GR', 'MT', 'SI', 'CH'], # 'LV': ['EE', 'LT', 'RU'], # 'LT': ['BY', 'LV', 'PL', 'RU', 'SE'], # #'MT': ['IT'], # #'MD': ['UA'], # 'ME': ['AL', 'BA', 'RS'], # 'NL': ['BE', 'DE', 'NO'], # 'NO': ['DK', 'FI', 'NL', 'SE'], # 'PL': ['CZ', 'DE', 'LT', 'SK', 'SE', 'UA'], # 'PT': ['ES'], # 'RO': ['BG', 'HU', 'RS', 'UA'], # #'RU': ['EE', 'FI', 'LV', 'LT', 'UA'], # #'RS': ['AL', 'BA', 'BG', 'HR', 'HU', 'MK', 'ME', 'RO'], # 'SK': ['CZ', 'HU', 'PL', 'UA'], # 'SI': ['AT', 'HR', 'IT'], # 'ES': ['FR', 'PT'], # 'SE': ['DK', 'FI', 'DE', 'LT', 'NO', 'PL'], # 'CH': ['AT', 'FR', 'DE', 'IT'] # #'TR': ['BG', 'GR'], # #'UA': ['BY', 'HU', 'MD', 'PL', 'RO', 'RU', 'SK'] # }
de = 'DE' qh_len = None header = None zones = None data_import = None data_export = None sum_import = None sum_export = None data = None output_file_name = 'GenerationAndLoad.csv' countries_dict = {'AT': ['DE'], 'BE': ['FR', 'NL'], 'FR': ['BE', 'DE'], 'DE': ['AT', 'FR', 'NL'], 'NL': ['BE', 'DE']} psrtype_mappings = {'A03': 'Mixed', 'A04': 'Generation', 'A05': 'Load', 'B01': 'Biomass', 'B02': 'Fossil Brown coal/Lignite', 'B03': 'Fossil Coal-derived gas', 'B04': 'Fossil Gas', 'B05': 'Fossil Hard coal', 'B06': 'Fossil Oil', 'B07': 'Fossil Oil shale', 'B08': 'Fossil Peat', 'B09': 'Geothermal', 'B10': 'Hydro Pumped Storage', 'B11': 'Hydro Run-of-river and poundage', 'B12': 'Hydro Water Reservoir', 'B13': 'Marine', 'B14': 'Nuclear', 'B15': 'Other renewable', 'B16': 'Solar', 'B17': 'Waste', 'B18': 'Wind Offshore', 'B19': 'Wind Onshore', 'B20': 'Other', 'B21': 'AC Link', 'B22': 'DC Link', 'B23': 'Substation', 'B24': 'Transformer'}
class StructureObject: def __init__(self, name="", dir=""): self.name = name self.dir = dir def getName(self): return self.name def getDir(self): return self.dir def getType(self): return type(self)
class Structureobject: def __init__(self, name='', dir=''): self.name = name self.dir = dir def get_name(self): return self.name def get_dir(self): return self.dir def get_type(self): return type(self)
def dbl_linear(n): u = [1] def func(nums): global fin new_nums = [] for i in nums: new_nums.append(2 * i + 1) new_nums.append(3 * i + 1) u.extend(new_nums) if len(u) > n*12: fin = list(sorted(set(u))) else: func(new_nums) return fin[n] return func(u) if __name__ == "__main__": print(dbl_linear(10)) print(dbl_linear(500)) print(dbl_linear(60000))
def dbl_linear(n): u = [1] def func(nums): global fin new_nums = [] for i in nums: new_nums.append(2 * i + 1) new_nums.append(3 * i + 1) u.extend(new_nums) if len(u) > n * 12: fin = list(sorted(set(u))) else: func(new_nums) return fin[n] return func(u) if __name__ == '__main__': print(dbl_linear(10)) print(dbl_linear(500)) print(dbl_linear(60000))
def write(filename, content): with open(filename, 'w') as file_object: file_object.write(content) def appendWrite(filename, content): with open(filename, 'a') as file_object: file_object.write(content)
def write(filename, content): with open(filename, 'w') as file_object: file_object.write(content) def append_write(filename, content): with open(filename, 'a') as file_object: file_object.write(content)
# Copyright (c) 2021 Qianyun, Inc. All rights reserved. # smartx SMARTX_INSTANCE_STATE_STOPPED = 'stopped'
smartx_instance_state_stopped = 'stopped'
for i in range (6): for j in range (7): if (i==0 and j %3!=0) or (i==1 and j % 3==0) or (i-j==2) or (i+j==8): print("*",end=" ") else: print(end=" ") print()
for i in range(6): for j in range(7): if i == 0 and j % 3 != 0 or (i == 1 and j % 3 == 0) or i - j == 2 or (i + j == 8): print('*', end=' ') else: print(end=' ') print()
A, B, K = map(int, input().split()) if A >= K: A -= K print(A, B) else: A, B = 0, max(0, B - (K - A)) print(A, B)
(a, b, k) = map(int, input().split()) if A >= K: a -= K print(A, B) else: (a, b) = (0, max(0, B - (K - A))) print(A, B)
class Solution: def plusOne(self, digits: List[int]) -> List[int]: carry = 0 for i in range(len(digits) - 1, -1, -1): if i == len(digits) - 1: digits[i] += 1 else: digits[i] = digits[i] + carry if digits[i] == 10: digits[i] = 0 carry = 1 else: carry = 0 return digits if not carry else [1] + digits
class Solution: def plus_one(self, digits: List[int]) -> List[int]: carry = 0 for i in range(len(digits) - 1, -1, -1): if i == len(digits) - 1: digits[i] += 1 else: digits[i] = digits[i] + carry if digits[i] == 10: digits[i] = 0 carry = 1 else: carry = 0 return digits if not carry else [1] + digits
def close(n, smallest=10, d=10): """ A sequence is near increasing if each element but the last two is smaller than all elements following its subsequent element. That is, element i must be smaller than elements i + 2, i + 3, i + 4, etc. Implement close, which takes a non-negative integer n and returns the largest near increasing sequence of digits within n as an integer. The arguments smallest and d are part of the implementation; you must determine their purpose. The only values you may use are integers and booleans (True and False) (no lists, strings, etc.). Return the longest sequence of near-increasing digits in n. >>> close(123) 123 >>> close(153) 153 >>> close(1523) 153 >>> close(15123) 1123 >>> close(11111111) 11 >>> close(985357) 557 >>> close(14735476) 143576 >>> close(812348567) 1234567 """ if n == 0: return ______ no = close(n//10, smallest, d) if smallest > ______: yes = ______ return ______(yes, no) return ______ # ORIGINAL SKELETON FOLLOWS # def close(n, smallest=10, d=10): # """ A sequence is near increasing if each element but the last two is smaller than all elements # following its subsequent element. That is, element i must be smaller than elements i + 2, i + 3, i + 4, etc. # Implement close, which takes a non-negative integer n and returns the largest near increasing sequence # of digits within n as an integer. The arguments smallest and d are part of the implementation; you must # determine their purpose. The only values you may use are integers and booleans (True and False) (no lists, strings, etc.). # Return the longest sequence of near-increasing digits in n. # >>> close(123) # 123 # >>> close(153) # 153 # >>> close(1523) # 153 # >>> close(15123) # 1123 # >>> close(11111111) # 11 # >>> close(985357) # 557 # >>> close(14735476) # 143576 # >>> close(812348567) # 1234567 # """ # if n == 0: # return ______ # no = close(n//10, smallest, d) # if smallest > ______: # yes = ______ # return ______(yes, no) # return ______
def close(n, smallest=10, d=10): """ A sequence is near increasing if each element but the last two is smaller than all elements following its subsequent element. That is, element i must be smaller than elements i + 2, i + 3, i + 4, etc. Implement close, which takes a non-negative integer n and returns the largest near increasing sequence of digits within n as an integer. The arguments smallest and d are part of the implementation; you must determine their purpose. The only values you may use are integers and booleans (True and False) (no lists, strings, etc.). Return the longest sequence of near-increasing digits in n. >>> close(123) 123 >>> close(153) 153 >>> close(1523) 153 >>> close(15123) 1123 >>> close(11111111) 11 >>> close(985357) 557 >>> close(14735476) 143576 >>> close(812348567) 1234567 """ if n == 0: return ______ no = close(n // 10, smallest, d) if smallest > ______: yes = ______ return ______(yes, no) return ______
#!/usr/bin/env python3 charlimit = 450 def isChan(chan, checkprefix): if not chan: return False elif chan.startswith("#"): return True elif checkprefix and len(chan) >= 2 and not chan[0].isalnum() and chan[1] == "#": return True else: return False
charlimit = 450 def is_chan(chan, checkprefix): if not chan: return False elif chan.startswith('#'): return True elif checkprefix and len(chan) >= 2 and (not chan[0].isalnum()) and (chan[1] == '#'): return True else: return False
__title__ = 'MongoFlask' __description__ = 'A Python Flask library for connecting a MongoDB instance to a Flask application' __url__ = 'https://github.com/juanmanuel96/mongo-flask' __version_info__ = ('0', '1', '3') __version__ = '.'.join(__version_info__) __author__ = 'Juan Vazquez' __py_version__ = 3
__title__ = 'MongoFlask' __description__ = 'A Python Flask library for connecting a MongoDB instance to a Flask application' __url__ = 'https://github.com/juanmanuel96/mongo-flask' __version_info__ = ('0', '1', '3') __version__ = '.'.join(__version_info__) __author__ = 'Juan Vazquez' __py_version__ = 3
JournalFame = {"4" : 1200, "5" : 2400, "6" : 4800, "7" : 9600, "8" : 19200} FameGenerated = {"4": {"2Hweapon":720, "1Hweapon":540, "BigArmor":360, "SmallArmor":180}, "5": {"2Hweapon":2880, "1Hweapon":2160, "BigArmor":1440, "SmallArmor":720}, "6": {"2Hweapon":8640, "1Hweapon":6480, "BigArmor":4320, "SmallArmor":2160}, "7": {"2Hweapon":20640, "1Hweapon":15480, "BigArmor":10320, "SmallArmor":5160}, "8": {"2Hweapon":44640, "1Hweapon":33480, "BigArmor":22320, "SmallArmor":11160}} treeType = {"B" : "Blacksmith", "I" : "Imbuer", "F" : "Fletcher"} itemType = {"BattleAxe" : "1Hweapon", "Halberd" : "2Hweapon", "Great_Axe" : "2Hweapon", "Boots" : "SmallArmor", "Shield" : "SmallArmor", "1H_Crossbow" : "1Hweapon", "2H_Crossbow" : "2Hweapon", "Armor" : "BigArmor", "1H_Hammer" : "1Hweapon", "2H_Hammer" : "2Hweapon", "Helmet" : "SmallArmor", "1H_Mace" : "1Hweapon", "2H_Mace" : "2Hweapon", "1H_Sword" : "1Hweapon", "2H_Sword" : "2Hweapon", "Robe" : "BigArmor", "Sandals" : "SmallArmor", "Cowl" : "SmallArmor", "1H_Damage_Staff" : "1Hweapon", "2H_Damage_Staff" : "2Hweapon", "1H_Holy_Staff" : "1Hweapon", "2H_Holy_Staff" : "2Hweapon", "SpellTome" : "SmallArmor", "Torch" : "SmallArmor", "1H_Nature_Staff" : "1Hweapon", "2H_Nature_staff" : "2Hweapon", "1H_Dagger" : "1Hweapon", "2H_Dagger" : "2Hweapon", "Claws" : "2Hweapon", "Staff" : "2Hweapon", "1H_Spear" : "1Hweapon", "Pike" : "2Hweapon", "Glaive" : "2Hweapon", "Jacket" : "BigArmor", "Shoes" : "SmallArmor", "Hood" : "SmallArmor"} itemTree = {"BattleAxe" : "Blacksmith", "Halberd" : "Blacksmith", "Great_Axe" : "Blacksmith", "Boots" : "Blacksmith", "Shield" : "Blacksmith", "1H_Crossbow" : "Blacksmith", "2H_Crossbow" : "Blacksmith", "Armor" : "Blacksmith", "1H_Hammer" : "Blacksmith", "2H_Hammer" : "Blacksmith", "Helmet" : "Blacksmith", "1H_Mace" : "Blacksmith", "2H_Mace" : "Blacksmith", "1H_Sword" : "Blacksmith", "2H_Sword" : "Blacksmith", "Robe" : "Imbuer", "Sandals" : "Imbuer", "Cowl" : "Imbuer", "1H_Damage_Staff" : "Imbuer", "2H_Damage_Staff" : "Imbuer", "1H_Holy_Staff" : "Imbuer", "2H_Holy_Staff" : "Imbuer", "SpellTome" : "Imbuer", "Torch" : "Fletcher", "1H_Nature_Staff" : "Fletcher", "2H_Nature_staff" : "Fletcher", "1H_Dagger" : "Fletcher", "2H_Dagger" : "Fletcher", "Claws" : "Fletcher", "Staff" : "Fletcher", "1H_Spear" : "Fletcher", "Pike" : "Fletcher", "Glaive" : "Fletcher", "Jacket" : "Fletcher", "Shoes" : "Fletcher", "Hood" : "Fletcher"} clothCost = {"BattleAxe" : 0, "Halberd" : 0, "Great_Axe" : 0, "Boots" : 0, "Shield" : 0, "1H_Crossbow" : 0, "2H_Crossbow" : 0, "Armor" : 0, "1H_Hammer" : 0, "2H_Hammer" : 12, "Helmet" : 0, "1H_Mace" : 8, "2H_Mace" : 12, "1H_Sword" : 0, "2H_Sword" : 0, "Robe" : 16, "Sandals" : 8, "Cowl" : 8, "1H_Damage_Staff" : 0, "2H_Damage_Staff" : 0, "1H_Holy_Staff" : 8, "2H_Holy_Staff" : 12, "SpellTome" : 4, "Torch" : 4, "1H_Nature_Staff" : 8, "2H_Nature_staff" : 12, "1H_Dagger" : 0, "2H_Dagger" : 0, "Claws" : 0, "Staff" : 0, "1H_Spear" : 0, "Pike" : 0, "Glaive" : 0, "Jacket" : 0, "Shoes" : 0, "Hood" : 0} metalCost = {"BattleAxe" : 16, "Halberd" : 12, "Great_Axe" : 20, "Boots" : 8, "Shield" : 4, "1H_Crossbow" : 8, "2H_Crossbow" : 12, "Armor" : 16, "1H_Hammer" : 24, "2H_Hammer" : 20, "Helmet" : 8, "1H_Mace" : 16, "2H_Mace" : 20, "1H_Sword" : 16, "2H_Sword" : 20, "Robe" : 0, "Sandals" : 0, "Cowl" : 0, "1H_Damage_Staff" : 8, "2H_Damage_Staff" : 12, "1H_Holy_Staff" : 0, "2H_Holy_Staff" : 0, "SpellTome" : 0, "Torch" : 0, "1H_Nature_Staff" : 0, "2H_Nature_staff" : 0, "1H_Dagger" : 12, "2H_Dagger" : 16, "Claws" : 12, "Staff" : 12, "1H_Spear" : 8, "Pike" : 12, "Glaive" : 20, "Jacket" : 0, "Shoes" : 0, "Hood" : 0} woodCost = {"BattleAxe" : 8, "Halberd" : 20, "Great_Axe" : 12, "Boots" : 0, "Shield" : 4, "1H_Crossbow" : 16, "2H_Crossbow" : 20, "Armor" : 0, "1H_Hammer" : 0, "2H_Hammer" : 0, "Helmet" : 0, "1H_Mace" : 0, "2H_Mace" : 0, "1H_Sword" : 0, "2H_Sword" : 0, "Robe" : 0, "Sandals" : 0, "Cowl" : 0, "1H_Damage_Staff" : 16, "2H_Damage_Staff" : 20, "1H_Holy_Staff" : 16, "2H_Holy_Staff" : 20, "SpellTome" : 0, "Torch" : 4, "1H_Nature_Staff" : 16, "2H_Nature_staff" : 20, "1H_Dagger" : 0, "2H_Dagger" : 0, "Claws" : 0, "Staff" : 0, "1H_Spear" : 16, "Pike" : 20, "Glaive" : 12, "Jacket" : 0, "Shoes" : 0, "Hood" : 0} leatherCost = {"BattleAxe" : 0, "Halberd" : 0, "Great_Axe" : 0, "Boots" : 0, "Shield" : 0, "1H_Crossbow" : 0, "2H_Crossbow" : 0, "Armor" : 0, "1H_Hammer" : 0, "2H_Hammer" : 0, "Helmet" : 0, "1H_Mace" : 0, "2H_Mace" : 0, "1H_Sword" : 8, "2H_Sword" : 12, "Robe" : 0, "Sandals" : 0, "Cowl" : 0, "1H_Damage_Staff" : 0, "2H_Damage_Staff" : 0, "1H_Holy_Staff" : 0, "2H_Holy_Staff" : 0, "SpellTome" : 4, "Torch" : 0, "1H_Nature_Staff" : 0, "2H_Nature_staff" : 0, "1H_Dagger" : 12, "2H_Dagger" : 16, "Claws" : 20, "Staff" : 20, "1H_Spear" : 0, "Pike" : 0, "Glaive" : 0, "Jacket" : 16, "Shoes" : 8, "Hood" : 8} craftedItems = {"BattleAxe" : 0, "Halberd" : 0, "Great_Axe" : 0, "Boots" : 0, "Shield" : 0, "1H_Crossbow" : 0, "2H_Crossbow" : 0, "Armor" : 0, "1H_Hammer" : 0, "2H_Hammer" : 0, "Helmet" : 0, "1H_Mace" : 0, "2H_Mace" : 0, "1H_Sword" : 0, "2H_Sword" : 0, "Robe" : 0, "Sandals" : 0, "Cowl" : 0, "1H_Damage_Staff" : 0, "2H_Damage_Staff" : 0, "1H_Holy_Staff" : 0, "2H_Holy_Staff" : 0, "SpellTome" : 0, "Torch" : 0, "1H_Nature_Staff" : 0, "2H_Nature_staff" : 0, "1H_Dagger" : 0, "2H_Dagger" : 0, "Claws" : 0, "Staff" : 0, "1H_Spear" : 0, "Pike" : 0, "Glaive" : 0, "Jacket" : 0, "Shoes" : 0, "Hood" : 0} RRR = {"N" : 0.248, "Y" : 0.479} focusMessage = {"N" : "Focus was not used", "Y" : "Focus was used"}
journal_fame = {'4': 1200, '5': 2400, '6': 4800, '7': 9600, '8': 19200} fame_generated = {'4': {'2Hweapon': 720, '1Hweapon': 540, 'BigArmor': 360, 'SmallArmor': 180}, '5': {'2Hweapon': 2880, '1Hweapon': 2160, 'BigArmor': 1440, 'SmallArmor': 720}, '6': {'2Hweapon': 8640, '1Hweapon': 6480, 'BigArmor': 4320, 'SmallArmor': 2160}, '7': {'2Hweapon': 20640, '1Hweapon': 15480, 'BigArmor': 10320, 'SmallArmor': 5160}, '8': {'2Hweapon': 44640, '1Hweapon': 33480, 'BigArmor': 22320, 'SmallArmor': 11160}} tree_type = {'B': 'Blacksmith', 'I': 'Imbuer', 'F': 'Fletcher'} item_type = {'BattleAxe': '1Hweapon', 'Halberd': '2Hweapon', 'Great_Axe': '2Hweapon', 'Boots': 'SmallArmor', 'Shield': 'SmallArmor', '1H_Crossbow': '1Hweapon', '2H_Crossbow': '2Hweapon', 'Armor': 'BigArmor', '1H_Hammer': '1Hweapon', '2H_Hammer': '2Hweapon', 'Helmet': 'SmallArmor', '1H_Mace': '1Hweapon', '2H_Mace': '2Hweapon', '1H_Sword': '1Hweapon', '2H_Sword': '2Hweapon', 'Robe': 'BigArmor', 'Sandals': 'SmallArmor', 'Cowl': 'SmallArmor', '1H_Damage_Staff': '1Hweapon', '2H_Damage_Staff': '2Hweapon', '1H_Holy_Staff': '1Hweapon', '2H_Holy_Staff': '2Hweapon', 'SpellTome': 'SmallArmor', 'Torch': 'SmallArmor', '1H_Nature_Staff': '1Hweapon', '2H_Nature_staff': '2Hweapon', '1H_Dagger': '1Hweapon', '2H_Dagger': '2Hweapon', 'Claws': '2Hweapon', 'Staff': '2Hweapon', '1H_Spear': '1Hweapon', 'Pike': '2Hweapon', 'Glaive': '2Hweapon', 'Jacket': 'BigArmor', 'Shoes': 'SmallArmor', 'Hood': 'SmallArmor'} item_tree = {'BattleAxe': 'Blacksmith', 'Halberd': 'Blacksmith', 'Great_Axe': 'Blacksmith', 'Boots': 'Blacksmith', 'Shield': 'Blacksmith', '1H_Crossbow': 'Blacksmith', '2H_Crossbow': 'Blacksmith', 'Armor': 'Blacksmith', '1H_Hammer': 'Blacksmith', '2H_Hammer': 'Blacksmith', 'Helmet': 'Blacksmith', '1H_Mace': 'Blacksmith', '2H_Mace': 'Blacksmith', '1H_Sword': 'Blacksmith', '2H_Sword': 'Blacksmith', 'Robe': 'Imbuer', 'Sandals': 'Imbuer', 'Cowl': 'Imbuer', '1H_Damage_Staff': 'Imbuer', '2H_Damage_Staff': 'Imbuer', '1H_Holy_Staff': 'Imbuer', '2H_Holy_Staff': 'Imbuer', 'SpellTome': 'Imbuer', 'Torch': 'Fletcher', '1H_Nature_Staff': 'Fletcher', '2H_Nature_staff': 'Fletcher', '1H_Dagger': 'Fletcher', '2H_Dagger': 'Fletcher', 'Claws': 'Fletcher', 'Staff': 'Fletcher', '1H_Spear': 'Fletcher', 'Pike': 'Fletcher', 'Glaive': 'Fletcher', 'Jacket': 'Fletcher', 'Shoes': 'Fletcher', 'Hood': 'Fletcher'} cloth_cost = {'BattleAxe': 0, 'Halberd': 0, 'Great_Axe': 0, 'Boots': 0, 'Shield': 0, '1H_Crossbow': 0, '2H_Crossbow': 0, 'Armor': 0, '1H_Hammer': 0, '2H_Hammer': 12, 'Helmet': 0, '1H_Mace': 8, '2H_Mace': 12, '1H_Sword': 0, '2H_Sword': 0, 'Robe': 16, 'Sandals': 8, 'Cowl': 8, '1H_Damage_Staff': 0, '2H_Damage_Staff': 0, '1H_Holy_Staff': 8, '2H_Holy_Staff': 12, 'SpellTome': 4, 'Torch': 4, '1H_Nature_Staff': 8, '2H_Nature_staff': 12, '1H_Dagger': 0, '2H_Dagger': 0, 'Claws': 0, 'Staff': 0, '1H_Spear': 0, 'Pike': 0, 'Glaive': 0, 'Jacket': 0, 'Shoes': 0, 'Hood': 0} metal_cost = {'BattleAxe': 16, 'Halberd': 12, 'Great_Axe': 20, 'Boots': 8, 'Shield': 4, '1H_Crossbow': 8, '2H_Crossbow': 12, 'Armor': 16, '1H_Hammer': 24, '2H_Hammer': 20, 'Helmet': 8, '1H_Mace': 16, '2H_Mace': 20, '1H_Sword': 16, '2H_Sword': 20, 'Robe': 0, 'Sandals': 0, 'Cowl': 0, '1H_Damage_Staff': 8, '2H_Damage_Staff': 12, '1H_Holy_Staff': 0, '2H_Holy_Staff': 0, 'SpellTome': 0, 'Torch': 0, '1H_Nature_Staff': 0, '2H_Nature_staff': 0, '1H_Dagger': 12, '2H_Dagger': 16, 'Claws': 12, 'Staff': 12, '1H_Spear': 8, 'Pike': 12, 'Glaive': 20, 'Jacket': 0, 'Shoes': 0, 'Hood': 0} wood_cost = {'BattleAxe': 8, 'Halberd': 20, 'Great_Axe': 12, 'Boots': 0, 'Shield': 4, '1H_Crossbow': 16, '2H_Crossbow': 20, 'Armor': 0, '1H_Hammer': 0, '2H_Hammer': 0, 'Helmet': 0, '1H_Mace': 0, '2H_Mace': 0, '1H_Sword': 0, '2H_Sword': 0, 'Robe': 0, 'Sandals': 0, 'Cowl': 0, '1H_Damage_Staff': 16, '2H_Damage_Staff': 20, '1H_Holy_Staff': 16, '2H_Holy_Staff': 20, 'SpellTome': 0, 'Torch': 4, '1H_Nature_Staff': 16, '2H_Nature_staff': 20, '1H_Dagger': 0, '2H_Dagger': 0, 'Claws': 0, 'Staff': 0, '1H_Spear': 16, 'Pike': 20, 'Glaive': 12, 'Jacket': 0, 'Shoes': 0, 'Hood': 0} leather_cost = {'BattleAxe': 0, 'Halberd': 0, 'Great_Axe': 0, 'Boots': 0, 'Shield': 0, '1H_Crossbow': 0, '2H_Crossbow': 0, 'Armor': 0, '1H_Hammer': 0, '2H_Hammer': 0, 'Helmet': 0, '1H_Mace': 0, '2H_Mace': 0, '1H_Sword': 8, '2H_Sword': 12, 'Robe': 0, 'Sandals': 0, 'Cowl': 0, '1H_Damage_Staff': 0, '2H_Damage_Staff': 0, '1H_Holy_Staff': 0, '2H_Holy_Staff': 0, 'SpellTome': 4, 'Torch': 0, '1H_Nature_Staff': 0, '2H_Nature_staff': 0, '1H_Dagger': 12, '2H_Dagger': 16, 'Claws': 20, 'Staff': 20, '1H_Spear': 0, 'Pike': 0, 'Glaive': 0, 'Jacket': 16, 'Shoes': 8, 'Hood': 8} crafted_items = {'BattleAxe': 0, 'Halberd': 0, 'Great_Axe': 0, 'Boots': 0, 'Shield': 0, '1H_Crossbow': 0, '2H_Crossbow': 0, 'Armor': 0, '1H_Hammer': 0, '2H_Hammer': 0, 'Helmet': 0, '1H_Mace': 0, '2H_Mace': 0, '1H_Sword': 0, '2H_Sword': 0, 'Robe': 0, 'Sandals': 0, 'Cowl': 0, '1H_Damage_Staff': 0, '2H_Damage_Staff': 0, '1H_Holy_Staff': 0, '2H_Holy_Staff': 0, 'SpellTome': 0, 'Torch': 0, '1H_Nature_Staff': 0, '2H_Nature_staff': 0, '1H_Dagger': 0, '2H_Dagger': 0, 'Claws': 0, 'Staff': 0, '1H_Spear': 0, 'Pike': 0, 'Glaive': 0, 'Jacket': 0, 'Shoes': 0, 'Hood': 0} rrr = {'N': 0.248, 'Y': 0.479} focus_message = {'N': 'Focus was not used', 'Y': 'Focus was used'}
# -*- coding: utf-8 -*- # Copyright 2017 Vector Creations Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This module implements the TCP replication protocol used by synapse to communicate between the master process and its workers (when they're enabled). Further details can be found in docs/tcp_replication.rst Structure of the module: * handler.py - the classes used to handle sending/receiving commands to replication * command.py - the definitions of all the valid commands * protocol.py - the TCP protocol classes * resource.py - handles streaming stream updates to replications * streams/ - the definitons of all the valid streams The general interaction of the classes are: +---------------------+ | ReplicationStreamer | +---------------------+ | v +---------------------------+ +----------------------+ | ReplicationCommandHandler |---->|ReplicationDataHandler| +---------------------------+ +----------------------+ | ^ v | +-------------+ | Protocols | | (TCP/redis) | +-------------+ Where the ReplicationDataHandler (or subclasses) handles incoming stream updates. """
"""This module implements the TCP replication protocol used by synapse to communicate between the master process and its workers (when they're enabled). Further details can be found in docs/tcp_replication.rst Structure of the module: * handler.py - the classes used to handle sending/receiving commands to replication * command.py - the definitions of all the valid commands * protocol.py - the TCP protocol classes * resource.py - handles streaming stream updates to replications * streams/ - the definitons of all the valid streams The general interaction of the classes are: +---------------------+ | ReplicationStreamer | +---------------------+ | v +---------------------------+ +----------------------+ | ReplicationCommandHandler |---->|ReplicationDataHandler| +---------------------------+ +----------------------+ | ^ v | +-------------+ | Protocols | | (TCP/redis) | +-------------+ Where the ReplicationDataHandler (or subclasses) handles incoming stream updates. """
class ExtraException(Exception): def __init__(self, message: str = None, **kwargs): if message: self.message = message self.extra = kwargs super().__init__(message, kwargs) def __str__(self) -> str: return self.message class PackageNotFoundError(ExtraException, LookupError): message = 'package not found' class InvalidFieldsError(ExtraException, ValueError): message = 'invalid fields'
class Extraexception(Exception): def __init__(self, message: str=None, **kwargs): if message: self.message = message self.extra = kwargs super().__init__(message, kwargs) def __str__(self) -> str: return self.message class Packagenotfounderror(ExtraException, LookupError): message = 'package not found' class Invalidfieldserror(ExtraException, ValueError): message = 'invalid fields'
class Solution: # @param A : list of integers # @return an integer ''' Facebook Codelab No hints or solutions needed N light bulbs are connected by a wire. Each bulb has a switch associated with it, however due to faulty wiring, a switch also changes the state of all the bulbs to the right of current bulb. Given an initial state of all bulbs, find the minimum number of switches you have to press to turn on all the bulbs. You can press the same switch multiple times. Input : [0 1 0 1] Return : 4 ''' def bulbs(self, A): if A is None: return None if len(A) == 0: return 0 cnt = 0 rightFlipped = False for i, v in enumerate(A): if v ^ rightFlipped: continue else: A[i] = 1 cnt += 1 rightFlipped = not rightFlipped return cnt if __name__ == "__main__": A = [3, 0, 1, 0]
class Solution: """ Facebook Codelab No hints or solutions needed N light bulbs are connected by a wire. Each bulb has a switch associated with it, however due to faulty wiring, a switch also changes the state of all the bulbs to the right of current bulb. Given an initial state of all bulbs, find the minimum number of switches you have to press to turn on all the bulbs. You can press the same switch multiple times. Input : [0 1 0 1] Return : 4 """ def bulbs(self, A): if A is None: return None if len(A) == 0: return 0 cnt = 0 right_flipped = False for (i, v) in enumerate(A): if v ^ rightFlipped: continue else: A[i] = 1 cnt += 1 right_flipped = not rightFlipped return cnt if __name__ == '__main__': a = [3, 0, 1, 0]
def main(): recursion() def recursion(): count = [0] num = b(5, 2, count) print(num) # print(sum(count)) print(count[0]) def b(n, k, count): # count.append(1) ## count the number of stack frame count[0] += 1 if k == 0 or k == n: print('Base Case!') return 2 else: return b(n-1, k-1, count) + b(n-1, k, count) if __name__ == '__main__': main()
def main(): recursion() def recursion(): count = [0] num = b(5, 2, count) print(num) print(count[0]) def b(n, k, count): count[0] += 1 if k == 0 or k == n: print('Base Case!') return 2 else: return b(n - 1, k - 1, count) + b(n - 1, k, count) if __name__ == '__main__': main()
# # @lc app=leetcode.cn id=547 lang=python3 # # [547] friend-circles # None # @lc code=end
None
""" Memoization decorator. """ def memoize(f): memos = {} def memoized(*args): if args not in memos: memos[args] = f(*args) return memos[args] return memoized
""" Memoization decorator. """ def memoize(f): memos = {} def memoized(*args): if args not in memos: memos[args] = f(*args) return memos[args] return memoized
class CaseInsensitiveKey( object ): def __init__( self, key ): self.key = key def __hash__( self ): return hash( self.key.lower() ) def __eq__( self, other ): return self.key.lower() == other.key.lower() def __str__( self ): return self.key GROK_PATTERN_CONF = dict() # Basic String GROK_PATTERN_CONF[ CaseInsensitiveKey( 'String' ) ] = 'DATA' # DATA or NOTSPACE ? GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Quote String' ) ] = 'QS' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'UUID' ) ] = 'UUID' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Log Level' ) ] = 'LOGLEVEL' # Networking GROK_PATTERN_CONF[ CaseInsensitiveKey( 'IP' ) ] = 'IP' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Host/Domain' ) ] = 'HOST' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Host:Port' ) ] = 'HOSTPORT' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'IP or Host/Domain' ) ] = 'IPORHOST' # Path GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Full URL' ) ] = 'URI' # http://www.google.com?search=mj GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Url Path' ) ] = 'URIPATHPARAM' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Unix Path' ) ] = 'UNIXPATH' # Json # GROK_PATTERN_CONF[ CaseInsensitiveKey( 'json' ) ] = # Number GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Number' ) ] = 'NUMBER' # Integer/Long OR Float/Double GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Integer/Long' ) ] = 'INT' # GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Float/Double' ) ] = # Date GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Year' ) ] = 'YEAR' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Month' ) ] = 'MONTH' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Month Number' ) ] = 'MONTHNUM' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Day' ) ] = 'DAY' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Hour' ) ] = 'HOUR' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Minute' ) ] = 'MINUTE' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'Second' ) ] = 'SECOND' # GROK_PATTERN_CONF[ CaseInsensitiveKey( ) ] = 'TZ' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'ISO8601' ) ] = 'TIMESTAMP_ISO8601' GROK_PATTERN_CONF[ CaseInsensitiveKey( 'HTTPDATE' ) ] = 'HTTPDATE' # GROK_PATTERN_CONF[ CaseInsensitiveKey( 'custom' ) ] = # custom # TODO # GROK_PATTERN_CONF = { # # Basic String # 'String': 'DATA', # DATA or NOTSPACE ? # 'Quote String': 'QS', # 'UUID': 'UUID', # 'Log Level': 'LOGLEVEL', # # Networking # 'IP': 'IP', # 'Host/Domain': 'HOST', # 'Host:Port': 'HOSTPORT', # 'IP or Host/Domain': 'IPORHOST', # # Path # 'Full URL': 'URI', # http://www.google.com?search=mj # 'Url Path': 'URIPATHPARAM', # 'Unix Path': 'UNIXPATH', # # Json # 'json': '', # TODO json regular expression # # Number # 'Number': 'NUMBER', # Integer/Long OR Float/Double # 'Integer/Long': 'INT', # 'Float/Double': '', # # Date # 'Year': 'YEAR', # 'Month': 'MONTH', # 'Month Number': 'MONTHNUM', # 'Day': 'DAY', # 'Hour': 'HOUR', # 'Minute': 'MINUTE', # 'Second': 'SECOND', # '': 'TZ', # 'ISO8601': 'TIMESTAMP_ISO8601', # 'HTTPDATE': 'HTTPDATE', # 'custom': '', # custom # TODO # }
class Caseinsensitivekey(object): def __init__(self, key): self.key = key def __hash__(self): return hash(self.key.lower()) def __eq__(self, other): return self.key.lower() == other.key.lower() def __str__(self): return self.key grok_pattern_conf = dict() GROK_PATTERN_CONF[case_insensitive_key('String')] = 'DATA' GROK_PATTERN_CONF[case_insensitive_key('Quote String')] = 'QS' GROK_PATTERN_CONF[case_insensitive_key('UUID')] = 'UUID' GROK_PATTERN_CONF[case_insensitive_key('Log Level')] = 'LOGLEVEL' GROK_PATTERN_CONF[case_insensitive_key('IP')] = 'IP' GROK_PATTERN_CONF[case_insensitive_key('Host/Domain')] = 'HOST' GROK_PATTERN_CONF[case_insensitive_key('Host:Port')] = 'HOSTPORT' GROK_PATTERN_CONF[case_insensitive_key('IP or Host/Domain')] = 'IPORHOST' GROK_PATTERN_CONF[case_insensitive_key('Full URL')] = 'URI' GROK_PATTERN_CONF[case_insensitive_key('Url Path')] = 'URIPATHPARAM' GROK_PATTERN_CONF[case_insensitive_key('Unix Path')] = 'UNIXPATH' GROK_PATTERN_CONF[case_insensitive_key('Number')] = 'NUMBER' GROK_PATTERN_CONF[case_insensitive_key('Integer/Long')] = 'INT' GROK_PATTERN_CONF[case_insensitive_key('Year')] = 'YEAR' GROK_PATTERN_CONF[case_insensitive_key('Month')] = 'MONTH' GROK_PATTERN_CONF[case_insensitive_key('Month Number')] = 'MONTHNUM' GROK_PATTERN_CONF[case_insensitive_key('Day')] = 'DAY' GROK_PATTERN_CONF[case_insensitive_key('Hour')] = 'HOUR' GROK_PATTERN_CONF[case_insensitive_key('Minute')] = 'MINUTE' GROK_PATTERN_CONF[case_insensitive_key('Second')] = 'SECOND' GROK_PATTERN_CONF[case_insensitive_key('ISO8601')] = 'TIMESTAMP_ISO8601' GROK_PATTERN_CONF[case_insensitive_key('HTTPDATE')] = 'HTTPDATE'
class ValueResolver: def resolve(self, value): if value.kind() == "id": string = self._resolve_id_value(value) elif value.kind() == "node": string = f"Node({self.resolve(value.value)}.value)" elif value.kind() == "arc": string = f"Arc({self.resolve(value.source)}, {self.resolve(value.target)}, {self.resolve(value.weight)}.value, \"{value.type}\")" elif value.kind() == "graph": string = self._resolve_graph_value(value) elif value.kind() == "num": string = f"Num({value.value})" elif value.kind() == "logic": string = f"Logic({value.value})" elif value.kind() == "nope": string = "Nope()" elif value.kind() == "fun_call": string = self._resolve_fun_call(value.fun_call) elif value.kind() == "binary_operation.summation": string = self._resolve_summation(value) elif value.kind() == "binary_operation.subtraction": string = self._resolve_subtraction(value) elif value.kind() == "binary_operation.multiplication": string = self._resolve_multiplication(value) elif value.kind() == "binary_operation.division": string = self._resolve_division(value) elif value.kind() == "binary_operation.and": string = self._resolve_and(value) elif value.kind() == "binary_operation.or": string = self._resolve_or(value) elif value.kind() == "unary_operation.not": string = self._resolve_not(value) elif value.kind() == "binary_operation.equal": string = self._resolve_equal(value) elif value.kind() == "binary_operation.not_equal": string = self._resolve_not_equal(value) elif value.kind() == "binary_operation.greater_or_equal": string = self._resolve_greater_or_equal(value) elif value.kind() == "binary_operation.less_or_equal": string = self._resolve_less_or_equal(value) elif value.kind() == "binary_operation.greater": string = self._resolve_greater(value) elif value.kind() == "binary_operation.less": string = self._resolve_less(value) string += f".cast(\"{value.cast_type}\")" if value.cast_type else "" string += f"[{int(value.index.value)}]" if value.index else "" return string def _resolve_fun_call(self, statement): return f"{statement.name}({self._resolve_array(statement.args)})" def _resolve_summation(self, statement): return f"{self.resolve(statement.left)}.summation({self.resolve(statement.right)})" def _resolve_subtraction(self, statement): return f"{self.resolve(statement.left)}.subtraction({self.resolve(statement.right)})" def _resolve_multiplication(self, statement): return f"{self.resolve(statement.left)}.multiplication({self.resolve(statement.right)})" def _resolve_division(self, statement): return f"{self.resolve(statement.left)}.division({self.resolve(statement.right)})" def _resolve_and(self, statement): return f"{self.resolve(statement.left)}.and_({self.resolve(statement.right)})" def _resolve_or(self, statement): return f"{self.resolve(statement.left)}.or_({self.resolve(statement.right)})" def _resolve_not(self, statement): return f"{self.resolve(statement.target)}.not_()" def _resolve_equal(self, statement): return f"{self.resolve(statement.left)}.equal({self.resolve(statement.right)})" def _resolve_not_equal(self, statement): return f"{self.resolve(statement.left)}.not_equal({self.resolve(statement.right)})" def _resolve_greater_or_equal(self, statement): return f"{self.resolve(statement.left)}.greater_or_equal({self.resolve(statement.right)})" def _resolve_less_or_equal(self, statement): return f"{self.resolve(statement.left)}.less_or_equal({self.resolve(statement.right)})" def _resolve_greater(self, statement): return f"{self.resolve(statement.left)}.greater({self.resolve(statement.right)})" def _resolve_less(self, statement): return f"{self.resolve(statement.left)}.less({self.resolve(statement.right)})" def _resolve_id_value(self, statement): if statement.index is not None: return statement.name + f"[int({self.resolve(statement.index)}.value)]" else: return statement.name def _resolve_graph_value(self, value): return f"Graph([{self._resolve_array(value.elements)}])" def _resolve_array(self, values): values_ = [self.resolve(value) for value in values] return ", ".join(values_)
class Valueresolver: def resolve(self, value): if value.kind() == 'id': string = self._resolve_id_value(value) elif value.kind() == 'node': string = f'Node({self.resolve(value.value)}.value)' elif value.kind() == 'arc': string = f'Arc({self.resolve(value.source)}, {self.resolve(value.target)}, {self.resolve(value.weight)}.value, "{value.type}")' elif value.kind() == 'graph': string = self._resolve_graph_value(value) elif value.kind() == 'num': string = f'Num({value.value})' elif value.kind() == 'logic': string = f'Logic({value.value})' elif value.kind() == 'nope': string = 'Nope()' elif value.kind() == 'fun_call': string = self._resolve_fun_call(value.fun_call) elif value.kind() == 'binary_operation.summation': string = self._resolve_summation(value) elif value.kind() == 'binary_operation.subtraction': string = self._resolve_subtraction(value) elif value.kind() == 'binary_operation.multiplication': string = self._resolve_multiplication(value) elif value.kind() == 'binary_operation.division': string = self._resolve_division(value) elif value.kind() == 'binary_operation.and': string = self._resolve_and(value) elif value.kind() == 'binary_operation.or': string = self._resolve_or(value) elif value.kind() == 'unary_operation.not': string = self._resolve_not(value) elif value.kind() == 'binary_operation.equal': string = self._resolve_equal(value) elif value.kind() == 'binary_operation.not_equal': string = self._resolve_not_equal(value) elif value.kind() == 'binary_operation.greater_or_equal': string = self._resolve_greater_or_equal(value) elif value.kind() == 'binary_operation.less_or_equal': string = self._resolve_less_or_equal(value) elif value.kind() == 'binary_operation.greater': string = self._resolve_greater(value) elif value.kind() == 'binary_operation.less': string = self._resolve_less(value) string += f'.cast("{value.cast_type}")' if value.cast_type else '' string += f'[{int(value.index.value)}]' if value.index else '' return string def _resolve_fun_call(self, statement): return f'{statement.name}({self._resolve_array(statement.args)})' def _resolve_summation(self, statement): return f'{self.resolve(statement.left)}.summation({self.resolve(statement.right)})' def _resolve_subtraction(self, statement): return f'{self.resolve(statement.left)}.subtraction({self.resolve(statement.right)})' def _resolve_multiplication(self, statement): return f'{self.resolve(statement.left)}.multiplication({self.resolve(statement.right)})' def _resolve_division(self, statement): return f'{self.resolve(statement.left)}.division({self.resolve(statement.right)})' def _resolve_and(self, statement): return f'{self.resolve(statement.left)}.and_({self.resolve(statement.right)})' def _resolve_or(self, statement): return f'{self.resolve(statement.left)}.or_({self.resolve(statement.right)})' def _resolve_not(self, statement): return f'{self.resolve(statement.target)}.not_()' def _resolve_equal(self, statement): return f'{self.resolve(statement.left)}.equal({self.resolve(statement.right)})' def _resolve_not_equal(self, statement): return f'{self.resolve(statement.left)}.not_equal({self.resolve(statement.right)})' def _resolve_greater_or_equal(self, statement): return f'{self.resolve(statement.left)}.greater_or_equal({self.resolve(statement.right)})' def _resolve_less_or_equal(self, statement): return f'{self.resolve(statement.left)}.less_or_equal({self.resolve(statement.right)})' def _resolve_greater(self, statement): return f'{self.resolve(statement.left)}.greater({self.resolve(statement.right)})' def _resolve_less(self, statement): return f'{self.resolve(statement.left)}.less({self.resolve(statement.right)})' def _resolve_id_value(self, statement): if statement.index is not None: return statement.name + f'[int({self.resolve(statement.index)}.value)]' else: return statement.name def _resolve_graph_value(self, value): return f'Graph([{self._resolve_array(value.elements)}])' def _resolve_array(self, values): values_ = [self.resolve(value) for value in values] return ', '.join(values_)
""" Module docstring """ def _write_file_impl(ctx): f = ctx.actions.declare_file("out.txt") ctx.actions.write(f, "contents") write_file = rule( attrs = {}, implementation = _write_file_impl, )
""" Module docstring """ def _write_file_impl(ctx): f = ctx.actions.declare_file('out.txt') ctx.actions.write(f, 'contents') write_file = rule(attrs={}, implementation=_write_file_impl)
######################## ### Feature combine ### ######################## #Combine Units: df_tcad = merge_extraction(dfs=[df_tcad,df_units,gdf_16],val=['dba','type1','units','low','high','units_from_type','est_from_type','shape_area']) replace_nan(df_tcad,['units_x','units_y','low','high'],val=0,reverse=True) #Don't fill in any unit estimates from improvement data yet (will do that in handle missing) (the low high estimate ranges) df_tcad.units_y = np.where(df_tcad.est_from_type, np.nan, df_tcad.units_y) #Fill in missing in known data from 2016 with type data df_tcad.units_x.fillna(df_tcad.units_y,inplace=True) #Known units column df_tcad['known']= np.where(df_tcad.units_x.notnull(), True,False) #Where type data is larger then 2016 take the new value #df_tcad.units_x = np.where((df_tcad.units_x < df_tcad.units_y), df_tcad.units_y, df_tcad.units_x) #Get rid of type values del df_tcad['units_y'] #Combine Locations: df_tcad = merge_extraction(dfs=[df_tcad,gdf,df_locs],on='prop_id',val=['land_use','X','Y','geometry','shape_area']) df_tcad.X_y.fillna(df_tcad.X_x,inplace=True) df_tcad.Y_y.fillna(df_tcad.Y_x,inplace=True) df_tcad.Y_x.fillna(df_tcad.Y_y,inplace=True) df_tcad.X_x.fillna(df_tcad.X_y,inplace=True) del df_tcad['X_y'] del df_tcad['Y_y'] df_tcad = df_tcad.merge(df_con,left_on='geo_id',right_on='TCAD ID',how='left') df_tcad.Y_x.fillna(df_tcad.Latitude,inplace=True) df_tcad.X_x.fillna(df_tcad.Longitude,inplace=True) del df_tcad['Latitude'] del df_tcad['Longitude'] #Known location column df_tcad['known_loc']= np.where(df_tcad.X_x.notnull(), True,False) #Residential criteria res = ['A1','A2','A3','A4','A5','B1','B2','B3','B4','E2','M1','XA'] res_type = ['ALT LIVING CTR', 'DORMITORY HIRISE', 'DORMITORY', 'APARTMENT 5-25', 'LUXURY HI-RISE APTS 100+', 'APARTMENT 100+', 'GARAGE APARTMENT', 'Accessory Dwelling Unit' 'FOURPLEX', 'MOHO DOUBLE PP'] #got rid of detail only res_code = ['100','113','150','160','210','220','230','240','330'] df_tcad['res'] = df_tcad.code2.isin(res) df_tcad['res'] = (df_tcad.res | (df_tcad.hs=='T') | df_tcad.type1.isin(res_type) | (df_tcad.type1.str.contains('CONDO',na=False)) | (df_tcad.type1.str.contains('DWELLING',na=False)) | (df_tcad.desc.str.contains('APARTMENT',na=False)) | (df_tcad.desc.str.contains('APARTMENT',na=False)) | df_tcad.type1.str.contains('APARTMENT',na=False) | df_tcad.type1.str.contains('APT',na=False) | df_tcad.type1.str.contains('DORM',na=False) | (df_tcad.type1.str.contains('MOHO',na=False))) #df_tcad['res'] = (df_tcad.res | df_tcad.type1.isin(res_type) | (df_tcad.type1.str.contains('CONDO',na=False)) | (df_tcad.type1.str.contains('DWELLING',na=False)) | ((df_tcad.desc.str.contains('CONDO',na=False))&(~df_tcad.desc.str.contains('CONDO',na=False))) | (df_tcad.desc.str.contains('APARTMENT',na=False)))) #apts = df_tcad.loc[ df_tcad.type1.str.contains('APARTMENT',na=False) | df_tcad.type1.str.contains('APT',na=False) | df_tcad.type1.str.contains('DORM',na=False)| (df_tcad.type1.str.contains('CONDO',na=False))]
df_tcad = merge_extraction(dfs=[df_tcad, df_units, gdf_16], val=['dba', 'type1', 'units', 'low', 'high', 'units_from_type', 'est_from_type', 'shape_area']) replace_nan(df_tcad, ['units_x', 'units_y', 'low', 'high'], val=0, reverse=True) df_tcad.units_y = np.where(df_tcad.est_from_type, np.nan, df_tcad.units_y) df_tcad.units_x.fillna(df_tcad.units_y, inplace=True) df_tcad['known'] = np.where(df_tcad.units_x.notnull(), True, False) del df_tcad['units_y'] df_tcad = merge_extraction(dfs=[df_tcad, gdf, df_locs], on='prop_id', val=['land_use', 'X', 'Y', 'geometry', 'shape_area']) df_tcad.X_y.fillna(df_tcad.X_x, inplace=True) df_tcad.Y_y.fillna(df_tcad.Y_x, inplace=True) df_tcad.Y_x.fillna(df_tcad.Y_y, inplace=True) df_tcad.X_x.fillna(df_tcad.X_y, inplace=True) del df_tcad['X_y'] del df_tcad['Y_y'] df_tcad = df_tcad.merge(df_con, left_on='geo_id', right_on='TCAD ID', how='left') df_tcad.Y_x.fillna(df_tcad.Latitude, inplace=True) df_tcad.X_x.fillna(df_tcad.Longitude, inplace=True) del df_tcad['Latitude'] del df_tcad['Longitude'] df_tcad['known_loc'] = np.where(df_tcad.X_x.notnull(), True, False) res = ['A1', 'A2', 'A3', 'A4', 'A5', 'B1', 'B2', 'B3', 'B4', 'E2', 'M1', 'XA'] res_type = ['ALT LIVING CTR', 'DORMITORY HIRISE', 'DORMITORY', 'APARTMENT 5-25', 'LUXURY HI-RISE APTS 100+', 'APARTMENT 100+', 'GARAGE APARTMENT', 'Accessory Dwelling UnitFOURPLEX', 'MOHO DOUBLE PP'] res_code = ['100', '113', '150', '160', '210', '220', '230', '240', '330'] df_tcad['res'] = df_tcad.code2.isin(res) df_tcad['res'] = df_tcad.res | (df_tcad.hs == 'T') | df_tcad.type1.isin(res_type) | df_tcad.type1.str.contains('CONDO', na=False) | df_tcad.type1.str.contains('DWELLING', na=False) | df_tcad.desc.str.contains('APARTMENT', na=False) | df_tcad.desc.str.contains('APARTMENT', na=False) | df_tcad.type1.str.contains('APARTMENT', na=False) | df_tcad.type1.str.contains('APT', na=False) | df_tcad.type1.str.contains('DORM', na=False) | df_tcad.type1.str.contains('MOHO', na=False)
exports = { "name": "Earth", "aspects": { "amulets": [ { "item": "scholar", "effect": "health", "description": "Into the earth is the answer. " "Into the earth lies existance. " "Into the earth lies death and memory. " "Less health for opponents" }, { "item": "stargazer", "effect": "hit", "description": "The stars blesses our earth. " "More accurate attacks" } ], "potions": [ { "item": "justice", "effect": "health", "description": "In the tomb, ancient spirits scream " "for justice. Less health for opponents" }, { "item": "blood", "effect": "hit", "description": "Into the earth, lies the blood of " "our ancestors. Raised hit ratio" } ] }, "traces": ['taurus', 'virgo', 'capricorn'] }
exports = {'name': 'Earth', 'aspects': {'amulets': [{'item': 'scholar', 'effect': 'health', 'description': 'Into the earth is the answer. Into the earth lies existance. Into the earth lies death and memory. Less health for opponents'}, {'item': 'stargazer', 'effect': 'hit', 'description': 'The stars blesses our earth. More accurate attacks'}], 'potions': [{'item': 'justice', 'effect': 'health', 'description': 'In the tomb, ancient spirits scream for justice. Less health for opponents'}, {'item': 'blood', 'effect': 'hit', 'description': 'Into the earth, lies the blood of our ancestors. Raised hit ratio'}]}, 'traces': ['taurus', 'virgo', 'capricorn']}
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) class PyCarputils(PythonPackage): """The carputils framework for running simulations with the openCARP software.""" homepage = "https://www.opencarp.org" git = "https://git.opencarp.org/openCARP/carputils.git" maintainers = ['MarieHouillon'] version('master', branch='master') # Version to use with openCARP 7.0 version('oc7.0', commit='4c04db61744f2fb7665594d7c810699c5c55c77c') depends_on('git') depends_on('python@:3.8', type=('build', 'run')) depends_on('py-pip', type='build') depends_on('py-numpy@1.14.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-python-dateutil', type='run') depends_on('py-scipy@:1.5.4', type='run') depends_on('py-matplotlib@:3.3.3', type='run') depends_on('py-pandas@:1.1.4', type='run') depends_on('py-tables@3.6.1', type='run') depends_on('py-six@:1.14.0', type='run') depends_on('py-ruamel-yaml', type='run')
class Pycarputils(PythonPackage): """The carputils framework for running simulations with the openCARP software.""" homepage = 'https://www.opencarp.org' git = 'https://git.opencarp.org/openCARP/carputils.git' maintainers = ['MarieHouillon'] version('master', branch='master') version('oc7.0', commit='4c04db61744f2fb7665594d7c810699c5c55c77c') depends_on('git') depends_on('python@:3.8', type=('build', 'run')) depends_on('py-pip', type='build') depends_on('py-numpy@1.14.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-python-dateutil', type='run') depends_on('py-scipy@:1.5.4', type='run') depends_on('py-matplotlib@:3.3.3', type='run') depends_on('py-pandas@:1.1.4', type='run') depends_on('py-tables@3.6.1', type='run') depends_on('py-six@:1.14.0', type='run') depends_on('py-ruamel-yaml', type='run')
def perfect_number(number): printing = "It's not so perfect." checker = number nums = 0 proper_devisors_sum = 0 for num in range(1, int(checker)): if int(checker) % num == 0: nums += int(num) if nums == int(checker): printing = "We have a perfect number!" return printing print(perfect_number(str(input())))
def perfect_number(number): printing = "It's not so perfect." checker = number nums = 0 proper_devisors_sum = 0 for num in range(1, int(checker)): if int(checker) % num == 0: nums += int(num) if nums == int(checker): printing = 'We have a perfect number!' return printing print(perfect_number(str(input())))
# Get all available modules # help("modules") module_names = [ "micropython", "uhashlib", "uselect", "_onewire", "sys", "uheapq", "ustruct", "builtins", "uarray", "uio", "utime", "cmath", "ubinascii", "ujson", "utimeq", "firmware", "ubluetooth", "umachine", "uzlib", "gc", "ucollections", "uos", "hub", "uctypes", "urandom", "math", "uerrno", "ure" ] # List of objects to be processed. object_list_todo = list() object_list_done = list() # Loop over the builtin modules. for object_name in module_names: # Add them to the object list to be processed. object_list_todo.append((object_name, __import__(object_name))) object_list_done.append(object_name) # Loop over all objects. while len(object_list_todo) > 0: object_name, object_handle = object_list_todo.pop(0) # Reserve space in map, and ensure checks for whether we already have this module pass. object_map = dict() # Get contents of object. object_entries = dir(object_handle) # Loop over all entries in the object. for object_entry in object_entries: # Add them to the map for output. object_map[object_entry] = (type(getattr(object_handle, object_entry)).__name__, str(getattr(object_handle, object_entry))) # Add them to the object list to be processed. if (not object_entry in object_list_done): # Add them to the list for further processing if they are not a base type. if ((not isinstance(getattr(object_handle, object_entry), (str, float, int, list, dict, set))) and (not type(getattr(object_handle, object_entry)).__name__ == "function")): object_list_todo.append((object_entry, getattr(object_handle, object_entry))) object_list_done.append(object_entry) print("{} = {}".format(object_name, object_map))
module_names = ['micropython', 'uhashlib', 'uselect', '_onewire', 'sys', 'uheapq', 'ustruct', 'builtins', 'uarray', 'uio', 'utime', 'cmath', 'ubinascii', 'ujson', 'utimeq', 'firmware', 'ubluetooth', 'umachine', 'uzlib', 'gc', 'ucollections', 'uos', 'hub', 'uctypes', 'urandom', 'math', 'uerrno', 'ure'] object_list_todo = list() object_list_done = list() for object_name in module_names: object_list_todo.append((object_name, __import__(object_name))) object_list_done.append(object_name) while len(object_list_todo) > 0: (object_name, object_handle) = object_list_todo.pop(0) object_map = dict() object_entries = dir(object_handle) for object_entry in object_entries: object_map[object_entry] = (type(getattr(object_handle, object_entry)).__name__, str(getattr(object_handle, object_entry))) if not object_entry in object_list_done: if not isinstance(getattr(object_handle, object_entry), (str, float, int, list, dict, set)) and (not type(getattr(object_handle, object_entry)).__name__ == 'function'): object_list_todo.append((object_entry, getattr(object_handle, object_entry))) object_list_done.append(object_entry) print('{} = {}'.format(object_name, object_map))
{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Summary Functions\n", "\n", "def get_summ_combined_county_annual(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns seller details such as addresses\n", "\n", " >>>get_summ_combined_county_annual('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_county_annual?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " combined_county_annual_df = json_normalize(requests.get(full_url).json())\n", " return combined_county_annual_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", " \n", "def get_summ_combined_county_monthly(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns seller details such as addresses\n", "\n", " >>>get_summ_combined_county_monthly('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_county_monthly?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " combined_county_monthly_df = json_normalize(requests.get(full_url).json())\n", " return combined_county_monthly_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_pharmacies_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all pharmacy totals by county (Will be large and could take extra time to load)\n", "\n", " >>>get_summ_total_pharmacies_county('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_pharmacies_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_pharmacies_county_df = json_normalize(requests.get(full_url).json())\n", " return total_pharmacies_county_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_manufacturers_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all Manufacturer totals by county (Will be large and could take extra time to load)\n", "\n", " >>>get_summ_total_manufacturers_county('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_manufacturers_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_manufacturers_county_df = json_normalize(requests.get(full_url).json())\n", " return total_manufacturers_county_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_distributors_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all Distributor totals by county (Will be large and could take extra time to load)\n", "\n", " >>>get_summ_total_distributors_county('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_distributors_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_distributors_county_df = json_normalize(requests.get(full_url).json())\n", " return total_distributors_county_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_pharmacies_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all pharmacy totals by state (Will be large and could take extra time to load)\n", "\n", " >>>get_summ_total_pharmacies_state('OH')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_pharmacies_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_pharmacies_state_df = json_normalize(requests.get(full_url).json())\n", " return total_pharmacies_state_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_manufacturers_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all Manufacturer totals by state (Will be large and could take extra time to load) \n", "\n", " >>>get_summ_total_manufacturers_state('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_manufacturers_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_manufacturers_state_df = json_normalize(requests.get(full_url).json())\n", " return total_manufacturers_state_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_total_distributors_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns all Distributor totals by state (Will be large and could take extra time to load) \n", "\n", " >>>get_summ_total_distributors_state('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_distributors_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " total_distributors_state_df = json_normalize(requests.get(full_url).json())\n", " return total_distributors_state_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL and state are correct: ', full_url)\n", "\n", "def get_summ_combined_buyer_annual(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns summarized annual dosages of pharmacies and practitioners by state and county \n", "\n", " >>>get_summ_combined_buyer_annual('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_buyer_annual?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " combined_buyer_annual_df = json_normalize(requests.get(full_url).json())\n", " return combined_buyer_annual_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " \n", "def get_summ_combined_buyer_monthly(state, year, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", " Returns dosages by pharmacy or practitioner by county, state, and yea \n", "\n", " >>>get_summ_combined_buyer_monthly('OH', 'Summit')\n", " EXAMPLE OUTPUT\n", " '''\n", "\n", " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_buyer_monthly?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_year = '&year=' + year\n", " add_key = '&key=' + key\n", " full_url = base_url + function_url + add_state + add_county + add_year + add_key\n", "\n", " if verification == True:\n", " print(full_url)\n", " combined_buyer_monthly_df = json_normalize(requests.get(full_url).json())\n", " return combined_buyer_monthly_df\n", " else:\n", " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", " " ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.9" } }, "nbformat": 4, "nbformat_minor": 4 }
{'cells': [{'cell_type': 'code', 'execution_count': null, 'metadata': {}, 'outputs': [], 'source': ['# Summary Functions\n', '\n', "def get_summ_combined_county_annual(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns seller details such as addresses\n', '\n', " >>>get_summ_combined_county_annual('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_county_annual?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' combined_county_annual_df = json_normalize(requests.get(full_url).json())\n', ' return combined_county_annual_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', ' \n', "def get_summ_combined_county_monthly(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns seller details such as addresses\n', '\n', " >>>get_summ_combined_county_monthly('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_county_monthly?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' combined_county_monthly_df = json_normalize(requests.get(full_url).json())\n', ' return combined_county_monthly_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_pharmacies_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all pharmacy totals by county (Will be large and could take extra time to load)\n', '\n', " >>>get_summ_total_pharmacies_county('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_pharmacies_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_pharmacies_county_df = json_normalize(requests.get(full_url).json())\n', ' return total_pharmacies_county_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_manufacturers_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all Manufacturer totals by county (Will be large and could take extra time to load)\n', '\n', " >>>get_summ_total_manufacturers_county('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_manufacturers_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_manufacturers_county_df = json_normalize(requests.get(full_url).json())\n', ' return total_manufacturers_county_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_distributors_county(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all Distributor totals by county (Will be large and could take extra time to load)\n', '\n', " >>>get_summ_total_distributors_county('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_distributors_county?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_distributors_county_df = json_normalize(requests.get(full_url).json())\n', ' return total_distributors_county_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_pharmacies_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all pharmacy totals by state (Will be large and could take extra time to load)\n', '\n', " >>>get_summ_total_pharmacies_state('OH')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_pharmacies_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_pharmacies_state_df = json_normalize(requests.get(full_url).json())\n', ' return total_pharmacies_state_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_manufacturers_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all Manufacturer totals by state (Will be large and could take extra time to load) \n', '\n', " >>>get_summ_total_manufacturers_state('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_manufacturers_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_manufacturers_state_df = json_normalize(requests.get(full_url).json())\n', ' return total_manufacturers_state_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_total_distributors_state(state,verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns all Distributor totals by state (Will be large and could take extra time to load) \n', '\n', " >>>get_summ_total_distributors_state('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'total_distributors_state?'\n", " add_state = 'state=' + state\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' total_distributors_state_df = json_normalize(requests.get(full_url).json())\n', ' return total_distributors_state_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL and state are correct: ', full_url)\n", '\n', "def get_summ_combined_buyer_annual(state, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns summarized annual dosages of pharmacies and practitioners by state and county \n', '\n', " >>>get_summ_combined_buyer_annual('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_buyer_annual?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' combined_buyer_annual_df = json_normalize(requests.get(full_url).json())\n', ' return combined_buyer_annual_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' \n', "def get_summ_combined_buyer_monthly(state, year, county = '',verification = True, key = 'WaPo'):\n", " '''(str(two letter abbreviation), str, bool, str) -> pd.df\n", ' Returns dosages by pharmacy or practitioner by county, state, and yea \n', '\n', " >>>get_summ_combined_buyer_monthly('OH', 'Summit')\n", ' EXAMPLE OUTPUT\n', " '''\n", '\n', " base_url = 'https://arcos-api.ext.nile.works/v1/'\n", " function_url = 'combined_buyer_monthly?'\n", " add_state = 'state=' + state\n", " add_county = '&county=' + county\n", " add_year = '&year=' + year\n", " add_key = '&key=' + key\n", ' full_url = base_url + function_url + add_state + add_county + add_year + add_key\n', '\n', ' if verification == True:\n', ' print(full_url)\n', ' combined_buyer_monthly_df = json_normalize(requests.get(full_url).json())\n', ' return combined_buyer_monthly_df\n', ' else:\n', " print('Problem encountered, not returning data:')\n", " print('Either verification == False')\n", " print('Or problem with API encountered, please verify URL, state and county are correct: ', full_url)\n", ' ']}], 'metadata': {'kernelspec': {'display_name': 'Python 3', 'language': 'python', 'name': 'python3'}, 'language_info': {'codemirror_mode': {'name': 'ipython', 'version': 3}, 'file_extension': '.py', 'mimetype': 'text/x-python', 'name': 'python', 'nbconvert_exporter': 'python', 'pygments_lexer': 'ipython3', 'version': '3.6.9'}}, 'nbformat': 4, 'nbformat_minor': 4}
# -*- coding: utf-8 -*- def ventilation_rates( number_of_chimneys_main_heating, number_of_chimneys_secondary_heating, number_of_chimneys_other, number_of_open_flues_main_heating, number_of_open_flues_secondary_heating, number_of_open_flues_other, number_of_intermittant_fans_total, number_of_passive_vents_total, number_of_flueless_gas_fires_total, dwelling_volume, air_permeability_value_q50, number_of_storeys_in_the_dwelling, structural_infiltration, suspended_wooden_ground_floor_infiltration, no_draft_lobby_infiltration, percentage_of_windows_and_doors_draught_proofed, number_of_sides_on_which_dwelling_is_sheltered, monthly_average_wind_speed, applicable_case, mechanical_ventilation_air_change_rate_through_system, exhaust_air_heat_pump_using_Appendix_N, mechanical_ventilation_throughput_factor, efficiency_allowing_for_in_use_factor, ): """Calculates the ventilation rates, Section 2. :param number_of_chimneys_main_heating: :type number_of_chimneys_main_heating: int :param number_of_chimneys_secondary_heating: :type number_of_chimneys_secondary_heating: int :param number_of_chimneys_other: :type number_of_chimneys_other: int :param number_of_open_flues_main_heating: :type number_of_open_flues_main_heating: int :param number_of_open_flues_secondary_heating: :type number_of_open_flues_secondary_heating: int :param number_of_open_flues_other: :type number_of_open_flues_other: int :param number_of_intermittant_fans_total: :type number_of_intermittant_fans_total: int :param number_of_passive_vents_total: :type number_of_passive_vents_total: int :param number_of_flueless_gas_fires_total: :type number_of_flueless_gas_fires_total: int :param dwelling_volume: See (5). :type dwelling_volume: float :param air_permeability_value_q50: See (17). Use None if not carried out. :type air_permeability_value_q50: float or None :param number_of_storeys_in_the_dwelling: See (9). :type number_of_storeys_in_the_dwelling: int :param structural_infiltration: See (11). :type structural_infiltration: float :param suspended_wooden_ground_floor_infiltration: See (12). :type suspended_wooden_ground_floor_infiltration: float :param no_draft_lobby_infiltration: See (13). :type no_draft_lobby_infiltration: float :param percentage_of_windows_and_doors_draught_proofed: See (14). :type percentage_of_windows_and_doors_draught_proofed: float :param number_of_sides_on_which_dwelling_is_sheltered: See (19). :type number_of_sides_on_which_dwelling_is_sheltered: int :param monthly_average_wind_speed: A list of the monthly wind speeds. 12 items, from Jan to Dec, see (22). :type monthly_average_wind_speed: list (float) :param applicable_case: One of the following options: 'balanced mechanical ventilation with heat recovery'; 'balanced mechanical ventilation without heat recovery'; 'whole house extract ventilation or positive input ventilation from outside'; or 'natural ventilation or whole house positive input ventilation from loft'. :type applicable_case: str :param mechanical_ventilation_air_change_rate_through_system: See (23a). :type mechanical_ventilation_air_change_rate_through_system: float :param exhaust_air_heat_pump_using_Appendix_N: True if exhaust air heat pump using Appendix N, otherwise False. :type exhaust_air_heat_pump_using_Appendix_N: bool :param mechanical_ventilation_throughput_factor: F_mv, see Equation N4. :type mechanical_ventilation_throughput_factor: float :param efficiency_allowing_for_in_use_factor: In %, see (23c). :type efficiency_allowing_for_in_use_factor: float :returns: A dictionary with keys of ( number_of_chimneys_total, number_of_chimneys_m3_per_hour, number_of_open_flues_total, number_of_open_flues_m3_per_hour, number_of_intermittant_fans_m3_per_hour, number_of_passive_vents_m3_per_hour, number_of_flueless_gas_fires_m3_per_hour, infiltration_due_to_chimnneys_flues_fans_PSVs, additional_infiltration, window_infiltration, infiltration_rate, infiltration_rate2, shelter_factor, infiltration_rate_incorporating_shelter_factor, wind_factor, adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed, exhaust_air_heat_pump_air_change_rate_through_system, effective_air_change_rate ) - **number_of_chimneys_total** (`int`) - - **number_of_chimneys_m3_per_hour** (`float`) - See (6a). - **number_of_open_flues_total** (`int`) - - **number_of_open_flues_m3_per_hour** (`float`) - See (6b). - **infiltration_due_to_chimenys_flues_fans_PSVs** (`float`) - See (8). - **additional_infiltration** (`float`) - See (10). - **window_infiltration** (`float`) - See (15). - **infiltration_rate** (`float`) - See (16). - **infiltration_rate2** (`float`) - See (18). - **shelter_factor** (`float`) - See (20). - **infiltration_rate_incorporating_shelter_factor** (`float`) - See (21). - **wind_factor** list (`float`) - See (22a). - **adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed**: list (`float`) - See (22b). - **exhaust_air_heat_pump_air_change_rate_through_system** (`float`) - See (23b). - **effective_air_change_rate** list (`float`) - See (25). :rtype: dict """ # number_of_chimneys number_of_chimneys_total=(number_of_chimneys_main_heating + number_of_chimneys_secondary_heating + number_of_chimneys_other) number_of_chimneys_m3_per_hour=number_of_chimneys_total * 40.0 # number_of_open_flues number_of_open_flues_total=(number_of_open_flues_main_heating + number_of_open_flues_secondary_heating + number_of_open_flues_other) number_of_open_flues_m3_per_hour=number_of_open_flues_total * 20.0 # number_of_intermittant_fans number_of_intermittant_fans_m3_per_hour=number_of_intermittant_fans_total * 10.0 # number_of_passive_vents number_of_passive_vents_m3_per_hour=number_of_passive_vents_total * 10.0 # number_of_flueless_gas_fires number_of_flueless_gas_fires_m3_per_hour=number_of_flueless_gas_fires_total * 40.0 # infiltration_due_to_chimenys_flues_fans_PSVs infiltration_due_to_chimneys_flues_fans_PSVs=((number_of_chimneys_m3_per_hour + number_of_open_flues_m3_per_hour + number_of_intermittant_fans_m3_per_hour + number_of_passive_vents_m3_per_hour + number_of_flueless_gas_fires_m3_per_hour) / dwelling_volume) if air_permeability_value_q50 is None: # changed from 'air_permeability_value_q50 == 0:' on 4-FEB-2021 additional_infiltration=(number_of_storeys_in_the_dwelling-1)*0.1 window_infiltration=0.25 - (0.2 * percentage_of_windows_and_doors_draught_proofed / 100.0) infiltration_rate=(infiltration_due_to_chimneys_flues_fans_PSVs + additional_infiltration + structural_infiltration + suspended_wooden_ground_floor_infiltration + no_draft_lobby_infiltration + window_infiltration ) infiltration_rate2=infiltration_rate else: additional_infiltration=None window_infiltration=None infiltration_rate=None infiltration_rate2=((air_permeability_value_q50 / 20) + infiltration_due_to_chimneys_flues_fans_PSVs) # shelter_factor shelter_factor = 1 - (0.075 * number_of_sides_on_which_dwelling_is_sheltered) # infiltration_rate_incorporating_shelter_factor infiltration_rate_incorporating_shelter_factor = (infiltration_rate2 * shelter_factor) # wind_factor wind_factor=[None]*12 for i in range(12): wind_factor[i]=monthly_average_wind_speed[i] / 4.0 # adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed=[None]*12 for i in range(12): adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] = ( infiltration_rate_incorporating_shelter_factor * wind_factor[i] ) # exhaust_air_heat_pump_air_change_rate_through_system if applicable_case in ['balanced mechanical ventilation with heat recovery', 'balanced mechanical ventilation without heat recovery', 'whole house extract ventilation or positive input ventilation from outside']: if exhaust_air_heat_pump_using_Appendix_N: exhaust_air_heat_pump_air_change_rate_through_system = ( mechanical_ventilation_air_change_rate_through_system * mechanical_ventilation_throughput_factor) else: exhaust_air_heat_pump_air_change_rate_through_system = \ mechanical_ventilation_air_change_rate_through_system else: exhaust_air_heat_pump_air_change_rate_through_system = None # effective_air_change_rate effective_air_change_rate=[None]*12 if applicable_case=='balanced mechanical ventilation with heat recovery': for i in range(12): effective_air_change_rate[i]=( adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + exhaust_air_heat_pump_air_change_rate_through_system * (1.0 - efficiency_allowing_for_in_use_factor / 100.0) ) elif applicable_case=='balanced mechanical ventilation without heat recovery': for i in range(12): effective_air_change_rate[i]=( adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + exhaust_air_heat_pump_air_change_rate_through_system) elif applicable_case=='whole house extract ventilation or positive input ventilation from outside': for i in range(12): if (adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] < 0.5 * exhaust_air_heat_pump_air_change_rate_through_system): effective_air_change_rate[i]=exhaust_air_heat_pump_air_change_rate_through_system else: effective_air_change_rate[i]=( adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + 0.5 * exhaust_air_heat_pump_air_change_rate_through_system) elif applicable_case=='natural ventilation or whole house positive input ventilation from loft': for i in range(12): if adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i]>1: effective_air_change_rate[i]=adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] else: effective_air_change_rate[i]=0.5 + (adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i]**2 * 0.5) return dict( number_of_chimneys_total=number_of_chimneys_total, number_of_chimneys_m3_per_hour=number_of_chimneys_m3_per_hour, number_of_open_flues_total=number_of_open_flues_total, number_of_open_flues_m3_per_hour=number_of_open_flues_m3_per_hour, number_of_intermittant_fans_m3_per_hour=number_of_intermittant_fans_m3_per_hour, number_of_passive_vents_m3_per_hour=number_of_passive_vents_m3_per_hour, number_of_flueless_gas_fires_m3_per_hour=number_of_flueless_gas_fires_m3_per_hour, infiltration_due_to_chimneys_flues_fans_PSVs=infiltration_due_to_chimneys_flues_fans_PSVs, additional_infiltration=additional_infiltration, window_infiltration=window_infiltration, infiltration_rate=infiltration_rate, infiltration_rate2=infiltration_rate2, shelter_factor=shelter_factor, infiltration_rate_incorporating_shelter_factor=infiltration_rate_incorporating_shelter_factor, wind_factor=wind_factor, adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed=adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed, exhaust_air_heat_pump_air_change_rate_through_system=exhaust_air_heat_pump_air_change_rate_through_system, effective_air_change_rate=effective_air_change_rate )
def ventilation_rates(number_of_chimneys_main_heating, number_of_chimneys_secondary_heating, number_of_chimneys_other, number_of_open_flues_main_heating, number_of_open_flues_secondary_heating, number_of_open_flues_other, number_of_intermittant_fans_total, number_of_passive_vents_total, number_of_flueless_gas_fires_total, dwelling_volume, air_permeability_value_q50, number_of_storeys_in_the_dwelling, structural_infiltration, suspended_wooden_ground_floor_infiltration, no_draft_lobby_infiltration, percentage_of_windows_and_doors_draught_proofed, number_of_sides_on_which_dwelling_is_sheltered, monthly_average_wind_speed, applicable_case, mechanical_ventilation_air_change_rate_through_system, exhaust_air_heat_pump_using_Appendix_N, mechanical_ventilation_throughput_factor, efficiency_allowing_for_in_use_factor): """Calculates the ventilation rates, Section 2. :param number_of_chimneys_main_heating: :type number_of_chimneys_main_heating: int :param number_of_chimneys_secondary_heating: :type number_of_chimneys_secondary_heating: int :param number_of_chimneys_other: :type number_of_chimneys_other: int :param number_of_open_flues_main_heating: :type number_of_open_flues_main_heating: int :param number_of_open_flues_secondary_heating: :type number_of_open_flues_secondary_heating: int :param number_of_open_flues_other: :type number_of_open_flues_other: int :param number_of_intermittant_fans_total: :type number_of_intermittant_fans_total: int :param number_of_passive_vents_total: :type number_of_passive_vents_total: int :param number_of_flueless_gas_fires_total: :type number_of_flueless_gas_fires_total: int :param dwelling_volume: See (5). :type dwelling_volume: float :param air_permeability_value_q50: See (17). Use None if not carried out. :type air_permeability_value_q50: float or None :param number_of_storeys_in_the_dwelling: See (9). :type number_of_storeys_in_the_dwelling: int :param structural_infiltration: See (11). :type structural_infiltration: float :param suspended_wooden_ground_floor_infiltration: See (12). :type suspended_wooden_ground_floor_infiltration: float :param no_draft_lobby_infiltration: See (13). :type no_draft_lobby_infiltration: float :param percentage_of_windows_and_doors_draught_proofed: See (14). :type percentage_of_windows_and_doors_draught_proofed: float :param number_of_sides_on_which_dwelling_is_sheltered: See (19). :type number_of_sides_on_which_dwelling_is_sheltered: int :param monthly_average_wind_speed: A list of the monthly wind speeds. 12 items, from Jan to Dec, see (22). :type monthly_average_wind_speed: list (float) :param applicable_case: One of the following options: 'balanced mechanical ventilation with heat recovery'; 'balanced mechanical ventilation without heat recovery'; 'whole house extract ventilation or positive input ventilation from outside'; or 'natural ventilation or whole house positive input ventilation from loft'. :type applicable_case: str :param mechanical_ventilation_air_change_rate_through_system: See (23a). :type mechanical_ventilation_air_change_rate_through_system: float :param exhaust_air_heat_pump_using_Appendix_N: True if exhaust air heat pump using Appendix N, otherwise False. :type exhaust_air_heat_pump_using_Appendix_N: bool :param mechanical_ventilation_throughput_factor: F_mv, see Equation N4. :type mechanical_ventilation_throughput_factor: float :param efficiency_allowing_for_in_use_factor: In %, see (23c). :type efficiency_allowing_for_in_use_factor: float :returns: A dictionary with keys of ( number_of_chimneys_total, number_of_chimneys_m3_per_hour, number_of_open_flues_total, number_of_open_flues_m3_per_hour, number_of_intermittant_fans_m3_per_hour, number_of_passive_vents_m3_per_hour, number_of_flueless_gas_fires_m3_per_hour, infiltration_due_to_chimnneys_flues_fans_PSVs, additional_infiltration, window_infiltration, infiltration_rate, infiltration_rate2, shelter_factor, infiltration_rate_incorporating_shelter_factor, wind_factor, adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed, exhaust_air_heat_pump_air_change_rate_through_system, effective_air_change_rate ) - **number_of_chimneys_total** (`int`) - - **number_of_chimneys_m3_per_hour** (`float`) - See (6a). - **number_of_open_flues_total** (`int`) - - **number_of_open_flues_m3_per_hour** (`float`) - See (6b). - **infiltration_due_to_chimenys_flues_fans_PSVs** (`float`) - See (8). - **additional_infiltration** (`float`) - See (10). - **window_infiltration** (`float`) - See (15). - **infiltration_rate** (`float`) - See (16). - **infiltration_rate2** (`float`) - See (18). - **shelter_factor** (`float`) - See (20). - **infiltration_rate_incorporating_shelter_factor** (`float`) - See (21). - **wind_factor** list (`float`) - See (22a). - **adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed**: list (`float`) - See (22b). - **exhaust_air_heat_pump_air_change_rate_through_system** (`float`) - See (23b). - **effective_air_change_rate** list (`float`) - See (25). :rtype: dict """ number_of_chimneys_total = number_of_chimneys_main_heating + number_of_chimneys_secondary_heating + number_of_chimneys_other number_of_chimneys_m3_per_hour = number_of_chimneys_total * 40.0 number_of_open_flues_total = number_of_open_flues_main_heating + number_of_open_flues_secondary_heating + number_of_open_flues_other number_of_open_flues_m3_per_hour = number_of_open_flues_total * 20.0 number_of_intermittant_fans_m3_per_hour = number_of_intermittant_fans_total * 10.0 number_of_passive_vents_m3_per_hour = number_of_passive_vents_total * 10.0 number_of_flueless_gas_fires_m3_per_hour = number_of_flueless_gas_fires_total * 40.0 infiltration_due_to_chimneys_flues_fans_ps_vs = (number_of_chimneys_m3_per_hour + number_of_open_flues_m3_per_hour + number_of_intermittant_fans_m3_per_hour + number_of_passive_vents_m3_per_hour + number_of_flueless_gas_fires_m3_per_hour) / dwelling_volume if air_permeability_value_q50 is None: additional_infiltration = (number_of_storeys_in_the_dwelling - 1) * 0.1 window_infiltration = 0.25 - 0.2 * percentage_of_windows_and_doors_draught_proofed / 100.0 infiltration_rate = infiltration_due_to_chimneys_flues_fans_PSVs + additional_infiltration + structural_infiltration + suspended_wooden_ground_floor_infiltration + no_draft_lobby_infiltration + window_infiltration infiltration_rate2 = infiltration_rate else: additional_infiltration = None window_infiltration = None infiltration_rate = None infiltration_rate2 = air_permeability_value_q50 / 20 + infiltration_due_to_chimneys_flues_fans_PSVs shelter_factor = 1 - 0.075 * number_of_sides_on_which_dwelling_is_sheltered infiltration_rate_incorporating_shelter_factor = infiltration_rate2 * shelter_factor wind_factor = [None] * 12 for i in range(12): wind_factor[i] = monthly_average_wind_speed[i] / 4.0 adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed = [None] * 12 for i in range(12): adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] = infiltration_rate_incorporating_shelter_factor * wind_factor[i] if applicable_case in ['balanced mechanical ventilation with heat recovery', 'balanced mechanical ventilation without heat recovery', 'whole house extract ventilation or positive input ventilation from outside']: if exhaust_air_heat_pump_using_Appendix_N: exhaust_air_heat_pump_air_change_rate_through_system = mechanical_ventilation_air_change_rate_through_system * mechanical_ventilation_throughput_factor else: exhaust_air_heat_pump_air_change_rate_through_system = mechanical_ventilation_air_change_rate_through_system else: exhaust_air_heat_pump_air_change_rate_through_system = None effective_air_change_rate = [None] * 12 if applicable_case == 'balanced mechanical ventilation with heat recovery': for i in range(12): effective_air_change_rate[i] = adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + exhaust_air_heat_pump_air_change_rate_through_system * (1.0 - efficiency_allowing_for_in_use_factor / 100.0) elif applicable_case == 'balanced mechanical ventilation without heat recovery': for i in range(12): effective_air_change_rate[i] = adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + exhaust_air_heat_pump_air_change_rate_through_system elif applicable_case == 'whole house extract ventilation or positive input ventilation from outside': for i in range(12): if adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] < 0.5 * exhaust_air_heat_pump_air_change_rate_through_system: effective_air_change_rate[i] = exhaust_air_heat_pump_air_change_rate_through_system else: effective_air_change_rate[i] = adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] + 0.5 * exhaust_air_heat_pump_air_change_rate_through_system elif applicable_case == 'natural ventilation or whole house positive input ventilation from loft': for i in range(12): if adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] > 1: effective_air_change_rate[i] = adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] else: effective_air_change_rate[i] = 0.5 + adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed[i] ** 2 * 0.5 return dict(number_of_chimneys_total=number_of_chimneys_total, number_of_chimneys_m3_per_hour=number_of_chimneys_m3_per_hour, number_of_open_flues_total=number_of_open_flues_total, number_of_open_flues_m3_per_hour=number_of_open_flues_m3_per_hour, number_of_intermittant_fans_m3_per_hour=number_of_intermittant_fans_m3_per_hour, number_of_passive_vents_m3_per_hour=number_of_passive_vents_m3_per_hour, number_of_flueless_gas_fires_m3_per_hour=number_of_flueless_gas_fires_m3_per_hour, infiltration_due_to_chimneys_flues_fans_PSVs=infiltration_due_to_chimneys_flues_fans_PSVs, additional_infiltration=additional_infiltration, window_infiltration=window_infiltration, infiltration_rate=infiltration_rate, infiltration_rate2=infiltration_rate2, shelter_factor=shelter_factor, infiltration_rate_incorporating_shelter_factor=infiltration_rate_incorporating_shelter_factor, wind_factor=wind_factor, adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed=adjusted_infiltration_rate_allowing_for_shelter_and_wind_speed, exhaust_air_heat_pump_air_change_rate_through_system=exhaust_air_heat_pump_air_change_rate_through_system, effective_air_change_rate=effective_air_change_rate)
# -*- coding: utf-8 -*- class ProxyMiddleware(object): def __init__(self, proxy_url): self.proxy_url = proxy_url def process_request(self, request, spider): request.meta['proxy'] = self.proxy_url @classmethod def from_crawler(cls, crawler): return cls( proxy_url=crawler.settings.get('PROXY_URL') )
class Proxymiddleware(object): def __init__(self, proxy_url): self.proxy_url = proxy_url def process_request(self, request, spider): request.meta['proxy'] = self.proxy_url @classmethod def from_crawler(cls, crawler): return cls(proxy_url=crawler.settings.get('PROXY_URL'))
if 5 == 2: print("aaaa") print("sjjdjdd") else : print("else")
if 5 == 2: print('aaaa') print('sjjdjdd') else: print('else')
n = int(input()) print('*' * (2*n+1)) print('.' + '*' + ' ' * (2*n - 3) + '*' + '.') for i in range(1, n-1): print('.' + '.' * i + '*' + '@' * ((2*n-3) -2*i) + '*' + '.' * i + '.') print('.' * n +'*' + '.' * n) for i in range(1, n-1): print('.' * ((n)-i) + '*' + ' ' * (i-1) + '@' + ' ' * (i-1) + '*' + '.' * ((n)-i)) print('.' + '*' + '@' * (2*n-3) + '*' + '.') print('*' * (2*n+1))
n = int(input()) print('*' * (2 * n + 1)) print('.' + '*' + ' ' * (2 * n - 3) + '*' + '.') for i in range(1, n - 1): print('.' + '.' * i + '*' + '@' * (2 * n - 3 - 2 * i) + '*' + '.' * i + '.') print('.' * n + '*' + '.' * n) for i in range(1, n - 1): print('.' * (n - i) + '*' + ' ' * (i - 1) + '@' + ' ' * (i - 1) + '*' + '.' * (n - i)) print('.' + '*' + '@' * (2 * n - 3) + '*' + '.') print('*' * (2 * n + 1))
class Enum: def __init__(self,list): self.list = list def enume(self): for index, val in enumerate(self.list,start=1): print(index,val) e1 = Enum([5,15,45,4,53]) e1.enume()
class Enum: def __init__(self, list): self.list = list def enume(self): for (index, val) in enumerate(self.list, start=1): print(index, val) e1 = enum([5, 15, 45, 4, 53]) e1.enume()
# -*- coding: utf-8 -*- # Copyright 2016 CloudFlare, Inc. All rights reserved. # # The contents of this file are licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """ Defines pyPluribus specific exceptions. """ class TimeoutError(Exception): """Raised in case of exceeded runtime for a command.""" pass class CommandExecutionError(Exception): """Raised in case the output cannot be retrieved.""" pass class ConnectionError(Exception): """Raised when the connection with the pluribus device cannot be open.""" pass class ConfigLoadError(Exception): """Raised when not able to upload configuration on the device""" pass class ConfigurationDiscardError(Exception): """Raised when not possible to discard a candidate configuration""" pass class MergeConfigError(Exception): """Raised when not able to merge the config""" pass class ReplaceConfigError(Exception): """Raised when not able to replace the config""" pass class RollbackError(Exception): """Raised in case of rollback failure.""" pass
""" Defines pyPluribus specific exceptions. """ class Timeouterror(Exception): """Raised in case of exceeded runtime for a command.""" pass class Commandexecutionerror(Exception): """Raised in case the output cannot be retrieved.""" pass class Connectionerror(Exception): """Raised when the connection with the pluribus device cannot be open.""" pass class Configloaderror(Exception): """Raised when not able to upload configuration on the device""" pass class Configurationdiscarderror(Exception): """Raised when not possible to discard a candidate configuration""" pass class Mergeconfigerror(Exception): """Raised when not able to merge the config""" pass class Replaceconfigerror(Exception): """Raised when not able to replace the config""" pass class Rollbackerror(Exception): """Raised in case of rollback failure.""" pass
"""Module defining linked list.""" class LinkedList(object): """Classic linked list data structure.""" def __init__(self, iterable=None): """Initialize LinkedList instance.""" self.head = None self._length = 0 try: for el in iterable: self.push(el) except TypeError: self.head = iterable def push(self, val): """Insert val at the head of linked list.""" self.head = Node(val, self.head) self._length += 1 def pop(self): """Pop the first value off of the head and return it.""" if self.head is None: raise IndexError("Cannot pop from an empty linked list.") first = self.head.val self.head = self.head.next self._length -= 1 return first def size(self): """Return length of linked list.""" return self._length def search(self, val): """Will return the node from the list if present, otherwise none.""" search = self.head while search: if search.val == val: return search search = search.next return None def remove(self, node): """Remove a node from linked list.""" prev = None curr = self.head while curr: if curr is node: if prev: prev.next = curr.next else: self.head = curr.next self._length -= 1 prev = curr curr = curr.next def display(self): """Display linked list in tuple literal form.""" res = "(" curr = self.head while curr: val = curr.val if type(val) is str: val = "'" + val + "'" else: val = str(val) res += val if curr.next: res += ', ' curr = curr.next return res + ')' def __len__(self): """Return length of linked_list.""" return self.size() class Node(object): """Node class.""" def __init__(self, val, next=None): """Initialize Node instance.""" self.val = val self.next = next
"""Module defining linked list.""" class Linkedlist(object): """Classic linked list data structure.""" def __init__(self, iterable=None): """Initialize LinkedList instance.""" self.head = None self._length = 0 try: for el in iterable: self.push(el) except TypeError: self.head = iterable def push(self, val): """Insert val at the head of linked list.""" self.head = node(val, self.head) self._length += 1 def pop(self): """Pop the first value off of the head and return it.""" if self.head is None: raise index_error('Cannot pop from an empty linked list.') first = self.head.val self.head = self.head.next self._length -= 1 return first def size(self): """Return length of linked list.""" return self._length def search(self, val): """Will return the node from the list if present, otherwise none.""" search = self.head while search: if search.val == val: return search search = search.next return None def remove(self, node): """Remove a node from linked list.""" prev = None curr = self.head while curr: if curr is node: if prev: prev.next = curr.next else: self.head = curr.next self._length -= 1 prev = curr curr = curr.next def display(self): """Display linked list in tuple literal form.""" res = '(' curr = self.head while curr: val = curr.val if type(val) is str: val = "'" + val + "'" else: val = str(val) res += val if curr.next: res += ', ' curr = curr.next return res + ')' def __len__(self): """Return length of linked_list.""" return self.size() class Node(object): """Node class.""" def __init__(self, val, next=None): """Initialize Node instance.""" self.val = val self.next = next
class UserState(object): def __init__(self, user_id): self.user_id = user_id self.conversation_context = {} self.conversation_started = False self.user = None self.ingredient_cuisine = None self.recipe = None
class Userstate(object): def __init__(self, user_id): self.user_id = user_id self.conversation_context = {} self.conversation_started = False self.user = None self.ingredient_cuisine = None self.recipe = None
class Solution: def findKthNumber(self, n: int, k: int) -> int: cur = 1 k -= 1 while k > 0: step, first, last = 0, cur, cur + 1 while first <= n: step += min(last, n + 1) - first first *= 10 last *= 10 if k >= step: k -= step cur += 1 else: cur *= 10 k -= 1 return cur
class Solution: def find_kth_number(self, n: int, k: int) -> int: cur = 1 k -= 1 while k > 0: (step, first, last) = (0, cur, cur + 1) while first <= n: step += min(last, n + 1) - first first *= 10 last *= 10 if k >= step: k -= step cur += 1 else: cur *= 10 k -= 1 return cur
mylines = [] with open('resume.txt', 'rt') as myfile: for myline in myfile: mylines.append(myline) print("Name: ", end="") for c in mylines[0]: if c == "|": break; else: print(c, end="")
mylines = [] with open('resume.txt', 'rt') as myfile: for myline in myfile: mylines.append(myline) print('Name: ', end='') for c in mylines[0]: if c == '|': break else: print(c, end='')
""" examples package. Author: - 2020-2021 Nicola Creati - 2020-2021 Roberto Vidmar Copyright: 2020-2021 Nicola Creati <ncreati@inogs.it> 2020-2021 Roberto Vidmar <rvidmar@inogs.it> License: MIT/X11 License (see :download:`license.txt <../../../license.txt>`) """
""" examples package. Author: - 2020-2021 Nicola Creati - 2020-2021 Roberto Vidmar Copyright: 2020-2021 Nicola Creati <ncreati@inogs.it> 2020-2021 Roberto Vidmar <rvidmar@inogs.it> License: MIT/X11 License (see :download:`license.txt <../../../license.txt>`) """
class Interaction: current_os = -1 logger = None def __init__(self, current_os, logger): self.current_os = current_os self.logger = logger def print_info(self): self.logger.raw("This is interaction with Maya") # framework interactions def schema_item_double_click(self, param): self.logger.raw(("Double click on schema item", param))
class Interaction: current_os = -1 logger = None def __init__(self, current_os, logger): self.current_os = current_os self.logger = logger def print_info(self): self.logger.raw('This is interaction with Maya') def schema_item_double_click(self, param): self.logger.raw(('Double click on schema item', param))
class GameObject: # this is a generic object: the player, a monster, an item, the stairs... # it's always represented by a character on screen. def __init__(self, x, y, char, color): self.x = x self.y = y self.char = char self.color = color def move(self, dx, dy, tile_map): # move by the given amount, IFF the tile is not blocked tile = tile_map[self.x + dx][self.y + dy] if not tile.blocked: self.x += dx self.y += dy def draw(self, console, visible_tiles, bg=None): # draw the character that represents this object at its position if visible_tiles is None or (self.x, self.y) in visible_tiles: console.draw_char(self.x, self.y, self.char, self.color, bg=bg) def clear(self, console, bg=None): # erase the character that represents this object console.draw_char(self.x, self.y, ' ', self.color, bg=bg)
class Gameobject: def __init__(self, x, y, char, color): self.x = x self.y = y self.char = char self.color = color def move(self, dx, dy, tile_map): tile = tile_map[self.x + dx][self.y + dy] if not tile.blocked: self.x += dx self.y += dy def draw(self, console, visible_tiles, bg=None): if visible_tiles is None or (self.x, self.y) in visible_tiles: console.draw_char(self.x, self.y, self.char, self.color, bg=bg) def clear(self, console, bg=None): console.draw_char(self.x, self.y, ' ', self.color, bg=bg)
class Institution(object): institution_name = None website = None industry = None type = None headquarters = None company_size = None founded = None def __init__(self, name=None, website=None, industry=None, type=None, headquarters=None, company_size=None, founded=None): self.name = name self.website = website self.industry = industry self.type = type self.headquarters = headquarters self.company_size = company_size self.founded = founded class Experience(Institution): from_date = None to_date = None description = None position_title = None duration = None def __init__(self, from_date = None, to_date = None, description = None, position_title = None, duration = None, location = None): self.from_date = from_date self.to_date = to_date self.description = description self.position_title = position_title self.duration = duration self.location = location def __repr__(self): return "{position_title} at {company} from {from_date} to {to_date} for {duration} based at {location}".format( from_date = self.from_date, to_date = self.to_date, position_title = self.position_title, company = self.institution_name, duration = self.duration, location = self.location) def __str__(self): return "{company}, {location}\n {position_title}\n{from_date} - {to_date}".format( from_date = self.from_date, to_date = self.to_date, position_title = ('\n').join(self.position_title), company = self.institution_name, location = list(set(self.location))) class Education(Institution): from_date = None to_date = None description = None degree = None def __init__(self, from_date = None, to_date = None, description = None, degree = None): self.from_date = from_date self.to_date = to_date self.description = description self.degree = degree def __repr__(self): return "{degree} at {company} from {from_date} to {to_date}".format( from_date = self.from_date, to_date = self.to_date, degree = self.degree, company = self.institution_name) def __str__(self): return "{company}, {degree}\n{from_date} - {to_date}".format( from_date = self.from_date[-2:], to_date = self.to_date[-2:], degree = self.degree, company = self.institution_name) class Scraper(object): driver = None def is_signed_in(self): try: self.driver.find_element_by_id("profile-nav-item") return True except: pass return False def __find_element_by_class_name__(self, class_name): try: self.driver.find_element_by_class_name(class_name) return True except: pass return False
class Institution(object): institution_name = None website = None industry = None type = None headquarters = None company_size = None founded = None def __init__(self, name=None, website=None, industry=None, type=None, headquarters=None, company_size=None, founded=None): self.name = name self.website = website self.industry = industry self.type = type self.headquarters = headquarters self.company_size = company_size self.founded = founded class Experience(Institution): from_date = None to_date = None description = None position_title = None duration = None def __init__(self, from_date=None, to_date=None, description=None, position_title=None, duration=None, location=None): self.from_date = from_date self.to_date = to_date self.description = description self.position_title = position_title self.duration = duration self.location = location def __repr__(self): return '{position_title} at {company} from {from_date} to {to_date} for {duration} based at {location}'.format(from_date=self.from_date, to_date=self.to_date, position_title=self.position_title, company=self.institution_name, duration=self.duration, location=self.location) def __str__(self): return '{company}, {location}\n {position_title}\n{from_date} - {to_date}'.format(from_date=self.from_date, to_date=self.to_date, position_title='\n'.join(self.position_title), company=self.institution_name, location=list(set(self.location))) class Education(Institution): from_date = None to_date = None description = None degree = None def __init__(self, from_date=None, to_date=None, description=None, degree=None): self.from_date = from_date self.to_date = to_date self.description = description self.degree = degree def __repr__(self): return '{degree} at {company} from {from_date} to {to_date}'.format(from_date=self.from_date, to_date=self.to_date, degree=self.degree, company=self.institution_name) def __str__(self): return '{company}, {degree}\n{from_date} - {to_date}'.format(from_date=self.from_date[-2:], to_date=self.to_date[-2:], degree=self.degree, company=self.institution_name) class Scraper(object): driver = None def is_signed_in(self): try: self.driver.find_element_by_id('profile-nav-item') return True except: pass return False def __find_element_by_class_name__(self, class_name): try: self.driver.find_element_by_class_name(class_name) return True except: pass return False
class NoSuchPointError(ValueError): pass class Point(tuple): """ A point on an elliptic curve. This is a subclass of tuple (forced to a 2-tuple), and also includes a reference to the underlying Curve. """ def __new__(self, x, y, curve): return tuple.__new__(self, (x, y)) def __init__(self, x, y, curve): self._curve = curve super(Point, self).__init__() self.check_on_curve() def check_on_curve(self): """raise NoSuchPointError (which is a ValueError) if the point is not actually on the curve.""" if not self._curve.contains_point(*self): raise NoSuchPointError('({},{}) is not on the curve {}'.format(self[0], self[1], self._curve)) def __add__(self, other): """Add one point to another point.""" return self._curve.add(self, other) def __sub__(self, other): """Subtract one point from another point.""" return self._curve.add(self, -other) def __mul__(self, e): """Multiply a point by an integer.""" return self._curve.multiply(self, e) def __rmul__(self, other): """Multiply a point by an integer.""" return self * other def __neg__(self): """Unary negation""" return self.__class__(self[0], self._curve.p()-self[1], self._curve) def curve(self): """The curve this point is on.""" return self._curve
class Nosuchpointerror(ValueError): pass class Point(tuple): """ A point on an elliptic curve. This is a subclass of tuple (forced to a 2-tuple), and also includes a reference to the underlying Curve. """ def __new__(self, x, y, curve): return tuple.__new__(self, (x, y)) def __init__(self, x, y, curve): self._curve = curve super(Point, self).__init__() self.check_on_curve() def check_on_curve(self): """raise NoSuchPointError (which is a ValueError) if the point is not actually on the curve.""" if not self._curve.contains_point(*self): raise no_such_point_error('({},{}) is not on the curve {}'.format(self[0], self[1], self._curve)) def __add__(self, other): """Add one point to another point.""" return self._curve.add(self, other) def __sub__(self, other): """Subtract one point from another point.""" return self._curve.add(self, -other) def __mul__(self, e): """Multiply a point by an integer.""" return self._curve.multiply(self, e) def __rmul__(self, other): """Multiply a point by an integer.""" return self * other def __neg__(self): """Unary negation""" return self.__class__(self[0], self._curve.p() - self[1], self._curve) def curve(self): """The curve this point is on.""" return self._curve
#!/usr/bin/env python3 def insertion_sort(lst): #times for i in range(1,len(lst)): #n - 1 while i > 0 and lst[i-1] > lst[i]: #(n - 1)n lst[i], lst[i-1] = lst[i-1], lst[i] #(n - 1)n/2 i -= 1 #1 return lst print(insertion_sort([6, 4, 3, 8, 5]))
def insertion_sort(lst): for i in range(1, len(lst)): while i > 0 and lst[i - 1] > lst[i]: (lst[i], lst[i - 1]) = (lst[i - 1], lst[i]) i -= 1 return lst print(insertion_sort([6, 4, 3, 8, 5]))
""" sp_tool package Sub-Packages are * sharepoint - the core library * tool - the frontend tool """
""" sp_tool package Sub-Packages are * sharepoint - the core library * tool - the frontend tool """
def access_required(): pass
def access_required(): pass
class cacheFilesManagerInterface: def __init__(self, cacheRootPath, resourcesRootPath): super().__init__() def createCacheFiles (self, fileInfo): raise NotImplementedError() def deleteCacheFiles (self, fileInfo): raise NotImplementedError() def getCacheFile (self, fileUID, chunkNumber): raise NotImplementedError() def restoreFileFromCache (self, fileInfo): raise NotImplementedError() def copyFileIntoChunks (self, cachedFileInfo): raise NotImplementedError() def writeChunkContent (self, content, fileName): raise NotImplementedError()
class Cachefilesmanagerinterface: def __init__(self, cacheRootPath, resourcesRootPath): super().__init__() def create_cache_files(self, fileInfo): raise not_implemented_error() def delete_cache_files(self, fileInfo): raise not_implemented_error() def get_cache_file(self, fileUID, chunkNumber): raise not_implemented_error() def restore_file_from_cache(self, fileInfo): raise not_implemented_error() def copy_file_into_chunks(self, cachedFileInfo): raise not_implemented_error() def write_chunk_content(self, content, fileName): raise not_implemented_error()
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # $Id$ # class SystemManagementInterface(object): """ Interface description for hardware management controllers - IPMI - IOL """ def __init__(self, config): self.config = config def getPowerStatus(self): raise NotImplementedError def isPowered(self): ''' Return boolean if system is powered on or not ''' raise NotImplementedError def powerOn(self): ''' Powers on a system ''' raise NotImplementedError def powerOff(self): ''' Powers off a system ''' raise NotImplementedError def powerOffSoft(self): ''' Powers off a system via acpi''' raise NotImplementedError def powerCycle(self): ''' Powers cycles a system ''' raise NotImplementedError def powerReset(self): ''' Resets a system ''' raise NotImplementedError def activateConsole(self): ''' Activate Console''' raise NotImplementedError def registerToZoni(self): ''' register hardware to zoni''' raise NotImplementedError # Extensions from MIMOS - specific to HP Blade via c7000 Blade Enclosure def powerOnNet(self): ''' Power HP Blade Server directly from PXE ''' raise NotImplementedError
class Systemmanagementinterface(object): """ Interface description for hardware management controllers - IPMI - IOL """ def __init__(self, config): self.config = config def get_power_status(self): raise NotImplementedError def is_powered(self): """ Return boolean if system is powered on or not """ raise NotImplementedError def power_on(self): """ Powers on a system """ raise NotImplementedError def power_off(self): """ Powers off a system """ raise NotImplementedError def power_off_soft(self): """ Powers off a system via acpi""" raise NotImplementedError def power_cycle(self): """ Powers cycles a system """ raise NotImplementedError def power_reset(self): """ Resets a system """ raise NotImplementedError def activate_console(self): """ Activate Console""" raise NotImplementedError def register_to_zoni(self): """ register hardware to zoni""" raise NotImplementedError def power_on_net(self): """ Power HP Blade Server directly from PXE """ raise NotImplementedError
#!/usr/bin/python # -*- encoding: utf-8; py-indent-offset: 4 -*- # +------------------------------------------------------------------+ # | ____ _ _ __ __ _ __ | # | / ___| |__ ___ ___| | __ | \/ | |/ / | # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / | # | | |___| | | | __/ (__| < | | | | . \ | # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ | # | | # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de | # +------------------------------------------------------------------+ # # This file is part of Check_MK. # The official homepage is at http://mathias-kettner.de/check_mk. # # check_mk is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation in version 2. check_mk is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with- # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. See the GNU General Public License for more de- # tails. You should have received a copy of the GNU General Public # License along with GNU Make; see the file COPYING. If not, write # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, # Boston, MA 02110-1301 USA. # This file contains the defaults settings for almost all configuration # variables that can be overridden in main.mk. Some configuration # variables are preset in checks/* as well. monitoring_core = "nagios" # other option: "cmc" agent_port = 6556 agent_ports = [] snmp_ports = [] # UDP ports used for SNMP tcp_connect_timeout = 5.0 use_dns_cache = True # prevent DNS by using own cache file delay_precompile = False # delay Python compilation to Nagios execution restart_locking = "abort" # also possible: "wait", None check_submission = "file" # alternative: "pipe" aggr_summary_hostname = "%s-s" agent_min_version = 0 # warn, if plugin has not at least version check_max_cachefile_age = 0 # per default do not use cache files when checking cluster_max_cachefile_age = 90 # secs. piggyback_max_cachefile_age = 3600 # secs piggyback_translation = [] # Ruleset for translating piggyback host names simulation_mode = False agent_simulator = False perfdata_format = "pnp" # also possible: "standard" check_mk_perfdata_with_times = True debug_log = False # deprecated monitoring_host = None # deprecated max_num_processes = 50 fallback_agent_output_encoding = 'latin1' # SNMP communities and encoding has_inline_snmp = False # is set to True by inline_snmp module, when available use_inline_snmp = True non_inline_snmp_hosts = [] # Ruleset to disable Inline-SNMP per host when # use_inline_snmp is enabled. snmp_limit_oid_range = [] # Ruleset to recduce fetched OIDs of a check, only inline SNMP record_inline_snmp_stats = False snmp_default_community = 'public' snmp_communities = [] snmp_timing = [] snmp_character_encodings = [] explicit_snmp_communities = {} # override the rule based configuration # RRD creation (only with CMC) cmc_log_rrdcreation = None # also: "terse", "full" cmc_host_rrd_config = [] # Rule for per-host configuration of RRDs cmc_service_rrd_config = [] # Rule for per-service configuration of RRDs # Inventory and inventory checks inventory_check_interval = None # Nagios intervals (4h = 240) inventory_check_severity = 1 # warning inventory_check_do_scan = True # include SNMP scan for SNMP devices inventory_max_cachefile_age = 120 # seconds inventory_check_autotrigger = True # Automatically trigger inv-check after automation-inventory always_cleanup_autochecks = None # For compatiblity with old configuration periodic_discovery = [] # Nagios templates and other settings concerning generation # of Nagios configuration files. No need to change these values. # Better adopt the content of the templates host_template = 'check_mk_host' cluster_template = 'check_mk_cluster' pingonly_template = 'check_mk_pingonly' active_service_template = 'check_mk_active' inventory_check_template = 'check_mk_inventory' passive_service_template = 'check_mk_passive' passive_service_template_perf = 'check_mk_passive_perf' summary_service_template = 'check_mk_summarized' service_dependency_template = 'check_mk' default_host_group = 'check_mk' generate_hostconf = True generate_dummy_commands = True dummy_check_commandline = 'echo "ERROR - you did an active check on this service - please disable active checks" && exit 1' nagios_illegal_chars = '`;~!$%^&*|\'"<>?,()=' # Data to be defined in main.mk checks = [] static_checks = {} check_parameters = [] checkgroup_parameters = {} inv_parameters = {} # for HW/SW-Inventory legacy_checks = [] # non-WATO variant of legacy checks active_checks = {} # WATO variant for fully formalized checks special_agents = {} # WATO variant for datasource_programs custom_checks = [] # WATO variant for free-form custom checks without formalization all_hosts = [] host_paths = {} snmp_hosts = [ (['snmp'], ALL_HOSTS) ] tcp_hosts = [ (['tcp'], ALL_HOSTS), (NEGATE, ['snmp'], ALL_HOSTS), (['!ping'], ALL_HOSTS) ] bulkwalk_hosts = [] snmpv2c_hosts = [] snmp_without_sys_descr = [] snmpv3_contexts = [] usewalk_hosts = [] dyndns_hosts = [] # use host name as ip address for these hosts primary_address_family = [] ignored_checktypes = [] # exclude from inventory ignored_services = [] # exclude from inventory ignored_checks = [] # exclude from inventory host_groups = [] service_groups = [] service_contactgroups = [] service_notification_periods = [] # deprecated, will be removed soon. host_notification_periods = [] # deprecated, will be removed soon. host_contactgroups = [] parents = [] define_hostgroups = None define_servicegroups = None define_contactgroups = None contactgroup_members = {} contacts = {} timeperiods = {} # needed for WATO clusters = {} clustered_services = [] clustered_services_of = {} # new in 1.1.4 clustered_services_mapping = [] # new for 1.2.5i1 Wato Rule datasource_programs = [] service_aggregations = [] service_dependencies = [] non_aggregated_hosts = [] aggregate_check_mk = False aggregation_output_format = "multiline" # new in 1.1.6. Possible also: "multiline" summary_host_groups = [] summary_service_groups = [] # service groups for aggregated services summary_service_contactgroups = [] # service contact groups for aggregated services summary_host_notification_periods = [] summary_service_notification_periods = [] ipaddresses = {} # mapping from hostname to IPv4 address ipv6addresses = {} # mapping from hostname to IPv6 address only_hosts = None distributed_wato_site = None # used by distributed WATO extra_host_conf = {} extra_summary_host_conf = {} extra_service_conf = {} extra_summary_service_conf = {} extra_nagios_conf = "" service_descriptions = {} donation_hosts = [] donation_command = 'mail -r checkmk@yoursite.de -s "Host donation %s" donatehosts@mathias-kettner.de' % check_mk_version scanparent_hosts = [ ( ALL_HOSTS ) ] host_attributes = {} # needed by WATO, ignored by Check_MK ping_levels = [] # special parameters for host/PING check_command host_check_commands = [] # alternative host check instead of check_icmp check_mk_exit_status = [] # Rule for specifying CMK's exit status in case of various errors check_mk_agent_target_versions = [] # Rule for defining expected version for agents check_periods = [] snmp_check_interval = [] inv_exports = {} # Rulesets for inventory export hooks notification_parameters = {} # Rulesets for parameters of notification scripts use_new_descriptions_for = [] host_icons_and_actions = [] # Custom user icons / actions to be configured service_icons_and_actions = [] # Custom user icons / actions to be configured # Rulesets for agent bakery agent_config = {} bake_agents_on_restart = False
monitoring_core = 'nagios' agent_port = 6556 agent_ports = [] snmp_ports = [] tcp_connect_timeout = 5.0 use_dns_cache = True delay_precompile = False restart_locking = 'abort' check_submission = 'file' aggr_summary_hostname = '%s-s' agent_min_version = 0 check_max_cachefile_age = 0 cluster_max_cachefile_age = 90 piggyback_max_cachefile_age = 3600 piggyback_translation = [] simulation_mode = False agent_simulator = False perfdata_format = 'pnp' check_mk_perfdata_with_times = True debug_log = False monitoring_host = None max_num_processes = 50 fallback_agent_output_encoding = 'latin1' has_inline_snmp = False use_inline_snmp = True non_inline_snmp_hosts = [] snmp_limit_oid_range = [] record_inline_snmp_stats = False snmp_default_community = 'public' snmp_communities = [] snmp_timing = [] snmp_character_encodings = [] explicit_snmp_communities = {} cmc_log_rrdcreation = None cmc_host_rrd_config = [] cmc_service_rrd_config = [] inventory_check_interval = None inventory_check_severity = 1 inventory_check_do_scan = True inventory_max_cachefile_age = 120 inventory_check_autotrigger = True always_cleanup_autochecks = None periodic_discovery = [] host_template = 'check_mk_host' cluster_template = 'check_mk_cluster' pingonly_template = 'check_mk_pingonly' active_service_template = 'check_mk_active' inventory_check_template = 'check_mk_inventory' passive_service_template = 'check_mk_passive' passive_service_template_perf = 'check_mk_passive_perf' summary_service_template = 'check_mk_summarized' service_dependency_template = 'check_mk' default_host_group = 'check_mk' generate_hostconf = True generate_dummy_commands = True dummy_check_commandline = 'echo "ERROR - you did an active check on this service - please disable active checks" && exit 1' nagios_illegal_chars = '`;~!$%^&*|\'"<>?,()=' checks = [] static_checks = {} check_parameters = [] checkgroup_parameters = {} inv_parameters = {} legacy_checks = [] active_checks = {} special_agents = {} custom_checks = [] all_hosts = [] host_paths = {} snmp_hosts = [(['snmp'], ALL_HOSTS)] tcp_hosts = [(['tcp'], ALL_HOSTS), (NEGATE, ['snmp'], ALL_HOSTS), (['!ping'], ALL_HOSTS)] bulkwalk_hosts = [] snmpv2c_hosts = [] snmp_without_sys_descr = [] snmpv3_contexts = [] usewalk_hosts = [] dyndns_hosts = [] primary_address_family = [] ignored_checktypes = [] ignored_services = [] ignored_checks = [] host_groups = [] service_groups = [] service_contactgroups = [] service_notification_periods = [] host_notification_periods = [] host_contactgroups = [] parents = [] define_hostgroups = None define_servicegroups = None define_contactgroups = None contactgroup_members = {} contacts = {} timeperiods = {} clusters = {} clustered_services = [] clustered_services_of = {} clustered_services_mapping = [] datasource_programs = [] service_aggregations = [] service_dependencies = [] non_aggregated_hosts = [] aggregate_check_mk = False aggregation_output_format = 'multiline' summary_host_groups = [] summary_service_groups = [] summary_service_contactgroups = [] summary_host_notification_periods = [] summary_service_notification_periods = [] ipaddresses = {} ipv6addresses = {} only_hosts = None distributed_wato_site = None extra_host_conf = {} extra_summary_host_conf = {} extra_service_conf = {} extra_summary_service_conf = {} extra_nagios_conf = '' service_descriptions = {} donation_hosts = [] donation_command = 'mail -r checkmk@yoursite.de -s "Host donation %s" donatehosts@mathias-kettner.de' % check_mk_version scanparent_hosts = [ALL_HOSTS] host_attributes = {} ping_levels = [] host_check_commands = [] check_mk_exit_status = [] check_mk_agent_target_versions = [] check_periods = [] snmp_check_interval = [] inv_exports = {} notification_parameters = {} use_new_descriptions_for = [] host_icons_and_actions = [] service_icons_and_actions = [] agent_config = {} bake_agents_on_restart = False
#Command line program that performs various conversions def errorMsg(ErrorCode): #Assigning menu methods as values to dictionary keys codes = { 1: MainMenu, 2: CurrencyConverter, 3: TemperatureConverter, 4: MassConverter, 5: LengthConverter } #Extracting the method from dictionary using key, NOTE get() is dictionary function func = codes.get(ErrorCode) #Error message which will be displayed if the input is bad print("\nUnrecognized action! Try again.\n") return func() def ExitProgram(): print("\nExiting Program.") #CURRENCY def convertEuroDin(euros): try: course = 119.643 #Current euro course convert = float(euros) * course print("Euro to din conversion:",round(convert,3)) CurrencyConverter() except ValueError: errorMsg(2) def convertDinEuro(din): try: course = 119.643 convert = float(din) / course print("Dinar to euro conversion:",round(convert,3)) CurrencyConverter() except ValueError: errorMsg(2) #TEMPERATURE def convertC(f): try: celsius = (float(f)-32)*5/9 print(f,"Fahrenheits in Celsius is:",round(celsius,2)) TemperatureConverter() except ValueError: errorMsg(3) def convertF(c): try: fahrenheit = float(c) * 9/5 + 32 print(c,"Celsiuses in Fahrenheits is:",round(fahrenheit,2)) TemperatureConverter() except ValueError: errorMsg(3) #MASS def convertKg(lb): try: kg = float(lb) * 0.45359237 print(lb,"Pound is equal to",round(kg,2),"Kilograms.") MassConverter() except ValueError: errorMsg(4) def convertLb(kg): try: lb = float(kg) / 0.45359237 print(kg,"Kilogram is equal to",round(lb,2),"Pound.") MassConverter() except ValueError: errorMsg(4) #LENGTH def convertM(ft): #Convert ft to m try: meter=float(ft) * 0.3048 print(ft,"Feet is equal to",round(meter,2),"Meter.") LengthConverter() except ValueError: errorMsg(5) def convertFt(m): try: feet=float(m) / 0.3048 print(m,"Meter is equal to",round(feet,2),"Feet.") LengthConverter() except ValueError: errorMsg(5) def convertCm(inch): try: cm = float(inch) * 2.54 print(inch,"Inch is equal to",round(cm,2),"Centimeter.") LengthConverter() except ValueError: errorMsg(5) def convertInch(cm): try: inch=float(cm) / 2.54 print(cm,"Centimeter is equal to",round(inch,2),"Inch.") LengthConverter() except ValueError: errorMsg(5) def convertMi(km): try: miles = float(km) / 1.609344 print(km,"Kilometer in miles is:",int(miles)) LengthConverter() except ValueError: errorMsg(5) def convertKm(mi): try: kms = float(mi) * 1.609344 print(mi,"Mile in kilometers is:",int(kms)) LengthConverter() except ValueError: errorMsg(5) #Handling inputs as menu options for each section, following functions are menu itself def CurrencyConverter(): option=str.capitalize(input("\n1 - Convert Euros to Din\n2 - Convert Din to Euro\nM:Main Menu\nChoose option:")) if option == '1': euro=input("\nEnter ammount of euros:") convertEuroDin(euro) elif option == '2': dinar = input("\nEnter ammount of dinars:") convertDinEuro(dinar) elif option == 'M': MainMenu() else: errorMsg(2) def TemperatureConverter(): option = str.capitalize(input("\n1 - Convert Fahrenheits to Celsius\n2 - Convert Celsius to Fahrenheit\nM:Main Menu\nChoose option:")) if option == '1': f = input("\nEnter fahrenheits:") convertC(f) elif option == '2': c = input("\nEnter celsiuses:") convertF(c) elif option == 'M': MainMenu() else: errorMsg(3) def MassConverter(): option = str.capitalize(input("\n1 - Convert kg to lb\n2 - Convert lb to kg\nM:Main Menu\nChoose option:")) if option == '1': kg = input("\nEnter kilograms:") convertLb(kg) elif option == '2': lb = input("\nEnter pounds:") convertKg(lb) elif option == 'M': MainMenu() else: errorMsg(4) def LengthConverter(): option = str.capitalize(input("\n1 - Convert m to ft\n2 - Convert ft to m\n3 - Convert cm to inch\n4 - Convert inch to cm\n5 - Convert km to miles\n6 - Convert miles to km\nM:Main Menu\nChoose option:")) if option == '1': meter = input("\nEnter meters:") convertFt(meter) elif option == '2': feet = input("\nEnter feets:") convertM(feet) elif option == '3': cm = input("\nEnter centimeters:") convertInch(cm) elif option == '4': inch = input("\nEnter inches:") convertCm(inch) elif option == '5': km = input("\nEnter kilometers:") convertMi(km) elif option == '6': mile = input("\nEnter miles:") convertKm(mile) elif option == 'M': MainMenu() else: errorMsg(5) def MainMenu(): option = str.capitalize(input("\nAvailable programs:\n1:Currency Converter\n2:Temperature Converter\n3:Mass Converter\n4:Length Converter\nX:Exit Program\nChoose option:")) if option == '1': CurrencyConverter() elif option =='2': TemperatureConverter() elif option =='3': MassConverter() elif option =='4': LengthConverter() elif option == 'X': ExitProgram() else: errorMsg(1) #Main Program active = True while(active): #Main Menu which displays all available converions by sections MainMenu() active = False
def error_msg(ErrorCode): codes = {1: MainMenu, 2: CurrencyConverter, 3: TemperatureConverter, 4: MassConverter, 5: LengthConverter} func = codes.get(ErrorCode) print('\nUnrecognized action! Try again.\n') return func() def exit_program(): print('\nExiting Program.') def convert_euro_din(euros): try: course = 119.643 convert = float(euros) * course print('Euro to din conversion:', round(convert, 3)) currency_converter() except ValueError: error_msg(2) def convert_din_euro(din): try: course = 119.643 convert = float(din) / course print('Dinar to euro conversion:', round(convert, 3)) currency_converter() except ValueError: error_msg(2) def convert_c(f): try: celsius = (float(f) - 32) * 5 / 9 print(f, 'Fahrenheits in Celsius is:', round(celsius, 2)) temperature_converter() except ValueError: error_msg(3) def convert_f(c): try: fahrenheit = float(c) * 9 / 5 + 32 print(c, 'Celsiuses in Fahrenheits is:', round(fahrenheit, 2)) temperature_converter() except ValueError: error_msg(3) def convert_kg(lb): try: kg = float(lb) * 0.45359237 print(lb, 'Pound is equal to', round(kg, 2), 'Kilograms.') mass_converter() except ValueError: error_msg(4) def convert_lb(kg): try: lb = float(kg) / 0.45359237 print(kg, 'Kilogram is equal to', round(lb, 2), 'Pound.') mass_converter() except ValueError: error_msg(4) def convert_m(ft): try: meter = float(ft) * 0.3048 print(ft, 'Feet is equal to', round(meter, 2), 'Meter.') length_converter() except ValueError: error_msg(5) def convert_ft(m): try: feet = float(m) / 0.3048 print(m, 'Meter is equal to', round(feet, 2), 'Feet.') length_converter() except ValueError: error_msg(5) def convert_cm(inch): try: cm = float(inch) * 2.54 print(inch, 'Inch is equal to', round(cm, 2), 'Centimeter.') length_converter() except ValueError: error_msg(5) def convert_inch(cm): try: inch = float(cm) / 2.54 print(cm, 'Centimeter is equal to', round(inch, 2), 'Inch.') length_converter() except ValueError: error_msg(5) def convert_mi(km): try: miles = float(km) / 1.609344 print(km, 'Kilometer in miles is:', int(miles)) length_converter() except ValueError: error_msg(5) def convert_km(mi): try: kms = float(mi) * 1.609344 print(mi, 'Mile in kilometers is:', int(kms)) length_converter() except ValueError: error_msg(5) def currency_converter(): option = str.capitalize(input('\n1 - Convert Euros to Din\n2 - Convert Din to Euro\nM:Main Menu\nChoose option:')) if option == '1': euro = input('\nEnter ammount of euros:') convert_euro_din(euro) elif option == '2': dinar = input('\nEnter ammount of dinars:') convert_din_euro(dinar) elif option == 'M': main_menu() else: error_msg(2) def temperature_converter(): option = str.capitalize(input('\n1 - Convert Fahrenheits to Celsius\n2 - Convert Celsius to Fahrenheit\nM:Main Menu\nChoose option:')) if option == '1': f = input('\nEnter fahrenheits:') convert_c(f) elif option == '2': c = input('\nEnter celsiuses:') convert_f(c) elif option == 'M': main_menu() else: error_msg(3) def mass_converter(): option = str.capitalize(input('\n1 - Convert kg to lb\n2 - Convert lb to kg\nM:Main Menu\nChoose option:')) if option == '1': kg = input('\nEnter kilograms:') convert_lb(kg) elif option == '2': lb = input('\nEnter pounds:') convert_kg(lb) elif option == 'M': main_menu() else: error_msg(4) def length_converter(): option = str.capitalize(input('\n1 - Convert m to ft\n2 - Convert ft to m\n3 - Convert cm to inch\n4 - Convert inch to cm\n5 - Convert km to miles\n6 - Convert miles to km\nM:Main Menu\nChoose option:')) if option == '1': meter = input('\nEnter meters:') convert_ft(meter) elif option == '2': feet = input('\nEnter feets:') convert_m(feet) elif option == '3': cm = input('\nEnter centimeters:') convert_inch(cm) elif option == '4': inch = input('\nEnter inches:') convert_cm(inch) elif option == '5': km = input('\nEnter kilometers:') convert_mi(km) elif option == '6': mile = input('\nEnter miles:') convert_km(mile) elif option == 'M': main_menu() else: error_msg(5) def main_menu(): option = str.capitalize(input('\nAvailable programs:\n1:Currency Converter\n2:Temperature Converter\n3:Mass Converter\n4:Length Converter\nX:Exit Program\nChoose option:')) if option == '1': currency_converter() elif option == '2': temperature_converter() elif option == '3': mass_converter() elif option == '4': length_converter() elif option == 'X': exit_program() else: error_msg(1) active = True while active: main_menu() active = False
if __name__ == "__main__": Motifs = [ "TCGGGGGTTTTT", "CCGGTGACTTAC", "ACGGGGATTTTC", "TTGGGGACTTTT", "AAGGGGACTTCC", "TTGGGGACTTCC", "TCGGGGATTCAT", "TCGGGGATTCCT", "TAGGGGAACTAC", "TCGGGTATAACC", ]
if __name__ == '__main__': motifs = ['TCGGGGGTTTTT', 'CCGGTGACTTAC', 'ACGGGGATTTTC', 'TTGGGGACTTTT', 'AAGGGGACTTCC', 'TTGGGGACTTCC', 'TCGGGGATTCAT', 'TCGGGGATTCCT', 'TAGGGGAACTAC', 'TCGGGTATAACC']
fibs = {0: 0, 1: 1} def fib(n): if n in fibs: return fibs[n] if n % 2 == 0: fibs[n] = ((2 * fib((n / 2) - 1)) + fib(n / 2)) * fib(n / 2) return fibs[n] fibs[n] = (fib((n - 1) / 2) ** 2) + (fib((n+1) / 2) ** 2) return fibs[n] # limit 100000
fibs = {0: 0, 1: 1} def fib(n): if n in fibs: return fibs[n] if n % 2 == 0: fibs[n] = (2 * fib(n / 2 - 1) + fib(n / 2)) * fib(n / 2) return fibs[n] fibs[n] = fib((n - 1) / 2) ** 2 + fib((n + 1) / 2) ** 2 return fibs[n]
t = int(input()) while t: X, Y = map(int, input().split()) while X>0 and Y>0 : if X>Y: if X%Y==0: X=Y break X = X%Y else: if Y%X==0: Y=X break Y = Y%X print(X+Y) t = t-1
t = int(input()) while t: (x, y) = map(int, input().split()) while X > 0 and Y > 0: if X > Y: if X % Y == 0: x = Y break x = X % Y else: if Y % X == 0: y = X break y = Y % X print(X + Y) t = t - 1
a = int(input()) b = int(input()) r = 0 if a < b: for x in range(a + 1, b): if x % 2 != 0: r += x elif a >= b: for x in range(a - 1, b, -1): if x % 2 != 0: r += x print(r)
a = int(input()) b = int(input()) r = 0 if a < b: for x in range(a + 1, b): if x % 2 != 0: r += x elif a >= b: for x in range(a - 1, b, -1): if x % 2 != 0: r += x print(r)
def regionQuery(self,P,eps): result = [] for d in self.dataSet: if (((d[0]-P[0])**2 + (d[1] - P[1])**2)**0.5)<=eps: result.append(d) return result def expandCluster(self,point,NeighbourPoints,C,eps,MinPts): C.addPoint(point) for p in NeighbourPoints: if p not in self.visited: self.visited.append(p) np = self.regionQuery(p,eps) if len(np) >= MinPts: for n in np: if n not in NeighbourPoints: NeighbourPoints.append(n) for c in self.Clusters: if not c.has(p): if not C.has(p): C.addPoint(p) if len(self.Clusters) == 0: if not C.has(p): C.addPoint(p) self.Clusters.append(C)
def region_query(self, P, eps): result = [] for d in self.dataSet: if ((d[0] - P[0]) ** 2 + (d[1] - P[1]) ** 2) ** 0.5 <= eps: result.append(d) return result def expand_cluster(self, point, NeighbourPoints, C, eps, MinPts): C.addPoint(point) for p in NeighbourPoints: if p not in self.visited: self.visited.append(p) np = self.regionQuery(p, eps) if len(np) >= MinPts: for n in np: if n not in NeighbourPoints: NeighbourPoints.append(n) for c in self.Clusters: if not c.has(p): if not C.has(p): C.addPoint(p) if len(self.Clusters) == 0: if not C.has(p): C.addPoint(p) self.Clusters.append(C)
def main(): n = int(input()) x = sorted(map(int, input().split())) q = int(input()) m = [int(input()) for _ in range(q)] for coin in m: l, r = -1, n while l + 1 < r: mid = (l + r) // 2 if x[mid] <= coin: l = mid else: r = mid print(l + 1) main()
def main(): n = int(input()) x = sorted(map(int, input().split())) q = int(input()) m = [int(input()) for _ in range(q)] for coin in m: (l, r) = (-1, n) while l + 1 < r: mid = (l + r) // 2 if x[mid] <= coin: l = mid else: r = mid print(l + 1) main()
"""A library for installing Python wheels. """ __version__ = "0.2.0.dev0"
"""A library for installing Python wheels. """ __version__ = '0.2.0.dev0'
with open("opcodes.txt") as f: code = tuple([int(x) for x in f.read().strip().split(',')]) def op1(code, a, b, c, pos): code[c] = a + b return pos+4 def op2(code, a, b, c, pos): code[c] = a * b return pos+4 def op3(code, a, pos): global sysID code[a] = sysID return pos+2 def op4(code, a, pos): global output output = a return pos+2 def op5(code, a, b, pos): return b if a != 0 else pos+3 def op6(code, a, b, pos): return b if a == 0 else pos+3 def op7(code, a, b, c, pos): code[c] = 1 if a < b else 0 return pos+4 def op8(code, a, b, c, pos): code[c] = 1 if a == b else 0 return pos+4 ops = [op1, op2, op3, op4, op5, op6, op7, op8] def run_code(code): pos = 0 while True: opcode = code[pos] % 100 a_mode = (code[pos] % 1000) // 100 b_mode = (code[pos] % 10000) // 1000 if opcode in [1,2,7,8]: a = code[code[pos+1]] if a_mode == 0 else code[pos+1] b = code[code[pos+2]] if b_mode == 0 else code[pos+2] c = code[pos+3] pos = ops[opcode-1](code, a, b, c, pos) elif opcode in [5, 6]: a = code[code[pos+1]] if a_mode == 0 else code[pos+1] b = code[code[pos+2]] if b_mode == 0 else code[pos+2] pos = ops[opcode-1](code, a, b, pos) elif opcode == 4: a = code[code[pos+1]] if a_mode == 0 else code[pos+1] pos = ops[3](code, a, pos) elif opcode == 3: pos = ops[2](code, code[pos+1], pos) else: if opcode != 99: print ("Unknown opcode:", opcode) break sysID = 1 output = None run_code(list(code)) print("Part 1:", output) sysID = 5 run_code(list(code)) print("Part 2:", output)
with open('opcodes.txt') as f: code = tuple([int(x) for x in f.read().strip().split(',')]) def op1(code, a, b, c, pos): code[c] = a + b return pos + 4 def op2(code, a, b, c, pos): code[c] = a * b return pos + 4 def op3(code, a, pos): global sysID code[a] = sysID return pos + 2 def op4(code, a, pos): global output output = a return pos + 2 def op5(code, a, b, pos): return b if a != 0 else pos + 3 def op6(code, a, b, pos): return b if a == 0 else pos + 3 def op7(code, a, b, c, pos): code[c] = 1 if a < b else 0 return pos + 4 def op8(code, a, b, c, pos): code[c] = 1 if a == b else 0 return pos + 4 ops = [op1, op2, op3, op4, op5, op6, op7, op8] def run_code(code): pos = 0 while True: opcode = code[pos] % 100 a_mode = code[pos] % 1000 // 100 b_mode = code[pos] % 10000 // 1000 if opcode in [1, 2, 7, 8]: a = code[code[pos + 1]] if a_mode == 0 else code[pos + 1] b = code[code[pos + 2]] if b_mode == 0 else code[pos + 2] c = code[pos + 3] pos = ops[opcode - 1](code, a, b, c, pos) elif opcode in [5, 6]: a = code[code[pos + 1]] if a_mode == 0 else code[pos + 1] b = code[code[pos + 2]] if b_mode == 0 else code[pos + 2] pos = ops[opcode - 1](code, a, b, pos) elif opcode == 4: a = code[code[pos + 1]] if a_mode == 0 else code[pos + 1] pos = ops[3](code, a, pos) elif opcode == 3: pos = ops[2](code, code[pos + 1], pos) else: if opcode != 99: print('Unknown opcode:', opcode) break sys_id = 1 output = None run_code(list(code)) print('Part 1:', output) sys_id = 5 run_code(list(code)) print('Part 2:', output)
# # PySNMP MIB module MSERIES-PORT-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MSERIES-PORT-MIB # Produced by pysmi-0.3.4 at Wed May 1 14:15:42 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint") mseries, = mibBuilder.importSymbols("MSERIES-MIB", "mseries") PortMode, PortStatus, PortType = mibBuilder.importSymbols("MSERIES-TC", "PortMode", "PortStatus", "PortType") ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup") ModuleIdentity, NotificationType, MibIdentifier, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Bits, Counter64, ObjectIdentity, Unsigned32, iso, TimeTicks, Gauge32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "NotificationType", "MibIdentifier", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Bits", "Counter64", "ObjectIdentity", "Unsigned32", "iso", "TimeTicks", "Gauge32", "Counter32") TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString") smartPort = ModuleIdentity((1, 3, 6, 1, 4, 1, 30826, 1, 3)) smartPort.setRevisions(('2014-02-12 13:44',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: smartPort.setRevisionsDescriptions(('The initial revision of the MSERIES Port MIB.',)) if mibBuilder.loadTexts: smartPort.setLastUpdated('201402121344Z') if mibBuilder.loadTexts: smartPort.setOrganization('SmartOptics') if mibBuilder.loadTexts: smartPort.setContactInfo('http://www.smartoptics.com') if mibBuilder.loadTexts: smartPort.setDescription('This is the enterprise specific Port MIB for SmartOptics M-Series.') smartPortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1)) smartPortGeneral = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 1)) smartPortList = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2)) smartPortMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2)) smartPortGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 1)) smartPortCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 2)) smartPortTable = MibTable((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1), ) if mibBuilder.loadTexts: smartPortTable.setStatus('current') if mibBuilder.loadTexts: smartPortTable.setDescription('A port table.') smartPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1), ).setIndexNames((0, "MSERIES-PORT-MIB", "smartPortIndex")) if mibBuilder.loadTexts: smartPortEntry.setStatus('current') if mibBuilder.loadTexts: smartPortEntry.setDescription('An entry in the port list.') smartPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly") if mibBuilder.loadTexts: smartPortIndex.setStatus('current') if mibBuilder.loadTexts: smartPortIndex.setDescription('A unique index for each port that corresponds to the index in the interface table') smartPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: smartPortName.setStatus('current') if mibBuilder.loadTexts: smartPortName.setDescription('The name of the port.') smartPortAlias = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 3), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: smartPortAlias.setStatus('current') if mibBuilder.loadTexts: smartPortAlias.setDescription('User configurable Port Alias for the port. Not writeable in SmartOS v2.3') smartPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 4), PortType()).setMaxAccess("readonly") if mibBuilder.loadTexts: smartPortType.setStatus('current') if mibBuilder.loadTexts: smartPortType.setDescription('The type of port. rx(1) - Receiving port. tx(2) - Transmitting port. biDi(3) - Bidirectional port.') smartPortPower = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: smartPortPower.setStatus('current') if mibBuilder.loadTexts: smartPortPower.setDescription('The power level in units of 0.1 dBm.') smartPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 6), PortStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: smartPortStatus.setStatus('current') if mibBuilder.loadTexts: smartPortStatus.setDescription('The operational state for a port. idle(1) - The port is not activated down(2) - The port traffic is lost. up(3) - There is traffic on the port. high(4) - The port got to high power. low(5) - The port got to low power. eyeSafety(6) - The Line Tx port is in Eye Safety Mode. This means that either the connector on the Line Tx port is not inserted or that you have too strong reflection from the line fiber. cd(7) - Channel detected. ncd(8) - No channel detected.') smartPortMode = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 7), PortMode()).setMaxAccess("readwrite") if mibBuilder.loadTexts: smartPortMode.setStatus('current') if mibBuilder.loadTexts: smartPortMode.setDescription("The Mode of the Port. normal (1) - The port is active. No alarms are beeing suppressed. service (2) . The port is in service mode and alarms are beeing suppressed. When service is ready smartPortMode should be set to 'normal' again. Not writeable in SmartOS v2.3") smartPortHighPowerAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 8), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: smartPortHighPowerAlarmThreshold.setStatus('current') if mibBuilder.loadTexts: smartPortHighPowerAlarmThreshold.setDescription('The threshold for the High Power alarm. Not writeable in SmartOS v2.3') smartPortLowPowerAlarmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 9), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: smartPortLowPowerAlarmThreshold.setStatus('current') if mibBuilder.loadTexts: smartPortLowPowerAlarmThreshold.setDescription('The threshold for the Low Power alarm. Not writeable in SmartOS v2.3') smartPortListGroupV1 = ObjectGroup((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 1, 1)).setObjects(("MSERIES-PORT-MIB", "smartPortIndex"), ("MSERIES-PORT-MIB", "smartPortName"), ("MSERIES-PORT-MIB", "smartPortAlias"), ("MSERIES-PORT-MIB", "smartPortType"), ("MSERIES-PORT-MIB", "smartPortPower"), ("MSERIES-PORT-MIB", "smartPortStatus"), ("MSERIES-PORT-MIB", "smartPortMode"), ("MSERIES-PORT-MIB", "smartPortHighPowerAlarmThreshold"), ("MSERIES-PORT-MIB", "smartPortLowPowerAlarmThreshold")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): smartPortListGroupV1 = smartPortListGroupV1.setStatus('current') if mibBuilder.loadTexts: smartPortListGroupV1.setDescription('The Port List MIB objects v1.') smartPortBasicComplV1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 2, 1)).setObjects(("MSERIES-PORT-MIB", "smartPortListGroupV1")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): smartPortBasicComplV1 = smartPortBasicComplV1.setStatus('current') if mibBuilder.loadTexts: smartPortBasicComplV1.setDescription('Basic implementation requirements for the port MIB V1.') mibBuilder.exportSymbols("MSERIES-PORT-MIB", smartPortCompliances=smartPortCompliances, smartPortPower=smartPortPower, smartPortHighPowerAlarmThreshold=smartPortHighPowerAlarmThreshold, smartPortTable=smartPortTable, PYSNMP_MODULE_ID=smartPort, smartPortBasicComplV1=smartPortBasicComplV1, smartPortGeneral=smartPortGeneral, smartPortType=smartPortType, smartPortName=smartPortName, smartPortGroups=smartPortGroups, smartPortIndex=smartPortIndex, smartPort=smartPort, smartPortAlias=smartPortAlias, smartPortStatus=smartPortStatus, smartPortList=smartPortList, smartPortMode=smartPortMode, smartPortLowPowerAlarmThreshold=smartPortLowPowerAlarmThreshold, smartPortEntry=smartPortEntry, smartPortMIBConformance=smartPortMIBConformance, smartPortListGroupV1=smartPortListGroupV1, smartPortObjects=smartPortObjects)
(integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (single_value_constraint, constraints_intersection, value_range_constraint, constraints_union, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsIntersection', 'ValueRangeConstraint', 'ConstraintsUnion', 'ValueSizeConstraint') (mseries,) = mibBuilder.importSymbols('MSERIES-MIB', 'mseries') (port_mode, port_status, port_type) = mibBuilder.importSymbols('MSERIES-TC', 'PortMode', 'PortStatus', 'PortType') (module_compliance, object_group, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'ObjectGroup', 'NotificationGroup') (module_identity, notification_type, mib_identifier, integer32, mib_scalar, mib_table, mib_table_row, mib_table_column, ip_address, bits, counter64, object_identity, unsigned32, iso, time_ticks, gauge32, counter32) = mibBuilder.importSymbols('SNMPv2-SMI', 'ModuleIdentity', 'NotificationType', 'MibIdentifier', 'Integer32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'IpAddress', 'Bits', 'Counter64', 'ObjectIdentity', 'Unsigned32', 'iso', 'TimeTicks', 'Gauge32', 'Counter32') (textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString') smart_port = module_identity((1, 3, 6, 1, 4, 1, 30826, 1, 3)) smartPort.setRevisions(('2014-02-12 13:44',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: smartPort.setRevisionsDescriptions(('The initial revision of the MSERIES Port MIB.',)) if mibBuilder.loadTexts: smartPort.setLastUpdated('201402121344Z') if mibBuilder.loadTexts: smartPort.setOrganization('SmartOptics') if mibBuilder.loadTexts: smartPort.setContactInfo('http://www.smartoptics.com') if mibBuilder.loadTexts: smartPort.setDescription('This is the enterprise specific Port MIB for SmartOptics M-Series.') smart_port_objects = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1)) smart_port_general = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 1)) smart_port_list = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2)) smart_port_mib_conformance = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2)) smart_port_groups = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 1)) smart_port_compliances = mib_identifier((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 2)) smart_port_table = mib_table((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1)) if mibBuilder.loadTexts: smartPortTable.setStatus('current') if mibBuilder.loadTexts: smartPortTable.setDescription('A port table.') smart_port_entry = mib_table_row((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1)).setIndexNames((0, 'MSERIES-PORT-MIB', 'smartPortIndex')) if mibBuilder.loadTexts: smartPortEntry.setStatus('current') if mibBuilder.loadTexts: smartPortEntry.setDescription('An entry in the port list.') smart_port_index = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 1), unsigned32().subtype(subtypeSpec=value_range_constraint(1, 2147483647))).setMaxAccess('readonly') if mibBuilder.loadTexts: smartPortIndex.setStatus('current') if mibBuilder.loadTexts: smartPortIndex.setDescription('A unique index for each port that corresponds to the index in the interface table') smart_port_name = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 2), display_string()).setMaxAccess('readonly') if mibBuilder.loadTexts: smartPortName.setStatus('current') if mibBuilder.loadTexts: smartPortName.setDescription('The name of the port.') smart_port_alias = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 3), display_string()).setMaxAccess('readwrite') if mibBuilder.loadTexts: smartPortAlias.setStatus('current') if mibBuilder.loadTexts: smartPortAlias.setDescription('User configurable Port Alias for the port. Not writeable in SmartOS v2.3') smart_port_type = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 4), port_type()).setMaxAccess('readonly') if mibBuilder.loadTexts: smartPortType.setStatus('current') if mibBuilder.loadTexts: smartPortType.setDescription('The type of port. rx(1) - Receiving port. tx(2) - Transmitting port. biDi(3) - Bidirectional port.') smart_port_power = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 5), integer32()).setMaxAccess('readonly') if mibBuilder.loadTexts: smartPortPower.setStatus('current') if mibBuilder.loadTexts: smartPortPower.setDescription('The power level in units of 0.1 dBm.') smart_port_status = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 6), port_status()).setMaxAccess('readonly') if mibBuilder.loadTexts: smartPortStatus.setStatus('current') if mibBuilder.loadTexts: smartPortStatus.setDescription('The operational state for a port. idle(1) - The port is not activated down(2) - The port traffic is lost. up(3) - There is traffic on the port. high(4) - The port got to high power. low(5) - The port got to low power. eyeSafety(6) - The Line Tx port is in Eye Safety Mode. This means that either the connector on the Line Tx port is not inserted or that you have too strong reflection from the line fiber. cd(7) - Channel detected. ncd(8) - No channel detected.') smart_port_mode = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 7), port_mode()).setMaxAccess('readwrite') if mibBuilder.loadTexts: smartPortMode.setStatus('current') if mibBuilder.loadTexts: smartPortMode.setDescription("The Mode of the Port. normal (1) - The port is active. No alarms are beeing suppressed. service (2) . The port is in service mode and alarms are beeing suppressed. When service is ready smartPortMode should be set to 'normal' again. Not writeable in SmartOS v2.3") smart_port_high_power_alarm_threshold = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 8), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: smartPortHighPowerAlarmThreshold.setStatus('current') if mibBuilder.loadTexts: smartPortHighPowerAlarmThreshold.setDescription('The threshold for the High Power alarm. Not writeable in SmartOS v2.3') smart_port_low_power_alarm_threshold = mib_table_column((1, 3, 6, 1, 4, 1, 30826, 1, 3, 1, 2, 1, 1, 9), integer32()).setMaxAccess('readwrite') if mibBuilder.loadTexts: smartPortLowPowerAlarmThreshold.setStatus('current') if mibBuilder.loadTexts: smartPortLowPowerAlarmThreshold.setDescription('The threshold for the Low Power alarm. Not writeable in SmartOS v2.3') smart_port_list_group_v1 = object_group((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 1, 1)).setObjects(('MSERIES-PORT-MIB', 'smartPortIndex'), ('MSERIES-PORT-MIB', 'smartPortName'), ('MSERIES-PORT-MIB', 'smartPortAlias'), ('MSERIES-PORT-MIB', 'smartPortType'), ('MSERIES-PORT-MIB', 'smartPortPower'), ('MSERIES-PORT-MIB', 'smartPortStatus'), ('MSERIES-PORT-MIB', 'smartPortMode'), ('MSERIES-PORT-MIB', 'smartPortHighPowerAlarmThreshold'), ('MSERIES-PORT-MIB', 'smartPortLowPowerAlarmThreshold')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): smart_port_list_group_v1 = smartPortListGroupV1.setStatus('current') if mibBuilder.loadTexts: smartPortListGroupV1.setDescription('The Port List MIB objects v1.') smart_port_basic_compl_v1 = module_compliance((1, 3, 6, 1, 4, 1, 30826, 1, 3, 2, 2, 1)).setObjects(('MSERIES-PORT-MIB', 'smartPortListGroupV1')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): smart_port_basic_compl_v1 = smartPortBasicComplV1.setStatus('current') if mibBuilder.loadTexts: smartPortBasicComplV1.setDescription('Basic implementation requirements for the port MIB V1.') mibBuilder.exportSymbols('MSERIES-PORT-MIB', smartPortCompliances=smartPortCompliances, smartPortPower=smartPortPower, smartPortHighPowerAlarmThreshold=smartPortHighPowerAlarmThreshold, smartPortTable=smartPortTable, PYSNMP_MODULE_ID=smartPort, smartPortBasicComplV1=smartPortBasicComplV1, smartPortGeneral=smartPortGeneral, smartPortType=smartPortType, smartPortName=smartPortName, smartPortGroups=smartPortGroups, smartPortIndex=smartPortIndex, smartPort=smartPort, smartPortAlias=smartPortAlias, smartPortStatus=smartPortStatus, smartPortList=smartPortList, smartPortMode=smartPortMode, smartPortLowPowerAlarmThreshold=smartPortLowPowerAlarmThreshold, smartPortEntry=smartPortEntry, smartPortMIBConformance=smartPortMIBConformance, smartPortListGroupV1=smartPortListGroupV1, smartPortObjects=smartPortObjects)
# # Pelican_manager # __title__ = 'pelican_manager' __description__ = 'easy way to management pelican blog.' __url__ = 'https://github.com/xiaojieluo/pelican-manager' __version__ = '0.2.1' __build__ = 0x021801 __author__ = 'Xiaojie Luo' __author_email__ = 'xiaojieluoff@gmail.com' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2017 Xiaojie Luo' __cake__ = u'\u2728 \U0001f370 \u2728'
__title__ = 'pelican_manager' __description__ = 'easy way to management pelican blog.' __url__ = 'https://github.com/xiaojieluo/pelican-manager' __version__ = '0.2.1' __build__ = 137217 __author__ = 'Xiaojie Luo' __author_email__ = 'xiaojieluoff@gmail.com' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2017 Xiaojie Luo' __cake__ = u'✨ 🍰 ✨'
#!/usr/bin/env python #pylint: skip-file # This source code is licensed under the Apache license found in the # LICENSE file in the root directory of this project. class ScheduleInfoOutput(object): def __init__(self): """ Attributes: swaggerTypes (dict): The key is attribute name and the value is attribute type. attributeMap (dict): The key is attribute name and the value is json key in definition. """ self.swaggerTypes = { 'description': 'str', 'startTime': 'date-time', 'endTime': 'date-time', 'origin': 'str', 'operation': 'str', 'taskId': 'str', 'groupName': 'str', 'scheduledWorkSpecId': 'str', 'prevTime': 'date-time', 'nextTime': 'date-time' } self.attributeMap = { 'description': 'description', 'startTime': 'startTime', 'endTime': 'endTime', 'origin': 'origin', 'operation': 'operation', 'taskId': 'taskId', 'groupName': 'groupName', 'scheduledWorkSpecId': 'scheduledWorkSpecId', 'prevTime': 'prevTime', 'nextTime': 'nextTime' } #Simple description to be shown to end-users self.description = None # str #The time at which the trigger should first fire. If the schedule has fired and will not fire again, this value will be null self.startTime = None # date-time #The time at which the trigger should quit repeating self.endTime = None # date-time #Contextual field used to identify work spcifications originating from the same source self.origin = None # str #Contextual field used by the service to identify an operation self.operation = None # str #UUID of the Task self.taskId = None # str #A grouping name that can be specified by the service to allow for filtered work spec retrieval self.groupName = None # str #UUID of the ScheduledWorkSpec associated with the scheduled task self.scheduledWorkSpecId = None # str #The previous time at which the trigger fired. If the trigger has not yet fired, null will be returned self.prevTime = None # date-time #The next time at which the trigger should fire self.nextTime = None # date-time
class Scheduleinfooutput(object): def __init__(self): """ Attributes: swaggerTypes (dict): The key is attribute name and the value is attribute type. attributeMap (dict): The key is attribute name and the value is json key in definition. """ self.swaggerTypes = {'description': 'str', 'startTime': 'date-time', 'endTime': 'date-time', 'origin': 'str', 'operation': 'str', 'taskId': 'str', 'groupName': 'str', 'scheduledWorkSpecId': 'str', 'prevTime': 'date-time', 'nextTime': 'date-time'} self.attributeMap = {'description': 'description', 'startTime': 'startTime', 'endTime': 'endTime', 'origin': 'origin', 'operation': 'operation', 'taskId': 'taskId', 'groupName': 'groupName', 'scheduledWorkSpecId': 'scheduledWorkSpecId', 'prevTime': 'prevTime', 'nextTime': 'nextTime'} self.description = None self.startTime = None self.endTime = None self.origin = None self.operation = None self.taskId = None self.groupName = None self.scheduledWorkSpecId = None self.prevTime = None self.nextTime = None