blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4cce22269773651f5e8260b8a82c4e13ef2f377d | e2de3f6fe4373f1d98b67af61dd558a813250d54 | /Algorithm/baekjoon/1546_평균.py | 01a88b1be3fc1e2c36405cedb1ab135649d5e59f | [] | no_license | Hansung-Lee/TIL | 3fd6d48427a8b24f7889116297143855d493535b | c24ebab8b631f5c1b835fdc8bd036acbebc8d187 | refs/heads/master | 2020-04-14T11:18:54.035863 | 2019-04-05T07:26:55 | 2019-04-05T07:26:55 | 163,810,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | import sys
# Baekjoon 1546: rescale every score by 100 / best score, then print the mean.
score_count = int(sys.stdin.readline())  # declared count; consumed from the stream
scores = list(map(int, sys.stdin.readline().split()))
best = max(scores)
adjusted = [score / best * 100 for score in scores]
print(sum(adjusted) / len(adjusted))
"ajtwlsgkst@naver.com"
] | ajtwlsgkst@naver.com |
27b0dfc843974ab2897bb7c12bc2b7fa9de0dd72 | e76f47d5e6752b838d5f7e23e22cfef65482b8e1 | /SeniorProject/pages/forms.py | d830a5d3dfe4395e0c5225d7ae57c111cf95cde5 | [] | no_license | AmirIdris/Final-Project | b006adfc4074df6687abaac83942b1b151300a51 | 7b0e28d01b7d5b4e4825d5d8b98ba193bd3f49e8 | refs/heads/master | 2023-06-10T21:13:12.875771 | 2021-07-08T20:23:59 | 2021-07-08T20:23:59 | 362,912,491 | 0 | 1 | null | 2021-07-08T20:24:00 | 2021-04-29T18:34:24 | CSS | UTF-8 | Python | false | false | 520 | py | from django import forms
from django.contrib.auth.forms import UserCreationForm,UserChangeForm
from django.contrib.auth.models import User
from django.forms import fields
class CustomUserCreationForm(UserCreationForm):
    """Sign-up form that additionally exposes ``is_staff`` as a
    "Traffic Police" checkbox on the standard user-creation form."""

    def __init__(self, *args, **kwargs):
        super(CustomUserCreationForm, self).__init__(*args, **kwargs)
        # Surface the staff flag so an account can be marked as traffic police.
        staff_field = forms.BooleanField(label=("Traffic Police"), required=False)
        self.fields['is_staff'] = staff_field

    class Meta:
        model = User
        fields = UserCreationForm.Meta.fields + ('is_staff',)
| [
"you@example.com"
] | you@example.com |
07dcdb14cb25c0f0e790ee172cf9949f2147abbd | 0b5cf584134d0334a408b3666f94a9daa3a20e30 | /week6/homework/parsetabjs.py | 2c76eb264c6cbc2156912ce3c77e80612784c118 | [] | no_license | benzitohhh/cs262-languages | fa63ba77da2147b0ddc968bbf8421f50e6bce3cb | 7b6d5d143b4df8260de3a03f0de5df832658515f | refs/heads/master | 2020-05-07T13:36:07.445605 | 2012-06-04T20:50:18 | 2012-06-04T20:50:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,046 | py |
# parsetabjs.py
# This file is automatically generated. Do not edit.
_tabversion = '3.2'
_lr_method = 'LALR'
_lr_signature = 'E\x04\xca\x9f\x90\x1e03o\x1e<\xe4\xc6H)\xc3'
_lr_action_items = {'OROR':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,30,-23,30,30,30,30,-29,30,30,-24,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,30,-9,-30,]),'RETURN':([0,3,4,7,8,13,14,17,27,33,42,43,44,45,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,82,83,84,88,90,91,92,93,94,],[1,-27,-4,-26,-25,-28,1,-23,1,-22,-29,-20,1,-13,-24,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,1,-17,-14,-19,-43,-16,1,-9,-30,-21,-13,-12,-3,-18,]),'NUMBER':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[8,8,8,-27,-4,-26,-25,8,8,-28,8,8,-23,8,8,8,8,8,8,8,-22,8,8,8,8,8,8,8,8,-29,-20,8,-13,-24,8,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,8,-17,-14,-19,-43,8,-16,8,-9,-30,-21,-13,-12,-3,-18,]),'WHILE':([0,3,4,7,8,13,14,17,27,33,42,43,44,45,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,82,83,84,88,90,91,92,93,94,],[2,-27,-4,-26,-25,-28,2,-23,2,-22,-29,-20,2,-13,-24,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,2,-17,-14,-19,-43,-16,2,-9,-30,-21,-13,-12,-3,-18,]),'TRUE':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[3,3,3,-27,-4,-26,-25,3,3,-28,3,3,-23,3,3,3,3,3,3,3,-22,3,3,3,3,3,3,3,3,-29,-20,3,-13,-24,3,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,3,-17,-14,-19,-43,3,-16,3,-9,-30,-21,-13,-12,-3,-18,]),'MINUS':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,40,-23,40,40,40,40,-29,40,40,-24,40,40,40,-35,40,-33,40,40,-31,40,-32,-34,-43,40,-9,-30,]),'DIVIDE':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,32
,-23,32,32,32,32,-29,32,32,-24,32,32,32,-35,32,-33,32,32,32,32,32,-34,-43,32,-9,-30,]),'LE':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,31,-23,31,31,31,31,-29,31,31,-24,31,31,-37,-35,-39,-33,-38,-40,-31,31,-32,-34,-43,31,-9,-30,]),'RPAREN':([3,7,8,13,17,22,23,25,42,47,48,49,50,51,52,53,54,59,60,61,62,63,64,65,66,67,68,69,70,76,80,84,86,87,88,],[-27,-26,-25,-28,-23,-45,-6,54,-29,-44,76,-47,-5,-8,79,-6,-24,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,89,-9,-46,-7,-30,]),'SEMICOLON':([3,5,7,8,13,15,17,19,33,42,43,45,46,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,73,74,75,76,81,82,84,88,90,91,92,94,],[-27,-23,-26,-25,-28,33,-23,43,-22,-29,-20,74,75,-24,-11,74,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-17,-14,-19,-43,90,-16,-9,-30,-21,74,-12,-18,]),'LT':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,36,-23,36,36,36,36,-29,36,36,-24,36,36,-37,-35,-39,-33,-38,-40,-31,36,-32,-34,-43,36,-9,-30,]),'PLUS':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,38,-23,38,38,38,38,-29,38,38,-24,38,38,38,-35,38,-33,38,38,-31,38,-32,-34,-43,38,-9,-30,]),'COMMA':([3,7,8,13,17,42,49,51,54,59,60,61,62,63,64,65,66,67,68,69,70,76,84,88,],[-27,-26,-25,-28,-23,-29,77,78,-24,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,-9,-30,]),'EQUALEQUAL':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,39,-23,39,39,39,39,-29,39,39,-24,39,39,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,39,-9,-30,]),'IDENTIFIER':([0,1,2,3,4,6,7,8,10,11,12,13,14,16,17,21,22,23,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,78,82,83,84,88,90,91,92,93,94,],[5,17,17,-27,-4,24,-26,-25,17,26,17,-28,5,17,-23,17,17,51,5,17,17,17,17,-22,17,17,17,17,17,17,17,17,-29,-20,5
,-13,51,-24,17,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,5,-17,-14,-19,-43,17,51,-16,5,-9,-30,-21,-13,-12,-3,-18,]),'$end':([0,4,9,14,28,33,43,45,56,57,58,73,74,75,82,84,90,91,92,93,94,],[-2,-4,0,-2,-1,-22,-20,-13,-11,-13,-12,-17,-14,-19,-16,-9,-21,-13,-12,-3,-18,]),'FUNCTION':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[6,18,18,-27,-4,-26,-25,18,18,-28,6,18,-23,18,18,18,18,18,18,18,-22,18,18,18,18,18,18,18,18,-29,-20,18,-13,-24,18,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,18,-17,-14,-19,-43,18,-16,18,-9,-30,-21,-13,-12,-3,-18,]),'GT':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,37,-23,37,37,37,37,-29,37,37,-24,37,37,-37,-35,-39,-33,-38,-40,-31,37,-32,-34,-43,37,-9,-30,]),'STRING':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[7,7,7,-27,-4,-26,-25,7,7,-28,7,7,-23,7,7,7,7,7,7,7,-22,7,7,7,7,7,7,7,7,-29,-20,7,-13,-24,7,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,7,-17,-14,-19,-43,7,-16,7,-9,-30,-21,-13,-12,-3,-18,]),'EQUAL':([5,26,],[21,55,]),'RBRACE':([33,43,44,45,56,57,58,71,72,73,74,75,82,84,85,90,91,92,94,],[-22,-20,-15,-13,-11,-13,-12,84,-15,-17,-14,-19,-16,-9,-10,-21,-13,-12,-18,]),'TIMES':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,35,-23,35,35,35,35,-29,35,35,-24,35,35,35,-35,35,-33,35,35,35,35,35,-34,-43,35,-9,-30,]),'GE':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,34,-23,34,34,34,34,-29,34,34,-24,34,34,-37,-35,-39,-33,-38,-40,-31,34,-32,-34,-43,34,-9,-30,]),'LPAREN':([0,1,2,3,4,5,6,7,8,10,12,13,14,16,17,18,21,22,24
,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[10,10,10,-27,-4,22,23,-26,-25,10,10,-28,10,10,22,23,10,10,53,10,10,10,10,10,-22,10,10,10,10,10,10,10,10,-29,-20,10,-13,-24,10,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,10,-17,-14,-19,-43,10,-16,10,-9,-30,-21,-13,-12,-3,-18,]),'VAR':([0,3,4,7,8,13,14,17,27,33,42,43,44,45,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,82,83,84,88,90,91,92,93,94,],[11,-27,-4,-26,-25,-28,11,-23,11,-22,-29,-20,11,-13,-24,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,11,-17,-14,-19,-43,-16,11,-9,-30,-21,-13,-12,-3,-18,]),'ELSE':([58,84,],[83,-9,]),'IF':([0,3,4,7,8,13,14,17,27,33,42,43,44,45,54,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,82,83,84,88,90,91,92,93,94,],[12,-27,-4,-26,-25,-28,12,-23,12,-22,-29,-20,12,-13,-24,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,12,-17,-14,-19,-43,-16,12,-9,-30,-21,-13,-12,-3,-18,]),'ANDAND':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,29,-23,29,29,29,29,-29,29,29,-24,-41,29,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,29,-9,-30,]),'LBRACE':([3,7,8,13,17,20,27,42,54,59,60,61,62,63,64,65,66,67,68,69,70,76,79,83,84,88,89,],[-27,-26,-25,-28,-23,44,44,-29,-24,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,-43,44,44,-9,-30,44,]),'FALSE':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[13,13,13,-27,-4,-26,-25,13,13,-28,13,13,-23,13,13,13,13,13,13,13,-22,13,13,13,13,13,13,13,13,-29,-20,13,-13,-24,13,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,13,-17,-14,-19,-43,13,-16,13,-9,-30,-21,-13,-12,-3,-18,]),'NOT':([0,1,2,3,4,7,8,10,12,13,14,16,17,21,22,27,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,54,55,56,
57,58,59,60,61,62,63,64,65,66,67,68,69,70,72,73,74,75,76,77,82,83,84,88,90,91,92,93,94,],[16,16,16,-27,-4,-26,-25,16,16,-28,16,16,-23,16,16,16,16,16,16,16,-22,16,16,16,16,16,16,16,16,-29,-20,16,-13,-24,16,-11,-13,-12,-41,-42,-37,-35,-39,-33,-38,-40,-31,-36,-32,-34,16,-17,-14,-19,-43,16,-16,16,-9,-30,-21,-13,-12,-3,-18,]),'MOD':([3,5,7,8,13,15,17,19,20,25,27,42,46,49,54,59,60,61,62,63,64,65,66,67,68,69,70,76,81,84,88,],[-27,-23,-26,-25,-28,41,-23,41,41,41,41,-29,41,41,-24,41,41,41,-35,41,-33,41,41,41,41,41,-34,-43,41,-9,-30,]),}
# Expand the compact per-token pairs ([state, ...], [action, ...]) into the
# LALR action table keyed as _lr_action[state][token] -> action.
_lr_action = { }
for _k, _v in _lr_action_items.items():
   for _x,_y in zip(_v[0],_v[1]):
      if not _x in _lr_action: _lr_action[_x] = { }
      _lr_action[_x][_k] = _y
# Free the compact form once the lookup table is built.
del _lr_action_items
_lr_goto_items = {'stmt_or_compound':([27,83,],[57,91,]),'sstmt':([0,14,27,44,72,83,],[4,4,56,72,72,56,]),'optsemi':([45,57,91,],[73,82,94,]),'args':([22,77,],[47,86,]),'element':([0,14,],[14,14,]),'stmts':([44,72,],[71,85,]),'params':([23,53,78,],[50,50,87,]),'exp':([0,1,2,10,12,14,16,21,22,27,29,30,31,32,34,35,36,37,38,39,40,41,44,55,72,77,83,],[15,19,20,25,27,15,42,46,49,15,59,60,61,62,63,64,65,66,67,68,69,70,15,81,15,49,15,]),'js':([0,14,],[9,28,]),'optparams':([23,53,],[52,80,]),'compoundstmt':([20,27,79,83,89,],[45,58,88,92,93,]),'optargs':([22,],[48,]),}
# Same expansion for the goto table: _lr_goto[state][nonterminal] -> next state.
_lr_goto = { }
for _k, _v in _lr_goto_items.items():
   for _x,_y in zip(_v[0],_v[1]):
       if not _x in _lr_goto: _lr_goto[_x] = { }
       _lr_goto[_x][_k] = _y
# Free the compact form once the lookup table is built.
del _lr_goto_items
_lr_productions = [
("S' -> js","S'",1,None,None,None),
('js -> element js','js',2,'p_js','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',23),
('js -> <empty>','js',0,'p_js_empty','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',26),
('element -> FUNCTION IDENTIFIER LPAREN optparams RPAREN compoundstmt','element',6,'p_element_function','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',30),
('element -> sstmt','element',1,'p_element_stmt','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',34),
('optparams -> params','optparams',1,'p_optparams','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',38),
('optparams -> <empty>','optparams',0,'p_optparams_empty','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',41),
('params -> IDENTIFIER COMMA params','params',3,'p_params','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',44),
('params -> IDENTIFIER','params',1,'p_params_one','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',47),
('compoundstmt -> LBRACE stmts RBRACE','compoundstmt',3,'p_compoundstmt','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',50),
('stmts -> sstmt stmts','stmts',2,'p_stmts','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',53),
('stmt_or_compound -> sstmt','stmt_or_compound',1,'p_stmt_or_compound','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',57),
('stmt_or_compound -> compoundstmt','stmt_or_compound',1,'p_stmt_or_compound_c','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',60),
('optsemi -> <empty>','optsemi',0,'p_optsemi_none','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',64),
('optsemi -> SEMICOLON','optsemi',1,'p_optsemi_some','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',68),
('stmts -> <empty>','stmts',0,'p_stmts_empty','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',72),
('sstmt -> IF exp stmt_or_compound optsemi','sstmt',4,'p_sstmt_if','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',76),
('sstmt -> WHILE exp compoundstmt optsemi','sstmt',4,'p_sstmt_while','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',79),
('sstmt -> IF exp compoundstmt ELSE stmt_or_compound optsemi','sstmt',6,'p_sstmt_if_else','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',82),
('sstmt -> IDENTIFIER EQUAL exp SEMICOLON','sstmt',4,'p_sstmt_assigment','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',85),
('sstmt -> RETURN exp SEMICOLON','sstmt',3,'p_sstmt_return','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',88),
('sstmt -> VAR IDENTIFIER EQUAL exp SEMICOLON','sstmt',5,'p_sstmt_var','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',91),
('sstmt -> exp SEMICOLON','sstmt',2,'p_sstmt_exp','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',94),
('exp -> IDENTIFIER','exp',1,'p_exp_identifier','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',98),
('exp -> LPAREN exp RPAREN','exp',3,'p_exp_paren','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',101),
('exp -> NUMBER','exp',1,'p_exp_number','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',104),
('exp -> STRING','exp',1,'p_exp_string','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',107),
('exp -> TRUE','exp',1,'p_exp_true','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',110),
('exp -> FALSE','exp',1,'p_exp_false','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',113),
('exp -> NOT exp','exp',2,'p_exp_not','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',116),
('exp -> FUNCTION LPAREN optparams RPAREN compoundstmt','exp',5,'p_exp_lambda','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',119),
('exp -> exp PLUS exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',122),
('exp -> exp MINUS exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',123),
('exp -> exp TIMES exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',124),
('exp -> exp MOD exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',125),
('exp -> exp DIVIDE exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',126),
('exp -> exp EQUALEQUAL exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',127),
('exp -> exp LE exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',128),
('exp -> exp LT exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',129),
('exp -> exp GE exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',130),
('exp -> exp GT exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',131),
('exp -> exp ANDAND exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',132),
('exp -> exp OROR exp','exp',3,'p_exp_binop','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',133),
('exp -> IDENTIFIER LPAREN optargs RPAREN','exp',4,'p_exp_call','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',136),
('optargs -> args','optargs',1,'p_optargs','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',140),
('optargs -> <empty>','optargs',0,'p_optargs_empty','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',143),
('args -> exp COMMA args','args',3,'p_args','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',146),
('args -> exp','args',1,'p_args_one','/Users/immanuel_ben/Desktop/cs262-languages/week6/homework/jsgrammar.py',149),
]
| [
"immanuel_ben@yahoo.co.uk"
] | immanuel_ben@yahoo.co.uk |
6abe0bee3a06fab69de5f3271544996e2c44bdba | 1c1f8e6e66dad142d35c88710d52b25d6e0640c6 | /tests/inventory/test_macos_apps_views.py | 765458fba224a26a3e12594b44e0465091fe4957 | [
"Apache-2.0"
] | permissive | ChefAustin/zentral | 4a5190434f9010f71385bff4c2b6f02120b651ed | 1749eeb2a0c727d3bff7a3b893158c6fe36e9d9c | refs/heads/main | 2023-01-23T00:52:12.417530 | 2020-12-03T07:41:36 | 2020-12-03T07:41:36 | 318,111,990 | 1 | 0 | Apache-2.0 | 2020-12-03T07:33:27 | 2020-12-03T07:33:27 | null | UTF-8 | Python | false | false | 6,759 | py | from datetime import datetime
from django.urls import reverse
from django.utils.http import urlencode
from django.test import TestCase, override_settings
from zentral.contrib.inventory.models import MachineSnapshotCommit
from accounts.models import User
@override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage')
class MacOSAppsViewsTestCase(TestCase):
    """View tests for the inventory macOS application pages.

    One user and one machine snapshot (with a single signed app instance)
    are committed once for the whole TestCase; the tests then exercise the
    application list, its search filters, the application detail page and
    the per-instance machine list.
    """

    @classmethod
    def setUpTestData(cls):
        """Create the shared fixtures: a user and a committed snapshot tree."""
        # user
        cls.pwd = "godzillapwd"
        cls.user = User.objects.create_user("godzilla", "godzilla@zentral.io", cls.pwd)
        # machine snapshot
        cls.computer_name = "yolozulu"
        source = {"module": "tests.zentral.io", "name": "Zentral Tests"}
        # Snapshot tree with one app instance signed by a three-level
        # certificate chain (developer ID cert -> Apple intermediate -> Apple root).
        tree = {
            "source": source,
            "business_unit": {"name": "yo bu",
                              "reference": "bu1",
                              "source": source,
                              "links": [{"anchor_text": "bu link",
                                         "url": "http://bu-link.de"}]},
            "groups": [{"name": "yo grp",
                        "reference": "grp1",
                        "source": source,
                        "links": [{"anchor_text": "group link",
                                   "url": "http://group-link.de"}]}],
            "serial_number": "0123456789",
            "system_info": {"computer_name": cls.computer_name},
            "os_version": {'name': 'OS X', 'major': 10, 'minor': 11, 'patch': 1},
            "osx_app_instances": [
                {'app': {'bundle_id': 'io.zentral.baller',
                         'bundle_name': 'Baller.app',
                         'bundle_version': '123',
                         'bundle_version_str': '1.2.3'},
                 'bundle_path': "/Applications/Baller.app",
                 'signed_by': {
                     "common_name": "Developer ID Application: GODZILLA",
                     "organization": "GOZILLA INC",
                     "organizational_unit": "ATOM",
                     "sha_1": 40 * "a",
                     "sha_256": 64 * "a",
                     "valid_from": datetime(2015, 1, 1),
                     "valid_until": datetime(2026, 1, 1),
                     "signed_by": {
                         "common_name": "Developer ID Certification Authority",
                         "organization": "Apple Inc.",
                         "organizational_unit": "Apple Certification Authority",
                         "sha_1": "3b166c3b7dc4b751c9fe2afab9135641e388e186",
                         "sha_256": "7afc9d01a62f03a2de9637936d4afe68090d2de18d03f29c88cfb0b1ba63587f",
                         "valid_from": datetime(2012, 12, 1),
                         "valid_until": datetime(2027, 12, 1),
                         "signed_by": {
                             "common_name": "Apple Root CA",
                             "organization": "Apple Inc.",
                             "organizational_unit": "Apple Certification Authority",
                             "sha_1": "611e5b662c593a08ff58d14ae22452d198df6c60",
                             "sha_256": "b0b1730ecbc7ff4505142c49f1295e6eda6bcaed7e2c68c5be91b5a11001f024",
                             "valid_from": datetime(2006, 4, 25),
                             "valid_until": datetime(2035, 2, 9)
                         }
                     }
                 }}
            ]
        }
        _, cls.ms = MachineSnapshotCommit.objects.commit_machine_snapshot_tree(tree)
        cls.osx_app_instance = cls.ms.osx_app_instances.all()[0]
        cls.osx_app = cls.osx_app_instance.app

    def log_user_in(self):
        """Authenticate the test client as the fixture user."""
        self.client.post(reverse('login'), {'username': self.user.username, 'password': self.pwd})

    def test_macos_apps(self):
        """The application list shows the single fixture app."""
        self.log_user_in()
        response = self.client.get(reverse("inventory:macos_apps"))
        self.assertContains(response, "1 macOS application")

    def test_macos_apps_sha_256_search(self):
        """sha_256 search matches both the certificate and the binary
        signature digests, rejects an invalid digest with a form error,
        and returns no result for an unknown digest."""
        self.log_user_in()
        # cert signature
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"sha_256": "7afc9d01a62f03a2de9637936d4afe68090d2de18d03f29c88cfb0b1ba63587f"})
        ))
        self.assertContains(response, "1 macOS application")
        # binary signature
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"sha_256": 64 * "a"})
        ))
        self.assertContains(response, "1 macOS application")
        # bad sha 256 ("z" is not a hex digit)
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"sha_256": 64 * "z"})
        ))
        self.assertFormError(response, "search_form", "sha_256", "Enter a valid sha256.")
        # another sha 256 (valid hex, but not in the fixture)
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"sha_256": 64 * "f"})
        ))
        self.assertContains(response, "0 macOS applications")

    def test_macos_apps_source_search(self):
        """Filtering by the snapshot's source id finds the application."""
        self.log_user_in()
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"source": self.ms.source.id})
        ))
        self.assertContains(response, "1 macOS application")

    def test_macos_apps_bundle_name(self):
        """Bundle-name search finds the app for "baller" (fixture bundle is
        "Baller.app") and nothing for an unrelated name."""
        self.log_user_in()
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"bundle_name": "baller"})
        ))
        self.assertContains(response, "1 macOS application")
        response = self.client.get("{}?{}".format(
            reverse("inventory:macos_apps"),
            urlencode({"bundle_name": "yolo"})
        ))
        self.assertContains(response, "0 macOS applications")

    def test_macos_app(self):
        """The app detail page shows name/version, the instance count and
        the signing certificate digest."""
        self.log_user_in()
        response = self.client.get(reverse("inventory:macos_app", args=(self.osx_app.id,)))
        self.assertContains(response, "Baller.app 1.2.3")
        self.assertContains(response, "1 application instance")
        self.assertContains(response, self.osx_app_instance.signed_by.sha_256)

    def test_macos_app_instance_machines(self):
        """The per-instance machine list shows the single fixture machine."""
        self.log_user_in()
        response = self.client.get(reverse("inventory:macos_app_instance_machines",
                                           args=(self.osx_app.id, self.osx_app_instance.id)),
                                   follow=True)
        self.assertContains(response, "Baller.app 1.2.3")
        self.assertContains(response, "1 Machine")
        self.assertContains(response, self.osx_app_instance.signed_by.sha_256)
        self.assertContains(response, self.computer_name)
"eric.falconnier@112hz.com"
] | eric.falconnier@112hz.com |
a3c79f13e09b4df0d392ba14bcce693c8a8e0004 | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/scaleform/daapi/view/lobby/header/BattleTypeSelectPopover.py | 17cc2c813eae3014346e7d39c69d740d4c574aec | [] | no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 4,906 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/header/BattleTypeSelectPopover.py
from adisp import process
from frameworks.wulf import WindowLayer
from gui.Scaleform.daapi.settings.views import VIEW_ALIAS
from gui.Scaleform.daapi.view.lobby.header import battle_selector_items
from gui.Scaleform.daapi.view.meta.BattleTypeSelectPopoverMeta import BattleTypeSelectPopoverMeta
from gui.Scaleform.framework.managers.containers import POP_UP_CRITERIA
from gui.Scaleform.framework.managers.loaders import SFViewLoadParams
from gui.Scaleform.genConsts.TOOLTIPS_CONSTANTS import TOOLTIPS_CONSTANTS
from gui.Scaleform.locale.TOOLTIPS import TOOLTIPS
from gui.prb_control.settings import PREBATTLE_ACTION_NAME
from gui.shared import EVENT_BUS_SCOPE
from gui.shared.events import LoadViewEvent
from helpers import dependency
from skeletons.gui.game_control import IRankedBattlesController
from skeletons.gui.lobby_context import ILobbyContext
class BattleTypeSelectPopover(BattleTypeSelectPopoverMeta):
    """Lobby-header popover that lists the selectable battle types and
    hands tooltip descriptors to the Flash side."""
    # Injected singletons, resolved lazily through the dependency manager.
    __rankedController = dependency.descriptor(IRankedBattlesController)
    __lobbyContext = dependency.descriptor(ILobbyContext)

    def __init__(self, _=None):
        super(BattleTypeSelectPopover, self).__init__()
        # Last tooltip id handed out by getTooltipData.
        self._tooltip = None
        return

    def selectFight(self, actionName):
        """Flash entry point: select the given prebattle action (async)."""
        self.__selectFight(actionName)

    def getTooltipData(self, itemData, itemIsDisabled):
        """Map a PREBATTLE_ACTION_NAME value to its tooltip descriptor.

        Returns ``None`` for a ``None`` item, otherwise a dict with the
        tooltip id and an ``isSpecial`` flag (complex/rich tooltip).
        ``itemIsDisabled`` only affects the strongholds entry, which has a
        dedicated "disabled" tooltip. The chosen tooltip id is also cached
        on ``self._tooltip``.
        """
        if itemData is None:
            return
        else:
            tooltip = ''
            isSpecial = False
            if itemData == PREBATTLE_ACTION_NAME.RANDOM:
                tooltip = TOOLTIPS.BATTLETYPES_STANDART
            elif itemData == PREBATTLE_ACTION_NAME.EPIC:
                tooltip, isSpecial = self.__getEpicAvailabilityData()
            elif itemData == PREBATTLE_ACTION_NAME.RANKED:
                # Ranked tooltip depends on the mode's current availability.
                tooltip, isSpecial = self.__getRankedAvailabilityData()
            elif itemData == PREBATTLE_ACTION_NAME.E_SPORT:
                tooltip = TOOLTIPS.BATTLETYPES_UNIT
            elif itemData == PREBATTLE_ACTION_NAME.STRONGHOLDS_BATTLES_LIST:
                if not itemIsDisabled:
                    tooltip = TOOLTIPS.BATTLETYPES_STRONGHOLDS
                else:
                    tooltip = TOOLTIPS.BATTLETYPES_STRONGHOLDS_DISABLED
            elif itemData == PREBATTLE_ACTION_NAME.TRAININGS_LIST:
                tooltip = TOOLTIPS.BATTLETYPES_TRAINING
            elif itemData == PREBATTLE_ACTION_NAME.EPIC_TRAINING_LIST:
                tooltip = TOOLTIPS.BATTLETYPES_EPIC_TRAINING
            elif itemData == PREBATTLE_ACTION_NAME.SPEC_BATTLES_LIST:
                tooltip = TOOLTIPS.BATTLETYPES_SPEC
            elif itemData == PREBATTLE_ACTION_NAME.BATTLE_TUTORIAL:
                tooltip = TOOLTIPS.BATTLETYPES_BATTLETUTORIAL
            elif itemData == PREBATTLE_ACTION_NAME.SANDBOX:
                isSpecial = True
                tooltip = TOOLTIPS_CONSTANTS.BATTLE_TRAINING
            elif itemData == PREBATTLE_ACTION_NAME.BATTLE_ROYALE:
                tooltip = TOOLTIPS_CONSTANTS.BATTLE_ROYALE_SELECTOR_INFO
                isSpecial = True
            elif itemData == PREBATTLE_ACTION_NAME.MAPBOX:
                tooltip = TOOLTIPS_CONSTANTS.MAPBOX_SELECTOR_INFO
                isSpecial = True
            elif itemData == PREBATTLE_ACTION_NAME.EVENT_BATTLE or itemData == PREBATTLE_ACTION_NAME.EVENT_SQUAD:
                isSpecial = True
                tooltip = TOOLTIPS_CONSTANTS.EVENT_BATTLES_SELECTOR_INFO
            result = {'isSpecial': isSpecial,
             'tooltip': tooltip}
            self._tooltip = tooltip
            return result

    def demoClick(self):
        """Toggle the demonstrator window: close it if open, load it otherwise."""
        demonstratorWindow = self.app.containerManager.getView(WindowLayer.WINDOW, criteria={POP_UP_CRITERIA.VIEW_ALIAS: VIEW_ALIAS.DEMONSTRATOR_WINDOW})
        if demonstratorWindow is not None:
            demonstratorWindow.onWindowClose()
        else:
            self.fireEvent(LoadViewEvent(SFViewLoadParams(VIEW_ALIAS.DEMONSTRATOR_WINDOW)), EVENT_BUS_SCOPE.LOBBY)
        return

    def update(self):
        """Push the current battle selector item VOs to Flash (no-op once
        the view has been disposed)."""
        if not self.isDisposed():
            self.as_updateS(*battle_selector_items.getItems().getVOs())

    def _populate(self):
        # View lifecycle hook: fill the popover right after creation.
        super(BattleTypeSelectPopover, self)._populate()
        self.update()

    def __getRankedAvailabilityData(self):
        """Return (tooltip id, isSpecial) for the ranked entry, depending on
        whether ranked battles are currently available."""
        return (TOOLTIPS_CONSTANTS.RANKED_SELECTOR_INFO, True) if self.__rankedController.isAvailable() else (TOOLTIPS_CONSTANTS.RANKED_UNAVAILABLE_INFO, True)

    def __getEpicAvailabilityData(self):
        """Return (tooltip id, isSpecial) for the epic battle entry."""
        return (TOOLTIPS_CONSTANTS.EPIC_BATTLE_SELECTOR_INFO, True)

    @process
    def __selectFight(self, actionName):
        """Asynchronously select the prebattle action, aborting when header
        navigation is currently not possible."""
        navigationPossible = yield self.__lobbyContext.isHeaderNavigationPossible()
        if not navigationPossible:
            return
        battle_selector_items.getItems().select(actionName)
"StranikS_Scan@mail.ru"
] | StranikS_Scan@mail.ru |
8924fe0c6f40a20395b0789d49f31baaa30cc805 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /5S97Me79PDAefLEXv_17.py | c79aeef96fef03d2d9fb182f6d28a6120e6e6907 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py |
def lambda_to_def(code):
    """Convert a one-line ``name = lambda args: body`` string into an
    equivalent ``def`` statement string.

    The colon ending the parameter list is normally the first ``:`` in the
    source; if a later colon exists and is not immediately followed by a
    quote (i.e. not a colon inside a string value such as ``': '``), that
    later colon is taken as the separator instead.
    """
    assign_at = code.index("=")
    lambda_at = code.index("lambda")
    colon_at = code.index(":")
    # str.find returns -1 instead of raising, replacing the try/except.
    next_colon = code.find(":", colon_at + 1)
    if next_colon != -1 and code[next_colon + 1] != "'":
        colon_at = next_colon
    name = code[:assign_at - 1]          # drops the space before "="
    params = code[lambda_at + 7:colon_at]  # text between "lambda " and the colon
    body = code[colon_at + 1:]             # keeps the leading space
    return "def " + name + "(" + params + "):\n\treturn" + body
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
414740141acf860e1fbaf7a03a2bb1f455ce5bb1 | 0c66e77b8449b6cc4740951311ff5655db43e1a2 | /corral/qa.py | ec499b7351e2f7e285a5360ecd6456b5a9a65ee2 | [] | no_license | EdwardBetts/corral | b3b2c6731dae8486c77683953eef0ae3ce8eef9e | ad46cfa39a7620636cbc35c5f037b74c0406e02f | refs/heads/master | 2021-01-16T22:02:51.352888 | 2016-04-06T05:36:46 | 2016-04-06T05:36:46 | 56,488,397 | 0 | 0 | null | 2016-04-18T07:58:23 | 2016-04-18T07:58:22 | null | UTF-8 | Python | false | false | 12,644 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# IMPORT
# =============================================================================
import abc
import logging
import unittest
import inspect
import pkgutil
import os
import sys
import tempfile
import multiprocessing
from collections import defaultdict, OrderedDict
import six
import mock
import xmltodict
from flake8 import engine, reporter
import sh
from . import util, conf, core, db, run, cli
from .db import database_exists, create_database, drop_database
from .exceptions import ImproperlyConfigured
from .run.base import Processor
# =============================================================================
# CONSTANTS
# =============================================================================
# Dotted path of the pipeline's tests module, e.g. "mypipeline.tests".
TESTS_MODULE = "{}.tests".format(conf.PACKAGE)

IS_WINDOWS = sys.platform.startswith("win")

# Letter grade for each rounded 0-10 QA index (see QAResult.qai_score).
DEFAULT_SCORE_COMMENTS = {
    0: "F",
    1: "F+",
    2: "E",
    3: "E+",
    4: "D",
    5: "D+",
    6: "C",
    7: "C+",
    8: "B",
    9: "A",
    10: "A+"
}

# Projects may override the grading table through the SCORE_COMMENTS setting.
SCORE_COMMENTS = conf.settings.get("SCORE_COMMENTS", DEFAULT_SCORE_COMMENTS)
# =============================================================================
# TEST CASE
# =============================================================================
@six.add_metaclass(abc.ABCMeta)
class TestCase(unittest.TestCase):
    """Base class for pipeline processor tests.

    A concrete subclass declares a ``subject`` attribute (the Loader, Step
    or Alert under test), may seed the stream in ``setup()``, and must
    check the outcome in ``validate()``.  ``runTest`` drives the
    setup -> execute -> validate -> teardown cycle, giving each phase its
    own SQLAlchemy session scope.
    """

    @classmethod
    def get_subject(cls):
        # The processor class this test case exercises (None if undeclared).
        return getattr(cls, "subject", None)

    def __init__(self, conn, proc):
        super(TestCase, self).__init__()
        self.__conn = conn
        self.__proc = proc
        self.__session = None

    def runTest(self):
        with db.session_scope() as session:
            self.__session = session
            self.setup()
        self.execute_processor()
        with db.session_scope() as session:
            self.__session = session
            self.validate()
        with db.session_scope() as session:
            self.__session = session
            self.teardown()

    def setUp(self):
        # unittest hook: recreate an empty test database before every run.
        if database_exists(self.conn):
            drop_database(self.conn)
        create_database(self.conn)
        db.create_all()

    def setup(self):
        """Optional hook: seed the stream before the processor runs."""
        pass

    def execute_processor(self):
        # Dispatch to the runner matching the processor type.
        proc = self.__proc
        if issubclass(proc, run.Loader):
            # BUGFIX: was ``run.execute_loader(self.proc, sync=True)`` --
            # no ``proc`` attribute exists (the property is ``processor``),
            # so every Loader test died with AttributeError.
            run.execute_loader(proc, sync=True)
        elif issubclass(proc, run.Step):
            run.execute_step(proc, sync=True)
        elif issubclass(proc, run.Alert):
            run.execute_alert(proc, sync=True)

    @abc.abstractmethod
    def validate(self):
        """Required hook: assert on the stream after the processor ran."""
        raise NotImplementedError()

    def tearDown(self):
        # unittest hook: drop the test database after the run.
        if database_exists(self.conn):
            drop_database(self.conn)

    def teardown(self):
        """Optional hook: cleanup executed in its own session scope."""
        pass

    # PY2 COMP
    if six.PY2:
        assertRaisesRegex = six.assertRaisesRegex
        assertCountEqual = six.assertCountEqual

    # asserts

    def save(self, obj):
        # Persist ``obj`` only when it is a stream model instance.
        if isinstance(obj, db.Model):
            self.session.add(obj)

    def delete(self, obj):
        if isinstance(obj, db.Model):
            self.session.delete(obj)

    def assertStreamHas(self, model, *filters):
        """Fail unless at least one ``model`` row matches ``filters``."""
        query = self.__session.query(model)
        if filters:
            query = query.filter(*filters)
        if query.count() == 0:
            filters_str = ", ".join(["'{}'".format(str(f)) for f in filters])
            msg = "Model '{}' with filters {} not found".format(
                model.__name__, filters_str)
            self.fail(msg)

    def assertStreamHasNot(self, model, *filters):
        """Fail if any ``model`` row matches ``filters``."""
        query = self.__session.query(model)
        if filters:
            query = query.filter(*filters)
        if query.count() != 0:
            filters_str = ", ".join(["'{}'".format(str(f)) for f in filters])
            msg = "Model '{}' with filters {} found".format(
                model.__name__, filters_str)
            self.fail(msg)

    def assertStreamCount(self, expected, model, *filters):
        """Fail unless exactly ``expected`` rows of ``model`` match."""
        query = self.__session.query(model)
        if filters:
            query = query.filter(*filters)
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(query.count(), expected)

    @property
    def processor(self):
        return self.__proc

    @property
    def session(self):
        return self.__session

    @property
    def conn(self):
        return self.__conn
class QAResult(object):
    """Aggregates test-suite, coverage and style results into a QA index."""

    def __init__(self, processors, ts_report, cov_report, style_report):
        self._processors = processors
        self._ts_report = ts_report
        self._cov_report, self._cov_xml = cov_report
        self._style_report, self._style_report_text = style_report

    def qai(self):
        """QAI = (TP * (T/PN) * COV) / (1+MSE)

        Where:
            TP: If all tests passes is 1, 0 otherwise.
            T: The number of test cases.
            PN: The number number of processors (Loader, Steps and Alerts).
            COV: The code coverage (between 0 and 1).
            MSE: The Maintainability and Style Errors.
        """
        passed = 1. if self._ts_report.wasSuccessful() else 0.
        tests_per_proc = float(self._ts_report.testsRun) / len(self._processors)
        coverage = float(self._cov_xml["coverage"]["@line-rate"])
        style_penalty = 1. + self._style_report.total_errors
        return (passed * tests_per_proc * coverage) / style_penalty

    def qai_score(self):
        """Letter grade for the QA index, falling back to the raw number."""
        rounded = int(round(self.qai() * 10))
        return SCORE_COMMENTS.get(rounded, str(rounded))

    def full_output(self):
        """Coverage and style reports concatenated for console display."""
        sections = [
            "", "**** COVERAGE ****", str(self._cov_report), "-" * 80,
            "", "**** MAINTAINABILITY & STYLE ****",
            self._style_report_text, "-" * 80,
        ]
        return "\n".join(sections)

    def resume(self):
        """Summary table (label -> value) of every QA metric."""
        pairs = (
            ("Tests Sucess", self._ts_report.wasSuccessful()),  # (sic)
            ("Test Number", self._ts_report.testsRun),
            ("Procesors Number", len(self._processors)),  # (sic)
            ("Coverage (%)", "{:.2f}%".format(
                float(self._cov_xml["coverage"]["@line-rate"]) * 100)),
            ("Maintainability & Style Errors",
             self._style_report.total_errors),
            ("-> QA Index (%)", "{0:.2f}%".format(self.qai() * 100)),
            ("-> QA Score", self.qai_score()),
        )
        return OrderedDict(pairs)
# =============================================================================
# FUNCTIONS
# =============================================================================
def get_test_module():
    """Import and return the pipeline's tests module (``<package>.tests``)."""
    return util.dimport(TESTS_MODULE)
def retrieve_all_pipeline_modules_names():
    """Collect the dotted names of every module belonging to the pipeline.

    Combines the well-known modules (settings, models, commands, loader,
    steps, alerts) with a recursive scan of the pipeline package, and
    de-duplicates the result.
    """

    def recursive_search(pkg_name):
        # Walk a package recursively, accumulating full dotted module names.
        modules = []
        pkg = util.dimport(pkg_name)
        for mod, mod_name, is_pkg in pkgutil.iter_modules(pkg.__path__):
            mod_fullname = ".".join([pkg_name, mod_name])
            modules.append(mod_fullname)
            if is_pkg:
                modules.extend(recursive_search(mod_fullname))
        return modules

    # NOTE(review): PIPELINE_SETUP is listed twice; harmless because the
    # result goes through set(), but probably unintended.
    modules_names = [
        TESTS_MODULE,
        db.MODELS_MODULE, cli.COMMANDS_MODULE,
        conf.CORRAL_SETTINGS_MODULE, conf.PACKAGE,
        conf.settings.PIPELINE_SETUP, conf.settings.PIPELINE_SETUP,
        conf.settings.LOADER]
    modules_names.extend(conf.settings.STEPS)
    modules_names.extend(conf.settings.ALERTS)
    modules_names.extend(recursive_search(conf.PACKAGE))
    # Deduplicate; tuple order is therefore arbitrary.
    return tuple(set(modules_names))
def get_processors_testcases(processors, test_module):
    """Pair each processor with the instantiated TestCases targeting it.

    Scans ``test_module`` for TestCase subclasses, groups them by their
    declared ``subject`` processor, and returns a list of
    ``(processor, [testcase instances])`` tuples.

    Raises ImproperlyConfigured when a TestCase subject is not a Processor.
    """
    buff = defaultdict(list)
    for cls in vars(test_module).values():
        if inspect.isclass(cls) and issubclass(cls, TestCase):
            subject = cls.get_subject()
            if not issubclass(subject, Processor):
                msg = "'{}' subject must be a Processor instance. Found '{}'"
                raise ImproperlyConfigured(msg.format(subject, type(subject)))
            buff[subject].append(cls)

    # Tests run against the test database URL, never the production one.
    db_url = db.get_url(core.in_test())
    testscases = []
    for proc in processors:
        tests = [cls(db_url, proc) for cls in buff[proc]]
        testscases.append((proc, tests))
    return testscases
def run_tests(processors, failfast, verbosity, default_logging=False):
    """Run every TestCase registered for ``processors`` and return the result.

    Unless ``default_logging`` is set, Corral and SQLAlchemy loggers are
    silenced to WARNING so the unittest output stays readable.
    """
    if not default_logging:
        for k, logger in logging.Logger.manager.loggerDict.items():
            if k.startswith("Corral") or k.startswith("sqlalchemy"):
                # loggerDict may contain PlaceHolder objects, hence the check.
                if isinstance(logger, logging.Logger):
                    logger.setLevel(logging.WARNING)

    suite = unittest.TestSuite()
    test_module = get_test_module()
    testcases = get_processors_testcases(processors, test_module)
    for _, tests in testcases:
        suite.addTests(tests)

    runner = unittest.runner.TextTestRunner(
        verbosity=verbosity, failfast=failfast)
    suite_result = runner.run(suite)
    return suite_result
def run_coverage(failfast, verbosity, default_logging=False):
    """Re-run the test suite under coverage.py in a subprocess.

    Returns ``(text_report, xml_report_dict)`` where the XML report is
    parsed with xmltodict (coverage rate lives under
    ``["coverage"]["@line-rate"]``).
    """
    report, xml_report = None, None
    # Re-invoke this very CLI ("<executable> test") under coverage.
    executable = os.path.join(os.getcwd(), sys.argv[0])
    to_coverage = ",".join(retrieve_all_pipeline_modules_names())

    # Translate our flags into "corral test" subprocess flags; the sh
    # library turns these keys into --long-options.
    params = {}
    if failfast:
        params["failfast"] = True
    if verbosity == 1:
        params["verbose"] = True
    elif verbosity > 1:
        params["vverbose"] = True
    if default_logging:
        params["default-logging"] = True

    sh.coverage.erase()
    try:
        sh.coverage.run("--source", to_coverage, executable, "test", **params)
    except sh.ErrorReturnCode as err:
        # A failing test suite is still worth a coverage report; just log it.
        core.logger.error(err)

    report = sh.coverage.report()
    with tempfile.NamedTemporaryFile() as tfp:
        sh.coverage.xml(ignore_errors=True, o=tfp.name, _no_out=True)
        with open(tfp.name) as fp:
            xml_src = fp.read()
    xml_report = xmltodict.parse(xml_src)
    return report, xml_report
def run_style():
    """Run flake8 over the whole pipeline and return ``(report, text)``.

    ``report.total_errors`` holds the error count; ``text`` is a
    human-readable listing (empty string when the code is clean).
    """
    prj_path = util.dimport(conf.PACKAGE).__file__
    # Length of the prefix to strip so reported paths are project-relative.
    prj_path_len = len(os.path.dirname(os.path.dirname(prj_path)))

    # THANKS
    # https://github.com/oTree-org/otree-core/blob/96d6ffa/tests/test_style.py
    class FileCollectReport(reporter.QueueReport):
        # flake8 reporter that additionally collects every error so we can
        # render them ourselves (flake8 workers run in subprocesses, hence
        # the multiprocessing queue).

        def __init__(self, *args, **kwargs):
            super(FileCollectReport, self).__init__(*args, **kwargs)
            self._errs_queue = multiprocessing.Queue()
            self._errors = []

        def error(self, line_number, offset, text, check):
            super(FileCollectReport, self).error(
                line_number, offset, text, check)
            self._errs_queue.put((self.filename, line_number, offset, text))

        def error_list(self):
            # Drain the queue into formatted "file:line:col: message" strings.
            while self._errs_queue.qsize():
                filepath, line_number, offset, text = (
                    self._errs_queue.get_nowait())
                filename = filepath[prj_path_len:]
                error = u"{}:{}:{}: {}".format(
                    filename, line_number, offset, text)
                self._errors.append(error)
            return tuple(self._errors)

    def configure_pep8():
        if IS_WINDOWS:
            # WINDOWS UGLY AND HACKISH PATCH for flake 8 is based on
            # http://goo.gl/2b53SG
            sys.argv.append(".")
            pep8 = engine.get_style_guide(jobs=1)
        else:
            pep8 = engine.get_style_guide()
        # Swap in our collecting reporter before any file is checked.
        pep8.reporter = FileCollectReport
        report = pep8.init_report(pep8.reporter)
        report.input_file = pep8.input_file
        pep8.runner = report.task_queue.put
        return pep8

    pep8 = configure_pep8()

    # Check the top-level package directory of every pipeline module.
    mod_names = retrieve_all_pipeline_modules_names()
    top_mod_names = set([mn.split(".", 1)[0] for mn in mod_names])
    paths = [
        os.path.dirname(util.dimport(mn).__file__) for mn in top_mod_names]
    for path in paths:
        pep8.paths.append(path)

    # flake8 prints as it goes; silence stdout and use our collected errors.
    with mock.patch("sys.stdout"):
        report = pep8.check_files()

    if report.total_errors:
        lines = ["Found pep8-style errors."]
        lines.append(
            "Please check the Python code style reference: "
            "https://www.python.org/dev/peps/pep-0008/"
        )
        lines.append("\nErrors found: ")
        for error in report.error_list():
            if error.startswith("/") or error.startswith("\\"):
                error = error[1:]
            lines.append(error)
        text = "\n".join(lines)
    else:
        text = ""
    return report, text
def qa_report(processors, verbosity, *args, **kwargs):
    """Run the full QA pipeline (tests, coverage, style) for ``processors``
    and bundle everything into a QAResult."""
    tests = run_tests(
        processors, failfast=False, verbosity=verbosity, *args, **kwargs)
    coverage = run_coverage(failfast=False, verbosity=0, default_logging=False)
    style = run_style()
    return QAResult(processors, tests, coverage, style)
| [
"jbc.develop@gmail.com"
] | jbc.develop@gmail.com |
5f49a46cba72f2d75b3f06ab89300e953baf3963 | b75a259624cd91c75d584b9f9548a2b7e179a81f | /models/LSTM_Classifier.py | 83a458cfc4d9ca4942586f6f0e1d74485a9dbd0a | [] | no_license | lucaskingjade/Classification | 911f73b8ec037d9f560065aa71116650c128d721 | 506c036c141df5eee69a5f84672f9635e0ad242b | refs/heads/master | 2021-01-20T06:43:26.815927 | 2017-06-02T07:58:54 | 2017-06-02T07:58:54 | 89,916,858 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,574 | py | #This is a simple implementation of LSTM
from keras.layers import Input,Embedding,RepeatVector,Reshape,LSTM,merge,Dense
from keras.models import Model
from keras.optimizers import SGD,RMSprop
def lstm_model(max_len=200, dof=70, embd_dim=2,
               hidden_dim_list=None, activation_list=None,
               optimizer='sgd', lr=0.01, momentum=0.0):
    """Build and compile a stacked-LSTM classifier over labeled sequences.

    The emotion/class label is embedded, repeated along the time axis and
    concatenated to the frame features before the LSTM stack; the final
    Dense layer emits 8 sigmoid outputs.

    :param max_len: sequence length of the frame input
    :param dof: feature dimension per frame
    :param embd_dim: size of the label embedding
    :param hidden_dim_list: hidden sizes of the LSTM stack (default [100, 20])
    :param activation_list: one activation per LSTM layer (default tanh)
    :param optimizer: 'sgd' or 'rmsprop'
    :param lr: learning rate (was the string '0.01', which Keras optimizers
        cannot consume; now a float)
    :param momentum: SGD momentum
    :return: the compiled Keras Model
    """
    # Avoid mutable default arguments (shared across calls).
    if hidden_dim_list is None:
        hidden_dim_list = [100, 20]
    if activation_list is None:
        activation_list = ['tanh', 'tanh']

    frame_input = Input(shape=(max_len, dof), name='input')
    label_input = Input(shape=(1,), name='label_input')
    embd_label = Embedding(input_dim=8, output_dim=embd_dim)(label_input)
    embd_label = Reshape(target_shape=(embd_dim,))(embd_label)
    embd_label = RepeatVector(max_len)(embd_label)
    encoded = merge([frame_input, embd_label], mode='concat', concat_axis=2)
    for i, (dim, activation) in enumerate(zip(hidden_dim_list, activation_list)):
        # Only the last LSTM collapses the sequence to a single vector.
        if i == len(hidden_dim_list) - 1:
            encoded = LSTM(output_dim=dim, activation=activation,
                           return_sequences=False)(encoded)
        else:
            encoded = LSTM(output_dim=dim, activation=activation,
                           return_sequences=True)(encoded)
    encoded = Dense(output_dim=8, activation='sigmoid')(encoded)
    model = Model(input=[frame_input, label_input], output=encoded,
                  name='Encoder')
    if optimizer == 'sgd':
        optimizer_model = SGD(lr=lr, momentum=momentum)
    elif optimizer == 'rmsprop':
        optimizer_model = RMSprop(lr=lr)
    else:
        raise ValueError('No such kind optimizer')
    # compile model -- BUGFIX: Keras expects ``metrics`` to be a list, not a
    # bare string.
    model.compile(optimizer=optimizer_model, loss='binary_crossentropy',
                  metrics=['accuracy'])
    model.summary()
    return model
"wangqi531@hotmail.com"
] | wangqi531@hotmail.com |
eaf7a34c14cd4e1b28388a48ae0963692d9f010e | afebbb07b2b4eada17a5853c1ce63b4075d280df | /marketsim/gen/_out/orderbook/_VolumeLevels.py | b906b3b76b1fa1729242c7a4bbcd934491c3730f | [] | no_license | peter1000/marketsimulator | 8c0a55fc6408b880311d3ad49defc55e9af57824 | 1b677200a9d5323f2970c83f076c2b83d39d4fe6 | refs/heads/master | 2021-01-18T01:39:04.869755 | 2015-03-29T17:47:24 | 2015-03-29T17:47:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,579 | py | # generated with class generator.python.intrinsic_observable$Import
from marketsim import registry
from marketsim.gen._out._observable._observableivolumelevels import ObservableIVolumeLevels
from marketsim.gen._out._ivolumelevels import IVolumeLevels
from marketsim.gen._intrinsic.orderbook.volume_levels import VolumeLevels_Impl
from marketsim.gen._out._iorderqueue import IOrderQueue
@registry.expose(["Asset", "VolumeLevels"])
class VolumeLevels_IOrderQueueFloatInt(ObservableIVolumeLevels, VolumeLevels_Impl):
    """ **Returns arrays of levels for given volumes [i*volumeDelta for i in range(0, volumeCount)]**

    Level of volume V is a price at which cumulative volume of better orders is V

    Parameters are:

    **queue**

    **volumeDelta**
        distance between two volumes

    **volumeCount**
        number of volume levels to track
    """
    # NOTE: produced by the marketsim class generator
    # (class generator.python.intrinsic_observable$Import); prefer
    # regenerating over hand-editing.

    def __init__(self, queue = None, volumeDelta = None, volumeCount = None):
        # Imports are local to keep generated modules cheap to load and to
        # avoid circular imports between generated classes.
        from marketsim.gen._out._ivolumelevels import IVolumeLevels
        from marketsim.gen._out._observable._observableivolumelevels import ObservableIVolumeLevels
        from marketsim.gen._out.orderbook._asks import Asks_IOrderBook as _orderbook_Asks_IOrderBook
        from marketsim import deref_opt
        ObservableIVolumeLevels.__init__(self)
        # Defaults: the asks queue, a 30.0 volume step, and 10 levels.
        self.queue = queue if queue is not None else deref_opt(_orderbook_Asks_IOrderBook())
        self.volumeDelta = volumeDelta if volumeDelta is not None else 30.0
        self.volumeCount = volumeCount if volumeCount is not None else 10
        VolumeLevels_Impl.__init__(self)

    @property
    def label(self):
        return repr(self)

    # Property name -> expected type, used by typecheck() and __repr__.
    _properties = {
        'queue' : IOrderQueue,
        'volumeDelta' : float,
        'volumeCount' : int
    }

    def __repr__(self):
        # NOTE(review): dict.iterkeys() is Python 2 only -- this generated
        # code predates Python 3 support.
        return "VolumeLevels(%(queue)s)" % dict([ (name, getattr(self, name)) for name in self._properties.iterkeys() ])

    def bind_ex(self, ctx):
        # Idempotent bind of this node and all children; _processing_ex
        # guards against cycles in the dependency graph.
        if self.__dict__.get('_bound_ex', False): return
        self.__dict__['_bound_ex'] = True
        if self.__dict__.get('_processing_ex', False):
            raise Exception('cycle detected')
        self.__dict__['_processing_ex'] = True
        self.__dict__['_ctx_ex'] = ctx.updatedFrom(self)
        if hasattr(self, '_internals'):
            for t in self._internals:
                v = getattr(self, t)
                if type(v) in [list, set]:
                    for w in v: w.bind_ex(self.__dict__['_ctx_ex'])
                else:
                    v.bind_ex(self.__dict__['_ctx_ex'])
        self.queue.bind_ex(self._ctx_ex)
        self.bind_impl(self.__dict__['_ctx_ex'])
        if hasattr(self, '_subscriptions'):
            for s in self._subscriptions: s.bind_ex(self.__dict__['_ctx_ex'])
        self.__dict__['_processing_ex'] = False

    def reset_ex(self, generation):
        # Generation stamp ensures shared nodes are reset once per pass.
        if self.__dict__.get('_reset_generation_ex', -1) == generation: return
        self.__dict__['_reset_generation_ex'] = generation
        if self.__dict__.get('_processing_ex', False):
            raise Exception('cycle detected')
        self.__dict__['_processing_ex'] = True
        if hasattr(self, '_internals'):
            for t in self._internals:
                v = getattr(self, t)
                if type(v) in [list, set]:
                    for w in v: w.reset_ex(generation)
                else:
                    v.reset_ex(generation)
        self.queue.reset_ex(generation)
        self.reset()
        if hasattr(self, '_subscriptions'):
            for s in self._subscriptions: s.reset_ex(generation)
        self.__dict__['_processing_ex'] = False

    def typecheck(self):
        # Validate properties against the declared _properties types.
        from marketsim import rtti
        from marketsim.gen._out._iorderqueue import IOrderQueue
        rtti.typecheck(IOrderQueue, self.queue)
        rtti.typecheck(float, self.volumeDelta)
        rtti.typecheck(int, self.volumeCount)

    def registerIn(self, registry):
        # Register this node and everything reachable from it exactly once.
        if self.__dict__.get('_id', False): return
        self.__dict__['_id'] = True
        if self.__dict__.get('_processing_ex', False):
            raise Exception('cycle detected')
        self.__dict__['_processing_ex'] = True
        registry.insert(self)
        self.queue.registerIn(registry)
        if hasattr(self, '_subscriptions'):
            for s in self._subscriptions: s.registerIn(registry)
        if hasattr(self, '_internals'):
            for t in self._internals:
                v = getattr(self, t)
                if type(v) in [list, set]:
                    for w in v: w.registerIn(registry)
                else:
                    v.registerIn(registry)
        self.__dict__['_processing_ex'] = False

    def bind_impl(self, ctx):
        VolumeLevels_Impl.bind_impl(self, ctx)

    def reset(self):
        VolumeLevels_Impl.reset(self)
def VolumeLevels(queue=None, volumeDelta=None, volumeCount=None):
    """Overload-dispatching factory: validates argument types with the rtti
    helpers and constructs the single concrete implementation."""
    from marketsim.gen._out._iorderqueue import IOrderQueue
    from marketsim import rtti
    args_ok = (
        (queue is None or rtti.can_be_casted(queue, IOrderQueue)) and
        (volumeDelta is None or rtti.can_be_casted(volumeDelta, float)) and
        (volumeCount is None or rtti.can_be_casted(volumeCount, int)))
    if args_ok:
        return VolumeLevels_IOrderQueueFloatInt(queue, volumeDelta, volumeCount)
    raise Exception('Cannot find suitable overload for VolumeLevels('+str(queue) +':'+ str(type(queue))+','+str(volumeDelta) +':'+ str(type(volumeDelta))+','+str(volumeCount) +':'+ str(type(volumeCount))+')')
| [
"anton.kolotaev@gmail.com"
] | anton.kolotaev@gmail.com |
3e99befc5438312e713a1e78d14e1bc4f6d79697 | f9c98f9c127fa1cd9fba17abe17199fb5440b36b | /md_rahaman/python/full_stack_django/dojo_secret/apps/dojo_secret_app/migrations/0001_initial.py | 8a6981c5b5c67aeb2eb76efcddae0dfeca2b27b1 | [] | no_license | RibRibble/python_april_2017 | 162e543f97afc77d44fcc858106e4730d3f7f760 | 3cc4240d371a8bad8da2ea085e3675272cca2de3 | refs/heads/master | 2021-01-19T01:12:34.667828 | 2017-04-27T22:11:53 | 2017-04-27T22:11:53 | 87,233,010 | 1 | 0 | null | 2017-04-04T20:41:44 | 2017-04-04T20:41:44 | null | UTF-8 | Python | false | false | 1,758 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-04-20 21:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.10 makemigrations: creates the User and
    # Secret tables plus the Secret.user FK and Secret.likes M2M.
    # Applied migrations should not be edited by hand.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Secret',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('secret', models.TextField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=255)),
                ('last_name', models.CharField(max_length=255)),
                ('email', models.CharField(max_length=255)),
                ('password', models.CharField(max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.AddField(
            model_name='secret',
            name='likes',
            field=models.ManyToManyField(related_name='secrets_liked', to='dojo_secret_app.User'),
        ),
        migrations.AddField(
            model_name='secret',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='secrets', to='dojo_secret_app.User'),
        ),
    ]
| [
"soikatesc@gmail.com"
] | soikatesc@gmail.com |
b6b21288b2195932f2a24c5efb924383b55353da | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /2iQhC3t4SDZ6LGMWw_20.py | 495d30cd14904c8d1eb8876a74a2a751502893ec | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 764 | py | """
Given a list of 2D points `[x, y]`, create a function that returns `True` if
those points can be on the _bounds_ of a rectangle, `False` otherwise.

### Examples
on_rectangle_bounds([[0, 1], [1, 0], [1, 1], [0, 0]]) ➞ True
on_rectangle_bounds([[0, 1], [1, 0], [1, 1], [0.5, 0.5]]) ➞ False
on_rectangle_bounds([[0, 1], [10, 0], [10, 1]]) ➞ True
on_rectangle_bounds([[0, 1]]) ➞ True
### Notes
Only rectangles with sides parallel to _x-axis_ and _y-axis_ will be
considered.
"""
def on_rectangle_bounds(dots):
bounds = [[min(axis), max(axis)] for axis in zip(*dots)]
return all(map(lambda dot: dot[0] in bounds[0] or dot[1] in bounds[1], dots))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
4397382269e0dec61503218124ea7997ba367489 | 0bcab6ea00eeb88516c218604d96c9cfc9ea5642 | /cs212-docp/07-18.py | 1a3dfb413c3c95e0814933cab7a4d0d8af72b932 | [] | no_license | alexchonglian/udacity | c11ffbbdbf19aa4247ec917ec51d3baab1411fa4 | cc3d990542fd1d14574ca42b64ebdd2d31f99d93 | refs/heads/master | 2022-08-28T17:17:10.428118 | 2020-09-10T02:12:40 | 2020-09-10T02:12:40 | 187,312,321 | 0 | 0 | null | 2022-07-29T23:11:54 | 2019-05-18T04:04:07 | Jupyter Notebook | UTF-8 | Python | false | false | 1,253 | py | # --------------
# User Instructions
#
# Fill out the function match(pattern, text), so that
# remainders is properly assigned.
def match(pattern, text):
    "Match pattern against start of text; return longest match found or None."
    # ``pattern`` maps text -> set of unmatched remainders; the shortest
    # remainder corresponds to the longest matched prefix.
    remainders = pattern(text)
    if not remainders:
        return None
    shortest_left = min(len(r) for r in remainders)
    return text[:len(text) - shortest_left]
# A "pattern" is a function text -> set of remainders (text left unmatched).
# The empty set means the pattern failed to match at all.
null = frozenset()


def lit(s):
    "Literal: matches exactly the string s at the start of the text."
    n = len(s)
    return lambda t: set([t[n:]]) if t.startswith(s) else null


def seq(x, y):
    "Sequence: match x, then match y on every remainder x produced."
    return lambda t: set().union(*map(y, x(t)))


def alt(x, y):
    "Alternative: anything matched by x or by y."
    return lambda t: x(t) | y(t)


def oneof(chars):
    "Match any single character drawn from chars."
    return lambda t: set([t[1:]]) if (t and t[0] in chars) else null


def dot(t):
    "Match any single character."
    return set([t[1:]]) if t else null


def eol(t):
    "Match only at end of text."
    return set(['']) if t == '' else null


def star(x):
    "Kleene star: zero or more repetitions of x."
    def repeated(t):
        # Zero repetitions always succeed; recurse only on progress
        # (t1 != t) to avoid looping on empty matches.
        result = set([t])
        for t1 in x(t):
            if t1 != t:
                result |= repeated(t1)
        return result
    return repeated
def test():
    """Smoke-test the matcher against a handful of known patterns."""
    assert match(star(lit('a')), 'aaaaabbbaa') == 'aaaaa'
    assert match(lit('hello'), 'hello how are you?') == 'hello'
    assert match(lit('x'), 'hello how are you?') == None
    assert match(oneof('xyz'), 'x**2 + y**2 = r**2') == 'x'
    assert match(oneof('xyz'), ' x is here!') == None
    return 'tests pass'
"alexchonglian@gmail.com"
] | alexchonglian@gmail.com |
f6449926a5b1dd8b1dc863a3cf3df671d50dc093 | f47863b3a595cbe7ec1c02040e7214481e4f078a | /plugins/scan/discuz/449.py | 79f9d31a89d3baa6200731d2dbeb750ee717ca15 | [] | no_license | gobiggo/0bscan | fe020b8f6f325292bda2b1fec25e3c49a431f373 | 281cf7c5c2181907e6863adde27bd3977b4a3474 | refs/heads/master | 2020-04-10T20:33:55.008835 | 2018-11-17T10:05:41 | 2018-11-17T10:05:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#__author__ = 'darkkid'
# Name:Discuz! X3 tools
def assign(service, arg):
    """Scanner dispatch hook: accept only Discuz targets.

    Returns ``(True, arg)`` for the "discuz" service and None (implicitly)
    for everything else.
    """
    if service != "discuz":
        return None
    return True, arg
def audit(arg):
    """Probe ``arg`` (a base URL) for the Discuz! X3 tools.php diagnostic page.

    ``curl`` and ``security_warning`` are not defined in this file --
    presumably injected globally by the scanner framework; verify before
    running standalone.
    """
    payload = 'source/plugin/tools/tools.php'
    verify_url = arg + payload
    code, head, res, errcode, _ = curl.curl(verify_url)
    # The exposed tools page answers 200 and mentions "Discuz".
    if code == 200 and "Discuz" in res:
        security_warning(verify_url + ' Discuz! X3 tools')


if __name__ == '__main__':
    audit(assign('discuz', 'http://www.example.com/')[1])
"zer0i3@aliyun.com"
] | zer0i3@aliyun.com |
3ec05c59ff77a33bf89d81b92c7d0dac1f34f4f0 | 22da4a564696d905bed0e4f21a1cb724fcadbbcf | /frappe/commands/scheduler.py | af994a5fc4d40ed500e53f051b472ea519e0b85c | [
"MIT"
] | permissive | ektai/erp2Dodock | b96512b112183a71d79c12513216b3fc6dd9293f | 5ad64b01cba9b07437f9a27751101258679379e8 | refs/heads/master | 2023-01-02T14:00:12.701247 | 2020-10-28T07:18:45 | 2020-10-28T07:18:45 | 305,179,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,466 | py | from __future__ import unicode_literals, absolute_import, print_function
import click
import sys
import frappe
from frappe.utils import cint
from frappe.commands import pass_context, get_site
def _is_scheduler_enabled():
    """Return True when the "enable_scheduler" System Setting is set.

    Best-effort: any failure (site unreachable, missing setting) is
    treated as "scheduler disabled" rather than raised.
    """
    enable_scheduler = False
    try:
        frappe.connect()
        enable_scheduler = bool(cint(frappe.db.get_single_value(
            "System Settings", "enable_scheduler")))
    except Exception:
        # BUGFIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.
        pass
    finally:
        frappe.db.close()
    return enable_scheduler
@click.command('trigger-scheduler-event')
@click.argument('event')
@pass_context
def trigger_scheduler_event(context, event):
    "Trigger a scheduler event"
    import frappe.utils.scheduler

    # Fire the event immediately (now=True) on every selected site.
    for site in context.sites:
        try:
            frappe.init(site=site)
            frappe.connect()
            frappe.utils.scheduler.trigger(site, event, now=True)
        finally:
            frappe.destroy()
    if not context.sites:
        # NOTE(review): SiteNotSpecifiedError is not imported in this module
        # as shown -- confirm it is in scope at runtime.
        raise SiteNotSpecifiedError
@click.command('enable-scheduler')
@pass_context
def enable_scheduler(context):
    "Enable scheduler"
    import frappe.utils.scheduler

    # Turn the scheduler flag on for every site passed on the command line.
    for site in context.sites:
        try:
            frappe.init(site=site)
            frappe.connect()
            frappe.utils.scheduler.enable_scheduler()
            frappe.db.commit()
            print("Enabled for", site)
        finally:
            frappe.destroy()
    if not context.sites:
        # NOTE(review): SiteNotSpecifiedError is not imported in this module
        # as shown -- confirm it is in scope at runtime.
        raise SiteNotSpecifiedError
@click.command('disable-scheduler')
@pass_context
def disable_scheduler(context):
    "Disable scheduler"
    import frappe.utils.scheduler

    # Turn the scheduler flag off for every site passed on the command line.
    for site in context.sites:
        try:
            frappe.init(site=site)
            frappe.connect()
            frappe.utils.scheduler.disable_scheduler()
            frappe.db.commit()
            print("Disabled for", site)
        finally:
            frappe.destroy()
    if not context.sites:
        # NOTE(review): SiteNotSpecifiedError is not imported in this module
        # as shown -- confirm it is in scope at runtime.
        raise SiteNotSpecifiedError
@click.command('scheduler')
@click.option('--site', help='site name')
@click.argument('state', type=click.Choice(['pause', 'resume', 'disable', 'enable']))
@pass_context
def scheduler(context, state, site=None):
    # Pause/resume toggle a flag in site_config.json (no DB needed);
    # enable/disable persist the System Setting and require a connection.
    # No docstring on purpose: click would surface it as help text.
    from frappe.installer import update_site_config
    import frappe.utils.scheduler

    if not site:
        site = get_site(context)

    try:
        frappe.init(site=site)

        if state == 'pause':
            update_site_config('pause_scheduler', 1)
        elif state == 'resume':
            update_site_config('pause_scheduler', 0)
        elif state == 'disable':
            frappe.connect()
            frappe.utils.scheduler.disable_scheduler()
            frappe.db.commit()
        elif state == 'enable':
            frappe.connect()
            frappe.utils.scheduler.enable_scheduler()
            frappe.db.commit()

        print('Scheduler {0}d for site {1}'.format(state, site))
    finally:
        frappe.destroy()
@click.command('set-maintenance-mode')
@click.option('--site', help='site name')
@click.argument('state', type=click.Choice(['on', 'off']))
@pass_context
def set_maintenance_mode(context, state, site=None):
    # Toggle the site-level maintenance flag in site_config.json.
    from frappe.installer import update_site_config

    if not site:
        site = get_site(context)

    try:
        frappe.init(site=site)
        update_site_config('maintenance_mode', 1 if (state == 'on') else 0)
    finally:
        frappe.destroy()
@click.command('doctor')  # Passing context always gets a site and if there is no use site it breaks
@click.option('--site', help='site name')
@pass_context
def doctor(context, site=None):
    "Get diagnostic info about background workers"
    from frappe.utils.doctor import doctor as _doctor

    # raise_err=False: the doctor can run without a specific site.
    if not site:
        site = get_site(context, raise_err=False)
    return _doctor(site=site)
@click.command('show-pending-jobs')
@click.option('--site', help='site name')
@pass_context
def show_pending_jobs(context, site=None):
    "Get diagnostic info about background jobs"
    from frappe.utils.doctor import pending_jobs as _pending_jobs

    if not site:
        site = get_site(context)

    # init_site is a context manager: tears the site connection down after.
    with frappe.init_site(site):
        pending_jobs = _pending_jobs(site=site)
    return pending_jobs
@click.command('purge-jobs')
@click.option('--site', help='site name')
@click.option('--queue', default=None, help='one of "low", "default", "high')
@click.option('--event', default=None, help='one of "all", "weekly", "monthly", "hourly", "daily", "weekly_long", "daily_long"')
def purge_jobs(site=None, queue=None, event=None):
    "Purge any pending periodic tasks, if event option is not given, it will purge everything for the site"
    from frappe.utils.doctor import purge_pending_jobs

    # Empty-string site means "all sites" for frappe.init.
    frappe.init(site or '')
    count = purge_pending_jobs(event=event, site=site, queue=queue)
    print("Purged {} jobs".format(count))
@click.command('schedule')
def start_scheduler():
    # Long-running process that enqueues due scheduled jobs.
    from frappe.utils.scheduler import start_scheduler
    start_scheduler()
@click.command('worker')
@click.option('--queue', type=str)
@click.option('--quiet', is_flag=True, default=False, help='Hide Log Outputs')
def start_worker(queue, quiet=False):
    # Long-running worker process consuming background jobs from ``queue``.
    from frappe.utils.background_jobs import start_worker
    start_worker(queue, quiet=quiet)
@click.command('ready-for-migration')
@click.option('--site', help='site name')
@pass_context
def ready_for_migration(context, site=None):
    # Exit 1 when the site still has pending background jobs (unsafe to
    # migrate); return 0 otherwise.
    from frappe.utils.doctor import get_pending_jobs

    if not site:
        site = get_site(context)

    try:
        frappe.init(site=site)
        pending_jobs = get_pending_jobs(site=site)

        if pending_jobs:
            print('NOT READY for migration: site {0} has pending background jobs'.format(site))
            sys.exit(1)
        else:
            print('READY for migration: site {0} does not have any background jobs'.format(site))
            return 0
    finally:
        frappe.destroy()
# Click commands exported by this module (picked up by the bench CLI).
commands = [
    disable_scheduler,
    doctor,
    enable_scheduler,
    purge_jobs,
    ready_for_migration,
    scheduler,
    set_maintenance_mode,
    show_pending_jobs,
    start_scheduler,
    start_worker,
    trigger_scheduler_event,
]
"63931935+ektai@users.noreply.github.com"
] | 63931935+ektai@users.noreply.github.com |
b21593202db3ec2c9fca713de182051eab520cf7 | 01822d2ae38a95edcd188a51c377bb07b0a0c57d | /Notes/Sprint1/MaxMinInt.py | 05aac91498379a492ba180a67bfa4816f3851804 | [
"MIT"
] | permissive | mark-morelos/CS_Notes | bc298137971295023e5e3caf964fe7d3f8cf1af9 | 339c47ae5d7e678b7ac98d6d78857d016c611e38 | refs/heads/main | 2023-03-10T11:56:52.691282 | 2021-03-02T15:09:31 | 2021-03-02T15:09:31 | 338,211,631 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | """
Challenge #10:
Given a string of space separated integers, write a function that returns the
maximum and minimum integers.
Example:
- max_and_min("1 2 3 4 5") -> "5 1"
- max_and_min("1 2 -3 4 5") -> "5 -3"
- max_and_min("1 9 3 4 -5") -> "9 -5"
Notes:
- All inputs are valid integers.
- There will always be at least one number in the input string.
- The return string must be two numbers separated by a single space, and
the maximum number is first.
"""
# def max_and_min(input_str):
# Your code here
def csAntyhingButFive(start, end):
    # NOTE(review): the name hints that fives should be excluded, but the
    # current behaviour is simply the half-open range [start, end) --
    # confirm the intent before "fixing".
    return range(start, end)


csAntyhingButFive(2, 5)  # scratch call; result is discarded
"makoimorelos@gmail.com"
] | makoimorelos@gmail.com |
e48e2e127a3980f24a9bca937240644815c580b0 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /cXfcK7iXpuZ67taSh_6.py | 4967e77250cf5ea5287e57b8266ffdaa52afb031 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 722 | py | """
This is a **reverse coding challenge**. Normally you're given explicit
directions with how to create a function. Here, you must generate your own
function to satisfy the relationship between the inputs and outputs.
Your task is to create a function that, when fed the inputs below, produce the
sample outputs shown.
### Examples
"A4B5C2" ➞ "AAAABBBBBCC"
"C2F1E5" ➞ "CCFEEEEE"
"T4S2V2" ➞ "TTTTSSVV"
"A1B2C3D4" ➞ "ABBCCCDDDD"
### Notes
If you get stuck, check the **Comments** for help.
"""
def mystery_func(txt):
    """Expand a run-length encoded string: "A4B5C2" -> "AAAABBBBBCC".

    Characters sit at even indices; their single-digit repeat counts sit
    at the following odd indices.
    """
    chars, counts = txt[::2], txt[1::2]
    return ''.join(ch * int(cnt) for ch, cnt in zip(chars, counts))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
4cc866417f831788e09a74381142e2d46e6cea3c | f8f8651ab604acc4937f8725caadaca1fb97a5e8 | /src/pytorch_lightning/utilities/warnings.py | 57b56ba0685c880e835b00dab30bc3648f85ac42 | [
"Apache-2.0"
] | permissive | neptune-ai/pytorch-lightning | ac59e746a486e07e21abae426b28e5d72812ac98 | 702014418e2ec0437e67d8bf97809edef686a02c | refs/heads/master | 2022-09-28T09:34:07.653729 | 2022-09-12T11:13:48 | 2022-09-12T11:13:48 | 229,063,811 | 1 | 1 | Apache-2.0 | 2022-09-26T03:29:49 | 2019-12-19T13:48:16 | Python | UTF-8 | Python | false | false | 725 | py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Warning-related utilities."""
# backwards compatibility
from lightning_lite.utilities.warnings import PossibleUserWarning # noqa: F401
| [
"noreply@github.com"
] | neptune-ai.noreply@github.com |
db7e523a58f687eac59bf3abe458af25e9ee7bb6 | 06f0ae3ecaaf47b1c23e231838afa524d8446f5e | /contest/migrations/0005_auto_20160321_2239.py | 116551554284e7daca17853af53c60b75e9ed8e3 | [] | no_license | nakamotohideyoshi/draftboard-web | c20a2a978add93268617b4547654b89eda11abfd | 4796fa9d88b56f80def011e2b043ce595bfce8c4 | refs/heads/master | 2022-12-15T06:18:24.926893 | 2017-09-17T12:40:03 | 2017-09-17T12:40:03 | 224,877,650 | 0 | 0 | null | 2022-12-08T00:02:57 | 2019-11-29T15:20:17 | Python | UTF-8 | Python | false | false | 3,517 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Replace the old ``Pool`` model with ``ContestPool``.

    Creates the concrete ``ContestPool`` model, removes ``Pool``'s foreign
    keys first so the model can be deleted cleanly, deletes ``Pool``, and
    adds a ``CurrentContestPool`` proxy (no table of its own).
    """

    dependencies = [
        ('prize', '0006_auto_20160209_2241'),
        ('draftgroup', '0018_draftgroup_fantasy_points_finalized'),
        ('sports', '0001_squashed_0008_auto_20160119_2124'),
        ('contest', '0004_auto_20160321_2142'),
    ]

    operations = [
        # New concrete model replacing Pool.
        migrations.CreateModel(
            name='ContestPool',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('cid', models.CharField(editable=False, blank=True, max_length=6, help_text='unique, randomly chosen when Contest is created', default='')),
                ('name', models.CharField(default='', max_length=64, help_text='The front-end name of the Contest', verbose_name='Public Name')),
                ('start', models.DateTimeField(help_text='the start should coincide with the start of a real-life game.', verbose_name='Start Time')),
                ('end', models.DateTimeField(blank=True, help_text='forces the end time of the contest (will override "Ends tonight" checkbox!!', verbose_name='Cutoff Time')),
                ('max_entries', models.PositiveIntegerField(default=1, help_text='USER entry limit')),
                ('entries', models.PositiveIntegerField(default=2, help_text='CONTEST limit')),
                ('current_entries', models.PositiveIntegerField(default=0, help_text='The current # of entries in the contest')),
                ('gpp', models.BooleanField(default=False, help_text='a gpp Contest will not be cancelled if it does not fill')),
                ('respawn', models.BooleanField(default=False, help_text='indicates whether a new identical Contest should be created when this one fills up')),
                ('doubleup', models.BooleanField(default=False, help_text='whether this contest has a double-up style prize structure')),
                ('status', models.CharField(choices=[('Scheduled', (('scheduled', 'Scheduled'),)), ('Created', (('created', 'Created'),))], default='scheduled', max_length=32)),
                ('draft_group', models.ForeignKey(blank=True, to='draftgroup.DraftGroup', help_text='the pool of draftable players and their salaries, for the games this contest includes.', null=True, verbose_name='DraftGroup')),
                ('prize_structure', models.ForeignKey(to='prize.PrizeStructure')),
                ('site_sport', models.ForeignKey(to='sports.SiteSport', related_name='contest_contestpool_site_sport')),
            ],
            options={
                'abstract': False,
                'verbose_name_plural': 'Contest Pools',
                'verbose_name': 'Contest Pools',
            },
        ),
        # Drop Pool's FK fields before deleting the model itself.
        migrations.RemoveField(
            model_name='pool',
            name='draft_group',
        ),
        migrations.RemoveField(
            model_name='pool',
            name='prize_structure',
        ),
        migrations.RemoveField(
            model_name='pool',
            name='site_sport',
        ),
        migrations.DeleteModel(
            name='Pool',
        ),
        # Proxy over ContestPool -- shares its table, adds no columns.
        migrations.CreateModel(
            name='CurrentContestPool',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('contest.contestpool',),
        ),
    ]
| [
"cbanister@coderden.com"
] | cbanister@coderden.com |
48d33bfa29646585b6db9ca254362c02f141d4cc | e02405f3db787275545f87a23bf7eb3510ddd2f4 | /test_main.py | 4e68d61ebcec6bfcbe8da0934a8fedd93d0a5e41 | [] | no_license | kevlab/flasktaskr_project | a51ca6d62b080fb1fdbe0fb34ad5d7cff36ba329 | 3a3ba1dfbe2571deb7958970d52437abb03b03e7 | refs/heads/master | 2021-01-17T17:07:33.064404 | 2015-06-07T14:38:25 | 2015-06-07T14:38:25 | 33,480,265 | 0 | 0 | null | 2015-04-06T13:25:34 | 2015-04-06T12:10:55 | Python | UTF-8 | Python | false | false | 1,633 | py | import os
import unittest
from project import app, db
from config import basedir
from project.models import User, Task
TEST_DB = 'test.db'
class Alltests(unittest.TestCase):
    """Functional tests run against a throwaway SQLite database (TEST_DB)."""

    def setUp(self):
        # Point the app at a fresh test database and disable CSRF so
        # forms can be POSTed directly from tests.
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['DEBUG'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + \
            os.path.join(basedir, TEST_DB)
        self.app = app.test_client()
        db.create_all()

    def tearDown(self):
        # Drop every table so each test starts from an empty schema.
        db.drop_all()

    def login(self, name, password):
        """POST credentials to the users endpoint, following redirects."""
        # NOTE(review): the path has no leading slash ('users/');
        # confirm it resolves to the intended /users/ route.
        return self.app.post('users/', data=dict(name=name, password=password),
            follow_redirects=True)

    def test_404_error(self):
        # Unknown routes should render the custom 404 page.
        response = self.app.get('/not-actually-a-route/')
        self.assertEqual(response.status_code, 404)
        self.assertIn('Sorry. There\'s nothing here.', response.data)

    #def test_500_error(self):
        #bad_user = User(name='baduser',
        #email='baduser@gmail.com',
        #password='django')
        #db.session.add(bad_user)
        #db.session.commit()
        #response = self.login('baduser', 'django')
        #self.assertEqual(response.status_code, 500)
        #self.assertIn('Something went terribly wrong.', response.data)
        #self.assertNotIn('ValueError: Invalid salt', response.data)

    def test_index(self):
        """ Ensure flask was set up properly """
        # '/' redirects (302) for anonymous visitors.
        response = self.app.get('/', content_type='html/text')
        self.assertEqual(response.status_code, 302)
if __name__ == "__main__":
unittest.main()
| [
"greenleaf1348@gmail.com"
] | greenleaf1348@gmail.com |
ed9bd818265cb5b56fa0412338d0e74acfdcfc60 | fd390bfa1f471d09cafb72ad85e5143214abf32d | /shakecast/app/orm/migrations.py | 467a024c4ef7ab9b73b5df4072e90f4cf3b5c714 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain-disclaimer"
] | permissive | usgs/shakecast | 0ac6ac955aaff3029b133c4ce9264adc90004d86 | e287d697d93467e5e25c99d27b70754a0a5e6e39 | refs/heads/master | 2023-05-13T09:22:13.001195 | 2022-03-08T03:26:18 | 2022-03-08T03:26:18 | 52,902,913 | 9 | 24 | NOASSERTION | 2023-05-01T22:45:23 | 2016-03-01T19:15:42 | Python | UTF-8 | Python | false | false | 5,320 | py | from sqlalchemy import Column, Integer, String, Float, PrimaryKeyConstraint
############### DB Migrations ################
def _try_add_column(engine, table_name, column):
    '''
    Best-effort column add: delegate to ``add_column`` and swallow any
    failure (typically because the column already exists from an earlier,
    partially applied migration).
    '''
    try:
        add_column(engine, table_name, column)
    except Exception:
        pass


def migrate_1to2(engine):
    '''
    Add updated and updated_by columns to keep track of which users
    are updating inventory
    '''
    for table in ('user', 'group', 'facility'):
        _try_add_column(engine, table, Column('updated', Integer))
        _try_add_column(engine, table, Column('updated_by', String))
    return engine


def migrate_2to3(engine):
    '''
    Add an mms column to the user table
    '''
    _try_add_column(engine, 'user', Column('mms', String(255)))
    return engine


def migrate_3to4(engine):
    '''
    Add an aebm column to the facility_shaking table
    '''
    _try_add_column(engine, 'facility_shaking', Column('aebm', String(50)))
    return engine


def migrate_4to5(engine):
    '''
    Add a sent_timestamp column to the notification table
    '''
    _try_add_column(engine, 'notification', Column('sent_timestamp', Float))
    return engine


def migrate_5to6(engine):
    '''
    Add a type column to the shakemap and event tables
    '''
    for table in ('shakemap', 'event'):
        _try_add_column(engine, table, Column('type', String(64)))
    return engine


def migrate_6to7(engine):
    '''
    Add an epicentral_distance column to the facility_shaking table
    '''
    _try_add_column(engine, 'facility_shaking',
                    Column('epicentral_distance', String(64)))
    return engine


def migrate_7to8(engine):
    '''
    Add an override_directory column to the shakemap and event tables
    '''
    for table in ('shakemap', 'event'):
        _try_add_column(engine, table, Column('override_directory', String(255)))
    return engine


def migrate_8to9(engine):
    '''
    Add a product_string column to the group table
    '''
    _try_add_column(engine, 'group', Column('product_string', String(255)))
    return engine


def migrate_9to10(engine):
    '''
    Add a generated_timestamp column to the notification table
    '''
    _try_add_column(engine, 'notification', Column('generated_timestamp', Float))
    return engine


def migrate_10to11(engine):
    '''
    Rework local product bookkeeping: add naming columns to
    local_product_types, drop that table (so it can be rebuilt from the
    current model definitions), add timing columns to local_products and
    an error column to notification
    '''
    _try_add_column(engine, 'local_product_types', Column('file_name', String))
    _try_add_column(engine, 'local_product_types', Column('name', String))

    # Drop the table outright; best-effort, like the column adds above.
    try:
        engine.execute('drop table local_product_types')
    except Exception:
        pass

    _try_add_column(engine, 'local_products', Column('begin_timestamp', Float))
    _try_add_column(engine, 'local_products',
                    Column('finish_timestamp', Float, default=0))
    _try_add_column(engine, 'notification', Column('error', String(255)))
    return engine


def migrate_11to12(engine):
    '''
    Add an updated column to the event table
    '''
    _try_add_column(engine, 'event', Column('updated', Integer))
    return engine


def migrate_12to13(engine):
    '''
    Add a dependencies column to local_product_types and a tries column
    to local_products
    '''
    _try_add_column(engine, 'local_product_types', Column('dependencies', String))
    _try_add_column(engine, 'local_products', Column('tries', Integer, default=0))
    return engine


def add_column(engine, table_name, column):
    '''
    Add a column to an existing table.

    The column name and type are compiled for the engine's dialect and
    interpolated directly into the ALTER TABLE statement -- DDL
    identifiers cannot be bound as query parameters, so table and column
    names must never come from untrusted input.  Dialects other than
    sqlite and mysql are silently ignored, as before.
    '''
    column_name = column.compile(dialect=engine.dialect)
    column_type = column.type.compile(engine.dialect)
    if 'sqlite' in str(engine):
        engine.execute('ALTER TABLE "%s" ADD COLUMN %s %s' % (table_name, column_name, column_type))
    elif 'mysql' in str(engine):
        engine.execute('ALTER TABLE `%s` ADD COLUMN %s %s' % (table_name, column_name, column_type))

#######################################################################
# List of database migrations for export
migrations = [migrate_1to2, migrate_2to3, migrate_3to4, migrate_4to5,
    migrate_5to6, migrate_6to7, migrate_7to8, migrate_8to9, migrate_9to10,
    migrate_10to11, migrate_11to12, migrate_12to13]

def migrate(engine):
    '''
    Run all database migrations in order and return the engine
    '''
    for migration in migrations:
        engine = migration(engine)

    return engine
| [
"dslosky@usgs.gov"
] | dslosky@usgs.gov |
5a3513f9f209d1ef28d755df10e6aa2cfc4607aa | 6f2d5600b65b062151bab88c592796b878de7465 | /InterfaceTest/common/do_mysql.py | 5eac443a50281fb43beeeb98044f8353bb135ad0 | [] | no_license | zhouyanmeng/python_api_test | 1e6549321c20ee9a71beffac2533c917b5ecc157 | 7303352c9b5baacba5296b088f89ba4c702fb485 | refs/heads/master | 2022-12-17T14:34:26.351566 | 2019-03-01T13:02:06 | 2019-03-01T13:02:06 | 185,185,856 | 0 | 0 | null | 2022-12-08T01:45:15 | 2019-05-06T11:45:55 | Python | UTF-8 | Python | false | false | 1,038 | py | import pymysql
class DoMysql:
    """Thin wrapper around a pymysql connection for simple ad-hoc queries."""

    def __init__(self):
        # Connection is opened as soon as the object is instantiated.
        # NOTE(review): credentials are hard-coded; consider loading them
        # from configuration rather than source control.
        host = "test.lemonban.com"
        user = "test"
        password = "test"
        port = 3306
        self.mysql = pymysql.connect(host=host, user=user,
                                     password=password, port=port)
        # DictCursor returns each row as a dict keyed by column name.
        self.cursor = self.mysql.cursor(pymysql.cursors.DictCursor)

    def fetch_one(self, sql):
        """Execute *sql* and return the first result row (a dict) or None."""
        self.cursor.execute(sql)
        self.mysql.commit()
        return self.cursor.fetchone()

    def fetch_all(self, sql):
        """Execute *sql* and return all result rows as a list of dicts."""
        # Bug fix: the original called the misspelled ``executee``,
        # which raised AttributeError on every call.
        self.cursor.execute(sql)
        self.mysql.commit()
        return self.cursor.fetchall()

    def close(self):
        """Close the cursor, then the underlying connection."""
        self.cursor.close()
        self.mysql.close()
if __name__ == '__main__':
    mysql=DoMysql()  # instantiating opens the database connection
    # mysql.connect()
    result=mysql.fetch_one('select max(mobilephone) from future.member')  # run the query
    print(result)
    mysql.close()  # release cursor and connection
"2440269710@qq.com"
] | 2440269710@qq.com |
6deb89e092b5b74f23029184b835d8e808de678a | 3b81dfbacf97918d36fb5accbcef0b610378e1a8 | /data-struct/probelm/02-second-try.py | 3a304359cfc1762c450a8f2640ca20afd38eef61 | [] | no_license | XiaoFei-97/the-way-to-python | 11706f0845f56246ba8ea0df8ff34e622bbdad2d | 3667a24f4f4238998e9c6ed42cdc49c68881a529 | refs/heads/master | 2020-03-21T06:46:36.939073 | 2018-06-23T03:51:11 | 2018-06-23T03:51:11 | 138,241,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | #如果a+b+c=1000,且a^2+b^2=c^2(a,b,c为自然数),求出abc所以可能的组合
#运行时间1秒
#
#a = 0
#b = 0
#c = 0
#
import time
#
#start_time = time.time()
#for a in range(1000):
# for b in range(1000):
# for c in range(1000):
# if a+b+c == 1000 and a**2 +b**2 == c**2:
# print("a=%d,b=%d,c=%d"%(a,b,c))
#end_time = time.time()
#
#print("times:%d"%(end_time-start_time))
#T=1000 * 1000 * 1000 * 2(此处细化是10步)
#时间复杂度T(n) = n^3 * 2
#大O表示法:g(n)=n^3
start_time = time.time()
for a in range(1001):
for b in range(1001-a):
c = 1000-a-b
if a**2 +b**2 == c**2:
print("a=%d,b=%d,c=%d"%(a,b,c))
#T= 1000 * 1000 * 3
#时间复杂度T(n) = n^2 *3
#大O表示法:g(n)=n^2
end_time = time.time()
print("times:%d"%(end_time-start_time))
| [
"jack_970124@163.com"
] | jack_970124@163.com |
f41ff08bfd79dc9007fd2de6edc00cb85adf391a | c46754b9600a12df4f9d7a6320dfc19aa96b1e1d | /src/transformers/models/clipseg/__init__.py | 0e2e250e507a811c0f1cbbf45dabf236e1721e4a | [
"Apache-2.0"
] | permissive | huggingface/transformers | ccd52a0d7c59e5f13205f32fd96f55743ebc8814 | 4fa0aff21ee083d0197a898cdf17ff476fae2ac3 | refs/heads/main | 2023-09-05T19:47:38.981127 | 2023-09-05T19:21:33 | 2023-09-05T19:21:33 | 155,220,641 | 102,193 | 22,284 | Apache-2.0 | 2023-09-14T20:44:49 | 2018-10-29T13:56:00 | Python | UTF-8 | Python | false | false | 2,179 | py | # Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available
_import_structure = {
"configuration_clipseg": [
"CLIPSEG_PRETRAINED_CONFIG_ARCHIVE_MAP",
"CLIPSegConfig",
"CLIPSegTextConfig",
"CLIPSegVisionConfig",
],
"processing_clipseg": ["CLIPSegProcessor"],
}
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_clipseg"] = [
"CLIPSEG_PRETRAINED_MODEL_ARCHIVE_LIST",
"CLIPSegModel",
"CLIPSegPreTrainedModel",
"CLIPSegTextModel",
"CLIPSegVisionModel",
"CLIPSegForImageSegmentation",
]
if TYPE_CHECKING:
from .configuration_clipseg import (
CLIPSEG_PRETRAINED_CONFIG_ARCHIVE_MAP,
CLIPSegConfig,
CLIPSegTextConfig,
CLIPSegVisionConfig,
)
from .processing_clipseg import CLIPSegProcessor
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_clipseg import (
CLIPSEG_PRETRAINED_MODEL_ARCHIVE_LIST,
CLIPSegForImageSegmentation,
CLIPSegModel,
CLIPSegPreTrainedModel,
CLIPSegTextModel,
CLIPSegVisionModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
| [
"noreply@github.com"
] | huggingface.noreply@github.com |
83003f3e74b140fc689183f6eeb5e495015a3296 | 4e67c2edd71493a98a3f13e5b2073c1d05b1b656 | /Semestre 02/ProjetoIntegrador2/Aula 09.24.2020/aluno_get_set.py | 1569fbeb2a3d0bad8480b29df252044a40f1b2c6 | [] | no_license | felipellima83/UniCEUB | 05991d7a02b13cd4e236f3be3a34726af2dc1504 | dbc44866545b5247d1b5f76ec6e9b7778e54093e | refs/heads/master | 2023-07-08T19:04:19.830473 | 2021-08-12T12:33:49 | 2021-08-12T12:33:49 | 249,958,282 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,714 | py | ''' UniCEUB - Ciência da Computação - Prof. Barbosa
Atalho de teclado: ctlr <d>, duplica linha. ctrl <y>, apaga linha. ctrl </>, comenta linha
Sintaxe: POO em Python com gets e sets
class NomeClasse (object):
def __init__ (self, p_1, p_2, ...): # Método construtor
self.nome_atributo1 = p_1 # Atributos
self.nome_atributo2 = p_2
...
def get_nome_atributo1 (self): # Modelo do método get (retorna valor do atributo)
return self.nome_atributo1
def set_nome_atributo1 (self, valor): # Modelo do método set (altera valor do atributo)
self.nome_atributo1 = valor
def outro_metodo (self): # Outros métodos da classe (métodos fundionais)
...
[return ...]
if __name__ == '__main__': # mai <tab>
nome_objeto1 = NomeClasse(a_1, a_2, ...) # Cria (instancia) o primeiro objeto da classe
nome_objeto2 = NomeClasse(a_1, a_2, ...) # Cria (instancia) o segundo objeto da classe
. . .
r = nome_objeto1.get_nome_atributo1() # Consulta um atributo
print(r)
nome_objeto1.set_nome_atributo1(novo_valor) # Altera um atributo
------------------------------- Com base no modelo acima, implemente estes itens:
1- Crie a classe Aluno.
- Crie o construtor da classe com os atributos: nome, mensalidade, idade
- Crie os métodos gets e sets
- No main, crie pelo menos dois objetos da classe Aluno. Teste
5- Use os métodos gets e sets para os objetos criados
- Crie o método mostra_dados. Mostra os dados (atrigutos) dentro do método. Teste.
- Refaça o anterior sem usar o nome do atributo, crie o método mostra_dados_2. Teste
- Crie o método retorna_dados, retorne todos os dados (atributos) concatenados.Teste
- Crie o método aumento_mensalidade_valor, ele recebe o valor do aumento. Teste
10- Crie o método aumento_mensalidade_porcentagem (Recebe: 10%, 15% etc.). Teste
---
11- Altere o construtor com estes valor default: Mensalidade = 1000 e idade = 0.
- No main, crie o objeto aluno3 da classe Aluno passando apenas o nome. Teste
- No main, crie o objeto aluno4 da classe Aluno passando o nome e a mensalidade. Teste
14- No main, crie o objeto aluno5 da classe Aluno passando somente o nome e a idade.
não passe o argumento mensalidade. Teste ----- '''
class Aluno(object):
    """A student with a name, a monthly tuition fee and an age.

    ``mensalidade`` defaults to 1000 and ``idade`` to 0 when omitted.
    """

    def __init__(self, nome, mensalidade=1000, idade=0):
        self.nome = nome
        self.mensalidade = mensalidade
        self.idade = idade

    # --- accessors ---------------------------------------------------
    def get_nome(self):
        """Return the student's name."""
        return self.nome

    def set_nome(self, nome):
        """Replace the student's name."""
        self.nome = nome

    def get_mensalidade(self):
        """Return the current monthly fee."""
        return self.mensalidade

    def set_mensalidade(self, valor):
        """Replace the monthly fee."""
        self.mensalidade = valor

    def get_idade(self):
        """Return the student's age."""
        return self.idade

    def set_idade(self, idade):
        """Replace the student's age."""
        self.idade = idade

    # --- behaviour ---------------------------------------------------
    def mostra_dados(self):
        """Print every attribute, reading the fields directly."""
        print('Nome: ', self.nome)
        print('Mensalidade: ', self.mensalidade)
        print('Idade: ', self.idade)

    def mostra_dados_2(self):
        """Print every attribute, going through the accessors instead."""
        print('Nome: ', self.get_nome())
        print('Mensalidade: ', self.get_mensalidade())
        print('Idade: ', self.get_idade())

    def retorna_dados(self):
        """Return the attributes joined as ``name - fee - age``."""
        return ' - '.join([self.nome, str(self.mensalidade), str(self.idade)])

    def aumento_mensalidade_valor(self, valor):
        """Raise the monthly fee by a fixed amount."""
        self.mensalidade = self.mensalidade + valor

    def aumento_mensalidade_porcentagem(self, pct):
        """Raise the monthly fee by *pct* percent (e.g. 10 for 10%)."""
        self.mensalidade = self.mensalidade + self.mensalidade * pct / 100
if __name__ == '__main__':  # IDE shortcut: main <tab>
    aluno1 = Aluno('Paulo', 1000, 21)  # calls the __init__ constructor
    aluno2 = Aluno('Carla', 900, 20)
    print("Aluno 1:")
    print("Nome: ", aluno1.get_nome())  # object.method() accessor calls
    print("Mensalidade: ", aluno1.get_mensalidade())
    print("Idade: ", aluno1.get_idade())
    print("Aluno 2:")
    print("Nome: ", aluno2.get_nome())
    print("Mensalidade: ", aluno2.get_mensalidade())
    print("Idade: ", aluno2.get_idade())
    novo_nome = input("Novo nome: ")  # option 1: rename from user input
    aluno1.set_nome(novo_nome)
    aluno2.set_nome("João")  # option 2: rename with a literal
    aluno1.mostra_dados()
    aluno2.mostra_dados()
    aluno1.mostra_dados_2()
    aluno2.mostra_dados_2()
    print('Dados concatenados: ', aluno1.retorna_dados())
    print('Dados concatenados: ', aluno2.retorna_dados())
    aluno1.aumento_mensalidade_valor(110)  # raise fee by a fixed amount
    print('Nova mensalidade', aluno1.get_mensalidade())
    aluno1.mostra_dados_2()
    aluno2.aumento_mensalidade_porcentagem(10)  # raise fee by 10%
    aluno2.mostra_dados_2()
    print('Nova mensalidade', aluno2.get_mensalidade())
    aluno3 = Aluno('Ailton')  # default fee and age
    aluno3.mostra_dados()
    aluno4 = Aluno('Ana', 800)  # default age only
    aluno4.mostra_dados()
    aluno5 = Aluno('Rogério', idade = 31)  # fee omitted, keyword age
    aluno5.mostra_dados()
    aluno6 = Aluno( idade = 30, nome= 'Vinicius')  # keywords out of order
    aluno6.mostra_dados()
"felipellima83@gmail.com"
] | felipellima83@gmail.com |
6889f8b569293a0d5a19b7bd90753cd841b53e2d | 5b323fd78bb143667aedd64bc5ce17bc90f82370 | /量化投资书/量化投资以Python为工具/ch14/02.py | cc38019e0cd7e0daf38ee2a7614cb501b91e8e74 | [] | no_license | xiaotfeng/Some-thing-interesting-for-me | 3cbd3c70d0631b5687d5a55cac33bbfc7e4044c0 | 4fdb63376fa421f2aa17e3246a44454f62ca342e | refs/heads/master | 2023-02-04T22:04:03.210810 | 2020-12-29T05:36:37 | 2020-12-29T05:36:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import numpy as np
import scipy.stats
import pandas as pd
np.random.binomial(100, 0.5, 20)
np.random.binomial(10, 0.3, 3)
scipy.stats.binom.pmf(20, 100, 0.5)
scipy.stats.binom.pmf(50, 100, 0.5)
dd = scipy.stats.binom.pmf(np.arange(0, 21, 1), 100, 0.5)
dd.sum()
scipy.stats.binom.cdf(20, 100, 0.5)
HSRet300 = pd.read_csv('return300.csv')
ret = HSRet300.iloc[:, 1]
print(ret.head(3))
p = len(ret[ret>0])/len(ret)
print(p)
prob = scipy.stats.binom.pmf(6, 10, p)
print(prob) | [
"395871987@qq.com"
] | 395871987@qq.com |
c43d097db7860899069dbeea9d5aab443e90d32d | 4a48593a04284ef997f377abee8db61d6332c322 | /python/scipy/convolution.py | a36f5bc1c66a64d0984fd54a799186fe02f633df | [
"MIT"
] | permissive | jeremiedecock/snippets | 8feaed5a8d873d67932ef798e16cb6d2c47609f0 | b90a444041c42d176d096fed14852d20d19adaa7 | refs/heads/master | 2023-08-31T04:28:09.302968 | 2023-08-21T07:22:38 | 2023-08-21T07:22:38 | 36,926,494 | 26 | 9 | MIT | 2023-06-06T02:17:44 | 2015-06-05T10:19:09 | Python | UTF-8 | Python | false | false | 286 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import scipy.signal
import numpy as np
import matplotlib.pyplot as plt
# The imput signal x
x = np.zeros(100)
x[30:60] = 1.
# ...
y = np.ones(30)
xc = scipy.signal.convolve(x, y)
plt.plot(x)
plt.plot(xc / 30.)
plt.show()
| [
"jd.jdhp@gmail.com"
] | jd.jdhp@gmail.com |
205164dfbe737ccf177fc019dbe8431eef2978a0 | 7ce5260f92af81063d8fc8d38605eeda335e5660 | /DjangoProjects/services_projects/courses_project/apps/courses/admin.py | fa983f1261f51c5f9bed7b91beff67e2db0e9875 | [] | no_license | MHNorth/Coding-Dojo-Projects | 15ce009f4dcc24a912d224ffa15f1ea07221d482 | e82ef45a91667ab580dc4dd4cbf13195e1e67e9e | refs/heads/master | 2020-03-21T03:45:44.790723 | 2018-10-04T00:15:25 | 2018-10-04T00:15:25 | 138,072,873 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | from django.contrib import admin
from .models import Course
admin.site.register(Course)
| [
"maikanorthrop@gmail.com"
] | maikanorthrop@gmail.com |
e34519fbbd240b89574559ceabb90d807dc27c96 | 67309cbca4ead3623c86647ac7bfaa067b029fdc | /SWEA/Tree/5176_이진탐색.py | f1a4a1749e5c902773c5aafb4225cbc310b9161b | [] | no_license | Jeukoh/OJ | b6df132927ec15ab816fee8681952240b5a69e13 | 182b54554896d9860d5e5d09f8eccc07d99aa8e8 | refs/heads/master | 2023-08-30T19:18:10.812225 | 2021-10-15T09:57:14 | 2021-10-15T09:57:14 | 402,799,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | import math
def inserttree(idx):
global cnt
if idx <= V:
inserttree(2*idx)
tree[idx] = cnt
cnt += 1
inserttree(2*idx+1)
for tc in range(1,int(input())+1):
V = int(input().rstrip())
tree = [[] for _ in range(V+1)]
cnt = 1
inserttree(1)
print(f'#{tc}', tree[1], tree[V//2]) | [
"jeukoh@gmail.com"
] | jeukoh@gmail.com |
d81c9a6c45e667fc3c2e7e5eeb80d0aeb2926124 | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /log-20190927/132.230.102.123-10.21.11.31/1569571753.py | b4d079745312f44675d25424b0cf91732161be53 | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | import functools
import typing
import string
import random
import pytest
## Lösung Teil 1.
def mysum(xs: list) -> int:
    """Return the sum of the numbers in *xs* (0 for an empty list).

    Bug fix: the original signature named the parameter ``zs`` while the
    body summed the undefined name ``xs`` -- a NameError at call time.
    The in-file grading tests also inspect the signature for a parameter
    named ``xs``, so the parameter is renamed to match.
    """
    return sum(xs)
## Lösung Teil 2. (Tests)
def test_2():
assert mysum([1, 2, 3]) == 6
######################################################################
## hidden tests
pytest.main (["-v", "--assert=plain", "-p", "no:cacheprovider"])
from inspect import getfullargspec
mysumargs = getfullargspec(mysum).args
class TestName:
    """Sanity checks: mysum exists and takes a parameter named ``xs``."""
    def test_mysum (self):
        assert mysum
        assert 'xs' in mysumargs

class TestGrades:
    """Auto-grading checks.  The hard-coded ``assert False`` lines appear
    to record criteria (docstring, coverage) this submission did not meet
    -- presumably filled in by the grading harness."""
    def test_docstring_present(self):
        assert False
    def test_typing_present(self):
        assert True
    def test_coverage(self):
        assert False
    def sum_oracle(self, xs:list)->int:
        # Reference implementation to compare mysum against.
        return sum(xs)
    def check_sum (self,xs):
        assert mysum (xs) == self.sum_oracle (xs)
    def test_correctness(self):
        # 100 random lists of length 0..5 with digit elements 0..9.
        for i in range (100):
            l = random.randrange (6)
            xs = [ random.randrange (10) for z in range(l) ]
            self.check_sum (xs)
| [
"lenni.elbe@gmail.com"
] | lenni.elbe@gmail.com |
d87f5492302e50c6f0e7424b27dbb5ec6a5e1d99 | e972d1a5eaf1e82425d0f2ef43147b1a9e817198 | /Examples/Howto/UseFonts/HelloCircleSquare.py | 3157371a905aae8164269e23f04c9e5026cebb41 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | 13-Graphics/PageBot | 86b2ba5f051b2ffaa16ee26da4747b790481ec46 | 8d815cf3fa20eb891d0e5c11253376dbe77b71e6 | refs/heads/master | 2021-09-06T21:36:58.986635 | 2018-02-11T22:21:12 | 2018-02-11T22:21:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,061 | py | #!/usr/bin/env python
# -----------------------------------------------------------------------------
#
# P A G E B O T
#
# Copyright (c) 2016+ Buro Petr van Blokland + Claudia Mens & Font Bureau
# www.pagebot.io
# Licensed under MIT conditions
#
# Supporting usage of DrawBot, www.drawbot.com
# Supporting usage of Flat, https://github.com/xxyxyz/flat
# -----------------------------------------------------------------------------
#
from random import random
from pagebot.contexts import defaultContext as c

# Render 20 pages; each page scatters 50 random translucent shapes
# (ovals, squares or "Hello world" labels) across a 1000x1000 canvas.
for p in range(20):
    c.newPage(1000, 1000)
    for n in range(50):
        # Random red/blue mix, 50-70% opacity.
        c.fill(random(), 0, random(), 0.5 + random()*0.2)
        ch = random()  # shape selector: 20% oval, 20% rect, 60% text
        x = 20 + random()*800
        y = 20 + random()*800
        if ch < 0.2:
            c.oval(x, y, 80, 80 )
        elif ch < 0.4:
            c.rect(x, y, 80, 80 )
        else:
            fs = c.newString('Hello world on %d,%d' % (x, y),
                             style=dict(fontSize=24))
            c.text(fs, (x, y))
c.saveImage('_export/OurNiceDrawing.pdf')
| [
"fsanches@metamaquina.com.br"
] | fsanches@metamaquina.com.br |
7d687c8408d9304dd798befbb08fb879e847d396 | 62bbfb6c50bba16304202aea96d1de4990f95e04 | /dependencies/pulumi_aws/ec2/traffic_mirror_session.py | c902951d96335eab2535b48a7a1387fd8b3543a7 | [] | no_license | adriell/lambda-autoservico-storagegateway | b40b8717c8de076e61bbd422461c7d624a0d2273 | f6e3dea61b004b73943a5438c658d3f019f106f7 | refs/heads/main | 2023-03-16T14:41:16.821675 | 2021-03-11T03:30:33 | 2021-03-11T03:30:33 | 345,865,704 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,423 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = ['TrafficMirrorSession']
class TrafficMirrorSession(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
network_interface_id: Optional[pulumi.Input[str]] = None,
packet_length: Optional[pulumi.Input[int]] = None,
session_number: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
traffic_mirror_filter_id: Optional[pulumi.Input[str]] = None,
traffic_mirror_target_id: Optional[pulumi.Input[str]] = None,
virtual_network_id: Optional[pulumi.Input[int]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Provides an Traffic mirror session.\
Read [limits and considerations](https://docs.aws.amazon.com/vpc/latest/mirroring/traffic-mirroring-considerations.html) for traffic mirroring
## Example Usage
To create a basic traffic mirror session
```python
import pulumi
import pulumi_aws as aws
filter = aws.ec2.TrafficMirrorFilter("filter",
description="traffic mirror filter - example",
network_services=["amazon-dns"])
target = aws.ec2.TrafficMirrorTarget("target", network_load_balancer_arn=aws_lb["lb"]["arn"])
session = aws.ec2.TrafficMirrorSession("session",
description="traffic mirror session - example",
network_interface_id=aws_instance["test"]["primary_network_interface_id"],
traffic_mirror_filter_id=filter.id,
traffic_mirror_target_id=target.id)
```
## Import
Traffic mirror sessions can be imported using the `id`, e.g.
```sh
$ pulumi import aws:ec2/trafficMirrorSession:TrafficMirrorSession session tms-0d8aa3ca35897b82e
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A description of the traffic mirror session.
:param pulumi.Input[str] network_interface_id: ID of the source network interface. Not all network interfaces are eligible as mirror sources. On EC2 instances only nitro based instances support mirroring.
:param pulumi.Input[int] packet_length: The number of bytes in each packet to mirror. These are bytes after the VXLAN header. Do not specify this parameter when you want to mirror the entire packet. To mirror a subset of the packet, set this to the length (in bytes) that you want to mirror.
:param pulumi.Input[int] session_number: - The session number determines the order in which sessions are evaluated when an interface is used by multiple sessions. The first session with a matching filter is the one that mirrors the packets.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
:param pulumi.Input[str] traffic_mirror_filter_id: ID of the traffic mirror filter to be used
:param pulumi.Input[str] traffic_mirror_target_id: ID of the traffic mirror target to be used
:param pulumi.Input[int] virtual_network_id: - The VXLAN ID for the Traffic Mirror session. For more information about the VXLAN protocol, see RFC 7348. If you do not specify a VirtualNetworkId, an account-wide unique id is chosen at random.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['description'] = description
if network_interface_id is None and not opts.urn:
raise TypeError("Missing required property 'network_interface_id'")
__props__['network_interface_id'] = network_interface_id
__props__['packet_length'] = packet_length
if session_number is None and not opts.urn:
raise TypeError("Missing required property 'session_number'")
__props__['session_number'] = session_number
__props__['tags'] = tags
if traffic_mirror_filter_id is None and not opts.urn:
raise TypeError("Missing required property 'traffic_mirror_filter_id'")
__props__['traffic_mirror_filter_id'] = traffic_mirror_filter_id
if traffic_mirror_target_id is None and not opts.urn:
raise TypeError("Missing required property 'traffic_mirror_target_id'")
__props__['traffic_mirror_target_id'] = traffic_mirror_target_id
__props__['virtual_network_id'] = virtual_network_id
__props__['arn'] = None
super(TrafficMirrorSession, __self__).__init__(
'aws:ec2/trafficMirrorSession:TrafficMirrorSession',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
network_interface_id: Optional[pulumi.Input[str]] = None,
packet_length: Optional[pulumi.Input[int]] = None,
session_number: Optional[pulumi.Input[int]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
traffic_mirror_filter_id: Optional[pulumi.Input[str]] = None,
traffic_mirror_target_id: Optional[pulumi.Input[str]] = None,
virtual_network_id: Optional[pulumi.Input[int]] = None) -> 'TrafficMirrorSession':
"""
Get an existing TrafficMirrorSession resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: The ARN of the traffic mirror session.
:param pulumi.Input[str] description: A description of the traffic mirror session.
:param pulumi.Input[str] network_interface_id: ID of the source network interface. Not all network interfaces are eligible as mirror sources. On EC2 instances only nitro based instances support mirroring.
:param pulumi.Input[int] packet_length: The number of bytes in each packet to mirror. These are bytes after the VXLAN header. Do not specify this parameter when you want to mirror the entire packet. To mirror a subset of the packet, set this to the length (in bytes) that you want to mirror.
:param pulumi.Input[int] session_number: - The session number determines the order in which sessions are evaluated when an interface is used by multiple sessions. The first session with a matching filter is the one that mirrors the packets.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
:param pulumi.Input[str] traffic_mirror_filter_id: ID of the traffic mirror filter to be used
:param pulumi.Input[str] traffic_mirror_target_id: ID of the traffic mirror target to be used
:param pulumi.Input[int] virtual_network_id: - The VXLAN ID for the Traffic Mirror session. For more information about the VXLAN protocol, see RFC 7348. If you do not specify a VirtualNetworkId, an account-wide unique id is chosen at random.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["arn"] = arn
__props__["description"] = description
__props__["network_interface_id"] = network_interface_id
__props__["packet_length"] = packet_length
__props__["session_number"] = session_number
__props__["tags"] = tags
__props__["traffic_mirror_filter_id"] = traffic_mirror_filter_id
__props__["traffic_mirror_target_id"] = traffic_mirror_target_id
__props__["virtual_network_id"] = virtual_network_id
return TrafficMirrorSession(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
The ARN of the traffic mirror session.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A description of the traffic mirror session.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="networkInterfaceId")
def network_interface_id(self) -> pulumi.Output[str]:
"""
ID of the source network interface. Not all network interfaces are eligible as mirror sources. On EC2 instances only nitro based instances support mirroring.
"""
return pulumi.get(self, "network_interface_id")
@property
@pulumi.getter(name="packetLength")
def packet_length(self) -> pulumi.Output[Optional[int]]:
"""
The number of bytes in each packet to mirror. These are bytes after the VXLAN header. Do not specify this parameter when you want to mirror the entire packet. To mirror a subset of the packet, set this to the length (in bytes) that you want to mirror.
"""
return pulumi.get(self, "packet_length")
@property
@pulumi.getter(name="sessionNumber")
def session_number(self) -> pulumi.Output[int]:
"""
- The session number determines the order in which sessions are evaluated when an interface is used by multiple sessions. The first session with a matching filter is the one that mirrors the packets.
"""
return pulumi.get(self, "session_number")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Key-value map of resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trafficMirrorFilterId")
def traffic_mirror_filter_id(self) -> pulumi.Output[str]:
"""
ID of the traffic mirror filter to be used
"""
return pulumi.get(self, "traffic_mirror_filter_id")
@property
@pulumi.getter(name="trafficMirrorTargetId")
def traffic_mirror_target_id(self) -> pulumi.Output[str]:
"""
ID of the traffic mirror target to be used
"""
return pulumi.get(self, "traffic_mirror_target_id")
@property
@pulumi.getter(name="virtualNetworkId")
def virtual_network_id(self) -> pulumi.Output[int]:
"""
- The VXLAN ID for the Traffic Mirror session. For more information about the VXLAN protocol, see RFC 7348. If you do not specify a VirtualNetworkId, an account-wide unique id is chosen at random.
"""
return pulumi.get(self, "virtual_network_id")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| [
"adriel@infoway-pi.com.br"
] | adriel@infoway-pi.com.br |
5f9e636c6010925d82f15eace74cd8c7a403412f | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnsargent.py | e73661a1f985eccfa118f44df317869c58f65351 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 202 | py | ii = [('WilbRLW4.py', 7), ('WilbRLW5.py', 4), ('WilbRLW2.py', 3), ('CarlTFR.py', 3), ('WadeJEB.py', 4), ('MereHHB3.py', 1), ('MereHHB.py', 1), ('MackCNH2.py', 1), ('JacoWHI2.py', 1), ('WilbRLW3.py', 1)] | [
"varunwachaspati@gmail.com"
] | varunwachaspati@gmail.com |
3ce59fe285a3d1b8c667c707840ca6e57b7e6946 | 8a53534be41fe26e43b492876fa9cf9b7d4f17d7 | /blob/experiment.py | c156a7e9a9d66466f50078382c49808816a42cf6 | [] | no_license | etijskens/hpc-tnt-1.2 | 244af207b57e9ddb4eebaea2d450f95e5dc1c536 | 691e38d899a2003a54f090581e3317ec34eb26ee | refs/heads/master | 2021-06-16T07:33:13.843385 | 2017-03-27T11:14:58 | 2017-03-27T11:14:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21 | py | ../pyMD/experiment.py | [
"engelbert.tijskens@uantwerpen.be"
] | engelbert.tijskens@uantwerpen.be |
27a23d8d90b5c1ff97590888e4cb48d90f8da8d7 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/hamming/b238090e4c274187a6c3f8eafb85f032.py | 151f1fa976cb89eac830a5420ad3574fe92a6af1 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 130 | py | def distance(strand1, strand2):
dist = 0
for i in range(len(strand1)):
if strand1[i] != strand2[i]:
dist += 1
return dist
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
dcf702d039bf020112bc112189290a6a56115097 | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/insights/v20160301/get_alert_rule.py | e2bb242205dcdb83a87e1935f3e3f51f683543d5 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 5,814 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetAlertRuleResult',
'AwaitableGetAlertRuleResult',
'get_alert_rule',
]
@pulumi.output_type
class GetAlertRuleResult:
"""
The alert rule resource.
"""
def __init__(__self__, actions=None, condition=None, description=None, is_enabled=None, last_updated_time=None, location=None, name=None, tags=None, type=None):
if actions and not isinstance(actions, list):
raise TypeError("Expected argument 'actions' to be a list")
pulumi.set(__self__, "actions", actions)
if condition and not isinstance(condition, dict):
raise TypeError("Expected argument 'condition' to be a dict")
pulumi.set(__self__, "condition", condition)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if is_enabled and not isinstance(is_enabled, bool):
raise TypeError("Expected argument 'is_enabled' to be a bool")
pulumi.set(__self__, "is_enabled", is_enabled)
if last_updated_time and not isinstance(last_updated_time, str):
raise TypeError("Expected argument 'last_updated_time' to be a str")
pulumi.set(__self__, "last_updated_time", last_updated_time)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def actions(self) -> Optional[Sequence[Any]]:
"""
the array of actions that are performed when the alert rule becomes active, and when an alert condition is resolved.
"""
return pulumi.get(self, "actions")
@property
@pulumi.getter
def condition(self) -> Any:
"""
the condition that results in the alert rule being activated.
"""
return pulumi.get(self, "condition")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
the description of the alert rule that will be included in the alert email.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="isEnabled")
def is_enabled(self) -> bool:
"""
the flag that indicates whether the alert rule is enabled.
"""
return pulumi.get(self, "is_enabled")
@property
@pulumi.getter(name="lastUpdatedTime")
def last_updated_time(self) -> str:
"""
Last time the rule was updated in ISO8601 format.
"""
return pulumi.get(self, "last_updated_time")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Azure resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Azure resource type
"""
return pulumi.get(self, "type")
class AwaitableGetAlertRuleResult(GetAlertRuleResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAlertRuleResult(
actions=self.actions,
condition=self.condition,
description=self.description,
is_enabled=self.is_enabled,
last_updated_time=self.last_updated_time,
location=self.location,
name=self.name,
tags=self.tags,
type=self.type)
def get_alert_rule(resource_group_name: Optional[str] = None,
rule_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAlertRuleResult:
"""
Use this data source to access information about an existing resource.
:param str resource_group_name: The name of the resource group.
:param str rule_name: The name of the rule.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['ruleName'] = rule_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:insights/v20160301:getAlertRule', __args__, opts=opts, typ=GetAlertRuleResult).value
return AwaitableGetAlertRuleResult(
actions=__ret__.actions,
condition=__ret__.condition,
description=__ret__.description,
is_enabled=__ret__.is_enabled,
last_updated_time=__ret__.last_updated_time,
location=__ret__.location,
name=__ret__.name,
tags=__ret__.tags,
type=__ret__.type)
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
6d3b4f5751fa19cc815f562e1a5788314b5b42fa | 940d7b93fb27e8eead9b6e52bc5c7444666744dd | /python/src/Lib/test/test_sunaudiodev.py | 2c5e7d4a6e0e0efd99a539e114dcbb37ed1addd3 | [
"Apache-2.0",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"Python-2.0",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-python-cwi"
] | permissive | pilotx45/sl4a | d446531d310cc17d93f24aab7271a0813e8f628d | 150e3e46b5103a9b9a391034ef3fbc5bd5160d0f | refs/heads/master | 2022-03-24T19:48:30.340479 | 2022-03-08T16:23:58 | 2022-03-08T16:23:58 | 277,016,574 | 1 | 0 | Apache-2.0 | 2022-03-08T16:23:59 | 2020-07-04T01:25:36 | null | UTF-8 | Python | false | false | 678 | py | from test.test_support import findfile, TestFailed, TestSkipped, import_module
sunaudiodev = import_module('sunaudiodev', deprecated=True)
import os
try:
audiodev = os.environ["AUDIODEV"]
except KeyError:
audiodev = "/dev/audio"
if not os.path.exists(audiodev):
raise TestSkipped("no audio device found!")
def play_sound_file(path):
fp = open(path, 'r')
data = fp.read()
fp.close()
try:
a = sunaudiodev.open('w')
except sunaudiodev.error, msg:
raise TestFailed, msg
else:
a.write(data)
a.close()
def test_main():
play_sound_file(findfile('audiotest.au'))
if __name__ == '__main__':
test_main()
| [
"damonkohler@gmail.com"
] | damonkohler@gmail.com |
39b54d8b58c400cac04a9cbea46ae5e0a1b4b85f | 204693758329743d0637c53dbf76a9221620fb4e | /src/products/migrations/0001_initial.py | e4d40ef262bb622d02e388a92278ed77f4a07619 | [] | no_license | dipayandutta/djangoimproveo | 122741224795e58f970439e40112871f331258c2 | 20a19bb61eaa88e6134437f582b0f987a71cd899 | refs/heads/master | 2022-11-11T09:53:21.889847 | 2020-06-29T14:59:49 | 2020-06-29T14:59:49 | 273,153,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | # Generated by Django 3.0 on 2020-06-18 16:05
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('short_code', models.CharField(max_length=20)),
('updated', models.DateTimeField(auto_now=True)),
('created', models.DateTimeField(auto_now_add=True)),
],
),
]
| [
"inbox.dipayan@gmail.com"
] | inbox.dipayan@gmail.com |
6c6fab78b18c5a33216e8a8aa5a9a6893756c6b3 | 0aa7b75531b629f4101fe7b152023e26e711d5cf | /mootdx/logger.py | 62a457b094133a1ae97a9ea502d5312a51a4a9cd | [
"MIT"
] | permissive | ZuoVJ/mootdx-1 | 103adb7a8a10254038cbfafde1671a050e02adc2 | b1a1eb7c6d371894225855c76b6a3048d0bdc405 | refs/heads/master | 2023-06-20T04:36:53.603888 | 2021-07-14T15:48:53 | 2021-07-14T15:48:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | import sys
from loguru import logger
log = logger
log.remove()
log.add(sys.stderr, level='INFO')
| [
"ubuntu@localhost.localdomain"
] | ubuntu@localhost.localdomain |
a2725503da8b2935c112ff3ac71d8d6368b1be50 | fb9371054b6ce236c6b202cc51c6bc98a3483060 | /python_20190615/Python网络爬虫与信息提取/BaiduStocks/BaiduStocks/spiders/stocks.py | 02363f2ae2e17bf1cd272dc9bcb77a3e83c09fcb | [] | no_license | skymoonfp/python_learning | 621d5e72c5b356fd507e4a00c463ea8d565588fb | 1e8340303809d8c7c3af3201084b158c1784f22e | refs/heads/master | 2020-06-04T10:07:27.009212 | 2019-06-19T13:52:44 | 2019-06-19T13:52:44 | 191,978,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,796 | py | # -*- coding: utf-8 -*-
import random
import re
import scrapy
class StocksSpider(scrapy.Spider):
name = 'stocks'
allowed_domains = ['baidu.com']
start_urls = ['http://quote.eastmoney.com/stock_list.html']
user_agent_list = [
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1"
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
]
def parse(self, response):
for href in response.css('a::attr(href)').extract():
try:
stock = re.findall(r"[s][hz]\d{6}", href)[0]
url = "https://gupiao.baidu.com/stock/" + stock + ".html"
ua = random.choice(self.user_agent_list) # 随机抽取User-Agent
headers = {
'Accept-Encoding': 'gzip, deflate, sdch, br',
'Accept-Language': 'zh-CN,zh;q=0.8',
'Connection': 'keep-alive',
'Referer': 'https://gupiao.baidu.com/',
'User-Agent': ua
}
yield scrapy.Request(url, callback=self.parse_stock, headers=headers)
except:
continue
def parse_stock(self, response):
infoDict = dict()
stockInfo = response.css(".stock-bets")
name = stockInfo.css(".bets-name").extract()[0]
keyList = stockInfo.css("dt").extract()
valueList = stockInfo.css("dd").extract()
for i in range(len(keyList)):
key = re.findall(r'>.*</dt>', keyList[i])[0][1:-5]
try:
val = re.findall(r'\d+\.?.*</dd>', valueList[i])[0][0:-5]
except:
val = "--"
infoDict[key] = val
infoDict.update(
{"股票名称": re.findall('\s.*\(', name)[0].split()[0] + re.findall('\>.*\<', name)[0][1:-1]}
)
yield infoDict
| [
"954918@qq.com"
] | 954918@qq.com |
602cd68614e126e6881b023d30d8fa3219b9db34 | 5963c12367490ffc01c9905c028d1d5480078dec | /tests/components/tado/test_config_flow.py | 77656f1c81fd066ab3f22f5e1d614987fc455bd0 | [
"Apache-2.0"
] | permissive | BenWoodford/home-assistant | eb03f73165d11935e8d6a9756272014267d7d66a | 2fee32fce03bc49e86cf2e7b741a15621a97cce5 | refs/heads/dev | 2023-03-05T06:13:30.354545 | 2021-07-18T09:51:53 | 2021-07-18T09:51:53 | 117,122,037 | 11 | 6 | Apache-2.0 | 2023-02-22T06:16:51 | 2018-01-11T16:10:19 | Python | UTF-8 | Python | false | false | 4,912 | py | """Test the Tado config flow."""
from unittest.mock import MagicMock, patch
import requests
from homeassistant import config_entries, setup
from homeassistant.components.tado.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.common import MockConfigEntry
def _get_mock_tado_api(getMe=None):
mock_tado = MagicMock()
if isinstance(getMe, Exception):
type(mock_tado).getMe = MagicMock(side_effect=getMe)
else:
type(mock_tado).getMe = MagicMock(return_value=getMe)
return mock_tado
async def test_form(hass):
"""Test we can setup though the user path."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mock_tado_api = _get_mock_tado_api(getMe={"homes": [{"id": 1, "name": "myhome"}]})
with patch(
"homeassistant.components.tado.config_flow.Tado",
return_value=mock_tado_api,
), patch(
"homeassistant.components.tado.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "myhome"
assert result2["data"] == {
"username": "test-username",
"password": "test-password",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
response_mock = MagicMock()
type(response_mock).status_code = 401
mock_tado_api = _get_mock_tado_api(getMe=requests.HTTPError(response=response_mock))
with patch(
"homeassistant.components.tado.config_flow.Tado",
return_value=mock_tado_api,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
response_mock = MagicMock()
type(response_mock).status_code = 500
mock_tado_api = _get_mock_tado_api(getMe=requests.HTTPError(response=response_mock))
with patch(
"homeassistant.components.tado.config_flow.Tado",
return_value=mock_tado_api,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_no_homes(hass):
"""Test we handle no homes error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mock_tado_api = _get_mock_tado_api(getMe={"homes": []})
with patch(
"homeassistant.components.tado.config_flow.Tado",
return_value=mock_tado_api,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "test-username", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "no_homes"}
async def test_form_homekit(hass):
"""Test that we abort from homekit if tado is already setup."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"properties": {"id": "AA:BB:CC:DD:EE:FF"}},
)
assert result["type"] == "form"
assert result["errors"] == {}
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == "AA:BB:CC:DD:EE:FF"
entry = MockConfigEntry(
domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"properties": {"id": "AA:BB:CC:DD:EE:FF"}},
)
assert result["type"] == "abort"
| [
"noreply@github.com"
] | BenWoodford.noreply@github.com |
7ecd0ab348fff8b275e8175146fd946a9f3c5919 | 3e5150447a2c90c26354500f1df9660ef35c990b | /std_modules/plistlib/.readPlist() | 91b219cacac94b718cb81c036acd8ba166b04078 | [] | no_license | kilirobbs/python-fiddle | 8d6417ebff9d6530e713b6724f8416da86c24c65 | 9c2f320bd2391433288cd4971c2993f1dd5ff464 | refs/heads/master | 2016-09-11T03:56:39.808358 | 2013-03-19T19:26:19 | 2013-03-19T19:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 423 | #!/usr/bin/env python
from os.path import expanduser
from plistlib import readPlist
plist="~/Library/LaunchAgents/org.lighttpd.plist"
path=expanduser(plist)
pl = readPlist(path)
print pl.__class__,pl
print pl.keys()
print pl.Label
print "Label" in pl
print "KeepAlive" in pl
if "Program" in pl:
print pl.Program
if "ProgramArguments" in pl:
print pl.ProgramArguments
# print pl.programarguments # AttributeError | [
"cancerhermit@gmail.com"
] | cancerhermit@gmail.com | |
a888662066106e82fb29a9eaaa36a2b8addc126c | eba3e4a3935d6422d1ed85aaf69337f5ba15fc74 | /transaction/transaction/tests/test_transaction.py | a2aeed957f46647d8746dd2ddbaf0d0cd564b1ad | [
"ZPL-2.1"
] | permissive | arianepaola/tg2jython | 2ae74250ca43b021323ef0951a9763712c2eb3d6 | 971b9c3eb8ca941d1797bb4b458f275bdca5a2cb | refs/heads/master | 2021-01-21T12:07:48.815690 | 2009-03-27T02:38:11 | 2009-03-27T02:38:11 | 160,242 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,764 | py | ##############################################################################
#
# Copyright (c) 2001, 2002, 2005 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Test transaction behavior for variety of cases.
I wrote these unittests to investigate some odd transaction
behavior when doing unittests of integrating non sub transaction
aware objects, and to insure proper txn behavior. these
tests test the transaction system independent of the rest of the
zodb.
you can see the method calls to a jar by passing the
keyword arg tracing to the modify method of a dataobject.
the value of the arg is a prefix used for tracing print calls
to that objects jar.
the number of times a jar method was called can be inspected
by looking at an attribute of the jar that is the method
name prefixed with a c (count/check).
i've included some tracing examples for tests that i thought
were illuminating as doc strings below.
TODO
add in tests for objects which are modified multiple times,
for example an object that gets modified in multiple sub txns.
$Id: test_transaction.py 81644 2007-11-09 16:27:32Z chrism $
"""
import struct
import unittest
import warnings
import transaction
from transaction.tests.warnhook import WarningsHook
_ADDRESS_MASK = 256 ** struct.calcsize('P')
def positive_id(obj):
"""Return id(obj) as a non-negative integer."""
result = id(obj)
if result < 0:
result += _ADDRESS_MASK
assert result > 0
return result
class TransactionTests(unittest.TestCase):
def setUp(self):
mgr = self.transaction_manager = transaction.TransactionManager()
self.sub1 = DataObject(mgr)
self.sub2 = DataObject(mgr)
self.sub3 = DataObject(mgr)
self.nosub1 = DataObject(mgr, nost=1)
# basic tests with two sub trans jars
# really we only need one, so tests for
# sub1 should identical to tests for sub2
def testTransactionCommit(self):
self.sub1.modify()
self.sub2.modify()
self.transaction_manager.commit()
assert self.sub1._p_jar.ccommit_sub == 0
assert self.sub1._p_jar.ctpc_finish == 1
def testTransactionAbort(self):
self.sub1.modify()
self.sub2.modify()
self.transaction_manager.abort()
assert self.sub2._p_jar.cabort == 1
def testTransactionNote(self):
t = self.transaction_manager.get()
t.note('This is a note.')
self.assertEqual(t.description, 'This is a note.')
t.note('Another.')
self.assertEqual(t.description, 'This is a note.\n\nAnother.')
t.abort()
# repeat adding in a nonsub trans jars
def testNSJTransactionCommit(self):
self.nosub1.modify()
self.transaction_manager.commit()
assert self.nosub1._p_jar.ctpc_finish == 1
def testNSJTransactionAbort(self):
self.nosub1.modify()
self.transaction_manager.abort()
assert self.nosub1._p_jar.ctpc_finish == 0
assert self.nosub1._p_jar.cabort == 1
### Failure Mode Tests
#
# ok now we do some more interesting
# tests that check the implementations
# error handling by throwing errors from
# various jar methods
###
# first the recoverable errors
def testExceptionInAbort(self):
self.sub1._p_jar = BasicJar(errors='abort')
self.nosub1.modify()
self.sub1.modify(nojar=1)
self.sub2.modify()
try:
self.transaction_manager.abort()
except TestTxnException: pass
assert self.nosub1._p_jar.cabort == 1
assert self.sub2._p_jar.cabort == 1
def testExceptionInCommit(self):
self.sub1._p_jar = BasicJar(errors='commit')
self.nosub1.modify()
self.sub1.modify(nojar=1)
try:
self.transaction_manager.commit()
except TestTxnException: pass
assert self.nosub1._p_jar.ctpc_finish == 0
assert self.nosub1._p_jar.ccommit == 1
assert self.nosub1._p_jar.ctpc_abort == 1
def testExceptionInTpcVote(self):
self.sub1._p_jar = BasicJar(errors='tpc_vote')
self.nosub1.modify()
self.sub1.modify(nojar=1)
try:
self.transaction_manager.commit()
except TestTxnException: pass
assert self.nosub1._p_jar.ctpc_finish == 0
assert self.nosub1._p_jar.ccommit == 1
assert self.nosub1._p_jar.ctpc_abort == 1
assert self.sub1._p_jar.ctpc_abort == 1
def testExceptionInTpcBegin(self):
"""
ok this test reveals a bug in the TM.py
as the nosub tpc_abort there is ignored.
nosub calling method tpc_begin
nosub calling method commit
sub calling method tpc_begin
sub calling method abort
sub calling method tpc_abort
nosub calling method tpc_abort
"""
self.sub1._p_jar = BasicJar(errors='tpc_begin')
self.nosub1.modify()
self.sub1.modify(nojar=1)
try:
self.transaction_manager.commit()
except TestTxnException:
pass
assert self.nosub1._p_jar.ctpc_abort == 1
assert self.sub1._p_jar.ctpc_abort == 1
def testExceptionInTpcAbort(self):
self.sub1._p_jar = BasicJar(errors=('tpc_abort', 'tpc_vote'))
self.nosub1.modify()
self.sub1.modify(nojar=1)
try:
self.transaction_manager.commit()
except TestTxnException:
pass
assert self.nosub1._p_jar.ctpc_abort == 1
# last test, check the hosing mechanism
## def testHoserStoppage(self):
## # It's hard to test the "hosed" state of the database, where
## # hosed means that a failure occurred in the second phase of
## # the two phase commit. It's hard because the database can
## # recover from such an error if it occurs during the very first
## # tpc_finish() call of the second phase.
## for obj in self.sub1, self.sub2:
## j = HoserJar(errors='tpc_finish')
## j.reset()
## obj._p_jar = j
## obj.modify(nojar=1)
## try:
## transaction.commit()
## except TestTxnException:
## pass
## self.assert_(Transaction.hosed)
## self.sub2.modify()
## try:
## transaction.commit()
## except Transaction.POSException.TransactionError:
## pass
## else:
## self.fail("Hosed Application didn't stop commits")
class DataObject:
    """Minimal stand-in for a persistent object.

    modify() simulates a change by (optionally) creating a BasicJar and
    joining it to the manager's current transaction.
    """

    def __init__(self, transaction_manager, nost=0):
        self.transaction_manager = transaction_manager
        self.nost = nost
        self._p_jar = None

    def modify(self, nojar=0, tracing=0):
        """Join this object's jar to the current transaction.

        nojar -- if true, reuse the existing jar instead of creating one
        tracing -- passed through to BasicJar to enable call tracing
        """
        if not nojar:
            # Both branches of the old `nost` check constructed the identical
            # jar, so the redundant conditional was collapsed; `nost` is kept
            # as an attribute for interface compatibility with callers.
            self._p_jar = BasicJar(tracing=tracing)
        self.transaction_manager.get().join(self._p_jar)
class TestTxnException(Exception):
    """Raised by the fake jars below to simulate failures during commit."""
class BasicJar:
    """Fake data manager that counts calls to each two-phase-commit hook.

    Each ``c<name>`` attribute counts invocations of the corresponding
    method.  Any method whose name appears in ``errors`` raises
    TestTxnException when called.
    """

    def __init__(self, errors=(), tracing=0):
        # Allow a single method name to be passed as a plain string.
        if not isinstance(errors, tuple):
            errors = errors,
        self.errors = errors
        self.tracing = tracing
        self.cabort = 0
        self.ccommit = 0
        self.ctpc_begin = 0
        self.ctpc_abort = 0
        self.ctpc_vote = 0
        self.ctpc_finish = 0
        self.cabort_sub = 0
        self.ccommit_sub = 0

    def __repr__(self):
        return "<%s %X %s>" % (self.__class__.__name__,
                               positive_id(self),
                               self.errors)

    def sortKey(self):
        # All these jars use the same sort key, and Python's list.sort()
        # is stable, so jars are committed in the order they joined the
        # transaction.
        return self.__class__.__name__

    def check(self, method):
        """Trace the call, and raise TestTxnException if it is a planned error."""
        if self.tracing:
            # Parenthesized print keeps this line valid under both py2 and py3.
            print('%s calling method %s' % (str(self.tracing), method))
        if method in self.errors:
            raise TestTxnException("error %s" % method)

    ## basic jar txn interface
    def abort(self, *args):
        self.check('abort')
        self.cabort += 1

    def commit(self, *args):
        self.check('commit')
        self.ccommit += 1

    def tpc_begin(self, txn, sub=0):
        self.check('tpc_begin')
        self.ctpc_begin += 1

    def tpc_vote(self, *args):
        self.check('tpc_vote')
        self.ctpc_vote += 1

    def tpc_abort(self, *args):
        self.check('tpc_abort')
        self.ctpc_abort += 1

    def tpc_finish(self, *args):
        self.check('tpc_finish')
        self.ctpc_finish += 1
class HoserJar(BasicJar):
    # The HoserJars coordinate their actions via the class variable
    # `committed`.  The check() method will only raise its exception
    # once committed > 0, i.e. after at least one jar has already
    # finished the second phase of commit.

    committed = 0

    def reset(self):
        # Calling reset() on any instance resets the shared class variable.
        HoserJar.committed = 0

    def check(self, method):
        # Behave normally until some jar has committed; fail afterwards.
        if HoserJar.committed > 0:
            BasicJar.check(self, method)

    def tpc_finish(self, *args):
        self.check('tpc_finish')
        self.ctpc_finish += 1
        HoserJar.committed += 1
def test_join():
    """White-box test of the join method

    The join method is provided for "backward-compatibility" with ZODB 4
    data managers.

    The argument to join must be a zodb4 data manager,
    transaction.interfaces.IDataManager.

    >>> from transaction.tests.sampledm import DataManager
    >>> from transaction._transaction import DataManagerAdapter
    >>> t = transaction.Transaction()
    >>> dm = DataManager()
    >>> t.join(dm)

    The end result is that a data manager adapter is one of the
    transaction's objects:

    >>> isinstance(t._resources[0], DataManagerAdapter)
    True
    >>> t._resources[0]._datamanager is dm
    True
    """
def hook():
    """No-op placeholder hook; the doctests below define their own hooks."""
def test_addBeforeCommitHook():
    """Test addBeforeCommitHook.

    Let's define a hook to call, and a way to see that it was called.

    >>> log = []
    >>> def reset_log():
    ...     del log[:]
    >>> def hook(arg='no_arg', kw1='no_kw1', kw2='no_kw2'):
    ...     log.append("arg %r kw1 %r kw2 %r" % (arg, kw1, kw2))

    Now register the hook with a transaction.

    >>> import transaction
    >>> t = transaction.begin()
    >>> t.addBeforeCommitHook(hook, '1')

    We can see that the hook is indeed registered.

    >>> [(hook.func_name, args, kws)
    ... for hook, args, kws in t.getBeforeCommitHooks()]
    [('hook', ('1',), {})]

    When transaction commit starts, the hook is called, with its
    arguments.

    >>> log
    []
    >>> t.commit()
    >>> log
    ["arg '1' kw1 'no_kw1' kw2 'no_kw2'"]
    >>> reset_log()

    A hook's registration is consumed whenever the hook is called.  Since
    the hook above was called, it's no longer registered:

    >>> len(list(t.getBeforeCommitHooks()))
    0
    >>> transaction.commit()
    >>> log
    []

    The hook is only called for a full commit, not for a savepoint.

    >>> t = transaction.begin()
    >>> t.addBeforeCommitHook(hook, 'A', dict(kw1='B'))
    >>> dummy = t.savepoint()
    >>> log
    []
    >>> t.commit()
    >>> log
    ["arg 'A' kw1 'B' kw2 'no_kw2'"]
    >>> reset_log()

    If a transaction is aborted, no hook is called.

    >>> t = transaction.begin()
    >>> t.addBeforeCommitHook(hook, ["OOPS!"])
    >>> transaction.abort()
    >>> log
    []
    >>> transaction.commit()
    >>> log
    []

    The hook is called before the commit does anything, so even if the
    commit fails the hook will have been called.  To provoke failures in
    commit, we'll add failing resource manager to the transaction.

    >>> class CommitFailure(Exception):
    ...     pass
    >>> class FailingDataManager:
    ...     def tpc_begin(self, txn, sub=False):
    ...         raise CommitFailure
    ...     def abort(self, txn):
    ...         pass
    >>> t = transaction.begin()
    >>> t.join(FailingDataManager())
    >>> t.addBeforeCommitHook(hook, '2')
    >>> t.commit()
    Traceback (most recent call last):
    ...
    CommitFailure
    >>> log
    ["arg '2' kw1 'no_kw1' kw2 'no_kw2'"]
    >>> reset_log()

    Let's register several hooks.

    >>> t = transaction.begin()
    >>> t.addBeforeCommitHook(hook, '4', dict(kw1='4.1'))
    >>> t.addBeforeCommitHook(hook, '5', dict(kw2='5.2'))

    They are returned in the same order by getBeforeCommitHooks.

    >>> [(hook.func_name, args, kws) #doctest: +NORMALIZE_WHITESPACE
    ... for hook, args, kws in t.getBeforeCommitHooks()]
    [('hook', ('4',), {'kw1': '4.1'}),
    ('hook', ('5',), {'kw2': '5.2'})]

    And commit also calls them in this order.

    >>> t.commit()
    >>> len(log)
    2
    >>> log #doctest: +NORMALIZE_WHITESPACE
    ["arg '4' kw1 '4.1' kw2 'no_kw2'",
    "arg '5' kw1 'no_kw1' kw2 '5.2'"]
    >>> reset_log()

    While executing, a hook can itself add more hooks, and they will all
    be called before the real commit starts.

    >>> def recurse(txn, arg):
    ...     log.append('rec' + str(arg))
    ...     if arg:
    ...         txn.addBeforeCommitHook(hook, '-')
    ...         txn.addBeforeCommitHook(recurse, (txn, arg-1))
    >>> t = transaction.begin()
    >>> t.addBeforeCommitHook(recurse, (t, 3))
    >>> transaction.commit()
    >>> log #doctest: +NORMALIZE_WHITESPACE
    ['rec3',
    "arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec2',
    "arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec1',
    "arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec0']
    >>> reset_log()
    """
    # Intentionally empty: the docstring above is executed by DocTestSuite().
def test_addAfterCommitHook():
    """Test addAfterCommitHook.

    Let's define a hook to call, and a way to see that it was called.

    >>> log = []
    >>> def reset_log():
    ...     del log[:]
    >>> def hook(status, arg='no_arg', kw1='no_kw1', kw2='no_kw2'):
    ...     log.append("%r arg %r kw1 %r kw2 %r" % (status, arg, kw1, kw2))

    Now register the hook with a transaction.

    >>> import transaction
    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(hook, '1')

    We can see that the hook is indeed registered.

    >>> [(hook.func_name, args, kws)
    ... for hook, args, kws in t.getAfterCommitHooks()]
    [('hook', ('1',), {})]

    When transaction commit is done, the hook is called, with its
    arguments.

    >>> log
    []
    >>> t.commit()
    >>> log
    ["True arg '1' kw1 'no_kw1' kw2 'no_kw2'"]
    >>> reset_log()

    A hook's registration is consumed whenever the hook is called.  Since
    the hook above was called, it's no longer registered:

    >>> len(list(t.getAfterCommitHooks()))
    0
    >>> transaction.commit()
    >>> log
    []

    The hook is only called after a full commit, not for a savepoint.

    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(hook, 'A', dict(kw1='B'))
    >>> dummy = t.savepoint()
    >>> log
    []
    >>> t.commit()
    >>> log
    ["True arg 'A' kw1 'B' kw2 'no_kw2'"]
    >>> reset_log()

    If a transaction is aborted, no hook is called.

    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(hook, ["OOPS!"])
    >>> transaction.abort()
    >>> log
    []
    >>> transaction.commit()
    >>> log
    []

    The hook is called after the commit is done, so even if the
    commit fails the hook will have been called.  To provoke failures in
    commit, we'll add failing resource manager to the transaction.

    >>> class CommitFailure(Exception):
    ...     pass
    >>> class FailingDataManager:
    ...     def tpc_begin(self, txn):
    ...         raise CommitFailure
    ...     def abort(self, txn):
    ...         pass
    >>> t = transaction.begin()
    >>> t.join(FailingDataManager())
    >>> t.addAfterCommitHook(hook, '2')
    >>> t.commit()
    Traceback (most recent call last):
    ...
    CommitFailure
    >>> log
    ["False arg '2' kw1 'no_kw1' kw2 'no_kw2'"]
    >>> reset_log()

    Let's register several hooks.

    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(hook, '4', dict(kw1='4.1'))
    >>> t.addAfterCommitHook(hook, '5', dict(kw2='5.2'))

    They are returned in the same order by getAfterCommitHooks.

    >>> [(hook.func_name, args, kws) #doctest: +NORMALIZE_WHITESPACE
    ... for hook, args, kws in t.getAfterCommitHooks()]
    [('hook', ('4',), {'kw1': '4.1'}),
    ('hook', ('5',), {'kw2': '5.2'})]

    And commit also calls them in this order.

    >>> t.commit()
    >>> len(log)
    2
    >>> log #doctest: +NORMALIZE_WHITESPACE
    ["True arg '4' kw1 '4.1' kw2 'no_kw2'",
    "True arg '5' kw1 'no_kw1' kw2 '5.2'"]
    >>> reset_log()

    While executing, a hook can itself add more hooks, and they will all
    be called as well, after the commit itself has already finished.

    >>> def recurse(status, txn, arg):
    ...     log.append('rec' + str(arg))
    ...     if arg:
    ...         txn.addAfterCommitHook(hook, '-')
    ...         txn.addAfterCommitHook(recurse, (txn, arg-1))
    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(recurse, (t, 3))
    >>> transaction.commit()
    >>> log #doctest: +NORMALIZE_WHITESPACE
    ['rec3',
    "True arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec2',
    "True arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec1',
    "True arg '-' kw1 'no_kw1' kw2 'no_kw2'",
    'rec0']
    >>> reset_log()

    If an after commit hook is raising an exception then it will log a
    message at error level so that if other hooks are registered they
    can be executed. We don't support execution dependencies at this level.

    >>> mgr = transaction.TransactionManager()
    >>> do = DataObject(mgr)
    >>> def hookRaise(status, arg='no_arg', kw1='no_kw1', kw2='no_kw2'):
    ...     raise TypeError("Fake raise")
    >>> t = transaction.begin()
    >>> t.addAfterCommitHook(hook, ('-', 1))
    >>> t.addAfterCommitHook(hookRaise, ('-', 2))
    >>> t.addAfterCommitHook(hook, ('-', 3))
    >>> transaction.commit()
    >>> log
    ["True arg '-' kw1 1 kw2 'no_kw2'", "True arg '-' kw1 3 kw2 'no_kw2'"]
    >>> reset_log()

    Test that the associated transaction manager has been cleaned up when
    after commit hooks are registered

    >>> mgr = transaction.TransactionManager()
    >>> do = DataObject(mgr)
    >>> t = transaction.begin()
    >>> len(t._manager._txns)
    1
    >>> t.addAfterCommitHook(hook, ('-', 1))
    >>> transaction.commit()
    >>> log
    ["True arg '-' kw1 1 kw2 'no_kw2'"]
    >>> len(t._manager._txns)
    0
    >>> reset_log()
    """
    # Intentionally empty: the docstring above is executed by DocTestSuite().
def test_suite():
    """Bundle the doom.txt file suite, the module doctests and the unittests."""
    from zope.testing.doctest import DocTestSuite, DocFileSuite
    return unittest.TestSuite((
        DocFileSuite('doom.txt'),
        DocTestSuite(),
        unittest.makeSuite(TransactionTests),
        ))
# additional_tests is for setuptools "setup.py test" support
additional_tests = test_suite

if __name__ == '__main__':
    # Run the full suite when this module is executed directly.
    unittest.TextTestRunner().run(test_suite())
| [
"ariane@venus.(none)"
] | ariane@venus.(none) |
1c816c5f80b1281d92ecec69626cd50125706f1c | cc13092b652113221a877db2bf907c050dc30aaa | /kws_streaming/models/tc_resnet.py | 03c0408ad9a532fc355b5e35dd569b70ed1d7f5e | [
"Apache-2.0"
] | permissive | Th-Fo/google-research | 1e62ee50f76c2931fdb6db1de736a85e94251e25 | 9d7bd968843c27216d01c92ff832b1cd58cafa85 | refs/heads/master | 2020-12-27T17:30:43.916109 | 2020-05-25T17:06:20 | 2020-05-25T17:06:20 | 237,989,659 | 1 | 3 | Apache-2.0 | 2020-05-25T17:06:22 | 2020-02-03T14:52:08 | null | UTF-8 | Python | false | false | 6,116 | py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model based on combination of n by 1 convolutions with residual blocks."""
from kws_streaming.layers import speech_features
from kws_streaming.layers.compat import tf
from kws_streaming.models.utils import parse
def model_parameters(parser_nn):
  """Temporal Convolution Resnet model parameters.

  In more details parameters are described at:
  https://arxiv.org/pdf/1904.03814.pdf
  We converted model to Keras and made it compatible with TF V2
  https://github.com/hyperconnect/TC-ResNet

  Args:
    parser_nn: global command line args parser

  Returns: parser with updated arguments
  """
  # First entry is the stem convolution; the remaining entries are the
  # residual blocks (see model() below).
  parser_nn.add_argument(
      '--channels',
      type=str,
      default='24, 36, 36, 48, 48, 72, 72',
      help='Number of channels per convolutional block (including first conv)',
  )
  parser_nn.add_argument(
      '--debug_2d',
      type=int,
      default=0,
      help='if 0 conv_kernel will be [3, 3], else conv_kernel [3, 1]',
  )
  # Optional average pooling applied right after the stem convolution.
  parser_nn.add_argument(
      '--pool_size',
      type=str,
      default='',
      help="pool size for example '4,4'",
  )
  parser_nn.add_argument(
      '--pool_stride',
      type=int,
      default=0,
      help='pool stride, for example 4',
  )
  # Batch-normalization settings shared by every BN layer in the model.
  parser_nn.add_argument(
      '--bn_momentum',
      type=float,
      default=0.997,
      help='Momentum for the moving average',
  )
  parser_nn.add_argument(
      '--bn_center',
      type=int,
      default=1,
      help='If True, add offset of beta to normalized tensor.'
      'If False, beta is ignored',
  )
  parser_nn.add_argument(
      '--bn_scale',
      type=int,
      default=1,
      help='If True, multiply by gamma. If False, gamma is not used. '
      'When the next layer is linear (also e.g. nn.relu), this can be disabled'
      'since the scaling will be done by the next layer.',
  )
  parser_nn.add_argument(
      '--bn_renorm',
      type=int,
      default=0,
      help='Whether to use Batch Renormalization',
  )
  parser_nn.add_argument(
      '--dropout',
      type=float,
      default=0.2,
      help='Percentage of data dropped',
  )
def model(flags):
  """Temporal Convolution ResNet model.

  It is based on paper:
  Temporal Convolution for Real-time Keyword Spotting on Mobile Devices
  https://arxiv.org/pdf/1904.03814.pdf

  Args:
    flags: data/model parameters

  Returns:
    Keras model for training
  """
  input_audio = tf.keras.layers.Input(
      shape=(flags.desired_samples,), batch_size=flags.batch_size)
  # Convert the raw waveform into speech features (e.g. spectrogram/MFCC).
  net = speech_features.SpeechFeatures(
      speech_features.SpeechFeatures.get_params(flags))(
          input_audio)
  time_size, feature_size = net.shape[1:3]
  channels = parse(flags.channels)
  net = tf.keras.backend.expand_dims(net)
  if flags.debug_2d:
    conv_kernel = first_conv_kernel = (3, 3)
  else:
    # Fold the feature axis into channels so convolutions run along time
    # only (the "temporal convolution" of TC-ResNet).
    net = tf.reshape(
        net, [-1, time_size, 1, feature_size])  # [batch, time, 1, feature]
    first_conv_kernel = (3, 1)
    conv_kernel = (9, 1)
  # Stem: conv + BN + ReLU using the first entry of `channels`.
  net = tf.keras.layers.Conv2D(
      filters=channels[0],
      kernel_size=first_conv_kernel,
      strides=1,
      padding='same',
      activation='linear')(
          net)
  net = tf.keras.layers.BatchNormalization(
      momentum=flags.bn_momentum,
      center=flags.bn_center,
      scale=flags.bn_scale,
      renorm=flags.bn_renorm)(
          net)
  net = tf.keras.layers.Activation('relu')(net)
  if parse(flags.pool_size):
    net = tf.keras.layers.AveragePooling2D(
        pool_size=parse(flags.pool_size), strides=flags.pool_stride)(
            net)
  channels = channels[1:]
  # residual blocks
  for n in channels:
    if n != net.shape[-1]:
      # Channel count changes: downsample (stride 2) and project the
      # shortcut with a 1x1 conv so shapes match for the Add below.
      stride = 2
      layer_in = tf.keras.layers.Conv2D(
          filters=n,
          kernel_size=1,
          strides=stride,
          padding='same',
          activation='linear')(
              net)
      layer_in = tf.keras.layers.BatchNormalization(
          momentum=flags.bn_momentum,
          center=flags.bn_center,
          scale=flags.bn_scale,
          renorm=flags.bn_renorm)(
              layer_in)
      layer_in = tf.keras.layers.Activation('relu')(layer_in)
    else:
      # Identity shortcut.
      layer_in = net
      stride = 1
    net = tf.keras.layers.Conv2D(
        filters=n,
        kernel_size=conv_kernel,
        strides=stride,
        padding='same',
        activation='linear')(
            net)
    net = tf.keras.layers.BatchNormalization(
        momentum=flags.bn_momentum,
        center=flags.bn_center,
        scale=flags.bn_scale,
        renorm=flags.bn_renorm)(
            net)
    net = tf.keras.layers.Activation('relu')(net)
    net = tf.keras.layers.Conv2D(
        filters=n,
        kernel_size=conv_kernel,
        strides=1,
        padding='same',
        activation='linear')(
            net)
    net = tf.keras.layers.BatchNormalization(
        momentum=flags.bn_momentum,
        center=flags.bn_center,
        scale=flags.bn_scale,
        renorm=flags.bn_renorm)(
            net)
    # residual connection
    net = tf.keras.layers.Add()([net, layer_in])
    net = tf.keras.layers.Activation('relu')(net)
  # Global average pooling over the remaining time/feature axes.
  net = tf.keras.layers.AveragePooling2D(
      pool_size=net.shape[1:3], strides=1)(
          net)
  net = tf.keras.layers.Dropout(rate=flags.dropout)(net)
  # fully connected layer (implemented as a 1x1 convolution)
  net = tf.keras.layers.Conv2D(
      filters=flags.label_count,
      kernel_size=1,
      strides=1,
      padding='same',
      activation='linear')(
          net)
  net = tf.reshape(net, shape=(-1, net.shape[3]))
  return tf.keras.Model(input_audio, net)
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
03e25ec68fa24203aeca5c8b73e78b1da4d5005b | d4517268b8724ef208e9d07e59628208cb0832e9 | /BalancedSmileys/py2/bs.py | ae9c513dbea5334261132bb501da379f9a70fc60 | [] | no_license | StevenDunn/CodeEval | 5a8a47d3a90d9bed350228f6647e41f1298f46c2 | b81bb31a600826d4b3b242a9a35aa1d28c2eb484 | refs/heads/master | 2021-01-15T15:33:53.155975 | 2018-04-01T13:54:16 | 2018-04-01T13:54:16 | 11,549,566 | 6 | 4 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | # balanced smileys soln in py2 for code eval by steven a dunn
import sys
def is_balanced(line):
    """Return "YES" if the message can be balanced, else "NO".

    A message is balanced if every '(' can be matched with a later ')',
    where each smiley ":)" / ":(" may be read either as a smiley (ignored)
    or as a colon followed by a real parenthesis.  We track the range
    [min_open, max_open] of possible unmatched-'(' counts left to right.
    """
    if line == "":
        return "YES"
    # Encode smileys as sentinel chars: '}' may close, '{' may open.
    line = line.replace(":)", "}").replace(":(", "{")
    min_open = 0  # fewest possible unmatched '('
    max_open = 0  # most possible unmatched '('
    for c in line:
        if c == '(':
            min_open += 1
            max_open += 1
        elif c == ')':
            min_open -= 1
            max_open -= 1
            if max_open < 0:
                # Even reading every ':(' as '(' cannot absorb this ')'.
                return "NO"
        elif c == '{':
            # Optional '(': only the upper bound grows.
            max_open += 1
        elif c == '}':
            # Optional ')': only the lower bound shrinks.
            min_open -= 1
        # The unmatched-'(' count can never actually be negative; without
        # this clamp a later '(' would start from a negative lower bound
        # and an unbalanced message such as ":)(" would report "YES".
        min_open = max(min_open, 0)
    return "YES" if min_open == 0 else "NO"
# Classify every message in the input file named on the command line.
f = open(sys.argv[1], 'r')
for line in f:
    print is_balanced(line)
f.close() | [
"stevendunn@gmail.com"
] | stevendunn@gmail.com |
d2fb1ee1dc5bf60a4b7ed477b3f0d3dd9283e44f | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf.0/gsn-edf_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=66/params.py | d68d8385fdd6a0806cc34b1f280849c3602ecd11 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | {'cpus': 4,
'duration': 30,
'final_util': '3.527214',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.5',
'res_nmb': '4',
'res_weight': '0.06',
'scheduler': 'GSN-EDF',
'trial': 66,
'utils': 'uni-medium-3'}
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
be8e93fb9885fd8b2e99c287d74219d031f769ab | 95d144459ff7cbfc009c20ec16f3c06ddda867cc | /archimedes/archimedesquestion/urls.py | 676fcfc9f625eacd64a157dd402ef044404dc22f | [] | no_license | cvelas11/mmobious | b9c75fde17b038ac6bab8c2d698d588e27f8d8d6 | 605a9425dda1ccb9978c998eff679d5ddc7c0db8 | refs/heads/master | 2021-07-09T06:01:59.712677 | 2021-04-05T15:26:30 | 2021-04-05T15:26:30 | 236,180,518 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,572 | py | """archimedes URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import url
from django.conf.urls import include
from archimedesquestion import views
from django.views.generic import TemplateView
# PEP 8: single spaces around '='.  URL namespace for this app's routes.
app_name = 'archimedesquestion'
urlpatterns = [
url("^register", views.register, name = 'register'),
url("^ingresar", views.ingresar, name = 'ingresar'),
url("^logout", views.logout_func, name = 'logout'),
url("^practicar", views.practicar, name = 'practicar'),
url("^recursos", views.recursos, name = 'recursos'),
url("^questions", views.questions, name = 'questions'),
url("^iniciar", views.iniciar, name = 'iniciar'),
url("^proyecto", views.proyecto, name = 'proyecto'),
url("^dashboard", views.dashboard, name = 'dashboard'),
# path('practicar', TemplateView.as_view(template_name='archimedesquestion/practicar.html')), # <--
#path('accounts/', include('allauth.urls'))
]
| [
"(none)"
] | (none) |
cf3fcd0da0ce278ca386e594acdeb7ccd3d457e6 | d24a6e0be809ae3af8bc8daa6dacfc1789d38a84 | /ABC/ABC151-200/ABC196/B.py | 76ae65459e878d6624bf37e1e3c5fb61dc875458 | [] | no_license | k-harada/AtCoder | 5d8004ce41c5fc6ad6ef90480ef847eaddeea179 | 02b0a6c92a05c6858b87cb22623ce877c1039f8f | refs/heads/master | 2023-08-21T18:55:53.644331 | 2023-08-05T14:21:25 | 2023-08-05T14:21:25 | 184,904,794 | 9 | 0 | null | 2023-05-22T16:29:18 | 2019-05-04T14:24:18 | Python | UTF-8 | Python | false | false | 390 | py | def solve(xs):
if "." in xs:
return int(xs[:xs.index(".")])
else:
return int(xs)
def main():
    # Read one decimal string from stdin and print its integer part.
    digits = input()
    print(solve(digits))
def test():
    # Regression checks: solve() truncates a decimal string to its
    # integer part (everything before the '.').
    assert solve("5.90") == 5
    assert solve("0") == 0
    assert solve("84939825309432908832902189.9092309409809091329") == 84939825309432908832902189
if __name__ == "__main__":
    # Run the self-checks first, then process the real input.
    test()
    main()
| [
"cashfeg@gmail.com"
] | cashfeg@gmail.com |
88387b3005840626d21e96362ebb5b34eee260d1 | 5ef5abb189907b010e544e3c42ce1a38ed8e298f | /mysite/baseItems/models.py | aa8e62fe30b17d1bf2c62081b129658066edc4bb | [] | no_license | LKingJ23/Gymkana-formacion-Django | 5a5daf27abdd100ee8f9e0e61af5eb010899928a | 63d49b7ed81e8a3d0c5ed9c9765801680fb51ef7 | refs/heads/master | 2021-04-15T19:03:33.830028 | 2018-04-09T16:29:59 | 2018-04-09T16:29:59 | 126,459,951 | 0 | 0 | null | 2018-04-09T16:30:00 | 2018-03-23T09:05:57 | Python | UTF-8 | Python | false | false | 1,023 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
import os
class BaseItems(models.Model):
    """Abstract base with the fields shared by all content items."""
    title = models.CharField(max_length=200, blank=False, null=False)
    subtitle = models.CharField(max_length=200, blank=False, null=False)
    body = models.TextField()

    class Meta:
        # Abstract: Django creates no table for BaseItems itself.
        abstract = True

    def __str__(self):
        # Human-readable representation (admin lists, shell, templates).
        return self.title
class New(BaseItems):
    """A news item; the publish date is set automatically on creation."""
    publish_date = models.DateField(auto_now_add=True)
    # NOTE(review): the default path starts with '/' while upload_to is
    # "img"; delete_img() below compares against this exact default string
    # to avoid deleting the shared placeholder -- confirm the path is right.
    image = models.ImageField(upload_to="img", default='/img/periodico.jpg')
class Event(BaseItems):
    """An event with an explicit start and end date."""
    start_date = models.DateField(blank=False, null=False)
    end_date = models.DateField(blank=False, null=False)
@receiver(post_delete, sender=New)
def delete_img(sender, **kwargs):
    """Remove a New instance's image file from disk after it is deleted."""
    instance = kwargs.get('instance')
    try:
        picture = instance.image
        # Never delete the shared default placeholder image.
        if picture.name != "/img/periodico.jpg":
            os.remove(picture.path)
    except OSError:
        pass
| [
"lkingj23@gmail.com"
] | lkingj23@gmail.com |
e61ef3bc74137cb2c14a765724f0967c90ad8dfc | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/apimanagement/v20210801/get_policy.py | f723b56f81345494660930c00c7d052abe191863 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,750 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetPolicyResult',
'AwaitableGetPolicyResult',
'get_policy',
'get_policy_output',
]
@pulumi.output_type
class GetPolicyResult:
    """
    Policy Contract details.
    """
    # NOTE: auto-generated by the Pulumi SDK generator -- fix the generator,
    # not this file, for behavioral changes.
    def __init__(__self__, format=None, id=None, name=None, type=None, value=None):
        # Runtime type checks guard against malformed invoke payloads.
        if format and not isinstance(format, str):
            raise TypeError("Expected argument 'format' to be a str")
        pulumi.set(__self__, "format", format)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if value and not isinstance(value, str):
            raise TypeError("Expected argument 'value' to be a str")
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def format(self) -> Optional[str]:
        """
        Format of the policyContent.
        """
        return pulumi.get(self, "format")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        Contents of the Policy as defined by the format.
        """
        return pulumi.get(self, "value")
class AwaitableGetPolicyResult(GetPolicyResult):
    """Awaitable wrapper so a get_policy result can be used with `await`."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this method into a generator, which
        # makes the object awaitable; awaiting it immediately resolves to a
        # plain GetPolicyResult copy.
        if False:
            yield self
        return GetPolicyResult(
            format=self.format,
            id=self.id,
            name=self.name,
            type=self.type,
            value=self.value)
def get_policy(format: Optional[str] = None,
               policy_id: Optional[str] = None,
               resource_group_name: Optional[str] = None,
               service_name: Optional[str] = None,
               opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPolicyResult:
    """
    Policy Contract details.

    :param str format: Policy Export Format.
    :param str policy_id: The identifier of the Policy.
    :param str resource_group_name: The name of the resource group.
    :param str service_name: The name of the API Management service.
    """
    # Marshal arguments into the provider's expected camelCase keys.
    __args__ = dict()
    __args__['format'] = format
    __args__['policyId'] = policy_id
    __args__['resourceGroupName'] = resource_group_name
    __args__['serviceName'] = service_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous invoke against the azure-native provider.
    __ret__ = pulumi.runtime.invoke('azure-native:apimanagement/v20210801:getPolicy', __args__, opts=opts, typ=GetPolicyResult).value

    return AwaitableGetPolicyResult(
        format=__ret__.format,
        id=__ret__.id,
        name=__ret__.name,
        type=__ret__.type,
        value=__ret__.value)
def get_policy_output(format: Optional[pulumi.Input[Optional[str]]] = None,
policy_id: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPolicyResult]:
"""
Policy Contract details.
:param str format: Policy Export Format.
:param str policy_id: The identifier of the Policy.
:param str resource_group_name: The name of the resource group.
:param str service_name: The name of the API Management service.
"""
...
| [
"noreply@github.com"
] | bpkgoud.noreply@github.com |
021c16b3aa2b8424555798eccb39edf29e4d790f | fe8a0f9e08869115a9190bcfe47cde554bbec0ff | /client/tests/source_database_buck_builder_test.py | deccf6ec4dfadf32749c2cbbb72287a7916660a1 | [
"MIT"
] | permissive | yuhan1212/pyre-check | 9eeec76fd2a84ad94d6ffb65c3b7b7f5812ea170 | 37a3fcb4dec517c37f381743bac8f8a269218f8c | refs/heads/main | 2023-08-22T01:22:52.001195 | 2021-10-08T17:53:09 | 2021-10-08T17:56:34 | 415,130,523 | 1 | 0 | MIT | 2021-10-08T21:23:13 | 2021-10-08T21:23:13 | null | UTF-8 | Python | false | false | 9,132 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import shutil
import unittest
from pathlib import Path
from unittest.mock import MagicMock, call, patch
from .. import source_database_buck_builder
class SourceDatabaseBuckBuilderTest(unittest.TestCase):
    def setUp(self) -> None:
        # Expected `buck query` argument list shared by the query tests below.
        self._query_arguments = [
            "query",
            "--json",
            'kind("python_binary|python_library|python_test", %s) '
            "- attrfilter(labels, generated, %s) "
            "+ attrfilter(labels, unittest-library, %s) "
            "- attrfilter(labels, no_pyre, %s)",
            "//foo/bar/...",
            "//bar:baz",
        ]
    def test_get_buck_query_arguments(self) -> None:
        # Without a mode, the query arguments match the shared fixture exactly.
        arguments = source_database_buck_builder._get_buck_query_arguments(
            specifications=["//foo/bar/...", "//bar:baz"], mode=None
        )
        self.assertEqual(arguments, self._query_arguments)
    def test_get_buck_query_arguments__with_mode(self) -> None:
        # A mode adds a single '@mode/<name>' flag right after '--json'.
        arguments = source_database_buck_builder._get_buck_query_arguments(
            specifications=["//foo/bar/...", "//bar:baz"], mode="foo"
        )
        self.assertEqual(
            arguments,
            [
                "query",
                "--json",
                "@mode/foo",
                'kind("python_binary|python_library|python_test", %s) '
                "- attrfilter(labels, generated, %s) "
                "+ attrfilter(labels, unittest-library, %s) "
                "- attrfilter(labels, no_pyre, %s)",
                "//foo/bar/...",
                "//bar:baz",
            ],
        )
    # pyre-fixme[56]: Pyre was not able to infer the type of argument
    #  `tools.pyre.client.source_database_buck_builder` to decorator factory
    #  `unittest.mock.patch.object`.
    @patch.object(source_database_buck_builder, "_buck")
    def test_query_targets(self, buck: MagicMock) -> None:
        # Fake `buck query` JSON output; per the expected value below,
        # auxiliary targets (…-mypy_ini, …-testmodules-lib) are filtered out
        # and duplicates across specifications are deduplicated.
        query_output = {
            "//foo/bar/...": ["//foo/bar:baz", "//foo/bar:tests-library"],
            "//bar:baz": [
                "//bar:baz",
                "//bar:tests-mypy_ini",
                "//bar:tests-library-testmodules-lib",
            ],
        }
        buck.return_value = json.dumps(query_output)
        self.assertEqual(
            source_database_buck_builder._query_targets(
                ["//foo/bar/...", "//bar:baz"],
                isolation_prefix=None,
                mode=None,
                buck_root=Path(""),
            ),
            ["//foo/bar:baz", "//foo/bar:tests-library", "//bar:baz"],
        )
    def test_buck_build_arguments(self) -> None:
        # Each target gets a '#source-db' flavor suffix in the build command.
        self.assertEqual(
            source_database_buck_builder._get_buck_build_arguments(
                mode="opt", targets=["//foo/bar:baz", "//foo/bar:tests-library"]
            ),
            [
                "@mode/opt",
                "--show-full-json-output",
                "//foo/bar:baz#source-db",
                "//foo/bar:tests-library#source-db",
            ],
        )
    # pyre-fixme[56]: Argument `json` to decorator factory
    #  `unittest.mock.patch.object` could not be resolved in a global scope.
    @patch.object(json, "loads")
    @patch.object(Path, "read_text")
    def test_load_source_databases(
        self, read_text: MagicMock, loads: MagicMock
    ) -> None:
        # File reads and JSON parsing are mocked out; each target maps to
        # the parsed database of its db.json path.
        expected_database = {
            "sources": {"bar.py": "some/other/bar.py"},
            "dependencies": {"foo.py": "some/foo.py"},
        }
        loads.return_value = expected_database
        source_databases = source_database_buck_builder._load_source_databases(
            {"//foo:bar#source-db": "/some/bar#source-db/db.json"}
        )
        self.assertEqual(source_databases, {"//foo:bar#source-db": expected_database})
    def test_merge_source_databases(self) -> None:
        # Per the expected value: only .py/.pyi entries survive (the .cpp
        # files are dropped), later databases win for duplicate keys
        # ("duplicate.py" -> "duplicate_in_foo.py"), and the synthetic
        # __manifest__/__test_modules__/__test_main__ modules are excluded.
        actual = source_database_buck_builder._merge_source_databases(
            {
                "hello": {
                    "sources": {
                        "foo.py": "foo.py",
                        "duplicate.py": "duplicate_in_hello.py",
                    },
                    "dependencies": {
                        "bar.pyi": "buck-out/bar.pyi",
                        "bar.cpp": "bar.cpp",
                    },
                },
                "foo": {
                    "sources": {},
                    "dependencies": {
                        "foo2.pyi": "buck-out/foo2.pyi",
                        "bar2.cpp": "bar2.cpp",
                        "duplicate.py": "duplicate_in_foo.py",
                        "__manifest__.py": "__manifest__.py",
                        "__test_modules__.py": "__test_modules__.py",
                        "__test_main__.py": "__test_main__.py",
                    },
                },
            }
        )
        self.assertEqual(
            actual,
            {
                "foo.py": "foo.py",
                "duplicate.py": "duplicate_in_foo.py",
                "bar.pyi": "buck-out/bar.pyi",
                "foo2.pyi": "buck-out/foo2.pyi",
            },
        )
    # pyre-fixme[56]: Argument `shutil` to decorator factory
    #  `unittest.mock.patch.object` could not be resolved in a global scope.
    @patch.object(shutil, "rmtree")
    @patch.object(Path, "exists")
    @patch.object(Path, "mkdir")
    @patch.object(Path, "symlink_to")
    def test_build_link_tree(
        self,
        symlink_to: MagicMock,
        make_directory: MagicMock,
        exists: MagicMock,
        remove_tree: MagicMock,
    ) -> None:
        # All filesystem operations are mocked; we only verify which
        # directories are created and which symlinks are requested.
        source_database_buck_builder._build_link_tree(
            {"foo.py": "foo.py", "bar/baz.pyi": "buck-out/bar.pyi"},
            Path("foo_directory"),
            Path("/root"),
        )
        # One mkdir for the output root, one per linked file's parent dir.
        self.assertEqual(
            make_directory.call_args_list,
            [
                call(parents=True),
                call(parents=True, exist_ok=True),
                call(parents=True, exist_ok=True),
            ],
        )
        # Symlink targets are resolved against the buck root.
        self.assertEqual(
            symlink_to.call_args_list,
            [call(Path("/root/foo.py")), call(Path("/root/buck-out/bar.pyi"))],
        )
@patch.object(source_database_buck_builder, "_build_link_tree")
@patch.object(source_database_buck_builder, "_load_source_databases")
@patch.object(source_database_buck_builder, "_build_targets")
# pyre-fixme[56]: Argument
# `tools.pyre.tools.buck_project_builder.source_database_buck_builder` to
# decorator factory `unittest.mock.patch.object` could not be resolved in a global
# scope.
@patch.object(source_database_buck_builder, "_query_targets")
def test_build(
    self,
    query_targets: MagicMock,
    build_targets: MagicMock,
    load_source_databases: MagicMock,
    build_link_tree: MagicMock,
) -> None:
        # End-to-end `build` flow with every collaborator mocked: query the
        # targets, build them, load their source databases, then materialize
        # the merged mapping as a link tree.
        load_source_databases.return_value = {
            "hello": {"sources": {"foo.py": "foo.py"}, "dependencies": {}},
            "foo": {"sources": {}, "dependencies": {"bar.pyi": "buck-out/bar.pyi"}},
        }
        source_database_buck_builder.build(
            ["//foo/bar/..."],
            output_directory=Path("output_directory"),
            buck_root=Path("buck_root"),
            isolation_prefix=None,
            mode=None,
        )
        query_targets.assert_called_once()
        build_targets.assert_called_once()
        # The link tree receives the merged sources + .pyi dependencies map.
        build_link_tree.assert_called_once_with(
            {"foo.py": "foo.py", "bar.pyi": "buck-out/bar.pyi"},
            Path("output_directory"),
            Path("buck_root"),
        )
def test_normalize_specification(self) -> None:
    """A bare path gets the default '//' cell prefix; an explicit cell
    prefix is left untouched."""
    cases = [
        ("foo/bar:baz", "//foo/bar:baz"),
        ("some_root//foo/bar:baz", "some_root//foo/bar:baz"),
    ]
    for specification, expected in cases:
        self.assertEqual(
            source_database_buck_builder._normalize_specification(specification),
            expected,
        )
def test_load_json__no_extra_data(self) -> None:
    """A clean JSON document parses normally."""
    payload = """
            {
                "a": "b",
                "a2": "b2"
            }
            """
    parsed = source_database_buck_builder._load_json_ignoring_extra_data(payload)
    self.assertEqual(parsed, {"a": "b", "a2": "b2"})
def test_load_json__extra_data(self) -> None:
    """Trailing non-JSON noise after the document is ignored."""
    payload = """
            {
                "a": "b",
                "a2": "b2"
            }
            Some error message.
            Some error message.
            """
    parsed = source_database_buck_builder._load_json_ignoring_extra_data(payload)
    self.assertEqual(parsed, {"a": "b", "a2": "b2"})
def test_load_json__exception(self) -> None:
    """Input that never forms a JSON document still raises."""
    malformed = """
            Malformed JSON.
            """
    with self.assertRaises(json.JSONDecodeError):
        source_database_buck_builder._load_json_ignoring_extra_data(malformed)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
152ea5d19b0c59f0c85e729ac604a8d44fb1fa72 | 105ef2d5f8bba13c15deb8c4a2a9af307b4e547a | /Programmers/level2_python/42860.py | ba611eaf3ea88a982dd35ae7396d3d3a6cd9df6f | [] | no_license | caniro/algo-note | 1ec4c0e08adcb542d3356daf7b6e943af722394f | d237a5b58a67ca453dc7a1a335f99428af2c5df5 | refs/heads/master | 2023-08-29T22:39:35.189711 | 2021-11-04T11:18:07 | 2021-11-04T11:18:07 | 260,473,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | # 조이스틱 : https://programmers.co.kr/learn/courses/30/lessons/42860
def solution(name):
    """Return the minimum number of joystick moves needed to spell ``name``.

    The cursor starts on the first character of an all-'A' string of the same
    length.  Up/down moves change the current letter (the alphabet wraps, so
    'A' -> 'Z' is one move down); left/right moves the cursor, also wrapping
    around the ends.

    The letter-change cost of each position is fixed, so only cursor travel
    needs optimizing.  For every prefix [0..i] we try two sweeps: edit the
    prefix, backtrack, then wrap left to the remaining suffix; or wrap left
    to the suffix first, then come back for the prefix.  A straight
    left-to-right sweep is the initial bound.

    This replaces the previous greedy "walk to the nearest differing index"
    strategy, which is suboptimal on inputs with a long interior run of 'A's
    (e.g. it spent 9 moves on "BBBAAAB" where 8 suffice).
    """
    if not name:
        # Nothing to spell: no moves required.
        return 0
    # Up/down cost per position: go forward from 'A' or backwards past 'Z'.
    change_cost = sum(
        min(ord(ch) - ord('A'), ord('Z') - ord(ch) + 1) for ch in name
    )
    n = len(name)
    # Baseline: sweep straight from index 0 to index n - 1.
    move_cost = n - 1
    for i in range(n):
        # First index after position i that still needs a letter change.
        nxt = i + 1
        while nxt < n and name[nxt] == 'A':
            nxt += 1
        # Edit [0..i], backtrack to the start, then wrap left to [nxt..n-1].
        move_cost = min(move_cost, 2 * i + (n - nxt))
        # Wrap left to [nxt..n-1] first, then return for [0..i].
        move_cost = min(move_cost, i + 2 * (n - nxt))
    return change_cost + move_cost
| [
"caniro@naver.com"
] | caniro@naver.com |
9ef188c317fcaecca1e9dba557f4b68ad439a924 | ccc04ec2fe54772797682642aa17665fff848575 | /supplier_rfq/migrations.py | 0d6c919e6d1fdc1257d774773dbf55fb6eacf74a | [
"MIT"
] | permissive | admin627863/supplier_rfq | dcf33b8db0af1a8ef7dd470c338631a8a4d4c230 | 81434ea527c724e95ed2724d212fd9f548afb81a | refs/heads/main | 2023-08-10T11:37:11.793389 | 2021-09-24T05:32:25 | 2021-09-24T05:32:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,755 | py | import frappe
from frappe.modules.import_file import import_file_by_path
from frappe.utils import get_bench_path
import os
from os.path import join
def after_migrate(**args):
    """Post-migration hook: (re)create this app's custom fields and property
    setters after every migrate run.

    ``**args`` is forwarded unchanged to the custom-field creation step,
    which itself ignores them -- presumably kept so the hook signature
    matches however Frappe invokes it (TODO confirm against hooks.py).
    """
    supplier_rfq_create_custom_fields(**args)
    make_property_setter()
def make_property_setter():
    """Import the 'Request for Quotation-main-title_field' Property Setter
    fixture if it is not already present in the site database.

    The fixture JSON lives in this app's ``import_records`` folder; the
    folder path is resolved relative to the bench root at runtime.
    """
    # NOTE(review): app_folder_path starts with '/', so the format() below
    # yields a double slash ("<bench>//apps/...").  Harmless on POSIX, but
    # worth normalizing -- confirm before changing.
    supplier_rfq_app_folder_path='/apps/supplier_rfq/supplier_rfq/import_records'
    if(not frappe.db.exists('Property Setter','Request for Quotation-main-title_field')):
        fname="property_setter.json"
        import_folder_path="{bench_path}/{app_folder_path}".format(bench_path=get_bench_path(),app_folder_path=supplier_rfq_app_folder_path)
        make_records(import_folder_path,fname)
def make_records(path, fname):
    """Import the fixture ``fname`` found under directory ``path``.

    Silently does nothing when ``path`` is not an existing directory,
    exactly like the original implementation.
    """
    if not os.path.isdir(path):
        return
    import_file_by_path("{}/{}".format(path, fname))
def supplier_rfq_create_custom_fields(**args):
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
custom_fields = {
'Request for Quotation':[
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Request for Quotation',
fetch_if_empty= 0,
fieldname= 'project_cf',
fieldtype= 'Link',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'vendor',
label= 'Project',
length= 0,
name= 'Request for Quotation-project_cf',
no_copy= 0,
options= 'Project',
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
),
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Request for Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_comparison_section',
fieldtype= 'Section Break',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'items',
label= 'Supplier Comparison',
length= 0,
name= 'Request for Quotation-supplier_comparison_section',
no_copy= 0,
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
),
dict(
allow_in_quick_entry= 0,
allow_on_submit= 1,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Request for Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_quotation_comparisons',
fieldtype= 'Table',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'supplier_comparison_section',
label= '',
length= 0,
name= 'Request for Quotation-supplier_quotation_comparisons',
no_copy= 0,
options= 'Supplier Quotation Comparison CT',
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
)
],
'Supplier Quotation Item':[
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Supplier Quotation Item',
fetch_if_empty= 0,
fieldname= 'schedule_date',
fieldtype= 'Date',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 1,
in_standard_filter= 0,
insert_after= 'item_name',
label= 'Required Date',
length= 0,
name= 'Supplier Quotation Item-schedule_date',
no_copy= 0,
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
)
],
'Supplier Quotation':[
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Supplier Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_uploaded_attachment_cf',
fieldtype= 'Attach',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'supplier_name',
label= 'Supplier Uploaded Attachment',
length= 0,
name= 'Supplier Quotation-supplier_uploaded_attachment_cf',
no_copy= 0,
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0,
),
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Supplier Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_comparison',
fieldtype= 'Section Break',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'amended_from',
label= 'Supplier Comparison',
length= 0,
name= 'Supplier Quotation-supplier_comparison',
no_copy= 0,
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
),
dict(
allow_in_quick_entry= 0,
allow_on_submit= 1,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Supplier Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_quotation_comparisons',
fieldtype= 'Table',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'supplier_comparison',
label= '',
length= 0,
name= 'Supplier Quotation-supplier_quotation_comparisons',
no_copy= 0,
options= 'Supplier Quotation Comparison CT',
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 1,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
),
dict(
allow_in_quick_entry= 0,
allow_on_submit= 0,
bold= 0,
collapsible= 0,
columns= 0,
docstatus= 0,
doctype= 'Custom Field',
dt= 'Supplier Quotation',
fetch_if_empty= 0,
fieldname= 'supplier_notes',
fieldtype= 'Text',
hidden= 0,
ignore_user_permissions= 0,
ignore_xss_filter= 0,
in_global_search= 0,
in_list_view= 0,
in_standard_filter= 0,
insert_after= 'terms',
label= 'Supplier Notes',
length= 0,
name= 'Supplier Quotation-supplier_notes',
no_copy= 0,
permlevel= 0,
print_hide= 0,
print_hide_if_no_value= 0,
read_only= 0,
report_hide= 0,
reqd= 0,
search_index= 0,
translatable= 0,
unique= 0
)
]
}
create_custom_fields(custom_fields)
frappe.db.commit() # to avoid implicit-commit errors
| [
"mr.ashish.shah@gmail.com"
] | mr.ashish.shah@gmail.com |
f1c8081f521eee2c2e999506d85961e520a8ba5b | d50685a3f3d612349b1f6627ed8b807f0eec3095 | /demo/framework/ControlNode/aa.py | 94eb965303f8c805e95a0d88a91a316126e60660 | [] | no_license | Erich6917/python_littlespider | b312c5d018bce17d1c45769e59243c9490e46c63 | 062206f0858e797945ce50fb019a1dad200cccb4 | refs/heads/master | 2023-02-12T23:22:27.352262 | 2021-01-05T06:21:20 | 2021-01-05T06:21:20 | 113,631,826 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | # -*- coding: utf-8 -*-
# @Time : 2018/2/2
# @Author : LIYUAN134
# @File : aa.py
# @Commment:
#
urls = []
urls = None
if urls:
print 'loops'
else:
print 'empty'
| [
"1065120559@qq.com"
] | 1065120559@qq.com |
f90ff3052a6142fae24866037d0d30b94299f3da | 41147a2cdc221c2a8db84852ef7f29175b3409b3 | /caso/messenger/ssm.py | 36b1e4af9b37a37e62b77128d5d83278a5275f21 | [
"Apache-2.0"
] | permissive | aidaph/caso | ee1437d189c20044fe45fedeb4326a6f5fccd775 | e40e883c2eb729f040ef63085c94b48b0c7661e3 | refs/heads/master | 2023-08-01T13:52:34.556841 | 2023-06-07T08:11:52 | 2023-06-07T09:52:08 | 140,431,401 | 0 | 0 | Apache-2.0 | 2018-07-10T12:50:11 | 2018-07-10T12:50:09 | null | UTF-8 | Python | false | false | 6,487 | py | # -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Module containing the APEL SSM Messenger."""
import json
import typing
import warnings
# We are not parsing XML so this is safe
import xml.etree.ElementTree as ETree # nosec
import dirq.QueueSimple
from oslo_config import cfg
from oslo_log import log
import caso.exception
import caso.messenger
import caso.record
from caso import utils
LOG = log.getLogger(__name__)
# Configuration options for this messenger, registered under [ssm].
opts = [
    cfg.StrOpt(
        "output_path",
        default="/var/spool/apel/outgoing/openstack",
        help="Directory to put the generated SSM records.",
    ),
    cfg.IntOpt(
        "max_size", default=100, help="Maximum number of records to send per message"
    ),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="ssm")
# Public API of this module.
__all__ = ["SSMMessenger", "SSMMessengerV04"]
class SSMMessenger(caso.messenger.BaseMessenger):
    """SSM Messenger that pushes formatted messages to a dirq instance."""
    # Message format versions per record family.
    version_cloud = "0.4"
    version_ip = "0.2"
    version_accelerator = "0.1"
    version_storage = None  # FIXME: this cannot have a none version
    def __init__(self):
        """Initialize the SSM messenger with configured values."""
        # Ensure the outgoing spool directory exists before building the
        # dirq queue on top of it.
        try:
            utils.makedirs(CONF.ssm.output_path)
        except Exception as err:
            LOG.error(f"Failed to create path {CONF.ssm.output_path} because {err}")
            raise err
        self.queue = dirq.QueueSimple.QueueSimple(CONF.ssm.output_path)
    def _push_message_cloud(self, entries: typing.List[str]):
        """Push a compute message, formatted following the CloudRecord.

        Entries are joined with the APEL '%%' record separator.
        """
        message = f"APEL-cloud-message: v{self.version_cloud}\n"
        aux = "\n%%\n".join(entries)
        message += f"{aux}\n"
        self.queue.add(message.encode("utf-8"))
    def _push_message_json(
        self,
        entries: typing.List[str],
        msg_type: str,
        version: str,
    ):
        """Push a JSON message with a UsageRecords list.

        Each entry is itself a JSON string, so it is parsed back before
        being embedded in the envelope.
        """
        message = {
            "Type": msg_type,
            "Version": version,
            "UsageRecords": [json.loads(r) for r in entries],
        }
        self.queue.add(json.dumps(message))
    def _push_message_ip(self, entries: typing.List[str]):
        """Push an IP message."""
        self._push_message_json(entries, "APEL Public IP message", self.version_ip)
    def _push_message_accelerator(self, entries: typing.List[str]):
        """Push an accelerator message."""
        self._push_message_json(
            entries, "APEL-accelerator-message", self.version_accelerator
        )
    def _push_message_storage(self, entries):
        """Push a storage message (EMI StorageRecord XML envelope)."""
        ETree.register_namespace(
            "sr", "http://eu-emi.eu/namespaces/2011/02/storagerecord"
        )
        root = ETree.Element("sr:StorageUsageRecords")
        for record in entries:
            # We are not parsing XML so this is safe
            sr = ETree.fromstring(record)  # nosec
            root.append(sr)
        self.queue.add(ETree.tostring(root))
    def _push(self, entries_cloud, entries_ip, entries_accelerator, entries_storage):
        """Push all messages, dividing them into smaller chunks.

        This method gets lists of messages to be pushed in smaller chunks as per GGUS
        ticket 143436: https://ggus.eu/index.php?mode=ticket_info&ticket_id=143436
        """
        for i in range(0, len(entries_cloud), CONF.ssm.max_size):
            entries = entries_cloud[i : i + CONF.ssm.max_size]  # noqa(E203)
            self._push_message_cloud(entries)
        for i in range(0, len(entries_ip), CONF.ssm.max_size):
            entries = entries_ip[i : i + CONF.ssm.max_size]  # noqa(E203)
            self._push_message_ip(entries)
        for i in range(0, len(entries_accelerator), CONF.ssm.max_size):
            entries = entries_accelerator[i : i + CONF.ssm.max_size]  # noqa(E203)
            self._push_message_accelerator(entries)
        for i in range(0, len(entries_storage), CONF.ssm.max_size):
            entries = entries_storage[i : i + CONF.ssm.max_size]  # noqa(E203)
            self._push_message_storage(entries)
    def push(self, records):
        """Push all records to SSM.

        This includes pushing the following records:

        - Cloud records
        - IP records
        - Accelerator records
        - Storage records

        This method will iterate over all the records, transforming them into the
        correct messages, then pushing it.

        Raises caso.exception.CasoError on an unknown record type.
        """
        if not records:
            return
        # Bucket each record's serialized form by record family.
        entries_cloud = []
        entries_ip = []
        entries_accelerator = []
        entries_storage = []
        for record in records:
            if isinstance(record, caso.record.CloudRecord):
                entries_cloud.append(record.ssm_message())
            elif isinstance(record, caso.record.IPRecord):
                entries_ip.append(record.ssm_message())
            elif isinstance(record, caso.record.AcceleratorRecord):
                entries_accelerator.append(record.ssm_message())
            elif isinstance(record, caso.record.StorageRecord):
                entries_storage.append(record.ssm_message())
            else:
                raise caso.exception.CasoError("Unexpected record format!")
        self._push(entries_cloud, entries_ip, entries_accelerator, entries_storage)
class SSMMessengerV04(SSMMessenger):
    """Deprecated versioned SSM Messenger."""
    def __init__(self):
        """Initialize the SSM V04 messenger.

        Deprecated; please stop using SSM versioned messengers and use the
        plain 'ssm' messenger instead.
        """
        # Warn once at construction time, then behave exactly like the
        # current SSMMessenger.
        msg = (
            "Using an versioned SSM messenger is deprecated, please use "
            "'ssm' as messenger instead in order to use the latest "
            "version."
        )
        warnings.warn(msg, DeprecationWarning)
        super(SSMMessengerV04, self).__init__()
| [
"aloga@ifca.unican.es"
] | aloga@ifca.unican.es |
7ff358f10e084e30bb2c22cfd323b7f85f03fce4 | 390d19c3159133d8c688396cb11b4ed3f8178d09 | /Programmers/Score_Kit/01_3_위장.py | 0591e5aaa95ab645a8f56734b027b4f3db4deddb | [] | no_license | JJayeee/CodingPractice | adba64cbd1d030b13a877f0b2e5ccc1269cb2e11 | 60f8dce48c04850b9b265a9a31f49eb6d9fc13c8 | refs/heads/master | 2021-08-16T17:14:01.161390 | 2021-07-16T00:42:18 | 2021-07-16T00:42:18 | 226,757,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py |
def solution(clothes):
    """Count the distinct non-empty outfits that can be assembled.

    Each outfit picks at most one item per clothing category, so the answer
    is the product over categories of (item count + 1) minus the single
    all-empty combination.
    """
    per_category = {}
    for cloth in clothes:
        kind = cloth[1]
        per_category[kind] = per_category.get(kind, 0) + 1
    combinations = 1
    for item_count in per_category.values():
        combinations *= item_count + 1
    return combinations - 1


clothes = [['yellow_hat', 'headgear'], ['blue_sunglasses', 'eyewear'], ['green_turban', 'headgear']]
print(solution(clothes))
| [
"jay.hyundong@gmail.com"
] | jay.hyundong@gmail.com |
b6cf9861fcbe501ba63482c704836a9e1863cc30 | 4d6975caece0acdc793a41e8bc6d700d8c2fec9a | /leetcode/1513.find-all-good-strings/1513.find-all-good-strings.py | 06db229ca57ad0b9dbecfb62b96139b4cb180787 | [] | no_license | guiconti/workout | 36a3923f2381d6e7023e127100409b3a2e7e4ccb | 5162d14cd64b720351eb30161283e8727cfcf376 | refs/heads/master | 2021-08-03T10:32:02.108714 | 2021-07-26T04:38:14 | 2021-07-26T04:38:14 | 221,025,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | class Solution:
def findGoodStrings(self, n: int, s1: str, s2: str, evil: str) -> int:
| [
"guibasconti@gmail.com"
] | guibasconti@gmail.com |
d23def31f939fd7f0eb5ff5d4fde51ddad4a27cb | 6fb37fee016346120d4c14c4343516532304055a | /src/genie/libs/parser/iosxe/tests/test_show_crypto.py | f1e2ff61c5c3ad22aae41719fa89eb3f6e49188a | [
"Apache-2.0"
] | permissive | devbollinger/genieparser | 011526ebbd747c6dcd767535ce4bd33167e15536 | ad5ce7ba8f5153d1aeb9cffcfc4dde0871f3401c | refs/heads/master | 2020-12-20T11:36:00.750128 | 2020-01-24T18:45:40 | 2020-01-24T18:45:40 | 236,061,155 | 0 | 0 | Apache-2.0 | 2020-01-24T18:38:43 | 2020-01-24T18:38:42 | null | UTF-8 | Python | false | false | 6,649 | py |
# Python
import unittest
from unittest.mock import Mock
# ATS
from ats.topology import Device
# Genie
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.iosxe.show_crypto import ShowCryptoPkiCertificates
# ====================================================
# Unit test for 'show crypto pki certificates <WORD>'
# ====================================================
class test_show_crypto_pki_certificate(unittest.TestCase):
    """Unit tests for the ShowCryptoPkiCertificates parser."""
    device = Device(name='aDevice')
    # Empty device output must make the parser raise SchemaEmptyParserError.
    empty_output = {'execute.return_value': ''}
    # Expected parsed structure for a Catalyst 3850 SUDI certificate chain.
    golden_parsed_output_c3850 = {
        "trustpoints": {
            "CISCO_IDEVID_SUDI": {
                "associated_trustpoints":{
                    "certificate": {
                        "status": "Available",
                        "serial_number_in_hex": "793B572700000003750B",
                        "subject": {
                            "name": "WS-C3850-24P-0057D21BC800",
                            "pid": "WS-C3850-24P",
                            "cn": "WS-C3850-24P-0057D21BC800",
                            "serial_number": "FCW1947C0GF"
                        },
                        "issuer": {
                            "cn": "Cisco Manufacturing CA SHA2",
                            "o": "Cisco"
                        },
                        "crl_distribution_points": "http://www.cisco.com/security/pki/crl/cmca2.crl",
                        "usage": "General Purpose",
                        "validity_date": {
                            "start_date": "00:34:52 UTC Nov 20 2015",
                            "end_date": "00:44:52 UTC Nov 20 2025"
                        }
                    },
                    "ca_certificate": {
                        "status": "Available",
                        "serial_number_in_hex": "02",
                        "subject": {
                            "cn": "Cisco Manufacturing CA SHA2",
                            "o": "Cisco"
                        },
                        "issuer": {
                            "cn": "Cisco Root CA M2",
                            "o": "Cisco"
                        },
                        "crl_distribution_points": "http://www.cisco.com/security/pki/crl/crcam2.crl",
                        "usage": "Signature",
                        "validity_date": {
                            "start_date": "13:50:58 UTC Nov 12 2012",
                            "end_date": "13:00:17 UTC Nov 12 2037"
                        }
                    }
                }
            }
        }
    }
    # Raw CLI output fed to the parser for the c3850 case.
    golden_output_c3850 = {'execute.return_value': '''
        Certificate
        Status: Available
        Certificate Serial Number (hex): 793B572700000003750B
        Certificate Usage: General Purpose
        Issuer:
        cn=Cisco Manufacturing CA SHA2
        o=Cisco
        Subject:
        Name: WS-C3850-24P-0057D21BC800
        Serial Number: PID:WS-C3850-24P SN:FCW1947C0GF
        cn=WS-C3850-24P-0057D21BC800
        serialNumber=PID:WS-C3850-24P SN:FCW1947C0GF
        CRL Distribution Points:
        http://www.cisco.com/security/pki/crl/cmca2.crl
        Validity Date:
        start date: 00:34:52 UTC Nov 20 2015
        end date: 00:44:52 UTC Nov 20 2025
        Associated Trustpoints: CISCO_IDEVID_SUDI
        CA Certificate
        Status: Available
        Certificate Serial Number (hex): 02
        Certificate Usage: Signature
        Issuer:
        cn=Cisco Root CA M2
        o=Cisco
        Subject:
        cn=Cisco Manufacturing CA SHA2
        o=Cisco
        CRL Distribution Points:
        http://www.cisco.com/security/pki/crl/crcam2.crl
        Validity Date:
        start date: 13:50:58 UTC Nov 12 2012
        end date: 13:00:17 UTC Nov 12 2037
        Associated Trustpoints: CISCO_IDEVID_SUDI Trustpool
    '''}
    # Expected parsed structure for a CSR1000v self-signed certificate.
    golden_parsed_output_csr1000 = {
        'trustpoints':
            {'TP-self-signed-4146203551':
                {'associated_trustpoints':
                    {'router_self_signed_certificate':
                        {'issuer':
                            {'cn': 'IOS-Self-Signed-Certificate-4146203551'},
                        'serial_number_in_hex': '01',
                        'status': 'Available',
                        'storage': 'nvram:IOS-Self-Sig#1.cer',
                        'subject':
                            {'cn': 'IOS-Self-Signed-Certificate-4146203551',
                            'name': 'IOS-Self-Signed-Certificate-4146203551'},
                        'usage': 'General Purpose',
                        'validity_date':
                            {'end_date': '00:00:00 UTC Jan 1 2020',
                            'start_date': '21:37:27 UTC Apr 23 2018'}}}}}}
    # Raw CLI output fed to the parser for the csr1000 case.
    golden_output_csr1000 = {'execute.return_value': '''
        Router Self-Signed Certificate
        Status: Available
        Certificate Serial Number (hex): 01
        Certificate Usage: General Purpose
        Issuer:
        cn=IOS-Self-Signed-Certificate-4146203551
        Subject:
        Name: IOS-Self-Signed-Certificate-4146203551
        cn=IOS-Self-Signed-Certificate-4146203551
        Validity Date:
        start date: 21:37:27 UTC Apr 23 2018
        end date: 00:00:00 UTC Jan 1 2020
        Associated Trustpoints: TP-self-signed-4146203551
        Storage: nvram:IOS-Self-Sig#1.cer
    '''}
    def test_empty(self):
        # Empty CLI output must raise instead of returning an empty dict.
        self.device = Mock(**self.empty_output)
        obj = ShowCryptoPkiCertificates(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()
    def test_c3850(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_c3850)
        obj = ShowCryptoPkiCertificates(device=self.device)
        parsed_output = obj.parse(trustpoint_name='CISCO_IDEVID_SUDI')
        self.assertEqual(parsed_output, self.golden_parsed_output_c3850)
    def test_csr1000(self):
        self.maxDiff = None
        self.device = Mock(**self.golden_output_csr1000)
        obj = ShowCryptoPkiCertificates(device=self.device)
        parsed_output = obj.parse(trustpoint_name='TP-self-signed-4146203551')
        self.assertEqual(parsed_output, self.golden_parsed_output_csr1000)
self.assertEqual(parsed_output, self.golden_parsed_output_csr1000)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| [
"jeaubin@cisco.com"
] | jeaubin@cisco.com |
a634a215e16936e0746a4d323b85a847e4296c61 | f7d343efc7b48818cac4cf9b98423b77345a0067 | /deployment_validation/csv_to_yaml_convertor/csv_to_yaml_convertor.py | ccc797444b3541460f1b9eb12eabc0c558df00f4 | [] | no_license | vijaymaddukuri/python_repo | 70e0e24d0554c9fac50c5bdd85da3e15c6f64e65 | 93dd6d14ae4b0856aa7c6f059904cc1f13800e5f | refs/heads/master | 2023-06-06T02:55:10.393125 | 2021-06-25T16:41:52 | 2021-06-25T16:41:52 | 151,547,280 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,684 | py | from os.path import dirname, abspath, join
from robot.api import logger
from shutil import copyfile
import csv
import re
import sys
import yaml
current_dir = dirname(dirname(abspath(__file__)))
class CsvToYamlConvertor:
    """
    Take the CSV input and convert it into YAML format.

    One YAML config file is produced per CSV data row: the base YAML file is
    copied and its keys are overwritten from the row's values, using the CSV
    headings (dot-separated) as key paths.
    """
    def __init__(self, service, base_yaml_file_path, csv_file_path, dir_to_store_yaml, filename=None):
        """
        :param service: TAS or Middlewware or worker or deployment
        :param base_yaml_file_path: Base YAML file name along with the location
        :param csv_file_path: CSV file name with the location
        :param dir_to_store_yaml: Directory where generated YAML files are saved
        :param filename: Optional output-file prefix; when None, the first
            cell of each row (tenant ID) plus the service name is used
        """
        self.yaml_file_path = base_yaml_file_path
        self.dir_to_store_yaml = dir_to_store_yaml
        self.file_name = filename
        # Open our data file in read-mode.
        # NOTE(review): this handle is only closed at the end of
        # convert_csv_to_yaml(); it leaks if that method is never called.
        self.csvfile = open(csv_file_path, 'r')
        # Save a CSV Reader object.
        self.datareader = csv.reader(self.csvfile, delimiter=',', quotechar='"')
        # Service name
        self.service = service
        # Empty array for data headings, which we will fill with the first row from our CSV.
        self.data_headings = []
    def load_yaml_file(self, filename):
        """
        load YAML file
        In case of any error, this function calls sys.exit(1)
        :param filename: YAML filename along with the location
        :return: YAML as dict
        """
        try:
            with open(filename, 'r') as stream:
                try:
                    return yaml.safe_load(stream)
                except yaml.YAMLError as exc:
                    logger.error(exc)
                    sys.exit(1)
        except IOError as e:
            logger.error(e)
            sys.exit(1)
    def update_yaml_data(self, myYaml, key, value, append_mode=False):
        """
        Set or add a key to given YAML data. Call itself recursively.
        :param myYaml: YAML data to be modified
        :param key: key as array of key tokens
        :param value: value of any data type
        :param append_mode: when True, an existing scalar value is converted
            to a list and the new value appended; default is False
        :return: modified YAML data
        """
        if len(key) == 1:
            # Leaf token: set (or append to) the value here.
            if not append_mode or not key[0] in myYaml:
                myYaml[key[0]] = value
            else:
                if type(myYaml[key[0]]) is not list:
                    myYaml[key[0]] = [myYaml[key[0]]]
                myYaml[key[0]].append(value)
        else:
            # Intermediate token: ensure a dict exists, then recurse.
            if not key[0] in myYaml or type(myYaml[key[0]]) is not dict:
                myYaml[key[0]] = {}
            myYaml[key[0]] = self.update_yaml_data(myYaml[key[0]], key[1:], value, append_mode)
        return myYaml
    def rm_yaml_data(self, myYaml, key):
        """
        Remove a key and it's value from given YAML data structure.
        No error or such thrown if the key doesn't exist.
        :param myYaml: YAML data to be modified
        :param key: key as array of key tokens
        :return: modified YAML data
        """
        if len(key) == 1 and key[0] in myYaml:
            del myYaml[key[0]]
        elif key[0] in myYaml:
            myYaml[key[0]] = self.rm_yaml_data(myYaml[key[0]], key[1:])
        return myYaml
    def save_yaml(self, data, yaml_file):
        """
        Saves given YAML data to a local file.
        In case of any error, this function calls sys.exit(1).
        :param data: YAML data
        :param yaml_file: Location to save the yaml file
        """
        try:
            with open(yaml_file, 'w') as outfile:
                yaml.dump(data, outfile, default_flow_style=False)
        except IOError as e:
            logger.error(e)
            sys.exit(1)
    def convert_csv_to_yaml(self):
        """
        Generate one YAML config file per CSV data row and save it under
        ``dir_to_store_yaml``; the first CSV row supplies the key headings.
        """
        # Loop through each row...
        for row_index, row in enumerate(self.datareader):
            # If this is the first row, populate our data_headings variable.
            if row_index == 0:
                data_headings = row
            # Otherwise, create a YAML file from the data in this row...
            else:
                # Create a new config.yaml with filename based on index number (Tenant ID) of our current row
                # and service
                if self.file_name is None:
                    file_name = str(row[0]) + '_' + self.service.lower() + '_config' + '.yaml'
                else:
                    file_name = self.file_name + '_' + 'config' + '.yaml'
                updated_yaml_file_path = join(self.dir_to_store_yaml, file_name)
                copyfile(self.yaml_file_path, updated_yaml_file_path)
                readyamldata = self.load_yaml_file(updated_yaml_file_path)
                # Empty string that we will fill with YAML formatted text based on data extracted from our CSV.
                yaml_text = ""
                # Loop through each cell in this row...
                for cell_index, cell in enumerate(row):
                    # Compile a line of YAML text from our headings list and the text of the current cell,
                    # followed by a linebreak.
                    # Heading text is converted to lowercase. Spaces are converted to underscores and hyphens
                    # are removed.
                    # In the cell text, line endings are replaced with commas.
                    cell_heading = data_headings[cell_index].replace(" ", "_").replace("-", "")
                    # Create the list of keys
                    cell_items = cell_heading.split('.')
                    if len(cell_items) == 1:
                        cell_keys = [cell_heading]
                    else:
                        cell_keys = cell_items
                    # Get the cell value
                    cell_value = cell.replace("\n", ", ")
                    # Update the data in yaml format
                    set_value = self.update_yaml_data(readyamldata, cell_keys, cell_value)
                    # Save the yaml data into a file
                    self.save_yaml(set_value, updated_yaml_file_path)
                # Open the above yaml file to update the list formatted data
                # NOTE(review): the file handle is rebound to its contents
                # without being closed -- leaks a descriptor per row.
                f = open(updated_yaml_file_path, 'r')
                f = f.read()
                # Convert the data into list format using regex
                # NOTE(review): fragile -- rewrites any "'<digit>':" mapping
                # key into a YAML list item; assumes such keys only ever come
                # from numeric CSV headings meant as list indices.
                final = (re.sub(r'(\'[0-9]\'\:\s+)', '- ', str(f)))
                # Save the file
                with open(updated_yaml_file_path, 'w') as f:
                    f.write(final)
        # Close the CSV
        self.csvfile.close()
# Sample Execution
# yamlObj = CsvToYamlConvertor('tas', r'C:\Users\config.yaml', r'C:\Users\Downloads\inputfile.csv')
# yamlObj.convert_csv_to_yaml()
| [
"Vijay.Maddukuri@virtustream.com"
] | Vijay.Maddukuri@virtustream.com |
1d877ae0dcc1e80b1288dcf6fc5c06f53a8b53c2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03326/s143256207.py | 2b339363954b6a8306957c1299b9177c0449d16f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | n, m = [int(i) for i in input().split()]
xyz = [[int(i) for i in input().split()] for _ in range(n)]
ans = 0
a = [0]*3
for a[0] in range(-1, 2, 2):
for a[1] in range(-1, 2, 2):
for a[2] in range(-1, 2, 2):
d = list(map(lambda x: sum([i * j for i, j in zip(a, x)]), xyz))
d.sort(reverse=1)
ans = max(ans, sum(d[:m]))
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
41e6e4d4a28bedaf4a97e2cec0c2c861189a34ea | 57d8323ca9bdd0965d487fe2e453a3cfb8dfa86f | /src/train_parabola.py | 1bbe280db5eed7ac6840269310212c152769c849 | [
"Unlicense"
] | permissive | mountain/suan-demo | 20ac79ddaf8b749c21badda37d07a3aeccdf7ba7 | 5136ae050156a2538aea4f718735995d3a289457 | refs/heads/master | 2023-04-07T10:19:49.148272 | 2021-04-09T05:24:20 | 2021-04-09T05:24:20 | 288,619,647 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,677 | py | import os
import arrow
import logging
import argparse
import numpy as np
import torch
import torch as th
import torch.nn as nn
from pathlib import Path
from leibniz.unet.base import UNet
from leibniz.unet.complex_hyperbolic import CmplxHyperBottleneck
from leibniz.unet.hyperbolic import HyperBottleneck
from leibniz.unet.senet import SEBottleneck
from leibniz.nn.activation import CappingRelu
from blks.direct import DirectBlocks
from blks.am import AMBlocks
from dataset.chaos_parabola import ChaosParabolaDataSet
parser = argparse.ArgumentParser()
parser.add_argument("-g", "--gpu", type=str, default='0', help="index of gpu")
parser.add_argument("-c", "--n_cpu", type=int, default=64, help="number of cpu threads to use during batch generation")
parser.add_argument("-b", "--batch_size", type=int, default=64, help="size of the batches")
parser.add_argument("-e", "--epoch", type=int, default=0, help="current epoch to start training from")
parser.add_argument("-n", "--n_epochs", type=int, default=500, help="number of epochs of training")
parser.add_argument("-m", "--model", type=str, default='', help="metrological model to load")
parser.add_argument("-k", "--check", type=str, default='', help="checkpoint file to load")
opt = parser.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = opt.gpu
print('cudnn:', th.backends.cudnn.version())
np.core.arrayprint._line_width = 150
np.set_printoptions(linewidth=np.inf)
name = opt.model
time_str = arrow.now().format('YYYYMMDD_HHmmss')
model_path = Path(f'./_log-{time_str}')
model_path.mkdir(exist_ok=True)
log_file = model_path / Path('train.log')
logging.basicConfig(level=logging.INFO, filename=log_file, filemode='w')
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.info(str(opt))
root = './data'
if not os.path.exists(root):
os.mkdir(root)
train_set = ChaosParabolaDataSet(length=800)
test_set = ChaosParabolaDataSet(length=200)
batch_size = opt.batch_size
train_loader = torch.utils.data.DataLoader(
dataset=train_set,
batch_size=batch_size,
shuffle=True)
test_loader = torch.utils.data.DataLoader(
dataset=test_set,
batch_size=batch_size,
shuffle=False)
mean_loader = torch.utils.data.DataLoader(
dataset=train_set,
batch_size=batch_size,
shuffle=True)
total_sum = 0.0
total_cnt = 0.0
for step, sample in enumerate(mean_loader):
input, target = sample
input, target = input.float(), target.float()
data = th.cat((input, target), dim=1)
total_sum += data.sum().item()
total_cnt += np.prod(data.size())
mean = total_sum / total_cnt
print(mean)
std_loader = torch.utils.data.DataLoader(
dataset=train_set,
batch_size=batch_size,
shuffle=True)
total_std = 0.0
total_cnt = 0.0
for step, sample in enumerate(mean_loader):
input, target = sample
input, target = input.float(), target.float()
data = th.cat((input, target), dim=1)
total_std += ((data - mean) * (data - mean)).sum().item()
total_cnt += np.prod(data.size())
std = total_std / total_cnt
print(std)
class LearningModel(nn.Module):
def __init__(self):
super().__init__()
self.unet = UNet(2, 10, normalizor='batch', spatial=(32, 32), layers=5, ratio=0,
vblks=[2, 2, 2, 2, 2], hblks=[2, 2, 2, 2, 2],
scales=[-1, -1, -1, -1, -1], factors=[1, 1, 1, 1, 1],
block=AMBlocks, relu=CappingRelu(), final_normalized=True)
def forward(self, input):
input = (input - mean) / std
output = self.unet(input)
output = output * std + mean
return output
class PerfectModel(nn.Module):
def __init__(self):
super().__init__()
self.dummy = th.zeros(1)
def forward(self, input):
result = []
z = input[:, 1:2]
for ix in range(10):
z = 1 - 2 * z * z
result.append(z)
return th.cat(result, dim=1)
mdl = LearningModel()
pfc = PerfectModel()
mse = nn.MSELoss()
optimizer = th.optim.Adam(mdl.parameters())
def train(epoch):
train_size = 0
loss_per_epoch = 0.0
mdl.train()
for step, sample in enumerate(train_loader):
input, target = sample
input, target = input.float(), target.float()
if th.cuda.is_available():
input = input.cuda()
target = target.cuda()
mdl.cuda()
result = mdl(input)
loss = mse(result, target)
optimizer.zero_grad()
loss.backward()
optimizer.step()
batch = result.size()[0]
logger.info(f'Epoch: {epoch + 1:03d} | Step: {step + 1:03d} | Loss: {loss.item()}')
loss_per_epoch += loss.item() * batch
train_size += batch
logger.info(f'Epoch: {epoch + 1:03d} | Train Loss: {loss_per_epoch / train_size}')
def test(epoch):
mdl.eval()
test_size = 0
loss_per_epoch = 0.0
for step, sample in enumerate(test_loader):
input, target = sample
input, target = input.float(), target.float()
if th.cuda.is_available():
input = input.cuda()
target = target.cuda()
mdl.cuda()
with th.no_grad():
result = mdl(input)
loss = mse(result, target)
batch = result.size()[0]
logger.info(f'Epoch: {epoch + 1:03d} | Step: {step + 1:03d} | Loss: {loss.item()}')
loss_per_epoch += loss.item() * batch
test_size += batch
logger.info(f'Epoch: {epoch + 1:03d} | Test Loss: {loss_per_epoch / test_size}')
def baseline(epoch):
test_size = 0
loss_per_epoch = 0.0
for step, sample in enumerate(test_loader):
input, target = sample
input, target = input.float(), target.float()
if th.cuda.is_available():
input = input.cuda()
target = target.cuda()
pfc.cuda()
with th.no_grad():
result = pfc(input)
loss = mse(result, target)
batch = result.size()[0]
logger.info(f'Epoch: {epoch + 1:03d} | Step: {step + 1:03d} | Loss: {loss.item()}')
loss_per_epoch += loss.item() * batch
test_size += batch
logger.info(f'Epoch: {epoch + 1:03d} | Baseline: {loss_per_epoch / test_size}')
if __name__ == '__main__':
for epoch in range(opt.n_epochs):
try:
train(epoch)
test(epoch)
baseline(epoch)
except Exception as e:
logger.exception(e)
break
| [
"mingli.yuan@gmail.com"
] | mingli.yuan@gmail.com |
662ce1fa1f93889996bcd99959ff20577d03d6bb | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/fv/cepg.py | d50cc2b513a4d9c62c8b470cb2f8060a86f72ae8 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 41,647 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class CEPg(Mo):
meta = ClassMeta("cobra.model.fv.CEPg")
meta.isAbstract = True
meta.moClassName = "fvCEPg"
meta.moClassName = "fvCEPg"
meta.rnFormat = ""
meta.category = MoCategory.REGULAR
meta.label = "Client End-Point Group"
meta.writeAccessMask = 0x0
meta.readAccessMask = 0x608239400006c713
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fv.RtExporterToEPg")
meta.childClasses.add("cobra.model.l2.EgrPktsAg15min")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist15min")
meta.childClasses.add("cobra.model.fv.RtSecInherited")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1mo")
meta.childClasses.add("cobra.model.l2.EgrBytesPart15min")
meta.childClasses.add("cobra.model.vz.ProvSubjLbl")
meta.childClasses.add("cobra.model.fv.RtDestEpg")
meta.childClasses.add("cobra.model.fv.RtFromAbsEpg")
meta.childClasses.add("cobra.model.tag.Inst")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1qtr")
meta.childClasses.add("cobra.model.fv.RtExtdevMgrMgmtEPg")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1year")
meta.childClasses.add("cobra.model.vns.AbsParam")
meta.childClasses.add("cobra.model.fv.RtNtpProvToEpg")
meta.childClasses.add("cobra.model.vns.CRel")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist5min")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1h")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1h")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1d")
meta.childClasses.add("cobra.model.l2.EgrBytesAg15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1year")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1w")
meta.childClasses.add("cobra.model.fv.RtSrcToEpg")
meta.childClasses.add("cobra.model.health.NodeInst")
meta.childClasses.add("cobra.model.tag.ExtMngdInst")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist5min")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1h")
meta.childClasses.add("cobra.model.fv.RInfoHolder")
meta.childClasses.add("cobra.model.fv.RtDevEpg")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1qtr")
meta.childClasses.add("cobra.model.vns.SvcPol")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1w")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1d")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1h")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1year")
meta.childClasses.add("cobra.model.vns.CFolder")
meta.childClasses.add("cobra.model.fv.RsSecInherited")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1w")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1year")
meta.childClasses.add("cobra.model.fv.RsProv")
meta.childClasses.add("cobra.model.fv.RtLIfCtxToInstP")
meta.childClasses.add("cobra.model.fv.FltCounter1d")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1h")
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fv.FltCounterHist1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1d")
meta.childClasses.add("cobra.model.l2.IngrBytesAg15min")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1h")
meta.childClasses.add("cobra.model.fv.FltCounter1w")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1w")
meta.childClasses.add("cobra.model.fv.RtSvcMgmtEpg")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1mo")
meta.childClasses.add("cobra.model.fv.FltCounter1year")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1d")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist15min")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPart15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart5min")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1d")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1h")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1h")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1year")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist15min")
meta.childClasses.add("cobra.model.fv.RtVConnToEpgSubnet")
meta.childClasses.add("cobra.model.fv.RtVConnToEpgEp")
meta.childClasses.add("cobra.model.fv.RsConsIf")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1year")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1year")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1d")
meta.childClasses.add("cobra.model.fv.RsCustQosPol")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1h")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1mo")
meta.childClasses.add("cobra.model.fv.RtVsrcToEpg")
meta.childClasses.add("cobra.model.fv.RtDevMgrEpg")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1h")
meta.childClasses.add("cobra.model.vns.GFolder")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1qtr")
meta.childClasses.add("cobra.model.fv.RtToAbsEpg")
meta.childClasses.add("cobra.model.fv.UpdateContract")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1mo")
meta.childClasses.add("cobra.model.vz.ConsSubjLbl")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPart5min")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1w")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1mo")
meta.childClasses.add("cobra.model.fv.CtrctCtxDefCont")
meta.childClasses.add("cobra.model.vns.GRel")
meta.childClasses.add("cobra.model.fv.FltCounterHist1year")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist5min")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1mo")
meta.childClasses.add("cobra.model.fv.RtSecProvToEpg")
meta.childClasses.add("cobra.model.fv.OrchsInfo")
meta.childClasses.add("cobra.model.fv.RsCons")
meta.childClasses.add("cobra.model.l2.IngrBytesPart5min")
meta.childClasses.add("cobra.model.vz.ConsLbl")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1year")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1year")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1qtr")
meta.childClasses.add("cobra.model.fv.FltCounter1mo")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1qtr")
meta.childClasses.add("cobra.model.fv.SharedService")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1mo")
meta.childClasses.add("cobra.model.telemetry.MatchedSelector")
meta.childClasses.add("cobra.model.fv.FltCounterHist1w")
meta.childClasses.add("cobra.model.fv.FltCounterHist1h")
meta.childClasses.add("cobra.model.fv.FltCounter1h")
meta.childClasses.add("cobra.model.vns.AbsCfgRel")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1qtr")
meta.childClasses.add("cobra.model.fv.FltCounterHist1d")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1h")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1w")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1d")
meta.childClasses.add("cobra.model.fv.RtARemoteHostToEpg")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1h")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPart15min")
meta.childClasses.add("cobra.model.tag.AliasInst")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1year")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1h")
meta.childClasses.add("cobra.model.fv.RtToAbsEpgForEpgToEpg")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1w")
meta.childClasses.add("cobra.model.vz.ProvLbl")
meta.childClasses.add("cobra.model.vz.ProvCtrctLbl")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1h")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1qtr")
meta.childClasses.add("cobra.model.fv.FltCounter1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsPart5min")
meta.childClasses.add("cobra.model.vns.CfgRelInst")
meta.childClasses.add("cobra.model.tag.AliasDelInst")
meta.childClasses.add("cobra.model.vns.ParamInst")
meta.childClasses.add("cobra.model.vns.GParam")
meta.childClasses.add("cobra.model.vns.FolderInst")
meta.childClasses.add("cobra.model.l2.IngrPktsAg15min")
meta.childClasses.add("cobra.model.l2.IngrBytesPart15min")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1h")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1qtr")
meta.childClasses.add("cobra.model.fv.RtSvrToMgmtEPg")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1d")
meta.childClasses.add("cobra.model.fv.RtChassisEpg")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1h")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1d")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1w")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1w")
meta.childClasses.add("cobra.model.fv.RsProtBy")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1qtr")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1year")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1d")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1qtr")
meta.childClasses.add("cobra.model.fv.RtProfileToEpg")
meta.childClasses.add("cobra.model.fv.RtEpg")
meta.childClasses.add("cobra.model.fv.FltCounterHist15min")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1mo")
meta.childClasses.add("cobra.model.fv.RtTermToEPg")
meta.childClasses.add("cobra.model.vns.AbsFolder")
meta.childClasses.add("cobra.model.fv.RsIntraEpg")
meta.childClasses.add("cobra.model.orchs.LDevVipCfg")
meta.childClasses.add("cobra.model.vns.CParam")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1year")
meta.childClasses.add("cobra.model.fv.RtMgmtEPg")
meta.childClasses.add("cobra.model.vz.ConsCtrctLbl")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1w")
meta.childClasses.add("cobra.model.fv.RtFuncToEpg")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1mo")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1qtr")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1year")
meta.childClasses.add("cobra.model.fv.FltCounterHist1qtr")
meta.childClasses.add("cobra.model.fv.RtProv")
meta.childClasses.add("cobra.model.fv.FltCounterHist5min")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist5min")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist15min")
meta.childClasses.add("cobra.model.fv.RtPoeEpg")
meta.childClasses.add("cobra.model.fv.FltCounter5min")
meta.childClasses.add("cobra.model.fv.FltCounter15min")
meta.childClasses.add("cobra.model.fv.RtSvrEpg")
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtToAbsEpgForEpgToEpg", "rtdbgacToAbsEpgForEpgToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtExtdevMgrMgmtEPg", "rtextdevExtdevMgrMgmtEPg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtExporterToEPg", "rtnetflowExporterToEPg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtNtpProvToEpg", "rtdatetimeNtpProvToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1year", "HDl2IngrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist15min", "HDl2IngrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtARemoteHostToEpg", "rtfileARemoteHostToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1year", "CDl2IngrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart15min", "CDl2IngrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart15min", "CDl2EgrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1qtr", "HDl2IngrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1year", "CDl2EgrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart15min", "CDl2IngrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist15min", "HDl2IngrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtVConnToEpgSubnet", "rtvnsVConnToEpgSubnet-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist15min", "HDl2EgrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1year", "HDl2EgrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart5min", "CDl2IngrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1qtr", "CDl2IngrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1year", "HDl2IngrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1year", "CDl2IngrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist5min", "HDl2IngrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1mo", "HDl2IngrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist5min", "HDl2EgrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1qtr", "CDl2EgrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1year", "HDl2EgrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist15min", "HDl2EgrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart5min", "CDl2EgrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1year", "CDl2EgrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart5min", "CDl2IngrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist5min", "HDl2IngrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1year", "HDl2IngrBytesAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1qtr", "CDl2IngrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1qtr", "HDl2IngrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1mo", "CDl2IngrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart15min", "CDl2EgrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1qtr", "HDl2EgrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist15min", "HDl2IngrBytesAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist15min", "HDl2EgrBytesAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist5min", "HDl2EgrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1w", "CDl2IngrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1h", "CDl2IngrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1d", "CDl2IngrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg15min", "CDl2IngrBytesAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1year", "HDl2EgrBytesAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1mo", "HDl2EgrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1year", "HDfvFltCounter1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1year", "HDl2IngrPktsAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1qtr", "HDl2EgrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1h", "HDl2IngrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1year", "CDl2IngrBytesAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1d", "HDl2IngrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist15min", "HDl2IngrPktsAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1w", "HDl2IngrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1qtr", "CDl2EgrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart5min", "CDl2EgrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1mo", "CDl2EgrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1mo", "CDl2IngrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1qtr", "HDl2IngrBytesAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist15min", "HDfvFltCounter15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1mo", "HDl2IngrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1year", "HDl2EgrPktsAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1h", "CDl2IngrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg15min", "CDl2EgrBytesAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtLIfCtxToInstP", "rtvnsLIfCtxToInstP-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1year", "CDfvFltCounter1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1w", "CDl2EgrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1w", "CDl2IngrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1d", "CDl2EgrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist15min", "HDl2EgrPktsAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1h", "CDl2EgrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1mo", "HDl2EgrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1h", "HDl2IngrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1qtr", "HDl2IngrPktsAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1w", "HDl2IngrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1mo", "HDl2IngrBytesAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1qtr", "CDl2IngrBytesAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1mo", "CDl2EgrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1d", "HDl2EgrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1h", "HDl2EgrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1w", "HDl2EgrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg15min", "CDl2IngrPktsAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSvrToMgmtEPg", "rtauthSvrToMgmtEPg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1d", "HDl2IngrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1qtr", "HDl2EgrBytesAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1d", "CDl2IngrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1year", "CDl2IngrPktsAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1year", "CDl2EgrBytesAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1qtr", "HDfvFltCounter1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist5min", "HDfvFltCounter5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter15min", "CDfvFltCounter15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg15min", "CDl2EgrPktsAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtFromAbsEpg", "rtdbgacFromAbsEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1qtr", "CDl2IngrPktsAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1w", "HDl2IngrBytesAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1mo", "HDl2EgrBytesAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1year", "CDl2EgrPktsAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1qtr", "HDl2EgrPktsAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1mo", "HDfvFltCounter1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1w", "CDl2EgrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1h", "CDl2EgrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1d", "CDl2EgrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1d", "HDl2EgrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtVConnToEpgEp", "rtvnsVConnToEpgEp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSecProvToEpg", "rtaaaSecProvToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.SharedService", "sharedServiceAlloc"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1w", "HDl2EgrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1h", "HDl2EgrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1h", "HDl2IngrBytesAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1qtr", "CDfvFltCounter1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1mo", "HDl2IngrPktsAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1d", "HDl2IngrBytesAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtProfileToEpg", "rtdnsProfileToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1mo", "CDl2IngrBytesAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1qtr", "CDl2EgrBytesAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter5min", "CDfvFltCounter5min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1h", "CDl2IngrBytesAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1d", "CDl2IngrBytesAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1mo", "CDl2IngrPktsAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1w", "HDl2EgrBytesAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1d", "HDl2IngrPktsAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1h", "HDl2EgrBytesAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1d", "HDl2EgrBytesAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1w", "CDl2IngrBytesAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1mo", "CDl2EgrBytesAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1mo", "CDfvFltCounter1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1w", "HDfvFltCounter1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1h", "HDfvFltCounter1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1qtr", "CDl2EgrPktsAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounterHist1d", "HDfvFltCounter1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1h", "HDl2IngrPktsAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1w", "HDl2IngrPktsAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1mo", "HDl2EgrPktsAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtFuncToEpg", "rtinfraFuncToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1h", "CDl2EgrBytesAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1d", "CDl2EgrBytesAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1w", "CDl2EgrBytesAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1d", "CDfvFltCounter1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1h", "HDl2EgrPktsAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1d", "HDl2EgrPktsAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1w", "CDfvFltCounter1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1w", "HDl2EgrPktsAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSvcMgmtEpg", "rtvnsSvcMgmtEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1d", "CDl2IngrPktsAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1h", "CDl2IngrPktsAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtVsrcToEpg", "rtspanVsrcToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtToAbsEpg", "rtdbgacToAbsEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1mo", "CDl2EgrPktsAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.FltCounter1h", "CDfvFltCounter1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtChassisEpg", "rtvnsChassisEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1w", "CDl2IngrPktsAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSecInherited", "rtsecInherited-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSrcToEpg", "rtspanSrcToEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsSecInherited", "rssecInherited-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtDevMgrEpg", "rtvnsDevMgrEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1h", "CDl2EgrPktsAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1d", "CDl2EgrPktsAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1w", "CDl2EgrPktsAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtTermToEPg", "rtvnsTermToEPg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtSvrEpg", "rtdnsepgSvrEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtDestEpg", "rtspanDestEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.UpdateContract", "updateContract"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.CtrctCtxDefCont", "ctrctCtxdefDn-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtMgmtEPg", "rtvmmMgmtEPg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ProvSubjLbl", "provsubjlbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtDevEpg", "rtvnsDevEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsCustQosPol", "rscustQosPol"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ConsSubjLbl", "conssubjlbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtPoeEpg", "rtpoePoeEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.NodeInst", "nodehealth-"))
meta.childNamesAndRnPrefix.append(("cobra.model.telemetry.MatchedSelector", "matchedSel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.CfgRelInst", "CfgRelInst-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsIntraEpg", "rsintraEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtProv", "rtdhcpProv-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ProvCtrctLbl", "pCtrctLbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.ParamInst", "ParamInst-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtEpg", "rtsnmpEpg-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.AbsFolder", "absFolder-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ConsCtrctLbl", "cCtrctLbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.AbsParam", "absParam-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.SvcPol", "svcpol_C-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsConsIf", "rsconsIf-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.AliasDelInst", "aliasdel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsProtBy", "rsprotBy-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.CFolder", "cFolder-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.GFolder", "gFolder-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ConsLbl", "conslbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vz.ProvLbl", "provlbl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.ExtMngdInst", "extmngd"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsProv", "rsprov-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsCons", "rscons-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.AbsCfgRel", "absRel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.GParam", "gParam-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.CParam", "cParam-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.OrchsInfo", "Orchs-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.CRel", "cRel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.GRel", "gRel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.AliasInst", "alias"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.FolderInst", "FI_C-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Inst", "tag-"))
meta.childNamesAndRnPrefix.append(("cobra.model.orchs.LDevVipCfg", "lls-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RInfoHolder", "to-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.superClasses.add("cobra.model.fv.ATg")
meta.superClasses.add("cobra.model.pol.Comp")
meta.superClasses.add("cobra.model.fv.EPg")
meta.superClasses.add("cobra.model.fv.Comp")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.concreteSubClasses.add("cobra.model.fv.AEPg")
meta.concreteSubClasses.add("cobra.model.vns.REPpInfo")
meta.concreteSubClasses.add("cobra.model.vns.SHEPpInfo")
meta.concreteSubClasses.add("cobra.model.vns.EPpInfo")
meta.concreteSubClasses.add("cobra.model.vns.SDEPpInfo")
meta.concreteSubClasses.add("cobra.model.fv.TnlEPg")
meta.rnPrefixes = [
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "configIssues", "configIssues", 1715, PropCategory.REGULAR)
prop.label = "Config Issues"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("bd-id-not-allocated", "bd-ids-not-allocated", 256)
prop._addConstant("bd-not-present", "bd-not-present", 1)
prop._addConstant("context-id-not-allocated", "context-ids-not-allocated", 512)
prop._addConstant("context-not-present", "context-not-present.-configure-context-and/or-attach-context-to-bridge-group-domain.", 2)
prop._addConstant("encap-assignment", "encapsulation-not-valid", 8)
prop._addConstant("id-not-allocated", "ids-not-allocated", 4)
prop._addConstant("instrimedcy-unsupported", "on-demand-deployment-immediacy-is-not-supported-for-useg-epg-associated-to-vmware-dvs-domain", 1024)
prop._addConstant("invalid-rel-to-rtctrlProfile", "invalid-association-to-route-control-policy", 32)
prop._addConstant("l3port-and-sub-interface-on-path", "l3-port-and-sub-interface-not-allowed-on-same-path", 128)
prop._addConstant("none", "none", 0)
prop._addConstant("not-associated-with-mgmt-zone", "not-associated-with-management-zone", 64)
meta.props.add("configIssues", prop)
prop = PropMeta("str", "configSt", "configSt", 1714, PropCategory.REGULAR)
prop.label = "Deployment Status"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applied"
prop._addConstant("applied", "applied", 2)
prop._addConstant("applying", "applying", 1)
prop._addConstant("failed-to-apply", "failed-to-apply", 3)
prop._addConstant("not-applied", "not-applied", 0)
prop._addConstant("temp-failed-to-apply", "temp-failed-to-apply", 4)
meta.props.add("configSt", prop)
prop = PropMeta("str", "descr", "descr", 5582, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "exceptionTag", "exceptionTag", 37061, PropCategory.REGULAR)
prop.label = "Contract Exception Tag"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("exceptionTag", prop)
prop = PropMeta("str", "floodOnEncap", "floodOnEncap", 35418, PropCategory.REGULAR)
prop.label = "Handling of L2 Multicast/Broadcast and Link-Layer traffic at EPG level"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "disabled"
prop._addConstant("disabled", "disabled", 0)
prop._addConstant("enabled", "enabled", 1)
meta.props.add("floodOnEncap", prop)
prop = PropMeta("str", "isSharedSrvMsiteEPg", "isSharedSrvMsiteEPg", 45493, PropCategory.REGULAR)
prop.label = "Multisite Shared Service EPG"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("isSharedSrvMsiteEPg", prop)
prop = PropMeta("str", "matchT", "matchT", 1726, PropCategory.REGULAR)
prop.label = "Provider Label Match Criteria"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "AtleastOne"
prop._addConstant("All", "all", 1)
prop._addConstant("AtleastOne", "atleastone", 2)
prop._addConstant("AtmostOne", "atmostone", 3)
prop._addConstant("None", "none", 4)
meta.props.add("matchT", prop)
prop = PropMeta("str", "name", "name", 1713, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(1, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "pcTag", "pcTag", 1717, PropCategory.REGULAR)
prop.label = "Policy Enforcement Tag"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("any", "any", 0)
meta.props.add("pcTag", prop)
prop = PropMeta("str", "prefGrMemb", "prefGrMemb", 27679, PropCategory.REGULAR)
prop.label = "Preferred Group Member"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "exclude"
prop._addConstant("exclude", "exclude", 2)
prop._addConstant("include", "include", 1)
meta.props.add("prefGrMemb", prop)
prop = PropMeta("str", "prio", "prio", 1718, PropCategory.REGULAR)
prop.label = "QOS Class"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 9)]
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("level1", "level1", 3)
prop._addConstant("level2", "level2", 2)
prop._addConstant("level3", "level3-(default)", 1)
prop._addConstant("level4", "level4", 9)
prop._addConstant("level5", "level5", 8)
prop._addConstant("level6", "level6", 7)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("prio", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "scope", "scope", 1716, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 16777215)]
prop.defaultValue = 0
prop.defaultValueStr = "0"
meta.props.add("scope", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "triggerSt", "triggerSt", 1725, PropCategory.REGULAR)
prop.label = "Ability to Trigger Task"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not_triggerable"
prop._addConstant("not_triggerable", "not_triggerable", 0)
prop._addConstant("triggerable", "triggerable", 1)
meta.props.add("triggerSt", prop)
prop = PropMeta("str", "txId", "txId", 21191, PropCategory.REGULAR)
prop.label = "Transaction Id when EPg was created"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("txId", prop)
meta.deploymentCategory = DeploymentCategory("epg", "EPG")
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Path"
meta.deploymentQueryPaths.append(DeploymentPathMeta("ATgToGraphInst", "Graph Instances", "cobra.model.vns.GraphInst"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("EPgToNwIf", "Interface", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
ad5f1a7e30e5cb32c1168b8a2e1ced91d1f31fa2 | afa9fcd0f2443515ba89e96ed4eb9416e9d11847 | /python/Gaffer/OpMatcher.py | bc89c2c9e05284e5cf226f31a38735f92aa6aa92 | [
"BSD-3-Clause",
"IJG"
] | permissive | dneg/gaffer | 6eb12b3ab3cde00afdf170c456969a38f5968237 | e87cb50f55a048cd7f6d5dcdfe6f95e38db2c5b6 | refs/heads/master | 2021-01-16T18:13:33.456876 | 2013-09-24T17:23:58 | 2013-09-24T17:23:58 | 13,094,917 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,396 | py | ##########################################################################
#
# Copyright (c) 2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import threading
import traceback
import weakref
import IECore
import Gaffer
## The OpMatcher class provides a means of searching for Ops suitable to
# act upon a given input value.
#
# The following Op userData entries are supported :
#
# ["OpMatcher"]["ignore"] - when this BoolData is True, the Op is not
# considered by the matcher.
#
# The following Parameter usedData entries are supported :
#
# ["OpMatcher"]["ignore"] - when this BoolData is True, the Parameter is not
# considered by the matcher.
class OpMatcher() :
def __init__( self, classLoader, classNamesMatchString = "*", reportErrors=True ) :
# these are filled with tuples of the form ( opClass, parameter, parameterPath )
self.__ops = []
for className in classLoader.classNames( classNamesMatchString ) :
try :
opClass = classLoader.load( className )
opInstance = opClass()
except Exception, m :
if reportErrors :
IECore.msg( IECore.Msg.Level.Error, "Gaffer.OpMatcher", "Error loading op \"%s\" : %s" % ( className, traceback.format_exc() ) )
continue
ignore = False
with IECore.IgnoredExceptions( KeyError ) :
# backwards compatibility with something proprietary
ignore = opInstance.userData()["UI"]["OpMatcher"]["ignore"].value
with IECore.IgnoredExceptions( KeyError ) :
ignore = opInstance.userData()["OpMatcher"]["ignore"].value
if ignore :
continue
parameters = []
self.__findParameters( opInstance.parameters(), parameters )
if len( parameters ) :
self.__ops.append( ( opClass, parameters ) )
## Returns a list of ( op, parameter ) tuples. Each op will be an Op instance
# where the corresponding parameter has already been set with parameterValue.
def matches( self, parameterValue ) :
processedValues = []
if isinstance( parameterValue, ( Gaffer.FileSystemPath, Gaffer.SequencePath ) ) :
# we might be able to match a single file
processedValues.append( IECore.StringData( str( parameterValue ) ) )
# or provide a single file input to an op which accepts multiple files
processedValues.append( IECore.StringVectorData( [ str( parameterValue ) ] ) )
elif isinstance( parameterValue, list ) :
processedValue = IECore.StringVectorData()
for value in parameterValue :
assert( isinstance( value, ( Gaffer.FileSystemPath, Gaffer.SequencePath ) ) )
processedValue.append( str( value ) )
elif isinstance( parameterValue, IECore.Object ) :
processedValue = parameterValue
if not processedValues :
return []
result = []
for opClass, parameters in self.__ops :
for testParameter, parameterPath in parameters :
for processedValue in processedValues :
if testParameter.valueValid( processedValue )[0] :
op = opClass()
parameter = op.parameters()
for name in parameterPath :
parameter = parameter[name]
parameter.setValue( processedValue )
result.append( ( op, parameter ) )
return result
__defaultInstances = weakref.WeakKeyDictionary()
__defaultInstancesMutex = threading.Lock()
## Returns an OpMatcher suitable for sharing by everyone - initialising one
# takes considerable time so it's preferable to reuse one if one has been created
# for the classLoader in question already. If classLoader is not specified then
# it defaults to IECore.ClassLoader.defaultOpLoader().
@classmethod
def defaultInstance( cls, classLoader=None ) :
if classLoader is None :
classLoader = IECore.ClassLoader.defaultOpLoader()
with cls.__defaultInstancesMutex :
result = cls.__defaultInstances.get( classLoader, None )
if result is None :
result = OpMatcher( classLoader )
cls.__defaultInstances[classLoader] = result
return result
def __findParameters( self, parameter, result, path = None ) :
if path is None :
path = []
for child in parameter.values() :
ignore = False
with IECore.IgnoredExceptions( KeyError ) :
# backwards compatibility with something proprietary
ignore = child.userData()["UI"]["OpMatcher"]["ignore"].value
with IECore.IgnoredExceptions( KeyError ) :
# backwards compatibility with something proprietary
ignore = child.userData()["OpMatcher"]["ignore"].value
if ignore :
continue
childPath = path + [ child.name ]
if isinstance( child, IECore.CompoundParameter ) :
self.__findParameters( child, result, childPath )
elif isinstance( child, ( IECore.PathParameter, IECore.PathVectorParameter ) ) :
if child.mustExist :
result.append( ( child, childPath ) )
| [
"thehaddonyoof@gmail.com"
] | thehaddonyoof@gmail.com |
6d82aec23779880869c6b12dcd1a18d3c756863f | 4f111dfacab0acc93900e7746538f85e0b3d8d78 | /day10/07普通装饰器.py | f04378d67d7cac40b5ae5f3509f4fccb5581cfef | [] | no_license | ljxproject/basecode | 5541f25cfe90d5fad26eac0b6e72802aa1fad1f4 | 485e4b41593839bfc61e67261247fb88dc80cc1d | refs/heads/master | 2020-03-26T16:16:26.422617 | 2018-08-17T08:05:11 | 2018-08-17T08:05:11 | 145,091,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py |
def check():
print("模拟的扩展功能")
def outter(func):
def inner(name):
check()
func(name)
return inner
# 在函数的上方加上@符号,@符号加上一个装饰器名,表示将该名字对应的装饰器应用在该函数上
# 本质: 相当于执行了 func1 = outter(func1) 跟函数名同名的一个装饰后的函数名 = 装饰器名(函数名)
@outter
def func1(name):
print("基础功能1")
print("名字是:%s"%(name))
# def func2():
# print("基础功能2")
func1("金三胖胖")
| [
"403496369@qq.com"
] | 403496369@qq.com |
66c032ccb5c1ce0fdd8c3e3418e5435357afed56 | b640cd3b332e83293ea765cb60b6e642a7278786 | /GanttChartPanel.py | 47e55f2e09a9427c80ee7c8bd7e1ceb6444cedf0 | [] | no_license | tymiles003/CrossMgr | 971dd651efb140e16a0c78591377a1e53258d9b6 | 17acb1f4d9ed494421fc07091c70dd58fde2aada | refs/heads/master | 2021-01-23T16:13:20.702380 | 2015-04-23T00:40:52 | 2015-04-23T00:40:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,034 | py | import wx
import random
import math
import sys
import bisect
import Utils
from PhotoFinish import hasPhoto
def SetScrollbarParameters( sb, thumbSize, range, pageSize ):
sb.SetScrollbar( min(sb.GetThumbPosition(), range - thumbSize), thumbSize, range, pageSize )
def makeColourGradient(frequency1, frequency2, frequency3,
phase1, phase2, phase3,
center = 128, width = 127, len = 50 ):
fp = [(frequency1,phase1), (frequency2,phase2), (frequency3,phase3)]
grad = [wx.Colour(*[math.sin(f*i + p) * width + center for f, p in fp]) for i in xrange(len+1)]
return grad[1:]
def makePastelColours( len = 50 ):
return makeColourGradient(2.4,2.4,2.4,0,2,4,128,127,len+1)
def lighterColour( c ):
rgb = c.Get( False )
return wx.Colour( *[int(v + (255 - v) * 0.6) for v in rgb] )
def numFromLabel( s ):
try:
lastSpace = s.rfind( ' ' )
if lastSpace >= 0:
try:
return int(s[lastSpace+1:])
except ValueError:
pass
firstSpace = s.find( ' ' )
if firstSpace < 0:
return int(s)
return int(s[:firstSpace])
except Exception as e:
return None
class GanttChartPanel(wx.PyPanel):
def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.NO_BORDER,
name="GanttChartPanel" ):
wx.PyPanel.__init__(self, parent, id, pos, size, style, name)
self.SetBackgroundColour(wx.WHITE)
self.SetBackgroundStyle( wx.BG_STYLE_CUSTOM )
self.data = None
self.labels = None
self.greyOutSet = None
self.nowTime = None
self.numSelect = None
self.dClickCallback = None
self.lClickCallback = None
self.rClickCallback = None
self.getNowTimeCallback = None
self.minimizeLabels = False
self.headerSet = set()
self.xMove = -1
self.yMove = -1
self.moveIRider = None
self.moveLap = None
self.moveTimer = wx.Timer( self, wx.NewId() )
self.barHeight = 8
self.labelsWidthLeft = 8000
self.xFactor = 1
self.yellowColour = wx.Colour(220,220,0)
self.orangeColour = wx.Colour(255,165,0)
self.horizontalSB = wx.ScrollBar( self, style=wx.SB_HORIZONTAL )
self.horizontalSB.Bind( wx.EVT_SCROLL, self.OnHorizontalScroll )
self.verticalSB = wx.ScrollBar( self, style=wx.SB_VERTICAL )
self.verticalSB.Bind( wx.EVT_SCROLL, self.OnVerticalScroll )
self.scrollbarWidth = 16
self.horizontalSB.Show( False )
self.verticalSB.Show( False )
self.colours = makeColourGradient(2.4,2.4,2.4,0,2,4,128,127,500)
self.lighterColours = [lighterColour(c) for c in self.colours]
# Bind the events related to our control: first of all, we use a
# combination of wx.BufferedPaintDC and an empty handler for
# wx.EVT_ERASE_BACKGROUND (see later) to reduce flicker
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_MOTION, self.OnMove)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftClick)
self.Bind(wx.EVT_LEFT_DCLICK, self.OnLeftDClick)
self.Bind(wx.EVT_RIGHT_UP, self.OnRightClick)
self.Bind(wx.EVT_MOUSEWHEEL, self.OnWheel )
self.Bind(wx.EVT_TIMER, self.OnMoveTimer, self.moveTimer)
def OnWheel( self, event ):
if not self.verticalSB.IsShown():
return
amt = event.GetWheelRotation()
units = -amt / event.GetWheelDelta()
sb = self.verticalSB
pos = sb.GetThumbPosition() + units
pos = min( max( pos, 0 ), sb.GetRange() - sb.GetThumbSize() )
if pos != sb.GetThumbPosition():
sb.SetThumbPosition( pos )
self.OnVerticalScroll( event )
def DoGetBestSize(self):
return wx.Size(128, 100)
def SetForegroundColour(self, colour):
wx.PyPanel.SetForegroundColour(self, colour)
self.Refresh()
def SetBackgroundColour(self, colour):
wx.PyPanel.SetBackgroundColour(self, colour)
self.Refresh()
def GetDefaultAttributes(self):
"""
Overridden base class virtual. By default we should use
the same font/colour attributes as the native wx.StaticText.
"""
return wx.StaticText.GetClassDefaultAttributes()
def ShouldInheritColours(self):
"""
Overridden base class virtual. If the parent has non-default
colours then we want this control to inherit them.
"""
return True
def SetData( self, data, labels = None, nowTime = None, interp = None, greyOutSet = set(),
numTimeInfo = None, lapNote = None,
headerSet = None ):
"""
* data is a list of lists. Each list is a list of times.
* labels are the names of the series. Optional.
"""
self.data = None
self.labels = None
self.nowTime = None
self.greyOutSet = greyOutSet
self.numTimeInfo = numTimeInfo
self.lapNote = lapNote
self.headerSet = headerSet or set()
if data and any( s for s in data ):
self.data = data
self.dataMax = max(max(s) if s else -sys.float_info.max for s in self.data)
if labels:
self.labels = [unicode(lab) for lab in labels]
if len(self.labels) < len(self.data):
self.labels = self.labels + [None] * (len(self.data) - len(self.labels))
elif len(self.labels) > len(self.data):
self.labels = self.labels[:len(self.data)]
else:
self.labels = [''] * len(data)
self.nowTime = nowTime
self.interp = interp
self.Refresh()
def OnPaint(self, event ):
#dc = wx.PaintDC(self)
dc = wx.BufferedPaintDC(self)
self.Draw(dc)
def OnVerticalScroll( self, event ):
dc = wx.ClientDC(self)
if not self.IsDoubleBuffered():
dc = wx.BufferedDC( dc )
self.Draw( dc )
def OnHorizontalScroll( self, event ):
self.OnVerticalScroll( event )
def OnSize(self, event):
self.Refresh()
event.Skip()
def OnLeftClick( self, event ):
if getattr(self, 'empty', True):
return
if not self.data:
return
iRider, iLap = self.getRiderLap( event )
if iRider is None or iLap is None:
return
self.numSelect = numFromLabel( self.labels[iRider] )
if self.getNowTimeCallback:
self.nowTime = self.getNowTimeCallback()
self.Refresh()
lClickCallback = getattr(self, 'lClickCallback', None)
if lClickCallback is not None:
xPos, yPos = event.GetPositionTuple()
lClickCallback( xPos, yPos, self.numSelect, iRider, iLap, self.data[iRider][iLap] )
def OnLeftDClick( self, event ):
if getattr(self, 'empty', True):
return
iRider, iLap = self.getRiderLap( event )
if iRider is None:
return
self.numSelect = numFromLabel( self.labels[iRider] )
if self.numSelect is not None:
if self.dClickCallback:
self.dClickCallback( self.numSelect )
def OnMove( self, event ):
self.xMove, self.yMove = event.GetPosition()
redrawRequired = (self.moveIRider is not None)
self.moveIRider, self.moveLap = None, None
self.Refresh()
self.moveTimer.Start( 100, True )
def OnMoveTimer( self, event ):
self.moveIRider, self.moveLap = self.getRiderLapXY( self.xMove, self.yMove )
self.Refresh()
def getRiderLapXY( self, x, y ):
if not self.data:
return None, None
y -= self.barHeight
x -= self.labelsWidthLeft
iRider = int(y / self.barHeight)
if self.verticalSB.IsShown():
iRider += self.verticalSB.GetThumbPosition()
if not 0 <= iRider < len(self.data):
return None, None
t = x / self.xFactor
if self.horizontalSB.IsShown():
t += self.horizontalSB.GetThumbPosition()
iLap = bisect.bisect_left( self.data[iRider], t )
if not 1 <= iLap < len(self.data[iRider]):
return iRider, None
return iRider, iLap
def getRiderLap( self, event ):
x, y = event.GetPositionTuple()
return self.getRiderLapXY( x, y )
def OnRightClick( self, event ):
if getattr(self, 'empty', True):
return
iRider, iLap = self.getRiderLap( event )
if iRider is None:
return
self.numSelect = numFromLabel(self.labels[iRider])
if self.getNowTimeCallback:
self.nowTime = self.getNowTimeCallback()
self.Refresh()
if iLap is None:
return
rClickCallback = getattr(self, 'rClickCallback', None)
if rClickCallback is not None:
xPos, yPos = event.GetPositionTuple()
rClickCallback( xPos, yPos, self.numSelect, iRider, iLap )
def Draw( self, dc ):
size = self.GetClientSize()
width = size.width
height = size.height
minBarWidth = 48
minBarHeight = 18
maxBarHeight = 28
backColour = self.GetBackgroundColour()
backBrush = wx.Brush(backColour, wx.SOLID)
greyBrush = wx.Brush( wx.Colour(196,196,196), wx.SOLID )
backPen = wx.Pen(backColour, 0)
dc.SetBackground(backBrush)
dc.Clear()
tooSmall = (width < 50 or height < 24)
if not self.data or self.dataMax == 0 or tooSmall:
self.empty = True
self.verticalSB.Show( False )
self.horizontalSB.Show( False )
if tooSmall:
dc.SetPen( wx.BLACK_DASHED_PEN )
dc.DrawLine( 0, height//2, width, height//2 )
return
self.empty = False
barHeight = int(float(height) / float(len(self.data) + 2))
barHeight = max( barHeight, minBarHeight )
barHeight = min( barHeight, maxBarHeight )
fontBarLabel = wx.FontFromPixelSize( wx.Size(0,int(min(barHeight-2, barHeight*0.9))), wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL )
dc.SetFont( fontBarLabel )
textWidthLeftMax, textHeightMax = dc.GetTextExtent( '0000' )
textWidthRightMax = textWidthLeftMax
for label in self.labels:
if label not in self.headerSet:
textWidthLeftMax = max( textWidthLeftMax, dc.GetTextExtent(label)[0] )
textWidthRightMax = max( textWidthRightMax, dc.GetTextExtent( '{}'.format(numFromLabel(label)) )[0] )
if textWidthLeftMax + textWidthRightMax > width:
self.horizontalSB.Show( False )
self.verticalSB.Show( False )
self.empty = True
return
maxLaps = max( len(d) for d in self.data )
if maxLaps and (width - textWidthLeftMax - textWidthRightMax) / maxLaps < minBarWidth:
self.horizontalSB.Show( True )
else:
self.horizontalSB.Show( False )
if self.horizontalSB.IsShown():
height -= self.scrollbarWidth
barHeight = int(float(height) / float(len(self.data) + 2))
barHeight = max( barHeight, minBarHeight )
barHeight = min( barHeight, maxBarHeight )
drawHeight = height - 2 * barHeight
if barHeight * len(self.data) > drawHeight:
self.verticalSB.Show( True )
self.verticalSB.SetPosition( (width - self.scrollbarWidth, barHeight) )
self.verticalSB.SetSize( (self.scrollbarWidth, drawHeight) )
pageSize = int(drawHeight / barHeight)
SetScrollbarParameters( self.verticalSB, pageSize-1, len(self.data)-1, pageSize )
else:
self.verticalSB.Show( False )
if self.verticalSB.IsShown():
width -= self.scrollbarWidth
iDataShowStart = self.verticalSB.GetThumbPosition() if self.verticalSB.IsShown() else 0
iDataShowEnd = iDataShowStart + self.verticalSB.GetThumbSize() + 1 if self.verticalSB.IsShown() else len(self.data)
tShowStart = self.horizontalSB.GetThumbPosition() if self.horizontalSB.IsShown() else 0
dc.SetFont( fontBarLabel )
textWidthLeftMax, textHeightMax = dc.GetTextExtent( '0000' )
textWidthRightMax = textWidthLeftMax
for label in self.labels:
if label not in self.headerSet:
textWidthLeftMax = max( textWidthLeftMax, dc.GetTextExtent(label)[0] )
textWidthRightMax = max( textWidthRightMax, dc.GetTextExtent( '{}'.format(numFromLabel(label)) )[0] )
if textWidthLeftMax + textWidthRightMax > width:
self.horizontalSB.Show( False )
self.verticalSB.Show( False )
self.empty = True
return
legendSep = 4 # Separations between legend entries and the Gantt bars.
labelsWidthLeft = textWidthLeftMax + legendSep
labelsWidthRight = textWidthRightMax + legendSep
'''
if labelsWidthLeft > width / 2:
labelsWidthLeft = 0
labelsWidthRight = 0
drawLabels = False
'''
xLeft = labelsWidthLeft
xRight = width - labelsWidthRight
yBottom = min( barHeight * (len(self.data) + 1), barHeight + drawHeight )
yTop = barHeight
if self.horizontalSB.IsShown():
viewWidth = minBarWidth * maxLaps
ratio = float(xRight - xLeft) / float(viewWidth)
sbMax = int(self.dataMax) + 1
pageSize = int(sbMax * ratio)
SetScrollbarParameters( self.horizontalSB, pageSize-1, sbMax, pageSize )
self.horizontalSB.SetPosition( (labelsWidthLeft, height) )
self.horizontalSB.SetSize( (xRight - xLeft, self.scrollbarWidth) )
fontLegend = wx.FontFromPixelSize( wx.Size(0,barHeight*.75), wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL )
fontNote = wx.FontFromPixelSize( wx.Size(0,barHeight*.8), wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL )
dc.SetFont( fontLegend )
textWidth, textHeight = dc.GetTextExtent( '00:00' if self.dataMax < 60*60 else '00:00:00' )
# Draw the horizontal labels.
# Find some reasonable tickmarks for the x axis.
numLabels = (xRight - xLeft) / (textWidth * 1.5)
tView = self.dataMax if not self.horizontalSB.IsShown() else self.horizontalSB.GetThumbSize()
d = tView / float(numLabels)
intervals = [1, 2, 5, 10, 15, 20, 30, 1*60, 2*60, 5*60, 10*60, 15*60, 20*60, 30*60, 1*60*60, 2*60*60, 4*60*60, 8*60*60, 12*60*60, 24*60*60]
d = intervals[bisect.bisect_left(intervals, d, 0, len(intervals)-1)]
if self.horizontalSB.IsShown():
tAdjust = self.horizontalSB.GetThumbPosition()
viewWidth = minBarWidth * maxLaps
dFactor = float(viewWidth) / float(self.dataMax)
else:
tAdjust = 0
dFactor = (xRight - xLeft) / float(self.dataMax)
dc.SetPen(wx.Pen(wx.BLACK, 1))
for t in xrange(0, int(self.dataMax), d):
x = xLeft + (t-tAdjust) * dFactor
if x < xLeft:
continue
if x > xRight:
break
if t < 60*60:
s = '%d:%02d' % ((t / 60), t%60)
else:
s = '%d:%02d:%02d' % (t/(60*60), (t / 60)%60, t%60)
w, h = dc.GetTextExtent(s)
xText = x - w/2
#xText = x
dc.DrawText( s, xText, 0 + 4 )
dc.DrawText( s, xText, yBottom + 4)
dc.DrawLine( x, yBottom+3, x, yTop-3 )
# Draw the Gantt chart.
dc.SetFont( fontBarLabel )
textWidth, textHeight = dc.GetTextExtent( '0000' )
penBar = wx.Pen( wx.Colour(128,128,128), 1 )
penBar.SetCap( wx.CAP_BUTT )
penBar.SetJoin( wx.JOIN_MITER )
dc.SetPen( penBar )
brushBar = wx.Brush( wx.BLACK )
transparentBrush = wx.Brush( wx.WHITE, style = wx.TRANSPARENT )
ctx = wx.GraphicsContext_Create(dc)
ctx.SetPen( wx.Pen(wx.BLACK, 1) )
xyInterp = []
xyNumTimeInfo = []
xyDuplicate = []
xFactor = dFactor
yLast = barHeight
yHighlight = None
tLeaderLast = None
dy = 0
for i, s in enumerate(self.data):
# Record the leader's last x position.
if tLeaderLast is None:
tLeaderLast = s[-1] if s else 0.0
if not( iDataShowStart <= i < iDataShowEnd ):
continue
try:
num = numFromLabel(self.labels[i])
except (TypeError, IndexError):
num = -1
yCur = yLast + barHeight
xLast = labelsWidthLeft
xCur = xLast
tTooShort = 15.0 # If a lap is shorter than 15 seconds, consider it a duplicate entry.
for j, t in enumerate(s):
if xLast >= xRight:
break
xCur = xOriginal = int(labelsWidthLeft + (t-tAdjust) * xFactor)
if xCur < labelsWidthLeft:
continue
if xCur > xRight:
xCur = xRight
if j == 0:
brushBar.SetColour( wx.WHITE )
dc.SetBrush( brushBar )
dc.DrawRectangle( xLast, yLast, xCur - xLast + 1, yCur - yLast + 1 )
else:
ctx.SetPen( wx.Pen(wx.WHITE, 1, style=wx.TRANSPARENT ) )
dy = yCur - yLast + 1
dd = int(dy * 0.3)
ic = j % len(self.colours)
b1 = ctx.CreateLinearGradientBrush(0, yLast, 0, yLast + dd + 1, self.colours[ic], self.lighterColours[ic])
ctx.SetBrush(b1)
ctx.DrawRectangle(xLast, yLast , xCur - xLast + 1, dd + 1)
b2 = ctx.CreateLinearGradientBrush(0, yLast + dd, 0, yLast + dy, self.lighterColours[ic], self.colours[ic])
ctx.SetBrush(b2)
ctx.DrawRectangle(xLast, yLast + dd, xCur - xLast + 1, dy-dd )
dc.SetBrush( transparentBrush )
dc.SetPen( penBar )
dc.DrawRectangle( xLast, yLast, xCur - xLast + 1, dy )
if self.lapNote:
note = self.lapNote.get( (num, j), None )
if note:
dc.SetFont( fontNote )
noteWidth, noteHeight = dc.GetTextExtent( note )
noteBorderWidth = int(dc.GetTextExtent( ' ' )[0] / 2)
noteBarWidth = xCur - xLast - noteBorderWidth * 2
if noteBarWidth <= 0:
noteBarWidth = xCur - xLast
noteBorderWidth = 0
note = '...'
noteWidth, noteHeight = dc.GetTextExtent( note )
elif noteWidth > noteBarWidth:
lenLeft, lenRight = 1, len(note)
while lenRight - lenLeft > 1:
lenMid = (lenRight + lenLeft) // 2
noteWidth, noteHeight = dc.GetTextExtent( note[:lenMid].strip() + '...' )
if noteWidth < noteBarWidth:
lenLeft = lenMid
else:
lenRight = lenMid
note = note[:lenLeft].strip() + '...'
noteWidth, noteHeight = dc.GetTextExtent( note )
dc.DrawText( note, xLast + noteBorderWidth, yLast + (dy - noteHeight) / 2 )
dc.SetFont( fontBarLabel )
if j == self.moveLap and self.moveIRider == i:
if hasPhoto(num, t):
# Draw a little camera icon.
cameraHeight = int(dy * 0.75)
cameraWidth = int(cameraHeight * 1.5)
dc.SetBrush( wx.BLACK_BRUSH )
dc.DrawRoundedRectangle( xCur - 2 - cameraWidth, yLast + (dy - cameraHeight) / 2, cameraWidth, cameraHeight, cameraHeight/5 )
dc.SetPen( wx.WHITE_PEN )
dc.SetBrush( transparentBrush )
dc.DrawCircle( xCur - 2 - cameraWidth / 2, yLast + dy / 2, cameraHeight * (0.6 / 2) )
if xOriginal <= xRight:
try:
if self.interp[i][j]:
xyInterp.append( (xOriginal, yLast) )
except (ValueError, IndexError):
pass
if self.numTimeInfo and self.numTimeInfo.getInfo(num, t) is not None:
xyNumTimeInfo.append( (xOriginal, yLast) )
if t - s[j-1] < tTooShort:
xyDuplicate.append( (xOriginal, yLast) )
xLast = xCur
# Draw the last empty bar.
xCur = int(labelsWidthLeft + self.dataMax * xFactor)
if xCur > xRight:
xCur = xRight
dc.SetPen( penBar )
brushBar.SetColour( wx.WHITE )
dc.SetBrush( brushBar )
dc.DrawRectangle( xLast, yLast, xCur - xLast + 1, yCur - yLast + 1 )
# Draw the label on both ends.
if self.greyOutSet and i in self.greyOutSet:
dc.SetPen( wx.TRANSPARENT_PEN )
dc.SetBrush( greyBrush )
dc.DrawRectangle( 0, yLast, textWidthLeftMax, yCur - yLast + 1 )
dc.SetBrush( backBrush )
if self.labels[i] in self.headerSet:
dc.DrawText( self.labels[i], labelsWidthLeft + 4, yLast ) # This is a Category Label.
else:
labelWidth = dc.GetTextExtent( self.labels[i] )[0]
dc.DrawText( self.labels[i], textWidthLeftMax - labelWidth, yLast )
if not self.minimizeLabels:
label = self.labels[i]
lastSpace = label.rfind( ' ' )
if lastSpace > 0:
label = label[lastSpace+1:]
dc.DrawText( label, width - labelsWidthRight + legendSep, yLast )
if u'{}'.format(self.numSelect) == u'{}'.format(numFromLabel(self.labels[i])):
yHighlight = yCur
yLast = yCur
if yHighlight is not None and len(self.data) > 1:
dc.SetPen( wx.Pen(wx.BLACK, 2) )
dc.SetBrush( wx.TRANSPARENT_BRUSH )
dc.DrawLine( 0, yHighlight, width, yHighlight )
yHighlight -= barHeight
dc.DrawLine( 0, yHighlight, width, yHighlight )
# Draw indicators for interpolated values.
radius = (dy/2) * 0.9
# Define a path for the interp indicator about the origin.
diamondPath = ctx.CreatePath()
diamondPath.MoveToPoint( 0, -radius )
diamondPath.AddLineToPoint( -radius, 0 )
diamondPath.AddLineToPoint( 0, radius )
diamondPath.AddLineToPoint( radius, 0 )
diamondPath.AddLineToPoint( 0, -radius )
def getStarPath( ctx, numPoints, radius, radiusInner ):
path = ctx.CreatePath()
angle = (math.pi * 2.0) / numPoints
angle2 = angle / 2.0
path.MoveToPoint( 0, -radius )
for p in xrange(numPoints):
a = p * angle + angle2 + math.pi / 2.0
path.AddLineToPoint( math.cos(a) * radiusInner, -math.sin(a) * radiusInner )
a = (p + 1) * angle + math.pi / 2.0
path.AddLineToPoint( math.cos(a) * radius, -math.sin(a) * radius )
path.AddLineToPoint( 0, -radius )
return path
starPath = getStarPath( ctx, 5, radius, radius / 2 )
# Draw the interp indicators.
ctx.SetPen( penBar )
ctx.SetBrush( ctx.CreateRadialGradientBrush( 0, - radius*0.50, 0, 0, radius + 1, wx.WHITE, self.yellowColour ) )
for xCur, yCur in xyInterp:
ctx.PushState()
ctx.Translate( xCur, yCur + dy/2.0 - (dy/2.0 - radius) / 4 )
ctx.DrawPath( diamondPath )
ctx.PopState()
# Draw the edit indictors.
ctx.SetPen( penBar )
ctx.SetBrush( ctx.CreateRadialGradientBrush( 0, - radius*0.50, 0, 0, radius + 1, wx.WHITE, self.orangeColour ) )
for xCur, yCur in xyNumTimeInfo:
ctx.PushState()
ctx.Translate( xCur, yCur + dy/2.0 - (dy/2.0 - radius) / 4 )
ctx.DrawPath( starPath )
ctx.PopState()
# Draw the duplicate indicators.
radius = int(radius * 1.5)
ctx.SetPen( wx.Pen(wx.RED, 3) )
ctx.SetBrush( wx.TRANSPARENT_BRUSH )
for xCur, yCur in xyDuplicate:
ctx.DrawEllipse( xCur - radius, yCur + dy/2.0 - radius, radius*2, radius*2 )
# Draw the now timeline.
timeLineTime = self.nowTime if self.nowTime and self.nowTime < self.dataMax else tLeaderLast
nowTimeStr = Utils.formatTime( timeLineTime )
labelWidth, labelHeight = dc.GetTextExtent( nowTimeStr )
x = int(labelsWidthLeft + (timeLineTime - tAdjust) * xFactor)
ntColour = '#339966'
dc.SetPen( wx.Pen(ntColour, 3) )
dc.DrawLine( x, barHeight - 4, x, yLast + 4 )
dc.SetPen( wx.Pen(wx.WHITE, 1) )
dc.DrawLine( x, barHeight - 4, x, yLast + 4 )
dc.SetBrush( wx.Brush(ntColour) )
dc.SetPen( wx.Pen(ntColour,1) )
rect = wx.Rect( x - labelWidth/2-2, 0, labelWidth+4, labelHeight )
dc.DrawRectangleRect( rect )
if not self.minimizeLabels:
rect.SetY( yLast+2 )
dc.DrawRectangleRect( rect )
dc.SetTextForeground( wx.WHITE )
dc.DrawText( nowTimeStr, x - labelWidth / 2, 0 )
if not self.minimizeLabels:
dc.DrawText( nowTimeStr, x - labelWidth / 2, yLast + 2 )
# Store the drawing scale parameters.
self.xFactor = xFactor
self.barHeight = barHeight
self.labelsWidthLeft = labelsWidthLeft
    def OnEraseBackground(self, event):
        """Deliberately a no-op.

        Painting goes through wx.BufferedPaintDC; suppressing the default
        background erase here prevents flicker between erase and paint.
        """
        # This is intentionally empty, because we are using the combination
        # of wx.BufferedPaintDC + an empty OnEraseBackground event to
        # reduce flicker
        pass
if __name__ == '__main__':
    # Stand-alone demo harness for GanttChartPanel (Python 2 era: xrange, wx classic).
    def GetData():
        # Build 40 synthetic riders: 7-minute laps over 3 hours, offset by 10 s
        # per rider.  Riders with i % 5 == 1 get an extra near-duplicate split
        # (+0.05 s), which exercises the duplicate-entry markers (tTooShort).
        data = []
        interp = []
        for i in xrange(40):
            data.append( [t + i*10.0 for t in xrange(0, 60*60 * 3, 7*60)] )
            if i % 5 == 1:
                data[-1].insert( (i//3) + 1, data[-1][i//3] + 0.05 )
            interp.append( [((t + i*10)%100)//10 for t in xrange(0, 60*60 * 3, 7*60)] )
        return data, interp
    app = wx.App(False)
    mainWin = wx.Frame(None,title="GanttChartPanel", size=(600,400))
    gantt = GanttChartPanel(mainWin)
    random.seed( 10 )
    t = 55*60
    tVar = t * 0.15
    data, interp = GetData()
    # Labels are bib numbers starting at 100, one per rider row.
    gantt.SetData( data, ['{}'.format(i) for i in xrange(100, 100+len(data))], interp = interp )
    mainWin.Show()
    app.MainLoop()
| [
"edward.sitarski@gmail.com"
] | edward.sitarski@gmail.com |
f8d32401ff16e6d846bfc328c7542055956a6336 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/network/azure-mgmt-network/generated_samples/virtual_network_create_subnet_with_address_prefixes.py | b010b5096c523a684115794891bc8db241fd15c0 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,904 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-network
# USAGE
python virtual_network_create_subnet_with_address_prefixes.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Create or update `test-vnet` with a multi-prefix subnet and print the result."""
    client = NetworkManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="subid",
    )
    # Build the VNet payload separately for readability; values mirror the
    # VirtualNetworkCreateSubnetWithAddressPrefixes.json example.
    vnet_parameters = {
        "location": "eastus",
        "properties": {
            "addressSpace": {"addressPrefixes": ["10.0.0.0/16"]},
            "subnets": [{"name": "test-2", "properties": {"addressPrefixes": ["10.0.0.0/28", "10.0.1.0/28"]}}],
        },
    }
    poller = client.virtual_networks.begin_create_or_update(
        resource_group_name="rg1",
        virtual_network_name="test-vnet",
        parameters=vnet_parameters,
    )
    print(poller.result())
# x-ms-original-file: specification/network/resource-manager/Microsoft.Network/stable/2023-04-01/examples/VirtualNetworkCreateSubnetWithAddressPrefixes.json
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | Azure.noreply@github.com |
466265e6d44645df2b813ff931b7d2359c095200 | 641347d14ddf44263a5e9c93ecf28263640b179e | /string_based_problems/longest_common_prefix/solution.py | 419c06399dcb511e59196881dcefd2ac8537ec2b | [] | no_license | Ranjit007ai/InterviewBit-String | 5ee97a13f8ab04d458ac148da800d18cfe5b8579 | c8f89caa6f57adf91920a066707ddffe814eea9e | refs/heads/main | 2023-03-29T00:18:08.914419 | 2021-03-27T14:31:49 | 2021-03-27T14:31:49 | 352,092,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,452 | py | # Given a array of strings ,we need to find the longest common prefix in them.
# this function take the input array as a parameter and return the longest common prefix .
def longest_common_prefix(input_array):
    """Return the longest common prefix shared by every string in input_array.

    Fixes over the original implementation:
      * an empty input no longer raises IndexError (returns '' instead);
      * the sentinel min-length bookkeeping is gone — zip(*strings) stops at
        the end of the shortest string automatically.
    """
    if not input_array:
        return ''
    prefix = ''
    # zip(*input_array) yields the i-th character of every string in lockstep
    # and terminates at the length of the shortest string.
    for chars in zip(*input_array):
        if all(c == chars[0] for c in chars):
            prefix += chars[0]
        else:
            break
    return prefix
# test case
# Quick manual check: the longest common prefix here is "abcd".
input_array = ['abcdef','abcdf']
answer = longest_common_prefix(input_array)
print(answer)
| [
"noreply@github.com"
] | Ranjit007ai.noreply@github.com |
ce0fcf78cceaca4c2e0ca7774665e851f2ca73e1 | df7b40e95718ac0f6071a0ba571b42efc81cf6de | /tests/test_models/test_heads/test_nl_head.py | 6f4bede5e7f377b68aecf731d23634a8a5a04e69 | [
"Apache-2.0"
] | permissive | shinianzhihou/ChangeDetection | 87fa2c498248e6124aeefb8f0ee8154bda36deee | 354e71234bef38b6e142b6ba02f23db958582844 | refs/heads/master | 2023-01-23T20:42:31.017006 | 2023-01-09T11:37:24 | 2023-01-09T11:37:24 | 218,001,748 | 162 | 29 | Apache-2.0 | 2022-11-03T04:11:00 | 2019-10-28T08:41:54 | Python | UTF-8 | Python | false | false | 446 | py | import torch
from mmseg.models.decode_heads import NLHead
from .utils import to_cuda
def test_nl_head():
    """Smoke-test NLHead: two conv layers, an NL block, and the expected output shape."""
    head = NLHead(in_channels=32, channels=16, num_classes=19)
    assert len(head.convs) == 2
    assert hasattr(head, 'nl_block')
    feats = [torch.randn(1, 32, 45, 45)]
    if torch.cuda.is_available():
        head, feats = to_cuda(head, feats)
    logits = head(feats)
    assert logits.shape == (1, head.num_classes, 45, 45)
| [
"1178396201@qq.com"
] | 1178396201@qq.com |
eb6e2c9044e35cef3bf2e9ee94d1ee64c533edb7 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /9sN5tvXZjYCsKb4Mx_16.py | d4bffa4586f541c0ce06952de52cc80ac2f622b8 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py |
def cube_diagonal(vol):
    """Return the space (body) diagonal of a cube with volume *vol*, rounded to 2 decimals."""
    edge = vol ** (1 / 3)          # cube root gives the edge length
    diagonal = edge * 3 ** 0.5     # body diagonal = edge * sqrt(3)
    return round(diagonal, 2)
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
21d3d280c5d2176baa2730b07b6b4d3790cfc623 | 80bb9bb3c9c811a64e7916f5a76ac75acfde1548 | /Material para n2/N2 ESTUDO/Listas/somaLista.py | 419ed0413cb52dee63f283aa647178c87600cdd4 | [] | no_license | RafaelSanzio0/FACULDADE-PYTHON.2 | 83e75b7bbe42e78a1eeb03b8b80afda00d95bacf | ea4f306f1e7c068a24f03eab7231e41bb466d2a1 | refs/heads/master | 2020-03-27T06:55:58.326782 | 2018-11-24T04:28:47 | 2018-11-24T04:28:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | '''Escreva uma função não recursiva que receba como parâmetro uma lista de inteiros e
retorne como resposta a soma de todos os elementos da lista'''
lista = [10,20,30]
def soma_elementos(lista):
    """Sum the elements of *lista* (coerced to int) without recursion.

    Prints the total (preserving the original behaviour) and also RETURNS it,
    as the exercise statement requires the sum to be returned; the original
    implicitly returned None.
    """
    soma = 0
    for elemento in lista:
        soma += int(elemento)
    print("A soma dos elementos é", soma)
    return soma
soma = soma_elementos(lista)
| [
"rafaelsanzio16@gmail.com"
] | rafaelsanzio16@gmail.com |
dc5f56487684026050bb1ee73a2dba2e41624b0a | e1c9db908a9cefe46e293c7dcb1b6008e2e46951 | /synthetic.py | bdf3bfbead77b26faf256228be5bed41acac58e2 | [
"MIT"
] | permissive | luckyyangrun/gfnn | dbcfeca1910f6333474c6756b076dcac8601a2f3 | 36667861caacba921469d43917d002896e832c3f | refs/heads/master | 2023-03-16T20:10:15.025568 | 2020-08-06T10:32:52 | 2020-08-06T10:32:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,535 | py | import time
import argparse
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
from utils import sgc_precompute, set_seed, stack_feat, load_donuts
from models import get_model
from metrics import accuracy
import pickle as pkl
from args import get_syn_args
from time import perf_counter
from noise import zero_idx, gaussian
from train import train_regression, test_regression,\
train_gcn, test_gcn,\
train_kgcn, test_kgcn
# Arguments
args = get_syn_args()
# setting random seeds
set_seed(args.seed, args.cuda)
# Generate the synthetic dataset via load_donuts (see utils): graph adjacency,
# node features, labels, the train/val/test splits, and an optional mesh grid
# (mesh_pack) controlled by --gen_mesh / --gen_mesh_step.
adj, features, labels, idx_train,\
    idx_val, idx_test, mesh_pack = load_donuts(args.gen_num_samples,
                                               args.gen_noise,
                                               args.gen_factor,
                                               args.gen_test_size,
                                               args.gen_num_neigh,
                                               args.normalization,
                                               args.cuda,
                                               args.invlap_alpha,
                                               args.gen_mesh,
                                               args.gen_mesh_step)
### NOISE TO FEATURES ONLY USE ZERO HERE
# Optionally corrupt the node features before training.  args.noise is the
# literal string "None" when disabled.
if args.noise != "None":
    features = features.numpy()
if args.noise == "gaussian":
    # Additive Gaussian noise with (mean, std) taken from --gaussian_opt.
    features = gaussian(features,
                        mean=args.gaussian_opt[0],
                        std=args.gaussian_opt[1])
if args.noise == "zero_test":
    # Zero out the feature rows of the test nodes only.
    idx_test = idx_test.numpy()
    features = zero_idx(features, idx_test)
    idx_test = torch.LongTensor(idx_test)
    if args.cuda:
        idx_test = idx_test.cuda()
if args.noise != "None":
    # Back to a torch tensor (and onto the GPU when enabled).
    features = torch.FloatTensor(features).float()
    if args.cuda:
        features = features.cuda()
### END NOISE TO FEATURES
# Monkey patch for Stacked Logistic Regression
# SLG consumes `degree` stacked feature blocks (see stack_feat below), so its
# input width is degree * base feature width.
if args.model == "SLG":
    nfeat = features.size(1) * args.degree
else:
    nfeat = features.size(1)
model = get_model(model_opt=args.model,
                  nfeat=nfeat,
                  nclass=labels.max().item()+1,
                  nhid=args.hidden,
                  dropout=args.dropout,
                  cuda=args.cuda,
                  degree=args.degree)
# SGC / gfnn: precompute propagated features with sgc_precompute, then fit a
# lightweight regression head on the precomputed features.
if args.model == "SGC" or args.model == "gfnn":
    features, precompute_time = sgc_precompute(features, adj, args.degree)
    print("{:.4f}s".format(precompute_time))
    model, acc_val, train_time = train_regression(model,
                                                  features[idx_train],
                                                  labels[idx_train],
                                                  features[idx_val],
                                                  labels[idx_val],
                                                  args.epochs,
                                                  args.weight_decay,
                                                  args.lr,
                                                  args.dropout)
    acc_test = test_regression(model, features[idx_test], labels[idx_test])
    print("Validation Accuracy: {:.4f} Test Accuracy: {:.4f}".format(acc_val,\
        acc_test))
    print("Pre-compute time: {:.4f}s, train time: {:.4f}s, total: {:.4f}s".format(precompute_time, train_time, precompute_time+train_time))
# SLG: stack propagated feature blocks (stack_feat), then train the same
# regression head on the widened features.
if args.model == "SLG":
    features, precompute_time = stack_feat(features, adj, args.degree)
    features = torch.FloatTensor(features).float()
    if args.cuda:
        features = features.cuda()
    print("{:.4f}s".format(precompute_time))
    model, acc_val, train_time = train_regression(model,
                                                  features[idx_train],
                                                  labels[idx_train],
                                                  features[idx_val],
                                                  labels[idx_val],
                                                  args.epochs,
                                                  args.weight_decay,
                                                  args.lr,
                                                  args.dropout)
    acc_test = test_regression(model, features[idx_test], labels[idx_test])
    print("Validation Accuracy: {:.4f} Test Accuracy: {:.4f}".format(acc_val,\
        acc_test))
    print("Pre-compute time: {:.4f}s, train time: {:.4f}s, total: {:.4f}s".format(precompute_time, train_time, precompute_time+train_time))
# GCN: end-to-end training on the graph; no feature precomputation, so the
# reported pre-compute time is 0.
if args.model == "GCN":
    model, acc_val, train_time = train_gcn(model,
                                           adj,
                                           features,
                                           labels,
                                           idx_train,
                                           idx_val,
                                           args.epochs,
                                           args.weight_decay,
                                           args.lr,
                                           args.dropout)
    acc_test = test_gcn(model, adj, features, labels, idx_test)
    print("Validation Accuracy: {:.4f} Test Accuracy: {:.4f}".format(acc_val,\
        acc_test))
    precompute_time = 0
    print("Pre-compute time: {:.4f}s, train time: {:.4f}s, total: {:.4f}s".format(precompute_time, train_time, precompute_time+train_time))
# KGCN: trained the same way via the train_kgcn / test_kgcn pair.
if args.model == "KGCN":
    model, acc_val, train_time = train_kgcn(model,
                                            adj,
                                            features,
                                            labels,
                                            idx_train,
                                            idx_val,
                                            args.epochs,
                                            args.weight_decay,
                                            args.lr,
                                            args.dropout)
    acc_test = test_kgcn(model, adj, features, labels, idx_test)
    precompute_time = 0
    print("Validation Accuracy: {:.4f} Test Accuracy: {:.4f}".format(acc_val,\
        acc_test))
    print("Pre-compute time: {:.4f}s, train time: {:.4f}s, total: {:.4f}s".format(precompute_time, train_time, precompute_time+train_time))
| [
"hoangnt.titech@gmail.com"
] | hoangnt.titech@gmail.com |
f20790bffb57e8b31c4439e153e75454285669f6 | 9f78c2bfadd1e87d779a786e7cd0952b6fbc96f1 | /jobs/tasks/pay/index.py | 4ad10e9160a8d058533bfc8a0dc68c69ef51f8a4 | [] | no_license | Erick-LONG/order | 08393ed9b315cf2c6af5e2b9bfd6917605fe8d94 | 4b853403c9c949b3ecbe2766ec77750557cf11fc | refs/heads/master | 2022-11-11T09:32:53.570524 | 2020-06-30T09:20:18 | 2020-06-30T09:20:18 | 262,786,005 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | import datetime
from common.models.pay.PayOrder import PayOrder
from application import app,db
from common.libs.pay.PayService import PayService
'''
python manager.py runjob -m pay/index
'''
class JobTask():
    """Scheduled job that closes pay orders still in status -8 thirty minutes
    after creation (status -8 presumably means 'awaiting payment' — confirm
    against the PayOrder model)."""

    def __init__(self):
        pass

    def run(self):
        # Orders created more than 30 minutes ago that are still in status -8.
        cutoff = datetime.datetime.now() + datetime.timedelta(minutes=-30)
        stale_orders = PayOrder.query.filter_by(status=-8)\
            .filter(PayOrder.created_time <= cutoff.strftime('%Y-%m-%d %H:%M:%S')).all()
        if not stale_orders:
            return
        pay_service = PayService()
        for order in stale_orders:
            pay_service.closeOrder(pay_order_id=order.id)
| [
"834424581@qq.com"
] | 834424581@qq.com |
cb8f947de7142b0c8636844086b34bb65bf2e752 | f0a4ba1f1f941092e68e4b1ef9cff0d3852199ef | /프로그래머스/레벨1/K번째수.py | 1b25a50c2ba4db15fe16d9ea88c5b10318662d36 | [] | no_license | lsb530/Algorithm-Python | d41ddd3ca7675f6a69d322a4646d75801f0022b2 | a48c6df50567c9943b5d7218f874a5c0a85fcc6d | refs/heads/master | 2023-06-18T04:36:09.221769 | 2021-06-28T16:49:35 | 2021-06-28T16:49:35 | 367,775,760 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,750 | py | """배열 array의 i번째 숫자부터 j번째 숫자까지 자르고 정렬했을 때, k번째에 있는 수를 구하려 합니다.
예를 들어 array가 [1, 5, 2, 6, 3, 7, 4], i = 2, j = 5, k = 3이라면
array의 2번째부터 5번째까지 자르면 [5, 2, 6, 3]입니다.
1에서 나온 배열을 정렬하면 [2, 3, 5, 6]입니다.
2에서 나온 배열의 3번째 숫자는 5입니다.
배열 array, [i, j, k]를 원소로 가진 2차원 배열 commands가 매개변수로 주어질 때, commands의 모든
원소에 대해 앞서 설명한 연산을 적용했을 때 나온 결과를 배열에 담아 return 하도록 solution 함수를 작성해주세요."""
array = [1, 5, 2, 6, 3, 7, 4]
commands = [[2, 5, 3], [4, 4, 1], [1, 7, 3]]
# 예상 답 : [5, 6, 3]
def solutioned(array, commands):
    """Verbose (debug-print) version of solution().

    For each command (i, j, k) it slices array[i-1:j], sorts the slice and
    collects the k-th element, printing every intermediate step.

    BUG FIX: the original guard `(j % 2) == 0` fired on the first appended
    element (j == 0), when div_arr held a single value, so `div_arr[1]`
    raised IndexError.  A command is only complete once all three of
    (i, j, k) have been collected, i.e. len(div_arr) == 3.
    """
    answer = []
    div_arr = []
    for i in range(len(commands)):
        for j in range(len(commands[i])):
            div_arr.append(commands[i][j])
            print(f'index : {i}, {j} => {commands[i][j]}')
            if len(div_arr) == 3:  # i, j and k are all collected
                lst = array[div_arr[0] - 1:div_arr[1]]
                print(lst)
                lst.sort()
                answer.append(lst[div_arr[2] - 1])
                print(f'sorted:{lst}')
                print(div_arr)
                div_arr.clear()
    print(answer)
    return answer
# solutioned(array,commands)
def solution(array, commands):
    """Return, for each command (i, j, k), the k-th smallest value of array[i-1:j].

    BUG FIX: the original accumulated command values into div_arr and fired
    on `(j % 2) == 0`, which triggered at j == 0 with only one value
    collected and raised IndexError on `div_arr[1]`.  Unpacking each command
    directly removes the bookkeeping entirely.
    """
    answer = []
    for i, j, k in commands:
        # i..j is a 1-based inclusive range; k is a 1-based rank in the sorted slice.
        chunk = sorted(array[i - 1:j])
        answer.append(chunk[k - 1])
    return answer
solutioned(array, commands)
| [
"lsb530@naver.com"
] | lsb530@naver.com |
818880e7d059aa3fec801d8a9246894b9e8abd74 | 58f81a20e6a22d17af626d423c6e1a5b160f784c | /services/core-api/app/api/securities/namespace.py | 8793e106eea95aca7cc765859cc08cff227fb56b | [
"Apache-2.0"
] | permissive | cryptobuks1/mds | 5e115c641dfa2d1a91097d49de9eeba1890f2b34 | 6e3f7006aeb5a93f061717e90846b2b0d620d616 | refs/heads/master | 2022-04-23T21:11:37.124243 | 2020-04-14T17:55:39 | 2020-04-14T17:55:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,261 | py | from flask_restplus import Namespace
from app.api.securities.resources.bond import BondListResource, BondResource
from app.api.securities.resources.bond_status import BondStatusResource
from app.api.securities.resources.bond_type import BondTypeResource
from app.api.securities.resources.bond_document import BondDocumentListResource
from app.api.securities.resources.reclamation_invoice import ReclamationInvoiceListResource, ReclamationInvoiceResource
from app.api.securities.resources.reclamation_invoice_document import ReclamationInvoiceDocumentListResource
# Flask-RESTPlus namespace for all /securities endpoints; routes below are
# registered relative to this namespace's mount point.
api = Namespace('securities', description='Securities operations')
# Bonds
api.add_resource(BondListResource, '/bonds')
api.add_resource(BondResource, '/bonds/<bond_guid>')
api.add_resource(BondStatusResource, '/bonds/status-codes')
api.add_resource(BondTypeResource, '/bonds/type-codes')
api.add_resource(BondDocumentListResource, '/<string:mine_guid>/bonds/documents')
# Reclamation Invoices
api.add_resource(ReclamationInvoiceListResource, '/reclamation-invoices')
api.add_resource(ReclamationInvoiceResource, '/reclamation-invoices/<reclamation_invoice_guid>')
api.add_resource(ReclamationInvoiceDocumentListResource,
                 '/<string:mine_guid>/reclamation-invoices/documents')
| [
"bcgov-csnr-cd@gov.bc.ca"
] | bcgov-csnr-cd@gov.bc.ca |
af88ad9615211bfd9385154819ff3b187fb7e9f7 | 86036dc1f245b64a8054bc8ff988a777cb6105e7 | /proxy/getproxy.py | c538f855ad7f5225fef51805c1579f982e1c17e1 | [
"MIT"
] | permissive | iofu728/spider | a13576c26d96de230697f46eed1710f70ba2c54b | 3b4565a40411888bb06c23461800089269d56b2c | refs/heads/master | 2021-12-28T04:38:51.511252 | 2020-06-06T06:22:12 | 2020-06-06T06:22:12 | 152,982,734 | 107 | 36 | MIT | 2020-06-06T06:22:13 | 2018-10-14T14:28:20 | Python | UTF-8 | Python | false | false | 27,729 | py | # -*- coding: utf-8 -*-
# @Author: gunjianpan
# @Date: 2018-10-18 23:10:19
# @Last Modified by: gunjianpan
# @Last Modified time: 2020-06-01 13:50:44
import argparse
import codecs
import functools
import http.cookiejar as cj
import os
import random
import re
import sys
import threading
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
import requests
from apscheduler.schedulers.blocking import BlockingScheduler
from bs4 import BeautifulSoup
sys.path.append(os.getcwd())
from util.db import Db
from util.util import (
basic_req,
begin_time,
can_retry,
changeHtmlTimeout,
changeJsonTimeout,
echo,
end_time,
read_file,
time_str,
get_accept,
get_content_type,
)
"""
* www.proxyserverlist24.top
* www.live-socks.net
* gatherproxy.com
* goubanjia.com
xicidaili.com
data5u.com
66ip.com
kuaidaili.com
.data/
├── gatherproxy // gather proxy list
└── passage // gather passage
"""
data_dir = "proxy/data/"  # on-disk cache for gathered proxy/passage files (see module docstring)
MAXN = 0x3FFFFFFF  # upper bound for the random keys used in cannot_use_ip
type_map = {1: "https", 0: "http"}  # bool(url is https) -> requests proxy scheme key
class GetFreeProxy:
""" proxy pool """
    def __init__(self):
        """Open the proxy db connection, prepare SQL templates and load the pools."""
        self.Db = Db("proxy")
        # SQL templates; the %s placeholders are filled with comma-joined value
        # tuples / address lists by the helper methods below.
        self.insert_sql = """INSERT INTO ip_proxy( `address`, `http_type`) VALUES %s """
        self.select_list = (
            """SELECT address, http_type from ip_proxy WHERE `is_failured` = 0"""
        )
        self.select_sql = """SELECT `id`, address, `is_failured` from ip_proxy WHERE `address` in %s """
        self.select_all = """SELECT `address`, `http_type` from ip_proxy WHERE `is_failured` != 5 and http_type in %s"""
        self.random_select = """SELECT `address`, `http_type` FROM ip_proxy WHERE `is_failured` >= 5 and (`id` >= ((SELECT MAX(`id`) FROM ip_proxy)-(SELECT MIN(`id`) FROM ip_proxy)) * RAND() + (SELECT MIN(`id`) FROM ip_proxy)) and http_type in %s LIMIT 6000"""
        self.replace_ip = """REPLACE INTO ip_proxy(`id`, `address`, `http_type`, `is_failured`) VALUES %s"""
        # runtime state
        self.can_use_ip = {}       # verified proxies waiting to be flushed to db
        self.waitjudge = []        # addresses queued for verification
        self.cannot_use_ip = {}    # dead proxies waiting to be flushed to db
        self.failured_time = {}    # per-url consecutive failure counter (see check_retry)
        self.canuse_proxies = []   # flat list of verified addresses
        self.init_proxy()
def proxy_req(
self,
url: str,
types: int,
data=None,
header=None,
test_func=None,
need_cookie: bool = False,
config: dict = {},
proxies: dict = {},
):
"""
use proxy to send requests, and record the proxy can't use
@types S0XY: X=0.->get; =1.->post;
Y=0.->html; =1.->json; =2.->basic
S=0.->basic ;=1.->ss
support failured retry && failured auto record
"""
httptype = url[4] == "s"
ss_type = types // 1000
types %= 1000
if ss_type:
proxylist = self.proxylists_ss if httptype else self.proxylist_ss
else:
proxylist = self.proxylists if httptype else self.proxylist
if proxies != {}:
proxies = proxies
elif not len(proxylist):
if self.Db.db:
echo(
"0|critical",
"Proxy pool empty!!! Please check the db conn & db dataset!!!",
)
proxies = {}
else:
index = random.randint(0, len(proxylist) - 1)
proxies_url = proxylist[index]
proxies = {type_map[httptype]: proxies_url}
try:
result = basic_req(
url,
types=types,
proxies=proxies,
data=data,
header=header,
need_cookie=need_cookie,
config=config,
)
if test_func is not None:
if not test_func(result):
if self.check_retry(url):
return self.proxy_req(
url,
types=types + 1000 * ss_type,
data=data,
header=header,
test_func=test_func,
need_cookie=need_cookie,
config=config,
proxies=proxies,
)
else:
self.failured_time[url] = 0
return
return result
return result
except:
self.cannot_use_ip[random.randint(0, MAXN)] = proxies_url
if proxies_url in proxylist:
proxylist.remove(proxylist.index(proxies_url))
if not len(self.cannot_use_ip.keys()) % 10:
self.clean_cannot_use()
if self.check_retry(url):
return self.proxy_req(
url,
types=types + 1000 * ss_type,
data=data,
test_func=test_func,
header=header,
need_cookie=need_cookie,
config=config,
proxies=proxies,
)
def check_retry(self, url: str) -> bool:
""" check try time """
if url not in self.failured_time:
self.failured_time[url] = 0
return True
elif self.failured_time[url] < 3:
self.failured_time[url] += 1
return True
else:
self.log_write(url)
self.failured_time[url] = 0
return False
    def log_write(self, url: str):
        """ failure log """
        # Emitted once a url has exhausted its retry budget (see check_retry).
        echo("0|warning", "url {} retry max time".format(url))
def insert_proxy(self, insert_list: list):
""" insert data to db """
results = self.Db.insert_db(self.insert_sql % str(insert_list)[1:-1])
if results:
echo("2|info", "Insert " + str(len(insert_list)) + " items Success!")
def update_proxy(self, update_list: list, types: int):
""" update data to db"""
results = self.Db.update_db(self.replace_ip % str(update_list)[1:-1])
typemap = {0: "can use ", 1: "can not use "}
if results:
echo(
"2|info",
"Update",
typemap[types],
str(len(update_list)),
" items Success!",
)
def select_proxy(self, target_list: list) -> list:
""" select ip proxy by ids """
if not len(target_list):
return []
elif len(target_list) == 1:
waitlist = "('" + target_list[0] + "')"
else:
waitlist = tuple(target_list)
return self.Db.select_db(self.select_sql % str(waitlist))
    def db_can_use_proxy(self):
        """ test db have or not this data """
        # Flush self.can_use_ip to db: update rows already in the table
        # (resetting their failure count), insert the rest.
        results = self.select_proxy([ii[0] for ii in self.can_use_ip.values()])
        # http_type values > 1 mark ss-capable proxies (set in judge_url).
        ss_len = len([1 for ii in self.can_use_ip.values() if ii[1] > 1])
        echo("2|info", "SS proxies", ss_len)
        insert_list = []
        update_list = []
        ip_map = {}
        if results != False:
            for ip_info in results:
                # address -> [row id, is_failured]
                ip_map[ip_info[1]] = [ip_info[0], ip_info[2]]
            for ip_now in self.can_use_ip.values():
                http_type = ip_now[1]
                ip_now = ip_now[0]
                if ip_now in ip_map:
                    if ip_map[ip_now][1]:
                        # known address with failures on record: reset is_failured to 0
                        update_list.append((ip_map[ip_now][0], ip_now, http_type, 0))
                else:
                    insert_list.append((ip_now, http_type))
            if len(insert_list):
                self.insert_proxy(insert_list)
            if len(update_list):
                self.update_proxy(update_list, 0)
        else:
            pass
        self.can_use_ip = {}
    def clean_cannot_use(self):
        """ update db proxy cannot use """
        # Flush self.cannot_use_ip to db, incrementing each matching row's
        # consecutive-failure counter.
        results = self.select_proxy(self.cannot_use_ip.values())
        update_list = []
        ip_map = {}
        if results:
            for ip_info in results:
                # address -> [row id, is_failured]
                ip_map[ip_info[1]] = [ip_info[0], ip_info[2]]
            for ip_now in self.cannot_use_ip.values():
                # the scheme prefix encodes http/https: "https"[4] == "s"
                http_type = ip_now[4] == "s"
                if ip_now in ip_map:
                    update_list.append(
                        (ip_map[ip_now][0], ip_now, http_type, ip_map[ip_now][1] + 1)
                    )
            if len(update_list):
                self.update_proxy(update_list, 1)
        self.cannot_use_ip = {}
def init_proxy(self):
""" init proxy list """
results = self.Db.select_db(self.select_list)
self.proxylist = []
self.proxylists = []
self.proxylist_ss = []
self.proxylists_ss = []
if not results:
echo(
"0|error", "Please check db configure!!! The proxy pool cant use!!!>>>"
)
return
for index in results:
if index[1] == 1:
self.proxylists.append(index[0])
elif index[1] == 2:
self.proxylist.append(index[0])
self.proxylist_ss.append(index[0])
elif index[1] == 3:
self.proxylists.append(index[0])
self.proxylists_ss.append(index[0])
else:
self.proxylist.append(index[0])
echo("2|info", len(self.proxylist), " http proxy can use.")
echo("2|info", len(self.proxylists), " https proxy can use.")
echo("2|info", len(self.proxylist_ss), " ss http proxy can use.")
echo("2|info", len(self.proxylists_ss), " ss https proxy can use.")
    def judge_url(self, urls: str, index: int, times: int, ss_test: bool = False):
        """Probe one proxy address and file it into can_use_ip / cannot_use_ip.

        A proxy is considered alive when the NetEase playlist endpoint returns
        the reference playlist with exactly 10 tracks through it.  With
        *ss_test*, a follow-up Google fetch marks ss-capable proxies by adding
        2 to the stored http_type.
        """
        http_type = urls[4] == "s"
        proxies = {type_map[http_type]: urls}
        test_url = (
            type_map[http_type] + "://music.163.com/api/playlist/detail?id=432853362"
        )
        ss_url = "https://www.google.com/?gws_rd=ssl"
        try:
            data = basic_req(test_url, 1, proxies)
            result = data["result"]
            tracks = result["tracks"]
            if len(tracks) == 10:
                if times < 0:
                    # re-probe path; callers pass times=0 so this branch is
                    # normally inert — NOTE(review): confirm intended.
                    self.judge_url(urls, index, times + 1)
                else:
                    echo("1|debug", urls, proxies, "Proxies can use.")
                    self.canuse_proxies.append(urls)
                    self.can_use_ip[index] = [urls, int(http_type)]
                    if ss_test:
                        # NOTE(review): the ss check fetches Google directly,
                        # without the proxies argument — confirm intended.
                        data = basic_req(ss_url, 0)
                        if len(str(data)) > 5000:
                            self.can_use_ip[index] = [urls, int(http_type) + 2]
            else:
                echo("0|debug", urls, proxies, "Tracks len error ^--<^>--^ ")
                self.cannot_use_ip[index] = urls
        except:
            echo("0|debug", urls, proxies, "return error [][][][][][]")
            if not index in self.can_use_ip:
                self.cannot_use_ip[index] = urls
def thread_judge(self, batch_size: int = 500):
""" threading to judge proxy """
changeJsonTimeout(2)
changeHtmlTimeout(3)
proxy_exec = ThreadPoolExecutor(max_workers=batch_size // 2)
text = self.waitjudge
num = len(text)
for block in range(num // batch_size + 1):
proxy_th = [
proxy_exec.submit(self.judge_url, jj, ii, 0)
for ii, jj in enumerate(
text[block * batch_size : batch_size * (block + 1)]
)
]
list(as_completed(proxy_th))
self.db_can_use_proxy()
self.clean_cannot_use()
self.waitjudge = []
def test_db(self, types: int):
""" test proxy in db can use """
version = begin_time()
typestr = ""
if types == 2:
typestr = "(0,1,2,3)"
elif types == 1:
typestr = "(1,3)"
else:
typestr = "(0,2)"
results = self.Db.select_db(self.select_all % typestr)
random_select = self.Db.select_db(self.random_select % typestr)
if not results:
results = []
if not random_select:
random_select = []
for index in results + random_select:
self.waitjudge.append(index[0])
self.thread_judge()
self.init_proxy()
end_time(version, 2)
def xici_proxy(self, page: int):
"""
xici proxy http://www.xicidaili.com/nn/{page}
The first proxy I use, but now it can not use it mostly.
"""
if not str(page).isdigit():
echo("0|warning", "Please input num!")
return []
version = begin_time()
url = "http://www.xicidaili.com/nn/%d"
for index in range(1, page + 1):
html = basic_req(url % index, 0)
tem = html.find_all("tr")
for index in range(1, len(tem)):
tds = tem[index].find_all("td")
ip = tds[5].text.lower()
self.waitjudge.append("{}://{}:{}".format(ip, tds[1].text, tds[2].text))
self.thread_judge()
end_time(version, 2)
def gatherproxy(self, types: int):
"""
:100: very nice website
first of all you should download proxy ip txt from:
http://www.gatherproxy.com/zh/proxylist/country/?c=China
"""
if not os.path.exists("{}gatherproxy".format(data_dir)):
echo("0|warning", "Gather file not exist!!!")
return
file_d = read_file("{}gatherproxy".format(data_dir))
waitjudge_http = ["http://" + ii for ii in file_d]
waitjudge_https = ["https://" + ii for ii in file_d]
if not types:
self.waitjudge += waitjudge_http
elif types == 1:
self.waitjudge += waitjudge_https
elif types == 2:
self.waitjudge += waitjudge_http + waitjudge_https
else:
self.waitjudge += file_d
echo("2|warning", "load gather over!")
    def goubanjia(self):
        """
        :-1: html tag mixed with invalid data
        :100:And the most important thing is the port writed in 'class' rather in text.
        The website is difficult to spider, but the proxys are very goog
        goubanjia proxy http://www.goubanjia.com
        """
        version = begin_time()
        host = "http://www.goubanjia.com"
        html = self.proxy_req(host, 0)
        if not html:
            return []
        trs = html.find_all("tr", class_=["warning", "success"])
        for tr in trs:
            tds = tr.find_all("td")
            ip = tds[2].find_all("a")[0].text + "://"
            # NOTE(review): `not "p"` evaluates to False, and `class_=not "port"`
            # is `class_=False` (tags WITHOUT a class attribute) — presumably
            # intended to skip the decoy elements carrying class="port"; confirm
            # this matches BeautifulSoup's handling of a False entry in the
            # tag-name list.
            iplist = tds[0].find_all(["div", "span", not "p"], class_=not "port")
            for index in iplist:
                ip += index.text
            # The real port hides in the SECOND class name of the ".port"
            # element, encoded with letters 'A'-'J' standing for digits 0-9.
            encode = tds[0].find_all(["div", "span", "p"], class_="port")[0]["class"][1]
            uncode = functools.reduce(
                lambda x, y: x * 10 + (ord(y) - ord("A")), map(lambda x: x, encode), 0
            )
            # The decoded number is 8x the actual port value.
            self.waitjudge.append(ip + ":" + str(int(uncode / 8)))
        self.thread_judge()
        end_time(version, 2)
def schedulegou(self):
sched = BlockingScheduler()
sched.add_job(self.goubanjia, "interval", seconds=100)
sched.start()
def data5u(self):
"""
data5u proxy http://www.data5u.com/
no one can use
"""
version = begin_time()
url_list = ["", "free/gngn/index.shtml", "free/gwgn/index.shtml"]
host = "http://www.data5u.com/"
for uri in url_list:
html = self.proxy_req(host + uri, 0)
if not html:
continue
table = html.find_all("ul", class_="l2")
for index in table:
tds = index.find_all("li")
ip = tds[3].text
self.waitjudge.append("{}://{}:{}".format(ip, tds[1].text, tds[2].text))
self.thread_judge()
end_time(version, 2)
def sixsixip(self, area: int, page: int):
"""
66ip proxy http://www.66ip.cn/areaindex_{area}/{page}.html
"""
version = begin_time()
threadings = []
for index in range(1, area + 1):
for pageindex in range(1, page + 1):
echo("2|debug", "{} {}".format(index, pageindex))
work = threading.Thread(
target=self.sixsixthread, args=(index, pageindex)
)
threadings.append(work)
for work in threadings:
work.start()
for work in threadings:
work.join()
self.thread_judge()
end_time(version, 2)
def sixsixthread(self, index: int, pageindex: int):
host = """http://www.66ip.cn/areaindex_%d/%d.html"""
html = self.proxy_req(host % (index, pageindex), 0)
if not html:
return []
trs = html.find_all("table")[2].find_all("tr")
for test in range(1, len(trs) - 1):
tds = trs[test].find_all("td")
self.waitjudge.append("http://{}:{}".format(tds[0].text, tds[1].text))
self.waitjudge.append("https://{}:{}".format(tds[0].text, tds[1].text))
def kuaidaili(self, page: int):
"""
kuaidaili https://www.kuaidaili.com/free/
"""
version = begin_time()
threadings = []
for index in range(1, page + 1):
work = threading.Thread(target=self.kuaidailithread, args=(index,))
threadings.append(work)
for work in threadings:
work.start()
for work in threadings:
work.join()
self.thread_judge()
end_time(version, 2)
def kuaidailithread(self, index: int):
host = """https://www.kuaidaili.com/free/inha/%d/"""
html = self.proxy_req(host % index, 0)
if not html:
return []
trs = html.find_all("tr")
for index in range(1, len(trs)):
tds = trs[index].find_all("td")
ip = tds[3].text.lower() + "://" + tds[0].text + ":" + tds[1].text
self.waitjudge.append(ip)
def get_cookie(self):
"""
make cookie login
PS: Though cookie expired time is more than 1 year,
but It will be break when the connect close.
So you need reactive the cookie by this function.
"""
headers = {
"Cookie": "_lang=en-US; _ga=GA1.2.1084455496.1548351129; _gid=GA1.2.1515017701.1552361687; ASP.NET_SessionId=ckin3pzyqyoyt3zg54zrtrct; _gat=1; arp_scroll_position=57",
"Accept": get_accept("html") + ";q=0.9",
}
login_url = "http://www.gatherproxy.com/subscribe/login"
cookie_html = basic_req(login_url, 3, header=headers)
try:
verify_text = re.findall('<span class="blue">(.*?)</span>', cookie_html)[0]
except:
return
verify_list = verify_text.replace("= ", "").strip().split()
num_map = {
"Zero": 0,
"One": 1,
"Two": 2,
"Three": 3,
"Four": 4,
"Five": 5,
"Six": 6,
"Seven": 7,
"Eight": 8,
"Nine": 9,
"Ten": 10,
}
verify_num = [verify_list[0], verify_list[2]]
for index, num in enumerate(verify_num):
if num.isdigit():
verify_num[index] = int(num)
elif num in num_map:
verify_num[index] = num_map[num]
else:
echo("0|error", "Error", num)
# return False
verify_code = 0
error = True
operation = verify_list[1]
if (
operation == "+"
or operation == "plus"
or operation == "add"
or operation == "multiplied"
):
verify_code = verify_num[0] + verify_num[1]
error = False
if operation == "-" or operation == "minus":
verify_code = verify_num[0] - verify_num[1]
error = False
if operation == "X" or operation == "multiplication":
verify_code = verify_num[0] * verify_num[1]
error = False
if error:
echo("0|error", "Error", operation)
if not os.path.exists("%spassage" % data_dir):
echo("0|warning", "gather passage not exist!!!")
return
with codecs.open("%spassage" % data_dir, "r", encoding="utf-8") as f:
passage = [index[:-1] for index in f.readlines()]
data = {
"Username": passage[0],
"Password": passage[1],
"Captcha": str(verify_code),
}
time.sleep(2.163)
r = requests.session()
r.cookies = cj.LWPCookieJar()
login_req = r.post(login_url, headers=headers, data=data, verify=False)
    def load_gather(self):
        """
        load gather proxy pool text
        If failured, you should reactive the cookie.

        Downloads the subscribed proxy list from gatherproxy.com and writes
        it to ``{data_dir}gatherproxy``; on a stale cookie it refreshes the
        session via ``get_cookie`` and retries once recursively.
        """
        headers = {
            "Cookie": "_lang=en-US; _ga=GA1.2.1084455496.1548351129; _gid=GA1.2.1515017701.1552361687; ASP.NET_SessionId=ckin3pzyqyoyt3zg54zrtrct; _gat=1; arp_scroll_position=57",
            "Accept": get_accept("html") + ";q=0.9",
        }
        url = "http://www.gatherproxy.com/subscribe/infos"
        try:
            sid_url_req = requests.get(url, headers=headers, verify=False, timeout=10)
        except:
            return
        sid_url_html = BeautifulSoup(sid_url_req.text, "html.parser")
        # The download link carrying the subscription sid is the first anchor
        # inside the second "wrapper" div.
        sid_url = sid_url_html.find_all("div", class_="wrapper")[1].find_all("a")[0][
            "href"
        ]
        if len(sid_url.split("sid=")) < 2:
            # No sid in the link -> the session cookie expired; refresh and
            # retry once via recursion.
            echo("0|warning", "cookie error")
            self.get_cookie()
            self.load_gather()
            return
        sid = sid_url.split("sid=")[1]
        sid_url = "http://www.gatherproxy.com" + sid_url
        data = {"ID": sid, "C": "", "P": "", "T": "", "U": "0"}
        gatherproxy = requests.post(sid_url, headers=headers, data=data, verify=False)
        with codecs.open(data_dir + "gatherproxy", "w", encoding="utf-8") as f:
            f.write(gatherproxy.text)
def load_proxies_list(self, types: int = 2):
""" load proxies """
SITES = ["http://www.proxyserverlist24.top/", "http://www.live-socks.net/"]
spider_pool = []
self.waitjudge = []
for site in SITES:
self.get_other_proxies(site)
self.gatherproxy(3)
waitjudge = list(set(self.waitjudge))
waitjudge_http = ["http://" + ii for ii in waitjudge]
waitjudge_https = ["https://" + ii for ii in waitjudge]
if not types:
self.waitjudge = waitjudge_http
elif types == 1:
self.waitjudge = waitjudge_https
else:
self.waitjudge = waitjudge_http + waitjudge_https
echo(
"1|info",
"-_-_-_-_-_-_-",
len(waitjudge),
"Proxies wait to judge -_-_-_-_-_-_-",
)
def request_text(self, url: str) -> str:
""" requests text """
req = basic_req(url, 2)
if req is None:
echo("0|debug", url)
if can_retry(url):
return self.request_text(url)
else:
return ""
echo("1|debug", url)
text = req.text
if type(text) == str:
return text
elif type(text) == bytes:
return text.decode()
else:
return ""
def get_free_proxy(self, url: str):
req = basic_req(url, 2)
if req is None:
return []
tt = req.text
t_list = re.findall("<tr><td>(\d*\.\d*\.\d*\.\d*)</td><td>(\d*?)</td>", tt)
echo(1, "Get Free proxy List", url, len(t_list))
return ["{}:{}".format(ii, jj) for ii, jj in t_list]
def get_proxy_free(self):
urls = [
"https://www.sslproxies.org",
"https://free-proxy-list.net",
"https://www.us-proxy.org",
"https://free-proxy-list.net/uk-proxy.html",
"https://free-proxy-list.net/anonymous-proxy.html",
"http://www.google-proxy.net",
]
t_list = []
for url in urls:
t_list.extend(self.get_free_proxy(url))
t_list.extend(self.get_api())
for ii in ["http", "https"]:
t_list.extend(self.get_download(ii))
t_list = list(set(t_list))
with open(data_dir + "gatherproxy", "w") as f:
f.write("\n".join(t_list))
def ip_decoder(self, data: str):
data = re.sub("\+", "\x20", data)
data = re.sub(
"%([a-fA-F0-9][a-fA-F0-9])",
lambda i: chr(int("0x" + i.group()[1:], 16)),
data,
)
return re.findall(">(.*?)</a", data)
def get_api(self):
API_KEY = "xxx"
url = "http://api.scraperapi.com/?api_key={}&url=http://httpbin.org/ip".format(
API_KEY
)
t_list = []
for ii in range(38):
tt = basic_req(url, 1)
if tt is None:
continue
t_list.append(tt["origin"])
echo(1, "Get scraperapi", len(t_list))
return t_list
def get_download(self, types: str):
url = "https://www.proxy-list.download/api/v0/get?l=en&t=" + types
tt = basic_req(url, 1)
if tt is None:
return []
tt_list = tt[0]["LISTA"]
echo(1, "Get download", types, len(tt_list))
return ["{}:{}".format(ii["IP"], ii["PORT"]) for ii in tt_list]
def get_other_proxies(self, url: str):
""" get other proxies """
pages = re.findall(
r"<h3[\s\S]*?<a.*?(http.*?\.html).*?</a>", self.request_text(url)
)
if not len(pages):
echo("0|warning", "Please do not frequently request {}!!!".format(url))
else:
proxies = [
re.findall(
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}", self.request_text(ii)
)
for ii in pages
]
self.waitjudge = [*self.waitjudge, *sum(proxies, [])]
def load_proxies_test(self):
""" load mode & test proxies """
version = begin_time()
self.load_proxies_list()
proxies_len = len(self.waitjudge)
self.thread_judge()
canuse_len = len(self.canuse_proxies)
echo(
"1|info",
"\nTotal Proxies num: {}\nCan use num: {}\nTime spend: {}\n".format(
proxies_len, canuse_len, end_time(version)
),
)
with open("{}canuse_proxies.txt".format(data_dir), "w") as f:
f.write("\n".join(self.canuse_proxies))
if __name__ == "__main__":
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    parser = argparse.ArgumentParser(description="gunjianpan proxy pool code")
    parser.add_argument(
        "--model", type=int, default=0, metavar="model", help="model 0/1"
    )
    # BUG FIX: argparse `type=bool` converts any non-empty string — including
    # "False" — to True. A store_true flag gives the intended boolean
    # semantics (default stays False when the flag is absent).
    parser.add_argument(
        "--is_service",
        action="store_true",
        help="run as service",
    )
    parser.add_argument(
        "--test_time", type=int, default=1, metavar="test_time", help="test_time"
    )
    model = parser.parse_args().model
    a = GetFreeProxy()
    if model == 1:
        # model 1: only harvest and dump the free proxy lists.
        a.get_proxy_free()
    elif model == 0:
        # model 0: full pipeline — harvest, judge, then re-test the DB pool.
        a.load_proxies_test()
        a.test_db(2)
    else:
        a.test_db(2)
| [
"iofu728@163.com"
] | iofu728@163.com |
b071005e1bfea85be1a41e5bb422cd485b68feca | fd48fba90bb227017ac2da9786d59f9b9130aaf0 | /digsby/src/gui/native/mac/macfonts.py | 4366c8b020a76f6279dfea111cce5545dc170c0d | [
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | niterain/digsby | bb05b959c66b957237be68cd8576e3a7c0f7c693 | 16a62c7df1018a49eaa8151c0f8b881c7e252949 | refs/heads/master | 2021-01-18T10:07:10.244382 | 2013-11-03T02:48:25 | 2013-11-03T02:48:25 | 5,991,568 | 1 | 0 | null | 2013-11-03T02:48:26 | 2012-09-28T02:24:50 | Python | UTF-8 | Python | false | false | 871 | py | '''
some Mac code from the internet
// Path to resource files in Mac bundle
wxString m_resourceDir;
// container of locally activated font
ATSFontContainerRef m_fontContainer;
FSSpec spec;
wxMacFilename2FSSpec(m_resourceDir + _T("Bank Gothic Light.ttf"),
&spec);
OSStatus status = ATSFontActivateFromFileSpecification(&spec,
kATSFontContextLocal, kATSFontFormatUnspecified, NULL,
kATSOptionFlagsDefault, &m_fontContainer);
wxASSERT_MSG(status == noErr, _T("font activation failed"));
and then anywhere in the app this works fine:
wxFont(9, wxFONTFAMILY_DEFAULT, wxFONTSTYLE_NORMAL |
wxFONTFLAG_ANTIALIASED, wxFONTWEIGHT_NORMAL, false, _T("Bank Gothic
Light"));
'''
import gui.native
def loadfont(fontpath, private = True, enumerable = False):
gui.native.notImplemented()
return False
def unloadfont(fontpath):
gui.native.notImplemented()
return False | [
"mdougherty@tagged.com"
] | mdougherty@tagged.com |
dd440b62bf6470ab5d7c635c8125d93e37dabbb2 | 9746def392fc2a97c0bbd0ec1eb84ccdd45cf14f | /src/fcu_boto/route53/record.py | 4c7ae8abe51b17eb0a15e609c838ffe866201d9f | [
"MIT"
] | permissive | bopopescu/fcu-boto | ec04a9e9f3f224b3f34656297015a8f214f0fc9f | 9e4245ec6fbbadf8c1fcb958bbf08804e38568a6 | refs/heads/master | 2022-11-22T09:55:34.877878 | 2017-05-05T13:38:51 | 2017-05-05T13:38:51 | 282,385,186 | 0 | 0 | null | 2020-07-25T06:24:50 | 2020-07-25T06:24:49 | null | UTF-8 | Python | false | false | 14,701 | py | # Copyright (c) 2010 Chris Moyer http://coredumped.org/
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
RECORD_TYPES = ['A', 'AAAA', 'TXT', 'CNAME', 'MX', 'PTR', 'SRV', 'SPF']
from fcu_boto.resultset import ResultSet
class ResourceRecordSets(ResultSet):
"""
A list of resource records.
:ivar hosted_zone_id: The ID of the hosted zone.
:ivar comment: A comment that will be stored with the change.
:ivar changes: A list of changes.
"""
ChangeResourceRecordSetsBody = """<?xml version="1.0" encoding="UTF-8"?>
<ChangeResourceRecordSetsRequest xmlns="https://route53.amazonaws.com/doc/2013-04-01/">
<ChangeBatch>
<Comment>%(comment)s</Comment>
<Changes>%(changes)s</Changes>
</ChangeBatch>
</ChangeResourceRecordSetsRequest>"""
ChangeXML = """<Change>
<Action>%(action)s</Action>
%(record)s
</Change>"""
def __init__(self, connection=None, hosted_zone_id=None, comment=None):
self.connection = connection
self.hosted_zone_id = hosted_zone_id
self.comment = comment
self.changes = []
self.next_record_name = None
self.next_record_type = None
self.next_record_identifier = None
super(ResourceRecordSets, self).__init__([('ResourceRecordSet', Record)])
def __repr__(self):
if self.changes:
record_list = ','.join([c.__repr__() for c in self.changes])
else:
record_list = ','.join([record.__repr__() for record in self])
return '<ResourceRecordSets:%s [%s]' % (self.hosted_zone_id,
record_list)
def add_change(self, action, name, type, ttl=600,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
health_check=None, failover=None):
"""
Add a change request to the set.
:type action: str
:param action: The action to perform ('CREATE'|'DELETE'|'UPSERT')
:type name: str
:param name: The name of the domain you want to perform the action on.
:type type: str
:param type: The DNS record type. Valid values are:
* A
* AAAA
* CNAME
* MX
* NS
* PTR
* SOA
* SPF
* SRV
* TXT
:type ttl: int
:param ttl: The resource record cache time to live (TTL), in seconds.
:type alias_hosted_zone_id: str
:param alias_dns_name: *Alias resource record sets only* The value
of the hosted zone ID, CanonicalHostedZoneNameId, for
the LoadBalancer.
:type alias_dns_name: str
:param alias_hosted_zone_id: *Alias resource record sets only*
Information about the domain to which you are redirecting traffic.
:type identifier: str
:param identifier: *Weighted and latency-based resource record sets
only* An identifier that differentiates among multiple resource
record sets that have the same combination of DNS name and type.
:type weight: int
:param weight: *Weighted resource record sets only* Among resource
record sets that have the same combination of DNS name and type,
a value that determines what portion of traffic for the current
resource record set is routed to the associated location
:type region: str
:param region: *Latency-based resource record sets only* Among resource
record sets that have the same combination of DNS name and type,
a value that determines which region this should be associated with
for the latency-based routing
:type alias_evaluate_target_health: bool
:param alias_evaluate_target_health: *Required for alias resource record
sets* Indicates whether this Resource Record Set should respect the
health status of any health checks associated with the ALIAS target
record which it is linked to.
:type health_check: str
:param health_check: Health check to associate with this record
:type failover: str
:param failover: *Failover resource record sets only* Whether this is the
primary or secondary resource record set.
"""
change = Record(name, type, ttl,
alias_hosted_zone_id=alias_hosted_zone_id,
alias_dns_name=alias_dns_name, identifier=identifier,
weight=weight, region=region,
alias_evaluate_target_health=alias_evaluate_target_health,
health_check=health_check, failover=failover)
self.changes.append([action, change])
return change
def add_change_record(self, action, change):
"""Add an existing record to a change set with the specified action"""
self.changes.append([action, change])
return
def to_xml(self):
"""Convert this ResourceRecordSet into XML
to be saved via the ChangeResourceRecordSetsRequest"""
changesXML = ""
for change in self.changes:
changeParams = {"action": change[0], "record": change[1].to_xml()}
changesXML += self.ChangeXML % changeParams
params = {"comment": self.comment, "changes": changesXML}
return self.ChangeResourceRecordSetsBody % params
def commit(self):
"""Commit this change"""
if not self.connection:
import fcu_boto
self.connection = fcu_boto.connect_route53()
return self.connection.change_rrsets(self.hosted_zone_id, self.to_xml())
def endElement(self, name, value, connection):
"""Overwritten to also add the NextRecordName,
NextRecordType and NextRecordIdentifier to the base object"""
if name == 'NextRecordName':
self.next_record_name = value
elif name == 'NextRecordType':
self.next_record_type = value
elif name == 'NextRecordIdentifier':
self.next_record_identifier = value
else:
return super(ResourceRecordSets, self).endElement(name, value, connection)
def __iter__(self):
"""Override the next function to support paging"""
results = super(ResourceRecordSets, self).__iter__()
truncated = self.is_truncated
while results:
for obj in results:
yield obj
if self.is_truncated:
self.is_truncated = False
results = self.connection.get_all_rrsets(self.hosted_zone_id, name=self.next_record_name,
type=self.next_record_type,
identifier=self.next_record_identifier)
else:
results = None
self.is_truncated = truncated
class Record(object):
"""An individual ResourceRecordSet"""
HealthCheckBody = """<HealthCheckId>%s</HealthCheckId>"""
XMLBody = """<ResourceRecordSet>
<Name>%(name)s</Name>
<Type>%(type)s</Type>
%(weight)s
%(body)s
%(health_check)s
</ResourceRecordSet>"""
WRRBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Weight>%(weight)s</Weight>
"""
RRRBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Region>%(region)s</Region>
"""
FailoverBody = """
<SetIdentifier>%(identifier)s</SetIdentifier>
<Failover>%(failover)s</Failover>
"""
ResourceRecordsBody = """
<TTL>%(ttl)s</TTL>
<ResourceRecords>
%(records)s
</ResourceRecords>"""
ResourceRecordBody = """<ResourceRecord>
<Value>%s</Value>
</ResourceRecord>"""
AliasBody = """<AliasTarget>
<HostedZoneId>%(hosted_zone_id)s</HostedZoneId>
<DNSName>%(dns_name)s</DNSName>
%(eval_target_health)s
</AliasTarget>"""
EvaluateTargetHealth = """<EvaluateTargetHealth>%s</EvaluateTargetHealth>"""
def __init__(self, name=None, type=None, ttl=600, resource_records=None,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
weight=None, region=None, alias_evaluate_target_health=None,
health_check=None, failover=None):
self.name = name
self.type = type
self.ttl = ttl
if resource_records is None:
resource_records = []
self.resource_records = resource_records
self.alias_hosted_zone_id = alias_hosted_zone_id
self.alias_dns_name = alias_dns_name
self.identifier = identifier
self.weight = weight
self.region = region
self.alias_evaluate_target_health = alias_evaluate_target_health
self.health_check = health_check
self.failover = failover
def __repr__(self):
return '<Record:%s:%s:%s>' % (self.name, self.type, self.to_print())
def add_value(self, value):
"""Add a resource record value"""
self.resource_records.append(value)
def set_alias(self, alias_hosted_zone_id, alias_dns_name,
alias_evaluate_target_health=False):
"""Make this an alias resource record set"""
self.alias_hosted_zone_id = alias_hosted_zone_id
self.alias_dns_name = alias_dns_name
self.alias_evaluate_target_health = alias_evaluate_target_health
def to_xml(self):
"""Spit this resource record set out as XML"""
if self.alias_hosted_zone_id is not None and self.alias_dns_name is not None:
# Use alias
if self.alias_evaluate_target_health is not None:
eval_target_health = self.EvaluateTargetHealth % ('true' if self.alias_evaluate_target_health else 'false')
else:
eval_target_health = ""
body = self.AliasBody % {"hosted_zone_id": self.alias_hosted_zone_id,
"dns_name": self.alias_dns_name,
"eval_target_health": eval_target_health}
else:
# Use resource record(s)
records = ""
for r in self.resource_records:
records += self.ResourceRecordBody % r
body = self.ResourceRecordsBody % {
"ttl": self.ttl,
"records": records,
}
weight = ""
if self.identifier is not None and self.weight is not None:
weight = self.WRRBody % {"identifier": self.identifier,
"weight": self.weight}
elif self.identifier is not None and self.region is not None:
weight = self.RRRBody % {"identifier": self.identifier,
"region": self.region}
elif self.identifier is not None and self.failover is not None:
weight = self.FailoverBody % {"identifier": self.identifier,
"failover": self.failover}
health_check = ""
if self.health_check is not None:
health_check = self.HealthCheckBody % (self.health_check)
params = {
"name": self.name,
"type": self.type,
"weight": weight,
"body": body,
"health_check": health_check
}
return self.XMLBody % params
def to_print(self):
rr = ""
if self.alias_hosted_zone_id is not None and self.alias_dns_name is not None:
# Show alias
rr = 'ALIAS ' + self.alias_hosted_zone_id + ' ' + self.alias_dns_name
if self.alias_evaluate_target_health is not None:
rr += ' (EvalTarget %s)' % self.alias_evaluate_target_health
else:
# Show resource record(s)
rr = ",".join(self.resource_records)
if self.identifier is not None and self.weight is not None:
rr += ' (WRR id=%s, w=%s)' % (self.identifier, self.weight)
elif self.identifier is not None and self.region is not None:
rr += ' (LBR id=%s, region=%s)' % (self.identifier, self.region)
elif self.identifier is not None and self.failover is not None:
rr += ' (FAILOVER id=%s, failover=%s)' % (self.identifier, self.failover)
return rr
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'Type':
self.type = value
elif name == 'TTL':
self.ttl = value
elif name == 'Value':
self.resource_records.append(value)
elif name == 'HostedZoneId':
self.alias_hosted_zone_id = value
elif name == 'DNSName':
self.alias_dns_name = value
elif name == 'SetIdentifier':
self.identifier = value
elif name == 'EvaluateTargetHealth':
self.alias_evaluate_target_health = value.lower() == 'true'
elif name == 'Weight':
self.weight = value
elif name == 'Region':
self.region = value
elif name == 'Failover':
self.failover = value
elif name == 'HealthCheckId':
self.health_check = value
def startElement(self, name, attrs, connection):
return None
| [
"cyril.gratecos.ext@outscale.com"
] | cyril.gratecos.ext@outscale.com |
a69a510fba4f2708d9033225e05685db3b0c696d | 3f6c16ea158a8fb4318b8f069156f1c8d5cff576 | /.PyCharm2019.1/system/python_stubs/-417879439/_imp.py | 9b5c6a7460b88191080182531faaf5de3ec605ae | [] | no_license | sarthak-patidar/dotfiles | 08494170d2c0fedc0bbe719cc7c60263ce6fd095 | b62cd46f3491fd3f50c704f0255730af682d1f80 | refs/heads/master | 2020-06-28T23:42:17.236273 | 2019-10-01T13:56:27 | 2019-10-01T13:56:27 | 200,369,900 | 0 | 0 | null | 2019-08-03T12:56:33 | 2019-08-03T11:53:29 | Shell | UTF-8 | Python | false | false | 5,612 | py | # encoding: utf-8
# module _imp
# from (built-in)
# by generator 1.147
""" (Extremely) low-level import machinery bits as used by importlib and imp. """
# no imports
# functions
def acquire_lock(*args, **kwargs): # real signature unknown
"""
Acquires the interpreter's import lock for the current thread.
This lock should be used by import hooks to ensure thread-safety when importing
modules. On platforms without threads, this function does nothing.
"""
pass
def create_builtin(*args, **kwargs): # real signature unknown
""" Create an extension module. """
pass
def create_dynamic(*args, **kwargs): # real signature unknown
""" Create an extension module. """
pass
def exec_builtin(*args, **kwargs): # real signature unknown
""" Initialize a built-in module. """
pass
def exec_dynamic(*args, **kwargs): # real signature unknown
""" Initialize an extension module. """
pass
def extension_suffixes(*args, **kwargs): # real signature unknown
""" Returns the list of file suffixes used to identify extension modules. """
pass
def get_frozen_object(*args, **kwargs): # real signature unknown
""" Create a code object for a frozen module. """
pass
def init_frozen(*args, **kwargs): # real signature unknown
""" Initializes a frozen module. """
pass
def is_builtin(*args, **kwargs): # real signature unknown
""" Returns True if the module name corresponds to a built-in module. """
pass
def is_frozen(*args, **kwargs): # real signature unknown
""" Returns True if the module name corresponds to a frozen module. """
pass
def is_frozen_package(*args, **kwargs): # real signature unknown
""" Returns True if the module name is of a frozen package. """
pass
def lock_held(*args, **kwargs): # real signature unknown
"""
Return True if the import lock is currently held, else False.
On platforms without threads, return False.
"""
pass
def release_lock(*args, **kwargs): # real signature unknown
"""
Release the interpreter's import lock.
On platforms without threads, this function does nothing.
"""
pass
def _fix_co_filename(*args, **kwargs): # real signature unknown
"""
Changes code.co_filename to specify the passed-in file path.
code
Code object to change.
path
File path to use.
"""
pass
# classes
class __loader__(object):
"""
Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def create_module(cls, *args, **kwargs): # real signature unknown
""" Create a built-in module """
pass
@classmethod
def exec_module(cls, *args, **kwargs): # real signature unknown
""" Exec a built-in module """
pass
@classmethod
def find_module(cls, *args, **kwargs): # real signature unknown
"""
Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
This method is deprecated. Use find_spec() instead.
"""
pass
@classmethod
def find_spec(cls, *args, **kwargs): # real signature unknown
pass
@classmethod
def get_code(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have code objects. """
pass
@classmethod
def get_source(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have source code. """
pass
@classmethod
def is_package(cls, *args, **kwargs): # real signature unknown
""" Return False as built-in modules are never packages. """
pass
@classmethod
def load_module(cls, *args, **kwargs): # real signature unknown
"""
Load the specified module into sys.modules and return it.
This method is deprecated. Use loader.exec_module instead.
"""
pass
def module_repr(module): # reliably restored by inspect
"""
Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is "mappingproxy({'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', 'module_repr': <staticmethod object at 0x7fd1b260b048>, 'find_spec': <classmethod object at 0x7fd1b260b080>, 'find_module': <classmethod object at 0x7fd1b260b0b8>, 'create_module': <classmethod object at 0x7fd1b260b0f0>, 'exec_module': <classmethod object at 0x7fd1b260b128>, 'get_code': <classmethod object at 0x7fd1b260b198>, 'get_source': <classmethod object at 0x7fd1b260b208>, 'is_package': <classmethod object at 0x7fd1b260b278>, 'load_module': <classmethod object at 0x7fd1b260b2b0>, '__dict__': <attribute '__dict__' of 'BuiltinImporter' objects>, '__weakref__': <attribute '__weakref__' of 'BuiltinImporter' objects>})"
# variables with complex values
__spec__ = None # (!) real value is "ModuleSpec(name='_imp', loader=<class '_frozen_importlib.BuiltinImporter'>)"
| [
"sarthakpatidar15@gmail.com"
] | sarthakpatidar15@gmail.com |
a92353589502f2b857fbde712c4b2740871f7138 | e68c3cbb9d6291fcdd51adae8a55616dcfafe55c | /spf/mr/lambda_/visitor/get_all_predicates.py | 43cd13146bb18cb49f553127deab68a8e4eee648 | [] | no_license | Oneplus/pyspf | 26126f5094065960d5f034fea2be4709aa1a4c50 | 175f90b4f837aa60fd660cba850d10a82dd578a1 | refs/heads/master | 2016-08-12T15:18:25.606712 | 2015-11-22T02:49:07 | 2015-11-22T02:49:07 | 45,725,414 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | #!/usr/bin/env python
from spf.mr.language.type_.complex_type import ComplexType
from spf.mr.lambda_.logic_language_services import LogicLanguageServices
from spf.mr.lambda_.visitor.api import LogicalExpressionVisitorI
class GetAllPredicates(LogicalExpressionVisitorI):
def __init__(self):
self.predicates = set()
@staticmethod
def of(expr):
visitor = GetAllPredicates()
visitor.visit(expr)
return visitor.predicates
def visit_lambda(self, lambda_):
lambda_.get_argument().accept(self)
lambda_.get_body().accept(self)
def visit_literal(self, literal):
literal.get_predicate().accept(self)
for arg in literal.get_arguments():
arg.accept(self)
def visit_logical_constant(self, logical_constant):
if (isinstance(logical_constant.get_type(), ComplexType) and
not LogicLanguageServices.is_coordination_predicate(logical_constant) and
not LogicLanguageServices.is_array_index_predicate(logical_constant) and
not LogicLanguageServices.is_array_sub_predicate(logical_constant)):
self.predicates.add(logical_constant)
def visit_logical_expression(self, logical_expr):
logical_expr.accept(self)
def visit_variable(self, variable):
return
| [
"oneplus.lau@gmail.com"
] | oneplus.lau@gmail.com |
98979e69783d8cbbee9c7bc30f837780e05f0867 | e88dbf121b5b9669daca50c536df74b159968e10 | /Client/Tests/FunctionalTestSuite/lfc_del_dir.py | 97798116138274351d1dfbfb086063476526ff91 | [] | no_license | dmwm/DLSAPI | 82fef7fd322bd8478299be87ae785ab4d1fa1274 | a8618da6aec68df001cc7bd05ac64b8916c34e71 | refs/heads/master | 2021-01-20T11:31:48.878894 | 2013-10-30T12:08:00 | 2013-10-30T12:08:00 | 13,985,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,801 | py | #!/usr/bin/env python
# lfc_del_dir.py
# 11/05
# Author: Antonio Delgado Peris. CERN, LCG.
"""
This module provides functions and a command line tool to delete directories
in an LFC catalog, including the contained LFNs and associated replicas, if
specified.
These methods do not check if a physical file is associated with a replica name,
and does not try to delete those physical files either. It just deletes the
directories, LFNs and SFNs in the catalog.
This module invokes the lfc_del_lfn for the LFNs and SFNs deletions.
"""
import lfc
import lfc_del_lfn
import sys
import commands
import getopt
import time
######################## FUNCTIONS ########################
def usage():
"""
Provides usage information
"""
print "Usage: lfc-del-dir [-v] [-r] [-l] [-x] <LFN>"
print " lfc-del-dir [-v] [-r] [-l] [-x] -f <listfile>"
print " lfc-del-dir -u"
print " lfc-del-dir -h"
def options():
"""
Provides some information regarding the available options
"""
print """Options summary:
-h, --help
-u, --usage
-v, --verbose
-r, --recursive
-l, --remove-ext-links
-x, --remove-ext-lfns
-f, --from-file
"""
def help():
"""
Provides some help information
"""
print """Removes the specified directory from the LFC name server.
If the "-r" option is specified, then the directory will be removed even if it
is not empty and all the entries in the directory (including subdirectories) will
be removed as well, in a recursive way. Otherwise, the deletion will be performed
only if the directory is empty.
ATTENTION: This script does not check if a physical file is associated with a replica
name, and does not try to delete those physical files either. It just deletes the
directory, the LFNs and the SFNs in the catalog.
The default action is to remove only the specified directory contents and not to
touch external sym links pointing to LFNs in the directory, nor entries pointed
to by links located in the directory.
If "-l" is specified, then links located in other directories that point to LFNs in
the specified directory are also removed.
If "-x" is specified, then LFNs (and their replicas and all their sym links)
located in other directories and pointed to by links in the specified directory are
also removed.
The "-f" option can be used to specify directory names in a file rather than in the
arguments. The file must contain one dir per line (and nothing else in each line).
In this case, the "-r", "-l" and "-x" option have the same meaning as before and
affect all directories included in <listfile>.
If "-u" is specified, usage information is displayed.
If "-h" is specified, help information is displayed.
"""
options()
usage()
def deleteOneDir(pDir, pForce, pExtLinks, pExtLfns, pVerbose):
"""
Tries to delete the specified directory. If "pForce" is true, then the
directory will be removed even if it is not empty and all the entries in
the directory (including subdirectories) will be removed as well, in a
recursive way. Otherwise, the deletion will be performed only if the
directory is empty.
If pExtLinks == pExtLfns == False, the function will remove only the
specified directory contents and will not to touch external sym links
pointing to LFNs in the directory, nor entries pointed to by links located
in the directory.
If pExtLinks == True, then links located in other directories that point
to LFNs in the specified directory are also removed.
If pExtLfns == True, then LFNs (and their replicas and all their sym links)
located in other directories and pointed to by links in the specified
directory are also removed.
"""
dir = pDir
force = pForce
extLinks = pExtLinks
extLfns = pExtLfns
verbose = pVerbose
rc = 0
err = 0
subdirlist = [] # Subdirectories to remove when we are done with current one
[dir, parentdir] = lfc_del_lfn.checkHomeDir(dir)
fulldir = parentdir+dir
if(verbose):
print "--Deleting Dir: "+dir
err = lfc.lfc_chdir(fulldir)
if(err < 0):
sys.stderr.write("Error changing dir to: "+fulldir+" : "+lfc.sstrerror(lfc.cvar.serrno)+"\n")
return err
dir_p=lfc.lfc_DIR()
dir_entry=lfc.lfc_direnstatg()
dir_p=lfc.lfc_opendirg(".", "")
if(dir_p < 0):
sys.stderr.write("Error opening specified dir: "+dir+": "+lfc.sstrerror(lfc.cvar.serrno)+"\n")
return -1
# Read first entry
lfc.lfc_rewinddir(dir_p)
dir_entry=lfc.lfc_readdirg(dir_p)
if(force):
# Remove all LFNs in a loop
S_IFDIR = 0x4000
S_IFLNK = 0xA000
while(dir_entry):
lfn = dir_entry.d_name
if(dir_entry.filemode & S_IFDIR):
# This entry is a directory
subdirlist.append(lfn)
else:
if((dir_entry.filemode & S_IFLNK) == S_IFLNK):
# This entry is a sym link
if(extLfns):
# Remove the replicas and all the links (including main LFN)
err = lfc_del_lfn.deleteOneEntry(lfn, fulldir, False, True, verbose)
else:
# Remove only the sym link (no replicas)
err = lfc_del_lfn.deleteOneLFN(lfn, verbose)
else:
# This entry is a main LFN
# # First check if the file has been alredy deleted (links, etc...)
# fstat = lfc.lfc_filestatg()
# if(lfc.lfc_statg(lfn, "", fstat)<0):
# if(verbose):
# print "--Warning. Skipping deletion of non-accessible file:",lfn,":",\
# lfc.sstrerror(lfc.cvar.serrno)
# else:
if(extLinks):
# Remove the replicas and all the links that point to this LFN
err = lfc_del_lfn.deleteOneEntry(lfn, fulldir, False, True, verbose)
else:
# Remove only this LFN and replicas (but no external sym links)
err = lfc_del_lfn.deleteOneEntry(lfn, fulldir, False, False, verbose)
if(err): rc = err
dir_entry=lfc.lfc_readdirg(dir_p)
else:
if(dir_entry):
sys.stderr.write("Error: Directory "+dir+" not empty! Consider using -r.\n")
return -1
# Close the directory
if(lfc.lfc_closedir(dir_p) < 0):
sys.stderr.write("Error closing dir: "+dir+" : "+lfc.sstrerror(lfc.cvar.serrno)+"\n")
# Remove all subdirectories in the list
for subdir in subdirlist:
err = deleteOneDir(fulldir+'/'+subdir, force, extLinks, extLfns, verbose)
if(err): rc=err
# Finally, remove also the top directory itself
err = lfc.lfc_chdir("..")
if(err < 0):
sys.stderr.write("Error changing dir to \"..\" : "+lfc.sstrerror(lfc.cvar.serrno)+"\n")
return err
if(verbose):
print "--lfc.lfc_unlink(\""+dir+"\")"
err = lfc.lfc_rmdir(dir)
if(err<0):
sys.stderr.write("Error removing dir: "+dir+": "+lfc.sstrerror(lfc.cvar.serrno)+"\n")
return err
# Return the error code
# return rc
return err
def deleteDirs(pDirList, pForce, pExtLinks, pExtLfns, pVerbose):
"""
Tries to delete all the directories specified in the list by calling
deleteOneDir. Please check the help information of that function.
"""
dirList = pDirList
force = pForce
extLinks = pExtLinks
extLfns = pExtLfns
verbose = pVerbose
rc = 0
for dir in dirList:
dir = dir.strip()
err = deleteOneDir(dir, force, extLinks, extLfns, verbose)
if(err): rc=err
# Return the error code
return rc
###################### MAIN FUNCTION ########################
def main(pArgs):
"""
Performes the main task of the script (invoked directly).
For information on its functionality, please call the help function.
"""
# Options and args...
longoptions=["help", "usage", "verbose", "--recursive",\
"--remove-ext-links", "--remove-ext-links", "from-file"]
try:
optlist, args = getopt.getopt(pArgs, 'huvrlxf:', longoptions)
except getopt.GetoptError, inst:
sys.stderr.write("Bad usage: "+str(inst)+'\n')
usage()
sys.exit(-1)
force = False
verbose = False
extLinks = False
extLfns = False
fromFile = False
fname=""
for opt, val in optlist:
if opt in ("-h", "--help"):
help()
return -1
elif opt in ("-u", "--usage"):
usage()
return -1
elif opt in ("-v", "--verbose"):
verbose = True
elif opt in ("-r", "--recursive"):
force = True
elif opt in ("-l", "--remove-ext-links"):
extLinks = True
elif opt in ("-x", "--remove-ext-lfns"):
extLfns = True
elif opt in ("-f","--from-file"):
fromFile = True
fname = val
# Build the list of directories to remove
# From file
if(fromFile):
try:
file=open(fname, 'r')
except IOError, inst:
msg="The file "+fname+" could not be opened: "+str(inst)+"\n"
sys.stderr.write(msg)
return -1
dirList=file.readlines()
# From command line options
else:
if(len(args)<1):
print "Not enough input arguments"
usage()
return(-1)
dirList=[args[0]]
# Do the removal (under session)
lfc.lfc_startsess("", "")
err = deleteDirs(dirList, force, extLinks, extLfns, verbose)
lfc.lfc_endsess()
# Finally, if no error exited before, exit succesfully
return err
######################### SCRIPT ###########################
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| [
"giulio.eulisse@cern.ch"
] | giulio.eulisse@cern.ch |
6698430d0534a3155967eb8132b19896306de410 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/1db4d26d8e394cf98ebb2776a7ba6fe5.py | c62d39d883d3124df740dd392844ba4d555c4865 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 427 | py | class Bob:
def hey(self, msg):
if self.is_silence(msg): return "Fine. Be that way!"
elif self.is_shout(msg): return "Woah, chill out!"
elif self.is_question(msg): return "Sure."
else: return "Whatever."
def is_silence(self, msg): return msg is None or msg.strip() == ''
def is_shout(self, msg): return msg.isupper()
def is_question(self, msg): return msg.endswith("?")
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
86e8830dc41912e66dbfe47098a7850059068a9d | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-3715.py | b6d89870565f6b0f4c141fe60397888c2390703c | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,746 | py | # A resizable list of integers
class Vector(object):
    # Backing storage and count of used slots
    items: [int] = None
    size: int = 0
    def __init__(self:"Vector"):
        self.items = [0]
    # Number of allocated slots in the backing list
    def capacity(self:"Vector") -> int:
        return len(self.items)
    # Grow the backing list by a single slot
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Store one value after the last used slot, growing if full
    def append(self:"Vector", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Push every value from new_items, in order
    def append_all(self:"Vector", new_items: [int]) -> object:
        value:int = 0
        for value in new_items:
            self.append(value)
    # Drop the element at idx, shifting later elements one slot left
    def remove_at(self:"Vector", idx: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Read the element stored at idx
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]
    # Count of elements currently stored
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
class Vector2(object):
    # Backing storage and count; the *2 fields are unused synthetic padding
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0
    def __init__(self:"Vector2"):
        self.items = [0]
    # Number of allocated slots
    def capacity(self:"Vector2") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity2(self:"Vector2") -> int:
        return len(self.items)
    # Grow the backing list by one slot
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Store one value after the last used slot, growing if full
    def append(self:"Vector2", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); item2 is ignored
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Push every value from new_items, in order
    def append_all(self:"Vector2", new_items: [int]) -> object:
        value:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); new_items2 is ignored
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        value:int = 0
        value2:int = 0
        for value in new_items:
            self.append(value)
    # Drop the element at idx, shifting later elements one slot left
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); idx2 is ignored
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Read the element stored at idx
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]
    # Same as get(); idx2 is ignored
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Count of elements currently stored
    def length(self:"Vector2") -> int:
        return self.size
    # Padding twin of length()
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
class Vector3(object):
    # Backing storage and count; the *2/*3 fields are unused synthetic padding
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    def __init__(self:"Vector3"):
        self.items = [0]
    # Number of allocated slots
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Grow the backing list by one slot
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Store one value after the last used slot, growing if full
    def append(self:"Vector3", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Push every value from new_items, in order
    def append_all(self:"Vector3", new_items: [int]) -> object:
        value:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        value:int = 0
        value2:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        value:int = 0
        value2:int = 0
        value3:int = 0
        for value in new_items:
            self.append(value)
    # Drop the element at idx, shifting later elements one slot left
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Read the element stored at idx
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Count of elements currently stored
    def length(self:"Vector3") -> int:
        return self.size
    # Padding twin of length()
    def length2(self:"Vector3") -> int:
        return self.size
    # Padding twin of length()
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
class Vector4(object):
    # Backing storage and count; the *2/*3/*4 fields are unused synthetic padding
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    def __init__(self:"Vector4"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra lists are ignored)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra lists are ignored)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra lists are ignored)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    # Fixed: the shift assignment previously read "$Index = ..." — an invalid
    # token that was a syntax error and dropped the left-shift entirely.
    # Restored to match remove_at/remove_at3/remove_at4.
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra indices are ignored)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra indices are ignored)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra indices are ignored)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra indices are ignored)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra indices are ignored)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
class Vector5(object):
    # Backing storage and count; the *2..*5 fields are unused synthetic padding
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0
    def __init__(self:"Vector5"):
        self.items = [0]
    # Number of allocated slots
    def capacity(self:"Vector5") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity2(self:"Vector5") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity3(self:"Vector5") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity4(self:"Vector5") -> int:
        return len(self.items)
    # Padding twin of capacity()
    def capacity5(self:"Vector5") -> int:
        return len(self.items)
    # Grow the backing list by one slot
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity2(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Store one value after the last used slot, growing if full
    def append(self:"Vector5", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Same as append(); extra arguments are ignored
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Push every value from new_items, in order
    def append_all(self:"Vector5", new_items: [int]) -> object:
        value:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        value:int = 0
        value2:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        value:int = 0
        value2:int = 0
        value3:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        value:int = 0
        value2:int = 0
        value3:int = 0
        value4:int = 0
        for value in new_items:
            self.append(value)
    # Same as append_all(); extra lists are ignored
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        value:int = 0
        value2:int = 0
        value3:int = 0
        value4:int = 0
        value5:int = 0
        for value in new_items:
            self.append(value)
    # Drop the element at idx, shifting later elements one slot left
    def remove_at(self:"Vector5", idx: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Same as remove_at(); extra indices are ignored
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        if idx >= 0:
            while idx < self.size - 1:
                self.items[idx] = self.items[idx + 1]
                idx = idx + 1
            self.size = self.size - 1
    # Read the element stored at idx
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Same as get(); extra indices are ignored
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]
    # Count of elements currently stored
    def length(self:"Vector5") -> int:
        return self.size
    # Padding twin of length()
    def length2(self:"Vector5") -> int:
        return self.size
    # Padding twin of length()
    def length3(self:"Vector5") -> int:
        return self.size
    # Padding twin of length()
    def length4(self:"Vector5") -> int:
        return self.size
    # Padding twin of length()
    def length5(self:"Vector5") -> int:
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    # Above this capacity, doubling stops and growth is one slot at a time
    doubling_limit:int = 1000
    # Amortize resizes: double the backing list while it is small
    def increase_capacity(self:"DoublingVector") -> int:
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: fall back to single-slot growth
            self.items = self.items + [0]
        else:
            # Still small: double the storage in one step
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    # Above this capacity, doubling stops; the *2 field is unused padding
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    # Amortize resizes: double the backing list while it is small
    def increase_capacity(self:"DoublingVector2") -> int:
        if self.capacity() > self.doubling_limit // 2:
            # Past the limit: fall back to single-slot growth
            self.items = self.items + [0]
        else:
            # Still small: double the storage in one step
            self.items = self.items + self.items
        return self.capacity()
    # Padding twin of increase_capacity()
    def increase_capacity2(self:"DoublingVector2") -> int:
        if self.capacity() > self.doubling_limit // 2:
            self.items = self.items + [0]
        else:
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000

    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000

    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000

    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()

    # Overriding to do fewer resizes
    def increase_capacity5(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v

# vrange2..vrange5 are arity variants: the extra (iN, jN) arguments and vN
# locals are declared but never used; only (i, j) drive the loop.
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v

def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v

def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v

def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really): keep the first remaining value, then
# delete every later element it divides; repeat for each survivor. Applied
# to a vector holding [2, n) this leaves exactly the primes below n.
def sieve(v:Vector) -> object:
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1

# sieve2..sieve5 are arity variants of sieve: the extra vector arguments and
# extra locals are declared but never touched; only v is sieved.
def sieve2(v:Vector, v2:Vector) -> object:
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1

def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1

def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1

def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter (n2..n5 are unused copies kept for the arity variants)
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch: build [2, n) five times, then sieve the first copy down to primes.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print the surviving (prime) values, one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
b24b55082c3a32467c6afbcd5ff6a5a3d8a35825 | abad82a1f487c5ff2fb6a84059a665aa178275cb | /Codewars/8kyu/sum-arrays/Python/test.py | faa9af95abd70d991ed0505af0f19b5b5af928ae | [
"MIT"
] | permissive | RevansChen/online-judge | 8ae55f136739a54f9c9640a967ec931425379507 | ad1b07fee7bd3c49418becccda904e17505f3018 | refs/heads/master | 2021-01-19T23:02:58.273081 | 2019-07-05T09:42:40 | 2019-07-05T09:42:40 | 88,911,035 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | # Python - 3.6.0
# Codewars kata harness: both `test` and the solution `sum_array` are
# injected into the global scope by the Codewars runner.
test.describe('Testing sum_array')
test.assert_equals(sum_array([]), 0)
test.assert_equals(sum_array([1, 2, 3]), 6)
# NOTE(review): float sum — presumably the kata accepts exact equality here.
test.assert_equals(sum_array([1.1, 2.2, 3.3]), 6.6)
test.assert_equals(sum_array([4, 5, 6]), 15)
test.assert_equals(sum_array(range(101)), 5050)
| [
"d79523@hotmail.com"
] | d79523@hotmail.com |
1f0a5fb8f0d658bce1db0b8ddd311f764e35e0b3 | d6eca1b4b056beb41ac494db7399e1f146099c97 | /chapter7/tickets.py | ffd2252421041380e52db53708e975b7c02002bf | [] | no_license | liangsongyou/python-crash-course-code | 15090b48d77de1115bfaaaa6e5638a9bb9b3c7cc | f369e18030f2aafe358dd0fab1e479ca7bf4ceb8 | refs/heads/master | 2021-05-08T06:42:29.147923 | 2017-08-11T06:41:30 | 2017-08-11T06:41:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 291 | py |
while True:
age = input('Enter your age: ')
age = int(age)
if age <= 3:
print('Your ticked is free.')
elif age > 3 and age <= 12:
print('Your ticket costs $10.')
elif age > 12 and age < 100:
print('Your ticket costs $15.')
else:
break | [
"ramzanm461@gmail.com"
] | ramzanm461@gmail.com |
92256c004ae0664949bd41cfb353f346ebcd4d51 | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/blink/tools/blinkpy/tool/commands/analyze_baselines_unittest.py | e4f87d47ea4bb316fe05d5395299a3a5e1ec8af5 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 1,865 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
from blinkpy.common.checkout.baseline_optimizer import BaselineOptimizer
from blinkpy.tool.commands.analyze_baselines import AnalyzeBaselines
from blinkpy.tool.commands.rebaseline_unittest import BaseTestCase
class _FakeOptimizer(BaselineOptimizer):
    """Optimizer stub: one fixed result for *.txt baselines, none otherwise."""

    def read_results_by_directory(self, baseline_name):
        if baseline_name.endswith('txt'):
            return {'LayoutTests/passes/text.html': '123456'}
        return {}
class TestAnalyzeBaselines(BaseTestCase):
    """Exercises the analyze-baselines command against a stubbed optimizer."""

    command_constructor = AnalyzeBaselines

    def setUp(self):
        super(TestAnalyzeBaselines, self).setUp()
        self.port = self.tool.port_factory.get('test')
        # Pin the factory so every lookup yields the same test port.
        self.tool.port_factory.get = (lambda port_name=None, options=None: self.port)
        self.lines = []
        self.command._optimizer_class = _FakeOptimizer
        # Collect command output lines instead of writing them out.
        self.command._write = (lambda msg: self.lines.append(msg))

    def test_default(self):
        self.command.execute(
            optparse.Values(dict(suffixes='txt', missing=False, platform=None)),
            ['passes/text.html'],
            self.tool)
        self.assertEqual(self.lines,
                         ['passes/text-expected.txt:',
                          ' (generic): 123456'])

    def test_missing_baselines(self):
        self.command.execute(
            optparse.Values(
                dict(
                    suffixes='png,txt',
                    missing=True,
                    platform=None)),
            ['passes/text.html'],
            self.tool)
        self.assertEqual(self.lines,
                         ['passes/text-expected.png: (no baselines found)',
                          'passes/text-expected.txt:',
                          ' (generic): 123456'])
| [
"arnaud@geometry.ee"
] | arnaud@geometry.ee |
608b5e977d1b0a6a8f79dfba90f9ab04016f5ca6 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /lightsail_write_3/disk_attach.py | b938c3125f8d75bda179ba8d7dde6dc7059c53e4 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,459 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_three_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/attach-disk.html
if __name__ == '__main__':
    # Related Lightsail disk commands, kept from the original notes:
    """
create-disk : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/create-disk.html
delete-disk : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/delete-disk.html
detach-disk : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/detach-disk.html
get-disk : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/get-disk.html
get-disks : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lightsail/get-disks.html
    """
    # Help text shown to the user for the three required CLI parameters.
    parameter_display_string = """
# disk-name : The unique Lightsail disk name (e.g., my-disk ).
# instance-name : The name of the Lightsail instance where you want to utilize the storage disk.
# disk-path : The disk path to expose to the instance (e.g., /dev/xvdf ).
"""
    add_option_dict = {}
    add_option_dict["parameter_display_string"] = parameter_display_string
    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
    # Dispatch: aws lightsail attach-disk --disk-name X --instance-name Y --disk-path Z
    write_three_parameter("lightsail", "attach-disk", "disk-name", "instance-name", "disk-path", add_option_dict)
| [
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
5dadcf6769ca9cfb7e4763f9392c7c09c08719c0 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_betrayals.py | 05743bfbaf3cff77441e9c0efd52c66000980796 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py |
#calss header
class _BETRAYALS():
def __init__(self,):
self.name = "BETRAYALS"
self.definitions = betrayal
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['betrayal']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
c3c01681e8c4b575ef58c93015e09cc5d33ba439 | c618bbf2719431999b1007461df0865bab60c883 | /docs/examples/use_cases/tensorflow/efficientdet/pipeline/dali/ops_util.py | 95646d590d6ac4f04b21f56c818feb8efd9f31f9 | [
"Apache-2.0"
] | permissive | NVIDIA/DALI | 3d0d061135d19e092647e6522046b2ff23d4ef03 | 92ebbe5c20e460050abd985acb590e6c27199517 | refs/heads/main | 2023-09-04T01:53:59.033608 | 2023-09-01T13:45:03 | 2023-09-01T13:45:03 | 135,768,037 | 4,851 | 648 | Apache-2.0 | 2023-09-12T18:00:22 | 2018-06-01T22:18:01 | C++ | UTF-8 | Python | false | false | 5,776 | py | # Copyright 2021 Kacper Kluk, Paweł Anikiel, Jagoda Kamińska. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import math
import nvidia.dali as dali
def input_tfrecord(
    tfrecord_files, tfrecord_idxs, device, shard_id, num_shards, random_shuffle=True
):
    """Read one shard of an object-detection TFRecord dataset with DALI.

    Returns (images, bboxes, classes, widths, heights). Boxes are stacked
    per-coordinate features in [xmin, ymin, xmax, ymax] order — presumably
    normalized, per the TF object-detection convention (confirm upstream).
    """
    inputs = dali.fn.readers.tfrecord(
        path=tfrecord_files,
        index_path=tfrecord_idxs,
        features={
            "image/encoded": dali.tfrecord.FixedLenFeature(
                (), dali.tfrecord.string, ""
            ),
            "image/height": dali.tfrecord.FixedLenFeature((), dali.tfrecord.int64, -1),
            "image/width": dali.tfrecord.FixedLenFeature((), dali.tfrecord.int64, -1),
            "image/object/bbox/xmin": dali.tfrecord.VarLenFeature(
                dali.tfrecord.float32, 0.0
            ),
            "image/object/bbox/xmax": dali.tfrecord.VarLenFeature(
                dali.tfrecord.float32, 0.0
            ),
            "image/object/bbox/ymin": dali.tfrecord.VarLenFeature(
                dali.tfrecord.float32, 0.0
            ),
            "image/object/bbox/ymax": dali.tfrecord.VarLenFeature(
                dali.tfrecord.float32, 0.0
            ),
            "image/object/class/label": dali.tfrecord.VarLenFeature(
                dali.tfrecord.int64, 0
            ),
        },
        shard_id=shard_id,
        num_shards=num_shards,
        random_shuffle=random_shuffle,
    )
    # Decode on GPU ("mixed") only when the pipeline targets the gpu device.
    images = dali.fn.decoders.image(
        inputs["image/encoded"],
        device="mixed" if device == "gpu" else "cpu",
        output_type=dali.types.RGB,
    )
    xmin = inputs["image/object/bbox/xmin"]
    xmax = inputs["image/object/bbox/xmax"]
    ymin = inputs["image/object/bbox/ymin"]
    ymax = inputs["image/object/bbox/ymax"]
    # Stack the four coordinate vectors into (4, N), then transpose to (N, 4).
    bboxes = dali.fn.transpose(dali.fn.stack(xmin, ymin, xmax, ymax), perm=[1, 0])
    classes = dali.fn.cast(inputs["image/object/class/label"], dtype=dali.types.INT32)
    return (
        images,
        bboxes,
        classes,
        dali.fn.cast(inputs["image/width"], dtype=dali.types.FLOAT),
        dali.fn.cast(inputs["image/height"], dtype=dali.types.FLOAT),
    )
def input_coco(
    images_path, annotations_path, device, shard_id, num_shards, random_shuffle=True
):
    """Read one shard of a raw COCO dataset (image dir + JSON annotations).

    Returns (images, bboxes, classes, widths, heights); ratio=True and
    ltrb=True make boxes normalized [xmin, ymin, xmax, ymax].
    """
    encoded, bboxes, classes = dali.fn.readers.coco(
        file_root=images_path,
        annotations_file=annotations_path,
        ratio=True,
        ltrb=True,
        shard_id=shard_id,
        num_shards=num_shards,
        random_shuffle=random_shuffle,
    )
    images = dali.fn.decoders.image(
        encoded,
        device="mixed" if device == "gpu" else "cpu",
        output_type=dali.types.RGB,
    )
    # peek_image_shape yields (H, W, C) without decoding the whole image.
    shape = dali.fn.peek_image_shape(encoded, dtype=dali.types.FLOAT)
    heights = shape[0]
    widths = shape[1]
    return (
        images,
        bboxes,
        classes,
        widths,
        heights,
    )
def normalize_flip(images, bboxes, p=0.5):
    """Randomly mirror each sample with probability p, normalize the images
    with ImageNet mean/std, and flip the boxes to stay consistent."""
    flip = dali.fn.random.coin_flip(probability=p)
    images = dali.fn.crop_mirror_normalize(
        images,
        mirror=flip,
        mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
        std=[0.229 * 255, 0.224 * 255, 0.225 * 255],
        output_layout=dali.types.NHWC,
    )
    # Mirror the boxes with the same per-sample coin flip.
    bboxes = dali.fn.bb_flip(bboxes, horizontal=flip, ltrb=True)
    return images, bboxes
def gridmask(images, widths, heights):
    """Apply GridMask augmentation to roughly half the samples.

    A per-sample coin flip drives ``ratio``: when it lands 0 the mask ratio
    is 0 and the image is effectively left unmasked.
    """
    p = dali.fn.random.coin_flip()
    ratio = 0.4 * p
    # Small random rotation of the grid, in radians.
    angle = dali.fn.random.normal(mean=-1, stddev=1) * 10.0 * (math.pi / 180.0)
    # Tile size sampled uniformly between min/max of the two size fractions.
    l = dali.math.min(0.5 * heights, 0.3 * widths)
    r = dali.math.max(0.5 * heights, 0.3 * widths)
    tile = dali.fn.cast(
        (dali.fn.random.uniform(range=[0.0, 1.0]) * (r - l) + l),
        dtype=dali.types.INT32,
    )
    # Bug fix: the original assigned the grid_mask result to an unused local
    # and returned the untouched input, so the augmentation never applied.
    images = dali.fn.grid_mask(
        images, ratio=ratio, angle=angle, tile=tile
    )
    return images
def random_crop_resize(
    images, bboxes, classes, widths, heights, output_size, scaling=[0.1, 2.0]
):
    """Randomly scale each image, take a random box-preserving crop of
    ``output_size``, and return the cropped (images, bboxes, classes).

    NOTE(review): the mutable default ``scaling`` is never mutated here, so
    the shared-default pitfall does not bite; kept for interface fidelity.
    """
    if scaling is None:
        scale_factor = 1.0
    else:
        scale_factor = dali.fn.random.uniform(range=scaling)
    sizes = dali.fn.stack(heights, widths)
    # Fit the scaled image inside scale_factor * output_size, preserving
    # aspect ratio.
    image_scale = dali.math.min(
        scale_factor * output_size[0] / widths,
        scale_factor * output_size[1] / heights,
    )
    scaled_sizes = dali.math.floor(sizes * image_scale + 0.5)
    images = dali.fn.resize(
        images,
        size=scaled_sizes
    )
    # Pick a crop window that keeps boxes valid; never skips cropping.
    anchors, shapes, bboxes, classes = dali.fn.random_bbox_crop(
        bboxes,
        classes,
        crop_shape=output_size,
        input_shape=dali.fn.cast(scaled_sizes, dtype=dali.types.INT32),
        bbox_layout="xyXY",
        allow_no_crop=False,
        total_num_attempts=64,
    )
    # Cut the window out of the resized image, padding out-of-bounds areas.
    images = dali.fn.slice(
        images,
        anchors,
        shapes,
        normalized_anchor=False,
        normalized_shape=False,
        out_of_bounds_policy="pad"
    )
    return (
        images,
        bboxes,
        classes,
    )
def bbox_to_effdet_format(bboxes, image_size):
    """Convert normalized [x1, y1, x2, y2] boxes to absolute EfficientDet
    order [y1, x1, y2, x2] by scaling with the image size."""
    w = image_size[0]
    h = image_size[1]
    # 4x4 row-major transform: swaps x/y pairs and scales by (w, h).
    M = [0.0, h, 0.0, 0.0,
         w, 0.0, 0.0, 0.0,
         0.0, 0.0, 0.0, h,
         0.0, 0.0, w, 0.0]
    return dali.fn.coord_transform(bboxes, M=M)
| [
"noreply@github.com"
] | NVIDIA.noreply@github.com |
88cab9cb1c0d4c437de927380f3ad17b376e84de | 9e8d98c48035d4ee61fa930c324c822a61e5ae55 | /_examples/chebyshevinputs.py | fd844e75495d4edbc26e2c045064fd1e94220c7e | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | GRSEB9S/mystic | 59ac0c284a19f7b685a98420cd49d21bb10ff0cd | 748e0030c8d7d8b005f2eafa17a4581c2b3ddb47 | refs/heads/master | 2021-08-14T07:11:04.439139 | 2017-11-14T23:49:22 | 2017-11-14T23:49:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,165 | py | #!/usr/bin/env python
#
# Author: Alta Fang (altafang @caltech and alta @princeton)
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# Copyright (c) 2016-2017 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/mystic/blob/master/LICENSE
"""
chebyshevinputs.py -- cost function container module for NelderMeadSimplexSolver
and PowellDirectionalSolver for testsolvers_pyre.py
"""
from mystic.models.poly import chebyshev8cost as cost
from mystic.models.poly import chebyshev8coeffs
from mystic.termination import *
ND = 9
maxiter = 999
from numpy import inf
import random
from mystic.tools import random_seed
# Fixed seed so the randomized initial guess below is reproducible.
random_seed(123)
# Initial guess: the exact Chebyshev coefficients perturbed by U[-5, 5).
x0 = [random.uniform(-5,5) + chebyshev8coeffs[i] for i in range(ND)]
# used with SetStrictRanges
min_bounds = [ 0,-1,-300,-1, 0,-1,-100,-inf,-inf]
max_bounds = [200, 1, 0, 1,200, 1, 0, inf, inf]
# Active termination condition; the alternatives below are kept for reference.
termination = CandidateRelativeTolerance()
#termination = VTR()
#termination = ChangeOverGeneration()
#termination = NormalizedChangeOverGeneration()
# End of file
| [
"mmckerns@968178ea-60bd-409e-af13-df8a517b6005"
] | mmckerns@968178ea-60bd-409e-af13-df8a517b6005 |
05778ada3455877fbc552bfc4121758fc5656931 | 31fb7c74b94e46a325e6b05501c6972a401cf423 | /PYTHON/BASIC_PYTHON/수업내용/02/02-004.py | 817847885b9201127341bbd8b38a25796a623130 | [] | no_license | superf2t/TIL | f2dacc30d6b89f3717c0190ac449730ef341f6a4 | cadaaf952c44474bed9b8af71e70754f3dbf86fa | refs/heads/master | 2022-04-10T13:55:24.019310 | 2019-12-12T11:15:31 | 2019-12-12T11:15:31 | 268,215,746 | 1 | 0 | null | 2020-05-31T05:32:46 | 2020-05-31T05:32:46 | null | UTF-8 | Python | false | false | 132 | py | #02-004.py
# Header row showing the date format, then the date joined with '-' and,
# on the same line (end=''), a trailing banner.
print('yyyy', 'mm', 'dd')
print('2022', '02', '20', sep='-', end='')
print(' -- BIG EVENT DAY --')
| [
"noreply@github.com"
] | superf2t.noreply@github.com |
e0875b356ad0a43764964da670ce52fd0c555baf | 632eee486e432d1bc2a7c771db7e9a06f7cad7a9 | /1003-soma-simples.py | 5db3660b37e947402ed9bff53646d073e66e5ad0 | [] | no_license | L4res/URI-Python | d1c578d87201151540876a6b8eca2aecd833a953 | 2f59387ca38e16f6396a6ea677d71f7c2c919fc2 | refs/heads/master | 2023-03-25T17:34:57.635187 | 2020-06-15T18:20:03 | 2020-06-15T18:20:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | a = int(input())
b = int(input())
print("SOMA =", a+b)
| [
"noreply@github.com"
] | L4res.noreply@github.com |
8802f191cac1c6ef42a71b05affacdd0c7c9eebf | cc231776124e9b596e4d9557ec09f9275d15eb20 | /example/DjangoApp/wsgi.py | 3b1bae77e146866ec61b9f61a19834583fb350b3 | [
"MIT"
] | permissive | grengojbo/docker-django | 2c742bc9f46939b6614b2f49a0981b848c250851 | 8e1f4b3ce622bc1fd3a9127f4f3519ace7aeee5b | refs/heads/master | 2016-09-15T18:20:41.607328 | 2014-09-12T12:51:37 | 2014-09-12T12:51:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,604 | py | """
WSGI config for fiber project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import SocketServer
from wsgiref import handlers
# Monkey-patch: replace socket-level error reporting and WSGI exception
# logging with no-ops, so per-request failures are swallowed silently.
# NOTE: `SocketServer` is the Python 2 spelling (renamed `socketserver` in
# Python 3) — this module targets Python 2.
SocketServer.BaseServer.handle_error = lambda *args, **kwargs: None
handlers.BaseHandler.log_exception = lambda *args, **kwargs: None
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "fiber.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoApp.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"oleg.dolya@gmail.com"
] | oleg.dolya@gmail.com |
9451e8ae129916ff18db2c412765cd8c447a2097 | f3c891e43aa828ad0653e6c6bf918f46b78ad7b4 | /setup.py | 5e6d4a4c8d2416841ef9bb3af7755c0fd3db1d10 | [] | no_license | ojustino/pywwt-web | 6a655c1b30c43c2c44ab425a4b815345831e1642 | db70c20da496c52f400cdae0f301d7b5940f530a | refs/heads/master | 2021-06-16T16:08:50.804918 | 2017-11-27T16:40:18 | 2017-11-27T16:40:18 | 112,221,435 | 0 | 0 | null | 2017-11-27T16:38:31 | 2017-11-27T16:38:30 | null | UTF-8 | Python | false | false | 5,854 | py | from __future__ import print_function
from setuptools import setup, find_packages, Command
from setuptools.command.sdist import sdist
from setuptools.command.build_py import build_py
from setuptools.command.egg_info import egg_info
from subprocess import check_call
import os
import sys
import platform
here = os.path.dirname(os.path.abspath(__file__))
node_root = os.path.join(here, 'js')
is_repo = os.path.exists(os.path.join(here, '.git'))
npm_path = os.pathsep.join([
os.path.join(node_root, 'node_modules', '.bin'),
os.environ.get('PATH', os.defpath),
])
from distutils import log
log.set_verbosity(log.DEBUG)
log.info('setup.py entered')
log.info('$PATH=%s' % os.environ['PATH'])
LONG_DESCRIPTION = 'WorldWideTelescope Jupyter widget'
def js_prerelease(command, strict=False):
    """decorator for building minified js/css prior to another command"""
    class DecoratedCommand(command):
        def run(self):
            jsdeps = self.distribution.get_command_obj('jsdeps')
            if not is_repo and all(os.path.exists(t) for t in jsdeps.targets):
                # sdist, nothing to do
                command.run(self)
                return
            try:
                self.distribution.run_command('jsdeps')
            except Exception as e:
                missing = [t for t in jsdeps.targets if not os.path.exists(t)]
                if strict or missing:
                    log.warn('rebuilding js and css failed')
                    if missing:
                        log.error('missing files: %s' % missing)
                    raise e
                else:
                    # Non-strict and nothing missing: warn but carry on.
                    log.warn('rebuilding js and css failed (not a problem)')
                    log.warn(str(e))
            command.run(self)
            update_package_data(self.distribution)
    return DecoratedCommand
def update_package_data(distribution):
    """Refresh package_data so files created during setup are picked up."""
    cmd = distribution.get_command_obj('build_py')
    # distribution.package_data = find_package_data()
    # Re-initialise build_py options, which re-load package_data.
    cmd.finalize_options()
class NPM(Command):
    """setuptools command: run `npm install` and verify the built JS targets."""

    description = 'install package.json dependencies using npm'
    user_options = []
    node_modules = os.path.join(node_root, 'node_modules')
    targets = [
        os.path.join(here, 'pywwt_web', 'static', 'extension.js'),
        os.path.join(here, 'pywwt_web', 'static', 'index.js')
    ]

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def get_npm_name(self):
        # npm ships as npm.cmd on Windows.
        if platform.system() == 'Windows':
            return 'npm.cmd'
        return 'npm'

    def has_npm(self):
        try:
            check_call([self.get_npm_name(), '--version'])
            return True
        except:
            # Deliberately broad: any failure means npm is unusable.
            return False

    def should_run_npm_install(self):
        # NOTE(review): these two locals were computed but unused upstream;
        # kept for fidelity — only npm availability decides.
        package_json = os.path.join(node_root, 'package.json')
        node_modules_exists = os.path.exists(self.node_modules)
        return self.has_npm()

    def run(self):
        has_npm = self.has_npm()
        if not has_npm:
            log.error("`npm` unavailable. If you're running this command using sudo, make sure `npm` is available to sudo")
        env = os.environ.copy()
        env['PATH'] = npm_path
        if self.should_run_npm_install():
            log.info("Installing build dependencies with npm. This may take a while...")
            check_call([self.get_npm_name(), 'install'], cwd=node_root, stdout=sys.stdout, stderr=sys.stderr)
            os.utime(self.node_modules, None)
        for t in self.targets:
            if not os.path.exists(t):
                msg = 'Missing file: %s' % t
                if not has_npm:
                    msg += '\nnpm is required to build a development version of a widget extension'
                raise ValueError(msg)
        # update package data in case this created new files
        update_package_data(self.distribution)
version_ns = {}
# Read __version__ from the package without importing it.
with open(os.path.join(here, 'pywwt_web', '_version.py')) as f:
    exec(f.read(), {}, version_ns)

setup_args = {
    'name': 'pywwt_web',
    'version': version_ns['__version__'],
    'description': 'WorldWideTelescope Jupyter widget',
    'long_description': LONG_DESCRIPTION,
    'include_package_data': True,
    'data_files': [
        ('share/jupyter/nbextensions/pywwt_web', [
            'pywwt_web/static/extension.js',
            'pywwt_web/static/index.js',
            'pywwt_web/static/index.js.map',
            'pywwt_web/static/wwt.html',
            'pywwt_web/static/wwt_json_api.js',
        ]),
    ],
    'install_requires': [
        'ipywidgets>=7.0.0',
        'ipyevents',
        'traitlets',
        'astropy',
    ],
    'packages': find_packages(),
    'zip_safe': False,
    'cmdclass': {
        # Build the JS bundle before the Python build/packaging steps.
        'build_py': js_prerelease(build_py),
        'egg_info': js_prerelease(egg_info),
        'sdist': js_prerelease(sdist, strict=True),
        'jsdeps': NPM,
    },
    'author': 'Thomas P. Robitaille',
    'author_email': 'thomas.robitaille@gmail.com',
    'url': 'https://github.com/astrofrog/pywwt_web',
    'keywords': [
        'ipython',
        'jupyter',
        'widgets',
    ],
    'classifiers': [
        'Development Status :: 4 - Beta',
        'Framework :: IPython',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Topic :: Multimedia :: Graphics',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ]
}

setup(**setup_args)
"thomas.robitaille@gmail.com"
] | thomas.robitaille@gmail.com |
3d3bc331ab73351b87c588a6cf3ba8fd4c1b2615 | 036a41c913b3a4e7ae265e22a672dd89302d3200 | /0701-0800/0753/0753_Python_1.py | d567ce2e39bf38d32a69b1d68ed59a8f1b2f5b4c | [] | no_license | ChangxingJiang/LeetCode | e76f96ebda68d7ade53575354479cfc33ad4f627 | a2209206cdd7229dd33e416f611e71a984a8dd9e | refs/heads/master | 2023-04-13T15:23:35.174390 | 2021-04-24T05:54:14 | 2021-04-24T05:54:14 | 272,088,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | class Solution:
def crackSafe(self, n: int, k: int) -> str:
visited = set()
def dfs(s1):
visited.add(s1)
for v in range(k):
s2 = s1[1:] + str(v)
if s2 not in visited:
dfs(s2)
stack.append(s1[0])
# 处理其他进制的情况
stack = ["0" * (n - 1)]
dfs("0" * n)
return "".join(stack[::-1])
if __name__ == "__main__":
    solver = Solution()
    # Expected output, per the original inline notes:
    # 0, 01, 00110, 0011101000
    for args in ((1, 1), (1, 2), (2, 2), (3, 2)):
        print(solver.crackSafe(*args))
| [
"1278729001@qq.com"
] | 1278729001@qq.com |
6d5cafe13cb9ef0722a1efcabebe9b56f3fa71b8 | bfda3af75d94767a5cb265bd68c17cfbf94e3ee1 | /rabbithole/zombit_infection/solution.py | 89fa08b0dd806b896d3f205eee6feea9361d0b2f | [] | no_license | orenlivne/euler | d0e5b956a46eacfe423fbd6c52918beb91eea140 | 2afdd8bccdc5789c233e955b1ca626cea618eb9b | refs/heads/master | 2020-12-29T02:24:36.479708 | 2016-12-15T21:27:33 | 2016-12-15T21:27:33 | 20,263,482 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,420 | py | #-------------------------------------------------------------------------------
# Rabbit hole - zombit_infection problem
#-------------------------------------------------------------------------------
INFECTED = -1
MAX_STRENGTH = 10000


class Population(object):
    """Grid of zombit strengths in which an infection spreads to any of the
    four orthogonal neighbours whose strength is <= the virus strength.

    Internally the grid is stored with a one-cell border of dummy entries so
    neighbour lookups never need explicit bounds checks.
    """

    # Strictly greater than every legal strength (assumed <= MAX_STRENGTH),
    # so a border cell can never satisfy ``value <= strength``.
    __DUMMY = MAX_STRENGTH + 1

    def __init__(self, population):
        # ``population`` is a rectangular list of lists of ints.
        self.__population = Population.__pad(population)

    def __eq__(self, other):
        return self.__population == other.__population

    def __ne__(self, other):
        return self.__population != other.__population

    def __repr__(self):
        return self.population().__repr__()

    def copy(self):
        """Return a deep copy of the 2D population array (rows are copied)."""
        other = Population([[0]])
        other.__population = [row[:] for row in self.__population]
        return other

    def population(self):
        """Return the grid with the dummy border stripped off."""
        return [row[1:-1] for row in self.__population[1:-1]]

    def attempt_to_infect(self, i, j, strength):
        # Shift by one to account for the dummy border padding.
        self.__attempt_to_infect(i + 1, j + 1, strength)

    def spread_infection(self, strength):
        """Run one spreading pass over every cell of the (padded) grid."""
        n, m = self.__size()
        # ``range`` (not the Python-2-only ``xrange``) keeps this module
        # importable under Python 3; behaviour is otherwise identical.
        for i in range(1, n):
            for j in range(1, m):
                self.__spread_infection_at(i, j, strength)

    def __spread_infection_at(self, i, j, strength):
        p = self.__population
        if p[i][j] == INFECTED:
            self.__attempt_to_infect(i - 1, j, strength)
            self.__attempt_to_infect(i + 1, j, strength)
            self.__attempt_to_infect(i, j - 1, strength)
            self.__attempt_to_infect(i, j + 1, strength)

    def __attempt_to_infect(self, i, j, strength):
        if self.__population[i][j] <= strength:
            self.__population[i][j] = INFECTED

    @staticmethod
    def __pad(population):
        """Surround ``population`` with a one-cell border of dummy entries.

        BUG FIX: the original built the top/bottom border rows with width
        ``m`` (the unpadded width) while the middle rows have width
        ``m + 2``.  Because ``__size`` reads the grid width from the short
        first row, the spreading loops never visited the last inner column,
        so an infected cell there could never spread to its neighbours.
        Padding the border rows to the full ``m + 2`` width fixes that.
        """
        m = len(population[0])
        d = Population.__DUMMY
        return ([[d] * (m + 2)]
                + [[d] + row + [d] for row in population]
                + [[d] * (m + 2)])

    def __size(self):
        return len(self.__population), len(self.__population[0])
def answer(population, x, y, strength):
    """Infect cell (x, y) if possible and spread until a fixed point.

    Returns the final grid, with infected cells marked as INFECTED (-1).
    Note the coordinate order: ``x`` is the column, ``y`` the row.
    """
    grid = Population(population)
    snapshot = grid.copy()
    grid.attempt_to_infect(y, x, strength)
    # Keep spreading until a full pass changes nothing.
    while grid != snapshot:
        snapshot = grid.copy()
        grid.spread_infection(strength)
    return grid.population()
if __name__ == '__main__':
    # Sanity checks against the reference examples shipped with the
    # "zombit_infection" challenge.
    assert answer([[1, 2, 3], [2, 3, 4], [3, 2, 1]], 0, 0, 2) == [[-1, -1, 3], [-1, 3, 4], [3, 2, 1]]
    assert answer([[6, 7, 2, 7, 6], [6, 3, 1, 4, 7], [0, 2, 4, 1, 10], [8, 1, 1, 4, 9], [8, 7, 4, 9, 9]], 2, 1, 5) == [[6, 7, -1, 7, 6], [6, -1, -1, -1, 7], [-1, -1, -1, -1, 10], [8, -1, -1, -1, 9], [8, 7, -1, 9, 9]]
| [
"oren.livne@gmail.com"
] | oren.livne@gmail.com |
f6e37e100ed84bec7cb2e06e599b1549c0149ecc | 1061216c2c33c1ed4ffb33e6211565575957e48f | /python-legacy/openapi_client/models/messages.py | f0fa073aa3db330847d3428292d600d7d9399889 | [] | no_license | MSurfer20/test2 | be9532f54839e8f58b60a8e4587348c2810ecdb9 | 13b35d72f33302fa532aea189e8f532272f1f799 | refs/heads/main | 2023-07-03T04:19:57.548080 | 2021-08-11T19:16:42 | 2021-08-11T19:16:42 | 393,920,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,567 | py | # coding: utf-8
"""
Zulip REST API
Powerful open source group chat # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class Messages(object):
    """OpenAPI model for a Zulip message.

    This class was originally emitted by openapi-generator as ~350 lines of
    copy-pasted getter/setter boilerplate.  Every one of the 19 generated
    properties was a plain passthrough to a ``_<name>`` backing attribute,
    so the properties are now installed programmatically (see
    ``_passthrough_property`` below the class body).  The public interface —
    constructor signature, ``openapi_types``, ``attribute_map``, the
    properties themselves, ``to_dict``/``to_str``/``__repr__``/``__eq__``/
    ``__ne__`` — is unchanged.
    """

    # Attribute name -> OpenAPI type (everything is ``object`` in this spec).
    openapi_types = {
        'avatar_url': 'object',
        'client': 'object',
        'content': 'object',
        'content_type': 'object',
        'display_recipient': 'object',
        'id': 'object',
        'is_me_message': 'object',
        'reactions': 'object',
        'recipient_id': 'object',
        'sender_email': 'object',
        'sender_full_name': 'object',
        'sender_id': 'object',
        'sender_realm_str': 'object',
        'stream_id': 'object',
        'subject': 'object',
        'topic_links': 'object',
        'submessages': 'object',
        'timestamp': 'object',
        'type': 'object'
    }

    # Attribute name -> JSON key in the API payload (identity mapping here).
    attribute_map = {
        'avatar_url': 'avatar_url',
        'client': 'client',
        'content': 'content',
        'content_type': 'content_type',
        'display_recipient': 'display_recipient',
        'id': 'id',
        'is_me_message': 'is_me_message',
        'reactions': 'reactions',
        'recipient_id': 'recipient_id',
        'sender_email': 'sender_email',
        'sender_full_name': 'sender_full_name',
        'sender_id': 'sender_id',
        'sender_realm_str': 'sender_realm_str',
        'stream_id': 'stream_id',
        'subject': 'subject',
        'topic_links': 'topic_links',
        'submessages': 'submessages',
        'timestamp': 'timestamp',
        'type': 'type'
    }

    def __init__(self, avatar_url=None, client=None, content=None, content_type=None, display_recipient=None, id=None, is_me_message=None, reactions=None, recipient_id=None, sender_email=None, sender_full_name=None, sender_id=None, sender_realm_str=None, stream_id=None, subject=None, topic_links=None, submessages=None, timestamp=None, type=None, local_vars_configuration=None):  # noqa: E501
        """Messages - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration.get_default_copy()
        self.local_vars_configuration = local_vars_configuration
        self.discriminator = None
        # Route every value through its property setter so it lands in the
        # ``_<name>`` backing slot, exactly as the generated code did.
        self.avatar_url = avatar_url
        self.client = client
        self.content = content
        self.content_type = content_type
        self.display_recipient = display_recipient
        self.id = id
        self.is_me_message = is_me_message
        self.reactions = reactions
        self.recipient_id = recipient_id
        self.sender_email = sender_email
        self.sender_full_name = sender_full_name
        self.sender_id = sender_id
        self.sender_realm_str = sender_realm_str
        self.stream_id = stream_id
        self.subject = subject
        self.topic_links = topic_links
        self.submessages = submessages
        self.timestamp = timestamp
        self.type = type

    def to_dict(self, serialize=False):
        """Returns the model properties as a dict.

        With ``serialize=True`` the keys come from ``attribute_map`` (the
        wire names); otherwise the Python attribute names are used.
        """
        result = {}

        def convert(x):
            # Recurse into nested models; pass plain values through.
            if hasattr(x, "to_dict"):
                args = getfullargspec(x.to_dict).args
                if len(args) == 1:
                    return x.to_dict()
                else:
                    return x.to_dict(serialize)
            else:
                return x

        # Plain ``dict.items()`` replaces the generated ``six.iteritems``;
        # it behaves identically on both Python 2 and Python 3.
        for attr in self.openapi_types:
            value = getattr(self, attr)
            attr = self.attribute_map.get(attr, attr) if serialize else attr
            if isinstance(value, list):
                result[attr] = [convert(item) for item in value]
            elif isinstance(value, dict):
                result[attr] = dict(
                    (key, convert(val)) for key, val in value.items()
                )
            else:
                result[attr] = convert(value)

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Messages):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, Messages):
            return True

        return self.to_dict() != other.to_dict()


def _passthrough_property(name):
    """Build the plain get/set property the generator used to spell out.

    Each property simply reads and writes the ``_<name>`` backing attribute,
    matching the generated getter/setter pairs byte-for-byte in behaviour.
    """
    backing = '_' + name

    def _get(self):
        return getattr(self, backing)

    def _set(self, value):
        setattr(self, backing, value)

    return property(_get, _set, doc="The %s of this Messages." % name)


# Install one passthrough property per model attribute.  This runs at import
# time, before any instance is constructed.
for _attr_name in Messages.openapi_types:
    setattr(Messages, _attr_name, _passthrough_property(_attr_name))
del _attr_name
| [
"suyash.mathur@research.iiit.ac.in"
] | suyash.mathur@research.iiit.ac.in |
5c627dc5216d922fdc4254bb62414d3558ec12db | 3a9f2b3d79cf214704829427ee280f4b49dca70a | /saigon/rat/RuckusAutoTest/scripts/zd/ats_ZD_Palo_Alto_32WLANs_Integration_With_Specific_APs.py | 2d9f389c80118284cc0d9e9f02b44988b22e74c2 | [] | no_license | jichunwei/MyGitHub-1 | ae0c1461fe0a337ef459da7c0d24d4cf8d4a4791 | f826fc89a030c6c4e08052d2d43af0b1b4b410e3 | refs/heads/master | 2021-01-21T10:19:22.900905 | 2016-08-20T03:34:52 | 2016-08-20T03:34:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,472 | py | import sys, time
import random
import libZD_TestSuite as testsuite
from RuckusAutoTest.common import lib_KwList as kwlist
from RuckusAutoTest.common import lib_Constant as const
def tcid(tcid, ap_model_id, ap_role_id):
    """Format a test-case id string: "TCID:14.02.<case>.<model>.<role>"."""
    prefix = "TCID:%s" % 14.02
    return "%s.%02d.%s.%s" % (prefix, tcid, ap_model_id, ap_role_id)
def defineTestConfiguration(target_station_1, target_station_2, ap_sym_dict, ap_conn_mode):
test_cfgs = []
target_ip = '172.126.0.253'
wlan_profile_set = 'set_of_32_open_none_wlans'
wlan_profile_set_for_guest = 'set_of_32_open_none_wlans'
print '---------------------------------------------------'
print 'Please pick up the Root APs to test: '
active_root_ap_list = testsuite.getActiveAp(ap_sym_dict)
print '---------------------------------------------------\n'
print '---------------------------------------------------'
print 'Please pick up the Mesh APs to test: '
active_mesh_ap_list = testsuite.getActiveAp(ap_sym_dict)
print '---------------------------------------------------'
if ap_conn_mode =='l3':
test_name = 'ZD_MultiWlans_ZeroIT_Integration'
for rootAP in active_root_ap_list:
active_ap_conf = ap_sym_dict[rootAP]
ap_model_id = const.get_ap_model_id(active_ap_conf['model'])
ap_role_id = const.get_ap_role_by_status(active_ap_conf['status'])
ap_type = active_ap_conf['model']
common_name = '%s + VLAN + ZeroIT + L3LWAPP + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP,
'vlan_id':'2', 'ip':'20.0.2.252/255.255.255.0'},
test_name, common_name, tcid(12, ap_model_id, ap_role_id)))
common_name = '%s + Tunnel + ZeroIT + L3LWAPP + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP,
'do_tunnel': True, 'ip':'192.168.33.0'},
test_name, common_name, tcid(14, ap_model_id, ap_role_id)))
for meshAP in active_root_ap_list:
active_ap_conf = ap_sym_dict[meshAP]
ap_model_id = const.get_ap_model_id(active_ap_conf['model'])
ap_role_id = const.get_ap_role_by_status(active_ap_conf['status'])
ap_type = active_ap_conf['model']
common_name = '%s + VLAN + ZeroIT + L3LWAPP + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP,
'vlan_id':'2', 'ip':'20.0.2.252/255.255.255.0'},
test_name, common_name, tcid(13, ap_model_id, ap_role_id)))
common_name = '%s + Tunnel + ZeroIT + L3LWAPP + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP,
'do_tunnel': True, 'ip':'192.168.33.0'},
test_name, common_name, tcid(15, ap_model_id, ap_role_id)))
return test_cfgs
for rootAP in active_root_ap_list:
active_ap_conf = ap_sym_dict[rootAP]
ap_model_id = const.get_ap_model_id(active_ap_conf['model'])
ap_role_id = const.get_ap_role_by_status(active_ap_conf['status'])
ap_type = active_ap_conf['model']
test_name = 'ZD_MultiWlans_Isolation_Integration'
common_name = '%s + Isolation + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station_1': target_station_1, 'target_station_2': target_station_2,
'wlan_config_set':wlan_profile_set, 'active_ap':rootAP},
test_name, common_name, tcid(2, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_ACL_Integration'
common_name = '%s + ACL + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP},
test_name, common_name, tcid(4, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_Rate_Limit_Integration'
common_name = '%s + Rate Limit + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP},
test_name, common_name, tcid(6, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_ZeroIT_Integration'
common_name = '%s + VLAN + ZeroIT + L2LWAPP + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP,
'vlan_id':'2', 'ip':'20.0.2.252/255.255.255.0'},
test_name, common_name, tcid(8, ap_model_id, ap_role_id)))
common_name = '%s + Tunnel + ZeroIT + L2LWAPP + Root AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':rootAP,
'do_tunnel': True, 'ip':'192.168.0.0'},
test_name, common_name, tcid(10, ap_model_id, ap_role_id)))
for meshAP in active_root_ap_list:
active_ap_conf = ap_sym_dict[meshAP]
ap_model_id = const.get_ap_model_id(active_ap_conf['model'])
ap_role_id = const.get_ap_role_by_status(active_ap_conf['status'])
ap_type = active_ap_conf['model']
test_name = 'ZD_MultiWlans_Isolation_Integration'
common_name = '%s + Isolation + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station_1': target_station_1, 'target_station_2': target_station_2,
'wlan_config_set':wlan_profile_set, 'active_ap':meshAP},
test_name, common_name, tcid(3, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_ACL_Integration'
common_name = '%s + ACL + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP},
test_name, common_name, tcid(5, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_Rate_Limit_Integration'
common_name = '%s + Rate Limit + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP},
test_name, common_name, tcid(7, ap_model_id, ap_role_id)))
test_name = 'ZD_MultiWlans_ZeroIT_Integration'
common_name = '%s + VLAN + ZeroIT + L2LWAPP + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP,
'vlan_id':'2', 'ip':'20.0.2.252/255.255.255.0'},
test_name, common_name, tcid(9, ap_model_id, ap_role_id)))
common_name = '%s + Tunnel + ZeroIT + L2LWAPP + Mesh AP %s' % (wlan_profile_set, ap_type)
test_cfgs.append(({'target_station': target_station_1, 'wlan_config_set':wlan_profile_set, 'active_ap':meshAP,
'do_tunnel': True, 'ip':'192.168.0.0'},
test_name, common_name, tcid(11, ap_model_id, ap_role_id)))
return test_cfgs
def make_test_suite(**kwargs):
    """Interactively build and register the '32 WLANs - Integration' suite.

    Python 2 script: prompts the operator for the testbed, two target
    stations and the AP connection mode, then adds each generated test
    case to the test suite database.
    """
    #tbi = getTestbed(**kwargs)
    #tb_cfg = testsuite.getTestbedConfig(tbi)
    tb = testsuite.getTestbed2(**kwargs)
    tbcfg = testsuite.getTestbedConfig(tb)
    sta_ip_list = tbcfg['sta_ip_list']
    ap_sym_dict = tbcfg['ap_sym_dict']

    # Two stations are needed so the Isolation cases can verify that
    # clients on isolated WLANs cannot reach each other.
    target_sta_1 = testsuite.getTargetStation(sta_ip_list, "Pick a wireless station: ")
    #print 'Pick up another station for Isolation testing'
    target_sta_2 = testsuite.getTargetStation(sta_ip_list, "Pick another wireless station for Isolation testing: ")

    # Always test the first and last WLAN (0 and 31) plus four distinct
    # randomly chosen WLAN indexes from 1..30.
    tested_wlan_list = [0,31]
    for i in range(4):
        index = random.randint(1,30)
        while index in tested_wlan_list:
            # Re-draw on collision; the sleep reseeds wall-clock entropy
            # between attempts (harmless but unnecessary with random()).
            index = random.randint(1,30)
            time.sleep(0.1)
        tested_wlan_list.append(index)
    tested_wlan_list.sort()

    ap_conn_mode = ''
    while ap_conn_mode not in ['l2', 'l3']:
        ap_conn_mode = raw_input('Please select the connection mode of APs in your testbed (l2/l3): ')

    ts_name = '32 WLANs - Integration'
    test_cfgs = defineTestConfiguration(target_sta_1, target_sta_2, ap_sym_dict, ap_conn_mode)
    ts = testsuite.get_testsuite(ts_name, 'Verify 32 WLANs Integration')

    test_order = 1
    test_added = 0
    # NOTE: the loop variable ``tcid`` shadows the module-level tcid()
    # helper inside this loop body.
    for test_params, test_name, common_name, tcid in test_cfgs:
        cname = "%s - %s" % (tcid, common_name)
        # Every test case shares the same randomly chosen WLAN subset.
        test_params['tested_wlan_list'] = tested_wlan_list
        if testsuite.addTestCase(ts, test_name, cname, test_params, test_order) > 0:
            test_added += 1
            test_order += 1
            print "Add test case with test_name: %s\n\tcommon_name: %s" % (test_name, cname)
    print "\n-- Summary: added %d test cases into test suite '%s'" % (test_added, ts.name)
if __name__ == "__main__":
    # Parse "key=value" command-line arguments into a dict and build the suite.
    _dict = kwlist.as_dict( sys.argv[1:] )
    make_test_suite(**_dict)
| [
"tan@xx.com"
] | tan@xx.com |
280b56265c17385bb306b00dd2eac116091880da | 632099ac0d895943cbbeb9048a2cdfcd21102411 | /LV2_LX2_LC2_LD2/FaderfoxScript.py | ab0b79d02ffebba36ac72ee6d9b6a6dc837d6ef1 | [] | no_license | Toniigor/AbletonLive9_RemoteScripts | 7f4bbf759a79629584413f6d1797005e8cd7f2ff | fed1e5ee61ea12ea6360107a65a6e666364353ff | refs/heads/master | 2021-01-16T21:19:25.330221 | 2014-06-06T12:33:03 | 2014-06-06T12:33:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,154 | py | #Embedded file name: /Users/versonator/Jenkins/live/Projects/AppLive/Resources/MIDI Remote Scripts/LV2_LX2_LC2_LD2/FaderfoxScript.py
import Live
from consts import *
import sys
from FaderfoxHelper import FaderfoxHelper
from ParamMap import ParamMap
from Devices import *
class FaderfoxScript:
__filter_funcs__ = ['update_display', 'log', 'song']
__module__ = __name__
__doc__ = 'Automap script for Faderfox controllers'
__version__ = 'V1.1'
__name__ = 'Generic Faderfox Script'
def __init__(self, c_instance):
self.suffix = ''
self.is_lv1 = False
FaderfoxScript.realinit(self, c_instance)
def realinit(self, c_instance):
self.c_instance = c_instance
self.helper = FaderfoxHelper(self)
self.param_map = ParamMap(self)
self.mixer_controller = None
self.device_controller = None
self.transport_controller = None
self.components = []
live = 'Live 6 & 7'
if self.is_live_5():
live = 'Live 5'
self.show_message(self.__name__ + ' ' + self.__version__ + ' for ' + live)
self.is_lv1 = False
def is_live_5(self):
return hasattr(Live, 'is_live_5')
def log(self, string):
pass
def logfmt(self, fmt, *args):
pass
def disconnect(self):
for c in self.components:
c.disconnect()
def application(self):
return Live.Application.get_application()
def song(self):
return self.c_instance.song()
def suggest_input_port(self):
return str('')
def suggest_output_port(self):
return str('')
def can_lock_to_devices(self):
return True
def lock_to_device(self, device):
if self.device_controller:
self.device_controller.lock_to_device(device)
def unlock_to_device(self, device):
if self.device_controller:
self.device_controller.unlock_from_device(device)
def set_appointed_device(self, device):
if self.device_controller:
self.device_controller.set_appointed_device(device)
def toggle_lock(self):
self.c_instance.toggle_lock()
def suggest_map_mode(self, cc_no, channel):
return Live.MidiMap.MapMode.absolute
def restore_bank(self, bank):
pass
def show_message(self, message):
if hasattr(self.c_instance, 'show_message'):
self.c_instance.show_message(message)
def instance_identifier(self):
return self.c_instance.instance_identifier()
def connect_script_instances(self, instanciated_scripts):
pass
def request_rebuild_midi_map(self):
self.c_instance.request_rebuild_midi_map()
def send_midi(self, midi_event_bytes):
self.c_instance.send_midi(midi_event_bytes)
def refresh_state(self):
for c in self.components:
c.refresh_state()
def build_midi_map(self, midi_map_handle):
self.log('script build midi map')
script_handle = self.c_instance.handle()
self.param_map.remove_mappings()
for c in self.components:
self.log('build midi map on %s' % c)
c.build_midi_map(script_handle, midi_map_handle)
def update_display(self):
for c in self.components:
c.update_display()
def receive_midi(self, midi_bytes):
channel = midi_bytes[0] & CHAN_MASK
status = midi_bytes[0] & STATUS_MASK
if status == CC_STATUS:
cc_no = midi_bytes[1]
cc_value = midi_bytes[2]
for c in self.components:
c.receive_midi_cc(channel, cc_no, cc_value)
self.param_map.receive_midi_cc(channel, cc_no, cc_value)
elif status == NOTEON_STATUS or status == NOTEOFF_STATUS:
note_no = midi_bytes[1]
note_vel = midi_bytes[2]
for c in self.components:
c.receive_midi_note(channel, status, note_no, note_vel)
self.param_map.receive_midi_note(channel, status, note_no, note_vel)
else:
raise False or AssertionError, 'Unknown MIDI message %s' % str(midi_bytes) | [
"julien@julienbayle.net"
] | julien@julienbayle.net |
60e5e0f2797c0695cd4072eff7577dd65a303961 | a0dda8be5892a390836e19bf04ea1d098e92cf58 | /视频+刷题/python3/匿名函数的应用.py | 540378ee420e3f304633dae548a39b13d84afdac | [] | no_license | wmm98/homework1 | d9eb67c7491affd8c7e77458ceadaf0357ea5e6b | cd1f7f78e8dbd03ad72c7a0fdc4a8dc8404f5fe2 | refs/heads/master | 2020-04-14T19:22:21.733111 | 2019-01-08T14:09:58 | 2019-01-08T14:09:58 | 164,055,018 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | '''用匿名函数实现多个功能'''
'''
def sum(a, b, function):
result = function(a, b)
print(result)
sum(22, 11, lambda x, y: x + y)
sum(22, 11, lambda x, y: x - y)
'''
def num(a, b, func):
    """Apply ``func`` to ``(a, b)`` and print the returned value."""
    print(func(a, b))
# SECURITY: eval() executes arbitrary code typed by the user.  That is the
# whole point of this exercise (supplying a lambda at runtime), but never do
# this with untrusted input in a real program.
func_new = input("请输入一个新的匿名函数")
func_new = eval(func_new)  # turn the typed text into a callable object
num(11, 12, func_new)
| [
"792545884@qq.com"
] | 792545884@qq.com |
b9f8c48b18733c4773a4d947d22798aadc2d97fe | e9aa61aa74eb69f946a66b6ac5a90f12ec744295 | /tests/integration/test_resolve_command.py | d4b4d243f266411ac5114bfc3f14e42ef0a36bfa | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | abueide/lbry | 2c2bc35255f82fade22561d673b299c0df35dfea | 7f5deaf6c80422a30b3714d4bf12e028756ed9fe | refs/heads/master | 2021-01-21T12:58:41.093354 | 2019-04-12T12:58:00 | 2019-04-16T18:18:23 | 181,083,296 | 0 | 0 | MIT | 2019-04-12T21:04:56 | 2019-04-12T21:04:56 | null | UTF-8 | Python | false | false | 3,855 | py | import json
from lbrynet.testcase import CommandTestCase
class ResolveCommand(CommandTestCase):
async def test_resolve(self):
tx = await self.channel_create('@abc', '0.01')
channel_id = tx['outputs'][0]['claim_id']
# resolving a channel @abc
response = await self.resolve('lbry://@abc')
self.assertSetEqual({'lbry://@abc'}, set(response))
self.assertIn('certificate', response['lbry://@abc'])
self.assertNotIn('claim', response['lbry://@abc'])
self.assertEqual(response['lbry://@abc']['certificate']['name'], '@abc')
self.assertEqual(response['lbry://@abc']['claims_in_channel'], 0)
await self.stream_create('foo', '0.01', channel_id=channel_id)
await self.stream_create('foo2', '0.01', channel_id=channel_id)
# resolving a channel @abc with some claims in it
response = await self.resolve('lbry://@abc')
self.assertSetEqual({'lbry://@abc'}, set(response))
self.assertIn('certificate', response['lbry://@abc'])
self.assertNotIn('claim', response['lbry://@abc'])
self.assertEqual(response['lbry://@abc']['certificate']['name'], '@abc')
self.assertEqual(response['lbry://@abc']['claims_in_channel'], 2)
# resolving claim foo within channel @abc
response = await self.resolve('lbry://@abc/foo')
self.assertSetEqual({'lbry://@abc/foo'}, set(response))
claim = response['lbry://@abc/foo']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
# resolving claim foo by itself
response = await self.resolve('lbry://foo')
self.assertSetEqual({'lbry://foo'}, set(response))
claim = response['lbry://foo']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
# resolving from the given permanent url
new_response = await self.resolve(claim['claim']['permanent_url'])
self.assertEqual(new_response[claim['claim']['permanent_url']], claim)
# resolving multiple at once
response = await self.resolve(['lbry://foo', 'lbry://foo2'])
self.assertSetEqual({'lbry://foo', 'lbry://foo2'}, set(response))
claim = response['lbry://foo2']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo2')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
# resolve has correct depth
tx_details = await self.blockchain.get_raw_transaction(claim['claim']['txid'])
self.assertEqual(claim['claim']['depth'], json.loads(tx_details)['confirmations'])
# resolve handles invalid data
txid = await self.blockchain_claim_name("gibberish", "cafecafe", "0.1")
await self.generate(1)
response = await self.resolve("lbry://gibberish")
self.assertSetEqual({'lbry://gibberish'}, set(response))
claim = response['lbry://gibberish']['claim']
self.assertEqual(claim['name'], 'gibberish')
self.assertEqual(claim['hex'], 'cafecafe')
self.assertFalse(claim['decoded_claim'])
self.assertEqual(claim['txid'], txid)
self.assertEqual(claim['effective_amount'], "0.1")
| [
"lex@damoti.com"
] | lex@damoti.com |
c1fb96119dc8b430cc8beef382d077d4d77e4f87 | d490148c6ab81d83522c6bda712b8cf226e28f75 | /trio/_highlevel_socket.py | 903f5ecabb0bb5a528d9b896cee1ed837801eea4 | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | mjpieters/trio | e25e271c2465606b6b7eccb157d920e50b9e8c76 | f5e6cf36af66a20f2e09729004817064e01c444f | refs/heads/master | 2021-09-04T08:56:29.064686 | 2018-01-12T06:38:03 | 2018-01-12T06:38:03 | 117,841,928 | 0 | 0 | null | 2018-01-17T13:44:47 | 2018-01-17T13:44:47 | null | UTF-8 | Python | false | false | 14,437 | py | # "High-level" networking interface
import errno
from contextlib import contextmanager
from . import _core
from . import socket as tsocket
from ._socket import real_socket_type
from ._util import ConflictDetector
from .abc import HalfCloseableStream, Listener
from ._highlevel_generic import (
ClosedStreamError, BrokenStreamError, ClosedListenerError
)
__all__ = ["SocketStream", "SocketListener"]
_closed_stream_errnos = {
# Unix
errno.EBADF,
# Windows
errno.ENOTSOCK,
}
@contextmanager
def _translate_socket_errors_to_stream_errors():
    """Re-raise ``OSError``\\s from socket operations as stream errors.

    An ``OSError`` whose errno indicates an already-closed socket becomes
    :exc:`ClosedStreamError`; any other ``OSError`` becomes a
    :exc:`BrokenStreamError` chained to the original exception.
    """
    try:
        yield
    except OSError as exc:
        if exc.errno in _closed_stream_errnos:
            # The OSError itself carries no extra information here, so
            # suppress it from the traceback chain.
            raise ClosedStreamError("this socket was already closed") from None
        raise BrokenStreamError(
            "socket connection broken: {}".format(exc)
        ) from exc
class SocketStream(HalfCloseableStream):
    """An implementation of the :class:`trio.abc.HalfCloseableStream`
    interface based on a raw network socket.
    Args:
      socket: The trio socket object to wrap. Must have type ``SOCK_STREAM``,
          and be connected.
    By default, :class:`SocketStream` enables ``TCP_NODELAY``, and (on
    platforms where it's supported) enables ``TCP_NOTSENT_LOWAT`` with a
    reasonable buffer size (currently 16 KiB) – see `issue #72
    <https://github.com/python-trio/trio/issues/72>`__ for discussion. You can
    of course override these defaults by calling :meth:`setsockopt`.
    Once a :class:`SocketStream` object is constructed, it implements the full
    :class:`trio.abc.HalfCloseableStream` interface. In addition, it provides
    a few extra features:
    .. attribute:: socket
       The Trio socket object that this stream wraps.
    """

    def __init__(self, socket):
        # Validate eagerly: only connected SOCK_STREAM trio sockets are
        # accepted, so misuse fails at construction time.
        if not isinstance(socket, tsocket.SocketType):
            raise TypeError("SocketStream requires trio socket object")
        if real_socket_type(socket.type) != tsocket.SOCK_STREAM:
            raise ValueError("SocketStream requires a SOCK_STREAM socket")
        try:
            # getpeername() only succeeds on a connected socket; we use it
            # purely as a connectedness probe and discard the result.
            socket.getpeername()
        except OSError:
            err = ValueError("SocketStream requires a connected socket")
            raise err from None
        self.socket = socket
        # Guards against two tasks calling send-side methods concurrently.
        self._send_conflict_detector = ConflictDetector(
            "another task is currently sending data on this SocketStream"
        )
        # Socket defaults:
        # Not supported on e.g. unix domain sockets
        try:
            self.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, True)
        except OSError:
            pass
        if hasattr(tsocket, "TCP_NOTSENT_LOWAT"):
            try:
                # 16 KiB is pretty arbitrary and could probably do with some
                # tuning. (Apple is also setting this by default in CFNetwork
                # apparently -- I'm curious what value they're using, though I
                # couldn't find it online trivially. CFNetwork-129.20 source
                # has no mentions of TCP_NOTSENT_LOWAT. This presentation says
                # "typically 8 kilobytes":
                # http://devstreaming.apple.com/videos/wwdc/2015/719ui2k57m/719/719_your_app_and_next_generation_networks.pdf?dl=1
                # ). The theory is that you want it to be bandwidth *
                # rescheduling interval.
                self.setsockopt(
                    tsocket.IPPROTO_TCP, tsocket.TCP_NOTSENT_LOWAT, 2**14
                )
            except OSError:
                pass

    async def send_all(self, data):
        """Send all of ``data``, blocking until every byte is accepted.

        Raises ClosedStreamError if EOF was already sent via send_eof().
        """
        if self.socket.did_shutdown_SHUT_WR:
            # Run a checkpoint before raising so the call still acts as a
            # checkpoint even on this failure path.
            await _core.checkpoint()
            raise ClosedStreamError("can't send data after sending EOF")
        with self._send_conflict_detector.sync:
            with _translate_socket_errors_to_stream_errors():
                # memoryview lets us slice off the already-sent prefix below
                # without copying the remaining data each time.
                with memoryview(data) as data:
                    if not data:
                        # Nothing to send, but still checkpoint so send_all
                        # always yields to the scheduler.
                        await _core.checkpoint()
                        return
                    total_sent = 0
                    # socket.send() may accept only part of the buffer, so
                    # loop until every byte has gone out.
                    while total_sent < len(data):
                        with data[total_sent:] as remaining:
                            sent = await self.socket.send(remaining)
                        total_sent += sent

    async def wait_send_all_might_not_block(self):
        """Block until the socket is writable (a send may not block)."""
        async with self._send_conflict_detector:
            # fileno() of -1 indicates the socket has been closed.
            if self.socket.fileno() == -1:
                raise ClosedStreamError
            with _translate_socket_errors_to_stream_errors():
                await self.socket.wait_writable()

    async def send_eof(self):
        """Shut down the sending half of the connection."""
        async with self._send_conflict_detector:
            # On MacOS, calling shutdown a second time raises ENOTCONN, but
            # send_eof needs to be idempotent.
            if self.socket.did_shutdown_SHUT_WR:
                return
            with _translate_socket_errors_to_stream_errors():
                self.socket.shutdown(tsocket.SHUT_WR)

    async def receive_some(self, max_bytes):
        """Receive and return up to ``max_bytes`` bytes from the socket."""
        if max_bytes < 1:
            # Checkpoint before raising so the call still acts as a
            # checkpoint even when the argument is invalid.
            await _core.checkpoint()
            raise ValueError("max_bytes must be >= 1")
        with _translate_socket_errors_to_stream_errors():
            return await self.socket.recv(max_bytes)

    async def aclose(self):
        """Close the stream and its underlying socket."""
        # close() never blocks; the explicit checkpoint keeps aclose()
        # behaving as a checkpoint like the other async methods.
        self.socket.close()
        await _core.checkpoint()

    # __aenter__, __aexit__ inherited from HalfCloseableStream are OK

    def setsockopt(self, level, option, value):
        """Set an option on the underlying socket.
        See :meth:`socket.socket.setsockopt` for details.
        """
        return self.socket.setsockopt(level, option, value)

    def getsockopt(self, level, option, buffersize=0):
        """Check the current value of an option on the underlying socket.
        See :meth:`socket.socket.getsockopt` for details.
        """
        # This is to work around
        # https://bitbucket.org/pypy/pypy/issues/2561
        # We should be able to drop it when the next PyPy3 beta is released.
        if buffersize == 0:
            return self.socket.getsockopt(level, option)
        else:
            return self.socket.getsockopt(level, option, buffersize)
################################################################
# SocketListener
################################################################
# Accept error handling
# =====================
#
# Literature review
# -----------------
#
# Here's a list of all the possible errors that accept() can return, according
# to the POSIX spec or the Linux, FreeBSD, MacOS, and Windows docs:
#
# Can't happen with a trio socket:
# - EAGAIN/(WSA)EWOULDBLOCK
# - EINTR
# - WSANOTINITIALISED
# - WSAEINPROGRESS: a blocking call is already in progress
# - WSAEINTR: someone called WSACancelBlockingCall, but we don't make blocking
# calls in the first place
#
# Something is wrong with our call:
# - EBADF: not a file descriptor
# - (WSA)EINVAL: socket isn't listening, or (Linux, BSD) bad flags
# - (WSA)ENOTSOCK: not a socket
# - (WSA)EOPNOTSUPP: this kind of socket doesn't support accept
# - (Linux, FreeBSD, Windows) EFAULT: the sockaddr pointer points to readonly
# memory
#
# Something is wrong with the environment:
# - (WSA)EMFILE: this process hit its fd limit
# - ENFILE: the system hit its fd limit
# - (WSA)ENOBUFS, ENOMEM: unspecified memory problems
#
# Something is wrong with the connection we were going to accept. There's a
# ton of variability between systems here:
# - ECONNABORTED: documented everywhere, but apparently only the BSDs do this
# (signals a connection was closed/reset before being accepted)
# - EPROTO: unspecified protocol error
# - (Linux) EPERM: firewall rule prevented connection
# - (Linux) ENETDOWN, EPROTO, ENOPROTOOPT, EHOSTDOWN, ENONET, EHOSTUNREACH,
# EOPNOTSUPP, ENETUNREACH, ENOSR, ESOCKTNOSUPPORT, EPROTONOSUPPORT,
# ETIMEDOUT, ... or any other error that the socket could give, because
# apparently if an error happens on a connection before it's accept()ed,
# Linux will report that error from accept().
# - (Windows) WSAECONNRESET, WSAENETDOWN
#
#
# Code review
# -----------
#
# What do other libraries do?
#
# Twisted on Unix or when using nonblocking I/O on Windows:
# - ignores EPERM, with comment about Linux firewalls
# - logs and ignores EMFILE, ENOBUFS, ENFILE, ENOMEM, ECONNABORTED
# Comment notes that ECONNABORTED is a BSDism and that Linux returns the
# socket before having it fail, and MacOS just silently discards it.
# - other errors are raised, which is logged + kills the socket
# ref: src/twisted/internet/tcp.py, Port.doRead
#
# Twisted using IOCP on Windows:
# - logs and ignores all errors
# ref: src/twisted/internet/iocpreactor/tcp.py, Port.handleAccept
#
# Tornado:
# - ignore ECONNABORTED (comments notes that it was observed on FreeBSD)
# - everything else raised, but all this does (by default) is cause it to be
# logged and then ignored
# (ref: tornado/netutil.py, tornado/ioloop.py)
#
# libuv on Unix:
# - ignores ECONNABORTED
# - does a "trick" for EMFILE or ENFILE
# - all other errors passed to the connection_cb to be handled
# (ref: src/unix/stream.c:uv__server_io, uv__emfile_trick)
#
# libuv on Windows:
# src/win/tcp.c:uv_tcp_queue_accept
# this calls AcceptEx, and then arranges to call:
# src/win/tcp.c:uv_process_tcp_accept_req
# this gets the result from AcceptEx. If the original AcceptEx call failed,
# then "we stop accepting connections and report this error to the
# connection callback". I think this is for things like ENOTSOCK. If
# AcceptEx successfully queues an overlapped operation, and then that
# reports an error, it's just discarded.
#
# asyncio, selector mode:
# - ignores EWOULDBLOCK, EINTR, ECONNABORTED
# - on EMFILE, ENFILE, ENOBUFS, ENOMEM, logs an error and then disables the
# listening loop for 1 second
# - everything else raises, but then the event loop just logs and ignores it
# (selector_events.py: BaseSelectorEventLoop._accept_connection)
#
#
# What should we do?
# ------------------
#
# When accept() returns an error, we can either ignore it or raise it.
#
# We have a long list of errors that should be ignored, and a long list of
# errors that should be raised. The big question is what to do with an error
# that isn't on either list. On Linux apparently you can get nearly arbitrary
# errors from accept() and they should be ignored, because it just indicates a
# socket that crashed before it began, and there isn't really anything to be
# done about this, plus on other platforms you may not get any indication at
# all, so programs have to tolerate not getting any indication too. OTOH if we
# get an unexpected error then it could indicate something arbitrarily bad --
# after all, it's unexpected.
#
# Given that we know that other libraries seem to be getting along fine with a
# fairly minimal list of errors to ignore, I think we'll be OK if we write
# down that list and then raise on everything else.
#
# The other question is what to do about the capacity problem errors: EMFILE,
# ENFILE, ENOBUFS, ENOMEM. Just flat out ignoring these is clearly not optimal
# -- at the very least you want to log them, and probably you want to take
# some remedial action. And if we ignore them then it prevents higher levels
# from doing anything clever with them. So we raise them.
# Errno names that ``accept`` may report about the *incoming connection*
# (rather than about our listening socket), per the literature review above.
# These are silently retried.
_ignorable_accept_errno_names = [
    # Linux can do this when a connection is denied by the firewall
    "EPERM",
    # BSDs with an early close/reset
    "ECONNABORTED",
    # All the other miscellany noted above -- may not happen in practice, but
    # whatever.
    "EPROTO",
    "ENETDOWN",
    "ENOPROTOOPT",
    "EHOSTDOWN",
    "ENONET",
    "EHOSTUNREACH",
    "EOPNOTSUPP",
    "ENETUNREACH",
    "ENOSR",
    "ESOCKTNOSUPPORT",
    "EPROTONOSUPPORT",
    "ETIMEDOUT",
    "ECONNRESET",
]

# Not all errnos are defined on all platforms, so filter with hasattr()
# instead of a try/except loop; the comprehension also avoids leaking a
# stray module-level loop variable.
_ignorable_accept_errnos = {
    getattr(errno, name)
    for name in _ignorable_accept_errno_names
    if hasattr(errno, name)
}
class SocketListener(Listener):
    """A :class:`~trio.abc.Listener` implementation that wraps a listening
    socket and hands out accepted connections as :class:`SocketStream`
    objects.
    Args:
      socket: The trio socket object to wrap. Must have type ``SOCK_STREAM``,
          and already be listening.
    The listener "takes ownership" of the socket it is given: closing the
    :class:`SocketListener` also closes the underlying socket.
    .. attribute:: socket
       The Trio socket object that this listener wraps.
    """

    def __init__(self, socket):
        # Validate eagerly so misuse fails at construction time rather than
        # on the first accept().
        if not isinstance(socket, tsocket.SocketType):
            raise TypeError("SocketListener requires trio socket object")
        if real_socket_type(socket.type) != tsocket.SOCK_STREAM:
            raise ValueError("SocketListener requires a SOCK_STREAM socket")
        try:
            is_listening = socket.getsockopt(
                tsocket.SOL_SOCKET, tsocket.SO_ACCEPTCONN
            )
        except OSError:
            # SO_ACCEPTCONN fails on MacOS; we just have to trust the user.
            is_listening = True
        if not is_listening:
            raise ValueError("SocketListener requires a listening socket")
        self.socket = socket

    async def accept(self):
        """Wait for and return the next incoming connection.
        Returns:
          :class:`SocketStream`
        Raises:
          OSError: if the underlying ``accept`` call fails with an
              unexpected error.
          ClosedListenerError: if you already closed the socket.
        Routine per-connection failures like ``ECONNABORTED`` are retried
        internally; all other errors propagate to the caller. In particular,
        no special effort is made to handle resource exhaustion errors such
        as ``EMFILE``, ``ENFILE``, ``ENOBUFS``, ``ENOMEM``.
        """
        while True:
            try:
                accepted_sock, _ = await self.socket.accept()
            except OSError as exc:
                if exc.errno in _closed_stream_errnos:
                    raise ClosedListenerError
                if exc.errno not in _ignorable_accept_errnos:
                    raise
                # Otherwise: a routine failure of one incoming connection;
                # loop around and try again.
            else:
                return SocketStream(accepted_sock)

    async def aclose(self):
        """Close this listener and its underlying socket."""
        try:
            self.socket.close()
        finally:
            # Always checkpoint, even if close() raised.
            await _core.checkpoint()
| [
"njs@pobox.com"
] | njs@pobox.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.