Dataset schema:

| Column | Dtype | Range / values |
|---|---|---|
| blob_id | string | lengths 40–40 |
| directory_id | string | lengths 40–40 |
| path | string | lengths 3–616 |
| content_id | string | lengths 40–40 |
| detected_licenses | list | lengths 0–112 |
| license_type | string | 2 classes |
| repo_name | string | lengths 5–115 |
| snapshot_id | string | lengths 40–40 |
| revision_id | string | lengths 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (nullable) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (nullable) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (nullable) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | lengths 3 – 10.2M |
| authors | list | lengths 1–1 |
| author_id | string | lengths 1–132 |

| blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a8bc57e7b9636358a469235f83d9e06dd7c1bf0e
|
3f18a27cfea243d24be2d9428afad3bbd0ad6ec2
|
/gcamp_analysis_files_finished/180222-08-bottom-experiment/src/delta_video_config.py
|
989e334e7465dcf08bc26f105263ad6a24c65454
|
[
"MIT"
] |
permissive
|
riffelllab/Mosquito-larvae-analyses-1
|
a4d7e8cd29b6481438798ed7b455a931f1f8c2b5
|
2701b1b2055d6ee1722898f0fa4e64a9b12e7b24
|
refs/heads/master
| 2023-01-19T04:18:56.894245
| 2020-10-16T19:01:08
| 2020-10-16T19:01:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 278
|
py
|
class Config:
def __init__(self):
self.basename = 'delta_video'
self.directory = '/home/eleanor/Documents/gcamp_analysis_files_temp/180222-08-bottom-experiment/data'
self.topics = ['/multi_tracker/1/delta_video',]
self.record_length_hours = 1
|
[
"tabletopwhale@outlook.com"
] |
tabletopwhale@outlook.com
|
62a7ddba3c3f9ec3e0cfaf00284c72841ec16e9d
|
687928e5bc8d5cf68d543005bb24c862460edcfc
|
/nssrc/com/citrix/netscaler/nitro/resource/config/ssl/sslpolicylabel_sslpolicy_binding.py
|
bb2b0192c9040431360e0a21ca878f47addcb79c
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
mbs91/nitro
|
c6c81665d6abd04de8b9f09554e5e8e541f4a2b8
|
be74e1e177f5c205c16126bc9b023f2348788409
|
refs/heads/master
| 2021-05-29T19:24:04.520762
| 2015-06-26T02:03:09
| 2015-06-26T02:03:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,488
|
py
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class sslpolicylabel_sslpolicy_binding(base_resource) :
""" Binding class showing the sslpolicy that can be bound to sslpolicylabel.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._gotopriorityexpression = ""
self._labeltype = ""
self._invoke_labelname = ""
self._labelname = ""
self._invoke = False
self.___count = 0
@property
def priority(self) :
"""Specifies the priority of the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""Specifies the priority of the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def gotopriorityexpression(self) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
return self._gotopriorityexpression
except Exception as e:
raise e
@gotopriorityexpression.setter
def gotopriorityexpression(self, gotopriorityexpression) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
self._gotopriorityexpression = gotopriorityexpression
except Exception as e:
raise e
@property
def policyname(self) :
"""Name of the SSL policy to bind to the policy label.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
"""Name of the SSL policy to bind to the policy label.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def labeltype(self) :
"""Type of policy label invocation.<br/>Possible values = vserver, service, policylabel.
"""
try :
return self._labeltype
except Exception as e:
raise e
@labeltype.setter
def labeltype(self, labeltype) :
"""Type of policy label invocation.<br/>Possible values = vserver, service, policylabel
"""
try :
self._labeltype = labeltype
except Exception as e:
raise e
@property
def labelname(self) :
"""Name of the SSL policy label to which to bind policies.
"""
try :
return self._labelname
except Exception as e:
raise e
@labelname.setter
def labelname(self, labelname) :
"""Name of the SSL policy label to which to bind policies.
"""
try :
self._labelname = labelname
except Exception as e:
raise e
@property
def invoke_labelname(self) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
return self._invoke_labelname
except Exception as e:
raise e
@invoke_labelname.setter
def invoke_labelname(self, invoke_labelname) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
self._invoke_labelname = invoke_labelname
except Exception as e:
raise e
@property
def invoke(self) :
"""Invoke policies bound to a policy label. After the invoked policies are evaluated, the flow returns to the policy with the next priority.
"""
try :
return self._invoke
except Exception as e:
raise e
@invoke.setter
def invoke(self, invoke) :
"""Invoke policies bound to a policy label. After the invoked policies are evaluated, the flow returns to the policy with the next priority.
"""
try :
self._invoke = invoke
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(sslpolicylabel_sslpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.sslpolicylabel_sslpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.labelname) :
return str(self.labelname)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = sslpolicylabel_sslpolicy_binding()
updateresource.labelname = resource.labelname
updateresource.policyname = resource.policyname
updateresource.gotopriorityexpression = resource.gotopriorityexpression
updateresource.invoke = resource.invoke
updateresource.labeltype = resource.labeltype
updateresource.invoke_labelname = resource.invoke_labelname
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [sslpolicylabel_sslpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].labelname = resource[i].labelname
updateresources[i].policyname = resource[i].policyname
updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
updateresources[i].invoke = resource[i].invoke
updateresources[i].labeltype = resource[i].labeltype
updateresources[i].invoke_labelname = resource[i].invoke_labelname
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = sslpolicylabel_sslpolicy_binding()
deleteresource.labelname = resource.labelname
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [sslpolicylabel_sslpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].labelname = resource[i].labelname
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, labelname) :
""" Use this API to fetch sslpolicylabel_sslpolicy_binding resources.
"""
try :
obj = sslpolicylabel_sslpolicy_binding()
obj.labelname = labelname
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, labelname, filter_) :
""" Use this API to fetch filtered set of sslpolicylabel_sslpolicy_binding resources.
Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = sslpolicylabel_sslpolicy_binding()
obj.labelname = labelname
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, labelname) :
""" Use this API to count sslpolicylabel_sslpolicy_binding resources configued on NetScaler.
"""
try :
obj = sslpolicylabel_sslpolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, labelname, filter_) :
""" Use this API to count the filtered set of sslpolicylabel_sslpolicy_binding resources.
Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = sslpolicylabel_sslpolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Labeltype:
vserver = "vserver"
service = "service"
policylabel = "policylabel"
class sslpolicylabel_sslpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.sslpolicylabel_sslpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.sslpolicylabel_sslpolicy_binding = [sslpolicylabel_sslpolicy_binding() for _ in range(length)]
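# A hypothetical usage sketch (not part of the original module), assuming an
# authenticated nitro_service session named `client` and an existing SSL
# policy label named "ssl_label_1":
#   bindings = sslpolicylabel_sslpolicy_binding.get(client, "ssl_label_1")
#   total = sslpolicylabel_sslpolicy_binding.count(client, "ssl_label_1")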
|
[
"bensassimaha@gmail.com"
] |
bensassimaha@gmail.com
|
20544d415fefef5a3cdeef196113798818d32e24
|
1588a1d601d29c18942d220657185d3bf7b17160
|
/programmers/level3/pro12979.py
|
9eedf137f30657867380e7152b1ebc68e0008148
|
[] |
no_license
|
geonwoomun/AlgorithmStudy
|
1f8148e981beebd2e6f70e65193ce445fa59df96
|
d43b624aad80f10d687a8f4b37cc79d88fc772b3
|
refs/heads/master
| 2020-08-01T05:24:26.980370
| 2020-07-15T05:04:35
| 2020-07-15T05:04:35
| 210,878,062
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,325
|
py
|
# Programmers: Base Station Installation
# I was stuck at around 50 points and couldn't get further on my own... so I referred to a blog post.
# I should train myself to think in simpler, more efficient ways..
import math
def solution(n, stations, w):
    result = 0
    distance = []
    for i in range(1, len(stations)):
        distance.append((stations[i] -w -1) - (stations[i-1] +w))
        # measure the uncovered gap between each station and the one before it.
    distance.append(stations[0] - w -1) # gap between the first station and the first apartment
    distance.append(n - (stations[-1] +w)) # gap between the last station and the last apartment
    # this way we measure every gap except the spots already covered by existing stations.
    # when w is 2, a station built in the middle covers a total span of 5.
    # if a gap is 6 or longer, at least one more station must be built.
    width = 2* w + 1 # the range one station's signal can cover
    for dist in distance: # check each gap.
        if(dist == 0): # if 0, no station is needed, so skip
            continue
        else :
            result += math.ceil(dist / width) # as explained above, ceil(dist / width) stations are needed for this gap.
    return result
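# Hypothetical sanity check (not part of the original solution), using the
# worked example from the problem statement: n=11, stations=[4, 11], w=1 -> 3.
if __name__ == "__main__":
    assert solution(11, [4, 11], 1) == 3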
|
[
"ansejrrhkd@naver.com"
] |
ansejrrhkd@naver.com
|
9d3ed49cac3834d5be43d76726bc795f44e0a3c5
|
2ff7e53d5e512cd762217ca54317982e07a2bb0c
|
/eve-8.51.857815/carbon/common/lib/aiming.py
|
0091bdb2195110fac1d6ee59e5cdd282d592b617
|
[] |
no_license
|
nanxijw/Clara-Pretty-One-Dick
|
66d3d69426642b79e8fd4cc8e0bec23adeeca6d6
|
50de3488a2140343c364efc2615cf6e67f152be0
|
refs/heads/master
| 2021-01-19T09:25:07.555284
| 2015-02-17T21:49:33
| 2015-02-17T21:49:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,536
|
py
|
#Embedded file name: carbon/common/lib\aiming.py
"""
Constants for the AI aiming system.
"""
AIMING_VALID_TARGET_GAZE_ID = 1
AIMING_VALID_TARGET_COMBAT_ID = 2
AIMING_CLIENTSERVER_FLAG_CLIENT = 1
AIMING_CLIENTSERVER_FLAG_SERVER = 2
AIMING_CLIENTSERVER_FLAG_BOTH = AIMING_CLIENTSERVER_FLAG_CLIENT | AIMING_CLIENTSERVER_FLAG_SERVER
AIMING_VALID_TARGETS = {AIMING_VALID_TARGET_GAZE_ID: (AIMING_VALID_TARGET_GAZE_ID,
'GazeTarget',
5.0,
AIMING_CLIENTSERVER_FLAG_BOTH),
AIMING_VALID_TARGET_COMBAT_ID: (AIMING_VALID_TARGET_COMBAT_ID,
'CombatTarget',
1.0,
AIMING_CLIENTSERVER_FLAG_BOTH)}
AIMING_VALID_TARGETS_FIELD_ID = 0
AIMING_VALID_TARGETS_FIELD_NAME = 1
AIMING_VALID_TARGETS_FIELD_RESELECTDELAY = 2
AIMING_VALID_TARGETS_FIELD_CLIENTSERVER_FLAG = 3
AIMING_CLIENTSERVER_FLAGS = {AIMING_CLIENTSERVER_FLAG_CLIENT: ' (Client only)',
AIMING_CLIENTSERVER_FLAG_SERVER: ' (Server only)',
AIMING_CLIENTSERVER_FLAG_BOTH: ' (Client & Server)'}
AIMING_COMPONENT_ENTITY_TYPE = 'entityType'
AIMING_ENTITY_TYPE_PC = 'PC'
AIMING_ENTITY_TYPE_NPC = 'NPC'
AIMING_ENTITY_TYPE_OBJECT = 'OBJECT'
AIMING_ENTITY_PC_TYPEID = 1
AIMING_ENTITY_NPC_TYPEID = 2
AIMING_ENTITY_OBJECT_TYPEID = 3
AIMING_ENTITY_TYPE_TO_ID = {AIMING_ENTITY_TYPE_PC: AIMING_ENTITY_PC_TYPEID,
AIMING_ENTITY_TYPE_NPC: AIMING_ENTITY_NPC_TYPEID,
AIMING_ENTITY_TYPE_OBJECT: AIMING_ENTITY_OBJECT_TYPEID}
|
[
"billchang.e@gmail.com"
] |
billchang.e@gmail.com
|
6ff33ad59dc302faa0e2bc760b2c46e5e5f389c5
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp-with-texts/HUAWEI-SWITCH-L2MAM-EXT-MIB.py
|
41d6ca43ecd8951b1d1366acb16a4a65062a8545
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395
| 2019-08-16T15:51:41
| 2019-08-16T15:53:57
| 237,512,469
| 0
| 0
|
Apache-2.0
| 2020-01-31T20:41:36
| 2020-01-31T20:41:35
| null |
UTF-8
|
Python
| false
| false
| 13,606
|
py
|
#
# PySNMP MIB module HUAWEI-SWITCH-L2MAM-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-SWITCH-L2MAM-EXT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:48:46 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
entPhysicalName, = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalName")
hwBaseTrapProbableCause, hwBaseTrapSeverity, hwBaseTrapEventType = mibBuilder.importSymbols("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapProbableCause", "hwBaseTrapSeverity", "hwBaseTrapEventType")
hwCfgFdbVlanId, hwMacEntityUsage, hwPortSecurityProtectAction, hwCfgFdbMac, hwMacEntityUsageThreshold, hwCfgFdbVsiName = mibBuilder.importSymbols("HUAWEI-L2MAM-MIB", "hwCfgFdbVlanId", "hwMacEntityUsage", "hwPortSecurityProtectAction", "hwCfgFdbMac", "hwMacEntityUsageThreshold", "hwCfgFdbVsiName")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
InterfaceIndex, ifDescr = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifDescr")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Counter64, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, IpAddress, iso, ObjectIdentity, Integer32, Counter32, Unsigned32, Bits, MibIdentifier, NotificationType, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "IpAddress", "iso", "ObjectIdentity", "Integer32", "Counter32", "Unsigned32", "Bits", "MibIdentifier", "NotificationType", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
hwSWITCH_L2MAM_EXT = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315)).setLabel("hwSWITCH-L2MAM-EXT")
hwSWITCH_L2MAM_EXT.setRevisions(('2014-03-26 16:00', '2014-03-26 16:00', '2014-03-19 16:00', '2014-02-14 16:00', '2004-06-08 00:00', '1996-10-31 00:00', '1999-12-07 00:00', '2004-06-08 00:00', '2004-06-08 00:00', '1996-10-31 00:00', '1999-12-07 00:00', '2004-06-08 00:00', '1996-10-31 00:00', '1999-12-07 00:00', '1996-10-31 00:00', '1999-12-07 00:00', '2004-06-08 00:00', '2004-06-08 00:00', '2004-06-08 00:00', '2010-08-11 16:00', '2014-02-14 16:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: hwSWITCH_L2MAM_EXT.setRevisionsDescriptions(('Modify code hwMacTrapPortCfgAlarm', 'Modify code hwMacTrapHashConflictAlarm ', 'Edit the mib file by OM stard guide', 'Add code hwMacTrapPortCfgAlarm', ' V1.00, Inital version. ', 'Initial version (version 1), published as RFC 2037.', 'Initial Version of Entity MIB (Version 2). This revision obsoletes RFC 2037. This version published as RFC 2737.', ' V1.00, Inital version. ', ' V1.00, Inital version. ', 'Initial version (version 1), published as RFC 2037.', 'Initial Version of Entity MIB (Version 2). This revision obsoletes RFC 2037. This version published as RFC 2737.', ' V1.00, Inital version. ', 'Initial version (version 1), published as RFC 2037.', 'Initial Version of Entity MIB (Version 2). This revision obsoletes RFC 2037. This version published as RFC 2737.', 'Initial version (version 1), published as RFC 2037.', 'Initial Version of Entity MIB (Version 2). This revision obsoletes RFC 2037. This version published as RFC 2737.', ' V1.00, Inital version. ', ' V1.00, Inital version. ', ' V1.00, Inital version. ', 'V1.00, initial version.', 'Add code hwMacTrapPortCfgAlarm',))
if mibBuilder.loadTexts: hwSWITCH_L2MAM_EXT.setLastUpdated('201403261600Z')
if mibBuilder.loadTexts: hwSWITCH_L2MAM_EXT.setOrganization('Huawei Technologies Co.,Ltd.')
if mibBuilder.loadTexts: hwSWITCH_L2MAM_EXT.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: support@huawei.com")
if mibBuilder.loadTexts: hwSWITCH_L2MAM_EXT.setDescription('THIS IS THE LAYER 2 MAC-ADDRESS MANAGEMENT MIB.')
hwSwitchL2MamExtObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1))
hwMacTrapPortCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1, 1), )
if mibBuilder.loadTexts: hwMacTrapPortCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgTable.setDescription('The mac-trap Table.The table that contains the mac-trap information currently operating on this device.')
hwMacTrapPortCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1, 1, 1), ).setIndexNames((0, "HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapPortCfgIfIndex"))
if mibBuilder.loadTexts: hwMacTrapPortCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgEntry.setDescription('Entries of the mac-trap table.')
hwMacTrapPortCfgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1, 1, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwMacTrapPortCfgIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgIfIndex.setDescription('The IfIndex which will be configured mac-trap.')
hwMacTrapPortCfgLearn = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMacTrapPortCfgLearn.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgLearn.setDescription('The alarm switch of the new mac. The meanings of the values are: enable(1),disable(2).')
hwMacTrapPortCfgAging = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMacTrapPortCfgAging.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgAging.setDescription('The alarm switch of the delete mac. The meanings of the values are: enable(1),disable(2).')
hwSwitchL2MamExtGeneralObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 2))
hwMacTrapInterval = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwMacTrapInterval.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapInterval.setDescription('The interval of trap the mac change.')
hwMacTrapMacInfo = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 2, 2), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwMacTrapMacInfo.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapMacInfo.setDescription('The information of mac-trap.')
hwSwitchL2MamExtTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3))
hwMacTrapAlarm = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 1)).setObjects(("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapMacInfo"))
if mibBuilder.loadTexts: hwMacTrapAlarm.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapAlarm.setDescription('The trap when mac-address is learned or aging.')
hwPortVlanSecureMacAlarm = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 2)).setObjects(("IF-MIB", "ifDescr"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbMac"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbVlanId"), ("HUAWEI-L2MAM-MIB", "hwPortSecurityProtectAction"))
if mibBuilder.loadTexts: hwPortVlanSecureMacAlarm.setStatus('current')
if mibBuilder.loadTexts: hwPortVlanSecureMacAlarm.setDescription('The alarm when MAC addresses that received are illegal ')
hwSlotMacUsageRaisingThreshold = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 3)).setObjects(("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapEventType"), ("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapSeverity"), ("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapProbableCause"), ("HUAWEI-L2MAM-MIB", "hwMacEntityUsage"), ("HUAWEI-L2MAM-MIB", "hwMacEntityUsageThreshold"), ("ENTITY-MIB", "entPhysicalName"))
if mibBuilder.loadTexts: hwSlotMacUsageRaisingThreshold.setStatus('current')
if mibBuilder.loadTexts: hwSlotMacUsageRaisingThreshold.setDescription('This notification indicates the MAC usage of the slot raising the threshold ')
hwSlotMacUsageFallingThreshold = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 4)).setObjects(("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapEventType"), ("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapSeverity"), ("HUAWEI-BASE-TRAP-MIB", "hwBaseTrapProbableCause"), ("ENTITY-MIB", "entPhysicalName"))
if mibBuilder.loadTexts: hwSlotMacUsageFallingThreshold.setStatus('current')
if mibBuilder.loadTexts: hwSlotMacUsageFallingThreshold.setDescription('This notification indicates the MAC usage of the slot falling the threshold ')
hwMacTrapPortCfgAlarm = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 5)).setObjects(("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapMacInfo"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbMac"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbVlanId"), ("IF-MIB", "ifDescr"))
if mibBuilder.loadTexts: hwMacTrapPortCfgAlarm.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapPortCfgAlarm.setDescription('The trap when mac-address is learned or aging.')
hwMacTrapHashConflictAlarm = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 3, 6)).setObjects(("IF-MIB", "ifDescr"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbMac"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbVlanId"), ("HUAWEI-L2MAM-MIB", "hwCfgFdbVsiName"))
if mibBuilder.loadTexts: hwMacTrapHashConflictAlarm.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapHashConflictAlarm.setDescription('This notification indicates that MAC address hash conflict occurred.')
hwSwitchL2MamExtConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4))
hwSwitchL2MamExtCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 1))
hwSwitchL2MamExtFullCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 1, 1)).setObjects(("HwSWITCH-L2MAM-EXT", "hwMacTrapGroups"), ("HwSWITCH-L2MAM-EXT", "hwL2MAMExtGeneralGrops"), ("HwSWITCH-L2MAM-EXT", "hwL2MAMExtTrapGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwSwitchL2MamExtFullCompliance = hwSwitchL2MamExtFullCompliance.setStatus('current')
if mibBuilder.loadTexts: hwSwitchL2MamExtFullCompliance.setDescription('hwSwitchL2MamExtFullCompliance')
hwSwitchL2MamExtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 2))
hwMacTrapGroups = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 2, 1)).setObjects(("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapPortCfgLearn"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapPortCfgAging"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwMacTrapGroups = hwMacTrapGroups.setStatus('current')
if mibBuilder.loadTexts: hwMacTrapGroups.setDescription('A collection of objects providing information about mac trap table.')
hwL2MAMExtGeneralGrops = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 2, 2)).setObjects(("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapInterval"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapMacInfo"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwL2MAMExtGeneralGrops = hwL2MAMExtGeneralGrops.setStatus('current')
if mibBuilder.loadTexts: hwL2MAMExtGeneralGrops.setDescription('A collection of objects providing information about Mac addresses.')
hwL2MAMExtTrapGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 315, 4, 2, 3)).setObjects(("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapAlarm"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapPortCfgAlarm"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwPortVlanSecureMacAlarm"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwSlotMacUsageFallingThreshold"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwSlotMacUsageRaisingThreshold"), ("HUAWEI-SWITCH-L2MAM-EXT-MIB", "hwMacTrapHashConflictAlarm"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
hwL2MAMExtTrapGroup = hwL2MAMExtTrapGroup.setStatus('current')
if mibBuilder.loadTexts: hwL2MAMExtTrapGroup.setDescription('The trap enable group.')
mibBuilder.exportSymbols("HUAWEI-SWITCH-L2MAM-EXT-MIB", hwMacTrapInterval=hwMacTrapInterval, hwMacTrapAlarm=hwMacTrapAlarm, hwSlotMacUsageFallingThreshold=hwSlotMacUsageFallingThreshold, hwL2MAMExtTrapGroup=hwL2MAMExtTrapGroup, hwSwitchL2MamExtGeneralObjects=hwSwitchL2MamExtGeneralObjects, hwL2MAMExtGeneralGrops=hwL2MAMExtGeneralGrops, hwMacTrapMacInfo=hwMacTrapMacInfo, hwMacTrapPortCfgEntry=hwMacTrapPortCfgEntry, PYSNMP_MODULE_ID=hwSWITCH_L2MAM_EXT, hwMacTrapHashConflictAlarm=hwMacTrapHashConflictAlarm, hwSwitchL2MamExtFullCompliance=hwSwitchL2MamExtFullCompliance, hwSlotMacUsageRaisingThreshold=hwSlotMacUsageRaisingThreshold, hwSwitchL2MamExtObjects=hwSwitchL2MamExtObjects, hwSwitchL2MamExtGroups=hwSwitchL2MamExtGroups, hwSwitchL2MamExtCompliances=hwSwitchL2MamExtCompliances, hwSWITCH_L2MAM_EXT=hwSWITCH_L2MAM_EXT, hwMacTrapGroups=hwMacTrapGroups, hwSwitchL2MamExtConformance=hwSwitchL2MamExtConformance, hwMacTrapPortCfgLearn=hwMacTrapPortCfgLearn, hwSwitchL2MamExtTraps=hwSwitchL2MamExtTraps, hwMacTrapPortCfgTable=hwMacTrapPortCfgTable, hwMacTrapPortCfgAging=hwMacTrapPortCfgAging, hwPortVlanSecureMacAlarm=hwPortVlanSecureMacAlarm, hwMacTrapPortCfgAlarm=hwMacTrapPortCfgAlarm, hwMacTrapPortCfgIfIndex=hwMacTrapPortCfgIfIndex)
|
[
"dcwangmit01@gmail.com"
] |
dcwangmit01@gmail.com
|
fc9e43bf3077454377017ccba304ace293c1bc05
|
214e67e48b5f9c24bd64d9c04c94db86ee0c85e0
|
/arcerojas/Propietario/urls.py
|
98ae012e41de1138d6b45a94c070f8f795612e75
|
[] |
no_license
|
efnaranjo6/arcerojas
|
238542f11a91958cf5d3221781c8425c23a8a1c1
|
ace90508d2a95f837c255f9245af3d1bff0d8f02
|
refs/heads/main
| 2023-08-11T10:40:07.196737
| 2021-09-17T21:20:46
| 2021-09-17T21:20:46
| 407,409,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 457
|
py
|
from django.urls import path
from .views import Propietarioview,Propietarioinsertar,Propietarioeditar,Propietarioeliminar
urlpatterns = [
path('', Propietarioview.as_view(), name='propietarios'),
path('propietario/new/', Propietarioinsertar.as_view(), name='Insertar'),
path('propietario/Editar/<int:pk>', Propietarioeditar.as_view(), name='Editar'),
path('propietario/eliminar/<int:pk>', Propietarioeliminar.as_view(), name='Eliminar'),
]
|
[
"efnaranjo6@misena.edu.co"
] |
efnaranjo6@misena.edu.co
|
5b8e8640194cc124752bcf19faabd9197a61a886
|
32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd
|
/benchmark/wikipedia/testcase/interestallcases/testcase2_008_2.py
|
7697a7118f975926627a9d91d9819390182e2f4e
|
[] |
no_license
|
Prefest2018/Prefest
|
c374d0441d714fb90fca40226fe2875b41cf37fc
|
ac236987512889e822ea6686c5d2e5b66b295648
|
refs/heads/master
| 2021-12-09T19:36:24.554864
| 2021-12-06T12:46:14
| 2021-12-06T12:46:14
| 173,225,161
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,859
|
py
|
#coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'org.wikipedia',
'appActivity' : 'org.wikipedia.main.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'org.wikipedia/org.wikipedia.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
def scrollToFindElement(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
swipe(driver, 0.5, 0.6, 0.5, 0.2)
else:
return element
return
def clickoncheckable(driver, str, value = "true") :
parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
for parent in parents:
try :
parent.find_element_by_android_uiautomator(str)
lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
if (len(lists) == 1) :
innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
nowvalue = innere.get_attribute("checked")
if (nowvalue != value) :
innere.click()
break
except NoSuchElementException:
continue
# preference setting and exit
try :
os.popen("adb shell svc data enable")
time.sleep(5)
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.DeveloperSettingsActivity")
scrollToFindElement(driver, "new UiSelector().text(\"useRestbase_setManually\")").click()
clickoncheckable(driver, "new UiSelector().text(\"useRestbase_setManually\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"suppressNotificationPolling\")").click()
clickoncheckable(driver, "new UiSelector().text(\"suppressNotificationPolling\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"memoryLeakTest\")").click()
clickoncheckable(driver, "new UiSelector().text(\"memoryLeakTest\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"readingListLoginReminder\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListLoginReminder\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")").click()
clickoncheckable(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")", "false")
driver.press_keycode(4)
time.sleep(2)
os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.SettingsActivity")
scrollToFindElement(driver, "new UiSelector().text(\"Show images\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Show images\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Prefer offline content\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Prefer offline content\")", "false")
scrollToFindElement(driver, "new UiSelector().text(\"Send usage reports\")").click()
clickoncheckable(driver, "new UiSelector().text(\"Send usage reports\")", "true")
driver.press_keycode(4)
time.sleep(2)
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
finally :
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"2_008_pre\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
# testcase008
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
swipe(driver, 0.5, 0.2, 0.5, 0.8)
driver.press_keycode(4)
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"2_008\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'org.wikipedia'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage)
os.popen("adb shell svc data enable")
|
[
"prefest2018@gmail.com"
] |
prefest2018@gmail.com
|
b6008a92acebeb1a4b43cc9e053f074773796fb9
|
00b0cf3d93b7033e1f419b49a0278f5d463733b0
|
/script1.py
|
4f5b757d7c19b4eb20f0bd6b2e4b5a1aebbcf2d4
|
[] |
no_license
|
aramidetosin/Nornir-BGP-OSPF
|
df46364b439fbfaa53542eeffbc39eba415fa950
|
c6837d109c8ce33053af3b1c023952b2cd315c9c
|
refs/heads/master
| 2022-12-10T06:11:18.312519
| 2020-09-13T08:37:18
| 2020-09-13T08:37:18
| 288,286,062
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 851
|
py
|
from pyats.async_ import pcall
from genie.conf import Genie
from genie.utils import Dq
from genie.testbed import load
from rich import print
def get_ospf(hostname, dev):
#get show output from routing table
parsed = dev.parse('show ip route')
# use DQ to parse the OSPF routes from the routing table
get_routes = (Dq(parsed).contains('O').get_values('routes'))
# count the number of those OSPF entries
num_routes = len(get_routes)
print(f"[red]{hostname} has {num_routes} OSPF routes in it's routing table[/red]")
def main():
# load testbed
testbed = load('testbed.yaml')
# connect and suppress output
testbed.connect(log_stdout=False)
# use pcall to execute on all devices in parallel
pcall(get_ospf, hostname=testbed.devices, dev=testbed.devices.values())
if __name__ == "__main__":
main()
|
[
"aoluwatosin10@gmail.com"
] |
aoluwatosin10@gmail.com
|
fd01446c2f4d6707e0f766fe7bd1160a36c15b5b
|
358519772669c73092f625f630722c38e1d33783
|
/mccetools/examples/titrateHEWL_e8.py
|
cc186c9419106ff7bf6afa5e57a1598f9ca8d378
|
[] |
no_license
|
minghao2016/mmtools
|
e7e61aca084498408ceae965dd6c9450ad89eafa
|
3ade988afb51cd54ee5a4067d8deaad88afbb0fe
|
refs/heads/master
| 2021-09-21T01:02:22.522187
| 2014-09-19T03:40:03
| 2014-09-19T03:40:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,015
|
py
|
import mmtools.mccetools.mcce as mcce
import os, sys
### INPUT LINES ####
#
# Specify the input and output PDB filenames
# NOTE: pdbfile and outpdbfile should be local (not absolute) paths for this project
pdbfile = '1e8l_model1.pdb'
outfile = os.path.join(os.curdir,'titrateHEWL_e8_pK.out')
# Specify the course of the pH titration
pHstart = 1.0
pHstep = 1.0
pHiters = 14
# Specify a MCCE parameter file with the desired set of parameters for calculating the pKa
prmfile = '../prmfiles/run.prm.quick'
prmfile = os.path.abspath(prmfile)
# Specify additional or different parameters than prmfile, if desired.
# xtraprms = {}
xtraprms = {'EPSILON_PROT':'8.0'} # NOTE: only eps=4.0 and eps=8.0 are supported!!!
# Write output PDB file with the correct protonation state
### work is done in a temporary dir; Setting cleanup=True will erase these temporary files
mcce.titratePDB(pdbfile, outfile, pHstart, pHstep, pHiters, os.environ['MCCE_LOCATION'], cleanup=False, prmfile=prmfile, xtraprms=xtraprms)
|
[
"choderaj@mskcc.org"
] |
choderaj@mskcc.org
|
4f3dc08a6a651f2e44d73ffb4fad06cf5da0274d
|
3453fc365a2f2e24aaf9b9770d94560440aedc4c
|
/settings.py
|
80abdc05ac5b330ad2d2737b6b010880904d5a50
|
[] |
no_license
|
star1986xk/LOL_DB
|
6cd97e8f14fead69f0eac522d49be3cc3b19596a
|
5431e965f67c17e26152b842420ec292b79ab73e
|
refs/heads/master
| 2022-08-22T16:33:28.809745
| 2020-05-23T13:13:27
| 2020-05-23T13:13:27
| 266,339,126
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 274
|
py
|
SQL = {
'host': 'xxxx',
'user': 'xxxx',
'password': 'xxxxxx',
'database': 'lol',
'charset': 'utf8'
}
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
}
|
[
"173013905@qq.com"
] |
173013905@qq.com
|
2dcb3f7173cb22ab12265f533657a71a52f75d7b
|
8226d4d618b524bfe958232d9e7dff09378acaec
|
/cowrie/ssh/userauth.py
|
922be4a236cf9d621a03c5826e03103f692b64f3
|
[
"BSD-2-Clause"
] |
permissive
|
sergey-pronin/cowrie
|
4bb9d20a4a38fb338f1da8317f312bd61b777ffe
|
772cb2e41ea380a05e3d900fab4422f2583ec287
|
refs/heads/master
| 2021-05-14T06:38:08.606324
| 2018-01-03T04:49:57
| 2018-01-03T04:49:57
| 116,247,402
| 1
| 0
| null | 2018-01-04T10:34:00
| 2018-01-04T10:33:59
| null |
UTF-8
|
Python
| false
| false
| 6,132
|
py
|
# Copyright (c) 2009-2014 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
"""
This module contains ...
"""
from __future__ import division, absolute_import
import struct
from twisted.python import failure, log  # failure is used in ssh_USERAUTH_INFO_RESPONSE below
from twisted.python.compat import _bytesChr as chr
from twisted.internet import defer
from twisted.conch.interfaces import IConchUser
from twisted.conch.ssh import transport, userauth  # transport provides DISCONNECT_PROTOCOL_ERROR
from twisted.conch.ssh.common import NS, getNS
from twisted.conch import error
from cowrie.core import credentials
class HoneyPotSSHUserAuthServer(userauth.SSHUserAuthServer):
"""
This contains modifications to the authentication system to do:
* Login banners (like /etc/issue.net)
* Anonymous authentication
* Keyboard-interactive authentication (PAM)
* IP based authentication
"""
def serviceStarted(self):
"""
"""
self.interfaceToMethod[credentials.IUsername] = b'none'
self.interfaceToMethod[credentials.IUsernamePasswordIP] = b'password'
self.interfaceToMethod[credentials.IPluggableAuthenticationModulesIP] = b'keyboard-interactive'
self.bannerSent = False
self._pamDeferred = None
userauth.SSHUserAuthServer.serviceStarted(self)
def sendBanner(self):
"""
Display contents of <honeyfs>/etc/issue.net
"""
if self.bannerSent:
return
self.bannerSent = True
try:
honeyfs = self.portal.realm.cfg.get('honeypot', 'contents_path')
issuefile = honeyfs + "/etc/issue.net"
data = open(issuefile).read()
except IOError:
return
if not data or not len(data.strip()):
return
self.transport.sendPacket(
userauth.MSG_USERAUTH_BANNER, NS(data) + NS(b'en'))
def ssh_USERAUTH_REQUEST(self, packet):
"""
"""
self.sendBanner()
return userauth.SSHUserAuthServer.ssh_USERAUTH_REQUEST(self, packet)
def auth_publickey(self, packet):
"""
We subclass to intercept non-DSA/RSA keys, or Conch will crash on ECDSA.
"""
algName, blob, rest = getNS(packet[1:], 2)
if not algName in (b'ssh-rsa', b'ssh-dsa'):
log.msg( "Attempted public key authentication with %s algorithm" % (algName,))
return defer.fail(error.ConchError("Incorrect signature"))
return userauth.SSHUserAuthServer.auth_publickey(self, packet)
def auth_none(self, packet):
"""
Allow every login
"""
c = credentials.Username(self.user)
srcIp = self.transport.transport.getPeer().host
return self.portal.login(c, srcIp, IConchUser)
def auth_password(self, packet):
"""
Overridden to pass src_ip to credentials.UsernamePasswordIP
"""
password = getNS(packet[1:])[0]
srcIp = self.transport.transport.getPeer().host
c = credentials.UsernamePasswordIP(self.user, password, srcIp)
return self.portal.login(c, srcIp,
IConchUser).addErrback(self._ebPassword)
def auth_keyboard_interactive(self, packet):
"""
Keyboard interactive authentication. No payload. We create a
PluggableAuthenticationModules credential and authenticate with our
portal.
Overridden to pass src_ip to credentials.PluggableAuthenticationModulesIP
"""
if self._pamDeferred is not None:
self.transport.sendDisconnect(
transport.DISCONNECT_PROTOCOL_ERROR,
"only one keyboard interactive attempt at a time")
return defer.fail(error.IgnoreAuthentication())
src_ip = self.transport.transport.getPeer().host
c = credentials.PluggableAuthenticationModulesIP(self.user,
self._pamConv, src_ip)
return self.portal.login(c, src_ip,
IConchUser).addErrback(self._ebPassword)
def _pamConv(self, items):
"""
Convert a list of PAM authentication questions into a
MSG_USERAUTH_INFO_REQUEST. Returns a Deferred that will be called
back when the user has responses to the questions.
@param items: a list of 2-tuples (message, kind). We only care about
kinds 1 (password) and 2 (text).
@type items: C{list}
@rtype: L{defer.Deferred}
"""
resp = []
for message, kind in items:
if kind == 1: # Password
resp.append((message, 0))
elif kind == 2: # Text
resp.append((message, 1))
elif kind in (3, 4):
return defer.fail(error.ConchError(
'cannot handle PAM 3 or 4 messages'))
else:
return defer.fail(error.ConchError(
'bad PAM auth kind %i' % (kind,)))
packet = NS(b'') + NS(b'') + NS(b'')
packet += struct.pack('>L', len(resp))
for prompt, echo in resp:
packet += NS(prompt)
packet += chr(echo)
self.transport.sendPacket(userauth.MSG_USERAUTH_INFO_REQUEST, packet)
self._pamDeferred = defer.Deferred()
return self._pamDeferred
def ssh_USERAUTH_INFO_RESPONSE(self, packet):
"""
The user has responded with answers to PAMs authentication questions.
Parse the packet into a PAM response and callback self._pamDeferred.
Payload::
uint32 number of responses
string response 1
...
string response n
"""
d, self._pamDeferred = self._pamDeferred, None
try:
resp = []
numResps = struct.unpack('>L', packet[:4])[0]
packet = packet[4:]
while len(resp) < numResps:
response, packet = getNS(packet)
resp.append((response, 0))
if packet:
raise error.ConchError(
"{:d} bytes of extra data".format(len(packet)))
except:
d.errback(failure.Failure())
else:
d.callback(resp)
|
[
"michel@oosterhof.net"
] |
michel@oosterhof.net
|
8d1dcfdf120fab339b7e7a0c1a0a455b6cfa8730
|
8911d294dbdc2c1b415804ec36112db11ca56148
|
/Best_Buy/App_Best_Buy/urls.py
|
eae0c5b9dcc63d349485ff10c69706ba788a620c
|
[] |
no_license
|
generateintel/BestBuy_Scraper
|
8ab596fd0dd98bd4f57d3024f4e5862af67b0899
|
c6ffba85537250e41b0d450be8fafa4c96d004f7
|
refs/heads/master
| 2022-12-10T14:06:58.131437
| 2020-08-28T15:19:11
| 2020-08-28T15:19:11
| 291,078,961
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 368
|
py
|
# import Views as Views
from django.conf.urls import url, include
from django.urls import path,include
from rest_framework import routers
from . import views
from .views import *
# urlpatterns = [
# ]
router=routers.DefaultRouter()
router.register(r'bestbuy(?:/(?P<id>[0-9]+))?', Best_Buy, 'bestbuy')#User apis
urlpatterns=[
path('', include(router.urls)),
]
|
[
"frazmirza58@gmail.com"
] |
frazmirza58@gmail.com
|
6992e6208c0a2fe642723a1efe1d37f8798929dd
|
bb8838e3eec624fd35a61d6d646f941eac1b266a
|
/saga/adaptors/cpi/filesystem/__init__.py
|
6eb35fd84bb36b6735f61432bc6f9c3a4a067591
|
[
"MIT"
] |
permissive
|
agrill/saga-python
|
55087c03e72635ffbb2fe1ca56b5cc02b7ff2094
|
35101e3a40d3cfcb39cb9f0d0c5f64c6f8de5930
|
refs/heads/master
| 2021-01-22T10:14:11.922145
| 2013-11-19T14:38:50
| 2013-11-19T14:38:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 288
|
py
|
__author__ = "Andre Merzky"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__ = "MIT"
from saga.adaptors.cpi.filesystem.file import File
from saga.adaptors.cpi.filesystem.directory import Directory
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
[
"andre@merzky.net"
] |
andre@merzky.net
|
d59f6584aa265e16f758ceaa9eaaa52f77180d65
|
602bdbd1d8ef4d36ccfdcae5756bc8e448d30584
|
/share/pollen/yamlutil.py
|
a827b5412580893d319067869fc0182c3328adcc
|
[] |
no_license
|
timparkin/timparkingallery
|
1136027bf9cfbad31319958f20771a6fdc9f5fc4
|
6e6c02684a701817a2efae27e21b77765daa2c33
|
refs/heads/master
| 2016-09-06T00:28:16.965416
| 2008-11-25T21:15:45
| 2008-11-25T21:15:45
| 12,716
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,522
|
py
|
"""
Extension of PySyck that treats all scalars (implicit typing is switched off)
as UTF-8 encoded strings.
To convert scalars to specific types use the standard YAML syntax, i.e.
"!int 1".
"""
import syck
class Loader(syck.Loader):
def construct(self, node):
# Implicit typing is always disabled but we want unicode instances, not
# byte streams, where possible. So, if the node is a scalar and it's
# not been explicitly given a type then treat it as a utf-8 encoded
# string.
if node.kind == 'scalar' and node.tag is None:
return super(Loader, self).construct_python_unicode(node)
return super(Loader, self).construct(node)
def load(source, Loader=Loader):
return syck.load(source, Loader=Loader, implicit_typing=False)
if __name__ == '__main__':
import unittest
POUND = u'\xa3'
POUND_ENC = POUND.encode('utf-8')
class TestCase(unittest.TestCase):
def test_strings(self):
s = load("- foo\n- %s\n- !string %s" % (POUND_ENC, POUND_ENC))
self.assertEquals(s, [u'foo', POUND, POUND_ENC])
self.assertEquals(map(type, s), [unicode, unicode, str])
def test_likeNumbers(self):
s = load("- 1\n- 1.2")
self.assertEquals(s, [u'1', u'1.2'])
def test_explicitNumbers(self):
s = load("- !int 1\n- !float 1.2")
self.assertEquals(s, [1, 1.2])
self.assertEquals(map(type, s), [int, float])
unittest.main()
|
[
"info@timparkin.co.uk"
] |
info@timparkin.co.uk
|
ebca3fd3419746cb6ce74eb0a9f19695c7d634ac
|
b1c7a768f38e2e987a112da6170f49503b9db05f
|
/userprofile/migrations/0023_auto_20190315_1624.py
|
3deeef30d6004cff0aecc2b7f825b79a7f5bed21
|
[] |
no_license
|
Niladrykar/bracketerp
|
8b7491aa319f60ec3dcb5077258d75b0394db374
|
ca4ee60c2254c6c132a38ce52410059cc6b19cae
|
refs/heads/master
| 2022-12-11T04:23:07.504966
| 2019-03-18T06:58:13
| 2019-03-18T06:58:13
| 176,218,029
| 1
| 0
| null | 2022-12-08T03:01:46
| 2019-03-18T06:27:37
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,776
|
py
|
# Generated by Django 2.0.6 on 2019-03-15 10:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('userprofile', '0022_services'),
]
operations = [
migrations.CreateModel(
name='Pro_services',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('service_name', models.CharField(blank=True, max_length=100)),
('details', models.CharField(blank=True, max_length=100)),
('service_type', models.CharField(blank=True, choices=[('Returns', 'Returns'), ('Communication', 'Communication'), ('License', 'License')], default='Returns', max_length=100)),
('duration', models.CharField(blank=True, choices=[('ANNUALLY', 'ANNUALLY'), ('QUARTERLY', 'QUARTERLY'), ('ONE TIME', 'ONE TIME')], default='ANNUALLY', max_length=100)),
('service_mode', models.CharField(blank=True, choices=[('ON-PREMISES', 'ON-PREMISES'), ('CALLS - VOIP', 'CALLS - VOIP'), ('COLLECTION FROM CLIENT', 'COLLECTION FROM CLIENT')], default='ON-PREMISES', max_length=100)),
('rate', models.DecimalField(blank=True, decimal_places=2, default=0.0, max_digits=10)),
('User', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.RemoveField(
model_name='services',
name='User',
),
migrations.DeleteModel(
name='Services',
),
]
|
[
"niladry.kar85@gmail.com"
] |
niladry.kar85@gmail.com
|
ea506e927f56df9e77c165cbf17c39260478d62d
|
48a7b266737b62da330170ca4fe4ac4bf1d8b663
|
/molsysmt/build/make_bioassembly.py
|
ace9b6f3a60d45deaeeb2648f73cdfcb64aea9d5
|
[
"MIT"
] |
permissive
|
uibcdf/MolSysMT
|
ddab5a89b8ec2377f383884c5169d147cab01322
|
c3d713ba63db24eb8a2426115cf8d9cb3665d225
|
refs/heads/main
| 2023-08-08T15:04:16.217967
| 2023-08-04T05:49:56
| 2023-08-04T05:49:56
| 137,937,243
| 15
| 3
|
MIT
| 2023-06-04T20:27:06
| 2018-06-19T19:38:44
|
Python
|
UTF-8
|
Python
| false
| false
| 1,897
|
py
|
from molsysmt._private.digestion import digest
import numpy as np
@digest()
def make_bioassembly(molecular_system, bioassembly=None, structure_indices=0, to_form=None):
"""
To be written soon...
"""
from molsysmt.basic import extract, merge, get
from molsysmt.structure import rotate, translate
if bioassembly is None:
aux_bioassemblies = get(molecular_system, bioassembly=True)
bioassembly = list(aux_bioassemblies.keys())[0]
bioassembly = aux_bioassemblies[bioassembly]
elif isinstance(bioassembly, str):
aux_bioassemblies = get(molecular_system, bioassembly=True)
bioassembly = aux_bioassemblies[bioassembly]
units = []
if _all_chains_equal(bioassembly):
chains = bioassembly['chain_indices'][0]
unit_0 = extract(molecular_system, structure_indices=0, selection='chain_index in @chains', syntax='MolSysMT')
for rotation, translation in zip(bioassembly['rotations'], bioassembly['translations']):
unit = rotate(unit_0, rotation=rotation)
unit = translate(unit, translation=translation)
units.append(unit)
else:
for chains, rotation, translation in zip(bioassembly['chain_indices'], bioassembly['rotations'], bioassembly['translations']):
unit = extract(molecular_system, structure_indices=0, selection='chain_index in @chains', syntax='MolSysMT')
unit = rotate(unit, rotation=rotation)
unit = translate(unit, translation=translation)
units.append(unit)
output = merge(units, to_form=to_form)
return output
def _all_chains_equal(bioassembly):
output = True
first_chains = bioassembly['chain_indices'][0]
for chains in bioassembly['chain_indices']:
if not np.all(chains==first_chains):
output = False
break
return output
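# Hypothetical usage sketch (not part of the original module), based on the
# signature above; the bioassembly id '1' is an assumption:
#   assembly = make_bioassembly(molecular_system, bioassembly='1', structure_indices=0)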
|
[
"prada.gracia@gmail.com"
] |
prada.gracia@gmail.com
|
2161519984d316cb7df0e9c4a0aaf36ad2336703
|
d93a9c5e63612e26ce6d42b055caecac61f9e8f1
|
/src/analyse/cost_assumptions.py
|
2a7dfdfa8ebe97f09b9cb699dadcd36c0ab2b8d4
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
timtroendle/geographic-scale
|
525bd9820128cd8340750cab59815555e1ed6520
|
81ec940e10b8e692429797e6a066a177e1508a89
|
refs/heads/master
| 2023-04-18T21:27:11.533352
| 2021-08-04T15:05:55
| 2021-08-04T15:05:55
| 170,466,995
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,642
|
py
|
import math
import calliope
import pandas as pd
import xarray as xr
EUR_PER_KW = 1 / 1e3 # from €/MW(h) to €/kW(h)
CT_PER_KW = 1e2 / 1e3 # from €/MW(h) to €ct/kW(h)
M_TO_1000KM = 1e-6
EPSILON = 1e-12
TECHS = {
"open_field_pv": "Utility-scale PV",
"roof_mounted_pv": "Rooftop PV",
"wind_onshore_monopoly": "Onshore wind",
"wind_offshore": "Offshore wind",
"biofuel": "Biofuel",
"hydro_run_of_river": "Hydropower run of river",
"hydro_reservoir": "Hydropower with reservoir",
"pumped_hydro": "Pumped hydro storage",
"battery": "Short term storage",
"hydrogen": "Long term storage",
"ac_transmission": "AC transmission^",
}
COST_SOURCES = {
"open_field_pv": "Ref. @JRC:2014 Table 7",
"roof_mounted_pv": "Ref. @JRC:2014 Table 9",
"wind_onshore_monopoly": "Ref. @JRC:2014 Table 4",
"wind_offshore": "Ref. @JRC:2014 Table 5",
"biofuel": "Ref. @JRC:2014 Table 48, ref. @RuizCastello:2015",
"hydro_run_of_river": "Ref. @JRC:2014 Table 14",
"hydro_reservoir": "Ref. @JRC:2014 Table 12",
"pumped_hydro": "Ref. @Schmidt:2019",
"battery": "Ref. @Schmidt:2019",
"hydrogen": "Ref. @Schmidt:2019",
"ac_transmission": "Ref. @JRC:2014 Table 39",
}
def main(path_to_model, scaling_factors, path_to_output):
"""Create table of important cost assumptions."""
model = calliope.read_netcdf(path_to_model)
eur_per_kw = scaling_factors["power"] / scaling_factors["monetary"] * EUR_PER_KW
ct_per_kw = scaling_factors["power"] / scaling_factors["monetary"] * CT_PER_KW
energy_cap = (model.get_formatted_array("cost_energy_cap")
.squeeze("costs")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0)
.drop("costs")) * eur_per_kw
energy_cap.loc["ac_transmission"] = transmission_investment_cost(model, eur_per_kw)
annual_cost = (model.get_formatted_array("cost_om_annual")
.squeeze("costs")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0)
.drop("costs")) * eur_per_kw
annual_cost.loc["ac_transmission"] = transmission_annual_cost(model, eur_per_kw)
storage_cap = (model.get_formatted_array("cost_storage_cap")
.squeeze("costs")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0)
.drop("costs")) * eur_per_kw
lifetime = (model.get_formatted_array("lifetime")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0))
lifetime.loc["ac_transmission"] = transmission_lifetime(model)
variable_costs_prod = (model.get_formatted_array("cost_om_prod")
.squeeze("costs")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0)
.drop("costs")) * ct_per_kw
variable_costs_con = (model.get_formatted_array("cost_om_con")
.squeeze("costs")
.reindex(techs=list(TECHS.keys()))
.groupby("techs")
.mean("locs")
.fillna(0)
.drop("costs")) * ct_per_kw
variable_costs = variable_costs_prod + variable_costs_con
all_costs = xr.Dataset({
"Overnight cost (€/kW)": energy_cap,
"Overnight cost (€/kWh)": storage_cap,
"Annual cost (€/kW/yr)": annual_cost,
"Variable cost (€ct/kWh)": variable_costs,
"Lifetime (yr)": lifetime,
"Source": pd.Series(COST_SOURCES).to_xarray().rename(index="techs")
})
all_costs.rename(techs="Technology").to_dataframe().rename(index=TECHS).to_csv(
path_to_output,
index=True,
header=True,
float_format="%.0f"
)
def transmission_investment_cost(model, scaling_factor):
cost = model.get_formatted_array("cost_energy_cap").squeeze("costs") * scaling_factor
distance = model.get_formatted_array("distance") * M_TO_1000KM
rel_costs = (cost / distance).to_series().dropna()
assert math.isclose(rel_costs.std(), 0, abs_tol=EPSILON)
return rel_costs.iloc[0]
def transmission_annual_cost(model, scaling_factor):
rel_cost = (
model
.get_formatted_array("cost_om_annual_investment_fraction")
.squeeze("costs")
.to_series()
.dropna()
)
assert math.isclose(rel_cost.std(), 0, abs_tol=EPSILON)
investment_cost = transmission_investment_cost(model, scaling_factor)
return rel_cost.iloc[0] * investment_cost
def transmission_lifetime(model):
lifetimes = model.get_formatted_array("lifetime")
return (lifetimes
.groupby(lifetimes.techs.where(~lifetimes.techs.str.contains("ac_transmission"), "ac_transmission"))
.mean(["techs", "locs"])
.sel(techs="ac_transmission")
.item())
if __name__ == "__main__":
main(
path_to_model=snakemake.input.model,
scaling_factors=snakemake.params.scaling_factors,
path_to_output=snakemake.output[0]
)
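# A hypothetical manual invocation without Snakemake (the paths and scaling
# factors below are illustrative placeholders, not values from the project):
# main(
#     path_to_model="build/model.nc",
#     scaling_factors={"power": 1e-3, "monetary": 1e-9},
#     path_to_output="build/cost-assumptions.csv",
# )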
|
[
"tim.troendle@usys.ethz.ch"
] |
tim.troendle@usys.ethz.ch
|
75e594d130d4bb87a04ccd13b6cc04528faf8c26
|
e267c91f23055397201c3d9c23d7583b269d51b8
|
/backend/pugorugh/tests/test_serializers.py
|
9985fdce7df4cae83ff5caa706f06c94f6628da4
|
[] |
no_license
|
mcintoshsg/pug_or_ugh_v1
|
8678213b4b4ea09a70f369aa08002ff4a8194a29
|
3e735cd840ffc5a85497eab48518800f0757d9f3
|
refs/heads/master
| 2020-03-19T15:26:41.152968
| 2018-06-14T01:30:49
| 2018-06-14T01:30:49
| 136,670,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,917
|
py
|
from django.contrib.auth.models import User
from rest_framework.test import APITestCase
from pugorugh.serializers import (DogSerializer, UserPrefSerializer)
class DogSerializerTests(APITestCase):
'''SETUP '''
def setUp(self):
        ''' set up dummy data for the Dog serializer '''
self.dog_1_data = {
'name': 'dog_1',
'image_filename': '1.jpg',
'breed': 'mutt',
'age': 12,
'gender': 'm',
'size': 'm'
}
def test_get_correct_value(self):
serializer = DogSerializer(data=self.dog_1_data)
self.assertTrue(serializer.is_valid())
self.assertEqual(
serializer.data['name'],
self.dog_1_data['name']
)
class UserPrefSerializerTests(APITestCase):
'''SETUP '''
def setUp(self):
''' create user to be used in our dummy data '''
self.user_1 = User.objects.create(
username='test_user_1',
email='test_user_1@example.com',
password='password'
)
        ''' set up dummy data for UserPref Serializer '''
self.user_pref_1 = {
'user': self.user_1,
'age': 'b,y',
'gender': 'm,f',
'size': 'l, xl'
}
def test_validate_userpref_bad_age(self):
self.user_pref_1['age'] = 'z'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertFalse(serializer.is_valid())
self.assertEqual(set(serializer.errors.keys()), set(['age']))
def test_validate_userpref_good_age(self):
self.user_pref_1['age'] = 's'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertTrue(serializer.is_valid())
def test_validate_userpref_bad_gender(self):
self.user_pref_1['gender'] = 'z'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertFalse(serializer.is_valid())
self.assertEqual(set(serializer.errors.keys()), set(['gender']))
def test_validate_userpref_good_gender(self):
self.user_pref_1['gender'] = 'm'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertTrue(serializer.is_valid())
def test_validate_userpref_bad_size(self):
self.user_pref_1['size'] = 'z'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertFalse(serializer.is_valid())
self.assertEqual(set(serializer.errors.keys()), set(['size']))
def test_validate_userpref_good_size(self):
        self.user_pref_1['size'] = 'm'
serializer = UserPrefSerializer(data=self.user_pref_1)
self.assertTrue(serializer.is_valid())
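# To run these tests, a typical Django invocation (app label assumed from the
# imports above) would be: python manage.py test pugorugh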
|
[
"s.g.mcintosh@gmail.com"
] |
s.g.mcintosh@gmail.com
|
c6f51cdc9597157d6863008f9a3144495adc25ba
|
b7f45072d056b80ed49e6bcde91877d8576e970d
|
/ImageJ/py/download_and_save_csv.py
|
e075d5dfd7ffc58315af83cc1b7b33371336e92f
|
[] |
no_license
|
jrminter/tips
|
128a18ee55655a13085c174d532c77bcea412754
|
f48f8b202f8bf9e36cb6d487a23208371c79718e
|
refs/heads/master
| 2022-06-14T08:46:28.972743
| 2022-05-30T19:29:28
| 2022-05-30T19:29:28
| 11,463,325
| 5
| 8
| null | 2019-12-18T16:24:02
| 2013-07-17T00:16:43
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 324
|
py
|
"""
download_and_save_csv.py
From:
http://wiki.cmci.info/documents/120206pyip_cooking/python_imagej_cookbook
"""
from ij import IJ
from java.io import PrintWriter
content = IJ.openUrlAsString('http://cmci.info/imgdata/tenFrameResults.csv')
out = PrintWriter('/Users/jrminter/tmp/test1.csv')
out.print(content)
out.close()
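# Alternative sketch, assuming the standard ImageJ API: IJ.saveString writes
# the downloaded text in a single call, without constructing a PrintWriter:
# IJ.saveString(content, '/Users/jrminter/tmp/test1.csv')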
|
[
"jrminter@gmail.com"
] |
jrminter@gmail.com
|
f514aae0d2c12d93b9d619bb80ff773cf0f9e077
|
7c67952f1c18d42f283f395d02294e148f3dd349
|
/export/tracking/apps.py
|
148b73edaf24675007991d8d3903bdb8ad3ae9cb
|
[] |
no_license
|
314casso/cportal
|
cfb4cc9e53819950177728bc5d42e47f2aa3d45e
|
a0a2043616241429571ec1b99302dada49af1485
|
refs/heads/master
| 2022-12-11T07:19:14.209367
| 2021-10-05T21:27:00
| 2021-10-05T21:27:00
| 110,969,703
| 0
| 0
| null | 2022-12-07T23:47:26
| 2017-11-16T12:37:59
|
HTML
|
UTF-8
|
Python
| false
| false
| 156
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class TrackingConfig(AppConfig):
name = 'tracking'
|
[
"picasso75@yandex.ru"
] |
picasso75@yandex.ru
|
6db0fa2b73fdd334a453684a88261ba4cf2ee1cd
|
e1a71cc2773d94d1f6788f7ec830d3723b827745
|
/mayan/apps/redactions/tests/literals.py
|
d2c41f241b83003725ddd7d148d2aa308274f042
|
[
"Apache-2.0"
] |
permissive
|
azees-math/Mayan-EDMS
|
e6ddcee6f188b87e6d64990a85c5af7ad9b95b0c
|
4be3496b233f77d33e16376cb715a80286a50da2
|
refs/heads/master
| 2023-08-06T05:09:57.788757
| 2021-10-07T08:06:51
| 2021-10-07T08:06:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 445
|
py
|
import os
from django.conf import settings
TEST_REDACTION_ARGUMENT = "{'left': 10, 'top': 10, 'right': 10, 'bottom': 10}"
TEST_REDACTION_ARGUMENT_EDITED = "{'left': 20, 'top': 20, 'right': 20, 'bottom': 20}"
TEST_REDACTION_DOCUMENT_FILENAME = 'black_upper_left_corner.png'
TEST_REDACTION_DOCUMENT_PATH = os.path.join(
settings.BASE_DIR, 'apps', 'redactions', 'tests', 'contrib',
'sample_documents', TEST_REDACTION_DOCUMENT_FILENAME
)
|
[
"roberto.rosario@mayan-edms.com"
] |
roberto.rosario@mayan-edms.com
|
530588e4198ab812971feb9aac12ecfb9442af61
|
d9eef8dd3489682c8db41f2311e3058d1f369780
|
/.history/abel-network-files/mcmc_alg_implementation_own_two_20180701105942.py
|
7866626da6dfd8ef1f5122844411c56107d23daa
|
[] |
no_license
|
McKenzie-Lamb/Gerrymandering
|
93fe4a49fe39a0b307ed341e46ba8620ea1225be
|
b7a7c4129d6b0fcd760ba8952de51eafa701eac3
|
refs/heads/master
| 2021-01-25T06:06:43.824339
| 2018-10-16T14:27:01
| 2018-10-16T14:27:01
| 93,526,515
| 0
| 0
| null | 2018-07-12T19:07:35
| 2017-06-06T14:17:47
|
Python
|
UTF-8
|
Python
| false
| false
| 7,212
|
py
|
# Author: Abel Gonzalez
# Date: 06/26/18
#
# Description:
# This program uses the .shp file to create a network graph where each node
# represents a census tract and each edge represents adjacency between
# tracts, using graph-tool instead of networkx
import random
import numpy as np
import graph_tool.all as gt
from pathlib import Path
def create_graph_views(district_total_no):
graph_views = list()
for i in range(district_total_no):
main_graph_view = gt.GraphView(graph)
graph_view_check = main_graph_view.new_vertex_property("bool")
matched_vertices = gt.find_vertex(graph, district_no, i)
for j in matched_vertices:
graph_view_check[j] = True
graph_view = gt.GraphView(main_graph_view, vfilt=graph_view_check)
graph_views.append(graph_view)
return graph_views
def turn_off_edges(districts_graphs):
turned_off_graphs = list()
    # Iterate through districts and select random edges in each
for district in range(len(districts_graphs)):
to_delete = districts_graphs[district].new_edge_property('bool')
edges = districts_graphs[district].get_edges()
        selected = edges[np.random.randint(edges.shape[0], size=int(len(edges) / 3.5)), :]  # size must be an int
for i in selected:
to_delete[i] = True
turned_off_graphs.append(gt.GraphView(districts_graphs[district], efilt=to_delete))
return turned_off_graphs
def get_cp_boundaries(graph, turned_on_graphs):
cp_boundary = list()
for g in range(len(turned_on_graphs)):
cp_label, hist = gt.label_components(turned_on_graphs[g])
labels = set(cp_label.a)
for l in labels:
cp = gt.find_vertex(turned_on_graphs[g], cp_label, l)
label_boun = 0
for v in cp:
vertex_bound = False
for n in graph.vertex(v).all_neighbors():
for g_two in range(len(turned_on_graphs)):
if g == g_two:
continue
try:
turned_on_graphs[g_two].vertex(n)
except ValueError:
continue
else:
vertex_bound = True
break
if vertex_bound == True:
label_boun += 1
break
if label_boun == len(cp):
cp_boundary.append(cp)
return cp_boundary
def get_non_adjacent_v(labels_in_boundaries, graph):
list_to_swap = random.sample(labels_in_boundaries, random.randint(2,len(labels_in_boundaries)//2))
index_to_del = list()
for l in range(len(list_to_swap)):
for v in range(len(list_to_swap[l])):
for l_two in range(len(list_to_swap)):
if l == l_two:
continue
for v_two in range(len(list_to_swap[l_two])):
if len(gt.shortest_path(graph, graph.vertex(list_to_swap[l][v]), graph.vertex(list_to_swap[l_two][v_two]))[0]) < 3:
index_to_del.append(l)
for i in range(len(list_to_swap)):
if i in index_to_del:
try:
del list_to_swap[i]
except IndexError:
print("Empty, Reapeating")
get_non_adjacent_v(labels_in_boundaries, graph)
return list_to_swap
def gather_districts_data(districts_graphs):
for i in range(len(districts_graphs)):
population = districts_graphs[i].new_graph_property('int')
districts_graphs[i].graph_properties["pop"] = population
districts_graphs[i].graph_properties["pop"] = 0
dem_vote = districts_graphs[i].new_graph_property('int')
districts_graphs[i].graph_properties["dem_vote"] = dem_vote
districts_graphs[i].graph_properties["dem_vote"] = 0
rep_vote = districts_graphs[i].new_graph_property('int')
districts_graphs[i].graph_properties["rep_vote"] = rep_vote
districts_graphs[i].graph_properties["rep_vote"] = 0
for v in districts_graphs[i].vertices():
districts_graphs[i].graph_properties["pop"] += graph.vp.data[v]["PERSONS"]
districts_graphs[i].graph_properties["dem_vote"] += graph.vp.data[v]["CONDEM14"]
districts_graphs[i].graph_properties["rep_vote"] += graph.vp.data[v]["CONREP14"]
print(districts_graphs[i].graph_properties["dem_vote"])
print(districts_graphs[i].graph_properties["rep_vote"])
def random_color():
    return list(np.random.choice(range(256), size=3)) + [1]  # RGB components plus an alpha of 1
def adjust_color(districts_graphs, color, ring_color):
for i in range(len(districts_graphs)):
ring_color_to = random_color()
print(ring_color_to)
if districts_graphs[i].graph_properties["dem_vote"] > districts_graphs[i].graph_properties["rep_vote"]:
color_ = (0,0,255,1)
else:
color_ = (255,0,0,1)
for v in districts_graphs[i].vertices():
color[v] = color_
ring_color[v] = ring_color_to
return color, ring_color
# Paths
main_folder = Path("abel-network-files/")
data_folder = Path("abel-network-files/data/")
images_folder = Path("abel-network-files/images/")
# Loading the previous created Graph and creating the prop maps
graph = gt.load_graph(str(data_folder / "tmp_graph.gt"))
color = graph.new_vertex_property("vector<double>")
ring_color = graph.new_vertex_property("vector<double>")
cp_label = graph.new_vertex_property("int")
# Init variables
district_total_no = 2
gt.graph_draw(graph, pos=graph.vp.pos,
output=str(main_folder / ('tmp.png')),
bg_color=(255, 255, 255, 1), vertex_text=graph.vertex_index,
vertex_fill_color=color, vertex_color = ring_color)
# Separates graph into blocks
districts = gt.minimize_blockmodel_dl(graph, district_total_no, district_total_no)
district_no = districts.get_blocks()
districts.draw(output='tmp.png', vertex_text=graph.vertex_index)
# Create the different graphs
districts_graphs = create_graph_views(district_total_no)
for i in range(len(districts_graphs)):
gt.graph_draw(
districts_graphs[i], pos=graph.vp.pos,
output=str(main_folder / ('tmp'+str(i)+'.png')),
bg_color=(255, 255, 255, 1))
turned_on_graphs = turn_off_edges(districts_graphs)
for i in range(len(districts_graphs)):
gt.graph_draw(
turned_on_graphs[i], pos=graph.vp.pos,bg_color=(255,255,255,1),vertex_size=2,
output=str(main_folder / ('tmp1'+str(i)+'.png')), vertex_text=graph.vertex_index)
labels_in_boundaries = get_cp_boundaries(graph, turned_on_graphs)
selected_vertices = get_non_adjacent_v(labels_in_boundaries, graph)
gather_districts_data(districts_graphs)
color, ring_color = adjust_color(districts_graphs, color, ring_color)
print(color[2])
print(ring_color[2])
print(ring_color[17])
gt.graph_draw(graph, pos=graph.vp.pos,
output=str(main_folder / ('tmp.png')),
bg_color=(255, 255, 255, 1), vertex_text=graph.vertex_index,vertex_color = ring_color, vertex_fill_color=color)
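# For reproducible runs, a sketch (the seed values are arbitrary examples):
# random.seed(0)
# np.random.seed(0)
# would pin both samplers used above (random.sample and np.random.randint).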
|
[
"gonzaleza@ripon.edu"
] |
gonzaleza@ripon.edu
|
cb8a337f99df522db71f2b13b2ef15b38319466d
|
d7753137a13c068cb0484bdc9a8237a36378db1b
|
/lintcode/array/search_in_rorated_sorted_array.py
|
52d5fbb376999b01e306bea3efa03460129f419d
|
[] |
no_license
|
alexkie007/offer
|
740b1e41b9d87de3b31df961c33371a5e3430133
|
85ceaf8f3da0efd66b4394ef16669ea673218265
|
refs/heads/master
| 2021-04-12T11:56:49.779558
| 2018-11-03T05:26:51
| 2018-11-03T05:26:51
| 126,163,525
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 928
|
py
|
class Solution:
@staticmethod
def search_in_rorated_sorted_array(nums, target):
if len(nums) < 1:
return -1
start = 0
end = len(nums) - 1
while start + 1 < end:
mid = start + (end - start) // 2
if nums[mid] == target:
return mid
if nums[mid] > nums[start]:
if nums[mid] >= target >= nums[start]:
end = mid
else:
start = mid
else:
if nums[mid] <= target <= nums[end]:
start = mid
else:
end = mid
if nums[start] == target:
return start
if nums[end] == target:
return end
return -1
s = Solution()
print(s.search_in_rorated_sorted_array([4, 5, 6, 1, 2, 3], 1))
print(s.search_in_rorated_sorted_array([4, 5, 1, 2, 3], 0))
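# Additional illustrative checks (expected results shown in the comments):
print(s.search_in_rorated_sorted_array([6, 7, 1, 2, 3, 4, 5], 6))  # 0
print(s.search_in_rorated_sorted_array([1], 1))  # 0
print(s.search_in_rorated_sorted_array([], 1))  # -1 for empty input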
|
[
"alexkie@yeah.net"
] |
alexkie@yeah.net
|
ea4ee80048587bafcca6e3c883d30179a89772a6
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/eAnhzXPeGbobqk2P2_10.py
|
f3ba2d9fb2d0797b24386f1f2080ef7df5d01f84
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,152
|
py
|
"""
Create a function that takes in a year and returns the correct century.
### Examples
century(1756) ➞ "18th century"
century(1555) ➞ "16th century"
century(1000) ➞ "10th century"
century(1001) ➞ "11th century"
century(2005) ➞ "21st century"
### Notes
* All years will be between `1000` and `2010`.
* The 11th century is between 1001 and 1100.
* The 18th century is between 1701 and 1800.
"""
def century(year):
if year==1000:
return "10th century"
if year in range(1001,1101):
return "11th century"
if year in range(1101,1201):
return "12th century"
if year in range(1201,1301):
return "13th century"
if year in range(1301,1401):
return "14th century"
if year in range(1401,1501):
return "15th century"
if year in range(1501,1601):
return "16th century"
if year in range(1601,1701):
return "17th century"
if year in range(1701,1801):
return "18th century"
if year in range(1801,1901):
return "19th century"
if year in range(1901,2001):
return "20th century"
    if year in range(2001,2011):
return "21st century"
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
810266a72fa8a60275fcde3efd5f40941c1a6a9b
|
779a603f16a13c3cfc14f9923fae00b95430d041
|
/sentry.conf.py
|
4ff55b11ffacaa19364dcea51db814a33f8f66c2
|
[] |
no_license
|
ImmaculateObsession/sentry-server
|
ee4b8e3b54e8b220efd479ba74486891cbbd68bd
|
f23a7098565d166200e2ee90b5db12555fff31ea
|
refs/heads/master
| 2021-01-25T04:01:57.710957
| 2013-10-12T01:13:41
| 2013-10-12T01:13:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,749
|
py
|
# This file is just Python, with a touch of Django, which means
# you can inherit and tweak settings to your heart's content.
from sentry.conf.server import *
import os
from django.core.exceptions import ImproperlyConfigured
from unipath import Path
def get_env_variable(var_name):
""" Get the environment variable or return exception """
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set the %s env variable" % var_name
raise ImproperlyConfigured(error_msg)
CONF_ROOT = os.path.dirname(__file__)
DATABASES = {
'default': {
# You can swap out the engine for MySQL easily by changing this value
# to ``django.db.backends.mysql`` or to PostgreSQL with
# ``django.db.backends.postgresql_psycopg2``
# If you change this, you'll also need to install the appropriate python
# package: psycopg2 (Postgres) or mysql-python
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(CONF_ROOT, 'sentry.db'),
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
# If you're using Postgres, we recommend turning on autocommit
# 'OPTIONS': {
# 'autocommit': True,
# }
}
}
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
# If you're expecting any kind of real traffic on Sentry, we highly recommend
# configuring the CACHES and Redis settings
###########
## CACHE ##
###########
# You'll need to install the required dependencies for Memcached:
# pip install python-memcached
#
# CACHES = {
# 'default': {
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'LOCATION': ['127.0.0.1:11211'],
# }
# }
###########
## Queue ##
###########
# See http://sentry.readthedocs.org/en/latest/queue/index.html for more
# information on configuring your queue broker and workers. Sentry relies
# on a Python framework called Celery to manage queues.
# You can enable queueing of jobs by turning off the always eager setting:
# CELERY_ALWAYS_EAGER = False
# BROKER_URL = 'redis://localhost:6379'
####################
## Update Buffers ##
####################
# Buffers (combined with queueing) act as an intermediate layer between the
# database and the storage API. They will greatly improve efficiency on large
# numbers of the same events being sent to the API in a short amount of time.
# (read: if you send any kind of real data to Sentry, you should enable buffers)
# You'll need to install the required dependencies for Redis buffers:
# pip install redis hiredis nydus
#
# SENTRY_BUFFER = 'sentry.buffer.redis.RedisBuffer'
# SENTRY_REDIS_OPTIONS = {
# 'hosts': {
# 0: {
# 'host': '127.0.0.1',
# 'port': 6379,
# }
# }
# }
################
## Web Server ##
################
# You MUST configure the absolute URI root for Sentry:
SENTRY_URL_PREFIX = 'http://sentry.example.com' # No trailing slash!
# If you're using a reverse proxy, you should enable the X-Forwarded-Proto
# header, and uncomment the following setting
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SENTRY_WEB_HOST = '0.0.0.0'
SENTRY_WEB_PORT = 9000
SENTRY_WEB_OPTIONS = {
'workers': 3, # the number of gunicorn workers
'secure_scheme_headers': {'X-FORWARDED-PROTO': 'https'},
}
#################
## Mail Server ##
#################
# For more information check Django's documentation:
# https://docs.djangoproject.com/en/1.3/topics/email/?from=olddocs#e-mail-backends
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'localhost'
EMAIL_HOST_PASSWORD = ''
EMAIL_HOST_USER = ''
EMAIL_PORT = 25
EMAIL_USE_TLS = False
# The email address to send on behalf of
SERVER_EMAIL = 'root@localhost'
###########
## etc. ##
###########
# If this file ever becomes compromised, it's important to regenerate your SECRET_KEY
# Changing this value will result in all current sessions being invalidated
SECRET_KEY = get_env_variable('SENTRY_KEY')
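# The variable can be set in the shell before starting Sentry, for example
# (illustrative value only): export SENTRY_KEY='change-me'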
# http://twitter.com/apps/new
# It's important that you input a callback URL, even if it's useless. We have no idea why; consult Twitter.
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET = ''
# http://developers.facebook.com/setup/
FACEBOOK_APP_ID = ''
FACEBOOK_API_SECRET = ''
# http://code.google.com/apis/accounts/docs/OAuth2.html#Registering
GOOGLE_OAUTH2_CLIENT_ID = ''
GOOGLE_OAUTH2_CLIENT_SECRET = ''
# https://github.com/settings/applications/new
GITHUB_APP_ID = ''
GITHUB_API_SECRET = ''
# https://trello.com/1/appKey/generate
TRELLO_API_KEY = ''
TRELLO_API_SECRET = ''
# https://confluence.atlassian.com/display/BITBUCKET/OAuth+Consumers
BITBUCKET_CONSUMER_KEY = ''
BITBUCKET_CONSUMER_SECRET = ''
|
[
"pjj@philipjohnjames.com"
] |
pjj@philipjohnjames.com
|
13faf6f38fe17b5382a1c81a9664af97121e9db8
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/homeassistant/components/tod/const.py
|
3b6f8c23e17435323c275b6e9c860f138def77d0
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 207
|
py
|
"""Constants for the Times of the Day integration."""
DOMAIN = "tod"
CONF_AFTER_TIME = "after_time"
CONF_AFTER_OFFSET = "after_offset"
CONF_BEFORE_TIME = "before_time"
CONF_BEFORE_OFFSET = "before_offset"
|
[
"noreply@github.com"
] |
home-assistant.noreply@github.com
|
b3018f21a87b0c01ed7dde5c583582ff0924abff
|
a4deea660ea0616f3b5ee0b8bded03373c5bbfa2
|
/concrete_instances/register-variants/mulq_r64/instructions/mulq_r64/mulq_r64.gen.vex.py
|
f053dc5ee41260d5e4a5978c50a17d898231afb4
|
[] |
no_license
|
Vsevolod-Livinskij/x86-64-instruction-summary
|
4a43472e26f0e4ec130be9a82f7e3f3c1361ccfd
|
c276edab1b19e3929efb3ebe7514489f66087764
|
refs/heads/master
| 2022-02-02T18:11:07.818345
| 2019-01-25T17:19:21
| 2019-01-25T17:19:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 174
|
py
|
import angr
proj = angr.Project('./instructions/mulq_r64/mulq_r64.o')
print proj.arch
print proj.entry
print proj.filename
irsb = proj.factory.block(proj.entry).vex
irsb.pp()
|
[
"sdasgup3@illinois.edu"
] |
sdasgup3@illinois.edu
|
8951b1f773c495a07ac3b2745594420b096337f6
|
2ff83d7af0bcbc5822593d826b0c3276346d1276
|
/transformers_local_rep/src/transformers/models/dpr/tokenization_dpr.py
|
f0d1118eee35b0ced0d7fd6799327472fe7337ad
|
[] |
no_license
|
mauricerupp/PolitBERT
|
43af66f5562bb5c5cf965aa99bb065d1c22f4fae
|
a8c4eb517eb38cb51101fc87780ed1de182560c8
|
refs/heads/master
| 2023-06-17T03:13:43.070682
| 2021-07-15T15:15:30
| 2021-07-15T15:15:30
| 386,334,080
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,919
|
py
|
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team, The Hugging Face Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes for DPR."""
import collections
from typing import List, Optional, Union
from ...file_utils import add_end_docstrings, add_start_docstrings
from ...tokenization_utils_base import BatchEncoding, TensorType
from ...utils import logging
from ..bert.tokenization_bert import BertTokenizer
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt", "tokenizer_file": "tokenizer.json"}
CONTEXT_ENCODER_PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"facebook/dpr-ctx_encoder-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
"facebook/dpr-ctx_encoder-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
},
"tokenizer_file": {
"facebook/dpr-ctx_encoder-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
"facebook/dpr-ctx_encoder-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
},
}
QUESTION_ENCODER_PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"facebook/dpr-question_encoder-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
"facebook/dpr-question_encoder-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
},
"tokenizer_file": {
"facebook/dpr-question_encoder-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
"facebook/dpr-question_encoder-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
},
}
READER_PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"facebook/dpr-reader-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
"facebook/dpr-reader-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/vocab.txt",
},
"tokenizer_file": {
"facebook/dpr-reader-single-nq-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
"facebook/dpr-reader-multiset-base": "https://huggingface.co/bert-base-uncased/resolve/main/tokenizer.json",
},
}
CONTEXT_ENCODER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"facebook/dpr-ctx_encoder-single-nq-base": 512,
"facebook/dpr-ctx_encoder-multiset-base": 512,
}
QUESTION_ENCODER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"facebook/dpr-question_encoder-single-nq-base": 512,
"facebook/dpr-question_encoder-multiset-base": 512,
}
READER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"facebook/dpr-reader-single-nq-base": 512,
"facebook/dpr-reader-multiset-base": 512,
}
CONTEXT_ENCODER_PRETRAINED_INIT_CONFIGURATION = {
"facebook/dpr-ctx_encoder-single-nq-base": {"do_lower_case": True},
"facebook/dpr-ctx_encoder-multiset-base": {"do_lower_case": True},
}
QUESTION_ENCODER_PRETRAINED_INIT_CONFIGURATION = {
"facebook/dpr-question_encoder-single-nq-base": {"do_lower_case": True},
"facebook/dpr-question_encoder-multiset-base": {"do_lower_case": True},
}
READER_PRETRAINED_INIT_CONFIGURATION = {
"facebook/dpr-reader-single-nq-base": {"do_lower_case": True},
"facebook/dpr-reader-multiset-base": {"do_lower_case": True},
}
class DPRContextEncoderTokenizer(BertTokenizer):
r"""
Construct a DPRContextEncoder tokenizer.
:class:`~transformers_local.DPRContextEncoderTokenizer` is identical to :class:`~transformers_local.BertTokenizer` and runs
end-to-end tokenization: punctuation splitting and wordpiece.
Refer to superclass :class:`~transformers_local.BertTokenizer` for usage examples and documentation concerning
parameters.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = CONTEXT_ENCODER_PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = CONTEXT_ENCODER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_init_configuration = CONTEXT_ENCODER_PRETRAINED_INIT_CONFIGURATION
class DPRQuestionEncoderTokenizer(BertTokenizer):
r"""
Constructs a DPRQuestionEncoder tokenizer.
:class:`~transformers_local.DPRQuestionEncoderTokenizer` is identical to :class:`~transformers_local.BertTokenizer` and runs
end-to-end tokenization: punctuation splitting and wordpiece.
Refer to superclass :class:`~transformers_local.BertTokenizer` for usage examples and documentation concerning
parameters.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = QUESTION_ENCODER_PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = QUESTION_ENCODER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_init_configuration = QUESTION_ENCODER_PRETRAINED_INIT_CONFIGURATION
DPRSpanPrediction = collections.namedtuple(
"DPRSpanPrediction", ["span_score", "relevance_score", "doc_id", "start_index", "end_index", "text"]
)
DPRReaderOutput = collections.namedtuple("DPRReaderOutput", ["start_logits", "end_logits", "relevance_logits"])
CUSTOM_DPR_READER_DOCSTRING = r"""
Return a dictionary with the token ids of the input strings and other information to give to
:obj:`.decode_best_spans`. It converts the strings of a question and different passages (title and text) in a
sequence of IDs (integers), using the tokenizer and vocabulary. The resulting :obj:`input_ids` is a matrix of size
:obj:`(n_passages, sequence_length)` with the format:
::
[CLS] <question token ids> [SEP] <titles ids> [SEP] <texts ids>
Args:
questions (:obj:`str` or :obj:`List[str]`):
The questions to be encoded. You can specify one question for many passages. In this case, the question
will be duplicated like :obj:`[questions] * n_passages`. Otherwise you have to specify as many questions as
in :obj:`titles` or :obj:`texts`.
titles (:obj:`str` or :obj:`List[str]`):
The passages titles to be encoded. This can be a string or a list of strings if there are several passages.
texts (:obj:`str` or :obj:`List[str]`):
The passages texts to be encoded. This can be a string or a list of strings if there are several passages.
padding (:obj:`bool`, :obj:`str` or :class:`~transformers_local.tokenization_utils_base.PaddingStrategy`, `optional`, defaults to :obj:`False`):
Activates and controls padding. Accepts the following values:
* :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
          sequence is provided).
* :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the
maximum acceptable input length for the model if that argument is not provided.
* :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of
different lengths).
truncation (:obj:`bool`, :obj:`str` or :class:`~transformers_local.tokenization_utils_base.TruncationStrategy`, `optional`, defaults to :obj:`False`):
Activates and controls truncation. Accepts the following values:
* :obj:`True` or :obj:`'longest_first'`: Truncate to a maximum length specified with the argument
:obj:`max_length` or to the maximum acceptable input length for the model if that argument is not
provided. This will truncate token by token, removing a token from the longest sequence in the pair if a
pair of sequences (or a batch of pairs) is provided.
* :obj:`'only_first'`: Truncate to a maximum length specified with the argument :obj:`max_length` or to the
maximum acceptable input length for the model if that argument is not provided. This will only truncate
the first sequence of a pair if a pair of sequences (or a batch of pairs) is provided.
* :obj:`'only_second'`: Truncate to a maximum length specified with the argument :obj:`max_length` or to
the maximum acceptable input length for the model if that argument is not provided. This will only
truncate the second sequence of a pair if a pair of sequences (or a batch of pairs) is provided.
* :obj:`False` or :obj:`'do_not_truncate'` (default): No truncation (i.e., can output batch with sequence
lengths greater than the model maximum admissible input size).
max_length (:obj:`int`, `optional`):
Controls the maximum length to use by one of the truncation/padding parameters.
If left unset or set to :obj:`None`, this will use the predefined model maximum length if a maximum
length is required by one of the truncation/padding parameters. If the model has no specific maximum
input length (like XLNet) truncation/padding to a maximum length will be deactivated.
return_tensors (:obj:`str` or :class:`~transformers_local.tokenization_utils_base.TensorType`, `optional`):
If set, will return tensors instead of list of python integers. Acceptable values are:
* :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects.
* :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects.
* :obj:`'np'`: Return Numpy :obj:`np.ndarray` objects.
return_attention_mask (:obj:`bool`, `optional`):
Whether or not to return the attention mask. If not set, will return the attention mask according to the
specific tokenizer's default, defined by the :obj:`return_outputs` attribute.
`What are attention masks? <../glossary.html#attention-mask>`__
Returns:
:obj:`Dict[str, List[List[int]]]`: A dictionary with the following keys:
- ``input_ids``: List of token ids to be fed to a model.
- ``attention_mask``: List of indices specifying which tokens should be attended to by the model.
"""
@add_start_docstrings(CUSTOM_DPR_READER_DOCSTRING)
class CustomDPRReaderTokenizerMixin:
def __call__(
self,
questions,
titles: Optional[str] = None,
texts: Optional[str] = None,
padding: Union[bool, str] = False,
truncation: Union[bool, str] = False,
max_length: Optional[int] = None,
return_tensors: Optional[Union[str, TensorType]] = None,
return_attention_mask: Optional[bool] = None,
**kwargs
) -> BatchEncoding:
if titles is None and texts is None:
return super().__call__(
questions,
padding=padding,
truncation=truncation,
max_length=max_length,
return_tensors=return_tensors,
return_attention_mask=return_attention_mask,
**kwargs,
)
elif titles is None or texts is None:
text_pair = titles if texts is None else texts
return super().__call__(
questions,
text_pair,
padding=padding,
truncation=truncation,
max_length=max_length,
return_tensors=return_tensors,
return_attention_mask=return_attention_mask,
**kwargs,
)
titles = titles if not isinstance(titles, str) else [titles]
texts = texts if not isinstance(texts, str) else [texts]
n_passages = len(titles)
questions = questions if not isinstance(questions, str) else [questions] * n_passages
assert len(titles) == len(
texts
), "There should be as many titles than texts but got {} titles and {} texts.".format(len(titles), len(texts))
encoded_question_and_titles = super().__call__(questions, titles, padding=False, truncation=False)["input_ids"]
encoded_texts = super().__call__(texts, add_special_tokens=False, padding=False, truncation=False)["input_ids"]
encoded_inputs = {
"input_ids": [
(encoded_question_and_title + encoded_text)[:max_length]
if max_length is not None and truncation
else encoded_question_and_title + encoded_text
for encoded_question_and_title, encoded_text in zip(encoded_question_and_titles, encoded_texts)
]
}
if return_attention_mask is not False:
attention_mask = []
for input_ids in encoded_inputs["input_ids"]:
attention_mask.append([int(input_id != self.pad_token_id) for input_id in input_ids])
encoded_inputs["attention_mask"] = attention_mask
return self.pad(encoded_inputs, padding=padding, max_length=max_length, return_tensors=return_tensors)
def decode_best_spans(
self,
reader_input: BatchEncoding,
reader_output: DPRReaderOutput,
num_spans: int = 16,
max_answer_length: int = 64,
num_spans_per_passage: int = 4,
) -> List[DPRSpanPrediction]:
"""
Get the span predictions for the extractive Q&A model.
Returns: `List` of `DPRReaderOutput` sorted by descending `(relevance_score, span_score)`. Each
`DPRReaderOutput` is a `Tuple` with:
- **span_score**: ``float`` that corresponds to the score given by the reader for this span compared to
other spans in the same passage. It corresponds to the sum of the start and end logits of the span.
        - **relevance_score**: ``float`` that corresponds to the score of each passage to answer the question,
compared to all the other passages. It corresponds to the output of the QA classifier of the DPRReader.
        - **doc_id**: ``int`` the id of the passage.
- **start_index**: ``int`` the start index of the span (inclusive).
- **end_index**: ``int`` the end index of the span (inclusive).
Examples::
>>> from transformers_local import DPRReader, DPRReaderTokenizer
>>> tokenizer = DPRReaderTokenizer.from_pretrained('facebook/dpr-reader-single-nq-base')
>>> model = DPRReader.from_pretrained('facebook/dpr-reader-single-nq-base')
>>> encoded_inputs = tokenizer(
... questions=["What is love ?"],
... titles=["Haddaway"],
... texts=["'What Is Love' is a song recorded by the artist Haddaway"],
... return_tensors='pt'
... )
>>> outputs = model(**encoded_inputs)
>>> predicted_spans = tokenizer.decode_best_spans(encoded_inputs, outputs)
>>> print(predicted_spans[0].text) # best span
"""
input_ids = reader_input["input_ids"]
start_logits, end_logits, relevance_logits = reader_output[:3]
n_passages = len(relevance_logits)
sorted_docs = sorted(range(n_passages), reverse=True, key=relevance_logits.__getitem__)
nbest_spans_predictions: List[DPRReaderOutput] = []
for doc_id in sorted_docs:
sequence_ids = list(input_ids[doc_id])
# assuming question & title information is at the beginning of the sequence
passage_offset = sequence_ids.index(self.sep_token_id, 2) + 1 # second sep id
if sequence_ids[-1] == self.pad_token_id:
sequence_len = sequence_ids.index(self.pad_token_id)
else:
sequence_len = len(sequence_ids)
best_spans = self._get_best_spans(
start_logits=start_logits[doc_id][passage_offset:sequence_len],
end_logits=end_logits[doc_id][passage_offset:sequence_len],
max_answer_length=max_answer_length,
top_spans=num_spans_per_passage,
)
for start_index, end_index in best_spans:
start_index += passage_offset
end_index += passage_offset
nbest_spans_predictions.append(
DPRSpanPrediction(
span_score=start_logits[doc_id][start_index] + end_logits[doc_id][end_index],
relevance_score=relevance_logits[doc_id],
doc_id=doc_id,
start_index=start_index,
end_index=end_index,
text=self.decode(sequence_ids[start_index : end_index + 1]),
)
)
if len(nbest_spans_predictions) >= num_spans:
break
return nbest_spans_predictions[:num_spans]
def _get_best_spans(
self,
start_logits: List[int],
end_logits: List[int],
max_answer_length: int,
top_spans: int,
) -> List[DPRSpanPrediction]:
"""
Finds the best answer span for the extractive Q&A model for one passage. It returns the best span by descending
        `span_score` order and keeping max `top_spans` spans. Spans longer than `max_answer_length` are ignored.
"""
scores = []
for (start_index, start_score) in enumerate(start_logits):
for (answer_length, end_score) in enumerate(end_logits[start_index : start_index + max_answer_length]):
scores.append(((start_index, start_index + answer_length), start_score + end_score))
scores = sorted(scores, key=lambda x: x[1], reverse=True)
chosen_span_intervals = []
for (start_index, end_index), score in scores:
assert start_index <= end_index, "Wrong span indices: [{}:{}]".format(start_index, end_index)
length = end_index - start_index + 1
assert length <= max_answer_length, "Span is too long: {} > {}".format(length, max_answer_length)
if any(
[
start_index <= prev_start_index <= prev_end_index <= end_index
or prev_start_index <= start_index <= end_index <= prev_end_index
for (prev_start_index, prev_end_index) in chosen_span_intervals
]
):
continue
chosen_span_intervals.append((start_index, end_index))
if len(chosen_span_intervals) == top_spans:
break
return chosen_span_intervals
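# Illustrative trace of the span scoring above (toy numbers, not real logits):
# with start_logits = [0.2, 1.5], end_logits = [0.1, 0.9], max_answer_length = 2,
# the candidate spans score (0, 0) -> 0.3, (0, 1) -> 1.1, (1, 1) -> 2.4, so the
# descending sort considers (1, 1) first.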
@add_end_docstrings(CUSTOM_DPR_READER_DOCSTRING)
class DPRReaderTokenizer(CustomDPRReaderTokenizerMixin, BertTokenizer):
r"""
Construct a DPRReader tokenizer.
:class:`~transformers_local.DPRReaderTokenizer` is almost identical to :class:`~transformers_local.BertTokenizer` and runs
    end-to-end tokenization: punctuation splitting and wordpiece. The difference is that it has three input strings:
question, titles and texts that are combined to be fed to the :class:`~transformers_local.DPRReader` model.
Refer to superclass :class:`~transformers_local.BertTokenizer` for usage examples and documentation concerning
parameters.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = READER_PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = READER_PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_init_configuration = READER_PRETRAINED_INIT_CONFIGURATION
model_input_names = ["input_ids", "attention_mask"]
|
[
"maurice.rupp@gmail.com"
] |
maurice.rupp@gmail.com
|
30844235afba20a859c1af14b83712062e315731
|
b22588340d7925b614a735bbbde1b351ad657ffc
|
/athena/Control/AthenaExamples/AthExThinning/share/ReadNonSlimmedData_jobOptions.py
|
38f99092450c5b1a4e3e2f5c68191d7da6279a5f
|
[] |
no_license
|
rushioda/PIXELVALID_athena
|
90befe12042c1249cbb3655dde1428bb9b9a42ce
|
22df23187ef85e9c3120122c8375ea0e7d8ea440
|
refs/heads/master
| 2020-12-14T22:01:15.365949
| 2020-01-19T03:59:35
| 2020-01-19T03:59:35
| 234,836,993
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,756
|
py
|
###############################################################
#
# Job options file
#
#==============================================================
#
from AthenaCommon.AppMgr import ServiceMgr as svcMgr
from AthenaCommon.Constants import VERBOSE,DEBUG,INFO,WARNING,ERROR
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
# Number of events to be processed
if 'EVTMAX' not in dir() :
EVTMAX = -1
pass
theApp.EvtMax = EVTMAX
#--------------------------------------------------------------
# Load POOL support
#--------------------------------------------------------------
import AthenaPoolCnvSvc.ReadAthenaPool
if 'INPUT' not in dir():
INPUT = ["non.slimmed.my.data.pool"]
svcMgr.EventSelector.InputCollections = INPUT
svcMgr.PoolSvc.ReadCatalog = [ 'xmlcatalog_file:PoolFileCatalog.xml' ]
svcMgr.PoolSvc.WriteCatalog = 'xmlcatalog_file:PoolFileCatalog.xml'
#--------------------------------------------------------------
# Private Application Configuration options
#--------------------------------------------------------------
#####################################################
# read back AthExFatObject
#####################################################
from AthExThinning.Lib import PyReadFatObject
topSequence += PyReadFatObject(
"ReadFatObject",
particles = "Particles_test1",
fatobject = "FatObject_test1",
OutputLevel = INFO
)
#--------------------------------------------------------------
# POOL Persistency
#--------------------------------------------------------------
import AthenaPoolCnvSvc.WriteAthenaPool as wap
outStream = wap.AthenaPoolOutputStream("StreamUSR")
if 'OUTPUT' not in dir():
OUTPUT = "reaccessed.%s" % INPUT[0]
svcMgr.PoolSvc.CheckDictionary = True
# Stream's output file
outStream.OutputFile = OUTPUT
# Event Info
outStream.ItemList = [
"EventInfo#*",
"AthExParticles#*",
"AthExDecay#*",
"AthExElephantino#*",
"AthExFatObject#*",
]
svcMgr.AthenaPoolCnvSvc.CommitInterval = 10
##############################################################
#
# Customise OutputLevel
#
##############################################################
# OUTPUT PRINTOUT LEVEL
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
svcMgr.MessageSvc.useColors = False
svcMgr.MessageSvc.defaultLimit = 4000000
svcMgr.MessageSvc.OutputLevel = ERROR
#==============================================================
#
# End of job options file
#
###############################################################
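# A typical invocation sketch (assuming an ATLAS Athena runtime environment;
# the pre-commands below are illustrative):
# athena.py -c 'EVTMAX=10; INPUT=["non.slimmed.my.data.pool"]' ReadNonSlimmedData_jobOptions.py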
|
[
"rushioda@lxplus754.cern.ch"
] |
rushioda@lxplus754.cern.ch
|
4426013e7d6c0671251a8c06ba7d5fc7a20944c8
|
80b7f2a10506f70477d8720e229d7530da2eff5d
|
/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/fcoeclientfdiscrange_7e4a81c3388682c456129454127b408c.py
|
7b59fcd8d7b61898e28f0369178ff573d1c324b5
|
[
"MIT"
] |
permissive
|
OpenIxia/ixnetwork_restpy
|
00fdc305901aa7e4b26e4000b133655e2d0e346a
|
c8ecc779421bffbc27c906c1ea51af3756d83398
|
refs/heads/master
| 2023-08-10T02:21:38.207252
| 2023-07-19T14:14:57
| 2023-07-19T14:14:57
| 174,170,555
| 26
| 16
|
MIT
| 2023-02-02T07:02:43
| 2019-03-06T15:27:20
|
Python
|
UTF-8
|
Python
| false
| false
| 85,902
|
py
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class FcoeClientFdiscRange(Base):
"""The configuration parameters for a range of FDISC VN_Ports.
The FcoeClientFdiscRange class encapsulates a required fcoeClientFdiscRange resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = "fcoeClientFdiscRange"
_SDM_ATT_MAP = {
"Count": "count",
"Enabled": "enabled",
"Name": "name",
"NameServerCommands": "nameServerCommands",
"NameServerQuery": "nameServerQuery",
"NameServerQueryCommand": "nameServerQueryCommand",
"NameServerQueryParameterType": "nameServerQueryParameterType",
"NameServerQueryParameterValue": "nameServerQueryParameterValue",
"NameServerRegistration": "nameServerRegistration",
"NodeWwnIncrement": "nodeWwnIncrement",
"NodeWwnStart": "nodeWwnStart",
"ObjectId": "objectId",
"OverrideNodeWwn": "overrideNodeWwn",
"PlogiDestId": "plogiDestId",
"PlogiEnabled": "plogiEnabled",
"PlogiMeshMode": "plogiMeshMode",
"PlogiTargetName": "plogiTargetName",
"PortWwnIncrement": "portWwnIncrement",
"PortWwnStart": "portWwnStart",
"PrliEnabled": "prliEnabled",
"SourceOui": "sourceOui",
"SourceOuiIncrement": "sourceOuiIncrement",
"StateChangeRegistration": "stateChangeRegistration",
"StateChangeRegistrationOption": "stateChangeRegistrationOption",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
super(FcoeClientFdiscRange, self).__init__(parent, list_op)
@property
def Count(self):
# type: () -> int
"""
Returns
-------
- number: The number of FDISC VN_Ports per FLOGI VN_Port to be created by this VN_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["Count"])
@Count.setter
def Count(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["Count"], value)
@property
def Enabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Disabled ranges won't be configured nor validated.
"""
return self._get_attribute(self._SDM_ATT_MAP["Enabled"])
@Enabled.setter
def Enabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Enabled"], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of range
"""
return self._get_attribute(self._SDM_ATT_MAP["Name"])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["Name"], value)
@property
def NameServerCommands(self):
# type: () -> List[int]
"""
Returns
-------
- list(number): Signifies the Name Server Commands that will be sent by the client.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerCommands"])
@NameServerCommands.setter
def NameServerCommands(self, value):
# type: (List[int]) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerCommands"], value)
@property
def NameServerQuery(self):
# type: () -> bool
"""
Returns
-------
- bool: If set, the N_Port will attempt to query a Name Server.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerQuery"])
@NameServerQuery.setter
def NameServerQuery(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerQuery"], value)
@property
def NameServerQueryCommand(self):
# type: () -> str
"""
Returns
-------
- str: Name Server request command codes.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerQueryCommand"])
@NameServerQueryCommand.setter
def NameServerQueryCommand(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerQueryCommand"], value)
@property
def NameServerQueryParameterType(self):
# type: () -> str
"""
Returns
-------
- str: State Change Registration Parameter Type.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerQueryParameterType"])
@NameServerQueryParameterType.setter
def NameServerQueryParameterType(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerQueryParameterType"], value)
@property
def NameServerQueryParameterValue(self):
# type: () -> str
"""
Returns
-------
- str: State Change Registration Parameter Value.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerQueryParameterValue"])
@NameServerQueryParameterValue.setter
def NameServerQueryParameterValue(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerQueryParameterValue"], value)
@property
def NameServerRegistration(self):
# type: () -> bool
"""
Returns
-------
- bool: If set, the N_Port will attempt to register to a Name Server.
"""
return self._get_attribute(self._SDM_ATT_MAP["NameServerRegistration"])
@NameServerRegistration.setter
def NameServerRegistration(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["NameServerRegistration"], value)
@property
def NodeWwnIncrement(self):
# type: () -> str
"""
Returns
-------
- str: The Node Name incrementing value for this N_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["NodeWwnIncrement"])
@NodeWwnIncrement.setter
def NodeWwnIncrement(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["NodeWwnIncrement"], value)
@property
def NodeWwnStart(self):
# type: () -> str
"""
Returns
-------
- str: The Node Name starting value for this N_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["NodeWwnStart"])
@NodeWwnStart.setter
def NodeWwnStart(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["NodeWwnStart"], value)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP["ObjectId"])
@property
def OverrideNodeWwn(self):
# type: () -> bool
"""
Returns
-------
- bool: If set, the GUI will allow overriding the Node WWN for FDISC VN_Ports.
"""
return self._get_attribute(self._SDM_ATT_MAP["OverrideNodeWwn"])
@OverrideNodeWwn.setter
def OverrideNodeWwn(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["OverrideNodeWwn"], value)
@property
def PlogiDestId(self):
# type: () -> str
"""
Returns
-------
- str: Indicates FCIDs and WWNs that can be used as destination for PLOGI requests.
"""
return self._get_attribute(self._SDM_ATT_MAP["PlogiDestId"])
@PlogiDestId.setter
def PlogiDestId(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PlogiDestId"], value)
@property
def PlogiEnabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Enables these N_Ports to attempt a PLOGI connection with specified destinations.
"""
return self._get_attribute(self._SDM_ATT_MAP["PlogiEnabled"])
@PlogiEnabled.setter
def PlogiEnabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PlogiEnabled"], value)
@property
def PlogiMeshMode(self):
# type: () -> str
"""
Returns
-------
- str: The association mode between PLOGI initiators and targets.
"""
return self._get_attribute(self._SDM_ATT_MAP["PlogiMeshMode"])
@PlogiMeshMode.setter
def PlogiMeshMode(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PlogiMeshMode"], value)
@property
def PlogiTargetName(self):
# type: () -> str
"""
Returns
-------
- str: Indicates the N_Port range used as destination for PLOGI requests.
"""
return self._get_attribute(self._SDM_ATT_MAP["PlogiTargetName"])
@PlogiTargetName.setter
def PlogiTargetName(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PlogiTargetName"], value)
@property
def PortWwnIncrement(self):
# type: () -> str
"""
Returns
-------
- str: The Port Name incrementing value for this N_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["PortWwnIncrement"])
@PortWwnIncrement.setter
def PortWwnIncrement(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PortWwnIncrement"], value)
@property
def PortWwnStart(self):
# type: () -> str
"""
Returns
-------
- str: The Port Name starting value for this N_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["PortWwnStart"])
@PortWwnStart.setter
def PortWwnStart(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PortWwnStart"], value)
@property
def PrliEnabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Process login.
"""
return self._get_attribute(self._SDM_ATT_MAP["PrliEnabled"])
@PrliEnabled.setter
def PrliEnabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PrliEnabled"], value)
@property
def SourceOui(self):
# type: () -> str
"""
Returns
-------
- str: The OUI ID (3-byte) associated to all N_Ports in this range.
"""
return self._get_attribute(self._SDM_ATT_MAP["SourceOui"])
@SourceOui.setter
def SourceOui(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["SourceOui"], value)
@property
def SourceOuiIncrement(self):
# type: () -> str
"""
Returns
-------
- str: The OUI ID incrementing value for this N_Port range.
"""
return self._get_attribute(self._SDM_ATT_MAP["SourceOuiIncrement"])
@SourceOuiIncrement.setter
def SourceOuiIncrement(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["SourceOuiIncrement"], value)
@property
def StateChangeRegistration(self):
# type: () -> bool
"""
Returns
-------
- bool: If set, the N_Port will attempt to subscribe to State Change events.
"""
return self._get_attribute(self._SDM_ATT_MAP["StateChangeRegistration"])
@StateChangeRegistration.setter
def StateChangeRegistration(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["StateChangeRegistration"], value)
@property
def StateChangeRegistrationOption(self):
# type: () -> str
"""
Returns
-------
- str: State Change Registration option.
"""
return self._get_attribute(self._SDM_ATT_MAP["StateChangeRegistrationOption"])
@StateChangeRegistrationOption.setter
def StateChangeRegistrationOption(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["StateChangeRegistrationOption"], value)
def update(
self,
Count=None,
Enabled=None,
Name=None,
NameServerCommands=None,
NameServerQuery=None,
NameServerQueryCommand=None,
NameServerQueryParameterType=None,
NameServerQueryParameterValue=None,
NameServerRegistration=None,
NodeWwnIncrement=None,
NodeWwnStart=None,
OverrideNodeWwn=None,
PlogiDestId=None,
PlogiEnabled=None,
PlogiMeshMode=None,
PlogiTargetName=None,
PortWwnIncrement=None,
PortWwnStart=None,
PrliEnabled=None,
SourceOui=None,
SourceOuiIncrement=None,
StateChangeRegistration=None,
StateChangeRegistrationOption=None,
):
# type: (int, bool, str, List[int], bool, str, str, str, bool, str, str, bool, str, bool, str, str, str, str, bool, str, str, bool, str) -> FcoeClientFdiscRange
"""Updates fcoeClientFdiscRange resource on the server.
Args
----
- Count (number): The number of FDISC VN_Ports per FLOGI VN_Port to be created by this VN_Port range.
- Enabled (bool): Disabled ranges won't be configured nor validated.
- Name (str): Name of range
- NameServerCommands (list(number)): Signifies the Name Server Commands that will be sent by the client.
- NameServerQuery (bool): If set, the N_Port will attempt to query a Name Server.
- NameServerQueryCommand (str): Name Server request command codes.
- NameServerQueryParameterType (str): State Change Registration Parameter Type.
- NameServerQueryParameterValue (str): State Change Registration Parameter Value.
- NameServerRegistration (bool): If set, the N_Port will attempt to register to a Name Server.
- NodeWwnIncrement (str): The Node Name incrementing value for this N_Port range.
- NodeWwnStart (str): The Node Name starting value for this N_Port range.
- OverrideNodeWwn (bool): If set, the GUI will allow overriding the Node WWN for FDISC VN_Ports.
- PlogiDestId (str): Indicates FCIDs and WWNs that can be used as destination for PLOGI requests.
- PlogiEnabled (bool): Enables these N_Ports to attempt a PLOGI connection with specified destinations.
- PlogiMeshMode (str): The association mode between PLOGI initiators and targets.
- PlogiTargetName (str): Indicates the N_Port range used as destination for PLOGI requests.
- PortWwnIncrement (str): The Port Name incrementing value for this N_Port range.
- PortWwnStart (str): The Port Name starting value for this N_Port range.
- PrliEnabled (bool): If set, the N_Port performs PRLI (Process Login).
- SourceOui (str): The OUI ID (3-byte) associated with all N_Ports in this range.
- SourceOuiIncrement (str): The OUI ID incrementing value for this N_Port range.
- StateChangeRegistration (bool): If set, the N_Port will attempt to subscribe to State Change events.
- StateChangeRegistrationOption (str): State Change Registration option.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
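# Example (illustrative): pushing several attribute changes in a single
# update() call instead of one request per property setter. `fdisc` is assumed
# to be a retrieved FcoeClientFdiscRange; the values are placeholders.
#
#     fdisc.update(Enabled=True, Count=4, NameServerRegistration=True)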
def find(
self,
Count=None,
Enabled=None,
Name=None,
NameServerCommands=None,
NameServerQuery=None,
NameServerQueryCommand=None,
NameServerQueryParameterType=None,
NameServerQueryParameterValue=None,
NameServerRegistration=None,
NodeWwnIncrement=None,
NodeWwnStart=None,
ObjectId=None,
OverrideNodeWwn=None,
PlogiDestId=None,
PlogiEnabled=None,
PlogiMeshMode=None,
PlogiTargetName=None,
PortWwnIncrement=None,
PortWwnStart=None,
PrliEnabled=None,
SourceOui=None,
SourceOuiIncrement=None,
StateChangeRegistration=None,
StateChangeRegistrationOption=None,
):
# type: (int, bool, str, List[int], bool, str, str, str, bool, str, str, str, bool, str, bool, str, str, str, str, bool, str, str, bool, str) -> FcoeClientFdiscRange
"""Finds and retrieves fcoeClientFdiscRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve fcoeClientFdiscRange resources from the server.
To retrieve an exact match, ensure the parameter value starts with ^ and ends with $.
By default the find method takes no parameters and will retrieve all fcoeClientFdiscRange resources from the server.
Args
----
- Count (number): The number of FDISC VN_Ports per FLOGI VN_Port to be created by this VN_Port range.
- Enabled (bool): Disabled ranges won't be configured or validated.
- Name (str): Name of the range.
- NameServerCommands (list(number)): Signifies the Name Server Commands that will be sent by the client.
- NameServerQuery (bool): If set, the N_Port will attempt to query a Name Server.
- NameServerQueryCommand (str): Name Server request command codes.
- NameServerQueryParameterType (str): Name Server query parameter type.
- NameServerQueryParameterValue (str): Name Server query parameter value.
- NameServerRegistration (bool): If set, the N_Port will attempt to register to a Name Server.
- NodeWwnIncrement (str): The Node Name incrementing value for this N_Port range.
- NodeWwnStart (str): The Node Name starting value for this N_Port range.
- ObjectId (str): Unique identifier for this object
- OverrideNodeWwn (bool): If set, the GUI will allow overriding the Node WWN for FDISC VN_Ports.
- PlogiDestId (str): Indicates FCIDs and WWNs that can be used as destination for PLOGI requests.
- PlogiEnabled (bool): Enables these N_Ports to attempt a PLOGI connection with specified destinations.
- PlogiMeshMode (str): The association mode between PLOGI initiators and targets.
- PlogiTargetName (str): Indicates the N_Port range used as destination for PLOGI requests.
- PortWwnIncrement (str): The Port Name incrementing value for this N_Port range.
- PortWwnStart (str): The Port Name starting value for this N_Port range.
- PrliEnabled (bool): If set, the N_Port performs PRLI (Process Login).
- SourceOui (str): The OUI ID (3-byte) associated with all N_Ports in this range.
- SourceOuiIncrement (str): The OUI ID incrementing value for this N_Port range.
- StateChangeRegistration (bool): If set, the N_Port will attempt to subscribe to State Change events.
- StateChangeRegistrationOption (str): State Change Registration option.
Returns
-------
- self: This instance with matching fcoeClientFdiscRange resources retrieved from the server, available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
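# Example (illustrative): server-side regex selection as described in the
# docstring above. Anchoring with ^ and $ makes the match exact; without them,
# 'FDISC' would match any Name containing that substring. Names used below are
# placeholders.
#
#     exact = fdisc.find(Name='^FDISC-Range-1$')
#     enabled_ranges = fdisc.find(Enabled=True)
#     for r in enabled_ranges:
#         print(r.Name, r.ObjectId)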
def read(self, href):
"""Retrieves a single instance of fcoeClientFdiscRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the fcoeClientFdiscRange resources from the server, available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
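# Example (illustrative): fetching one instance when its href is already known,
# e.g. saved from an earlier find(). The href below is a placeholder.
#
#     one = fdisc.read('/api/v1/sessions/1/ixnetwork/vport/1/protocolStack/...')
#     print(one.Name)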
def CustomProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the customProtocolStack operation on the server.
Create custom protocol stack under /vport/protocolStack
customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
---------------------------------------------------------------
- Arg2 (list(str)): List of plugin types to be added in the new custom stack
- Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"customProtocolStack", payload=payload, response_object=None
)
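# Example (illustrative): building a custom protocol stack. The plugin list and
# merge mode are placeholders chosen from the signature above.
#
#     fdisc.CustomProtocolStack(Arg2=['ethernet'], Arg3='kAppend')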
def DisableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the disableProtocolStack operation on the server.
Disable a protocol under protocolStack using the class name
disableProtocolStack(Arg2=string, async_operation=bool)string
-------------------------------------------------------------
- Arg2 (str): Protocol class name to disable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the execution
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"disableProtocolStack", payload=payload, response_object=None
)
def EnableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the enableProtocolStack operation on the server.
Enable a protocol under protocolStack using the class name
enableProtocolStack(Arg2=string, async_operation=bool)string
------------------------------------------------------------
- Arg2 (str): Protocol class name to enable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the execution
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"enableProtocolStack", payload=payload, response_object=None
)
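# Example (illustrative): disabling and re-enabling a protocol by class name.
# The class-name string is a placeholder; both calls return a status string per
# the docstrings above.
#
#     status = fdisc.DisableProtocolStack(Arg2='FcoeClientFdiscRange')
#     status = fdisc.EnableProtocolStack(Arg2='FcoeClientFdiscRange')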
def FcoeClientFdisc(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientFdisc operation on the server.
Enable NPIV FDISC interface (transmits NPIV FDISC, if necessary).
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientFdisc(async_operation=bool)
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientFdisc(Arg2=enum, async_operation=bool)
------------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientFdisc", payload=payload, response_object=None)
def FcoeClientNpivFlogo(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientNpivFlogo operation on the server.
Disable NPIV FDISC interface (transmits FLOGO, if already logged in).
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientNpivFlogo(async_operation=bool)
-----------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientNpivFlogo(Arg2=enum, async_operation=bool)
----------------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"fcoeClientNpivFlogo", payload=payload, response_object=None
)
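# Example (illustrative): logging the NPIV FDISC interfaces in and out using
# the synchronous overloads above.
#
#     fdisc.FcoeClientFdisc(Arg2='sync')       # transmit FDISC if necessary
#     fdisc.FcoeClientNpivFlogo(Arg2='sync')   # transmit FLOGO if logged in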
def FcoeClientPause(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientPause operation on the server.
Pause negotiation of FCoE sessions for selected plugins and ranges
fcoeClientPause(async_operation=bool)
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientPause", payload=payload, response_object=None)
def FcoeClientPlogi(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientPlogi operation on the server.
Enable PLOGI (transmits PLOGI to the configured PLOGI destination(s); an error message is displayed if none is configured).
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientPlogi(async_operation=bool)
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientPlogi(Arg2=enum, async_operation=bool)
------------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientPlogi", payload=payload, response_object=None)
def FcoeClientPlogo(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientPlogo operation on the server.
Disable PLOGI (transmits PLOGO, if already logged in to the destination VN_Port(s)).
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientPlogo(async_operation=bool)
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientPlogo(Arg2=enum, async_operation=bool)
------------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientPlogo", payload=payload, response_object=None)
def FcoeClientResume(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientResume operation on the server.
Resume previously paused negotiation for selected plugins and ranges
fcoeClientResume(async_operation=bool)
--------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientResume", payload=payload, response_object=None)
def FcoeClientStart(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientStart operation on the server.
Negotiate FCoE sessions for selected ranges
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientStart(async_operation=bool)
-------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientStart(Arg2=enum, async_operation=bool)
------------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientStart", payload=payload, response_object=None)
def FcoeClientStop(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the fcoeClientStop operation on the server.
Teardown FCoE sessions for selected ranges
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
fcoeClientStop(async_operation=bool)
------------------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
fcoeClientStop(Arg2=enum, async_operation=bool)
-----------------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("fcoeClientStop", payload=payload, response_object=None)
def Start(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the start operation on the server.
Negotiate sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
start(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
start(Arg2=enum, async_operation=bool)
--------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpoint,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("start", payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the stop operation on the server.
Teardown sessions for all protocols on all ranges belonging to selected plugins
The IxNetwork model allows for multiple method signatures with the same name while Python does not.
stop(async_operation=bool)
--------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
stop(Arg2=enum, async_operation=bool)
-------------------------------------
- Arg2 (str(async | sync)): kArray[kObjref=/vport/protocolStack/atm,/vport/protocolStack/atm/dhcpEndpoint,/vport/protocolStack/atm/dhcpEndpoint/ancp,/vport/protocolStack/atm/dhcpEndpoint/range,/vport/protocolStack/atm/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/dhcpServerEndpoint,/vport/protocolStack/atm/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/atm/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip,/vport/protocolStack/atm/emulatedRouter/ip/ancp,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ip/twampClient,/vport/protocolStack/atm/emulatedRouter/ip/twampServer,/vport/protocolStack/atm/emulatedRouter/ipEndpoint,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/atm/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/atm/emulatedRouterEndpoint,/vport/protocolStack/atm/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/atm/ip,/vport/protocolStack/atm/ip/ancp,/vport/protocolStack/atm/ip/egtpPcrfEndpoint,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/l2tpEndpoint,/vport/protocolStack/atm/ip/l2tpEndpoint/range,/vport/protocolStack/atm/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/smDnsEndpoint,/vport/protocolStack/atm/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/atm/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/atm/ip/twampClient,/vport/protocolStack/atm/ip/twampServer,/vport/protocolStack/atm/ipEndpoint,/vport/protocolStack/atm/ipEndpoint/ancp,/vport/protocolStack/atm/ipEndpoint/range/amtRange,/vport/protocolStack/atm/ipEndpoint/range/ancpRange,/vport/protocolStack/atm/ipEndpoint/range/twampControlRange,/vport/protocolStack/atm/ipEndpoint/twampClient,/vport/protocolStack/atm/ipEndpoint/twampServer,/vport/protocolStack/atm/pppox,/vport/protocolStack/atm/pppox/ancp,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/atm/pppoxEndpoint,/vport/protocolStack/atm/pppoxEndpoint/ancp,/vport/protocolStack/atm/pppoxEndpoint/range,/vport/protocolStack/atm/pppoxEndpoint/range/ancpRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/atm/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet,/vport/protocolStack/ethernet/dcbxEndpoint,/vport/protocolStack/ethernet/dcbxEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint,/vport/protocolStack/ethernet/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/dhcpEndpoint/range,/vport/protocolStack/ethernet/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/dhcpServerEndpoint,/vport/protocolStack/ethernet/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/dhcpServerEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip,/vport/protocolStack/ethernet/emulatedRouter/ip/ancp,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/emulatedRouter/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ip/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ip/twampServer,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/ancp,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampClient,/vport/protocolStack/ethernet/emulatedRouter/ipEndpoint/twampServer,/vport/protocolStack/ethernet/emulatedRouterEndpoint,/vport/protocolStack/ethernet/emulatedRouterEndpoint/range/amtRange,/vport/protocolStack/ethernet/esmc,/vport/protocolStack/ethernet/fcoeClientEndpoint,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFdiscRange,/vport/protocolStack/ethernet/fcoeClientEndpoint/range/fcoeClientFlogiRange,/vport/protocolStack/ethernet/fcoeFwdEndpoint,/vport/protocolStack/ethernet/fcoeFwdEndpoint/range,/vport/protocolStack/ethernet/fcoeFwdEndpoint/secondaryRange,/vport/protocolStack/ethernet/ip,/vport/protocolStack/ethernet/ip/ancp,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpPcrfS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethern
et/ip/egtpS5S8PgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8PgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpSgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/egtpUeS5S8SgwEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLacEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tp/dhcpoLnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/ip/l2tpEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/amtRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ip/smDnsEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ip/twampClient,/vport/protocolStack/ethernet/ip/twampServer,/vport/protocolStack/ethernet/ipEndpoint,/vport/protocolStack/ethernet/ipEndpoint/ancp,/vport/protocolStack/ethernet/ipEndpoint/range/amtRange,/vport/protocolStack/ethernet/ipEndpoint/range/ancpRange,/vport/protocolStack/ethernet/ipEndpoint/range/twampControlRange,/vport/protocolStack/ethernet/ipEndpoint/twampClient,/vport/protocolStack/ethernet/ipEndpoint/twampServer,/vport/protocolStack/ethernet/pppox,/vport/protocolStack/ethernet/pppox/ancp,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoP
ppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppClientEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppox/dhcpoPppServerEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/pppoxEndpoint,/vport/protocolStack/ethernet/pppoxEndpoint/ancp,/vport/protocolStack/ethernet/pppoxEndpoint/range,/vport/protocolStack/ethernet/pppoxEndpoint/range/ancpRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6PdClientRange,/vport/protocolStack/ethernet/pppoxEndpoint/range/dhcpv6ServerRange,/vport/protocolStack/ethernet/vepaEndpoint,/vport/protocolStack/ethernet/vepaEndpoint/range,/vport/protocolStack/ethernetEndpoint,/vport/protocolStack/ethernetEndpoint/esmc,/vport/protocolStack/fcClientEndpoint,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range,/vport/protocolStack/fcClientEndpoint/range/fcClientFdiscRange,/vport/protocolStack/fcClientEndpoint/range/fcClientFlogiRange,/vport/protocolStack/fcFportFwdEndpoint,/vport/protocolStack/fcFportFwdEndpoint/range,/vport/protocolStack/fcFportFwdEndpoint/secondaryRange]
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent REST API calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute("stop", payload=payload, response_object=None)
|
[
"andy.balogh@keysight.com"
] |
andy.balogh@keysight.com
|
aa760ca3f7d7c9e12b8ef69e0da9c1b134bc975e
|
04b1803adb6653ecb7cb827c4f4aa616afacf629
|
/chrome/installer/linux/debian/package_version_interval.py
|
b386fb69728bdb3c6f0077166773d44d2f2e85a1
|
[
"BSD-3-Clause"
] |
permissive
|
Samsung/Castanets
|
240d9338e097b75b3f669604315b06f7cf129d64
|
4896f732fc747dfdcfcbac3d442f2d2d42df264a
|
refs/heads/castanets_76_dev
| 2023-08-31T09:01:04.744346
| 2021-07-30T04:56:25
| 2021-08-11T05:45:21
| 125,484,161
| 58
| 49
|
BSD-3-Clause
| 2022-10-16T19:31:26
| 2018-03-16T08:07:37
| null |
UTF-8
|
Python
| false
| false
| 4,844
|
py
|
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import sys
import deb_version
class PackageVersionIntervalEndpoint:
def __init__(self, is_open, is_inclusive, version):
        self._is_open = is_open
self._is_inclusive = is_inclusive
self._version = version
def _intersect(self, other, is_start):
if self._is_open and other._is_open:
return self
if self._is_open:
return other
if other._is_open:
return self
cmp_code = self._version.__cmp__(other._version)
if not is_start:
cmp_code *= -1
if cmp_code > 0:
return self
if cmp_code < 0:
return other
if not self._is_inclusive:
return self
return other
def __str__(self):
return 'PackageVersionIntervalEndpoint(%s, %s, %s)' % (
self._is_open, self._is_inclusive, self._version)
def __eq__(self, other):
if self._is_open and other._is_open:
return True
return (self._is_open == other._is_open and
self._is_inclusive == other._is_inclusive and
self._version == other._version)
class PackageVersionInterval:
def __init__(self, string_rep, package, start, end):
self.string_rep = string_rep
self.package = package
self.start = start
self.end = end
def contains(self, version):
if not self.start._is_open:
if self.start._is_inclusive:
if version < self.start._version:
return False
elif version <= self.start._version:
return False
if not self.end._is_open:
if self.end._is_inclusive:
if version > self.end._version:
return False
elif version >= self.end._version:
return False
return True
def intersect(self, other):
return PackageVersionInterval(
'', '', self.start._intersect(other.start, True),
self.end._intersect(other.end, False))
def implies(self, other):
if self.package != other.package:
return False
return self.intersect(other) == self
def __str__(self):
return 'PackageVersionInterval(%s)' % self.string_rep
def __eq__(self, other):
return self.start == other.start and self.end == other.end
class PackageVersionIntervalSet:
def __init__(self, intervals):
self.intervals = intervals
def formatted(self):
return ' | '.join([interval.string_rep for interval in self.intervals])
def _interval_implies_other_intervals(self, interval, other_intervals):
for other_interval in other_intervals:
if interval.implies(other_interval):
return True
return False
def implies(self, other):
# This disjunction implies |other| if every term in this
# disjunction implies some term in |other|.
for interval in self.intervals:
if not self._interval_implies_other_intervals(interval, other.intervals):
return False
return True
def version_interval_endpoints_from_exp(op, version):
open_endpoint = PackageVersionIntervalEndpoint(True, None, None)
inclusive_endpoint = PackageVersionIntervalEndpoint(False, True, version)
exclusive_endpoint = PackageVersionIntervalEndpoint(False, False, version)
if op == '>=':
return (inclusive_endpoint, open_endpoint)
if op == '<=':
return (open_endpoint, inclusive_endpoint)
if op == '>>' or op == '>':
return (exclusive_endpoint, open_endpoint)
if op == '<<' or op == '<':
        return (open_endpoint, exclusive_endpoint)
assert op == '='
return (inclusive_endpoint, inclusive_endpoint)
def parse_dep(dep):
"""Parses a package and version requirement formatted by dpkg-shlibdeps.
Args:
dep: A string of the format "package (op version)"
Returns:
A PackageVersionInterval.
"""
package_name_regex = '[a-z][a-z0-9\+\-\.]+'
match = re.match('^(%s)$' % package_name_regex, dep)
if match:
return PackageVersionInterval(dep, match.group(1),
PackageVersionIntervalEndpoint(True, None, None),
PackageVersionIntervalEndpoint(True, None, None))
match = re.match('^(%s) \(([\>\=\<]+) ([\~0-9A-Za-z\+\-\.\:]+)\)$' %
package_name_regex, dep)
if match:
(start, end) = version_interval_endpoints_from_exp(
match.group(2), deb_version.DebVersion(match.group(3)))
return PackageVersionInterval(dep, match.group(1), start, end)
print >> sys.stderr, 'Failed to parse ' + dep
sys.exit(1)
def parse_interval_set(deps):
"""Parses a disjunction of package version requirements.
Args:
deps: A string of the format
"package \(op version\) (| package \(op version\))*"
Returns:
A list of PackageVersionIntervals
"""
return PackageVersionIntervalSet(
[parse_dep(dep.strip()) for dep in deps.split('|')])
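# Hedged usage sketch (added; not part of the original Chromium file, and it
# assumes the sibling deb_version module imported above is available):
if __name__ == '__main__':
    newer = parse_interval_set('libc6 (>= 2.17)')
    older = parse_interval_set('libc6 (>= 2.4)')
    # Requiring ">= 2.17" is at least as strict as requiring ">= 2.4".
    assert newer.implies(older)
    assert not older.implies(newer)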
|
[
"sunny.nam@samsung.com"
] |
sunny.nam@samsung.com
|
b6ec3b1ebb177f78d0a5ea783b47f81b2a88d998
|
62b84f877ccb4171f558c225fa0fdd4fd2c44d6c
|
/latplan/puzzles/objutil.py
|
4321a402e19a332adf0f002fa2cc46e670e4467a
|
[] |
no_license
|
guicho271828/latplan
|
b6dfb55f3cceac947df770fb623d496111f9ab19
|
75a2fc773de245b422a695b51fccaf17294da123
|
refs/heads/master
| 2022-10-25T02:02:05.547143
| 2022-03-25T20:42:06
| 2022-03-25T20:59:29
| 96,482,151
| 77
| 19
| null | 2023-03-04T14:10:46
| 2017-07-07T00:11:52
|
Python
|
UTF-8
|
Python
| false
| false
| 8,298
|
py
|
import numpy as np
def bboxes_to_onehot(bboxes,X,Y):
batch, objs = bboxes.shape[0:2]
bboxes_grid = bboxes // 5
x1 = bboxes_grid[...,0].flatten()
y1 = bboxes_grid[...,1].flatten()
x2 = bboxes_grid[...,2].flatten()
y2 = bboxes_grid[...,3].flatten()
x1o = np.eye(X)[x1].reshape((batch,objs,X))
y1o = np.eye(Y)[y1].reshape((batch,objs,Y))
x2o = np.eye(X)[x2].reshape((batch,objs,X))
y2o = np.eye(Y)[y2].reshape((batch,objs,Y))
bboxes_onehot = np.concatenate((x1o,y1o,x2o,y2o),axis=-1)
del x1,y1,x2,y2,x1o,y1o,x2o,y2o
return bboxes_onehot
def bboxes_to_coord(bboxes):
coord1, coord2 = bboxes[...,0:2], bboxes[...,2:4]
center, width = (coord2+coord1)/2, (coord2-coord1)/2
coords = np.concatenate((center,width),axis=-1)
return coords
def coord_to_bboxes(coord):
cxcy, hw = coord[...,0:2], coord[...,2:4]
x1y1 = cxcy - hw
x2y2 = cxcy + hw
bbox = np.concatenate((x1y1,x2y2),axis=-1)
return bbox
def bboxes_to_sinusoidal(bboxes,dimensions=16):
assert (dimensions % 2) == 0
*shape, F = bboxes.shape
assert F == 4
D = dimensions // 2
k = np.arange(D)
# w = 0.0001 ** (2 * k / dimensions)
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    # we deliberately avoid the transformer paper's formulation because it makes decoding harder.
# we wish the unit sine to have width 2
# 0 1 2
# _/~\_ _
# \_/
#
# k = 1 : 2 -> 2pi
# k = 2 : 2 -> pi
# k = 3 : 2 -> pi/2
# therefore, in general,
import math
w = (2 * math.pi) * 2.0 ** (-k)
wt = np.outer(bboxes[...,None],w)
sin = np.sin(wt)
cos = np.cos(wt)
result = np.concatenate((sin,cos),axis=-1)
result = result.reshape((*shape,F*D*2))
return result
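def _sinusoidal_period_check(dimensions=16):
    """Hedged sanity check (added, not in the original latplan code): with
    w_k = (2*pi) * 2**(-k) as above, a coordinate shift of 2**k advances the
    k-th phase by exactly one full period."""
    k = np.arange(dimensions // 2)
    w = (2 * np.pi) * 2.0 ** (-k)
    return bool(np.allclose(w * (2.0 ** k), 2 * np.pi))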
def bboxes_to_binary(bboxes,dimensions=16):
# similar to propositional
assert (dimensions % 2) == 0
*shape, F = bboxes.shape
assert F == 4
D = dimensions
k = np.arange(1,D+1)
    # D = 5
    # k = 1,2,3,4,5
    # 2**k = 2,4,8,16,32
    # bboxes = 9
    # bboxes % (2**k) = 1, 1, 1, 9, 9
result = bboxes[...,None] % (2 ** k)
result = result.reshape((*shape,F*D))
return result
def bboxes_to_anchor(bboxes,picsize,cell_size):
*shape, F = bboxes.shape
assert F == 4
ymax, xmax, *_ = picsize
# avoid the corner case that points are right on the bottom/right edges
bboxes = np.clip(bboxes, 0.01, np.array([xmax,ymax,xmax,ymax])-0.01)
grid_x = xmax // cell_size # e.g. xmax = 48, cell_size = 16, grid_x = 3
grid_y = ymax // cell_size
anchors = (bboxes // cell_size).astype(int) # note: value should be in [0,2] because bboxes < 47.99
offsets = bboxes % cell_size # no need to subtract the half width, because we will normalize it to mean 0 later
x1 = anchors[...,0].flatten()
y1 = anchors[...,1].flatten()
x2 = anchors[...,2].flatten()
y2 = anchors[...,3].flatten()
x1 = np.eye(grid_x)[x1].reshape((*shape,grid_x))
y1 = np.eye(grid_y)[y1].reshape((*shape,grid_y))
x2 = np.eye(grid_x)[x2].reshape((*shape,grid_x))
y2 = np.eye(grid_y)[y2].reshape((*shape,grid_y))
anchors_onehot = np.concatenate((x1,y1,x2,y2),axis=-1)
return anchors_onehot, offsets, [grid_x, grid_y]
def binary_to_bboxes(binary):
shape = binary.shape
assert (shape[-1] % 4) == 0
D = shape[-1] // 4
k = np.arange(1,D+1)
binary = binary.reshape([*shape[:-1],4,D])
result = (binary * (2 ** k)).sum(axis=-1)
result = result.reshape([*shape[:-1], 4])
return result
def sinusoidal_to_bboxes(sinusoidal):
binary = sinusoidal >= 0
return binary_to_bboxes(binary)
def anchor_to_bboxes(anchors_onehot, offsets, grid_size, cell_size):
grid_x, grid_y = grid_size
x1 = np.argmax(anchors_onehot[...,:grid_x] ,axis=-1)[...,None]
y1 = np.argmax(anchors_onehot[...,grid_x:grid_x+grid_y] ,axis=-1)[...,None]
x2 = np.argmax(anchors_onehot[...,grid_x+grid_y:grid_x+grid_y+grid_x] ,axis=-1)[...,None]
y2 = np.argmax(anchors_onehot[...,grid_x+grid_y+grid_x:grid_x+grid_y+grid_x+grid_y],axis=-1)[...,None]
anchors = np.concatenate((x1,y1,x2,y2),axis=-1)
bboxes = anchors * cell_size + offsets
return bboxes.astype(int)
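def _anchor_roundtrip_check():
    """Hedged round-trip check (added, not in the original latplan code):
    encoding boxes with bboxes_to_anchor and decoding with anchor_to_bboxes
    should reproduce the integer part of the inputs when no clipping occurs."""
    rng = np.random.RandomState(0)
    bboxes = rng.uniform(0.5, 47.5, size=(2, 3, 4))
    onehot, offsets, grid = bboxes_to_anchor(bboxes, picsize=(48, 48), cell_size=16)
    recovered = anchor_to_bboxes(onehot, offsets, grid, cell_size=16)
    return bool(np.allclose(recovered, bboxes.astype(int)))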
def tiled_bboxes(batch, height, width, tilesize):
x1 = np.tile(np.arange(width),height) # [9] : 0,1,2, 0,1,2, 0,1,2
y1 = np.repeat(np.arange(height),width) # [9] : 0,0,0, 1,1,1, 2,2,2
x2 = x1+1
y2 = y1+1
bboxes = \
np.repeat( # [B,9,4]
np.expand_dims( # [1,9,4]
np.stack([x1,y1,x2,y2],axis=1) * tilesize, # [9,4]
0),
batch, axis=0)
# [batch, objects, 4]
return bboxes
def image_to_tiled_objects(x, tilesize):
B, H, W, C = x.shape
sH, sW = H//tilesize, W//tilesize
x = x.reshape([B, sH, tilesize, sW, tilesize, C])
x = np.swapaxes(x, 2, 3) # [B, sH, sW, tilesize, tilesize, C]
x = x.reshape([B, sH*sW, tilesize*tilesize*C])
return x
def random_object_masking(transitions,target_number_of_object=5,threshold=1e-8,augmentation=1):
    """Given a set of transitions, randomly remove static objects so that the total number of objects matches the target.
    The algorithm is based on reservoir sampling."""
B, _, O, F = transitions.shape
    results = [_random_object_masking(transitions,target_number_of_object,threshold)
               for i in range(augmentation)]
# reorder to avoid data leakage: first 90% remains in the first 90% after the augmentation.
# [aug_iter, B, 2, O', F] -> [B, aug_iter, 2, O', F] -> [B*aug_iter, 2, O', F]
return np.swapaxes(np.stack(results, axis=0),0,1).reshape((-1,2,target_number_of_object,F))
def _random_object_masking(transitions,target_number_of_object=5,threshold=1e-8):
B, _, O, F = transitions.shape
results = np.zeros((B, 2, target_number_of_object, F))
changed_item_count = np.zeros(B,dtype=np.int8)
pres = transitions[:,0] # B,O,F
sucs = transitions[:,1] # B,O,F
diff = abs(pres - sucs)
    same = np.all(diff <= threshold, axis=2) # B,O
changed = np.logical_not(same) # B,O
# copy changed objects first
changed_idxs = np.where(changed)
for b, o in zip(*changed_idxs):
count = changed_item_count[b]
results[b,:,count] = transitions[b,:,o]
changed_item_count[b] = count+1
reservoir_size = target_number_of_object - changed_item_count # [B]
assert np.all(reservoir_size >= 0)
reservoir_count = np.zeros(B,dtype=np.int8) # [B]
# copy unchanged objects randomly, with reservoir sampling
import random
same_idxs = np.where(same)
for b, o in zip(*same_idxs):
size = reservoir_size[b]
count = reservoir_count[b]
offset = changed_item_count[b]
if count < size:
# fill the reservoir array.
results[b,:,offset+count] = transitions[b,:,o]
else:
j = random.randint(0, count)
if j < size:
results[b,:,offset+j] = transitions[b,:,o]
reservoir_count[b] = count+1
return results
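# Hedged illustration (added, not part of the original latplan code): the
# reservoir-sampling step used above, isolated on a plain iterable. Every
# item ends up in the size-k reservoir with equal probability (Algorithm R).
def _reservoir_sample(items, k):
    import random
    reservoir = []
    for count, item in enumerate(items):
        if count < k:
            # Fill the reservoir with the first k items.
            reservoir.append(item)
        else:
            # Keep item with probability k/(count+1) by replacing a random slot.
            j = random.randint(0, count)
            if j < k:
                reservoir[j] = item
    return reservoir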
def location_augmentation(transitions,height=100,width=100,augmentation=1):
results = [_location_augmentation(transitions,height,width)
for i in range(int(augmentation))]
# reorder to avoid data leakage: first 90% remains in the first 90% after the augmentation.
# [aug_iter, B, 2, O', F] -> [B, aug_iter, 2, O', F] -> [B*aug_iter, 2, O', F]
return np.swapaxes(np.stack(results, axis=0),0,1).reshape((-1,*transitions.shape[1:]))
def _location_augmentation(transitions,height,width):
B,_,O,F = transitions.shape
x_noise = np.random.uniform(low=0.0,high=width,size=(B,1,1)).astype(transitions.dtype)
y_noise = np.random.uniform(low=0.0,high=height,size=(B,1,1)).astype(transitions.dtype)
transitions = transitions.copy()
transitions[...,-4] += x_noise
transitions[...,-3] += y_noise
transitions[...,-2] += x_noise
transitions[...,-1] += y_noise
return transitions
|
[
"guicho2.71828@gmail.com"
] |
guicho2.71828@gmail.com
|
88c81e959b5d2d8b8108c70ef1ae00c46a5f20a4
|
7ff333dd18ebea4159160b07c2e281461e021e25
|
/parsers/linux_software_parser.py
|
bab26705bf0a9b28917c545ee615ac75f91acce5
|
[
"Apache-2.0",
"DOC"
] |
permissive
|
defaultnamehere/grr
|
d768240ea8ffc9d557f5fe2e272937b83398b6e3
|
ba1648b97a76f844ffb8e1891cc9e2680f9b1c6e
|
refs/heads/master
| 2021-01-21T19:09:18.863900
| 2014-12-07T01:49:53
| 2014-12-07T01:49:53
| 27,655,857
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,137
|
py
|
#!/usr/bin/env python
"""Simple parsers for Linux files."""
import re
from debian_bundle import deb822
from grr.lib import parsers
from grr.lib import rdfvalue
class DebianPackagesStatusParser(parsers.FileParser):
"""Parser for /var/lib/dpkg/status. Yields SoftwarePackage semantic values."""
output_types = ["SoftwarePackage"]
supported_artifacts = ["DebianPackagesStatus"]
installed_re = re.compile(r"^\w+ \w+ installed$")
def Parse(self, stat, file_object, knowledge_base):
"""Parse the status file."""
_, _ = stat, knowledge_base
try:
for pkg in deb822.Packages.iter_paragraphs(file_object):
if self.installed_re.match(pkg["Status"]):
soft = rdfvalue.SoftwarePackage(
name=pkg["Package"],
description=pkg["Description"],
version=pkg["Version"],
architecture=pkg["Architecture"],
publisher=pkg["Maintainer"],
install_state="INSTALLED")
yield soft
except SystemError:
yield rdfvalue.Anomaly(type="PARSER_ANOMALY",
symptom="Invalid dpkg status file")
|
[
"amoser@google.com"
] |
amoser@google.com
|
1f83f61dfa9497557e6936eb982a70d2efc4f3d7
|
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
|
/sdBs/AllRun/galex_j05369+3955/sdB_galex_j05369+3955_coadd.py
|
491e3294e315021e4dbeb2b572fa220d4d65db96
|
[] |
no_license
|
tboudreaux/SummerSTScICode
|
73b2e5839b10c0bf733808f4316d34be91c5a3bd
|
4dd1ffbb09e0a599257d21872f9d62b5420028b0
|
refs/heads/master
| 2021-01-20T18:07:44.723496
| 2016-08-08T16:49:53
| 2016-08-08T16:49:53
| 65,221,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 453
|
py
|
from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[84.234625,39.921497], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_galex_j05369+3955/sdB_galex_j05369+3955_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_galex_j05369+3955/sdB_galex_j05369+3955_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
|
[
"thomas@boudreauxmail.com"
] |
thomas@boudreauxmail.com
|
e5ea45b50f4220f2ee941a580567bfd3619b7a03
|
55c1218bdd3f87554b3e462ab3609d34442a427e
|
/ch06/codeListing06-8.py
|
a85ed3c9ddbee9cc137d4e073bdec128d50112b4
|
[] |
no_license
|
oilmcut2019/Teaching_material_python
|
b0b0706ea14c9ef70ddabb3ec705e4be7f7783aa
|
28fd3c344c49d004e20322e8d33b1f0bfec38e0c
|
refs/heads/master
| 2020-05-18T16:43:40.805479
| 2019-05-02T06:47:36
| 2019-05-02T06:47:36
| 184,533,910
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 248
|
py
|
def get_vowels_in_word(word):
"""Return vowels in string word--include repeats."""
vowel_str = "aeiou"
vowels_in_word = ""
for char in word:
if char in vowel_str:
vowels_in_word += char
return vowels_in_word
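# Example usage (added): vowels come back in order of appearance, repeats kept.
# >>> get_vowels_in_word("bookkeeper")
# 'ooeee'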
|
[
"m07158031@o365.mcut.edu.tw"
] |
m07158031@o365.mcut.edu.tw
|
823128508170e7b6f582e87466c71dc6f760b04d
|
cf7d96bdd34205ede987f0985dfc9e3ab415ee06
|
/visual_export/spreadsheet/base.py
|
c910b70539f3680f0a48fb6616e889e1eccc7a93
|
[] |
no_license
|
hendrasaputra0501/btxjalan
|
afc93467d54a6f20ef6ac46f7359e964ad5d42a0
|
d02bc085ad03efc982460d77f7af1eb5641db729
|
refs/heads/master
| 2020-12-30T11:02:05.416120
| 2017-07-31T01:34:08
| 2017-07-31T01:34:08
| 98,836,234
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 576
|
py
|
# -*- coding: utf-8 -*-
from cStringIO import StringIO
class SpreadSheetBase(object):
document = None
table = None
def __init__(self, title):
self.title = title
def tofile(self):
if self.document is None:
raise Exception('No document found')
fp = StringIO()
self.document.save(fp)
fp.seek(0)
data = fp.read()
fp.close()
return data
def AddRow(self, style=None):
raise Exception('Not implemented')
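# Hedged usage sketch (added, not part of the original module): any object
# exposing save(fileobj) can stand in for self.document, which makes
# tofile() easy to exercise without a real spreadsheet backend.
class _FakeDocument(object):
    def save(self, fp):
        fp.write('spreadsheet-bytes')

class _FakeSpreadSheet(SpreadSheetBase):
    def __init__(self, title):
        super(_FakeSpreadSheet, self).__init__(title)
        self.document = _FakeDocument()

# _FakeSpreadSheet('demo').tofile() == 'spreadsheet-bytes'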
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"hendrasaputra0501@gmail.com"
] |
hendrasaputra0501@gmail.com
|
c1fbb8cbba54fa88ad00b3eaefacc4651f85a474
|
57397e9891525c53d7d02c91634e917e1de74772
|
/src/procesamiento/python/PSD2RS.py
|
1e572a6acb6b68555a9a2ee8cdd39cd2814cd5f1
|
[] |
no_license
|
gdiazh/pruebas_vibraciones
|
614964ad4fb2bb7770f67f885986f3b65e2c0571
|
0baabc76a6cd9444e6cedac8da14b0a78169b34e
|
refs/heads/master
| 2020-05-18T01:56:47.861642
| 2019-04-30T18:48:06
| 2019-04-30T18:48:06
| 184,103,338
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,607
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 4 15:13:08 2018
General approach
PSD1: MIL-STD-1540C Acceptance Level
PSD2: NASA random vibration for mass less than 22.7 kg (Acceptable condition in CubeSat)
Conservative damping ratio = 0.05
@author: Elias Obreque
"""
import matplotlib.pyplot as plt
import numpy as np
# PSD input: MIL-STD-1540C
#HZ = [20, 150, 600, 2000]
#GRMS2 = [0.0053, 0.04, 0.04, 0.0036]
# PSD input: NASA
HZ = [20, 50, 800, 2000]
GRMS2 = [0.013, 0.08, 0.08, 0.013]
#==========================================================================
# Natural frequency
FN = np.arange(20, 2010, 10)
xi = 0.05
#============================================================================
# Slope of the curve
m = []
for i in range(1, len(HZ)):
base = np.log10(HZ[i]) - np.log10(HZ[i - 1])
alt = np.log10(GRMS2[i]) - np.log10(GRMS2[i - 1])
m.append(alt/base)
# GRMS2(f)
def G_f(f):
Gn = 0
for i in range(1, len(HZ)):
if f >= HZ[i - 1] and f <= HZ[i]:
Const = GRMS2[i - 1]/HZ[i - 1]**m[i-1]
Gn = Const*f**m[i-1]
elif f > max(HZ):
Const = GRMS2[-1]/HZ[-1]**m[-1]
Gn = Const*f**m[-1]
return Gn
def AreaRA(fx, x):
areaRA = 0
dx = x[1] - x[0]
for i in range(len(x)):
areaRA = areaRA + fx[i]*dx
return areaRA**0.5
#============================================================================
areaPSD = 0
for i in range(1, len(HZ)):
base = HZ[i] - HZ[i -1]
alt = np.abs(GRMS2[i] - GRMS2[i - 1])
slope = (np.log(GRMS2[i]) - np.log(GRMS2[i - 1]))/(np.log(HZ[i]) - np.log(HZ[i - 1]))
offset = GRMS2[i -1]/HZ[i - 1]**slope
if slope != -1:
areaPSD = areaPSD + (offset/(slope + 1))*(HZ[i]**(slope+1) - HZ[i - 1]**(slope+1))
else:
areaPSD = areaPSD + offset*(np.log(HZ[i]) - np.log(HZ[i - 1]))
Grms = np.sqrt(areaPSD)
GPeak = np.sqrt(2)*Grms
print('\nValor Grms: ', Grms)
print('Valor Gpeak: ', GPeak,"\n")
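#==========================================================================
# Hedged cross-check (added, not in the original script): the piecewise
# log-log integration above should agree with brute-force numerical
# integration of G_f over the same band, up to discretization error.
F_check = np.linspace(min(HZ), max(HZ), 20000)
Grms_check = AreaRA([G_f(f) for f in F_check], F_check)
print('Numerical Grms cross-check: ', Grms_check)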
#==========================================================================
Acc = []
F = np.linspace(min(HZ), 2000, 10000)
df = F[1] - F[0]
k = 0
for fn in FN:
Acc.append([])
for i in range(len(F)):
p = F[i]/fn
C = (1 + (2*xi*p)**2)/((1 - p**2)**2 + (2*xi*p)**2)
Acc[k].append(C*G_f(F[i]))
k = k + 1
AreaGRMS = []
for m in range(len(FN)):
AreaGRMS.append(AreaRA(Acc[m], F))
#print("Response Accel (GRMS) [",FN[m], "Hz] =",AreaGRMS[m])
#==========================================================================
maxAccGRMS = max(AreaGRMS)
k = list(AreaGRMS).index(maxAccGRMS)
maxFn = FN[k]
print('Worst-Case point is:', maxAccGRMS,"g at", maxFn,"Hz")
#==========================================================================
# PLOT
#%%
textlegend = []
plt.figure(1)
plt.title('Response Power Spectral Density Curves')
plt.ylabel('PSD [$G^2 / Hz$]')
plt.xlabel('Frequency [$Hz$]')
plt.yscale('log')
plt.xscale('log')
for i in np.arange(0, int(100/20), 3):
plt.plot(F, Acc[i], '--')
    textlegend.append(str(FN[i]) +" Hz")
for i in np.arange(int(100/20) + 3, len(Acc), 50):
plt.plot(F, Acc[i], '--')
    textlegend.append(str(FN[i]) +" Hz")
plt.plot(HZ, GRMS2, 'k')
textlegeng.append("PSD")
plt.legend(textlegend)
plt.ylim(0.001, 15)
plt.xlim(10, 10000)
plt.grid(which='both', axis='both')
plt.show()
plt.figure(2)
plt.title('Vibration Response Spectrum')
plt.ylabel('Accel [$G_{RMS}$]')
plt.xlabel('Frequency [$Hz$]')
plt.yscale('log')
plt.xscale('log')
plt.plot(FN, AreaGRMS)
plt.grid(which='both', axis='both')
plt.show()
|
[
"g.hernan.diaz@gmail.com"
] |
g.hernan.diaz@gmail.com
|
2e28a19cee408ed5614c805add202c7ba74dc8d9
|
e23a4f57ce5474d468258e5e63b9e23fb6011188
|
/125_algorithms/_exercises/templates/_algorithms_challenges/leetcode/LeetCode_with_solution/070_Climbing_Stairs.py
|
05f4b4ef7410b23e15f28f34f471ea6cc27c358c
|
[] |
no_license
|
syurskyi/Python_Topics
|
52851ecce000cb751a3b986408efe32f0b4c0835
|
be331826b490b73f0a176e6abed86ef68ff2dd2b
|
refs/heads/master
| 2023-06-08T19:29:16.214395
| 2023-05-29T17:09:11
| 2023-05-29T17:09:11
| 220,583,118
| 3
| 2
| null | 2023-02-16T03:08:10
| 2019-11-09T02:58:47
|
Python
|
UTF-8
|
Python
| false
| false
| 890
|
py
|
class Solution(object):
# def climbStairs(self, n):
# """
# :type n: int
# :rtype: int
# """
# dp = [0] * (n + 1)
# dp[0] = 1
# dp[1] = 1
# for i in range(2, n + 1):
# dp[i] = dp[i - 2] + dp[i- 1]
# return dp[n]
    def climbStairs(self, n):
        if n <= 1:
            return 1
        dp = [1] * 2
        for i in range(2, n + 1):
            dp[1], dp[0] = dp[1] + dp[0], dp[1]
        return dp[1]
# C = {1: 1, 2: 2}
# def climbStairs(self, n):
# """
# :type n: int
# :rtype: int
# """
# if n in Solution.C:
# return Solution.C[n]
# else:
# result = Solution.C.get(n - 1, self.climbStairs(n - 1)) + \
# Solution.C.get(n - 2, self.climbStairs(n - 2))
# Solution.C[n] = result
# return result
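# Hedged quick check (added, not part of the original template): the active
# recurrence above is Fibonacci-like, so small inputs are easy to verify.
if __name__ == '__main__':
    s = Solution()
    assert [s.climbStairs(n) for n in range(1, 7)] == [1, 2, 3, 5, 8, 13]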
|
[
"sergejyurskyj@yahoo.com"
] |
sergejyurskyj@yahoo.com
|
2a9667d79338cfcbe2d9bf465b8957a4e4ab8d3a
|
1cf7c11711303fc21c37fc091b2eefc30bc489c6
|
/moderate/number_pairs.py
|
88dee243e7fcc46404c1bcfac8a89aefbebe0783
|
[] |
no_license
|
yamaton/codeeval
|
1c68b23459b6329c42e046f07bd19b4cecafb95f
|
eacd28106f76364d44fae9f6a4c2860711ea0dcc
|
refs/heads/master
| 2020-04-15T05:53:11.929711
| 2013-09-06T18:48:29
| 2013-09-06T18:48:29
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,737
|
py
|
#!/usr/bin/env python
# encoding: utf-8
"""
number_pairs.py
Created by Yamato Matsuoka on 2012-07-17.
Description:
You are given a sorted array of positive integers and a number 'X'. Print out all pairs of numbers whose sum is equal to X. Print out only unique pairs, and the pairs should be in ascending order.
Input sample:
Your program should accept as its first argument a filename. This file will contain a comma-separated list of sorted numbers and then the sum 'X', separated by a semicolon. Ignore all empty lines. If no pair exists, print the string NULL, e.g.
1,2,3,4,6;5
2,4,5,6,9,11,15;20
1,2,3,4;50
Output sample:
Print out the pairs of numbers that sum to X. The pairs should themselves be printed in sorted order, i.e. the first number of each pair should be in ascending order, e.g.
1,4;2,3
5,15;9,11
NULL
"""
import sys
def read_data(line):
x = line.rstrip().split(";")
seq = [int(i) for i in x[0].split(",")]
N = int(x[-1])
return (seq, N)
def find_pairs(seq, X):
"""Find number pairs from a sorted list, seq,
such that sum of each pair is X."""
bag1 = [i for i in seq if i <= X/2]
bag2 = [i for i in seq if i > X/2]
out = []
for i in bag1:
j = X - i
if j in bag2:
out.append((i,j))
return out
def format(lis):
"""Construct formatted string from a list of pairs"""
if lis:
return ";".join(",".join(str(i) for i in n) for n in lis)
else:
return "NULL"
if __name__ == '__main__':
with open(sys.argv[1], "r") as f:
data = [read_data(line) for line in f]
out = (find_pairs(seq, X) for (seq, X) in data)
formatted = "\n".join(format(x) for x in out)
print formatted
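# Hedged worked example (added, not part of the original script): for the
# sample line "1,2,3,4,6;5", read_data yields ([1, 2, 3, 4, 6], 5),
# find_pairs returns [(1, 4), (2, 3)], and format() renders "1,4;2,3".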
|
[
"yamaton@gmail.com"
] |
yamaton@gmail.com
|
44aed721687e6b4f500f365adb3852fb95f20ddd
|
5c465756e28ae021e7afac1dddbc6e11ec8c00ec
|
/setup.py
|
d405b626d8f809bb3cd8e9d499465ec7b331ae22
|
[
"Apache-2.0"
] |
permissive
|
waynew/pop
|
f9ffca2b98993ba1c6ddc95fdc66599c1eedfeff
|
d3b6128ada34ee31b16b9c6e1c600b3e059f4e31
|
refs/heads/master
| 2020-05-25T15:35:29.859674
| 2019-05-21T16:10:27
| 2019-05-21T16:10:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,827
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Import python libs
import os
import sys
import shutil
from setuptools import setup, Command
NAME = 'pop'
DESC = ('The Plugin Oriented Programming System')
# Version info -- read without importing
_locals = {}
with open('pop/version.py') as fp:
exec(fp.read(), None, _locals)
VERSION = _locals['version']
SETUP_DIRNAME = os.path.dirname(__file__)
if not SETUP_DIRNAME:
SETUP_DIRNAME = os.getcwd()
class Clean(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for subdir in ('pop', 'tests'):
for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), subdir)):
for dir_ in dirs:
if dir_ == '__pycache__':
shutil.rmtree(os.path.join(root, dir_))
def discover_packages():
modules = []
for package in ('pop', ):
for root, _, files in os.walk(os.path.join(SETUP_DIRNAME, package)):
pdir = os.path.relpath(root, SETUP_DIRNAME)
modname = pdir.replace(os.sep, '.')
modules.append(modname)
return modules
setup(name=NAME,
author='Thomas S Hatch',
author_email='thatch@saltstack.com',
url='https://saltstack.com',
version=VERSION,
description=DESC,
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Development Status :: 5 - Production/Stable',
],
scripts=['scripts/pop_seed'],
packages=discover_packages(),
cmdclass={'clean': Clean},
)
|
[
"thatch45@gmail.com"
] |
thatch45@gmail.com
|
9964c1a1e4d067a3b5d76587f7a21ab1eeb8d4e8
|
e0045eec29aab56212c00f9293a21eb3b4b9fe53
|
/calendar_sms/models/calendar.py
|
26a857dd032ad94c770a8f3a0e11e031594e3c22
|
[] |
no_license
|
tamam001/ALWAFI_P1
|
a3a9268081b9befc668a5f51c29ce5119434cc21
|
402ea8687c607fbcb5ba762c2020ebc4ee98e705
|
refs/heads/master
| 2020-05-18T08:16:50.583264
| 2019-04-30T14:43:46
| 2019-04-30T14:43:46
| 184,268,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,248
|
py
|
# -*- coding: utf-8 -*-
# Part of ALWAFI. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, fields, models, _
_logger = logging.getLogger(__name__)
class CalendarEvent(models.Model):
_inherit = 'calendar.event'
def _get_default_sms_recipients(self):
""" Method overriden from mail.thread (defined in the sms module).
SMS text messages will be sent to attendees that haven't declined the event(s).
"""
return self.mapped('attendee_ids').filtered(lambda att: att.state != 'declined').mapped('partner_id')
def _do_sms_reminder(self):
""" Send an SMS text reminder to attendees that haven't declined the event """
for event in self:
sms_msg = _("Event reminder: %s on %s.") % (event.name, event.start_datetime or event.start_date)
note_msg = _('SMS text message reminder sent !')
event.message_post_send_sms(sms_msg, note_msg=note_msg)
class CalendarAlarm(models.Model):
_inherit = 'calendar.alarm'
type = fields.Selection(selection_add=[('sms', 'SMS Text Message')])
class AlarmManager(models.AbstractModel):
_inherit = 'calendar.alarm_manager'
@api.model
def get_next_mail(self):
""" Cron method, overriden here to send SMS reminders as well
"""
result = super(AlarmManager, self).get_next_mail()
now = fields.Datetime.to_string(fields.Datetime.now())
last_sms_cron = self.env['ir.config_parameter'].get_param('calendar_sms.last_sms_cron', default=now)
cron = self.env['ir.model.data'].get_object('calendar', 'ir_cron_scheduler_alarm')
interval_to_second = {
"weeks": 7 * 24 * 60 * 60,
"days": 24 * 60 * 60,
"hours": 60 * 60,
"minutes": 60,
"seconds": 1
}
cron_interval = cron.interval_number * interval_to_second[cron.interval_type]
events_data = self.get_next_potential_limit_alarm('sms', seconds=cron_interval)
for event in self.env['calendar.event'].browse(events_data):
max_delta = events_data[event.id]['max_duration']
if event.recurrency:
found = False
for event_start in event._get_recurrent_date_by_event():
event_start = event_start.replace(tzinfo=None)
last_found = self.do_check_alarm_for_one_date(event_start, event, max_delta, 0, 'sms', after=last_sms_cron, missing=True)
for alert in last_found:
event.browse(alert['event_id'])._do_sms_reminder()
found = True
if found and not last_found: # if the precedent event had an alarm but not this one, we can stop the search for this event
break
else:
event_start = fields.Datetime.from_string(event.start)
for alert in self.do_check_alarm_for_one_date(event_start, event, max_delta, 0, 'sms', after=last_sms_cron, missing=True):
event.browse(alert['event_id'])._do_sms_reminder()
self.env['ir.config_parameter'].set_param('calendar_sms.last_sms_cron', now)
return result
|
[
"50145400+gilbertp7@users.noreply.github.com"
] |
50145400+gilbertp7@users.noreply.github.com
|
7ac9b3d42f3f46d4e3db59414eb2c88a9ebeaff6
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03137/s045740522.py
|
2b93ba5faee17d63aa69bda2f022a1c7348ac45b
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 325
|
py
|
import math
import sys
import collections
import bisect
def main():
n, m = map(int, input().split())
x = sorted(list(map(int, input().split())))
if m <= n:
print(0)
return
y = sorted([x[i + 1] - x[i] for i in range(m - 1)])
print(sum(y[0:(m-n)]))
if __name__ == '__main__':
main()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
95eee9b7c9240054cd6ba3174b1915da8273f6bc
|
9242319ca7796c6a3b18e760ddbf8290944d4b49
|
/flock/frontend/amp.py
|
933a74893288d7941edc8e3bc315d4091639d9e7
|
[
"MIT"
] |
permissive
|
MainRo/python-flock
|
79cfd7ce4edab40439c556b6621768438868d16c
|
e1faa78d6aba374493336651848daadad82387a8
|
refs/heads/master
| 2021-01-10T19:16:52.907538
| 2015-11-18T21:15:38
| 2015-11-18T21:15:38
| 29,210,634
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,209
|
py
|
import logging
import json
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.protocols import amp
from twisted.internet.endpoints import TCP4ServerEndpoint
from flock.router import Router
from flock.message import FlockMessage
# client API
class MessageReceived(amp.Command):
arguments = [('message', amp.String())]
response = [('status', amp.Boolean())]
class SetState(amp.Command):
arguments = [('message', amp.String())]
response = [('status', amp.Boolean())]
class FlockServer(amp.AMP):
def connectionMade(self):
router = Router.instantiate()
router.attach_frontend(self)
logging.debug("connected")
def connectionLost(self, reason):
router = Router.instantiate()
router.detach_frontend(self)
logging.debug("disconnected")
    @SetState.responder
    def set_state(self, message):
        logging.debug("set_state " + message)
message = json.loads(message)
action = FlockMessage()
action.uid = message['id']
action.attributes[FlockMessage.MSG_ATTRIBUTE_SWITCH_BISTATE] = message['state']
action.type = FlockMessage.Type.set
action.namespace = 'controller'
router = Router.instantiate()
router.call(action)
return {'status': True}
def event(self, message):
"""
        Sends the received message to the endpoint, serialized as JSON.
@todo flatten message as AMP fields.
"""
legacy_message = {}
legacy_message['protocol'] = 'flock'
legacy_message['device_id'] = message.uid
legacy_message['private_data'] = ''
legacy_message['attributes'] = message.attributes
json_message = json.dumps(legacy_message, default=lambda o: o.__dict__, sort_keys=True, indent=4)
self.callRemote(MessageReceived, message=json_message)
return
class FlockServerFactory(Factory):
def buildProtocol(self, addr):
return FlockServer()
class Frontend(object):
def __init__(self, port, reactor):
endpoint = TCP4ServerEndpoint(reactor, port, interface='localhost')
endpoint.listen(FlockServerFactory())
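# Hedged client sketch (added, not part of the original frontend; the port
# number is a hypothetical example): invoking the SetState command from a
# plain Twisted AMP client.
from twisted.internet.protocol import ClientCreator

def set_device_state(reactor, device_id, state, port=8750):
    # Connect to the frontend and fire SetState once the protocol is ready.
    d = ClientCreator(reactor, amp.AMP).connectTCP('localhost', port)

    def on_connect(protocol):
        message = json.dumps({'id': device_id, 'state': state})
        return protocol.callRemote(SetState, message=message)

    return d.addCallback(on_connect)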
|
[
"romain.picard@oakbits.com"
] |
romain.picard@oakbits.com
|
6880c878362b0b48c05526b91b9d4518b7206f2b
|
2ee29ea10cc2ad5577a2f8e7ed0fa1351d451a52
|
/django/bin/tox
|
19cb4c36ac6406b19087eb92ec3ba72ce35c0bf1
|
[] |
no_license
|
guille1194/votbit2
|
6d1c792f4f43cdea25e31a8fbb8e9f1e20d9670b
|
63497d17a249c082730f39cc54caf0e2c1d4dc3c
|
refs/heads/master
| 2021-04-22T13:26:22.635235
| 2016-12-03T21:16:26
| 2016-12-03T21:16:26
| 75,500,637
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 248
|
#!/home/guillermo/Documentos/votbit2/django/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from tox import cmdline
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(cmdline())
|
[
"guille1194@gmail.com"
] |
guille1194@gmail.com
|
|
e7607e9704c88c53d9835277d1ea1ef9a9502af4
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/Projects/pyinstaller/build/lib/PyInstaller/hooks/hook-reportlab.pdfbase._fontdata.py
|
4765de6acb08fe810b8f6653f67d9aa9e9d91ae1
|
[
"LicenseRef-scancode-other-permissive"
] |
permissive
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:70c3529a579452cddbe6fe3200d83c2064c7fa3a858851cb516a77177a79c258
size 538
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
d257dfaf805251a449fcec03c06aa55d97d4de0a
|
107c161846246ead49747b8257f0bffe57ff3866
|
/megabeast/old/test_models.py
|
2734639aa5e69b50f99f8ba217e27761079d30f9
|
[
"BSD-3-Clause"
] |
permissive
|
BEAST-Fitting/megabeast
|
e1334786d23d33795bb5f613586fe1615406e102
|
b6b9efbe197bde0a5372bc9f09699ad9d3d99886
|
refs/heads/master
| 2023-06-07T18:14:52.276860
| 2022-02-17T21:25:07
| 2022-02-17T21:25:07
| 111,114,312
| 2
| 11
| null | 2023-05-27T08:09:38
| 2017-11-17T14:44:50
|
Python
|
UTF-8
|
Python
| false
| false
| 988
|
py
|
import pytest
from megabeast.mbsettings import mbsettings
from megabeast.singlepop_dust_model import MB_Model
fd_model = {
"Av": {
"name": "gaussian",
"varnames": ["mean", "sigma"],
"varinit": [1.0, 0.25],
"prior": {
"name": "flat",
"var_minmax": [[0.005, 5.0], [0.05, 1.0]],
},
},
"Rv": {
"name": "gaussian",
"varnames": ["mean", "sigma"],
"varinit": [3.1, 0.25],
"prior": {
"name": "flat",
"var_minmax": [[2.0, 6.0], [0.05, 1.0]],
},
}
}
models = [fd_model, fd_model]
@pytest.mark.parametrize("model", models)
def test_lnprior(model):
"""
Test that the lnprior handles the defined prior types
"""
priortypes = ["fixed", "flat"]
# setup params
params = mbsettings()
params.fd_model = model
mod = MB_Model(params)
for cprior in priortypes:
assert mod.lnprior(mod.start_params()) == 0.0, "test"
|
[
"kgordon@stsci.edu"
] |
kgordon@stsci.edu
|
c1dfc3fc35f5262d91497aa645cd83d66e1f6ebf
|
f4b60f5e49baf60976987946c20a8ebca4880602
|
/lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/comm/webconnstatesaghist15min.py
|
97af9701edebea0d009738a84053273d0f815f22
|
[] |
no_license
|
cqbomb/qytang_aci
|
12e508d54d9f774b537c33563762e694783d6ba8
|
a7fab9d6cda7fadcc995672e55c0ef7e7187696e
|
refs/heads/master
| 2022-12-21T13:30:05.240231
| 2018-12-04T01:46:53
| 2018-12-04T01:46:53
| 159,911,666
| 0
| 0
| null | 2022-12-07T23:53:02
| 2018-12-01T05:17:50
|
Python
|
UTF-8
|
Python
| false
| false
| 5,306
|
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class WebConnStatesAgHist15min(Mo):
"""
A class that represents historical aggregated statistics for web connections state in a 15 minute sampling interval. This class updates every 5 minutes.
"""
meta = StatsClassMeta("cobra.model.comm.WebConnStatesAgHist15min", "web connections state")
counter = CounterMeta("wait", CounterCategory.GAUGE, "connections", "current waiting connections")
meta._counters.append(counter)
counter = CounterMeta("write", CounterCategory.GAUGE, "connections", "current writing connections")
meta._counters.append(counter)
counter = CounterMeta("read", CounterCategory.GAUGE, "connections", "current reading connections")
meta._counters.append(counter)
meta.moClassName = "commWebConnStatesAgHist15min"
meta.rnFormat = "HDcommWebConnStatesAg15min-%(index)s"
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical aggregated web connections state stats in 15 minute"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.comm.Https")
meta.parentClasses.add("cobra.model.comm.Http")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Hist")
meta.superClasses.add("cobra.model.comm.WebConnStatesAgHist")
meta.rnPrefixes = [
('HDcommWebConnStatesAg15min-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "index", "index", 19390, PropCategory.REGULAR)
prop.label = "History Index"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("index", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "index"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
def __init__(self, parentMoOrDn, index, markDirty=True, **creationProps):
namingVals = [index]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"collinsctk@qytang.com"
] |
collinsctk@qytang.com
|
628189ff34d80255a49ce3bab9404cf4d4218ed9
|
34bb97862512778eae0352a543c60de926b17878
|
/test/probe/test_empty_device_handoff.py
|
5c1880c1ec9874f5fb20f9d29d4b803c21f94060
|
[
"Apache-2.0"
] |
permissive
|
yasutaka-kamei/swift-token
|
2de8baec21ec6d97cf8f7af2ee5377badf1392c0
|
820366c0e35bd5244f23be19fc0423a96cb4ee59
|
refs/heads/master
| 2021-01-01T16:34:04.284109
| 2016-05-13T07:12:35
| 2016-05-16T00:02:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,922
|
py
|
begin_unit
comment|'#!/usr/bin/python -u'
nl|'\n'
comment|'# Copyright (c) 2010-2012 OpenStack Foundation'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License");'
nl|'\n'
comment|'# you may not use this file except in compliance with the License.'
nl|'\n'
comment|'# You may obtain a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS,'
nl|'\n'
comment|'# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or'
nl|'\n'
comment|'# implied.'
nl|'\n'
comment|'# See the License for the specific language governing permissions and'
nl|'\n'
comment|'# limitations under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'os'
newline|'\n'
name|'import'
name|'shutil'
newline|'\n'
name|'import'
name|'time'
newline|'\n'
nl|'\n'
name|'from'
name|'unittest'
name|'import'
name|'main'
newline|'\n'
name|'from'
name|'uuid'
name|'import'
name|'uuid4'
newline|'\n'
nl|'\n'
name|'from'
name|'swiftclient'
name|'import'
name|'client'
newline|'\n'
nl|'\n'
name|'from'
name|'swift'
op|'.'
name|'common'
name|'import'
name|'direct_client'
newline|'\n'
name|'from'
name|'swift'
op|'.'
name|'obj'
op|'.'
name|'diskfile'
name|'import'
name|'get_data_dir'
newline|'\n'
name|'from'
name|'swift'
op|'.'
name|'common'
op|'.'
name|'exceptions'
name|'import'
name|'ClientException'
newline|'\n'
name|'from'
name|'test'
op|'.'
name|'probe'
op|'.'
name|'common'
name|'import'
op|'('
nl|'\n'
name|'kill_server'
op|','
name|'ReplProbeTest'
op|','
name|'start_server'
op|','
name|'get_server_number'
op|')'
newline|'\n'
name|'from'
name|'swift'
op|'.'
name|'common'
op|'.'
name|'utils'
name|'import'
name|'readconf'
newline|'\n'
name|'from'
name|'swift'
op|'.'
name|'common'
op|'.'
name|'manager'
name|'import'
name|'Manager'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestEmptyDevice
name|'class'
name|'TestEmptyDevice'
op|'('
name|'ReplProbeTest'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|_get_objects_dir
indent|' '
name|'def'
name|'_get_objects_dir'
op|'('
name|'self'
op|','
name|'onode'
op|')'
op|':'
newline|'\n'
indent|' '
name|'device'
op|'='
        device = onode['device']
        _, node_id = get_server_number((onode['ip'], onode['port']),
                                       self.ipport2server)
        obj_server_conf = readconf(self.configs['object-server'][node_id])
        devices = obj_server_conf['app:object-server']['devices']
        obj_dir = '%s/%s' % (devices, device)
        return obj_dir

    def test_main(self):
        # Create container
        container = 'container-%s' % uuid4()
        client.put_container(self.url, self.token, container,
                             headers={'X-Storage-Policy':
                                      self.policy.name})

        cpart, cnodes = self.container_ring.get_nodes(self.account, container)
        cnode = cnodes[0]
        obj = 'object-%s' % uuid4()
        opart, onodes = self.object_ring.get_nodes(
            self.account, container, obj)
        onode = onodes[0]

        # Kill one container/obj primary server
        kill_server((onode['ip'], onode['port']),
                    self.ipport2server, self.pids)

        # Delete the default data directory for objects on the primary server
        obj_dir = '%s/%s' % (self._get_objects_dir(onode),
                             get_data_dir(self.policy))
        shutil.rmtree(obj_dir, True)
        self.assertFalse(os.path.exists(obj_dir))

        # Create container/obj (goes to two primary servers and one handoff)
        client.put_object(self.url, self.token, container, obj, 'VERIFY')
        odata = client.get_object(self.url, self.token, container, obj)[-1]
        if odata != 'VERIFY':
            raise Exception('Object GET did not return VERIFY, instead it '
                            'returned: %s' % repr(odata))

        # Kill other two container/obj primary servers
        # to ensure GET handoff works
        for node in onodes[1:]:
            kill_server((node['ip'], node['port']),
                        self.ipport2server, self.pids)

        # Indirectly through proxy assert we can get container/obj
        odata = client.get_object(self.url, self.token, container, obj)[-1]
        if odata != 'VERIFY':
            raise Exception('Object GET did not return VERIFY, instead it '
                            'returned: %s' % repr(odata))
        # Restart those other two container/obj primary servers
        for node in onodes[1:]:
            start_server((node['ip'], node['port']),
                         self.ipport2server, self.pids)
        self.assertFalse(os.path.exists(obj_dir))
        # We've indirectly verified the handoff node has the object, but
        # let's directly verify it.

        # Directly to handoff server assert we can get container/obj
        another_onode = next(self.object_ring.get_more_nodes(opart))
        odata = direct_client.direct_get_object(
            another_onode, opart, self.account, container, obj,
            headers={'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
        if odata != 'VERIFY':
            raise Exception('Direct object GET did not return VERIFY, instead '
                            'it returned: %s' % repr(odata))

        # Assert container listing (via proxy and directly) has container/obj
        objs = [o['name'] for o in
                client.get_container(self.url, self.token, container)[1]]
        if obj not in objs:
            raise Exception('Container listing did not know about object')
        timeout = time.time() + 5
        found_objs_on_cnode = []
        while time.time() < timeout:
            for cnode in [c for c in cnodes if c not in
                          found_objs_on_cnode]:
                objs = [o['name'] for o in
                        direct_client.direct_get_container(
                            cnode, cpart, self.account, container)[1]]
                if obj in objs:
                    found_objs_on_cnode.append(cnode)
            if len(found_objs_on_cnode) >= len(cnodes):
                break
            time.sleep(0.3)
        if len(found_objs_on_cnode) < len(cnodes):
            missing = ['%s:%s' % (cnode['ip'], cnode['port']) for cnode in
                       cnodes if cnode not in found_objs_on_cnode]
            raise Exception('Container servers %r did not know about object' %
                            missing)

        # Bring the first container/obj primary server back up
        start_server((onode['ip'], onode['port']),
                     self.ipport2server, self.pids)

        # Assert that it doesn't have container/obj yet
        self.assertFalse(os.path.exists(obj_dir))
        try:
            direct_client.direct_get_object(
                onode, opart, self.account, container, obj, headers={
                    'X-Backend-Storage-Policy-Index': self.policy.idx})
        except ClientException as err:
            self.assertEqual(err.http_status, 404)
            self.assertFalse(os.path.exists(obj_dir))
        else:
            self.fail("Expected ClientException but didn't get it")

        # Run object replication for first container/obj primary server
        _, num = get_server_number(
            (onode['ip'], onode.get('replication_port', onode['port'])),
            self.ipport2server)
        Manager(['object-replicator']).once(number=num)

        # Run object replication for handoff node
        _, another_num = get_server_number(
            (another_onode['ip'],
             another_onode.get('replication_port', another_onode['port'])),
            self.ipport2server)
        Manager(['object-replicator']).once(number=another_num)

        # Assert the first container/obj primary server now has container/obj
        odata = direct_client.direct_get_object(
            onode, opart, self.account, container, obj, headers={
                'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
        if odata != 'VERIFY':
            raise Exception('Direct object GET did not return VERIFY, instead '
                            'it returned: %s' % repr(odata))

        # Assert the handoff server no longer has container/obj
        try:
            direct_client.direct_get_object(
                another_onode, opart, self.account, container, obj, headers={
                    'X-Backend-Storage-Policy-Index': self.policy.idx})
        except ClientException as err:
            self.assertEqual(err.http_status, 404)
        else:
            self.fail("Expected ClientException but didn't get it")


if __name__ == '__main__':
    main()
|
[
"dmg@uvic.ca"
] |
dmg@uvic.ca
|
a25f71c986a1212e69a747de32c5133c5b78a446
|
733496067584ee32eccc333056c82d60f673f211
|
/idfy_rest_client/models/person_navn_adresse.py
|
5f7e495f992d7a22d700bd3462ef1b39195572d8
|
[
"MIT"
] |
permissive
|
dealflowteam/Idfy
|
90ee5fefaa5283ce7dd3bcee72ace4615ffd15d2
|
fa3918a6c54ea0eedb9146578645b7eb1755b642
|
refs/heads/master
| 2020-03-07T09:11:15.410502
| 2018-03-30T08:12:40
| 2018-03-30T08:12:40
| 127,400,869
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,252
|
py
|
# -*- coding: utf-8 -*-
"""
idfy_rest_client.models.person_navn_adresse
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io )
"""
from idfy_rest_client.api_helper import APIHelper
class PersonNavnAdresse(object):
"""Implementation of the 'Person.NavnAdresse' model.
TODO: type model description here.
Attributes:
status_field (string): TODO: type description here.
status_dato_field (datetime): TODO: type description here.
fodselsdato_field (datetime): TODO: type description here.
navn_field (string): TODO: type description here.
adresse_field (string): TODO: type description here.
postnr_field (string): TODO: type description here.
poststed_field (string): TODO: type description here.
kommune_field (string): TODO: type description here.
fylke_field (string): TODO: type description here.
alder_field (int): TODO: type description here.
kjonn_field (string): TODO: type description here.
telefon_field (list of string): TODO: type description here.
"""
# Create a mapping from Model property names to API property names
_names = {
"status_field":'statusField',
"status_dato_field":'statusDatoField',
"fodselsdato_field":'fodselsdatoField',
"navn_field":'navnField',
"adresse_field":'adresseField',
"postnr_field":'postnrField',
"poststed_field":'poststedField',
"kommune_field":'kommuneField',
"fylke_field":'fylkeField',
"alder_field":'alderField',
"kjonn_field":'kjonnField',
"telefon_field":'telefonField'
}
def __init__(self,
status_field=None,
status_dato_field=None,
fodselsdato_field=None,
navn_field=None,
adresse_field=None,
postnr_field=None,
poststed_field=None,
kommune_field=None,
fylke_field=None,
alder_field=None,
kjonn_field=None,
telefon_field=None,
additional_properties=None):
"""Constructor for the PersonNavnAdresse class"""
# Initialize members of the class
self.status_field = status_field
self.status_dato_field = APIHelper.RFC3339DateTime(status_dato_field) if status_dato_field else None
self.fodselsdato_field = APIHelper.RFC3339DateTime(fodselsdato_field) if fodselsdato_field else None
self.navn_field = navn_field
self.adresse_field = adresse_field
self.postnr_field = postnr_field
self.poststed_field = poststed_field
self.kommune_field = kommune_field
self.fylke_field = fylke_field
self.alder_field = alder_field
self.kjonn_field = kjonn_field
self.telefon_field = telefon_field
# Add additional model properties to the instance
self.additional_properties = additional_properties if additional_properties is not None else {}
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
status_field = dictionary.get('statusField')
status_dato_field = APIHelper.RFC3339DateTime.from_value(dictionary.get("statusDatoField")).datetime if dictionary.get("statusDatoField") else None
fodselsdato_field = APIHelper.RFC3339DateTime.from_value(dictionary.get("fodselsdatoField")).datetime if dictionary.get("fodselsdatoField") else None
navn_field = dictionary.get('navnField')
adresse_field = dictionary.get('adresseField')
postnr_field = dictionary.get('postnrField')
poststed_field = dictionary.get('poststedField')
kommune_field = dictionary.get('kommuneField')
fylke_field = dictionary.get('fylkeField')
alder_field = dictionary.get('alderField')
kjonn_field = dictionary.get('kjonnField')
telefon_field = dictionary.get('telefonField')
# Clean out expected properties from dictionary
for key in cls._names.values():
if key in dictionary:
del dictionary[key]
# Return an object of this model
return cls(status_field,
status_dato_field,
fodselsdato_field,
navn_field,
adresse_field,
postnr_field,
poststed_field,
kommune_field,
fylke_field,
alder_field,
kjonn_field,
telefon_field,
dictionary)
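
# A minimal usage sketch (illustrative only, not part of the generated model).
# The dictionary keys follow the API property names mapped in `_names`; the
# values are hypothetical.
if __name__ == '__main__':
    sample = {
        'statusField': 'active',
        'navnField': 'Ola Nordmann',
        'postnrField': '0150',
        'telefonField': ['+47 12345678'],
    }
    person = PersonNavnAdresse.from_dictionary(sample)
    print(person.navn_field, person.postnr_field)  # -> Ola Nordmann 0150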
|
[
"runes@unipluss.no"
] |
runes@unipluss.no
|
ec83755d7ceef5096637db15a3827324da7e2c2b
|
8311a4bc770d91b802b573b91ccc9c8e03f15123
|
/ac/administrative_communication/doctype/assignment_transaction_action/assignment_transaction_action.py
|
3c8b30c4d720c9453f81113f640d58aafa2d481c
|
[
"MIT"
] |
permissive
|
aymenit2008/ac
|
d6756d32d291255ae4ba6d08700489b27377890f
|
8791d86679e10b57417559fcc9ca149321104845
|
refs/heads/main
| 2023-03-20T21:21:51.168016
| 2021-02-17T08:55:26
| 2021-02-17T08:55:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 271
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Aseel and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class AssignmentTransactionAction(Document):
pass
|
[
"frappe@ubuntu.vm"
] |
frappe@ubuntu.vm
|
738d45fb6a0616db48e488290215ae5066900004
|
b3b68efa404a7034f0d5a1c10b281ef721f8321a
|
/Scripts/simulation/aspirations/aspiration_tuning.py
|
e5c80726c76e612d54e640a7fd276e7089624c5d
|
[
"Apache-2.0"
] |
permissive
|
velocist/TS4CheatsInfo
|
62195f3333076c148b2a59f926c9fb5202f1c6fb
|
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
|
refs/heads/main
| 2023-03-08T01:57:39.879485
| 2021-02-13T21:27:38
| 2021-02-13T21:27:38
| 337,543,310
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,481
|
py
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\aspirations\aspiration_tuning.py
# Compiled at: 2020-03-21 01:18:58
# Size of source mod 2**32: 34129 bytes
from aspirations.aspiration_types import AspriationType
from event_testing import objective_tuning
from event_testing.milestone import Milestone
from event_testing.resolver import SingleSimResolver, GlobalResolver
from interactions.utils.display_mixin import get_display_mixin
from interactions.utils.loot import LootActions
from traits.traits import Trait
from sims import genealogy_tracker
from sims4.tuning.instances import HashedTunedInstanceMetaclass, lock_instance_tunables
from sims4.tuning.tunable import TunableEnumEntry, TunableSet, OptionalTunable, TunableReference
from sims4.tuning.tunable_base import GroupNames, SourceQueries
from sims4.utils import classproperty, constproperty
from singletons import DEFAULT
from ui.ui_dialog import UiDialogResponse
from ui.ui_dialog_notification import UiDialogNotification
import enum, itertools, server.online_tests, services, sims4.localization, sims4.log, sims4.tuning.tunable, ui.screen_slam
logger = sims4.log.Logger('AspirationTuning')
class AspirationBasic(Milestone, metaclass=HashedTunedInstanceMetaclass, manager=services.get_instance_manager(sims4.resources.Types.ASPIRATION)):
INSTANCE_SUBCLASSES_ONLY = True
INSTANCE_TUNABLES = {'do_not_register_events_on_load':sims4.tuning.tunable.Tunable(description='\n If checked we will not register these events on load.\n \n This should be checked for all aspirations that are part of an\n aspiration track.\n ',
tunable_type=bool,
default=False),
'screen_slam':OptionalTunable(description='\n Which screen slam to show when this aspiration is complete.\n Localization Tokens: Sim - {0.SimFirstName}, Milestone Name - \n {1.String}, Aspiration Track Name - {2.String}\n ',
tunable=ui.screen_slam.TunableScreenSlamSnippet(),
tuning_group=GroupNames.UI)}
@classmethod
def handle_event(cls, sim_info, event, resolver):
if sim_info is not None:
if sim_info.aspiration_tracker is not None:
sim_info.aspiration_tracker.handle_event(cls, event, resolver)
@constproperty
def aspiration_type():
return AspriationType.BASIC
@classmethod
def register_callbacks(cls, additional_objectives=[]):
objectives = itertools.chain(cls.objectives, additional_objectives)
tests = [objective.objective_test for objective in objectives]
services.get_event_manager().register_tests(cls, tests)
@classmethod
def setup_aspiration(cls, event_data_tracker):
for objective in cls.objectives:
objective.setup_objective(event_data_tracker, cls)
@classmethod
def cleanup_aspiration(cls, event_data_tracker):
for objective in cls.objectives:
objective.cleanup_objective(event_data_tracker, cls)
@classmethod
def unregister_callbacks(cls):
tests = [objective.objective_test for objective in cls.objectives]
services.get_event_manager().unregister_tests(cls, tests)
@classmethod
def apply_on_complete_loot_actions(cls, sim_info):
pass
@constproperty
def update_on_load():
return True
class Aspiration(AspirationBasic):
INSTANCE_TUNABLES = {'display_name':sims4.localization.TunableLocalizedString(description='\n Display name for this aspiration\n ',
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'descriptive_text':sims4.localization.TunableLocalizedString(description='\n Description for this aspiration\n ',
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'is_child_aspiration':sims4.tuning.tunable.Tunable(description='\n If checked then this aspiration can only be completed by a child\n Sim and will not be considered complete even if all of the\n Objectives are complete as a non-child.\n ',
tunable_type=bool,
default=False,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.SPECIAL_CASES),
'reward':sims4.tuning.tunable.TunableReference(description='\n The reward given when this Aspiration is completed.\n ',
manager=services.get_instance_manager(sims4.resources.Types.REWARD),
allow_none=True,
tuning_group=GroupNames.REWARDS),
'on_complete_loot_actions':sims4.tuning.tunable.TunableList(description='\n List of loots operations that will be awarded when this aspiration\n completes.\n ',
tunable=LootActions.TunableReference(),
tuning_group=GroupNames.REWARDS)}
@constproperty
def aspiration_type():
return AspriationType.FULL_ASPIRATION
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
logger.debug('Loading asset: {0}', cls)
@classmethod
def apply_on_complete_loot_actions(cls, sim_info):
resolver = SingleSimResolver(sim_info)
for loot_action in cls.on_complete_loot_actions:
loot_action.apply_to_resolver(resolver)
class AspirationSimInfoPanel(AspirationBasic):
INSTANCE_TUNABLES = {'display_name':sims4.localization.TunableLocalizedString(description='\n Display name for this aspiration.\n ',
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'descriptive_text':sims4.localization.TunableLocalizedString(description='\n Description for this aspiration.\n ',
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'category':sims4.tuning.tunable.TunableReference(description='\n The category that this Sim Info Panel Aspiration goes into within\n the Sim Info Panel.\n ',
manager=services.get_instance_manager(sims4.resources.Types.ASPIRATION_CATEGORY),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI)}
@constproperty
def aspiration_type():
return AspriationType.SIM_INFO_PANEL
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
lock_instance_tunables(AspirationSimInfoPanel, do_not_register_events_on_load=False)
class AspirationNotification(AspirationBasic):
INSTANCE_TUNABLES = {'objectives':sims4.tuning.tunable.TunableList(description='\n A list of all of the Objectives that will be tracked in order for\n this Milestone to be completed. Using the Objective Completion Type\n we will determine the action number of Objectives that need to be\n completed.\n ',
tunable=sims4.tuning.tunable.TunableReference(description='\n An Objective that is one of the requirements for this Milestone\n to be completed.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.OBJECTIVE))),
tuning_group=GroupNames.CORE),
'notification':UiDialogNotification.TunableFactory(description='\n A TNS that will appear when this Aspiration is completed.\n ',
tuning_group=GroupNames.UI)}
@constproperty
def aspiration_type():
return AspriationType.NOTIFICATION
lock_instance_tunables(AspirationNotification, do_not_register_events_on_load=False)
AspirationCareerDisplayMixin = get_display_mixin(use_string_tokens=True, has_description=True, has_icon=True, has_tooltip=True)
class AspirationCareer(AspirationCareerDisplayMixin, AspirationBasic):
def reward(self, *args, **kwargs):
pass
@constproperty
def aspiration_type():
return AspriationType.CAREER
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
lock_instance_tunables(AspirationCareer, do_not_register_events_on_load=True)
class AspirationAssignment(AspirationBasic):
def reward(self, *args, **kwargs):
pass
@classmethod
def satisfy_assignment(cls, sim_info):
current_career = sim_info.career_tracker.get_on_assignment_career()
if current_career is None:
return
if cls not in current_career.active_assignments:
return
current_career.handle_assignment_loot()
@classmethod
def send_assignment_update(cls, sim_info):
current_career = sim_info.career_tracker.get_on_assignment_career()
if current_career is None:
return
if cls not in current_career.active_assignments:
return
current_career.resend_at_work_info()
current_career.send_assignment_update()
@constproperty
def aspiration_type():
return AspriationType.ASSIGNMENT
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
lock_instance_tunables(AspirationAssignment, do_not_register_events_on_load=True)
class AspirationGig(AspirationBasic):
def reward(self, *args, **kwargs):
pass
@classmethod
def satisfy_assignment(cls, sim_info):
for career in sim_info.career_tracker:
career.gig_aspiration_completed(cls)
@classmethod
def send_assignment_update(cls, sim_info):
pass
@constproperty
def aspiration_type():
return AspriationType.GIG
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
lock_instance_tunables(AspirationGig, do_not_register_events_on_load=True)
class AspirationFamilialTrigger(AspirationBasic):
INSTANCE_TUNABLES = {'objectives':sims4.tuning.tunable.TunableList(description='\n A list of all of the Objectives that will be tracked in order for\n this Milestone to be completed. Using the Objective Completion Type\n we will determine the action number of Objectives that need to be\n completed.\n ',
tunable=sims4.tuning.tunable.TunableReference(description='\n An Objective that is one of the requirements for this Milestone\n to be completed.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.OBJECTIVE))),
tuning_group=GroupNames.CORE),
'target_family_relationships':TunableSet(description='\n The genetic relationships that will be notified when this\n Aspiration is completed.\n ',
tunable=TunableEnumEntry(description='\n A genetic relationship that will be notified when this\n Aspiration is completed.\n ',
tunable_type=(genealogy_tracker.FamilyRelationshipIndex),
default=(genealogy_tracker.FamilyRelationshipIndex.FATHER)),
tuning_group=GroupNames.CORE)}
@constproperty
def aspiration_type():
return AspriationType.FAMILIAL
@classmethod
def _verify_tuning_callback(cls):
for objective in cls.objectives:
pass
lock_instance_tunables(AspirationFamilialTrigger, do_not_register_events_on_load=False)
class AspirationCategory(metaclass=HashedTunedInstanceMetaclass, manager=services.get_instance_manager(sims4.resources.Types.ASPIRATION_CATEGORY)):
INSTANCE_TUNABLES = {'display_text':sims4.localization.TunableLocalizedString(description="\n The Aspiration Category's name within the UI.\n ",
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'ui_sort_order':sims4.tuning.tunable.Tunable(description='\n Order in which this category is sorted against other categories in\n the UI. If two categories share the same sort order, undefined\n behavior will ensue.\n ',
tunable_type=int,
default=0,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'icon':sims4.tuning.tunable.TunableResourceKey(description='\n The icon to be displayed in the panel view.\n ',
default=None,
resource_types=sims4.resources.CompoundTypes.IMAGE,
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'is_sim_info_panel':sims4.tuning.tunable.Tunable(description='\n If checked then this Category will be marked for the Sim Info panel\n rather than for the Aspiration panel.\n ',
tunable_type=bool,
default=False,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'used_by_packs':sims4.tuning.tunable.TunableEnumSet(description='\n Optional set of packs which utilize this category. Used for\n excluding categories from the UI if their tuning resides in base\n game. (It is preferred to place category tuning in the appropriate\n pack, if possible.)\n ',
enum_type=sims4.common.Pack,
enum_default=sims4.common.Pack.BASE_GAME,
export_modes=sims4.tuning.tunable_base.ExportModes.ClientBinary,
tuning_group=GroupNames.UI)}
class AspirationTrackLevels(enum.Int):
LEVEL_1 = 1
LEVEL_2 = 2
LEVEL_3 = 3
LEVEL_4 = 4
LEVEL_5 = 5
LEVEL_6 = 6
TRACK_LEVEL_MAX = 6
class TunableHiddenTrackTestVariant(sims4.tuning.tunable.TunableVariant):
def __init__(self, description='A tunable test supporting hidden aspiration testing', **kwargs):
(super().__init__)(is_live_event_active=server.online_tests.IsLiveEventActive.TunableFactory(), description=description, **kwargs)
class AspirationTrack(metaclass=HashedTunedInstanceMetaclass, manager=services.get_instance_manager(sims4.resources.Types.ASPIRATION_TRACK)):
INSTANCE_TUNABLES = {'display_text':sims4.localization.TunableLocalizedString(description="\n The Aspiration Track's name.\n ",
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'description_text':sims4.localization.TunableLocalizedString(description="\n The Aspiration Track's description.\n ",
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'icon':sims4.tuning.tunable.TunableResourceKey(description="\n The Aspiration Track's icon.\n ",
default=None,
resource_types=sims4.resources.CompoundTypes.IMAGE,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'icon_high_res':sims4.tuning.tunable.TunableResourceKey(description="\n The icon to be displayed in aspiration track selection.\n The Aspiration Track's icon for display when selecting a track.\n ",
default=None,
resource_types=sims4.resources.CompoundTypes.IMAGE,
allow_none=True,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'category':sims4.tuning.tunable.TunableReference(description='\n The Aspiration Category that this Aspiration Track is a part of.\n ',
manager=services.get_instance_manager(sims4.resources.Types.ASPIRATION_CATEGORY),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'primary_trait':sims4.tuning.tunable.TunableReference(description='\n A trait that is given to Sims if this is the Aspiration Track that\n was selected for the Sim when they exited CAS for the first time or\n is the initial Aspiration Track selected when the Sim ages up from\n a child. \n ',
manager=services.get_instance_manager(sims4.resources.Types.TRAIT),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
allow_none=True,
tuning_group=GroupNames.CORE),
'aspirations':sims4.tuning.tunable.TunableMapping(description='\n A mapping between the Aspiration Track Level and the Aspiration\n to be completed. The Aspirations will need to be completed in\n order and upon the final one being complete will have this\n Aspiration Track be considered complete.\n ',
key_type=TunableEnumEntry(description='\n The Level within the Aspiration Track that this Aspiration\n lives.\n ',
tunable_type=AspirationTrackLevels,
default=(AspirationTrackLevels.LEVEL_1)),
value_type=sims4.tuning.tunable.TunableReference(description='\n The Aspiration within the track that is associated with this\n level.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.ASPIRATION)),
class_restrictions='Aspiration',
reload_dependent=True),
tuple_name='AspirationsMappingTuple',
minlength=1,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.CORE),
'reward':sims4.tuning.tunable.TunableReference(description='\n The rewards that are given when a Sim completes this Aspiration\n Track.\n ',
manager=services.get_instance_manager(sims4.resources.Types.REWARD),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.REWARDS),
'notification':UiDialogNotification.TunableFactory(description='\n A TNS that will be displayed with the Aspiration Track is\n completed.\n ',
locked_args={'text_tokens':DEFAULT,
'icon':None,
'primary_icon_response':UiDialogResponse(text=None, ui_request=UiDialogResponse.UiDialogUiRequest.SHOW_ASPIRATION_SELECTOR),
'secondary_icon':None},
tuning_group=GroupNames.UI),
'mood_asm_param':sims4.tuning.tunable.Tunable(description="\n The asm parameter for Sim's mood for use with CAS ASM state\n machine, driven by selection of this AspirationTrack, i.e. when a\n player selects the a romantic aspiration track, the Flirty ASM is\n given to the state machine to play. The name tuned here must match\n the animation state name parameter expected in Swing.\n ",
tunable_type=str,
default=None,
source_query=SourceQueries.SwingEnumNamePattern.format('mood'),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'is_hidden_unlockable':sims4.tuning.tunable.Tunable(description='\n If True, this track will be initially hidden until unlocked\n during gameplay.\n Note: It will never be able to be selected in CAS, even\n if it has been unlocked.\n ',
tunable_type=bool,
default=False,
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.UI),
'override_traits':sims4.tuning.tunable.TunableSet(description='\n Traits that are applied to the sim when they select this\n aspiration. Overrides any traits that are on the sim when the\n aspiration is selected. This is used for FTUE aspirations.\n ',
tunable=Trait.TunableReference(pack_safe=True),
export_modes=sims4.tuning.tunable_base.ExportModes.All,
tuning_group=GroupNames.SPECIAL_CASES),
'whim_set':OptionalTunable(description='\n If enabled then this Aspiration Track will give a Whim Set when it\n is active.\n ',
tunable=TunableReference(description='\n A Whim Set that is active when this Aspiration Track is active.\n ',
manager=(services.get_instance_manager(sims4.resources.Types.ASPIRATION)),
class_restrictions=('ObjectivelessWhimSet', )),
tuning_group=GroupNames.CORE),
'is_hidden_unlocked_tests':sims4.tuning.tunable.TunableList(description='\n All tests must pass for this track to remain\n unlocked on load. This does NOT unlock it.\n \n Uses GlobalResolver\n ',
tunable=TunableHiddenTrackTestVariant(),
tuning_group=GroupNames.SPECIAL_CASES)}
_sorted_aspirations = None
@classmethod
def get_aspirations(cls):
return cls._sorted_aspirations
@classmethod
def get_next_aspriation(cls, current_aspiration):
next_aspiration_level = None
current_aspiration_guid = current_aspiration.guid64
for level, track_aspiration in cls.aspirations.items():
if track_aspiration.guid64 == current_aspiration_guid:
next_aspiration_level = int(level) + 1
break
if next_aspiration_level in cls.aspirations:
return cls.aspirations[next_aspiration_level]
@classmethod
def is_available(cls):
if not cls.is_hidden_unlockable:
return True
resolver = GlobalResolver()
for test in cls.is_hidden_unlocked_tests:
if not resolver(test):
return False
return True
@classproperty
def is_child_aspiration_track(cls):
return cls._sorted_aspirations[0][1].is_child_aspiration
@classmethod
def _tuning_loaded_callback(cls):
cls._sorted_aspirations = tuple(sorted(cls.aspirations.items()))
@classmethod
def _verify_tuning_callback(cls):
aspiration_list = cls.aspirations.values()
aspiration_set = set(aspiration_list)
if len(aspiration_set) != len(aspiration_list):
logger.error('{} Aspiration Track has repeating aspiration values in the aspiration map.', cls,
owner='ddriscoll')
|
[
"cristina.caballero2406@gmail.com"
] |
cristina.caballero2406@gmail.com
|
f115526fdf25a9871453e6e4fb7ace173904b177
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/YXjx9G5uQ4CdYPuB4_9.py
|
2d28dd3b028ee0f16f9bafa9b0ccc53ff8f93765
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,114
|
py
|
"""
**Mubashir** needs your help to compare two lists.
First list `lst1` contains some numbers and second list `lst2` contains
**squared values of numbers given in the first list**.
Create a function which takes these two lists and returns `True` if all square
values are available, `False` otherwise.
lst1 = [121, 144, 19, 161, 19, 144, 19, 11]
lst2 = [121, 14641, 20736, 361, 25921, 361, 20736, 361]
Returns `True` because **121 is the square of 11, 14641 is the square of 121, 20736 is
the square of 144, 361 is the square of 19, 25921 is the square of 161, and so on...**
lst1 = [121, 144, 19, 161, 19, 144, 19, 11]
lst2 = [11*11, 121*121, 144*144, 19*19, 161*161, 19*19, 144*144, 19*19]
### Examples
simple_comp([121, 144, 19, 161, 19, 144, 19, 11], [121, 14641, 20736, 361, 25921, 361, 20736, 361]) ➞ True
simple_comp([4, 4], [1, 31]) ➞ False
simple_comp([2, 2, 3], [4, 4, 9]) ➞ True
### Notes
Numbers can be in any order.
"""
def simple_comp(lst1, lst2):
    if lst1 is None or lst2 is None:
return False
return sorted(lst2) == sorted([i * i for i in lst1])
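
# Quick self-check of the example calls from the docstring above:
if __name__ == '__main__':
    assert simple_comp([121, 144, 19, 161, 19, 144, 19, 11],
                       [121, 14641, 20736, 361, 25921, 361, 20736, 361]) is True
    assert simple_comp([4, 4], [1, 31]) is False
    assert simple_comp([2, 2, 3], [4, 4, 9]) is True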
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
1915d25c38cc211538989aaa0b2bf604b6778c6e
|
1619511136a1d861a23f4e8c643916a303ac222c
|
/apps/blog/models.py
|
812adf98991dd66d4cd9c379fbf0b1c81af9a691
|
[] |
no_license
|
2644783865/django2
|
29d00b4faa4169a5969d967e992d22b5285a5817
|
3909bfab85be620d95702eff4db0483b3676ac94
|
refs/heads/master
| 2020-06-29T00:05:57.622128
| 2019-08-03T08:30:51
| 2019-08-03T08:30:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,911
|
py
|
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth import get_user_model
# from django.template.defaultfilters import slugify
# from ckeditor.fields import RichTextField  # does not support file uploads
from ckeditor_uploader.fields import RichTextUploadingField  # supports file uploads
from pyquery import PyQuery as pq  # pip install pyquery; used to extract img URLs from HTML
# from pypinyin import lazy_pinyin  # pip install pypinyin
from uuslug import slugify # pip install django-uuslug
User = get_user_model()
# Create your models here.
class Source(models.Model):
"""
    Article source
"""
name = models.CharField(max_length=128, default="原创", unique=True, verbose_name="站点名称")
url = models.URLField(max_length=128, blank=True, null=True, verbose_name="url")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
class Meta:
verbose_name = "文章来源"
verbose_name_plural = "文章来源列表"
def __str__(self):
return self.name
class Category(models.Model):
"""
    Node category table
"""
name = models.CharField(max_length=128, unique=True, verbose_name="类别名称")
slug = models.SlugField(max_length=128, unique=True, verbose_name="url标识符")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Category, self).save(*args, **kwargs)
class Meta:
verbose_name = "节点类别"
verbose_name_plural = "节点分类列表"
def __str__(self):
return self.name
class Node(models.Model):
"""
    Node table
"""
name = models.CharField(max_length=128, unique=True, verbose_name="节点名称")
    # SlugField is a newspaper term (often called a short title). A slug can only contain
    # letters, numbers, underscores, or hyphens, and is typically used as a short label,
    # most often as part of the URL in the address bar.
    # Like CharField, you can specify max_length (see also the notes on database
    # portability and max_length in that section). If max_length is not specified,
    # Django will default the length to 50.
    # Field.db_index is set to True.
    # It is often useful to prepopulate a SlugField automatically from the value of some
    # other field. You can do this automatically in the admin using prepopulated_fields.
slug = models.SlugField(max_length=128, unique=True, verbose_name="url标识符")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
num_topics = models.IntegerField(default=0, verbose_name="主题数量")
category = models.ForeignKey(Category, on_delete=models.DO_NOTHING, verbose_name="所属类别")
show_status = models.BooleanField(default=True, verbose_name="显示状态")
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Node, self).save(*args, **kwargs)
class Meta:
verbose_name = "节点"
verbose_name_plural = "节点列表"
def __str__(self):
return self.name
class Tag(models.Model):
"""
    Article tag
"""
name = models.CharField(max_length=50, unique=True, verbose_name="标签")
slug = models.SlugField(max_length=128, unique=True, verbose_name="url标识符")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Tag, self).save(*args, **kwargs)
class Meta:
verbose_name = "文章标签"
verbose_name_plural = "文章标签列表"
def __str__(self):
return self.name
class Article(models.Model):
"""
    Topic/article table
"""
title = models.CharField(max_length=128, unique=True, verbose_name="标题")
slug = models.SlugField(max_length=128, unique=True, verbose_name="url标识符")
content = RichTextUploadingField(verbose_name="内容", config_name='awesome_ckeditor')
node = models.ForeignKey(Node, on_delete=models.DO_NOTHING, verbose_name="所属节点")
user = models.ForeignKey(User, on_delete=models.DO_NOTHING, related_name="user_article", verbose_name="作者")
source = models.ForeignKey(Source, on_delete=models.DO_NOTHING, verbose_name="来源", blank=True, null=True)
tags = models.ManyToManyField(Tag, verbose_name="标签", related_name="tags_article", blank=True)
num_views = models.IntegerField(default=0, verbose_name="浏览数量")
num_favorites = models.IntegerField(default=0, verbose_name="收藏数量")
last_answerer = models.ForeignKey(User, on_delete=models.DO_NOTHING, related_name="last_answerer_article", verbose_name="最后回复者", blank=True,
null=True)
show_status = models.BooleanField(default=True, verbose_name="显示状态")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="发表时间")
time_update = models.DateTimeField(blank=True, null=True, auto_now=True, verbose_name="更新时间")
def save(self, *args, **kwargs):
self.slug = slugify(self.title)
super(Article, self).save(*args, **kwargs)
    # Get the URL of the first image embedded in the rich-text editor content
    def get_content_img_url(self):
        temp = Article.objects.filter(pk=str(self.id)).values('content')  # fetch the content field from the Article table
        html = pq(temp[0]['content'])  # parse the editor HTML with pyquery
        # print(html, "\n", "----")
        img_path = pq(html)('img').attr('src')  # extract the image src path from the HTML
        # print("pic", img_path)
        return img_path  # return the first image path
class Meta:
verbose_name = "文章"
verbose_name_plural = "文章列表"
def __str__(self):
title_short = self.title if len(self.title) < 15 else self.title[:12] + '...'
return "%s %s %s" % (self.id, self.user, title_short)
class FriendsURL(models.Model):
friend_name = models.CharField(max_length=50, unique=True, verbose_name="用户名称")
friend_image = models.ImageField(max_length=8 * 1024 * 1024 * 5, upload_to="friends", verbose_name="用户头像")
site_name = models.CharField(max_length=50, unique=True, verbose_name="网站名称")
site_link = models.URLField(max_length=256, blank=True, null=True, verbose_name="网站链接")
show_status = models.BooleanField(default=True, verbose_name="显示状态")
time_create = models.DateTimeField(auto_now_add=True, verbose_name="创建时间")
time_update = models.DateTimeField(blank=True, null=True, auto_now=True, verbose_name="更新时间")
class Meta:
verbose_name = "友情链接"
verbose_name_plural = "友情链接列表"
def __str__(self):
return self.friend_name
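
# A minimal standalone sketch of the pyquery extraction used in
# Article.get_content_img_url (the HTML snippet is made up for illustration;
# this only runs when the module is executed directly, which also requires a
# configured Django environment):
if __name__ == '__main__':
    demo_html = '<p>hello</p><img src="/media/demo.png"><img src="/media/other.png">'
    print(pq(demo_html)('img').attr('src'))  # .attr returns the first match: /media/demo.png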
|
[
"zhuoqun527@qq.com"
] |
zhuoqun527@qq.com
|
602eb9121165edb4c2356d3bf98d987ff4c1ae16
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/pa3/sample/list_get_element_complex-2.py
|
6d104100d4429fa36c29aeb6f7ae3d25c91ca267
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038
| 2022-02-03T15:42:39
| 2022-02-03T15:42:39
| 451,969,776
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 199
|
py
|
$Definition
def next_int() -> int:
global next
next = next + 1
return next
def make_list() -> [int]:
return [next_int(), next_int(), next_int()]
print(make_list()[next_int() - 3])
|
[
"647530+Virtlink@users.noreply.github.com"
] |
647530+Virtlink@users.noreply.github.com
|
2a225dddd9fc8c12c4f1e8857ebbcc1a6a4cc4fc
|
e6e65a6704c20e6e0288cfc54915ee7ea9e1c0a7
|
/1recon/basicVersions/1pipeTrans/networks/modifiedVGG.py
|
ef58d51640d13f96c41606581d362c196412dd93
|
[] |
no_license
|
schatzkara/REU2019
|
fbb1f17d860c5d51a7ccae3ba106960d4c733949
|
6de28b5a8992f6122f2e9813de8b92d9e97ccbf3
|
refs/heads/master
| 2020-06-06T03:50:40.753334
| 2019-11-07T14:11:50
| 2019-11-07T14:11:50
| 192,629,267
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,172
|
py
|
# phase 3
# modified from https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
import torch
import torch.nn as nn
from torchsummary import summary
__all__ = [
'VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn',
'vgg19_bn', 'vgg19',
]
class VGG(nn.Module):
"""
Class representing the modified VGG network to be used.
"""
def __init__(self, features, num_classes=1000, init_weights=True,
pretrained=False, weights_path=''):
"""
Initializes the modified VGG network.
:param features: All the network layers.
:param num_classes: (int) The number of classes used for classification.
:param init_weights: (bool) True if the network weights should be initialized; False otherwise.
:param pretrained: (bool) True if the network should be pretrained; False otherwise.
:param weights_path: (str) The path at which to pretrained weights are located.
"""
super(VGG, self).__init__()
self.features = features
if init_weights:
self._initialize_weights()
if pretrained:
self.load_weights(weights_path=weights_path)
def load_weights(self, weights_path):
state_dict = torch.load(weights_path)
bad_weights = ["features.17", "features.19", "features.21", "features.24",
"features.26", "features.28", "classifier.0", "classifier.3",
"classifier.6"]
new_state_dict = {}
for key, weight in state_dict.items():
first_per = key.index('.')
second_per = key[first_per + 1:].index('.')
id_ = key[:first_per + second_per + 1]
if id_ not in bad_weights:
new_state_dict[key] = weight
self.load_state_dict(new_state_dict)
    def forward(self, x):
        x = self.features(x)
        return x
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
nn.init.constant_(m.bias, 0)
def make_layers(cfg, num_layers=None, batch_norm=False):
if num_layers is not None:
cfg = cfg[:num_layers]
layers = []
in_channels = 3
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
# print('Modified VGG Model Successfully Built \n')
return nn.Sequential(*layers)
cfgs = {
'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512], # , 'M'],
'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512], # , 'M'],
'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512], # , 'M'],
'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512], # , 'M'],
}
num_layers_to_use = {
'A': 7,
'B': 9,
'D': 10,
'E': 11,
}
def _vgg(arch, cfg, batch_norm, pretrained, progress, weights_path='', **kwargs):
if pretrained:
kwargs['init_weights'] = False
model = VGG(make_layers(cfgs[cfg], num_layers=num_layers_to_use[cfg], batch_norm=batch_norm),
pretrained=pretrained, weights_path=weights_path, **kwargs)
return model
def vgg11(pretrained=False, progress=True, **kwargs):
"""VGG 11-layer generator (configuration "A")
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg11', 'A', False, pretrained, progress, **kwargs)
def vgg11_bn(pretrained=False, progress=True, **kwargs):
"""VGG 11-layer generator (configuration "A") with batch normalization
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg11_bn', 'A', True, pretrained, progress, **kwargs)
def vgg13(pretrained=False, progress=True, **kwargs):
"""VGG 13-layer generator (configuration "B")
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg13', 'B', False, pretrained, progress, **kwargs)
def vgg13_bn(pretrained=False, progress=True, **kwargs):
"""VGG 13-layer generator (configuration "B") with batch normalization
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg13_bn', 'B', True, pretrained, progress, **kwargs)
def vgg16(pretrained=False, progress=True, weights_path='', **kwargs):
"""VGG 16-layer generator (configuration "D")
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg16', 'D', False, pretrained=pretrained, progress=progress, weights_path=weights_path, **kwargs)
def vgg16_bn(pretrained=False, progress=True, **kwargs):
"""VGG 16-layer generator (configuration "D") with batch normalization
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg16_bn', 'D', True, pretrained, progress, **kwargs)
def vgg19(pretrained=False, progress=True, **kwargs):
"""VGG 19-layer generator (configuration "E")
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg19', 'E', False, pretrained, progress, **kwargs)
def vgg19_bn(pretrained=False, progress=True, **kwargs):
"""VGG 19-layer generator (configuration 'E') with batch normalization
Args:
pretrained (bool): If True, returns a generator pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _vgg('vgg19_bn', 'E', True, pretrained, progress, **kwargs)
if __name__ == "__main__":
print_summary = True
vgg = vgg16()
if print_summary:
summary(vgg, input_size=(3, 112, 112))
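
    # A hedged sketch of loading partially pretrained weights. The checkpoint
    # path below is hypothetical; layers named in `bad_weights` are skipped by
    # load_weights, so the checkpoint only needs to cover the retained early
    # convolutional layers.
    import os  # local import, used only by this sketch
    demo_weights = 'weights/vgg16-imagenet.pth'  # hypothetical path
    if os.path.exists(demo_weights):
        pretrained_vgg = vgg16(pretrained=True, weights_path=demo_weights)
        out = pretrained_vgg(torch.randn(1, 3, 112, 112))
        print(out.shape)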
|
[
"36019128+schatzkara@users.noreply.github.com"
] |
36019128+schatzkara@users.noreply.github.com
|
39874ed6f673a12d3460b09f8f3745e4cbb3f6ed
|
65890d9024b035d997c78a3968c05908df81a9c4
|
/sharpy/managers/extensions/__init__.py
|
6728d67326888cd4880b83ceb0fe8f4bbd438321
|
[
"MIT"
] |
permissive
|
lladdy/sharpy-sc2
|
df43a266aa2ad90633b9a9f279fc14529740f30f
|
cf6f7850add6f8d33d6e7c5ccaf2a619e4838c52
|
refs/heads/develop
| 2023-08-03T17:49:51.939649
| 2023-07-31T10:04:34
| 2023-07-31T10:04:34
| 227,027,455
| 0
| 0
|
MIT
| 2023-08-30T08:39:00
| 2019-12-10T04:15:24
|
Python
|
UTF-8
|
Python
| false
| false
| 428
|
py
|
from .enemy_army_predicter import EnemyArmyPredicter
from .build_detector import BuildDetector
from .game_analyzer import GameAnalyzer
from .data_manager import DataManager
from .chat_manager import ChatManager
from .memory_manager import MemoryManager
from .archon import ArchonManager
from .heat_map import HeatMapManager
from .custom_func_manager import CustomFuncManager
from .enemy_vision_manager import EnemyVisionManager
|
[
"aki.vanttinen@sedgestudios.com"
] |
aki.vanttinen@sedgestudios.com
|
8a42f484b703ca5e6d8bacf2c9fbedaa62340aff
|
c380976b7c59dadaccabacf6b541124c967d2b5a
|
/.history/src/data/data_20191028083133.py
|
1bf93266354860aafa16ec794d7068be824878af
|
[
"MIT"
] |
permissive
|
bkraft4257/kaggle_titanic
|
b83603563b4a3c995b631e8142fe72e1730a0e2e
|
f29ea1773773109a867278c001dbd21a9f7b21dd
|
refs/heads/master
| 2020-08-17T12:45:28.653402
| 2019-11-15T16:20:04
| 2019-11-15T16:20:04
| 215,667,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,814
|
py
|
import pandas as pd
import numpy as np
from typing import Union
from pathlib import Path
from nameparser import HumanName
class ExtractData:
def __init__(self, filename: Union[str, Path], age_bins=None, drop_columns=None):
# """Extract Training Data from file or Path
# Arguments:
# filename {[str]} -- Filename of CSV data file containing data.
# drop_columns -- Columns in dataframe that should be dropped.
# """
if drop_columns is None:
drop_columns = ["age", "cabin", "name", "ticket"]
self.filename = filename
self.drop_columns = drop_columns
self.all_label_columns = ["survived"]
self.all_feature_columns = [
"pclass",
"name",
"sex",
"age",
"sibsp",
"parch",
"ticket",
"fare",
"cabin",
"embarked",
]
self.Xy_raw = None
self.extract_raw()
def extract_raw(self):
"""
Extracts data from a CSV file.
Returns:
pd.DataFrame -- [description]
"""
Xy_raw = pd.read_csv(self.filename)
Xy_raw.columns = Xy_raw.columns.str.lower().str.replace(" ", "_")
Xy_raw = Xy_raw.rename(columns={"age": "age_known"})
Xy_raw["pclass"] = Xy_raw["pclass"].astype("category")
self.Xy_raw = Xy_raw.set_index("passengerid")
class TransformData:
# Only one passenger with title Lady. She was traveling with a sibling and no husband. Set title to Miss
# 2 Mlle and 1 Mme. All 3 were 24 years old and travelling alone. Retitled as Miss.
# 1 Sir. Male 49 years old. Travelling with a sibling.
# Revs were all males.
# 8 Drs. (7 male, 1 female) changed to Mr. and Mrs. respectively.
title_translator = {
"Mlle.": "Miss.",
"Mme.": "Miss.",
"Sir.": "Mr.",
"Ms.": "Mrs.",
"Rev.": "Mr.",
"Col.": "Mr.",
"Capt.": "Mr.",
"Lady.": "Miss.",
"the Countess. of": "Mrs.",
"Dr.":np.nan,
}
def __init__(
self,
raw_data,
adult_age_threshold_min=13,
age_bins=None,
fare_mode=None,
embarked_mode=None,
Xy_age_estimate=None,
drop_columns=None,
):
# """Extract Training Data from file or Path
# Arguments:
# filename {[str]} -- Filename of CSV data file containing data.
# drop_columns -- Columns in dataframe that should be dropped.
# """
if age_bins is None:
age_bins = [0, 10, 20, 30, 40, 50, 60, np.inf]
if drop_columns is None:
drop_columns = ["age", "cabin", "name", "ticket"]
        self.drop_columns = drop_columns
        self.raw = raw_data
self.adult_age_threshold_min = adult_age_threshold_min
self.Xy_age_estimate = Xy_age_estimate
self.age_bins = age_bins
self.Xy = self.raw.Xy_raw.copy()
if fare_mode is None:
fare_mode = self.Xy["fare"].mode()[0]
if embarked_mode is None:
embarked_mode = self.Xy["embarked"].mode()[0]
self.fare_mode = fare_mode
self.embarked_mode = embarked_mode
self.impute_missing_fare()
self.impute_missing_embarked()
self.extract_title()
self.extract_last_name()
self.extract_cabin_number()
self.extract_cabin_prefix()
self.estimate_age()
self.calc_age_bins()
self.calc_is_child()
self.calc_is_travelling_alone()
def calc_is_travelling_alone(self):
"""Create Boolean feature if passenger is travelling alone. (True=Traveling alone, False=Traveling in group)
"""
self.Xy["is_travelling_alone"] = (self.Xy.sibsp == 0) & (self.Xy.parch == 0)
def calc_is_child(self):
"""Calculate Boolean feature if passenger is a child as determined by the self.adult_age_threshold_min
"""
self.Xy["is_child"] = self.Xy.age < self.adult_age_threshold_min
def extract_cabin_number(self):
"""
Extracts cabin number from ticket.
"""
self.Xy["cabin_number"] = self.Xy.ticket.str.extract("(\d+)$")
def extract_cabin_prefix(self):
"""Extracts cabin prefix from ticket.
"""
self.Xy["cabin_prefix"] = self.Xy.ticket.str.extract("^(.+) ")
def extract_title(self):
"""Extract title from the name using nameparser.
If the Title is empty then we will fill the title with either Mr or Mrs depending upon the sex. This
is adequate for the train and holdout data sets. The title being empty only occurs for passenger 1306
in the holdout data set. A more appropriate way to do this is to check on the sex and age to correctly
assign the title
"""
title = (self.Xy.name.apply(lambda x: HumanName(x).title)
.replace(self.title_translator)
.replace({"\.": ""}, regex=True)
.replace({"":np.nan})
.fillna(self.Xy['sex'])
.replace({'female':'Mrs', 'male':'Mr'})
)
self.Xy["title"] = title
def extract_last_name(self):
"Extracts last name from "
self.Xy["last_name"] = self.Xy.name.apply(lambda x: HumanName(x).last)
    def calc_age_bins(self):
        self.Xy["age_bin"] = pd.cut(self.Xy.age, bins=self.age_bins)
    def clean(self):
        """Clean data to remove missing data and "unnecessary" features.

        Drops the columns named in self.drop_columns from the working dataframe.
        """
        self.Xy = self.Xy.drop(self.drop_columns, axis=1)
def estimate_age(self, groupby_columns=["sex", "title"]):
"""[summary]
Keyword Arguments:
groupby {list} -- [description] (default: {['sex','title']})
"""
if self.Xy_age_estimate is None:
self.Xy_age_estimate = (
self.Xy.groupby(groupby_columns).age_known.mean().to_frame().round(1)
)
self.Xy_age_estimate = self.Xy_age_estimate.rename(
columns={"age_known": "age_estimate"}
)
out_df = (
self.Xy.reset_index()
.merge(self.Xy_age_estimate, on=groupby_columns)
.set_index("passengerid")
)
out_df["age"] = out_df["age_known"].fillna(out_df["age_estimate"])
self.Xy = out_df
def impute_missing_fare(self):
self.Xy["fare"] = self.Xy["fare"].fillna(self.fare_mode)
def impute_missing_embarked(self):
self.Xy["embarked"] = self.Xy["embarked"].fillna(self.embarked_mode)
|
[
"bob.kraft@infiniteleap.net"
] |
bob.kraft@infiniteleap.net
|
77b6f71d6095a7aedcb4e9fa8fb816536a942ae1
|
762c307de73db674c214619778802b863548bf2e
|
/env/bin/pildriver.py
|
b966081163f93b8efd36985cfb7efe42e5de776a
|
[] |
no_license
|
mansourgueye275/django-bloggy
|
56d9d6a2131a71c20d6c341764503b76ba3a45c1
|
1b8080ad26244d3d60e20e24ad6520d7a663381b
|
refs/heads/master
| 2021-06-23T02:16:30.301697
| 2017-09-01T18:56:23
| 2017-09-01T18:56:23
| 102,141,125
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,558
|
py
|
#!/home/mansour/Documents/RealPython/django-bloggy/env/bin/python3
"""PILdriver, an image-processing calculator using PIL.
An instance of class PILDriver is essentially a software stack machine
(Polish-notation interpreter) for sequencing PIL image
transformations. The state of the instance is the interpreter stack.
The only method one will normally invoke after initialization is the
`execute' method. This takes an argument list of tokens, pushes them
onto the instance's stack, and then tries to clear the stack by
successive evaluation of PILdriver operators. Any part of the stack
not cleaned off persists and is part of the evaluation context for
the next call of the execute method.
PILDriver doesn't catch any exceptions, on the theory that these
are actually diagnostic information that should be interpreted by
the calling code.
When called as a script, the command-line arguments are passed to
a PILDriver instance. If there are no command-line arguments, the
module runs an interactive interpreter, each line of which is split into
space-separated tokens and passed to the execute method.
In the method descriptions below, a first line beginning with the string
`usage:' means this method can be invoked with the token that follows
it. Following <>-enclosed arguments describe how the method interprets
the entries on the stack. Each argument specification begins with a
type specification: either `int', `float', `string', or `image'.
All operations consume their arguments off the stack (use `dup' to
keep copies around). Use `verbose 1' to see the stack state displayed
before each operation.
Usage examples:
`show crop 0 0 200 300 open test.png' loads test.png, crops out a portion
of its upper-left-hand corner and displays the cropped portion.
`save rotated.png rotate 30 open test.tiff' loads test.tiff, rotates it
30 degrees, and saves the result as rotated.png (in PNG format).
"""
# by Eric S. Raymond <esr@thyrsus.com>
# $Id$
# TO DO:
# 1. Add PILFont capabilities, once that's documented.
# 2. Add PILDraw operations.
# 3. Add support for composing and decomposing multiple-image files.
#
from __future__ import print_function
from PIL import Image
class PILDriver(object):
verbose = 0
def do_verbose(self):
"""usage: verbose <int:num>
Set verbosity flag from top of stack.
"""
self.verbose = int(self.do_pop())
# The evaluation stack (internal only)
stack = [] # Stack of pending operations
def push(self, item):
"Push an argument onto the evaluation stack."
self.stack.insert(0, item)
def top(self):
"Return the top-of-stack element."
return self.stack[0]
# Stack manipulation (callable)
def do_clear(self):
"""usage: clear
Clear the stack.
"""
self.stack = []
def do_pop(self):
"""usage: pop
Discard the top element on the stack.
"""
return self.stack.pop(0)
def do_dup(self):
"""usage: dup
Duplicate the top-of-stack item.
"""
        if hasattr(self.stack[0], 'format'):  # If it's an image, do a real copy
dup = self.stack[0].copy()
else:
dup = self.stack[0]
self.push(dup)
def do_swap(self):
"""usage: swap
Swap the top-of-stack item with the next one down.
"""
self.stack = [self.stack[1], self.stack[0]] + self.stack[2:]
# Image module functions (callable)
def do_new(self):
"""usage: new <int:xsize> <int:ysize> <int:color>:
Create and push a greyscale image of given size and color.
"""
xsize = int(self.do_pop())
ysize = int(self.do_pop())
color = int(self.do_pop())
self.push(Image.new("L", (xsize, ysize), color))
def do_open(self):
"""usage: open <string:filename>
Open the indicated image, read it, push the image on the stack.
"""
self.push(Image.open(self.do_pop()))
def do_blend(self):
"""usage: blend <image:pic1> <image:pic2> <float:alpha>
Replace two images and an alpha with the blended image.
"""
image1 = self.do_pop()
image2 = self.do_pop()
alpha = float(self.do_pop())
self.push(Image.blend(image1, image2, alpha))
def do_composite(self):
"""usage: composite <image:pic1> <image:pic2> <image:mask>
Replace two images and a mask with their composite.
"""
image1 = self.do_pop()
image2 = self.do_pop()
mask = self.do_pop()
self.push(Image.composite(image1, image2, mask))
def do_merge(self):
"""usage: merge <string:mode> <image:pic1>
[<image:pic2> [<image:pic3> [<image:pic4>]]]
        Merge top-of-stack images in a way described by the mode.
"""
mode = self.do_pop()
bandlist = []
for band in mode:
bandlist.append(self.do_pop())
self.push(Image.merge(mode, bandlist))
# Image class methods
def do_convert(self):
"""usage: convert <string:mode> <image:pic1>
Convert the top image to the given mode.
"""
mode = self.do_pop()
image = self.do_pop()
self.push(image.convert(mode))
def do_copy(self):
"""usage: copy <image:pic1>
Make and push a true copy of the top image.
"""
        self.do_dup()
def do_crop(self):
"""usage: crop <int:left> <int:upper> <int:right> <int:lower>
<image:pic1>
Crop and push a rectangular region from the current image.
"""
left = int(self.do_pop())
upper = int(self.do_pop())
right = int(self.do_pop())
lower = int(self.do_pop())
image = self.do_pop()
self.push(image.crop((left, upper, right, lower)))
    def do_draft(self):
        """usage: draft <string:mode> <int:xsize> <int:ysize> <image:pic1>
        Configure the loader of the top image for a given mode and size.
        """
        mode = self.do_pop()
        xsize = int(self.do_pop())
        ysize = int(self.do_pop())
        image = self.do_pop()
        image.draft(mode, (xsize, ysize))
        self.push(image)
def do_filter(self):
"""usage: filter <string:filtername> <image:pic1>
Process the top image with the given filter.
"""
from PIL import ImageFilter
imageFilter = getattr(ImageFilter, self.do_pop().upper())
image = self.do_pop()
self.push(image.filter(imageFilter))
def do_getbbox(self):
"""usage: getbbox
Push left, upper, right, and lower pixel coordinates of the top image.
"""
bounding_box = self.do_pop().getbbox()
self.push(bounding_box[3])
self.push(bounding_box[2])
self.push(bounding_box[1])
self.push(bounding_box[0])
def do_getextrema(self):
"""usage: extrema
Push minimum and maximum pixel values of the top image.
"""
        extrema = self.do_pop().getextrema()
self.push(extrema[1])
self.push(extrema[0])
def do_offset(self):
"""usage: offset <int:xoffset> <int:yoffset> <image:pic1>
Offset the pixels in the top image.
"""
xoff = int(self.do_pop())
yoff = int(self.do_pop())
image = self.do_pop()
self.push(image.offset(xoff, yoff))
def do_paste(self):
"""usage: paste <image:figure> <int:xoffset> <int:yoffset>
<image:ground>
Paste figure image into ground with upper left at given offsets.
"""
figure = self.do_pop()
xoff = int(self.do_pop())
yoff = int(self.do_pop())
ground = self.do_pop()
if figure.mode == "RGBA":
ground.paste(figure, (xoff, yoff), figure)
else:
ground.paste(figure, (xoff, yoff))
self.push(ground)
def do_resize(self):
"""usage: resize <int:xsize> <int:ysize> <image:pic1>
Resize the top image.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
image = self.do_pop()
self.push(image.resize((xsize, ysize)))
def do_rotate(self):
"""usage: rotate <int:angle> <image:pic1>
Rotate image through a given angle
"""
angle = int(self.do_pop())
image = self.do_pop()
self.push(image.rotate(angle))
def do_save(self):
"""usage: save <string:filename> <image:pic1>
Save image with default options.
"""
filename = self.do_pop()
image = self.do_pop()
image.save(filename)
def do_save2(self):
"""usage: save2 <string:filename> <string:options> <image:pic1>
Save image with specified options.
"""
filename = self.do_pop()
options = self.do_pop()
image = self.do_pop()
image.save(filename, None, options)
def do_show(self):
"""usage: show <image:pic1>
Display and pop the top image.
"""
self.do_pop().show()
def do_thumbnail(self):
"""usage: thumbnail <int:xsize> <int:ysize> <image:pic1>
Modify the top image in the stack to contain a thumbnail of itself.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
self.top().thumbnail((xsize, ysize))
def do_transpose(self):
"""usage: transpose <string:operator> <image:pic1>
Transpose the top image.
"""
transpose = self.do_pop().upper()
image = self.do_pop()
self.push(image.transpose(transpose))
# Image attributes
def do_format(self):
"""usage: format <image:pic1>
Push the format of the top image onto the stack.
"""
self.push(self.do_pop().format)
def do_mode(self):
"""usage: mode <image:pic1>
Push the mode of the top image onto the stack.
"""
self.push(self.do_pop().mode)
def do_size(self):
"""usage: size <image:pic1>
Push the image size on the stack as (y, x).
"""
size = self.do_pop().size
self.push(size[0])
self.push(size[1])
# ImageChops operations
def do_invert(self):
"""usage: invert <image:pic1>
Invert the top image.
"""
from PIL import ImageChops
self.push(ImageChops.invert(self.do_pop()))
def do_lighter(self):
"""usage: lighter <image:pic1> <image:pic2>
Pop the two top images, push an image of the lighter pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.lighter(image1, image2))
def do_darker(self):
"""usage: darker <image:pic1> <image:pic2>
Pop the two top images, push an image of the darker pixels of both.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.darker(image1, image2))
def do_difference(self):
"""usage: difference <image:pic1> <image:pic2>
Pop the two top images, push the difference image
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.difference(image1, image2))
def do_multiply(self):
"""usage: multiply <image:pic1> <image:pic2>
Pop the two top images, push the multiplication image.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
self.push(ImageChops.multiply(image1, image2))
def do_screen(self):
"""usage: screen <image:pic1> <image:pic2>
Pop the two top images, superimpose their inverted versions.
"""
from PIL import ImageChops
image2 = self.do_pop()
image1 = self.do_pop()
self.push(ImageChops.screen(image1, image2))
def do_add(self):
"""usage: add <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled sum with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.add(image1, image2, scale, offset))
def do_subtract(self):
"""usage: subtract <image:pic1> <image:pic2> <int:offset> <float:scale>
Pop the two top images, produce the scaled difference with offset.
"""
from PIL import ImageChops
image1 = self.do_pop()
image2 = self.do_pop()
scale = float(self.do_pop())
offset = int(self.do_pop())
self.push(ImageChops.subtract(image1, image2, scale, offset))
# ImageEnhance classes
def do_color(self):
"""usage: color <image:pic1>
Enhance color in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Color(image)
self.push(enhancer.enhance(factor))
def do_contrast(self):
"""usage: contrast <image:pic1>
Enhance contrast in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Contrast(image)
self.push(enhancer.enhance(factor))
def do_brightness(self):
"""usage: brightness <image:pic1>
Enhance brightness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Brightness(image)
self.push(enhancer.enhance(factor))
def do_sharpness(self):
"""usage: sharpness <image:pic1>
Enhance sharpness in the top image.
"""
from PIL import ImageEnhance
factor = float(self.do_pop())
image = self.do_pop()
enhancer = ImageEnhance.Sharpness(image)
self.push(enhancer.enhance(factor))
# The interpreter loop
def execute(self, list):
"Interpret a list of PILDriver commands."
list.reverse()
while len(list) > 0:
self.push(list[0])
list = list[1:]
if self.verbose:
print("Stack: " + repr(self.stack))
top = self.top()
if not isinstance(top, str):
continue
funcname = "do_" + top
if not hasattr(self, funcname):
continue
else:
self.do_pop()
func = getattr(self, funcname)
func()
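
# A quick sanity check of the interpreter loop (a sketch; "test.png" is a
# placeholder filename). Tokens are evaluated right to left, so this opens the
# file first and then displays it:
#
#   PILDriver().execute("show open test.png".split())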
if __name__ == '__main__':
import sys
# If we see command-line arguments, interpret them as a stack state
# and execute. Otherwise go interactive.
driver = PILDriver()
if len(sys.argv[1:]) > 0:
driver.execute(sys.argv[1:])
else:
print("PILDriver says hello.")
while True:
try:
if sys.version_info[0] >= 3:
line = input('pildriver> ')
else:
line = raw_input('pildriver> ')
except EOFError:
print("\nPILDriver says goodbye.")
break
driver.execute(line.split())
print(driver.stack)
# The following sets edit modes for GNU EMACS
# Local Variables:
# mode:python
# End:
|
[
"zoe14@live.fr"
] |
zoe14@live.fr
|
e13f315fcdd9f188936c67487b7406fc615608c7
|
0add67e1d0c2915caf84c3af5151ca68d9bb5682
|
/API_PROJ/asgi.py
|
6dbdaac532933c67632230a99da41926e98002b0
|
[] |
no_license
|
kamran1231/MOVIE_REST_API
|
8b30d9b4ecacd788982a9da35046f9b159037c3e
|
677418f94225b60cc486e8027dd381e7d5549239
|
refs/heads/master
| 2023-05-31T23:17:25.761222
| 2021-06-29T12:20:05
| 2021-06-29T12:20:05
| 380,045,682
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 393
|
py
|
"""
ASGI config for API_PROJ project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'API_PROJ.settings')
application = get_asgi_application()
|
[
"khanbrother805@gmail.com"
] |
khanbrother805@gmail.com
|
9596ca94cc905548e8c6637fbd52ec54c10719db
|
ce79d8a92ddc88ee17ccbfbab273fdb37600da0e
|
/0140_Word_Break_II.py
|
8afc6e7874b25d36b55e63afeae431ff16b40a41
|
[
"MIT"
] |
permissive
|
coldmanck/leetcode-python
|
5bf5bc489213a5835acc93b047e1b0ff7a1392bc
|
fd4cf122cfd4920f3bd8dce40ba7487a170a1b57
|
refs/heads/master
| 2023-06-09T02:30:49.681803
| 2023-05-27T04:01:57
| 2023-05-27T04:01:57
| 249,945,225
| 6
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 553
|
py
|
class Solution:
def wordBreak(self, s: str, wordDict: List[str]) -> List[str]:
memo = {len(s): ['']}
def sentences(i):
if i in memo:
return memo[i]
memo[i] = []
for j in range(i + 1, len(s) + 1):
if s[i:j] in wordDict:
for tail in sentences(j):
tail_ans = tail if tail == '' else ' ' + tail
memo[i].append(s[i:j] + tail_ans)
return memo[i]
ans = sentences(0)
return ans
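
# A quick check with the classic example (a sketch; `List` comes from the
# LeetCode harness, i.e. `from typing import List` when run standalone):
#
#   Solution().wordBreak("catsanddog", ["cat", "cats", "and", "sand", "dog"])
#   # -> ['cat sand dog', 'cats and dog']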
|
[
"coldmanck@gmail.com"
] |
coldmanck@gmail.com
|
006a2323f3e2a8bfdf5efcc2e45813474d8a20a5
|
5a7abc4537039860c49e9a80219efa759aad1b6f
|
/tests/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings_test.py
|
e484ead322db370fdda3ad0f146d93ad57f31f8b
|
[
"Apache-2.0"
] |
permissive
|
sec-js/prowler
|
d5a06c72f5d7e490bade1167966f83f7a5d7ed15
|
f72be9a1e492ad593c9ac267d3ca07f626263ccd
|
refs/heads/master
| 2023-08-31T22:48:33.983360
| 2022-12-22T16:02:28
| 2022-12-22T16:02:28
| 243,866,744
| 0
| 0
|
Apache-2.0
| 2022-12-23T12:23:20
| 2020-02-28T22:37:02
|
Python
|
UTF-8
|
Python
| false
| false
| 2,973
|
py
|
from re import search
from unittest import mock
from uuid import uuid4
from prowler.providers.aws.services.trustedadvisor.trustedadvisor_service import Check
AWS_REGION = "eu-west-1"
AWS_ACCOUNT_NUMBER = "123456789012"
detector_id = str(uuid4())
class Test_trustedadvisor_errors_and_warnings:
def test_no_detectors(self):
trustedadvisor_client = mock.MagicMock
trustedadvisor_client.checks = []
with mock.patch(
"prowler.providers.aws.services.trustedadvisor.trustedadvisor_service.TrustedAdvisor",
trustedadvisor_client,
):
from prowler.providers.aws.services.trustedadvisor.trustedadvisor_errors_and_warnings.trustedadvisor_errors_and_warnings import (
trustedadvisor_errors_and_warnings,
)
check = trustedadvisor_errors_and_warnings()
result = check.execute()
assert len(result) == 0
def test_trustedadvisor_all_passed_checks(self):
trustedadvisor_client = mock.MagicMock
trustedadvisor_client.checks = []
trustedadvisor_client.checks.append(
Check(
id="check1",
name="check1",
region=AWS_REGION,
status="ok",
)
)
with mock.patch(
"prowler.providers.aws.services.trustedadvisor.trustedadvisor_service.TrustedAdvisor",
trustedadvisor_client,
):
from prowler.providers.aws.services.trustedadvisor.trustedadvisor_errors_and_warnings.trustedadvisor_errors_and_warnings import (
trustedadvisor_errors_and_warnings,
)
check = trustedadvisor_errors_and_warnings()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert search("ok", result[0].status_extended)
assert result[0].resource_id == "check1"
def test_trustedadvisor_error_check(self):
trustedadvisor_client = mock.MagicMock
trustedadvisor_client.checks = []
trustedadvisor_client.checks.append(
Check(
id="check1",
name="check1",
region=AWS_REGION,
status="error",
)
)
with mock.patch(
"prowler.providers.aws.services.trustedadvisor.trustedadvisor_service.TrustedAdvisor",
trustedadvisor_client,
):
from prowler.providers.aws.services.trustedadvisor.trustedadvisor_errors_and_warnings.trustedadvisor_errors_and_warnings import (
trustedadvisor_errors_and_warnings,
)
check = trustedadvisor_errors_and_warnings()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search("error", result[0].status_extended)
assert result[0].resource_id == "check1"
|
[
"noreply@github.com"
] |
sec-js.noreply@github.com
|
6bc17ae0f3d584cafcfcbc9be7eb61a854ffef96
|
d047fed56a7d1de1d7c32ce83b8d62646fa7d19e
|
/average_kitne_hai.py
|
65f5b01aaddc83e61a33bb02bfb1e308241e38a0
|
[] |
no_license
|
shantinavgurukul/listQuestions
|
508b6bd489731d5b8a9ba1a27e5b88b1bb27341a
|
21f413f65b374e5fa63e0366591895757146d7c7
|
refs/heads/master
| 2022-11-23T07:56:15.392836
| 2020-08-02T03:51:46
| 2020-08-02T03:51:46
| 284,384,225
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,415
|
py
|
# elements = [23, 14, 56, 12, 19, 9, 15, 25, 31, 42, 43]
# index=0
# # average=0
# list1=[]
# list2=[]
# while(index<len(elements)):
# if(elements[index]%2!=0):
# list1.append(elements[index])
# # list1=list1+1
# average=elements[index]//7
# else:
# list2.append(elements[index])
# # list2=list2+1
# average=elements[index]//4
# index=index+1
# print("even number is:",list1)
# print(average)
# print("odd number is:",list2)
# print(average)
elements = [23, 14, 56, 12, 19, 9, 15, 25, 31, 42, 43]
# even=0
# odd=0
# esum1=0
# osum2=0
# index=0
# while index<len(elements):
# if elements[index]%2==0:
# even=even+1
# esum1=esum1+elements[index]
# else:
# odd=odd+1
# osum2=osum2+elements[index]
# index=index+1
# avarge1=esum1/even
# avarge2=osum2/odd
# print(esum1)
# print(osum2)
# print(even)
# print(odd)
# print(avarge1)
# print(avarge2)
index=0
evensum=0
oddsum=0
evencount=0
oddcount=0
while(index<len(elements)):
if(elements[index]%2==0):
evensum=evensum+elements[index]
evencount=evencount+1
else:
oddsum=oddsum+elements[index]
oddcount=oddcount+1
index=index+1
evenAverage=evensum//evencount
oddAverage=oddsum//oddcount
print(evensum)
print(oddsum)
print(evenAverage)
print(oddAverage)
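
# Expected output for the list above: evensum = 14+56+12+42 = 124,
# oddsum = 23+19+9+15+25+31+43 = 165, evenAverage = 124//4 = 31,
# oddAverage = 165//7 = 23 (floor division).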
|
[
"you@example.com"
] |
you@example.com
|
e574432f721c510f0ea06c7b99ee335b99f78d75
|
091e97bcfe5acc0635bd601aa8497e377b74d41a
|
/openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_utils/lookup_plugins/openshift_master_facts_default_predicates.py
|
ddecf9cc6017fc3b0177a74857c0523c116be0e6
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
openshift/openshift-tools
|
d59b63778f25cb8fb3c7a0253afe22a173e72f9d
|
e342f6659a4ef1a188ff403e2fc6b06ac6d119c7
|
refs/heads/prod
| 2023-08-30T01:52:04.108978
| 2022-03-23T21:07:28
| 2022-03-23T21:07:28
| 36,827,699
| 170
| 254
|
Apache-2.0
| 2022-06-16T12:11:51
| 2015-06-03T20:09:22
|
Python
|
UTF-8
|
Python
| false
| false
| 3,662
|
py
|
# pylint: disable=missing-docstring
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
# pylint: disable=too-many-branches,too-many-statements,too-many-arguments
def run(self, terms, variables=None, regions_enabled=True, short_version=None,
**kwargs):
predicates = []
if short_version is None:
if 'openshift_release' in variables:
release = variables['openshift_release']
if release.startswith('v'):
short_version = release[1:]
else:
short_version = release
short_version = '.'.join(short_version.split('.')[0:2])
elif 'openshift_version' in variables:
version = variables['openshift_version']
short_version = '.'.join(version.split('.')[0:2])
else:
# pylint: disable=line-too-long
raise AnsibleError("Either OpenShift needs to be installed or openshift_release needs to be specified")
if short_version not in ['3.6', '3.7', '3.8', '3.9', '3.10', '3.11', 'latest']:
raise AnsibleError("Unknown short_version %s" % short_version)
if short_version == 'latest':
short_version = '3.11'
# Predicates ordered according to OpenShift Origin source:
# origin/vendor/k8s.io/kubernetes/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go
if short_version in ['3.6']:
predicates.extend([
{'name': 'NoVolumeZoneConflict'},
{'name': 'MaxEBSVolumeCount'},
{'name': 'MaxGCEPDVolumeCount'},
{'name': 'MatchInterPodAffinity'},
{'name': 'NoDiskConflict'},
{'name': 'GeneralPredicates'},
{'name': 'PodToleratesNodeTaints'},
{'name': 'CheckNodeMemoryPressure'},
{'name': 'CheckNodeDiskPressure'},
])
if short_version in ['3.7', '3.8']:
predicates.extend([
{'name': 'NoVolumeZoneConflict'},
{'name': 'MaxEBSVolumeCount'},
{'name': 'MaxGCEPDVolumeCount'},
{'name': 'MaxAzureDiskVolumeCount'},
{'name': 'MatchInterPodAffinity'},
{'name': 'NoDiskConflict'},
{'name': 'GeneralPredicates'},
{'name': 'PodToleratesNodeTaints'},
{'name': 'CheckNodeMemoryPressure'},
{'name': 'CheckNodeDiskPressure'},
{'name': 'NoVolumeNodeConflict'},
])
if short_version in ['3.9', '3.10', '3.11']:
predicates.extend([
{'name': 'NoVolumeZoneConflict'},
{'name': 'MaxEBSVolumeCount'},
{'name': 'MaxGCEPDVolumeCount'},
{'name': 'MaxAzureDiskVolumeCount'},
{'name': 'MatchInterPodAffinity'},
{'name': 'NoDiskConflict'},
{'name': 'GeneralPredicates'},
{'name': 'PodToleratesNodeTaints'},
{'name': 'CheckNodeMemoryPressure'},
{'name': 'CheckNodeDiskPressure'},
{'name': 'CheckVolumeBinding'},
])
if regions_enabled:
region_predicate = {
'name': 'Region',
'argument': {
'serviceAffinity': {
'labels': ['region']
}
}
}
predicates.append(region_predicate)
return predicates
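
# Example use from a playbook (a sketch; assumes the plugin directory is on
# Ansible's lookup path, as it is inside openshift-ansible):
#
#   predicates: "{{ lookup('openshift_master_facts_default_predicates', short_version='3.11') }}"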
|
[
"mwoodson@redhat.com"
] |
mwoodson@redhat.com
|
18e964e43f650f2ccc67efec08a241936fe630f4
|
f607ef7417b73fdc5452ae6c9ea8172094476bdf
|
/hackerrank/nov_3.py
|
bd3effb2620982d72a38e061070ac24e83b6bb3b
|
[] |
no_license
|
pratyakshs/Algorithms
|
9238f58c02cafd81d6d201e64750f4eb92c2ccf6
|
ed4e857dd0f8a48c8ae56111c20d2396d879409c
|
refs/heads/master
| 2016-09-10T01:21:05.258429
| 2014-01-07T09:44:15
| 2014-01-07T09:44:15
| 15,940,840
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 94
|
py
|
T=input()
for t in range(T):
A,B = map(int,raw_input().split())
print pow(A,B,10**9+7)
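# Worked example: A=2, B=10 -> pow(2, 10, 10**9 + 7) prints 1024
# (three-argument pow performs modular exponentiation without materialising A**B).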
|
[
"rohith.uppala369@gmail.com"
] |
rohith.uppala369@gmail.com
|
b1a20c5c2747e5c43e1e7d1711018e92b8fdf035
|
b4166044870d1c026e86c95ac41e3e3613ee424f
|
/python_basic/abc086_a.py
|
9f109acd38679ee2f461c2e5a42f88e4fcfe00b8
|
[] |
no_license
|
nsakki55/AtCoder
|
2cbb785415a7c0b9df9953ddc3706c90a5716a03
|
03c428e8eb8f24b8560d00e2388ba75509619690
|
refs/heads/master
| 2020-05-31T04:33:06.400697
| 2020-01-19T13:41:41
| 2020-01-19T13:41:41
| 190,099,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 67
|
py
|
a,b=map(int,input().split())
print('Odd' if a*b%2 !=0 else 'Even')
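# Worked example: input "3 4" -> 3*4 = 12, which is even, so 'Even' is printed.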
|
[
"n.sakki55@gmail.com"
] |
n.sakki55@gmail.com
|
d48d8dcf7839f5f1eb56a0f65558ff6462b25843
|
e18a8c8ed113d51d99ae942204016a883925163d
|
/25stringchains/trial1.py
|
2f432bcc4243c495265c72aa4e256c91f3b7b115
|
[] |
no_license
|
saurabh11baghel/dsalgo
|
e385fc7739c59e8a78a4f7e30d3cedfdfa4eac06
|
88a108bc3ce7ec95ffad8d95079260a2fc5f6e12
|
refs/heads/master
| 2021-04-30T08:36:56.312035
| 2017-02-20T22:32:40
| 2017-02-20T22:32:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,184
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 29 19:04:08 2017
@author: abgoswam
"""
#_words_cnt = int(raw_input().strip())
#_words_i=0
#_words = []
#
#while _words_i < _words_cnt:
# _words_item = raw_input().strip()
# _words.append(_words_item)
# _words_i += 1
#
#print(_words)
#def longestChain(words):
#words = ['a', 'b', 'ba', 'bca', 'bdca', 'bda']
words = ['ab', 'abc']
#if words is None or len(words) <= 0:
# return 0
words_sorted = sorted(words, key=lambda x: len(x))
chain = {}
for s in words_sorted:
print("word 's' : {0}".format(s))
if len(s) == 1:
chain[s] = 1
else:
# iterate over the characters in s
_m = 0
for i in range(len(s)):
s_prime = (s[:i] + s[i+1:])
print("word 's_prime' : {0}".format(s_prime))
if s_prime in chain:
_m = max(_m, chain[s_prime])
        if _m > 0:
            _m += 1
        chain[s] = max(_m, 1)  # a word with no predecessor is still a chain of length 1
argmax_s = max(chain, key=lambda i:chain[i])
#return chain[argmax_s]
#words = ['a', 'b', 'ba', 'bca', 'bdca', 'bda']
##words = ['ab', 'ba']
#print(longestChain(words))
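
# Trace for words = ['ab', 'abc'] with the fix above: chain becomes
# {'ab': 1, 'abc': 2} (dropping one char from 'abc' can yield 'ab'),
# so argmax_s == 'abc' and the longest chain has length 2.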
|
[
"abgoswam@gmail.com"
] |
abgoswam@gmail.com
|
ea66726c00ea40930cc3627462df3a3a9bac9593
|
de470f0bad289ab9e8633a4527a2bf4c14a6b2d9
|
/manage-cli/get_sql.py
|
db028fd8f72b46fd3783c4f3d499549155512d80
|
[] |
no_license
|
DingGuodong/kissops
|
f1a22e557ae6b1ea4366d88ffceb157fb0bb5450
|
06ca11d2082d4d7ae88681fc0456a7502d134b27
|
refs/heads/master
| 2021-01-11T11:02:33.583667
| 2018-04-17T08:20:06
| 2018-04-17T08:20:06
| 72,810,369
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 625
|
py
|
#!/usr/bin/python
# encoding: utf-8
# -*- coding: utf8 -*-
"""
Created by PyCharm.
File: LinuxBashShellScriptForOps:get_sql.py
User: Guodong
Create Date: 2017/8/23
Create Time: 16:01
Description:
References: http://luozhaoyu.iteye.com/blog/1510635
"""
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.chdir(BASE_DIR)
try:
    app_name = raw_input("Which app would you like to get SQL for?\n")
    # Note: `dbshell` ignores a positional app argument; the old
    # `manage.py sql <app>` command (Django < 1.9) prints the schema SQL.
    os.system("python manage.py sql {name}".format(name=app_name))
except OSError as e:
print e.message
sys.exit(1)
|
[
"uberurey_ups@163.com"
] |
uberurey_ups@163.com
|
3d99cb9d17223816aa55786f68bb96c5ee9f812a
|
8c917dc4810e2dddf7d3902146280a67412c65ea
|
/v_7/Dongola/common/account_asset_custom/wizard/account_data_migration.py
|
d16a9ffe00724622ae063143e33836fc790a1d27
|
[] |
no_license
|
musabahmed/baba
|
d0906e03c1bbd222d3950f521533f3874434b993
|
0b997095c260d58b026440967fea3a202bef7efb
|
refs/heads/master
| 2021-10-09T02:37:32.458269
| 2018-12-20T06:00:00
| 2018-12-20T06:00:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,631
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv, orm
from tools.translate import _
class account_post_move(osv.osv_memory):
"""
Account move line reconcile wizard, it checks for the write off the reconcile entry or directly reconcile.
"""
_name = 'account.post.move'
_columns = {
'move_date': fields.date('Move date', required=True),
'journal_id': fields.many2one('account.journal', 'Journal', required=True),
'init_account':fields.many2one('account.account', 'Initial Account'),
'reval_account':fields.many2one('account.account', 'Revalue Account'),
}
def trans_rec_reconcile_full(self, cr, uid, ids, context=None):
"""Method to post data migration to the asset by:
creating new asset,
create new opertion of initial then calling post method
create new opertion of revalue then calling post method.
@return: True
"""
if context is None:
context = {}
context.update({'group':True})
account_data = self.pool.get('account.data.move')
location_obj=self.pool.get('account.asset.location')
category_obj=self.pool.get('account.asset.category')
asset_obj=self.pool.get('account.asset.asset')
history_obj=self.pool.get('account.asset.history')
depreciation_line_obj=self.pool.get('account.asset.depreciation.line')
period_obj = self.pool.get('account.period')
wiz_obj = self.browse(cr, uid, ids, context)[0]
asset_ids = []
dprc_line_ids=[]
context.update({'company_id':wiz_obj.journal_id.company_id.id})
pids = period_obj.find(cr, uid, wiz_obj.move_date, context=context)
if not pids:
raise osv.except_osv(_('Warning !'), _('Check the date'))
if 'active_ids' in context and context['active_ids']:
data = account_data.browse(cr, uid, context['active_ids'], context)
for rec in data:
cat_id = category_obj.search(cr, uid, [('code', '=', rec.categ_code), ('company_id','=',wiz_obj.journal_id.company_id.id) ],context=context)
loc_id = location_obj.search(cr, uid, [('code', '=', rec.location_code) ],context=context)
if not loc_id:
account_data.write(cr, uid, rec.id, {'status': 'No location'}, context=context)
continue
if location_obj.browse(cr, uid, loc_id, context)[0].company_id.id != wiz_obj.journal_id.company_id.id:
account_data.write(cr, uid, rec.id, {'status': 'Different company and asset location Journals'}, context=context)
continue
if not cat_id:
account_data.write(cr, uid, rec.id, {'status': 'No category'}, context=context)
continue
if rec.book_value < 0 :
account_data.write(cr, uid, rec.id, {'status': 'Book value less than zero'}, context=context)
continue
asset_id = asset_obj.create(cr,uid,{
'name':rec.description,
'category_id':cat_id[0],
'date_start': rec.comm_date ,
                'period_id':pids[0],
'quantity':rec.quantity,
'location':loc_id},context)
asset_ids.append(int(asset_id))
history_id=history_obj.create(cr,uid,{
'type':'initial',
'name':rec.description,
'quantity':rec.quantity,
'amount': rec.book_value ,
'account_id':wiz_obj.init_account.id,
'user_id':uid,
'date': wiz_obj.move_date,
'period_id': pids[0],
'asset_id':asset_id,
},context)
history_obj.create_operation_move(cr,uid,[history_id],context)
if rec.revalue_amount > 0:
history_id=history_obj.create(cr,uid,{
'type':'reval',
'name':rec.description,
'quantity':rec.quantity,
'amount': rec.revalue_amount ,
'account_id':wiz_obj.reval_account.id,
'user_id':uid,
'date': wiz_obj.move_date,
'period_id': pids[0],
'asset_id':asset_id,
},context)
history_obj.create_operation_move(cr,uid,[history_id],context)
asset_obj.validate(cr,uid,[asset_id],context)
if rec.total_depreciation > 0:
dprc_line_id=depreciation_line_obj.create(cr, uid,{'amount':rec.total_depreciation,
'name':rec.description,
'asset_id':asset_id,
'sequence':asset_id,
'depreciated_value':0.0,
'depreciation_date':wiz_obj.move_date,
'remaining_value':rec.book_value-rec.total_depreciation,
},context)
dprc_line_ids.append(dprc_line_id)
if asset_ids:
depreciation_line_obj.create_move( cr, uid, dprc_line_ids, context={})
asset_obj.compute_depreciation_board(cr,uid,asset_ids,context)
cr.execute('delete FROM account_data_move WHERE id = %s ', (rec.id,))
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"bakry@exp-sa.com"
] |
bakry@exp-sa.com
|
ed96c6be42b15bb8c1698da40cf12109cdc094d7
|
6440a113597191d3b78aa6b5cae6cea9fb057e2e
|
/Daily-Grind/73.py
|
5c3188a2d7ef2ce664500cd3404c4e1039778d18
|
[] |
no_license
|
DarshanGowda0/LC-Grind
|
40c162d8894df81ea7124f66daf20f86f327b6cb
|
f7b9a86797d52ab1057f0300352c0c5670a59bd5
|
refs/heads/master
| 2023-01-28T01:27:45.195164
| 2020-12-06T03:58:14
| 2020-12-06T03:58:14
| 277,024,335
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 792
|
py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
from collections import deque
class Solution:
def levelOrder(self, root: TreeNode) -> List[List[int]]:
if not root:
return []
que = deque([root])
ans = []
while que:
res = []
for _ in range(len(que)):
node = que.popleft()
res.append(node.val)
if node.left:
que.append(node.left)
if node.right:
que.append(node.right)
ans.append(res)
return ans
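
# A quick check with the classic example (a sketch; TreeNode/List come from the
# LeetCode harness): the tree [3,9,20,null,null,15,7] yields [[3], [9, 20], [15, 7]].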
|
[
"darshan.gowda008@gmail.com"
] |
darshan.gowda008@gmail.com
|
da1ef01eb25bcea1e1925a60a6dd48a073d52c65
|
a1232023595eed48bf3d56c0c1dcb8f05cdc261a
|
/204. Count Primes/204.py
|
3ebfd940c7ef3e38ddb147a2bdf0c8607730b934
|
[] |
no_license
|
NKcell/leetcode
|
2393ec3f8dc0e26b9ff098a592e4ffa9d7b774b8
|
88dec1c2106950e82819a0dd16425a9ee8fdaca4
|
refs/heads/master
| 2020-07-24T05:27:42.269903
| 2020-05-06T07:45:07
| 2020-05-06T07:45:07
| 207,814,442
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,112
|
py
|
"""
Times out (TLE): brute-force trial division against previously found primes.
def countPrimes(n):
if n < 3:
return 0
if n == 3:
return 1
primes = [2]
re = 1
for i in range(2,n):
flag = 0
for j in primes:
if i%j == 0:
flag = 1
break
if flag == 0:
re += 1
primes.append(i)
return re
"""
def countPrimes(n):
primes = [True]*n
if n < 3:
return 0
primes[0] = False
primes[1] = False
for i in range(2,int(n**0.5) + 1):
if primes[i]:
for j in range(i*i,n,i):
primes[j] = False
return sum(primes)
print(countPrimes(3))
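# Worked example for the sieve above: countPrimes(10) -> 4
# (the primes strictly below 10 are 2, 3, 5 and 7).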
"""
class Solution:
# @param {integer} n
# @return {integer}
def countPrimes(self, n):
if n < 3:
return 0
primes = [True] * n
primes[0] = primes[1] = False
for i in range(2, int(n ** 0.5) + 1):
if primes[i]:
primes[i * i: n: i] = [False] * len(primes[i * i: n: i])
return sum(primes)
"""
|
[
"517343690@qq.com"
] |
517343690@qq.com
|
996034a8186f4a0272c75010ab385800ae034cad
|
9e2d79a2cf1dbeaffe8ef897bb53f94af8b5b68c
|
/ichnaea/api/locate/tests/test_constants.py
|
476eaa06eae7fec95ad3c105304313df9d89e7f8
|
[
"Apache-2.0"
] |
permissive
|
amolk4games/ichnaea
|
a7d1cbd12b6aa5c0d877fca380080b08fcff24b8
|
907c542da05b428c8e994bce1537390e22b3ca58
|
refs/heads/master
| 2021-01-19T07:21:54.851167
| 2016-04-08T15:20:37
| 2016-04-08T15:21:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,359
|
py
|
from ichnaea.api.locate.constants import DataAccuracy
from ichnaea.tests.base import TestCase
class DataAccuracyTest(TestCase):
def test_compare(self):
self.assertTrue(DataAccuracy.high < DataAccuracy.medium)
self.assertTrue(DataAccuracy.high < DataAccuracy.low)
self.assertTrue(DataAccuracy.medium < DataAccuracy.low)
self.assertTrue(DataAccuracy.medium != DataAccuracy.high)
self.assertTrue(DataAccuracy.low == DataAccuracy.low)
self.assertTrue(DataAccuracy.low < DataAccuracy.none)
self.assertFalse(DataAccuracy.none == 'ab')
def test_compare_number(self):
self.assertTrue(DataAccuracy.none == float('inf'))
self.assertTrue(DataAccuracy.low > 50000)
self.assertTrue(DataAccuracy.low > 50000.0)
self.assertTrue(DataAccuracy.medium == 50000)
self.assertTrue(DataAccuracy.medium >= 50000.0)
self.assertTrue(DataAccuracy.medium <= 50000)
self.assertFalse(DataAccuracy.medium != 50000.0)
self.assertTrue(500.0 <= DataAccuracy.high)
self.assertFalse(1000.1 <= DataAccuracy.high)
def test_uncomparable(self):
with self.assertRaises(TypeError):
DataAccuracy.low < object()
with self.assertRaises(TypeError):
DataAccuracy.low >= 'ab'
with self.assertRaises(TypeError):
DataAccuracy.low > DataAccuracy
def test_from_number(self):
self.assertEqual(DataAccuracy.from_number(1), DataAccuracy.high)
self.assertEqual(DataAccuracy.from_number(-0.1), DataAccuracy.high)
self.assertEqual(DataAccuracy.from_number(1000), DataAccuracy.high)
self.assertEqual(DataAccuracy.from_number(1000.1), DataAccuracy.medium)
self.assertEqual(DataAccuracy.from_number(10 ** 5), DataAccuracy.low)
self.assertEqual(DataAccuracy.from_number(10 ** 9), DataAccuracy.none)
with self.assertRaises(TypeError):
DataAccuracy.from_number(None)
with self.assertRaises(ValueError):
DataAccuracy.from_number('ab')
def test_hash(self):
accuracies = {
DataAccuracy.none: 0,
DataAccuracy.low: 1,
DataAccuracy.medium: 2,
DataAccuracy.high: 3,
}
self.assertEqual(set(accuracies.values()),
set([0, 1, 2, 3]))
|
[
"hanno@hannosch.eu"
] |
hanno@hannosch.eu
|
095ee990026e24affd14798a17fabcd80698962c
|
2560feda0e6875e797571e3992192c1ad9b223ef
|
/A_CNN_sub_K-32-32-64-128_KS-37-37-37-37_MP-12-22-22-32_DO-2-2-2-2-2_AD.py
|
4c6f65a575028004a22f425a7df2a827ad59a62d
|
[] |
no_license
|
kikivanderheijden/CNN_SoundLoc_NC
|
7060945e62973d21e9389659cb38b76e7fe76f7c
|
c158481524cef6af92487a16a711b00255732762
|
refs/heads/master
| 2023-02-07T16:20:39.412567
| 2020-12-29T14:50:18
| 2020-12-29T14:50:18
| 319,252,239
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,989
|
py
|
#------------------------------------------------------------------------------
# Specifications
#------------------------------------------------------------------------------
# specify directories
dir_wrfiles = "/workspace/notebooks/models" # for testing on DSRI
#dir_wrfiles = r"C:\Users\kiki.vanderheijden\Documents\PostDoc_Auditory\DeepLearning" # for testing locally
# import libraries
from tensorflow.keras import layers
from tensorflow.keras import models # contains different types of models (use sequential model here?)
from tensorflow.keras import optimizers # contains different types of back propagation algorithms to train the model,
# including sgd (stochastic gradient
#from CustLoss_MSE import cust_mean_squared_error # note that in this loss function, the axis of the MSE is set to 1
from CustLoss_cosine_distance_angular import cos_dist_2D_angular # note that in this loss function, the axis of the MSE is set to 1
from CustMet_cosine_distance_angular import cos_distmet_2D_angular
# specify parameters
modelname = 'CNN_sub_K-32-32-64-128_KS-37-37-37-37_MP-12-22-22-32_DO-2-2-2-2-2_AD'
time_sound = 750 # input dimension 1 (time)
nfreqs = 99 # input dimension 2 (frequencies)
#------------------------------------------------------------------------------
# Define model architecture
#------------------------------------------------------------------------------
# CNN 1 - left channel
in1 = layers.Input(shape=(time_sound,nfreqs,1)) # define input (rows, columns, channels (only one in my case))
model_l_conv1 = layers.Conv2D(32,(3,7),activation='relu', padding = 'same')(in1) # define first layer and input to the layer
model_l_conv1_mp = layers.MaxPooling2D(pool_size = (1,2))(model_l_conv1)
model_l_conv1_mp_do = layers.Dropout(0.2)(model_l_conv1_mp)
# CNN 1 - right channel
in2 = layers.Input(shape=(time_sound,nfreqs,1)) # define input
model_r_conv1 = layers.Conv2D(32,(3,7),activation='relu', padding = 'same')(in2) # define first layer and input to the layer
model_r_conv1_mp = layers.MaxPooling2D(pool_size = (1,2))(model_r_conv1)
model_r_conv1_mp_do = layers.Dropout(0.2)(model_r_conv1_mp)
# CNN 2 - merged
model_final_merge = layers.Subtract()([model_l_conv1_mp_do, model_r_conv1_mp_do])
model_final_conv1 = layers.Conv2D(32,(3,7),activation='relu', padding = 'same')(model_final_merge)
model_final_conv1_mp = layers.MaxPooling2D(pool_size = (2,2))(model_final_conv1)
model_final_conv1_mp_do = layers.Dropout(0.2)(model_final_conv1_mp)
# CNN 3 - merged
model_final_conv2 = layers.Conv2D(64,(3,7), activation = 'relu', padding = 'same')(model_final_conv1_mp_do)
model_final_conv2_mp = layers.MaxPooling2D(pool_size = (2,2))(model_final_conv2)
model_final_conv2_mp_do = layers.Dropout(0.2)(model_final_conv2_mp)
# CNN 4 - merged
model_final_conv3 = layers.Conv2D(128,(3,7), activation = 'relu', padding = 'same')(model_final_conv2_mp_do)
model_final_conv3_mp = layers.MaxPooling2D(pool_size = (3,2))(model_final_conv3)
model_final_conv3_mp_do = layers.Dropout(0.2)(model_final_conv3_mp)
# flatten
model_final_flatten = layers.Flatten()(model_final_conv3_mp_do)
model_final_dropout = layers.Dropout(0.2)(model_final_flatten) # dropout for regularization
predicted_coords = layers.Dense(2, activation = 'tanh')(model_final_dropout) # I have used the tanh activation because our outputs should be between -1 and 1
#------------------------------------------------------------------------------
# Create model
#------------------------------------------------------------------------------
# create
model = models.Model(inputs = [in1,in2], outputs = predicted_coords) # create
# compile
model.compile(loss = cos_dist_2D_angular, optimizer = optimizers.Adam(), metrics=['cosine_proximity','mse', cos_distmet_2D_angular])
# print summary
model.summary()
# save
model.save(dir_wrfiles+'/A_'+modelname+'.h5') # save model
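
# A minimal smoke test of the compiled graph (a sketch: assumes numpy is
# available; the model is untrained, so the outputs are meaningless):
#
#   import numpy as np
#   dummy_l = np.zeros((1, time_sound, nfreqs, 1))
#   dummy_r = np.zeros((1, time_sound, nfreqs, 1))
#   model.predict([dummy_l, dummy_r])  # -> shape (1, 2), values in [-1, 1] from tanh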
|
[
"Kiki.vanderHeijden@unimaas.nl"
] |
Kiki.vanderHeijden@unimaas.nl
|
74105e5ec2623d439250e684bf167a4b94637c36
|
e9ff112a590a2707e66c518328ba71a4d964846a
|
/train_scripts/train_img.py
|
36a145fe2b40065fa0ddee19383018c080fd1909
|
[
"MIT"
] |
permissive
|
n644t031/fastMRI-kspace
|
60a6ca9679ede25f0db89f174647a8451a578331
|
4c484b3183e9f06838b5ee108af283611c2e1e77
|
refs/heads/master
| 2022-08-30T17:19:23.105996
| 2020-05-24T13:55:40
| 2020-05-24T13:55:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,028
|
py
|
import torch
from torch import nn, optim
from pathlib import Path
from utils.run_utils import initialize, save_dict_as_json, get_logger, create_arg_parser
from utils.train_utils import create_custom_data_loaders
from train.subsample import MaskFunc
from data.input_transforms import Prefetch2Device, TrainPreProcessK
from data.output_transforms import OutputReplaceTransformK
from models.ks_unet import UnetKS
from train.model_trainers.model_trainer_IMG import ModelTrainerIMG
from metrics.custom_losses import CSSIM
def train_img(args):
# Maybe move this to args later.
train_method = 'IMG'
# Creating checkpoint and logging directories, as well as the run name.
ckpt_path = Path(args.ckpt_root)
ckpt_path.mkdir(exist_ok=True)
ckpt_path = ckpt_path / train_method
ckpt_path.mkdir(exist_ok=True)
run_number, run_name = initialize(ckpt_path)
ckpt_path = ckpt_path / run_name
ckpt_path.mkdir(exist_ok=True)
log_path = Path(args.log_root)
log_path.mkdir(exist_ok=True)
log_path = log_path / train_method
log_path.mkdir(exist_ok=True)
log_path = log_path / run_name
log_path.mkdir(exist_ok=True)
logger = get_logger(name=__name__, save_file=log_path / run_name)
# Assignment inside running code appears to work.
if (args.gpu is not None) and torch.cuda.is_available():
device = torch.device(f'cuda:{args.gpu}')
logger.info(f'Using GPU {args.gpu} for {run_name}')
else:
device = torch.device('cpu')
logger.info(f'Using CPU for {run_name}')
# Saving peripheral variables and objects in args to reduce clutter and make the structure flexible.
args.run_number = run_number
args.run_name = run_name
args.ckpt_path = ckpt_path
args.log_path = log_path
args.device = device
save_dict_as_json(vars(args), log_dir=log_path, save_name=run_name)
# Input transforms. These are on a per-slice basis.
# UNET architecture requires that all inputs be dividable by some power of 2.
divisor = 2 ** args.num_pool_layers
mask_func = MaskFunc(args.center_fractions, args.accelerations)
data_prefetch = Prefetch2Device(device)
input_train_transform = TrainPreProcessK(mask_func, args.challenge, args.device, use_seed=False, divisor=divisor)
input_val_transform = TrainPreProcessK(mask_func, args.challenge, args.device, use_seed=True, divisor=divisor)
# train_transform = InputTransformK(mask_func, args.challenge, args.device, use_seed=False, divisor=divisor)
# val_transform = InputTransformK(mask_func, args.challenge, args.device, use_seed=True, divisor=divisor)
# DataLoaders
train_loader, val_loader = create_custom_data_loaders(args, transform=data_prefetch)
losses = dict(
cmg_loss=nn.MSELoss(reduction='mean'),
img_loss=CSSIM(filter_size=7)
)
output_transform = OutputReplaceTransformK()
data_chans = 2 if args.challenge == 'singlecoil' else 30 # Multicoil has 15 coils with 2 for real/imag
model = UnetKS(in_chans=data_chans, out_chans=data_chans, ext_chans=args.chans, chans=args.chans,
num_pool_layers=args.num_pool_layers, min_ext_size=args.min_ext_size, max_ext_size=args.max_ext_size,
use_ext_bias=args.use_ext_bias).to(device)
optimizer = optim.Adam(model.parameters(), lr=args.init_lr)
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_red_epoch, gamma=args.lr_red_rate)
trainer = ModelTrainerIMG(args, model, optimizer, train_loader, val_loader,
input_train_transform, input_val_transform, output_transform, losses, scheduler)
# TODO: Implement logging of model, losses, transforms, etc.
trainer.train_model()
if __name__ == '__main__':
settings = dict(
# Variables that almost never change.
challenge='multicoil',
data_root='/media/veritas/D/FastMRI',
log_root='./logs',
ckpt_root='./checkpoints',
batch_size=1, # This MUST be 1 for now.
chans=32,
num_pool_layers=4,
save_best_only=True,
center_fractions=[0.08, 0.04],
accelerations=[4, 8],
smoothing_factor=8,
# Variables that occasionally change.
max_images=8, # Maximum number of images to save.
num_workers=1,
init_lr=1E-4,
gpu=1, # Set to None for CPU mode.
max_to_keep=1,
img_lambda=100,
start_slice=10,
min_ext_size=3, # 1x1 extractor is included by default.
max_ext_size=15, # This trial is running with max 15 extractors!!!
# Variables that change frequently.
sample_rate=0.02,
num_epochs=50,
verbose=False,
use_slice_metrics=True, # Using slice metrics causes a 30% increase in training time.
lr_red_epoch=40,
lr_red_rate=0.1,
use_ext_bias=True,
# prev_model_ckpt='',
)
options = create_arg_parser(**settings).parse_args()
train_img(options)
|
[
"veritas9872@gmail.com"
] |
veritas9872@gmail.com
|
6e927067f2f9b821d839a61e0c4bc806d450dca4
|
df7b40e95718ac0f6071a0ba571b42efc81cf6de
|
/configs/cd_stb/liky_base_config.py
|
b09f7d24727fc477b50af4bc28b1118a4838038e
|
[
"Apache-2.0"
] |
permissive
|
shinianzhihou/ChangeDetection
|
87fa2c498248e6124aeefb8f0ee8154bda36deee
|
354e71234bef38b6e142b6ba02f23db958582844
|
refs/heads/master
| 2023-01-23T20:42:31.017006
| 2023-01-09T11:37:24
| 2023-01-09T11:37:24
| 218,001,748
| 162
| 29
|
Apache-2.0
| 2022-11-03T04:11:00
| 2019-10-28T08:41:54
|
Python
|
UTF-8
|
Python
| false
| false
| 3,758
|
py
|
_base_ = [
# '../_base_/models/cd_vit.py',
# '../_base_/datasets/two_input.py',
# '../_base_/default_runtime.py',
# '../_base_/schedules/schedule_40k.py'
]
# model settings
norm_cfg = dict(type='BN', requires_grad=True) # TO: BN
model = dict(
type='EncoderDecoder',
pretrained=None,
backbone=dict(
type='SiameseEfficientNet',
name='efficientnet_b1',
fusion='diff',
# pretrained=True,
checkpoint_path='../weights/efficientnet_b1-533bc792.pth',
),
decode_head=dict(
type='UPerHead',
in_channels=[24, 40, 112, 320],
in_index=[1, 2, 3, 4],
pool_scales=(1, 2, 3, 6),
channels=512,
dropout_ratio=0.1,
num_classes=2,
norm_cfg=norm_cfg,
align_corners=False,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0, class_weight=[0.8, 1.2])),
auxiliary_head=dict(
type='FCNHead',
in_channels=112,
in_index=3,
channels=256,
num_convs=1,
concat_input=False,
dropout_ratio=0.1,
num_classes=2,
norm_cfg=norm_cfg,
align_corners=False,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)),
# model training and testing settings
train_cfg=dict(),
test_cfg=dict(mode='whole'))
# dataset settings
dataset_type = 'TwoInputDataset'
img_norm_cfg = dict(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225))
train_pipeline = [
dict(type='RandomResizedCrop', height=512, width=512, p=0.8),
dict(type='RandomRotate90',p=0.5),
dict(type='RandomBrightnessContrast',p=0.2),
dict(type='GaussianBlur',p=0.3),
dict(type='HorizontalFlip', p=0.5),
dict(type='VerticalFlip', p=0.5),
dict(type='Normalize',**img_norm_cfg),
dict(type='ToTensorV2'),
]
test_pipeline = [
# dict(type='CenterCrop', height=256, width=256, p=1.0),
dict(type='Normalize',**img_norm_cfg),
dict(type='ToTensorV2'),
]
data_root = '/cache'
train_file = './work_dirs/cd_stb/meta_files/train.v1.txt'
val_file = './work_dirs/cd_stb/meta_files/val.v1.txt'
test_file = './work_dirs/cd_stb/meta_files/test.txt'
data = dict(
samples_per_gpu=12,
workers_per_gpu=4,
train=dict(
type=dataset_type,
meta_file=train_file,
data_root=data_root,
sep='\t',
imdecode_backend='pillow',
c255_t1_in_mask=False,
pipeline=train_pipeline),
val=dict(
type=dataset_type,
meta_file=val_file,
data_root=data_root,
sep='\t',
imdecode_backend='pillow',
c255_t1_in_mask=False,
pipeline=test_pipeline),
test=dict(
type=dataset_type,
meta_file=val_file,
data_root=data_root,
sep='\t',
imdecode_backend='pillow',
c255_t1_in_mask=False,
pipeline=test_pipeline))
# optimizer
optimizer = dict(
paramwise_cfg = dict(
custom_keys={
'head': dict(lr_mult=4.)}),
type='SGD',
lr=1e-3,
momentum=0.9,
weight_decay=0.0005)
optimizer_config = dict()
# learning policy
lr_config = dict(policy='poly', power=0.9, min_lr=1e-6, by_epoch=False)
# runtime settings
runner = dict(type='IterBasedRunner', max_iters=6000)
checkpoint_config = dict(by_epoch=False, interval=500)
evaluation = dict(interval=500000, metric='mIoU')
# runtime
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook', by_epoch=False),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
cudnn_benchmark = True
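
# A typical launch with this config (a sketch; assumes the standard
# mmsegmentation-style tools/ layout used by this repository):
#
#   python tools/train.py configs/cd_stb/liky_base_config.py --work-dir work_dirs/cd_stb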
|
[
"1178396201@qq.com"
] |
1178396201@qq.com
|
44f63e331e30929d37a460bce7411a4a6ae31475
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2308/60668/302743.py
|
9d04c19613bf9cf13a8648209475e330103ce9a5
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 211
|
py
|
def trees_5_after(s):
if s=="6 3 9":
print(0)
elif s=="7 4 9":
print(10)
else:
print(s)
if __name__=='__main__':
m,r = input().split()
s = input()
trees_5_after(s)
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
280fe52635c663628f4545cf099b820d4a2990b4
|
18dca9a552f5aa9303536613ec39f19cebf6647c
|
/BM25/BM25IndexingPrograms/index_mag_en_cs_allyears.py
|
1133c0c15e7ed385f68695ce4327e05ac3873006
|
[
"MIT"
] |
permissive
|
ashwath92/MastersThesis
|
9a39ed7eec825ed559d09507721c21bd12e2ab9c
|
f74755dc0c32f316da3c860dd5dbfa4c9cad97b3
|
refs/heads/master
| 2021-08-16T12:01:33.282459
| 2020-06-27T16:00:16
| 2020-06-27T16:00:16
| 197,282,312
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,462
|
py
|
from gensim.parsing import preprocessing
import contractions
from tqdm import tqdm
import pysolr
# IMPORTANT: I'M KEEPING THE REFERENCE IDS IN THE CONTEXTS. SO WHILE CHECKING BM25,
# CONTEXTS WHICH REFER TO THE SAME PAPER MIGHT BE MORE SIMILAR (IF CITATIONS ALREADY
#EXIST)
def clean_text(text):
""" Cleans the text in the only argument in various steps
ARGUMENTS: text: content/title, string
RETURNS: cleaned text, string"""
# Expand contractions: you're to you are and so on.
text = contractions.fix(text)
# Remove punctuation -- all special characters
text = preprocessing.strip_multiple_whitespaces(preprocessing.strip_punctuation(text))
return text
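
# Rough example (a sketch; exact trailing whitespace depends on gensim's
# strip_multiple_whitespaces): clean_text("you're  here!") -> "you are here "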
solr = pysolr.Solr('http://localhost:8983/solr/mag_en_cs_all', always_commit=True)
list_for_solr = []
rownum = 0
filename = '/home/ashwath/Programs/MAGCS/AllYearsFiles/mag_cs_allyears.txt'
with open(filename, 'r') as file:
# list of lists
for line in tqdm(file):
solr_record = dict()
rownum += 1
parts = clean_text(line).split()
paperid = parts[0]
content = ' '.join(parts[1:])
solr_record['paperid'] = paperid
solr_record['content'] = content
if rownum % 10000 == 0:
list_for_solr.append(solr_record)
solr.add(list_for_solr)
list_for_solr = []
print(rownum)
else:
list_for_solr.append(solr_record)
solr.add(list_for_solr)
|
[
"ashwath92@gmail.com"
] |
ashwath92@gmail.com
|
a944ad7ef3a0746f8e1658a859afad898e97a673
|
56231e5b77a8b743e84e43d28691da36b89a0cca
|
/platform-tools/systrace/catapult/telemetry/telemetry/internal/platform/tracing_agent/cpu_tracing_agent_unittest.py
|
f87f00902938107c3301b6fa4b7a40f22fe76d77
|
[
"MIT",
"LicenseRef-scancode-proprietary-license",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
cricketclubucd/davisdragons
|
ee3aa6ad72197c2218660843e03d58c562b965aa
|
99d5877377b80d1b20c78cc3c4c6f26795f29b14
|
refs/heads/master
| 2023-01-30T05:37:45.923195
| 2021-01-27T06:30:25
| 2021-01-27T06:30:25
| 96,661,120
| 2
| 2
|
MIT
| 2023-01-23T18:42:26
| 2017-07-09T04:32:10
|
HTML
|
UTF-8
|
Python
| false
| false
| 5,810
|
py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import sys
import time
import unittest
from telemetry import decorators
from telemetry.internal.platform.tracing_agent import cpu_tracing_agent
from telemetry.internal.platform import tracing_agent
from telemetry.internal.platform import linux_platform_backend
from telemetry.internal.platform import mac_platform_backend
from telemetry.internal.platform import win_platform_backend
from telemetry.timeline import tracing_config
from tracing.trace_data import trace_data
SNAPSHOT_KEYS = ['pid', 'ppid', 'name', 'pCpu', 'pMem']
TRACE_EVENT_KEYS = ['name', 'tid', 'pid', 'ph', 'args', 'local', 'id', 'ts']
class FakeAndroidPlatformBackend(object):
def __init__(self):
self.device = 'fake_device'
def GetOSName(self):
return 'android'
class CpuTracingAgentTest(unittest.TestCase):
def setUp(self):
self._config = tracing_config.TracingConfig()
self._config.enable_cpu_trace = True
if sys.platform.startswith('win'):
self._desktop_backend = win_platform_backend.WinPlatformBackend()
elif sys.platform.startswith('darwin'):
self._desktop_backend = mac_platform_backend.MacPlatformBackend()
else:
self._desktop_backend = linux_platform_backend.LinuxPlatformBackend()
self._agent = cpu_tracing_agent.CpuTracingAgent(self._desktop_backend)
@decorators.Enabled('linux', 'mac', 'win')
def testInit(self):
self.assertTrue(isinstance(self._agent,
tracing_agent.TracingAgent))
self.assertFalse(self._agent._snapshots)
self.assertFalse(self._agent._snapshot_ongoing)
@decorators.Enabled('linux', 'mac', 'win')
def testIsSupported(self):
self.assertTrue(cpu_tracing_agent.CpuTracingAgent.IsSupported(
self._desktop_backend))
self.assertFalse(cpu_tracing_agent.CpuTracingAgent.IsSupported(
FakeAndroidPlatformBackend()))
@decorators.Enabled('linux', 'mac', 'win')
def testStartAgentTracing(self):
self.assertFalse(self._agent._snapshot_ongoing)
self.assertFalse(self._agent._snapshots)
self.assertTrue(self._agent.StartAgentTracing(self._config, 0))
self.assertTrue(self._agent._snapshot_ongoing)
time.sleep(2)
self.assertTrue(self._agent._snapshots)
self._agent.StopAgentTracing()
@decorators.Enabled('linux', 'mac', 'win')
def testStartAgentTracingNotEnabled(self):
self._config.enable_cpu_trace = False
self.assertFalse(self._agent._snapshot_ongoing)
self.assertFalse(self._agent.StartAgentTracing(self._config, 0))
self.assertFalse(self._agent._snapshot_ongoing)
self.assertFalse(self._agent._snapshots)
time.sleep(2)
self.assertFalse(self._agent._snapshots)
@decorators.Enabled('linux', 'mac', 'win')
def testStopAgentTracingBeforeStart(self):
self.assertRaises(AssertionError, self._agent.StopAgentTracing)
@decorators.Enabled('linux', 'mac', 'win')
def testStopAgentTracing(self):
self._agent.StartAgentTracing(self._config, 0)
self._agent.StopAgentTracing()
self.assertFalse(self._agent._snapshot_ongoing)
@decorators.Enabled('linux', 'mac', 'win')
def testCollectAgentTraceDataBeforeStop(self):
self._agent.StartAgentTracing(self._config, 0)
self.assertRaises(AssertionError, self._agent.CollectAgentTraceData,
trace_data.TraceDataBuilder())
self._agent.StopAgentTracing()
@decorators.Enabled('linux', 'mac', 'win')
def testCollectAgentTraceData(self):
builder = trace_data.TraceDataBuilder()
self._agent.StartAgentTracing(self._config, 0)
self._agent.StopAgentTracing()
self._agent.CollectAgentTraceData(builder)
self.assertFalse(self._agent._snapshot_ongoing)
builder = builder.AsData()
self.assertTrue(builder.HasTracesFor(trace_data.CPU_TRACE_DATA))
@decorators.Enabled('linux', 'mac', 'win')
def testCollectAgentTraceDataFormat(self):
builder = trace_data.TraceDataBuilder()
self._agent.StartAgentTracing(self._config, 0)
time.sleep(2)
self._agent.StopAgentTracing()
self._agent.CollectAgentTraceData(builder)
builder = builder.AsData()
data = json.loads(builder.GetTracesFor(trace_data.CPU_TRACE_DATA)[0])
self.assertTrue(data)
self.assertEquals(set(data[0].keys()), set(TRACE_EVENT_KEYS))
self.assertEquals(set(data[0]['args']['snapshot'].keys()),
set(['processes']))
self.assertTrue(data[0]['args']['snapshot']['processes'])
self.assertEquals(set(data[0]['args']['snapshot']['processes'][0].keys()),
set(SNAPSHOT_KEYS))
@decorators.Enabled('linux', 'mac', 'win')
def testContainsRealProcesses(self):
builder = trace_data.TraceDataBuilder()
self._agent.StartAgentTracing(self._config, 0)
time.sleep(2)
self._agent.StopAgentTracing()
self._agent.CollectAgentTraceData(builder)
builder = builder.AsData()
data = json.loads(builder.GetTracesFor(trace_data.CPU_TRACE_DATA)[0])
self.assertTrue(data)
for snapshot in data:
found_unittest_process = False
processes = snapshot['args']['snapshot']['processes']
for process in processes:
if 'run_tests' in process['name']:
found_unittest_process = True
self.assertTrue(found_unittest_process)
@decorators.Enabled('win')
def testWindowsCanHandleProcessesWithSpaces(self):
proc_collector = cpu_tracing_agent.WindowsProcessCollector()
proc_collector.Init()
proc = proc_collector._ParseProcessString(
'0 1 Multi Word Process 50 75')
self.assertEquals(proc['ppid'], 0)
self.assertEquals(proc['pid'], 1)
self.assertEquals(proc['name'], 'Multi Word Process')
    self.assertEquals(proc['pCpu'], 50)
    self.assertEquals(proc['pMem'], 75)
|
[
"jena.suraj.k@gmail.com"
] |
jena.suraj.k@gmail.com
|
2cf2548ec33a387b618360771da7ed86198cafc8
|
8845a9557ef7a93a4235bc5aff059110c7c0846c
|
/python_sql/book_scraper.py
|
baa3c24c9aa47a73c32f47b19104f533080c500d
|
[] |
no_license
|
Makhanya/PythonMasterClass
|
c127791337c862bf5c6c8780a1643642d6e99ab6
|
241f48396e59cd20f1a275f15fa6fec3e8676bb6
|
refs/heads/master
| 2023-07-20T12:44:05.055259
| 2023-07-09T11:20:52
| 2023-07-09T11:20:52
| 86,587,952
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 815
|
py
|
import sqlite3
import requests
from bs4 import BeautifulSoup
# Request URL
response = requests.get("http://books.toscrape.com/catalogue/category/books/history_32/index.html")
soup = BeautifulSoup(response.text, "html.parser")
books = soup.find_all("article")
for book in books:
# print(book.find("h3").find("a")["title"])
# price = book.select(".price_color")[0].get_text()
# price = float(price.replace("£", "").replace("Â", ""))
ratings = {"Zero": 0, "One": 1, "Two": 2, "Three": 3, "Four": 4, "Five": 5}
paragraph = book.select(".star-rating")[0]
rating = paragraph.get_attribute_list("class")[-1]
int_rating = ratings[rating]
print(int_rating)
def get_title(book):
return book.find("h3").find("a")["title"]
# Initialize BS
# Extract data we want
# Save data to database
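# A minimal sketch (not in the original) of the "Save data to database" step
# above, using the sqlite3 module already imported; the database file name
# and table schema are assumptions for illustration.
def save_books(records):
    """Persist (title, rating) pairs into a local SQLite database."""
    connection = sqlite3.connect("books.db")
    cursor = connection.cursor()
    cursor.execute("CREATE TABLE IF NOT EXISTS books (title TEXT, rating INTEGER)")
    cursor.executemany("INSERT INTO books VALUES (?, ?)", records)
    connection.commit()
    connection.close()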
|
[
"makhanya.mzili@gmail.com"
] |
makhanya.mzili@gmail.com
|
0ff8b707234a1ca0f9f6545c2a2a81e78061d016
|
a48eaa4419b87c011abdee1eebfd04b469f4417b
|
/.history/ghostpost/models_20200210194514.py
|
6e30172547ebdc65f4aca769b0575aa72372211a
|
[] |
no_license
|
Imraj423/ghostpost
|
6418d6c9561528ac8c31dd70d8aae7fac4c77cca
|
4edc559eb1f9ef0d11aae78e2b1dbd5c4903ddb5
|
refs/heads/master
| 2021-01-02T13:32:58.032239
| 2020-02-11T23:21:31
| 2020-02-11T23:21:31
| 239,644,968
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
from django.db import models
from django.utils import timezone
class ghostPost(models.Model):
message = models.CharField(max_length=280)
time = models.DateTimeField(default=timezone.now)
like = models.IntegerField(default=0)
dislike = models.IntegerField(default=0)
    # Model fields do not accept a form "widget" argument; widgets belong to
    # Django forms, not models, so the field is declared plainly here.
    is_Boast = models.BooleanField()
def __str__(self):
return self.message
|
[
"dahqniss@gmail.com"
] |
dahqniss@gmail.com
|
bcd73e2c410197d8acf9939ea8cc23d1fff8e9fe
|
b73ce9da9ddf21e59444b36d0b747994072f42da
|
/tests/html_parser.py
|
af2feee2a62d712a60506d9076cf8a291d3e0b0f
|
[
"MIT"
] |
permissive
|
manga-py/providers
|
de68017205850f5d90869464aa41b1691dfca235
|
1ad9f74d1e672f62070632f77fc74e66d35d0e85
|
refs/heads/master
| 2020-07-28T17:29:07.923115
| 2020-05-23T08:27:22
| 2020-05-23T08:27:22
| 209,479,291
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,443
|
py
|
import unittest
from lxml.html import HtmlElement
from manga_py.providers.exceptions import *
from manga_py.providers.utils.html_parser import HtmlParser
from ._test_variables import TestVariables
class TestHtmlParser(unittest.TestCase, TestVariables):
def test_parser(self):
html = HtmlParser.parse(self.default_html)
self.assertEqual(len(HtmlParser.parse(self.default_html, 'a')), 1)
title = HtmlParser.select_one(html, 'title', 0)
self.assertEqual(HtmlParser.text(title), 'Title')
self.assertIsInstance(html, HtmlElement)
def test_background_image(self):
html = HtmlParser.parse(self.default_html)
self.assertEqual(
self.default_image_url,
HtmlParser.background_image(HtmlParser.select_one(html, 'div.image', 0))
)
with self.assertRaises(BackgroundImageExtractException) as e:
HtmlParser.background_image(HtmlParser.select_one(html, 'div.bad-image', 0))
self.assertEqual('background: url()', e.exception.style)
def test_get_empty_text(self):
html = HtmlParser.parse(self.default_html)
with self.assertRaises(InfoException) as e:
HtmlParser.text(HtmlParser.select_one(html, 'div.empty-element', 0))
self.assertEqual(('Element not have text',), e.exception.args)
with self.assertRaises(InfoException) as e:
HtmlParser.text(HtmlParser.select_one(html, 'div.inner-element-text', 0))
self.assertEqual(('Element not have text',), e.exception.args)
with self.assertRaises(InfoException) as e:
HtmlParser.text(HtmlParser.select_one(html, 'div.space-only-element', 0))
self.assertEqual(('Text is too short',), e.exception.args)
with self.assertRaises(InfoException) as e:
HtmlParser.text_full(HtmlParser.select_one(html, 'div.space-only-element', 0))
self.assertEqual(('Text is too short',), e.exception.args)
self.assertEqual('text', HtmlParser.text_full(HtmlParser.select_one(html, 'div.inner-element-text', 0)))
def test_attributes(self):
elements = HtmlParser.parse(self.default_html, '.empty-element')
self.assertEqual(['element-title'], HtmlParser.extract_attribute(elements, 'title'))
def test_cover(self):
html = HtmlParser.parse(self.default_html)
self.assertEqual(self.default_image_url, HtmlParser.cover(html, '.image > img'))
|
[
"sttv-pc@mail.ru"
] |
sttv-pc@mail.ru
|
ecc7bc3853d20d63778c3f3b2e9797f155087c66
|
0dcdf4d1e0d83e9af30369c5d7899f1ea069ead5
|
/project/backend/main/models.py
|
52ac188bfe05ccc30abb43c1bb2fe4e97623f310
|
[
"MIT"
] |
permissive
|
senavs/website-hosting
|
a4684aef35943ee3635237f9b99d7efe89cc6161
|
b38589ca4478d9ca0189d0bfcedcbd854de7eacc
|
refs/heads/master
| 2023-05-12T11:43:28.230688
| 2020-05-29T15:19:36
| 2020-05-29T15:19:36
| 264,193,522
| 1
| 0
|
MIT
| 2023-05-01T21:40:34
| 2020-05-15T12:56:05
|
HTML
|
UTF-8
|
Python
| false
| false
| 2,021
|
py
|
import sqlite3
from sqlite3 import ProgrammingError
from typing import Iterable
# NOT USING ORM BECAUSE OF THE PROJECT REQUIREMENTS
class Database:
_connection = _cursor = None
__tablename__ = None
__tablecolumns__ = []
def __init__(self, url: str):
self.url = url
def __enter__(self):
self.open()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
@property
def cursor(self):
return self._cursor
def insert(self, *args, **kwargs):
raise NotImplementedError()
def update(self, *args, **kwargs):
raise NotImplementedError()
def delete(self, *args, **kwargs):
raise NotImplementedError()
def open(self):
self._connection = sqlite3.connect(self.url)
self._cursor = self._connection.cursor()
def close(self):
self._connection.close()
def commit(self):
self._connection.commit()
def rollback(self):
self._connection.rollback()
def cursor_to_dict(self, cursors: Iterable):
return [dict(zip(self.__tablecolumns__, cursor)) for cursor in cursors]
def select_all(self):
if self._connection:
return self.cursor_to_dict(self.cursor.execute(f'SELECT * FROM "{self.__tablename__}" ;'))
raise ProgrammingError('ProgrammingError: Cannot operate on a closed database.')
def select_all_by(self, and_operator=True, **kwargs):
if self._connection:
if and_operator:
filters = ' AND '.join(f'{key} = {value}'.upper() for key, value in kwargs.items())
else:
filters = ' OR '.join(f'{key} = {value}'.upper() for key, value in kwargs.items())
return self.cursor_to_dict(self.cursor.execute(f'SELECT * FROM "{self.__tablename__}" WHERE {filters} ;'))
raise ProgrammingError('ProgrammingError: Cannot operate on a closed database.')
class DatabaseException(Exception):
"""Base class to database exception"""
|
[
"sena.matheus14@gmail.com"
] |
sena.matheus14@gmail.com
|
127dd90853c25dbea635b9dd8408f5b3bdcf5419
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02641/s218012358.py
|
150b550e08d6d7ff5fea6bdc3c1c73415536a324
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 735
|
py
|
from sys import stdin
readline = stdin.readline
def i_input(): return int(readline().rstrip())
def i_map(): return map(int, readline().rstrip().split())
def i_list(): return list(i_map())
def main():
    # Find the integer nearest to X that is not in P; on a tie, prefer the
    # smaller value.
    X, N = i_map()
    if N == 0:
        print(X)
        exit()
    P = i_list()
    # Sorted distances from X; if every slot near X were taken they would
    # follow 0, 1, 1, 2, 2, ... i.e. the i-th distance (1-indexed) is i // 2.
    p = list(map(lambda x: abs(x - X), P))
    p.sort()
    for i, j in enumerate(p, 1):
        if i // 2 != j:
            # First deviation from the pattern: distance i // 2 has a free
            # slot, preferring the smaller candidate X - i // 2.
            ans = X - (i // 2)
            if ans in P:
                ans = X + (i // 2)
            break
    else:
        # Every distance up to N // 2 is occupied; step just outside.
        if N % 2 == 1:
            ans = X - ((N + 1) // 2)
        else:
            ans = X - (N // 2)
            if ans in P:
                ans = X + (N // 2)
    print(ans)
if __name__ == "__main__":
main()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
5082cfcb3ca5d5b78605012561050bd02c54c5d4
|
14b91c8b09eb6fd5b099743b85b0bd3cbfa20b0e
|
/Variables.py
|
6294b4f9b231896737258226ed0857da75a2cd4c
|
[] |
no_license
|
artheadsweden/python_fundamentals
|
937e001fc1dc157286e166152d830b10524fb981
|
4d38fd7d69dccbf733f059a406f123d6e149f97f
|
refs/heads/master
| 2020-03-23T07:15:07.418803
| 2018-09-17T08:42:38
| 2018-09-17T08:42:38
| 141,258,276
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 100
|
py
|
def main():
x = "Hello There"
y = x
print(y)
if __name__ == '__main__':
main()
|
[
"joakim@arthead.se"
] |
joakim@arthead.se
|
a5869970d2b78ec96897101cab117327353b702c
|
e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67
|
/azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2017_08_01/operations/load_balancer_frontend_ip_configurations_operations.py
|
9c0cdae555548e8f98c0c3052d6d83201eac652f
|
[] |
no_license
|
EnjoyLifeFund/macHighSierra-cellars
|
59051e496ed0e68d14e0d5d91367a2c92c95e1fb
|
49a477d42f081e52f4c5bdd39535156a2df52d09
|
refs/heads/master
| 2022-12-25T19:28:29.992466
| 2017-10-10T13:00:08
| 2017-10-10T13:00:08
| 96,081,471
| 3
| 1
| null | 2022-12-17T02:26:21
| 2017-07-03T07:17:34
| null |
UTF-8
|
Python
| false
| false
| 8,339
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class LoadBalancerFrontendIPConfigurationsOperations(object):
"""LoadBalancerFrontendIPConfigurationsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2017-08-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-08-01"
self.config = config
def list(
self, resource_group_name, load_balancer_name, custom_headers=None, raw=False, **operation_config):
"""Gets all the load balancer frontend IP configurations.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of :class:`FrontendIPConfiguration
<azure.mgmt.network.v2017_08_01.models.FrontendIPConfiguration>`
:rtype: :class:`FrontendIPConfigurationPaged
<azure.mgmt.network.v2017_08_01.models.FrontendIPConfigurationPaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.FrontendIPConfigurationPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.FrontendIPConfigurationPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get(
self, resource_group_name, load_balancer_name, frontend_ip_configuration_name, custom_headers=None, raw=False, **operation_config):
"""Gets load balancer frontend IP configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param frontend_ip_configuration_name: The name of the frontend IP
configuration.
:type frontend_ip_configuration_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: :class:`FrontendIPConfiguration
<azure.mgmt.network.v2017_08_01.models.FrontendIPConfiguration>` or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
raw=true
:rtype: :class:`FrontendIPConfiguration
<azure.mgmt.network.v2017_08_01.models.FrontendIPConfiguration>` or
:class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations/{frontendIPConfigurationName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'frontendIPConfigurationName': self._serialize.url("frontend_ip_configuration_name", frontend_ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('FrontendIPConfiguration', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
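# Hedged usage sketch (not part of the generated client): with a configured
# NetworkManagementClient these operations are typically reached through an
# attribute on the client; the resource group and load balancer names below
# are assumptions.
#
#   for config in client.load_balancer_frontend_ip_configurations.list(
#           'my-resource-group', 'my-load-balancer'):
#       print(config.name)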
|
[
"Raliclo@gmail.com"
] |
Raliclo@gmail.com
|
175bf5483a2bdabcdb51e0f86efdccc41148535b
|
c15847d7c689d45aa7273e608ea447e28f1dfff6
|
/performance_testing/jmeter/cancel_running_jobs.py
|
e645fd6420384b57448a43010e86b29305a60ef8
|
[
"Apache-2.0"
] |
permissive
|
GoogleCloudPlatform/bigquery-utils
|
1b9ae6cff5aaa25b7312c4393551352066e0aa53
|
2e30dc06ef6452d3d1b6cdc5a468732a2327d11c
|
refs/heads/master
| 2023-09-03T07:08:37.783915
| 2023-08-11T16:51:04
| 2023-08-11T16:51:04
| 201,975,309
| 906
| 269
|
Apache-2.0
| 2023-09-07T11:43:23
| 2019-08-12T17:12:09
|
Java
|
UTF-8
|
Python
| false
| false
| 1,156
|
py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import ArgumentParser
from google.cloud import bigquery
def cancel_jobs(client):
for job in client.list_jobs(all_users=True, state_filter="RUNNING"):
client.cancel_job(job.job_id, location='us')
def get_cmd_line_args():
parser = ArgumentParser()
parser.add_argument(
'--project_id',
help='Project in which all running BigQuery jobs will be cancelled.')
return parser.parse_args()
def main():
args = get_cmd_line_args()
cancel_jobs(bigquery.Client(project=args.project_id))
if __name__ == '__main__':
main()
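# Example invocation (hypothetical project id):
#   python cancel_running_jobs.py --project_id=my-gcp-project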
|
[
"noreply@github.com"
] |
GoogleCloudPlatform.noreply@github.com
|
15f0162edd3a76b4a984fee76ec8c64da65af592
|
b3fc3f3e92f4cfa7a69b3cc5e7f29dac372a5179
|
/pactools/dar_model/preprocess.py
|
023f7da5b9c681008b74606af26cd20e735704b6
|
[] |
no_license
|
agramfort/pactools
|
29c23ed840a59b92e85e0663755f3693386bd944
|
d78b8e2eaa4ec2a5e3380d7e4ec41fa6d8a5e541
|
refs/heads/master
| 2023-07-06T19:41:13.734330
| 2017-03-29T14:32:18
| 2017-03-29T14:32:18
| 86,735,551
| 0
| 0
| null | 2017-03-30T18:30:15
| 2017-03-30T18:30:15
| null |
UTF-8
|
Python
| false
| false
| 19,159
|
py
|
import numpy as np
from scipy import signal
import matplotlib.pyplot as plt
from ..utils.spectrum import Spectrum
from ..utils.carrier import Carrier, LowPass
from ..utils.arma import Arma
from ..utils.validation import check_random_state
def _decimate(x, q):
"""
Downsample the signal after low-pass filtering to avoid aliasing.
An order 16 Chebyshev type I filter is used.
Parameters
----------
x : ndarray
The signal to be downsampled, as an N-dimensional array.
q : int
The downsampling factor.
Returns
-------
y : ndarray
The down-sampled signal.
"""
if not isinstance(q, int):
raise TypeError("q must be an integer")
b, a = signal.filter_design.cheby1(16, 0.025, 0.98 / q)
y = signal.filtfilt(b, a, x, axis=-1)
    sl = [slice(None)] * y.ndim
    sl[-1] = slice(None, None, q)
    # Index with a tuple: indexing an ndarray with a list of slices is
    # deprecated and rejected by recent NumPy versions.
    return y[tuple(sl)]
def decimate(sig, fs, decimation_factor):
"""Decimates the signal:
Downsampling after low-pass filtering to avoid aliasing
Parameters
----------
sig : array
Raw input signal
fs : float
Sampling frequency of the input
decimation_factor : int > 0
Ratio of sampling frequencies (old/new)
Returns
-------
sig : array
Decimated signal
fs : float
Sampling frequency of the output
"""
# -------- center the signal
sig = sig - np.mean(sig)
# -------- resample
# decimation could be performed in two steps for better performance
# 0 in the following array means no decimation
dec_1st = [
0, 0, 2, 3, 4, 5, 6, 7, 2, 3, 2, 0, 3, 0, 2, 3, 4, 0, 3, 0, 4, 3, 0, 0,
4, 5, 0, 3, 4, 0, 5
]
dec_2nd = [
0, 0, 0, 0, 0, 0, 0, 0, 4, 3, 5, 0, 4, 0, 7, 5, 4, 0, 6, 0, 5, 7, 0, 0,
6, 5, 0, 9, 7, 0, 6
]
d1 = dec_1st[decimation_factor]
if d1 == 0:
raise ValueError('cannot decimate by %d' % decimation_factor)
sig = _decimate(sig, d1)
sig = sig.astype(np.float32)
d2 = dec_2nd[decimation_factor]
if d2 > 0:
sig = _decimate(sig, d2)
sig = sig.astype(np.float32)
fs = fs / decimation_factor
return sig, fs
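# Hedged usage sketch (not in the original): downsample a 1 kHz signal by a
# factor of 10; the factor must have a nonzero entry in dec_1st above.
#
#   sig_100hz, fs_100hz = decimate(sig, fs=1000.0, decimation_factor=10)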
def extract_and_fill(sig, fs, fc, n_cycles=None, bandwidth=1.0, fill=0,
draw='', random_noise=None, extract_complex=True,
low_pass=False, random_state=None):
"""Creates a FIR bandpass filter, applies this filter to a signal to obtain
the filtered signal low_sig and its complement high_sig.
    Also fills the frequency gap in high_sig.
    Parameters
    ----------
    sig : array
Input signal
fs : float
Sampling frequency
fc : float
Center frequency of the bandpass filter
n_cycles : float
Number of cycles in the bandpass filter
Should be None if bandwidth is not None
bandwidth : float
Bandwidth of the bandpass filter
Should be None if n_cycles is not None
    fill : int in {0, 1, 2}
        Filling strategy for high_sig
        0 : keep the signal unchanged: high_sig = sig
        1 : remove the bandpass filtered signal: high_sig = sig - low_sig
        2 : remove and replace by bandpass filtered Gaussian white noise
    random_noise : array or None
        White noise used for the filling when fill == 2
        (drawn from random_state if None)
draw : string
List of plots to draw
extract_complex : boolean
Use a complex wavelet
low_pass : boolean
Use a lowpass filter at fc instead of a bandpass filter centered at fc
random_state : None, int or np.random.RandomState instance
Seed or random number generator for the surrogate analysis
Returns
-------
low_sig : array
Bandpass filtered signal
high_sig : array
Processed fullband signal
low_sig_imag : array (returned only if extract_complex is True)
Imaginary part of the bandpass filtered signal
"""
rng = check_random_state(random_state)
if random_noise is None:
random_noise = rng.randn(len(sig))
if low_pass:
filt = LowPass().design(fs=fs, fc=fc, bandwidth=bandwidth)
if extract_complex:
raise NotImplementedError('extract_complex incompatible with '
'low_pass filter.')
else:
filt = Carrier(extract_complex=extract_complex)
filt.design(fs, fc, n_cycles, bandwidth, zero_mean=True)
if 'e' in draw or 'z' in draw:
filt.plot(fscale='lin', print_width=True)
if extract_complex:
low_sig, low_sig_imag = filt.direct(sig)
else:
low_sig = filt.direct(sig)
if fill == 0:
# keeping driver in high_sig
high_sig = sig
if 'z' in draw or 'e' in draw:
_plot_multiple_spectrum([sig, low_sig, high_sig], labels=None,
fs=fs, colors='bgr')
plt.legend(['input', 'driver', 'output'], loc=0)
elif fill == 1:
# subtracting driver
high_sig = sig - low_sig
if 'z' in draw or 'e' in draw:
_plot_multiple_spectrum([sig, low_sig, high_sig], labels=None,
fs=fs, colors='bgr')
plt.legend(['input', 'driver', 'output'], loc=0)
elif fill == 2:
# replacing driver by a white noise
high_sig = sig - low_sig
if extract_complex:
fill_sig, _ = filt.direct(random_noise)
else:
fill_sig = filt.direct(random_noise)
fill_sig.shape = sig.shape
# adjust the power of the filling signal and add it to high_sig
high_sig = fill_gap(high_sig, fs, fa=fc, dfa=bandwidth, draw=draw,
fill_sig=fill_sig)
if 'z' in draw or 'e' in draw:
_plot_multiple_spectrum(
[sig, low_sig, sig - low_sig, fill_sig, high_sig], labels=None,
fs=fs, colors='bggrr')
            plt.legend(['input', 'driver', 'input-driver', 'filling',
                        'output'], loc=0)
else:
raise ValueError('Invalid fill parameter: %s' % str(fill))
if extract_complex:
return low_sig, high_sig, low_sig_imag
else:
return low_sig, high_sig
def low_pass_and_fill(sig, fs, fc=1.0, draw='', bandwidth=1.,
random_state=None):
    # extract_complex must be disabled explicitly here: the low-pass branch
    # of extract_and_fill raises NotImplementedError for complex extraction.
    low_sig, high_sig = extract_and_fill(sig, fs, fc, fill=1, low_pass=True,
                                         bandwidth=bandwidth,
                                         extract_complex=False,
                                         random_state=random_state)
rng = check_random_state(random_state)
random_noise = rng.randn(*sig.shape)
filt = LowPass().design(fs=fs, fc=fc)
fill_sig = filt.direct(random_noise)
# adjust power of fill_sig and add it to high_sig
filled_sig = fill_gap(sig=high_sig, fs=fs, fa=fc / 2., dfa=fc / 2.,
draw=draw, fill_sig=fill_sig)
return filled_sig
def _plot_multiple_spectrum(signals, fs, labels, colors):
"""
plot the signals spectrum
"""
s = Spectrum(block_length=min(2048, signals[0].size), fs=fs,
wfunc=np.blackman)
for sig in signals:
s.periodogram(sig, hold=True)
s.plot(labels=labels, colors=colors, fscale='lin')
def whiten(sig, fs, ordar=8, draw='', enf=50.0, d_enf=1.0, zero_phase=True,
**kwargs):
"""Use an AR model to whiten a signal
The whitening filter is not estimated around multiples of
the electric network frequency (up to d_enf Hz)
sig : input signal
fs : sampling frequency of input signal
ordar : order of AR whitening filter
draw : list of plots
enf : electric network frequency
    d_enf : tolerance on electric network frequency
zero_phase : if True, apply half the whitening for sig(t) and sig(-t)
returns the whitened signal
"""
# -------- create the AR model and its spectrum
ar = Arma(ordar=ordar, ordma=0, fs=fs, block_length=min(1024, sig.size))
ar.periodogram(sig)
# duplicate to see the removal of the electric network frequency
ar.periodogram(sig, hold=True)
fft_length, _ = ar.check_params()
# -------- remove the influence of the electric network frequency
k = 1
# while the harmonic k is included in the spectrum
while k * enf - d_enf < fs / 2.0:
fmin = k * enf - d_enf
fmax = k * enf + d_enf
kmin = max((0, int(fft_length * fmin / fs)))
kmax = min(fft_length // 2, int(fft_length * fmax / fs) + 1)
Amin = ar.psd[-1][0, kmin]
Amax = ar.psd[-1][0, kmax]
# linear interpolation between (kmin, Amin) and (kmax, Amax)
interpol = np.linspace(Amin, Amax, kmax - kmin, endpoint=False)
# remove positive frequencies
ar.psd[-1][0, kmin:kmax] = interpol
k += 1
# -------- change psd for zero phase filtering
if zero_phase:
ar.psd[-1] = np.sqrt(ar.psd[-1])
# -------- estimate the model and apply it
ar.estimate()
# apply the whitening twice (forward and backward) for zero-phase filtering
if zero_phase:
sigout = ar.inverse(sig)
sigout = sigout[::-1]
sigout = ar.inverse(sigout)
sigout = sigout[::-1]
else:
sigout = ar.inverse(sig)
gain = np.std(sig) / np.std(sigout)
sigout *= gain
if 'w' in draw or 'z' in draw:
ar.arma2psd(hold=True)
ar.periodogram(sigout, hold=True)
ar.plot('periodogram before/after whitening', labels=[
'with electric network', 'without electric network', 'model AR',
'whitened'
], fscale='lin')
plt.legend(loc='lower left')
return sigout
def fill_gap(sig, fs, fa=50.0, dfa=25.0, draw='', fill_sig=None,
random_state=None):
"""Fill a frequency gap with white noise.
"""
rng = check_random_state(random_state)
# -------- get the amplitude of the gap
sp = Spectrum(block_length=min(2048, sig.size), fs=fs, wfunc=np.blackman)
fft_length, _ = sp.check_params()
sp.periodogram(sig)
fmin = fa - dfa
fmax = fa + dfa
kmin = max((0, int(fft_length * fmin / fs)))
kmax = min(fft_length // 2, int(fft_length * fmax / fs) + 1)
Amin = sp.psd[-1][0, kmin]
Amax = sp.psd[-1][0, kmax]
if kmin == 0 and kmax == (fft_length // 2):
# we can't fill the entire spectrum
return sig
if kmin == 0:
# if the gap reach zero, we only consider the right bound
Amin = Amax
if kmax == (fft_length // 2):
# if the gap reach fft_length / 2, we only consider the left bound
Amax = Amin
A_fa = (Amin + Amax) * 0.5
# -------- bandpass filtering of white noise
if fill_sig is None:
n_cycles = 1.65 * fa / dfa
fir = Carrier()
fir.design(fs, fa, n_cycles, None, zero_mean=False)
fill_sig = fir.direct(rng.randn(*sig.shape))
# -------- compute the scale parameter
sp.periodogram(fill_sig, hold=True)
kfa = int(fft_length * fa / fs)
scale = np.sqrt(A_fa / sp.psd[-1][0, kfa])
fill_sig *= scale
sig += fill_sig
if 'g' in draw or 'z' in draw:
labels = ['signal', 'fill signal', 'gap filled']
sp.periodogram(sig, hold=True)
sp.plot(labels=labels, fscale='lin', title='fill')
return sig
def _show_plot(draw):
if draw:
plt.show()
def extract_driver(sigs, fs, low_fq, n_cycles=None, bandwidth=1.0, fill=2,
whitening='after', ordar=10, normalize=False,
extract_complex=True, random_state=None, draw=''):
"""Extract the driver with filtering and fill the rest of the signal.
The driver is extracted with a bandpass filter, subtracted from the signal,
and the frequency gap is filled with filtered white noise.
Parameters
----------
sigs : array, shape (n_epochs, n_points)
Input array to filter
fs : float
Sampling frequency
low_fq : float
Center frequency of bandpass filters.
bandwidth : float
Bandwidth of the bandpass filters.
Use it to have a constant bandwidth for all filters.
Should be None if n_cycles is not None.
n_cycles : float
Number of cycles of the bandpass filters.
Use it to have a bandwidth proportional to the center frequency.
Should be None if bandwidth is not None.
fill : in {0, 1, 2}
Filling strategy for the full band signal high_sigs:
0 : keep the signal unchanged: high_sigs = sigs
1 : remove the bandpass filtered signal: high_sigs = sigs - low_sigs
2 : remove and replace by bandpass filtered Gaussian white noise
whitening : in {'before', 'after', None}
Define when the whitening is done compared to the filtering.
ordar : int >= 0
Order of the AR model used for whitening
normalize : boolean
Whether to scale the signals to have unit norm high_sigs.
The low_sigs are scaled with the same scales.
extract_complex : boolean
Whether to extract a complex driver (low_sigs and low_sigs_imag)
random_state : None, int or np.random.RandomState instance
Seed or random number generator for the white noise filling strategy.
draw : string
Add a letter to the string to draw the corresponding figures:
- 'e' : extraction of the driver
- 'g' : gap filling
- 'w' : whitening step
- 'z' : all
Returns
-------
low_sigs : array, shape (n_epochs, n_points)
Bandpass filtered signal (aka driver)
high_sigs : array, shape (n_epochs, n_points)
Bandstop filtered signal
low_sigs_imag : array, shape (n_epochs, n_points)
Imaginary part of the bandpass filtered signal
Returned only if extract_complex is True.
Examples
--------
    low_sig, high_sig, low_sig_imag = extract_driver(sigs, fs, 3.0)
"""
for sigs in multiple_extract_driver(
sigs=sigs, fs=fs, frequency_range=[low_fq], n_cycles=n_cycles,
bandwidth=bandwidth, fill=fill, whitening=whitening, ordar=ordar,
normalize=normalize, extract_complex=extract_complex,
random_state=random_state, draw=draw):
pass
return sigs
def multiple_extract_driver(sigs, fs, frequency_range, n_cycles=None,
bandwidth=1.0, fill=2, whitening='after', ordar=10,
normalize=False, extract_complex=True,
random_state=None, draw=''):
"""Extract the driver for several bandpass center frequency.
Parameters
----------
sigs : array, shape (n_epochs, n_points)
Input array to filter
fs : float
Sampling frequency
frequency_range : float, list, or array, shape (n_frequencies, )
List of center frequency of bandpass filters.
bandwidth : float
Bandwidth of the bandpass filters.
Use it to have a constant bandwidth for all filters.
Should be None if n_cycles is not None.
n_cycles : float
Number of cycles of the bandpass filters.
Use it to have a bandwidth proportional to the center frequency.
Should be None if bandwidth is not None.
fill : in {0, 1, 2}
Filling strategy for the full band signal high_sigs:
0 : keep the signal unchanged: high_sigs = sigs
1 : remove the bandpass filtered signal: high_sigs = sigs - low_sigs
2 : remove and replace by bandpass filtered Gaussian white noise
whitening : in {'before', 'after', None}
Define when the whitening is done compared to the filtering.
ordar : int >= 0
Order of the AR model used for whitening
normalize : boolean
Whether to scale the signals to have unit norm high_sigs.
The low_sigs are scaled with the same scales.
extract_complex : boolean
Whether to extract a complex driver (low_sigs and low_sigs_imag)
random_state : None, int or np.random.RandomState instance
Seed or random number generator for the white noise filling strategy.
draw : string
Add a letter to the string to draw the corresponding figures:
- 'e' : extraction of the driver
- 'g' : gap filling
- 'w' : whitening step
- 'z' : all
Returns
-------
low_sigs : array, shape (n_epochs, n_points)
Bandpass filtered signal (aka driver)
high_sigs : array, shape (n_epochs, n_points)
Bandstop filtered signal
low_sigs_imag : array, shape (n_epochs, n_points)
Imaginary part of the bandpass filtered signal
Returned only if extract_complex is True.
Example
-------
    for (low_sig, high_sig, low_sig_imag) in multiple_extract_driver(
            sigs, fs, [2., 3., 4.]):
pass
"""
frequency_range = np.atleast_1d(frequency_range)
sigs = np.atleast_2d(sigs)
rng = check_random_state(random_state)
if whitening == 'before':
sigs = [whiten(sig, fs=fs, ordar=ordar, draw=draw) for sig in sigs]
_show_plot(draw)
if fill == 2:
random_noise = rng.randn(len(sigs[0]))
else:
random_noise = None
# extract the high frequencies independently of the driver
    # arbitrary cutoff above the highest driver frequency
    fc_low_pass = frequency_range[-1] + frequency_range[0] + bandwidth
low_pass_width = bandwidth
low_and_high = [
extract_and_fill(sig, fs=fs, fc=fc_low_pass, bandwidth=low_pass_width,
fill=fill, random_noise=random_noise, draw=draw,
extract_complex=False, low_pass=True,
random_state=random_state) for sig in sigs
]
high_sigs = [both[1] for both in low_and_high]
if whitening == 'after':
high_sigs = [
whiten(high_sig, fs=fs, ordar=ordar, draw=draw)
for high_sig in high_sigs
]
if normalize:
scales = [1.0 / np.std(high_sig) for high_sig in high_sigs]
high_sigs = [high * s for (high, s) in zip(high_sigs, scales)]
# as high_sigs is now fixed, we don't need the following
fill = 0
random_noise = None
# extract_and_fill the driver
for fc in frequency_range:
low_and_high = [
extract_and_fill(
sig, fs=fs, fc=fc, n_cycles=n_cycles, bandwidth=bandwidth,
fill=fill, random_noise=random_noise, draw=draw,
extract_complex=extract_complex, random_state=random_state)
for sig in sigs
]
low_sigs = [both[0] for both in low_and_high]
if extract_complex:
low_sigs_imag = [both[2] for both in low_and_high]
# normalize variances
if normalize:
low_sigs = [low * s for (low, s) in zip(low_sigs, scales)]
if extract_complex:
low_sigs_imag = [
low * s for (low, s) in zip(low_sigs_imag, scales)
]
_show_plot(draw)
low_sigs = np.array(low_sigs)
high_sigs = np.array(high_sigs)
if extract_complex:
low_sigs_imag = np.array(low_sigs_imag)
yield low_sigs, high_sigs, low_sigs_imag
else:
yield low_sigs, high_sigs
|
[
"tom.dupre-la-tour@m4x.org"
] |
tom.dupre-la-tour@m4x.org
|
bd787adc4fb245d2acb71f579c985593fe27e6d8
|
b7e1d227d41542bf20f92d08bb0d453058cf6d19
|
/search/urls.py
|
1c810a86e3ed3a21dba7ab31f2b6f0dd6f3cd470
|
[] |
no_license
|
rusrom/django-ecommerce
|
dfa35bdb2832abf4077dd0883ec0e5e79ffa9662
|
aebef77713ab7c1c2118d5c190deee5ccfbd3cb9
|
refs/heads/master
| 2020-08-04T23:36:09.610480
| 2019-10-22T14:00:04
| 2019-10-22T14:00:04
| 212,315,359
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 142
|
py
|
from django.conf.urls import url
from .views import SearchListView
urlpatterns = [
url(r'^$', SearchListView.as_view(), name='list'),
]
|
[
"rusrom@guyfawkes.33mail.com"
] |
rusrom@guyfawkes.33mail.com
|
0850525da95f116617b72ae302c4e47e6613f311
|
5d4504fe2a4881d20669d2a19392c7ac0d9b2831
|
/Prerequisites/Python27/Lib/site-packages/esphome/components/sensor/max6675.py
|
0ea383d4f693857d29a8459d19d278f8d2e144e2
|
[] |
no_license
|
lanbing8023/esphome-tools
|
5c98f513b768f71742dc68ad68271e22652db9ea
|
6b641e2dcb35130432f2409c50e03ff93af5ceec
|
refs/heads/master
| 2020-05-02T13:24:19.744788
| 2019-03-11T02:25:57
| 2019-03-11T02:25:57
| 177,983,108
| 1
| 1
| null | 2019-03-27T11:48:47
| 2019-03-27T11:48:47
| null |
UTF-8
|
Python
| false
| false
| 1,494
|
py
|
import voluptuous as vol
from esphome import pins
from esphome.components import sensor, spi
from esphome.components.spi import SPIComponent
import esphome.config_validation as cv
from esphome.const import CONF_CS_PIN, CONF_ID, CONF_NAME, CONF_SPI_ID, \
CONF_UPDATE_INTERVAL
from esphome.cpp_generator import Pvariable, get_variable
from esphome.cpp_helpers import gpio_output_pin_expression, setup_component
from esphome.cpp_types import App
MAX6675Sensor = sensor.sensor_ns.class_('MAX6675Sensor', sensor.PollingSensorComponent,
spi.SPIDevice)
PLATFORM_SCHEMA = cv.nameable(sensor.SENSOR_PLATFORM_SCHEMA.extend({
cv.GenerateID(): cv.declare_variable_id(MAX6675Sensor),
cv.GenerateID(CONF_SPI_ID): cv.use_variable_id(SPIComponent),
vol.Required(CONF_CS_PIN): pins.gpio_output_pin_schema,
vol.Optional(CONF_UPDATE_INTERVAL): cv.update_interval,
}).extend(cv.COMPONENT_SCHEMA.schema))
def to_code(config):
for spi_ in get_variable(config[CONF_SPI_ID]):
yield
for cs in gpio_output_pin_expression(config[CONF_CS_PIN]):
yield
rhs = App.make_max6675_sensor(config[CONF_NAME], spi_, cs,
config.get(CONF_UPDATE_INTERVAL))
max6675 = Pvariable(config[CONF_ID], rhs)
sensor.setup_sensor(max6675, config)
setup_component(max6675, config)
BUILD_FLAGS = '-DUSE_MAX6675_SENSOR'
def to_hass_config(data, config):
return sensor.core_to_hass_config(data, config)
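# Hedged YAML sketch (not in the original) of how this sensor platform is
# typically declared in an esphome config; the pin, name, and interval are
# assumptions, and a top-level spi: block is assumed to be configured.
#
#   sensor:
#     - platform: max6675
#       name: "Thermocouple Temperature"
#       cs_pin: GPIO5
#       update_interval: 60s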
|
[
"imhsaw@gmail.com"
] |
imhsaw@gmail.com
|
2e05cf353597c2eb858f2a627d9d163053e35801
|
72bce1fbde45f03808f1dc5b8a3d6385d9a3fa61
|
/train_V1.py
|
ca123c889682ec02bd7465b4ab8b2d8b232c67fd
|
[] |
no_license
|
Kimyuhwanpeter/2021-03-12
|
d3e1567899e7648bce66ca5a7f7184032d9222bd
|
cdd9b398bfe3d061dadad2b2039672acd9e2034c
|
refs/heads/main
| 2023-03-18T23:41:17.725492
| 2021-03-12T06:14:55
| 2021-03-12T06:14:55
| 346,961,557
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,923
|
py
|
# -*- coding:utf-8 -*-
from random import random, shuffle
import tensorflow as tf
import easydict
import numpy as np
import os
FLAGS = easydict.EasyDict({"img_size": 224,
"load_size": 266,
"batch_size": 32,
"epochs": 500,
"n_classes": 55,
"lr": 0.001,
"tr_img_path": "D:/[1]DB/[4]etc_experiment/UTK_face/drive-download-20210310T064015Z-001/UTKFace/",
"tr_txt_path": "D:/[1]DB/[4]etc_experiment/UTK_face/crop_face_train.txt",
"re_img_path": "D:/[1]DB/[4]etc_experiment/UTK_face/drive-download-20210310T063149Z-001/ALL/",
"re_txt_path": "D:/[1]DB/[4]etc_experiment/UTK_face/full_face_train.txt",
"te_img_path": "D:/[1]DB/[4]etc_experiment/UTK_face/drive-download-20210310T063149Z-001/ALL/",
"te_txt_path": "D:/[1]DB/[4]etc_experiment/UTK_face/full_face_test.txt",
"train": True,
"pre_checkpoint": False,
"pre_checkpoint_path": "",
"save_checkpoint": ""})
tr_optim = tf.keras.optimizers.Adam(FLAGS.lr)
re_optim = tf.keras.optimizers.Adam(FLAGS.lr)
def func(tr_list, re_list):
tr_img = tf.io.read_file(tr_list)
tr_img = tf.image.decode_jpeg(tr_img, 3)
tr_img = tf.image.resize(tr_img, [FLAGS.img_size, FLAGS.img_size])
tr_img = tf.image.per_image_standardization(tr_img)
re_img = tf.io.read_file(re_list)
re_img = tf.image.decode_jpeg(re_img, 3)
re_img = tf.image.resize(re_img, [FLAGS.img_size, FLAGS.img_size])
re_img = tf.image.per_image_standardization(re_img)
return tr_img, tr_list, re_img, re_list
def te_func(img, lab):
img = tf.io.read_file(img)
img = tf.image.decode_jpeg(img, 3)
img = tf.image.resize(img, [FLAGS.img_size, FLAGS.img_size])
img = tf.image.per_image_standardization(img)
return img, lab
def feature_cal_loss(tr_logits, re_logits):
    # Contrastive-style energy between the two embeddings: squash both into
    # (0, 1), take the per-sample L1 distance, and penalise it quadratically
    # (resembles the genuine-pair term 2*E^2/Q of a contrastive loss).
    tr_logits = tf.nn.sigmoid(tr_logits)
    re_logits = tf.nn.sigmoid(re_logits)
    energy_ft = tf.reduce_sum(tf.abs(tr_logits - re_logits), 1)
    Q = 10
    total_loss = tf.reduce_mean(2 * energy_ft * energy_ft / Q)
    return total_loss
@tf.function
def train_step(tr_img, tr_lab, re_img, re_lab, tr_model, re_model):
    # Feed cropped face images to the tr model and full-shot images to the
    # re model, so the full-shot images can also be used for age estimation.
    # Assume the age label is identical for each cropped/full-shot pair.
with tf.GradientTape(persistent=True) as tape:
tr_logits, tr_class_logits = tr_model(tr_img, True)
re_logits, re_class_logits = re_model(re_img, True)
tr_class_softmax = tf.nn.softmax(tr_class_logits, 1)
re_class_softmax = tf.nn.softmax(re_class_logits, 1)
diff_softmax = tf.abs(tr_class_softmax - re_class_softmax)
feature_loss = feature_cal_loss(tr_logits, re_logits)
cross_entropy_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)(re_lab, diff_softmax)
total_loss = feature_loss + cross_entropy_loss
tr_grads = tape.gradient(total_loss, tr_model.trainable_variables)
re_grads = tape.gradient(total_loss, re_model.trainable_variables)
tr_optim.apply_gradients(zip(tr_grads, tr_model.trainable_variables))
re_optim.apply_gradients(zip(re_grads, re_model.trainable_variables))
return total_loss, feature_loss, cross_entropy_loss
def age_label(tr_name_list, re_name_list):
age_buf = []
for i in range(FLAGS.batch_size):
tr_name = tf.compat.as_str_any(tr_name_list[i].numpy())
re_name = tf.compat.as_str_any(re_name_list[i].numpy())
tr_name = tr_name.split("/")
tr_name = tr_name[-1].split("_")
age = int(tr_name[0]) - 16
age_buf.append(age)
age_buf = tf.convert_to_tensor(age_buf)
return age_buf
@tf.function
def test_step(input, label, model):
_, logits = model(input, False)
logits = tf.nn.softmax(logits, 1)
logits = tf.argmax(logits, 1, tf.int32)
predict = tf.abs(logits - label)
return predict[0]
def main():
tr_model = tf.keras.applications.MobileNetV2(input_shape=(FLAGS.img_size, FLAGS.img_size, 3),
                                                 include_top=False)  # temporary placeholder backbone
re_model = tf.keras.applications.ResNet50V2(input_shape=(FLAGS.img_size, FLAGS.img_size, 3),
                                                include_top=False)  # temporary placeholder backbone
tr_model.summary()
re_model.summary()
h = tr_model.output
h = tf.keras.layers.GlobalAveragePooling2D()(h)
r = re_model.output
r = tf.keras.layers.GlobalAveragePooling2D()(r)
h_logits = tf.keras.layers.Dense(1024)(h)
r_logits = tf.keras.layers.Dense(1024)(r)
h = tf.keras.layers.Dense(FLAGS.n_classes)(h_logits)
r = tf.keras.layers.Dense(FLAGS.n_classes)(r_logits)
tr_model = tf.keras.Model(inputs=tr_model.input, outputs=[h_logits, h])
re_model = tf.keras.Model(inputs=re_model.input, outputs=[r_logits, r])
tr_model.summary()
re_model.summary()
if FLAGS.pre_checkpoint:
        # Checkpoint both networks together with their optimizers.
        ckpt = tf.train.Checkpoint(tr_model=tr_model, re_model=re_model,
                                   tr_optim=tr_optim, re_optim=re_optim)
ckpt_manager = tf.train.CheckpointManager(ckpt, FLAGS.pre_checkpoint_path, 5)
if ckpt_manager.latest_checkpoint:
ckpt.restore(ckpt_manager.latest_checkpoint)
print("===============")
print("* Restored!!! *")
print("===============")
if FLAGS.train:
count = 0
tr_img = np.loadtxt(FLAGS.tr_txt_path, dtype="<U200", skiprows=0, usecols=0)
tr_img = [FLAGS.tr_img_path + img for img in tr_img]
re_img = np.loadtxt(FLAGS.re_txt_path, dtype="<U200", skiprows=0, usecols=0)
re_img = [FLAGS.re_img_path + img for img in re_img]
test_data = np.loadtxt(FLAGS.te_txt_path, dtype="<U200", skiprows=0, usecols=0)
test_data = [FLAGS.te_img_path + img for img in test_data]
test_lab = np.loadtxt(FLAGS.te_txt_path, dtype=np.int32, skiprows=0, usecols=1)
te_gener = tf.data.Dataset.from_tensor_slices((test_data, test_lab))
te_gener = te_gener.map(te_func)
te_gener = te_gener.batch(1)
te_gener = te_gener.prefetch(tf.data.experimental.AUTOTUNE)
for epoch in range(FLAGS.epochs):
T = list(zip(tr_img, re_img))
shuffle(T)
tr_img, re_img = zip(*T)
tr_img, re_img = np.array(tr_img), np.array(re_img)
tr_gener = tf.data.Dataset.from_tensor_slices((tr_img, re_img))
tr_gener = tr_gener.map(func)
tr_gener = tr_gener.batch(FLAGS.batch_size)
tr_gener = tr_gener.prefetch(tf.data.experimental.AUTOTUNE)
tr_idx = len(tr_img) // FLAGS.batch_size
tr_iter = iter(tr_gener)
for step in range(tr_idx):
tr_images, tr_name, re_images, re_name = next(tr_iter)
labels = age_label(tr_name, re_name)
total_loss, feature_loss, cross_entropy_loss = train_step(tr_images, labels,
re_images, labels,
tr_model, re_model)
if count % 10 == 0:
print("Epoch: {} [{}/{}]\ntotal loss = {}\nfeature loss = {}\ncross entropy loss = {}".format(epoch, step + 1, tr_idx, total_loss,
feature_loss, cross_entropy_loss))
if count % 50 == 0 and count != 0:
te_iter = iter(te_gener)
te_idx = len(test_data) // 1
ae = 0
for i in range(te_idx):
test_images, test_labels = next(te_iter)
ae += test_step(test_images, test_labels, re_model)
print("===========================")
print("{} steps test MAE = {}".format(count, ae / te_idx))
print("===========================")
count += 1
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
Kimyuhwanpeter.noreply@github.com
|
3bcef533264cc999482acad54ede4c319059a45c
|
38c677ab6ad24a70319ca0c3b952e597e62991d1
|
/hackbright_web.py
|
7d365aa03731539222261baff7d67bca228a8647
|
[] |
no_license
|
dmcdekker/project-tracker-flask
|
b24ce0a6c0e53ec7b596876d9a53096f492ee285
|
fe7fd68c9d1988d16bffad3ce45421aa334688ef
|
refs/heads/master
| 2020-03-13T03:35:24.274671
| 2018-04-25T03:41:00
| 2018-04-25T03:41:00
| 130,946,878
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,307
|
py
|
"""A web application for tracking projects, students, and student grades."""
from flask import Flask, request, render_template
import hackbright
app = Flask(__name__)
@app.route("/student")
def get_student():
"""Show information about a student."""
github = request.args.get('github')
first, last, github = hackbright.get_student_by_github(github)
grades = hackbright.get_grades_by_github(github)
html = render_template("student_info.html", first=first,
last=last, github=github, grades=grades)
return html
@app.route("/student-search")
def get_student_form():
"""Show form for searching for a student."""
return render_template("student_search.html")
@app.route("/student-add-form")
def student_add_form():
"""Show form for adding a student."""
return render_template("student_add_form.html")
@app.route("/student-add", methods=['POST'])
def student_add():
"""Add a student."""
first = request.form.get('first')
last = request.form.get('last')
github = request.form.get('github')
hackbright.make_new_student(first, last, github)
return render_template('confirmation.html', github=github)
if __name__ == "__main__":
hackbright.connect_to_db(app)
app.run(debug=True, host="0.0.0.0")
|
[
"denisemdekker@gmail.com"
] |
denisemdekker@gmail.com
|
6b97d89ed87820245834127c27c74873a0e8da46
|
29881fa0c087f3d3ce0e27fb51309384266203e1
|
/listings/migrations/0008_listing_expiration_date.py
|
83b7fc387dda027d9abffaaa66e3772326f0ed97
|
[] |
no_license
|
aidant842/mymo
|
0e5ec2a5c73b6755d994467e4afba10141f449ea
|
877e7a38198d1b5effc6c3a63ad12e7166c20a77
|
refs/heads/master
| 2023-07-17T15:30:21.350974
| 2021-08-24T12:43:18
| 2021-08-24T12:43:18
| 340,033,414
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 395
|
py
|
# Generated by Django 3.1.6 on 2021-02-23 11:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('listings', '0007_auto_20210222_1610'),
]
operations = [
migrations.AddField(
model_name='listing',
name='expiration_date',
field=models.DateTimeField(null=True),
),
]
|
[
"aidant842@gmail.com"
] |
aidant842@gmail.com
|
3c558cd0e27631a53814488e91e71d01604fee41
|
6b6f2ebcc9fbf7518ec998e1d8d914dd875742d7
|
/djblets/mail/utils.py
|
5e37af0c35710562a82c3b0cbc9f3279caf60a13
|
[] |
no_license
|
pombredanne/djblets
|
da89e0398fb44250b09e7201c940e54cffbebcb3
|
9c4e4a1e6fa71d59b4f555c6ec5699616eb19335
|
refs/heads/master
| 2022-03-02T00:00:21.921658
| 2022-02-08T11:30:56
| 2022-02-08T11:30:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,312
|
py
|
"""General utility functions for working with e-mail."""
from email.utils import escapesre, parseaddr, specialsre
from django.conf import settings
def build_email_address(email, full_name=None):
"""Build an e-mail address for a To/CC/BCC field from a user's information.
Args:
email (unicode):
The e-mail address.
full_name (unicode, optional):
The optional full name associated with the e-mail address.
Returns:
unicode:
A formatted e-mail address intended for a To/CC/BCC field.
"""
if full_name:
escaped_name = escapesre.sub(r'\\\g<0>', full_name)
if specialsre.search(full_name):
escaped_name = '"%s"' % escaped_name
return '%s <%s>' % (escaped_name, email)
return email
def build_email_address_for_user(user):
"""Build an e-mail address for a To/CC/BCC field from a User.
Args:
user (django.contrib.auth.models.User):
The user.
Returns:
unicode:
A formatted e-mail address intended for a To/CC/BCC field.
"""
return build_email_address(email=user.email,
full_name=user.get_full_name())
def build_email_address_via_service(email, full_name=None, service_name=None,
sender_email=None):
"""Build an e-mail address for sending on behalf of a user via a service.
This will construct a formatted e-mail address that can be safely used
in a :mailheader:`From` field without risking being quarantined/rejected
by DMARC rules.
The address will be in the form of "Full Name via Service Name
<sender@domain.tld>".
Args:
email (unicode):
The unformatted e-mail address of the user.
full_name (unicode, optional):
The full name of the user. If not provided, the username in the
e-mail address will be used.
service_name (unicode, optional):
The name of the service sending the e-mail. If not provided,
``settings.EMAIL_DEFAULT_SENDER_SERVICE_NAME`` will be used.
sender_email (unicode, optional):
The unformatted e-mail address for the sending service. If not
provided, the e-mail address in
:django:setting:`DEFAULT_FROM_EMAIL` will be used.
Returns:
unicode:
A formatted e-mail address safe to use in a :mailheader:`From` field.
"""
if not service_name:
# A service name wasn't specified. We'll try to use the one from
# settings, and if that doesn't exist, we'll use the domain name
# from the sender (assuming it parsed, and if it didn't, there are
# bigger problems we're not going to deal with here).
service_name = (
getattr(settings, 'EMAIL_DEFAULT_SENDER_SERVICE_NAME', None) or
email.split('@')[-1]
)
if not sender_email:
sender_email = parseaddr(settings.DEFAULT_FROM_EMAIL)[1]
# We need a name from the user. If a full name wasn't
# available, use the first part of the e-mail address.
if not full_name:
full_name = email.split('@')[0]
return build_email_address(
email=sender_email,
full_name='%s via %s' % (full_name, service_name))
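# Hedged usage sketch (not in the original module): the addresses are made
# up, DEFAULT_FROM_EMAIL is assumed to be 'noreply@example.com', and
# EMAIL_DEFAULT_SENDER_SERVICE_NAME is assumed unset:
#
#   build_email_address_via_service('jane@corp.example', full_name='Jane Doe')
#   # -> 'Jane Doe via corp.example <noreply@example.com>'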
|
[
"christian@beanbaginc.com"
] |
christian@beanbaginc.com
|
f5de3033852e2a457427fafdf76c59847f85f1cc
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02393/s327666970.py
|
700b176b69a13b7dfa8a6b0295a518a8f481bbf1
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 104
|
py
|
nums = [int(i) for i in input().split()]
nums.sort()
print("{} {} {}".format(nums[0], nums[1], nums[2]))
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
0dc21621af4cce33fbdc36b818eb44f102674aca
|
838d23e9590bc855926628d0f7b4ffe73e108565
|
/Python_Programs/Madlibs_generator.py
|
6729ad7eb3eb4a0bd5cb64dd8890f7d84a433a8c
|
[] |
no_license
|
ArnabBasak/PythonRepository
|
ca475b1bc728ede1e033c54f40392f5b4c3494d4
|
388478fd33c4ed654eb6b1cba5e0cbdcfb90cf0e
|
refs/heads/master
| 2021-07-15T17:05:47.435677
| 2020-07-17T09:09:56
| 2020-07-17T09:09:56
| 84,456,349
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,847
|
py
|
"""
3. Mad Libs Generator
The Goal: Inspired by Summer Son’s Mad Libs project with Javascript.
The program will first prompt the user for a series of inputs a la Mad Libs.
For example, a singular noun, an adjective, etc. Then, once all the information
has been inputted, the program will take that data and place it into a premade
story template. You'll need prompts for user input, and to then print out the full story at the end with the input included.
Concepts to keep in mind:
Strings
Variables
Concatenation
Print
A pretty fun beginner project that gets you thinking about how to manipulate user-inputted data.
Compared to the prior projects, this project focuses far more on strings and concatenation.
Have some fun coming up with some wacky stories for this!
"""
class Madlibs:
def __init__(self):
pass
def userInput(self):
self.name = input("Enter a name of a person ")
self.place = input("Enter the name of a place")
self.animal = input("enter the name of an animal")
self.thing = input("enter the name of random thing")
self.verb = input("enter an action word that is verb")
self.adverb = input("enter an adverb")
self.adjective = input("enter and adjective")
def display(self):
print()
print("Hello {0}".format(self.name))
print("good to see you, how are you?,when did you came to {0},".format(self.place))
print("how is your pet {0},".format(self.animal))
print("what is the status of you buying a {0}".format(self.thing))
print("Do you {0} to office".format(self.verb))
print("Anyway {0} is good for health".format(self.adverb))
print("ofcourse you are a {0} person".format(self.adjective))
ML = Madlibs()
ML.userInput()
ML.display()
|
[
"arnabbasak11@gmail.com"
] |
arnabbasak11@gmail.com
|
e8b8fdcaab85cea4620b71da164f18fd7dbe4449
|
947e71b34d21f3c9f5c0a197d91a880f346afa6c
|
/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/service_check.py
|
ef4585594bb4b167cfabe3c48136d2e3bc2bfe32
|
[
"Apache-2.0",
"MIT",
"GPL-1.0-or-later",
"GPL-2.0-or-later",
"OFL-1.1",
"MS-PL",
"AFL-2.1",
"GPL-2.0-only",
"Python-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
liuwenru/Apache-Ambari-ZH
|
4bc432d4ea7087bb353a6dd97ffda0a85cb0fef0
|
7879810067f1981209b658ceb675ac76e951b07b
|
refs/heads/master
| 2023-01-14T14:43:06.639598
| 2020-07-28T12:06:25
| 2020-07-28T12:06:25
| 223,551,095
| 38
| 44
|
Apache-2.0
| 2023-01-02T21:55:10
| 2019-11-23T07:43:49
|
Java
|
UTF-8
|
Python
| false
| false
| 2,076
|
py
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.format import format
from resource_management.core.resources.system import Execute
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
class FalconServiceCheck(Script):
    pass


# The @OsFamilyImpl decorators below select the Linux or Windows implementation
# at runtime based on the host's OS family.
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class FalconServiceCheckLinux(FalconServiceCheck):
    def service_check(self, env):
        import params
        env.set_params(params)
        # On secure clusters, obtain a Kerberos ticket for the smoke user first.
        if params.security_enabled:
            Execute(format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}"),
                    user=params.smoke_user)
        # "falcon admin -version" is a lightweight liveness probe; retry a few times.
        Execute(format("{falcon_home}/bin/falcon admin -version"),
                user=params.smoke_user,
                logoutput=True,
                tries=3,
                try_sleep=20
                )


@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class FalconServiceCheckWindows(FalconServiceCheck):
    def service_check(self, env):
        import params
        env.set_params(params)
        smoke_cmd = os.path.join(params.stack_root, "Run-SmokeTests.cmd")
        service = "FALCON"
        Execute(format("cmd /C {smoke_cmd} {service}"), user=params.falcon_user, logoutput=True, tries=3, try_sleep=20)


if __name__ == "__main__":
    FalconServiceCheck().execute()
|
[
"ijarvis@sina.com"
] |
ijarvis@sina.com
|
f9aa49ceb8dcba436595b48509f035bc2bdb19d5
|
6daf9fe45d498ab0d9c765ee094bca55e5c14291
|
/polyaxon/polyaxon/config_settings/logging.py
|
f7a9ad2b70c05984062122943c0546f3e0302a97
|
[
"MIT"
] |
permissive
|
vaer-k/polyaxon
|
7b23628093f017852735c893cf0a862cc983911e
|
da13c95d23999145763626f836f9be40a6e8f965
|
refs/heads/master
| 2020-03-13T08:19:15.744002
| 2018-04-25T10:37:39
| 2018-04-25T10:37:39
| 131,041,646
| 0
| 0
| null | 2018-04-25T17:32:05
| 2018-04-25T17:32:05
| null |
UTF-8
|
Python
| false
| false
| 1,828
|
py
|
import os

from polyaxon.utils import ROOT_DIR

LOG_DIRECTORY = ROOT_DIR.child('logs')

if not os.path.exists(LOG_DIRECTORY):
    os.makedirs(LOG_DIRECTORY)

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '[%(asctime)s] %(levelname)s %(message)s [%(name)s:%(lineno)s]',
            'datefmt': '%d/%b/%Y %H:%M:%S'
        },
        'simple': {
            'format': '%(levelname)8s %(message)s [%(name)s]'
        },
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
        },
    },
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
    },
    'handlers': {
        'logfile': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '{}/polyaxon_{}.log'.format(LOG_DIRECTORY, os.getpid()),
            'maxBytes': 1024 * 1024 * 8,  # 8 MByte
            'backupCount': 5,
            'formatter': 'standard',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        },
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'loggers': {
        'polyaxon.monitors': {
            'handlers': ['console', ],
            'propagate': True,
            'level': 'INFO',
        },
        'django.request': {
            'level': 'DEBUG',
            'propagate': True,
            'handlers': ['console', ],
        },
    },
}
CLUSTER_NOTIFICATION_URL = "https://www.google-analytics.com/collect?v=1&tid=UA-89493331-1"
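# Illustrative usage (a minimal sketch; assumes this settings module is active
# and Django has applied LOGGING via logging.config.dictConfig):
#   import logging
#   logger = logging.getLogger('polyaxon.monitors')
#   logger.info('monitor heartbeat')  # handled by the 'console' handler per the loggers map above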
|
[
"mouradmourafiq@gmail.com"
] |
mouradmourafiq@gmail.com
|
89f9ff1fc57d19bfc0ba90652c5632f79ae10a15
|
1f626975077725c2d6fa364a4ba530675566d6e0
|
/userHandling/urls.py
|
0b00f7539a6ad0671af093574b0d0b192e3910d2
|
[] |
no_license
|
Nsokol44/Carthago
|
54a92acfcceef94feda4c3750f6cb469fd8fd571
|
b5dd0c14c05cb21b059ac48ff0b6416c5194c203
|
refs/heads/master
| 2023-02-17T02:36:46.413918
| 2021-01-19T20:33:44
| 2021-01-19T20:33:44
| 331,069,019
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 364
|
py
|
from django.urls import path

from . import views

urlpatterns = [
    path('', views.index, name='user-home'),
    path('register/', views.register, name='register'),
    path('user_login/', views.user_login, name='user_login'),
    path('user_logout/', views.user_logout, name='user_logout'),
    path('profile/', views.profile, name='user_profile'),
]
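# Illustrative lookup by route name (a sketch; the final URL depends on the
# prefix under which this URLconf is included in the project's root urls.py):
#   from django.urls import reverse
#   reverse('user_profile')  # e.g. '/profile/'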
|
[
"nsokol@nicholass-mbp.lan"
] |
nsokol@nicholass-mbp.lan
|
4723274435f75a11c268f333e216e32fd69a877d
|
eb0f13155a6c97a561e4df66f8c96f25b3587eb7
|
/api/sktkr.py
|
5f3313eb06461bf94d6137a07f2892163df44169
|
[] |
no_license
|
jaehyek/stock-analysis
|
3005cdf9a118fa78f854fa8215eda837abf4e75d
|
2f78dacc9aee4254b958ab928ab913c4f3007979
|
refs/heads/master
| 2020-04-20T07:08:00.829939
| 2017-10-27T10:16:32
| 2017-10-27T10:16:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,521
|
py
|
"""
sktkr.py
This script should use sklearn to learn from stock market data.
"""
import io
import pdb
import os
import flask
import datetime as dt
import flask_restful as fr
import numpy as np
import pandas as pd
import sqlalchemy as sql
import sklearn.linear_model as skl
# modules in the py folder:
import api.pgdb as pgdb
# By default, I should train from 20 years of data.
def learn_predict_sklinear(tkr='ABC',yrs=20,mnth='2016-11', features='pct_lag1,slope4,moy'):
"""This function should use sklearn to learn, predict."""
linr_model = skl.LinearRegression()
xtrain_a, ytrain_a, xtest_a, out_df = pgdb.get_train_test(tkr,yrs,mnth,features)
if ((xtrain_a.size == 0) or (ytrain_a.size == 0) or (xtest_a.size == 0)):
return out_df # probably empty too.
# I should fit a model to xtrain_a, ytrain_a
linr_model.fit(xtrain_a,ytrain_a)
# I should predict xtest_a then update out_df
out_df['prediction'] = np.round(linr_model.predict(xtest_a),3).tolist()
out_df['effectiveness'] = np.sign(out_df.pct_lead*out_df.prediction)*np.abs(out_df.pct_lead)
out_df['accuracy'] = (1+np.sign(out_df.effectiveness))/2
algo = 'sklinear'
kmodel = None # sklearn has no kmodel, keras does.
# I should save work to the db:
pgdb.predictions2db(tkr,yrs,mnth,features,algo,out_df,kmodel)
return out_df
def learn_predict_sklinear_yr(tkr='ABC',yrs=20,yr=2016, features='pct_lag1,slope4,moy'):
"""This function should use sklearn to learn and predict for a year."""
empty_df = pd.DataFrame()
yr_l = [empty_df, empty_df] # Ready for pd.concat()
# I should rely on monthy predictions:
for mnth_i in range(1,13):
mnth_s = str(mnth_i).zfill(2)
mnth = str(yr)+'-'+mnth_s
m_df = learn_predict_sklinear(tkr,yrs,mnth, features)
yr_l.append(m_df)
# I should gather the monthy predictions:
yr_df = pd.concat(yr_l, ignore_index=True)
return yr_df
def learn_predict_sklinear_tkr(tkr='ABC',yrs=20, features='pct_lag1,slope4,moy'):
"""This function should use sklearn to learn and predict for a tkr."""
# From db, I should get a list of all months for tkr:
mnth_l = pgdb.getmonths4tkr(tkr,yrs)
# I should rely on monthy predictions:
empty_df = pd.DataFrame()
tkr_l = [empty_df, empty_df] # Ready for pd.concat()
for mnth_s in mnth_l:
m_df = learn_predict_sklinear(tkr,yrs,mnth_s, features)
tkr_l.append(m_df)
# I should gather the monthy predictions:
tkr_df = pd.concat(tkr_l, ignore_index=True)
return tkr_df
'bye'
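# Illustrative call (a sketch; assumes the pgdb-backed tables are populated,
# and the ticker below is made up):
#   df = learn_predict_sklinear(tkr='IBM', yrs=20, mnth='2016-11')
#   df[['prediction', 'effectiveness', 'accuracy']].head()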
|
[
"verystrongjoe@gmail.com"
] |
verystrongjoe@gmail.com
|
953a315f777053b29c6890ea46141a5d1a775b67
|
acb8e84e3b9c987fcab341f799f41d5a5ec4d587
|
/langs/3/h9o.py
|
7c4d926efac165c05ca196fb75ddfc9e4d37a0d0
|
[] |
no_license
|
G4te-Keep3r/HowdyHackers
|
46bfad63eafe5ac515da363e1c75fa6f4b9bca32
|
fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2
|
refs/heads/master
| 2020-08-01T12:08:10.782018
| 2016-11-13T20:45:50
| 2016-11-13T20:45:50
| 73,624,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
import sys


def printFunction(lineRemaining):
    # Print the quoted payload: the tokens between a leading and trailing '"' token.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # data to print
            lineRemaining = lineRemaining[1:-1]
            print(' '.join(lineRemaining))
        else:
            print()


def main(fileName):
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if not data:
                continue  # skip blank lines
            if data[0] == 'h9O':
                printFunction(data[1:])
            else:
                print('ERROR')
                return


if __name__ == '__main__':
    main(sys.argv[1])
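# Illustrative input (hypothetical file contents). Given a line such as:
#   h9O " hello world "
# the script prints "hello world"; any non-blank line whose first token is not
# 'h9O' prints ERROR and stops processing.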
|
[
"juliettaylorswift@gmail.com"
] |
juliettaylorswift@gmail.com
|
c36737d70f36cd5e2a9ae0a8957a169c2ed18c6c
|
8417564be258bf1b8ed1c75cf8cdbcea346239b1
|
/venv/bin/pip3.5
|
26454f64bd409142cbde634ad3a4aa6107b1ac24
|
[] |
no_license
|
rdahal35/djangoproject
|
abebe56f7e5be2a45fb656b030aed2b9c9c6e201
|
8aa0d967018d6eb4097235135661cda2c46d67a3
|
refs/heads/master
| 2021-09-09T23:02:17.107653
| 2018-03-20T05:40:26
| 2018-03-20T05:40:26
| 125,965,879
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
5
|
#!/home/rupesh/python/django/yetanotherproject/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from pip import main

if __name__ == '__main__':
    # Normalize argv[0]: strip setuptools' "-script.py"/"-script.pyw"/".exe"
    # launcher suffixes before handing control to pip.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"rdahal35@gmail.com"
] |
rdahal35@gmail.com
|